diff --git a/.devcontainer/.gitignore b/.devcontainer/.gitignore new file mode 100644 index 0000000000..bddc5a8f6d --- /dev/null +++ b/.devcontainer/.gitignore @@ -0,0 +1,40 @@ +* + +!/assets/ +!/assets/cradles/ +!/assets/cradles/cabal/ +!/assets/cradles/stack/ +!/conf/ +!/conf/shell/ +!/conf/shell/var/ +!/conf/shell/var/tmp/ +!/conf/shell/var/tmp/snippets/ +!/conf/stack/ +!/conf/stack/etc/ +!/conf/stack/etc/stack/ +!/ghc-*/ +!/scripts/ +!/scripts/usr/ +!/scripts/usr/local/ +!/scripts/usr/local/bin/ +!/vsix/ +!/vsix/var/ +!/vsix/var/tmp/ +!/vsix/var/tmp/extensions/ + +!/assets/cradles/cabal/hie.yaml +!/assets/cradles/stack/hie.yaml +!/conf/shell/var/tmp/snippets/*.sh +!/conf/stack/etc/stack/config.yaml +!/ghc-*/devcontainer.json +!/scripts/usr/local/bin/*.sh +!/vsix/var/tmp/extensions/*.vsix + +!/devcontainer.json +!/GHC.Dockerfile +!/init +!/init.cmd +!/LICENSE +!/README.md + +!.gitignore diff --git a/.devcontainer/GHC.Dockerfile b/.devcontainer/GHC.Dockerfile new file mode 100644 index 0000000000..bd54903bd5 --- /dev/null +++ b/.devcontainer/GHC.Dockerfile @@ -0,0 +1,117 @@ +ARG BUILD_ON_IMAGE=quay.io/benz0li/ghc-musl +ARG GHC_VERSION=latest +ARG SUBTAG +ARG HLS_VERSION +ARG STACK_VERSION + +ARG HLS_GHC_VERSION=${HLS_VERSION:+$GHC_VERSION} +ARG HLS_IMAGE_TAG=${HLS_VERSION:-none}-ghc${HLS_GHC_VERSION:-all}${SUBTAG:+-}${SUBTAG} + +ARG STACK_VERSION_OVERRIDE=${STACK_VERSION} + +FROM ${BUILD_ON_IMAGE}:${GHC_VERSION}${SUBTAG:+-}${SUBTAG} AS files + +RUN mkdir /files + +COPY conf/shell /files +COPY conf/stack /files +COPY scripts /files +COPY vsix /files + +## Ensure file modes are correct +RUN find /files -type d -exec chmod 755 {} \; \ + && find /files -type f -exec chmod 644 {} \; \ + && find /files/usr/local/bin -type f -exec chmod 755 {} \; + +FROM quay.io/benz0li/hlssi:${HLS_IMAGE_TAG} AS hlssi + +FROM quay.io/benz0li/hlsi:latest AS hlsi + +FROM ghcr.io/hadolint/hadolint:latest as hsi + +FROM docker.io/koalaman/shellcheck:stable AS sci + +FROM 
${BUILD_ON_IMAGE}:${GHC_VERSION}${SUBTAG:+-}${SUBTAG} + +COPY --from=files /files / + +RUN sysArch="$(uname -m)" \ + ## Ensure that common CA certificates + ## and OpenSSL libraries are up to date + && apk upgrade --no-cache ca-certificates openssl-dev \ + ## Install pip + && apk add --no-cache py3-pip \ + ## Install terminal multiplexers + && apk add --no-cache screen tmux \ + ## Install yamllint + && apk add --no-cache yamllint + +## Update environment +ARG USE_ZSH_FOR_ROOT +ARG LANG +ARG TZ + +ARG LANG_OVERRIDE=${LANG} +ARG TZ_OVERRIDE=${TZ} + +ENV LANG=${LANG_OVERRIDE:-$LANG} \ + TZ=${TZ_OVERRIDE:-$TZ} + + ## Change root's shell to ZSH +RUN if [ -n "$USE_ZSH_FOR_ROOT" ]; then \ + apk add --no-cache zsh shadow; \ + fix-chsh.sh; \ + chsh -s /bin/zsh; \ + fi \ + ## Update timezone if requested + && if [ "$TZ" != "" ]; then \ + apk add --no-cache tzdata; \ + fi \ + ## Info about timezone + && echo "TZ is set to $TZ" \ + ## Add/Update locale if requested + && if [ "$LANG" != "C.UTF-8" ]; then \ + if [ -n "$LANG" ]; then \ + apk add --no-cache musl-locales musl-locales-lang; \ + fi; \ + sed -i "s/LANG=C.UTF-8/LANG=$LANG/" /etc/profile.d/*locale.sh; \ + sed -i "s/LANG:-C.UTF-8/LANG:-$LANG/" /etc/profile.d/*locale.sh; \ + sed -i "s/LC_COLLATE=C/LC_COLLATE=$LANG/" /etc/profile.d/*locale.sh; \ + sed -i "s/LC_COLLATE:-C/LC_COLLATE:-$LANG/" /etc/profile.d/*locale.sh; \ + fi \ + ## Info about locale + && echo "LANG is set to $LANG" + +## Copy binaries as late as possible to avoid cache busting +## Install HLS +COPY --from=hlssi /usr/local /usr/local +## Install HLint +COPY --from=hlsi /usr/local /usr/local +## Install Haskell Dockerfile Linter +COPY --from=hsi /bin/hadolint /usr/local/bin +## Install ShellCheck +COPY --from=sci --chown=root:root /bin/shellcheck /usr/local/bin + +ARG HLS_VERSION +ARG STACK_VERSION + +ARG STACK_VERSION_OVERRIDE + +ENV HLS_VERSION=${HLS_VERSION} \ + STACK_VERSION=${STACK_VERSION_OVERRIDE:-$STACK_VERSION} + +RUN if [ -n 
"$STACK_VERSION_OVERRIDE" ]; then \ + ## Install Stack + cd /tmp || exit ;\ + curl -sSLO https://github.com/commercialhaskell/stack/releases/download/v"$STACK_VERSION"/stack-"$STACK_VERSION"-linux-"$(uname -m)".tar.gz; \ + curl -sSLO https://github.com/commercialhaskell/stack/releases/download/v"$STACK_VERSION"/stack-"$STACK_VERSION"-linux-"$(uname -m)".tar.gz.sha256; \ + sha256sum -cs stack-"$STACK_VERSION"-linux-"$(uname -m)".tar.gz.sha256; \ + tar -xzf stack-"$STACK_VERSION"-linux-"$(uname -m)".tar.gz; \ + if dpkg --compare-versions "$GHC_VERSION" lt "9.2.8"; then \ + mv -f stack-"$STACK_VERSION"-linux-"$(uname -m)"/stack /usr/bin/stack; \ + else \ + mv -f stack-"$STACK_VERSION"-linux-"$(uname -m)"/stack /usr/local/bin/stack; \ + fi; \ + ## Clean up + rm -rf /tmp/*; \ + fi diff --git a/.devcontainer/LICENSE b/.devcontainer/LICENSE new file mode 100644 index 0000000000..fb01005636 --- /dev/null +++ b/.devcontainer/LICENSE @@ -0,0 +1,42 @@ +Copyright (c) 2023-2024 Olivier Benz + +The code in this directory is not part of Stack (the software) and, with the +exceptions noted below, is distributed under the terms of the MIT License: + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +This directory also contains code with other copyrights. The affected files, +their copyrights and license statements are listed below. + +-------------------------------------------------------------------------------- +scripts/fix-chsh.sh +Copyright (c) Microsoft Corporation. All rights reserved. + +Licensed under the MIT License. See +https://github.com/devcontainers/features/blob/main/LICENSE for +license information. + +-------------------------------------------------------------------------------- +.devcontainer/init +Copyright (c) 2025 b-data GmbH +Copyright (c) 2023 Microsoft Corporation + +Distributed under the terms of the MIT License. + +-------------------------------------------------------------------------------- \ No newline at end of file diff --git a/.devcontainer/README.md b/.devcontainer/README.md new file mode 100644 index 0000000000..26e6aadf67 --- /dev/null +++ b/.devcontainer/README.md @@ -0,0 +1,11 @@ +# Dev Containers + +For further information, see +[haskellstack.org](https://docs.haskellstack.org): Get involved \> Contributors +\> Dev Containers or [../doc/dev_containers.md](../doc/dev_containers.md). + +## License + +The code in this directory is not part of Stack (the software) and, with the +exceptions noted in [LICENSE](LICENSE), is distributed under the terms of the +MIT License. 
diff --git a/.devcontainer/assets/cradles/cabal/hie.yaml b/.devcontainer/assets/cradles/cabal/hie.yaml new file mode 100644 index 0000000000..cb6b06a9cb --- /dev/null +++ b/.devcontainer/assets/cradles/cabal/hie.yaml @@ -0,0 +1,19 @@ +cradle: + multi: + - path: "./Setup.hs" + config: + cradle: + direct: + arguments: [] + - path: "./" + config: + cradle: + cabal: + - path: "./src" + component: "lib:stack" + - path: "./app" + component: "stack:exe:stack" + - path: "./tests/integration" + component: "stack:exe:stack-integration-test" + - path: "./tests/unit" + component: "stack:test:stack-unit-test" diff --git a/.devcontainer/assets/cradles/stack/hie.yaml b/.devcontainer/assets/cradles/stack/hie.yaml new file mode 100644 index 0000000000..bcc8ccd9c4 --- /dev/null +++ b/.devcontainer/assets/cradles/stack/hie.yaml @@ -0,0 +1,19 @@ +cradle: + multi: + - path: "./Setup.hs" + config: + cradle: + direct: + arguments: [] + - path: "./" + config: + cradle: + stack: + - path: "./src" + component: "stack:lib" + - path: "./app" + component: "stack:exe:stack" + - path: "./tests/integration" + component: "stack:exe:stack-integration-test" + - path: "./tests/unit" + component: "stack:test:stack-unit-test" diff --git a/.devcontainer/conf/shell/var/tmp/snippets/rc.sh b/.devcontainer/conf/shell/var/tmp/snippets/rc.sh new file mode 100644 index 0000000000..a5b2038353 --- /dev/null +++ b/.devcontainer/conf/shell/var/tmp/snippets/rc.sh @@ -0,0 +1,13 @@ + +# remove potentially present $HOME/.local/bin from PATH +PATH="${PATH/:$HOME\/.local\/bin/}" + +# set PATH so it includes user's private bin if it exists +if [ -d "$HOME/bin" ] && [[ "$PATH" != *"$HOME/bin"* ]] ; then + PATH="$HOME/bin:$PATH" +fi + +# set PATH so it includes user's private bin if it exists +if [ -d "$HOME/.local/bin" ] && [[ "$PATH" != *"$HOME/.local/bin"* ]] ; then + PATH="$HOME/.local/bin:$PATH" +fi diff --git a/.devcontainer/conf/shell/var/tmp/snippets/rc2.sh b/.devcontainer/conf/shell/var/tmp/snippets/rc2.sh new 
file mode 100644 index 0000000000..f4c09b882c --- /dev/null +++ b/.devcontainer/conf/shell/var/tmp/snippets/rc2.sh @@ -0,0 +1,5 @@ + +# set PATH so it includes cabal's bin if it exists +if [ -d "$HOME/.cabal/bin" ] && [[ "$PATH" != *"$HOME/.cabal/bin"* ]] ; then + PATH="$HOME/.cabal/bin:$PATH" +fi diff --git a/.devcontainer/conf/stack/etc/stack/config.yaml b/.devcontainer/conf/stack/etc/stack/config.yaml new file mode 100644 index 0000000000..37c999c3d6 --- /dev/null +++ b/.devcontainer/conf/stack/etc/stack/config.yaml @@ -0,0 +1,4 @@ +# Use only the GHC available on the PATH +system-ghc: true +# Do not automatically install GHC when necessary +install-ghc: false diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000000..f02a53e996 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,57 @@ +{ + "name": "GHC 9.10.3 (default)", + "build": { + "dockerfile": "GHC.Dockerfile", + "args": { + "GHC_VERSION": "9.10.3", + // "SUBTAG": "int-native", + "HLS_VERSION": "2.14.0.0", + "USE_ZSH_FOR_ROOT": "unset-to-use-ash", + "LANG": "C.UTF-8", + "TZ": "" + } + }, + + "initializeCommand": [".devcontainer/init"], + "onCreateCommand": "onCreateCommand.sh", + "postCreateCommand": "postCreateCommand.sh", + + "features": { + "ghcr.io/devcontainers/features/common-utils:2": { + "configureZshAsDefaultShell": true, + "upgradePackages": false, + "username": "vscode", + "userUid": "automatic", + "userGid": "automatic" + } + }, + + "customizations": { + "vscode": { + "extensions": [ + "/var/tmp/extensions/eamodio.gitlens-11.7.0.vsix", + "exiasr.hadolint", + "GitHub.vscode-pull-request-github", + "haskell.haskell", + "mhutchie.git-graph", + "mutantdino.resourcemonitor", + "timonwong.shellcheck" + ], + "settings": { + "gitlens.showWelcomeOnInstall": false, + "gitlens.showWhatsNewAfterUpgrades": false, + "haskell.manageHLS": "PATH", + "resmon.show.battery": false, + "resmon.show.cpufreq": false + } + } + }, + + // Set 
'remoteUser' to 'root' to connect as root instead. + "remoteUser": "vscode", + + "remoteEnv": { + // Pip: Install packages to the user site + "PIP_USER": "1" + } +} diff --git a/.devcontainer/ghc-9.12.4/devcontainer.json b/.devcontainer/ghc-9.12.4/devcontainer.json new file mode 100644 index 0000000000..67bbf5bc44 --- /dev/null +++ b/.devcontainer/ghc-9.12.4/devcontainer.json @@ -0,0 +1,55 @@ +{ + "name": "GHC 9.12.4 (experimental)", + "build": { + "dockerfile": "../GHC.Dockerfile", + "context": "..", + "args": { + "GHC_VERSION": "9.12.4", + // "SUBTAG": "int-native", + "USE_ZSH_FOR_ROOT": "unset-to-use-ash", + "LANG": "C.UTF-8", + "TZ": "" + } + }, + + "initializeCommand": [".devcontainer/init"], + "onCreateCommand": "onCreateCommand.sh", + "postCreateCommand": "postCreateCommand.sh", + + "features": { + "ghcr.io/devcontainers/features/common-utils:2": { + "configureZshAsDefaultShell": true, + "upgradePackages": false, + "username": "vscode", + "userUid": "automatic", + "userGid": "automatic" + } + }, + + "customizations": { + "vscode": { + "extensions": [ + "/var/tmp/extensions/eamodio.gitlens-11.7.0.vsix", + "exiasr.hadolint", + "GitHub.vscode-pull-request-github", + "mhutchie.git-graph", + "mutantdino.resourcemonitor", + "timonwong.shellcheck" + ], + "settings": { + "gitlens.showWelcomeOnInstall": false, + "gitlens.showWhatsNewAfterUpgrades": false, + "resmon.show.battery": false, + "resmon.show.cpufreq": false + } + } + }, + + // Set 'remoteUser' to 'root' to connect as root instead. 
+ "remoteUser": "vscode", + + "remoteEnv": { + // Pip: Install packages to the user site + "PIP_USER": "1" + } +} diff --git a/.devcontainer/ghc-9.14.1/devcontainer.json b/.devcontainer/ghc-9.14.1/devcontainer.json new file mode 100644 index 0000000000..d5f7b08d58 --- /dev/null +++ b/.devcontainer/ghc-9.14.1/devcontainer.json @@ -0,0 +1,55 @@ +{ + "name": "GHC 9.14.1 (experimental)", + "build": { + "dockerfile": "../GHC.Dockerfile", + "context": "..", + "args": { + "GHC_VERSION": "9.14.1", + // "SUBTAG": "int-native", + "USE_ZSH_FOR_ROOT": "unset-to-use-ash", + "LANG": "C.UTF-8", + "TZ": "" + } + }, + + "initializeCommand": [".devcontainer/init"], + "onCreateCommand": "onCreateCommand.sh", + "postCreateCommand": "postCreateCommand.sh", + + "features": { + "ghcr.io/devcontainers/features/common-utils:2": { + "configureZshAsDefaultShell": true, + "upgradePackages": false, + "username": "vscode", + "userUid": "automatic", + "userGid": "automatic" + } + }, + + "customizations": { + "vscode": { + "extensions": [ + "/var/tmp/extensions/eamodio.gitlens-11.7.0.vsix", + "exiasr.hadolint", + "GitHub.vscode-pull-request-github", + "mhutchie.git-graph", + "mutantdino.resourcemonitor", + "timonwong.shellcheck" + ], + "settings": { + "gitlens.showWelcomeOnInstall": false, + "gitlens.showWhatsNewAfterUpgrades": false, + "resmon.show.battery": false, + "resmon.show.cpufreq": false + } + } + }, + + // Set 'remoteUser' to 'root' to connect as root instead. + "remoteUser": "vscode", + + "remoteEnv": { + // Pip: Install packages to the user site + "PIP_USER": "1" + } +} diff --git a/.devcontainer/init b/.devcontainer/init new file mode 100755 index 0000000000..c61ad81e83 --- /dev/null +++ b/.devcontainer/init @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +# Copyright (c) 2025 b-data GmbH +# Copyright (c) 2023 Microsoft Corporation +# Distributed under the terms of the MIT License. 
+# +# https://docs.github.com/en/authentication/connecting-to-github-with-ssh/working-with-ssh-key-passphrases?platform=windows +# +# Modified by b-data for seamless integration with development containers on Linux/macOS. + +set -e + +# Do not execute on GitHub Codespaces +if [ -z "$CODESPACES" ]; then + # Check the state of the currently running instance of the agent + # 0: running with key(s); 1: running w/o key(s); 2: not running + AGENT_RUN_STATE="$(ssh-add -l > /dev/null 2>&1; echo $?)" + # Only add key(s) if running w/o key(s) + if [ "$AGENT_RUN_STATE" = "1" ]; then + ssh-add 2> /dev/null || true + fi + # Means: Not adding key(s) to a forwarded agent with key(s) +fi + +# https://code.visualstudio.com/remote/advancedcontainers/sharing-git-credentials +# +# On Linux: For an X session, the agent is usually started automatically. +# For a login session, add the following to your `~/.bash_profile` +# or `~/.zprofile`: +# +# if [ -z "$SSH_AUTH_SOCK" ]; then +# # Check for a currently running instance of the agent +# RUNNING_AGENT="$(pgrep -f 'ssh-agent -s' -u "$USER" | wc -l | tr -d '[:space:]')" +# if [ "$RUNNING_AGENT" = "0" ]; then +# mkdir -p -m 0700 "$HOME/.ssh" +# # Launch a new instance of the agent +# ssh-agent -s &> "$HOME/.ssh/ssh-agent" +# fi +# eval "$(cat "$HOME/.ssh/ssh-agent")" > /dev/null +# fi diff --git a/test/integration/tests/3940-base-upgrade-warning/files/src/.gitkeep b/.devcontainer/init.cmd similarity index 100% rename from test/integration/tests/3940-base-upgrade-warning/files/src/.gitkeep rename to .devcontainer/init.cmd diff --git a/.devcontainer/scripts/usr/local/bin/fix-chsh.sh b/.devcontainer/scripts/usr/local/bin/fix-chsh.sh new file mode 100755 index 0000000000..84ab89f1cd --- /dev/null +++ b/.devcontainer/scripts/usr/local/bin/fix-chsh.sh @@ -0,0 +1,18 @@ +#!/bin/bash +#------------------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See https://github.com/devcontainers/features/blob/main/LICENSE for license information. +#------------------------------------------------------------------------------------------------------------------------- +# +# Docs: https://github.com/devcontainers/features/tree/main/src/common-utils +# Maintainer: The Dev Container spec maintainers + +set -e + +# Fixing chsh always asking for a password on alpine linux +# ref: https://askubuntu.com/questions/812420/chsh-always-asking-a-password-and-get-pam-authentication-failure. +if [ ! -f "/etc/pam.d/chsh" ] || ! grep -Eq '^auth(.*)pam_rootok\.so$' /etc/pam.d/chsh; then + echo "auth sufficient pam_rootok.so" >> /etc/pam.d/chsh +elif [[ -n "$(awk '/^auth(.*)pam_rootok\.so$/ && !/^auth[[:blank:]]+sufficient[[:blank:]]+pam_rootok\.so$/' /etc/pam.d/chsh)" ]]; then + awk '/^auth(.*)pam_rootok\.so$/ { $2 = "sufficient" } { print }' /etc/pam.d/chsh > /tmp/chsh.tmp && mv /tmp/chsh.tmp /etc/pam.d/chsh +fi diff --git a/.devcontainer/scripts/usr/local/bin/onCreateCommand.sh b/.devcontainer/scripts/usr/local/bin/onCreateCommand.sh new file mode 100755 index 0000000000..22b2650489 --- /dev/null +++ b/.devcontainer/scripts/usr/local/bin/onCreateCommand.sh @@ -0,0 +1,52 @@ +#!/usr/bin/env bash +# Copyright (c) 2023 b-data GmbH. +# Distributed under the terms of the MIT License. + +set -e + +if dpkg --compare-versions "${CABAL_VERSION%.*.*}" le-nl "3.8"; then + mkdir -p "$HOME/.cabal/bin"; +fi +mkdir -p "$HOME/.local/bin" + +# Copy Zsh-related files and folders from the untouched home directory +if [ "$(id -un)" == "root" ]; then + if [ ! -d /root/.oh-my-zsh ]; then + cp -R /home/*/.oh-my-zsh /root; + fi + if [ ! -f /root/.zshrc ]; then + cp /home/*/.zshrc /root; + fi +else + if [ ! -d "$HOME/.oh-my-zsh" ]; then + sudo cp -R /root/.oh-my-zsh "$HOME"; + sudo chown -R "$(id -u)":"$(id -g)" "$HOME/.oh-my-zsh"; + fi + if [ ! 
-f "$HOME/.zshrc" ]; then + sudo cp /root/.zshrc "$HOME"; + sudo chown "$(id -u)":"$(id -g)" "$HOME/.zshrc"; + fi +fi + +# If existent, prepend the user's private bin to PATH +if ! grep -q "user's private bin" "$HOME/.bashrc"; then + cat "/var/tmp/snippets/rc.sh" >> "$HOME/.bashrc" +fi +if ! grep -q "user's private bin" "$HOME/.zshrc"; then + cat "/var/tmp/snippets/rc.sh" >> "$HOME/.zshrc" +fi + +# Set PATH so it includes cabal's bin if it exists +if ! grep -q "cabal's bin" "$HOME/.bashrc"; then + cat "/var/tmp/snippets/rc2.sh" >> "$HOME/.bashrc" +fi +if ! grep -q "cabal's bin" "$HOME/.zshrc"; then + cat "/var/tmp/snippets/rc2.sh" >> "$HOME/.zshrc" +fi + +# Enable Oh My Zsh plugins +sed -i "s/plugins=(git)/plugins=(cabal git pip screen stack tmux vscode)/g" \ + "$HOME/.zshrc" + +# Remove old .zcompdump files +rm -f "$HOME"/.zcompdump* diff --git a/.devcontainer/scripts/usr/local/bin/postCreateCommand.sh b/.devcontainer/scripts/usr/local/bin/postCreateCommand.sh new file mode 100755 index 0000000000..aa15183142 --- /dev/null +++ b/.devcontainer/scripts/usr/local/bin/postCreateCommand.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash +# Copyright (c) 2023 b-data GmbH. +# Distributed under the terms of the MIT License. 
+ +set -e + +# Change ownership of the stack folder +sudo chown "$(id -u)":"$(id -g)" /workspaces/stack + +# Updates list of known packages +cabal update diff --git a/.devcontainer/vsix/var/tmp/extensions/eamodio.gitlens-11.7.0.vsix b/.devcontainer/vsix/var/tmp/extensions/eamodio.gitlens-11.7.0.vsix new file mode 100644 index 0000000000..984e0691b5 Binary files /dev/null and b/.devcontainer/vsix/var/tmp/extensions/eamodio.gitlens-11.7.0.vsix differ diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..0b9805e450 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +/stack.cabal linguist-generated=true diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index ea47429d84..daf617389d 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -3,10 +3,11 @@ name: Bug Report about: Report a bug in Stack --- -Please follow the steps below for reporting a bug: +Please follow the steps below for reporting a bug in Stack: -Make sure that you are using the latest release (currently stack-2.5.1). -See the [upgrade instructions](http://docs.haskellstack.org/en/stable/install_and_upgrade/#upgrade) to upgrade. +Make sure that you are using the latest release (currently Stack 3.9.3). See the +[upgrade instructions](http://docs.haskellstack.org/en/stable/install_and_upgrade/#upgrade) +to upgrade. Please use the following schema for your bug report: @@ -18,10 +19,10 @@ For example: 1. Remove directory *blah*. 2. Run command `stack blah`. -3. Edit file blah. +3. Edit file *blah*. 4. Run command `stack blah`. -Include any `.yaml` configuration if relevant. +Include any `.yaml` configuration, if relevant. ### Expected @@ -31,24 +32,30 @@ What you expected to see and happen. What actually happened. -If you suspect that a stack command misbehaved, please include the output of that command in `--verbose` mode. 
-If the output is larger than a page please paste the output in a [Gist](https://gist.github.com/). +If you suspect that a Stack command misbehaved, please include the output of +that command in `--verbose` mode. If the output is larger than a page please +paste the output in a [Gist](https://gist.github.com/). -``` -$ stack --verbose +~~~text +stack --verbose -``` +~~~ ### Stack version -``` -$ stack --version -Version 1.9.1, Git revision f9d0042c141660e1d38f797e1d426be4a99b2a3c (6168 commits) x86_64 hpack-0.31.0 -``` +~~~text +stack --version +Version 3.9.3, Git revision c7eb8487a82d5c3e0b88d56f8b8a98be23223eb5 x86_64 hpack-0.39.1 +~~~ ### Method of installation -* Official binary, downloaded from stackage.org or fpcomplete's package repository -* Via cabal-install +* Official binary, downloaded via haskellstack.org or from Stack's repository +* Via GHCup +* Via Cabal (the tool) * An unofficial package repository (please specify which) * Other (please specify) + +### Platform + +Your platform (machine architecture and operating system) diff --git a/.github/ISSUE_TEMPLATE/docs_report.md b/.github/ISSUE_TEMPLATE/docs_report.md new file mode 100644 index 0000000000..73e16f0cc5 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/docs_report.md @@ -0,0 +1,21 @@ +--- +name: Documentation Report +about: Report if Stack's documentation appears inaccurate or incomplete +--- + +Please report problems with Stack's in-app help and error messages as a Stack +bug. Please follow the schema below for reporting if Stack's online +documentation appears inaccurate or incomplete: + +### Location of the documentation + +Please provide the link to the documentation in question. All headings have +permanent links associated with them. + +### Is it inaccurate? + +Why do you think the documentation is wrong? + +### Is it incomplete? + +What do you think is missing from the documentation? 
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 32041f5401..699675f815 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -3,8 +3,9 @@ name: Feature Request about: Request a feature be added to Stack, or discuss such a feature --- -Make sure that you are using the latest release (currently stack-2.5.1). -See the [upgrade instructions](http://docs.haskellstack.org/en/stable/install_and_upgrade/#upgrade) to upgrade. +Make sure that you are using the latest release (currently Stack 3.9.3). See the +[upgrade instructions](http://docs.haskellstack.org/en/stable/install_and_upgrade/#upgrade) +to upgrade. -Please state as clearly as possible what feature you are recommending, -with motivation for the reason such a feature should be added. +Please state as clearly as you can what feature you are recommending, and the +motivation for adding the feature. diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md index 4ed1380a44..90e340d705 100644 --- a/.github/ISSUE_TEMPLATE/question.md +++ b/.github/ISSUE_TEMPLATE/question.md @@ -1,27 +1,36 @@ --- name: Question -about: Ask a question about Stack usage +about: Ask a question about how to use Stack --- -Do you have a question regarding stack's usage that isn't covered by [the docs](http://haskellstack.org)? -In that case please ask your question on [Stack Overflow](http://stackoverflow.com) and use [the haskell-stack tag](http://stackoverflow.com/questions/tagged/haskell-stack). -This way your question will be more easily discoverable by other people with the same question. +Do you have a question about how to use Stack that is not covered by the online +[documentation](http://haskellstack.org)? 
In that case please ask your question +at the [Haskell Community](https://discourse.haskell.org/) forum or on +[Stack Overflow](http://stackoverflow.com) (use the +[haskell-stack](http://stackoverflow.com/questions/tagged/haskell-stack) tag). +This way your question will be more easily discoverable by other people with the +same question. -Question related to stack project templates? Please report it at the [stack-templates](https://github.com/commercialhaskell/stack-templates) repository instead. +Is the question related to Stack project templates? If so, please report it at +the [stack-templates](https://github.com/commercialhaskell/stack-templates) +repository instead. -if you still want to ask the question here instead, please make sure that you are using the latest release (current stack-2.1.1). -See the [upgrade instructions](http://docs.haskellstack.org/en/stable/install_and_upgrade/#upgrade) to upgrade. +If you still want to ask the question here instead, please make sure that you +are using the latest release (currently Stack 3.9.3). See the +[upgrade instructions](http://docs.haskellstack.org/en/stable/install_and_upgrade/#upgrade) +to upgrade. 
### Stack version -``` -$ stack --version -Version 1.9.1, Git revision f9d0042c141660e1d38f797e1d426be4a99b2a3c (6168 commits) x86_64 hpack-0.31.0 -``` +~~~text +stack --version +Version 3.9.3, Git revision c7eb8487a82d5c3e0b88d56f8b8a98be23223eb5 x86_64 hpack-0.39.1 +~~~ ### Method of installation -* Official binary, downloaded from stackage.org or fpcomplete's package repository -* Via cabal-install +* Official binary, downloaded via haskellstack.org or from Stack's repository +* Via GHCup +* Via Cabal (the tool) * An unofficial package repository (please specify which) * Other (please specify) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 5f2a40398a..a0613c29cf 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,8 +1,11 @@ -Note: Documentation fixes for https://docs.haskellstack.org/en/stable/ should target the "stable" branch, not master. +Note: Fixes for the online documentation of the current Stack release +(https://docs.haskellstack.org/en/stable/) should target the 'stable' branch, +not the 'master' branch. (The former branch is merged into the latter on +release, if not before.) -Please include the following checklist in your PR: +Please include the following checklist in your pull request: -* [ ] Any changes that could be relevant to users have been recorded in the ChangeLog.md -* [ ] The documentation has been updated, if necessary. +* [ ] Any changes that could be relevant to users have been recorded in ChangeLog.md. +* [ ] The documentation has been updated, if necessary Please also shortly describe how you tested your change. Bonus points for added tests! 
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 861a098570..36880d2a9f 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -4,15 +4,29 @@ on: pull_request: push: branches: - - master - - stable - - rc/** + - master + - stable + - rc/** tags: - - '**' - schedule: - - cron: "0 0 * * *" + - '**' workflow_dispatch: +# Stack will use the value of the GH_TOKEN environment variable to authenticate +# its requests of the GitHub REST API, providing a higher request rate limit. +env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + +# As of 2026-03-01: +# * ubuntu-latest comes with Stack 3.9.3 and GHC 9.14.1; and +# * windows-latest comes with Stack 3.9.1 and GHC 9.14.1. +# However, macos-15-intel and macos-latest do not come with Haskell tools. +# +# windows-latest no longer comes with NSIS 3.10, for which the default value of +# the 'Unicode' installer attribute is 'true'. However, that is not the +# 'large strings' build of NSIS and creates installers that corrupt the PATH +# environment variable if the default string length of 1024 characters is +# exceeded. + jobs: integration-tests: name: Integration tests @@ -23,149 +37,264 @@ jobs: include: - os: ubuntu-latest release-args: "--alpine" + cache-bust: "2025-11-05" + # On public preview since 16 January 2025 + - os: ubuntu-24.04-arm + # Stack's project-level configuration (stack.yaml) specifies the + # multi-architecture (including Linux/Aarch64) Docker image published + # by Oliver Benz (@benz0li, on GitHub). That image comes with + # Stack 3.9.3. (Note that the online documentation for + # '--docker-stack-exe image' specifies that the host Stack and image + # Stack must have the same version number.) 
+ release-args: "--alpine --stack-args --docker-stack-exe=image" + cache-bust: "2025-11-05" - os: windows-latest release-args: "" + cache-bust: "2025-11-05" + # macos-15-intel will be the last that provides macOS/x86_64 + - os: macos-15-intel + release-args: "" + cache-bust: "2025-12-19" + # macos-latest provides macOS/AArch64 (M1) - os: macos-latest release-args: "" + cache-bust: "2024-11-05" steps: - - name: Clone project - uses: actions/checkout@v2 - - name: Cache dependencies - uses: actions/cache@v1 - with: - path: ~/.stack - key: ${{ runner.os }}-${{ hashFiles('stack.yaml') }} - - shell: bash - name: Install deps and run checks - run: | - set -ex - - # Work around 'git status' always showing symlinks modified on Windows; see - # https://github.com/git-for-windows/git/issues/2653#issuecomment-640234081 - git config --global core.fscache false - - stack upgrade || curl -sSL https://get.haskellstack.org/ | sh -s - -f - - if [[ "${{ matrix.os }}" == "ubuntu-latest" ]] - then - # Retry installing nix due to nondeterministic error - # Fatal error: glibc detected an invalid stdio handle - # See: - # https://github.com/nh2/static-haskell-nix/pull/27#issuecomment-502652181 - # https://github.com/NixOS/nix/issues/2733 - (for i in {1..5}; do bash <(curl -sSL https://nixos.org/nix/install) && exit 0; done; exit 1) - . 
~/.nix-profile/etc/profile.d/nix.sh - nix-channel --add https://nixos.org/channels/nixos-19.09 nixpkgs - nix-channel --update # Get GHC 8.2.2 - elif [[ "${{ matrix.os }}" == "windows-latest" ]] - then - choco install nsis-unicode -y - fi + - name: Clone project + uses: actions/checkout@v6 + - name: Cache dependencies on Unix-like OS + if: startsWith(runner.os, 'Linux') || startsWith(runner.os, 'macOS') + uses: actions/cache@v5 + with: + path: ~/.stack + key: ${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('stack.yaml') }}-${{ matrix.cache-bust }} + - name: Cache dependencies on Windows + if: startsWith(runner.os, 'Windows') + uses: actions/cache@v5 + with: + path: | + ~\AppData\Roaming\stack + ~\AppData\Local\Programs\stack + key: ${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('stack.yaml') }}-${{ matrix.cache-bust }} + - name: Install NSIS 3.12 on Windows + if: startsWith(runner.os, 'Windows') + uses: repolevedavaj/install-nsis@v1.2.0 + with: + nsis-version: '3.12' + # Upgrades to a 'large strings' build of NSIS 3.10 tool. See + # https://nsis.sourceforge.io/Special_Builds. + - name: Upgrade NSIS 3.12 on Windows + if: startsWith(runner.os, 'Windows') + shell: bash + run: | + # wget is not available but the Stack-supplied MSYS2 will provide it + stack exec -- wget -O nsis-3.12-strlen_8192.zip https://downloads.sourceforge.net/nsis/NSIS%203/3.12/nsis-3.12-strlen_8192.zip + 7z x -aoa -o"/c/Program Files (x86)/NSIS" nsis-3.12-strlen_8192.zip + # Clean up + rm nsis-3.12-strlen_8192.zip + makensis -VERSION && echo + # Should include defined symbol NSIS_MAX_STRLEN=8192 + makensis -HDRINFO + - name: Install deps and run checks + shell: bash + run: | + set -ex + + if [[ "${{ matrix.os }}" == "ubuntu-24.04-arm" || "${{ matrix.os }}" == "macos-15-intel" || "${{ matrix.os }}" == "macos-latest" ]] + then + # As at 2026-03-01: + # + # * ubuntu-24.04-arm, macos-15-intel and macos-latest do not include + # Haskell tools. 
+ curl -sSL https://get.haskellstack.org/ | sh + fi + + if [[ "${{ matrix.os }}" == "windows-latest" ]] + then + # As at 2026-03-01: + # + # * windows-latest does not include Stack 3.9.3. + stack upgrade + fi + + if [[ "${{ matrix.os }}" == "ubuntu-latest" ]] + then + # Install a faster linker (lld) than Ubuntu's default. + # stack-integration-test will seek to use it as the linker on Linux. + sudo apt-get update + sudo apt-get install -y lld + # Set up Nix for Stack's tests that require it. + # + # Install Nix via the single-user installation... + # + # Retry installing Nix due to nondeterministic error: + # Fatal error: glibc detected an invalid stdio handle + # See: + # https://github.com/nh2/static-haskell-nix/pull/27#issuecomment-502652181 + # https://github.com/NixOS/nix/issues/2733 + (for i in {1..5}; do bash <(curl -sSL https://nixos.org/nix/install) --no-daemon && exit 0; done; exit 1) + # Enter the Nix environment... + . ~/.nix-profile/etc/profile.d/nix.sh + # Add a channel named 'nixpkgs' to the list of subscribed channels... + nix-channel --add https://nixos.org/channels/nixos-25.11 nixpkgs + # Download the Nix expressions for all subscribed channels... + # + # As at 2026-03-01, nixos-25.11 provides GHC 9.10.2. + nix-channel --update + # The NIX_PATH environment variable sets a list of directories used to + # look up the location of Nix expressions using paths enclosed in + # angle brackets (e.g. ). nix.sh no longer sets the NIX_PATH. + # If NIX_PATH is not set, Nix will fall back to + # $HOME/.nix-defexpr/channels, but only in impure and unrestricted + # evaluation mode. See https://github.com/NixOS/nixpkgs/issues/149791. + # Set NIX_PATH... 
+ export NIX_PATH=${NIX_PATH:+$NIX_PATH:}$HOME/.nix-defexpr/channels + fi + + if [[ "${{ matrix.release-args }}" == "--alpine" ]] + then + mkdir -p ~/.stack + touch ~/.stack/config.yaml + cat > ~/.stack/config.yaml <"$asset.sha256" - gpg --digest-algo=sha512 --detach-sig --armor -u 0x575159689BEFB442 "$asset" - done - - name: Set Github ref variables - id: github_ref_vars - run: | - echo ::set-output name=SOURCE_TAG::${GITHUB_REF#refs/tags/} - - name: Create Github release (final) - if: "!startsWith(github.ref, 'refs/tags/rc/')" - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag_name: ${{ github.ref }} - body: | - See https://haskellstack.org/ for installation and upgrade instructions. - - **Changes since v[INSERT PREVIOUS VERSION]:** - - [INSERT CHANGELOG] - - **Thanks to all our contributors for this release:** - - [INSERT CONTRIBUTORS] - draft: true - prerelease: false - - name: Create Github release (release candidate) - if: "startsWith(github.ref, 'refs/tags/rc/')" - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag_name: ${{ github.ref }} - body: | - [APPEND ` (release candidate)` TO RELEASE NAME] - **Changes since v[INSERT PREVIOUS VERSION]:** - - [INSERT CHANGELOG] - draft: true - prerelease: true - - name: Upload assets to Github release (final) - if: "!startsWith(github.ref, 'refs/tags/rc/')" - uses: xresloader/upload-to-github-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - file: "_release/*" - tag_name: ${{ steps.github_ref_vars.outputs.SOURCE_TAG }} - draft: true - prerelease: false - overwrite: true - - name: Upload assets to Github release (release candidate) - if: "startsWith(github.ref, 'refs/tags/rc/')" - uses: xresloader/upload-to-github-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - file: "_release/*" - tag_name: ${{ steps.github_ref_vars.outputs.SOURCE_TAG }} - draft: true - prerelease: true - 
overwrite: true + - name: Download Linux/x86_64 artifact + uses: actions/download-artifact@v7 + with: + name: Linux-X64 + path: _release + - name: Download macOS/x86_64 artifact + uses: actions/download-artifact@v7 + with: + name: macOS-X64 + path: _release + - name: Download macOS/AArch64 artifact + uses: actions/download-artifact@v7 + with: + name: macOS-ARM64 + path: _release + - name: Download Windows/x86_64 artifact + uses: actions/download-artifact@v7 + with: + name: Windows-X64 + path: _release + - name: Download Linux/AArch64 artifact + uses: actions/download-artifact@v7 + with: + name: Linux-ARM64 + path: _release + - name: Hash and sign assets + shell: bash + env: + RELEASE_SIGNING_KEY: ${{ secrets.RELEASE_SIGNING_KEY }} + run: | + set -e + echo "$RELEASE_SIGNING_KEY"|gpg --import + cd _release + for asset in *; do + shasum -a 256 "$asset" >"$asset.sha256" + gpg --digest-algo=sha512 --detach-sig --armor -u 0x575159689BEFB442 "$asset" + done + - name: Create GitHub release (final) + id: github_release_final + if: "!startsWith(github.ref, 'refs/tags/rc/')" + uses: ncipollo/release-action@v1.20.0 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + body: | + See https://haskellstack.org/ for installation and upgrade instructions. 
+ + **Changes since v[INSERT PREVIOUS VERSION]:** + + [INSERT CHANGELOG] + + **Thanks to all our contributors for this release:** + + [INSERT CONTRIBUTORS] + draft: true + prerelease: false + - name: Create GitHub release (release candidate) + id: github_release_rc + if: "startsWith(github.ref, 'refs/tags/rc/')" + uses: ncipollo/release-action@v1.20.0 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + body: | + **Changes since v[INSERT PREVIOUS VERSION]:** + + [INSERT CHANGELOG] + draft: true + prerelease: true + - name: Upload assets to GitHub release (final) + if: "!startsWith(github.ref, 'refs/tags/rc/')" + uses: xresloader/upload-to-github-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + file: "_release/*" + draft: true + prerelease: false + overwrite: true + release_id: ${{ steps.github_release_final.outputs.id }} + - name: Upload assets to GitHub release (release candidate) + if: "startsWith(github.ref, 'refs/tags/rc/')" + uses: xresloader/upload-to-github-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + file: "_release/*" + draft: true + prerelease: true + overwrite: true + release_id: ${{ steps.github_release_rc.outputs.id }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000000..2e4ef930b8 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,32 @@ +name: Linting + +on: + pull_request: + push: + branches: + - master + - stable + - rc/** + +jobs: + style: + name: Linting + runs-on: ubuntu-latest + steps: + - name: Clone project + uses: actions/checkout@v6 + - name: Apply yamllint + uses: ibiqlik/action-yamllint@v3 + with: + format: github + - name: Set up HLint + uses: haskell-actions/hlint-setup@v2 + with: + version: "3.10" + - name: Apply HLint + run: | + set -ex + hlint app/ + hlint src/ + hlint --hint=.hlint-test.yaml tests/unit + hlint --hint=.hlint-test.yaml tests/integration diff --git a/.github/workflows/stan.yml 
b/.github/workflows/stan.yml new file mode 100644 index 0000000000..1a81dfecc9 --- /dev/null +++ b/.github/workflows/stan.yml @@ -0,0 +1,39 @@ +name: Apply stan + +on: + pull_request: + workflow_dispatch: + +jobs: + build: + name: Apply stan + runs-on: ubuntu-latest + + steps: + - name: Clone project + uses: actions/checkout@v6 + + - name: Cache dependencies + uses: actions/cache@v5 + with: + path: ~/.stack + key: ${{ runner.os }}-${{ hashFiles('stack.yaml') }} + + - name: Install stan + run: | + git clone https://github.com/kowainik/stan.git + cd stan + stack --local-bin-path ../.bin install + cd .. + + - name: Generate .hie for analysis + run: stack build stack:lib + + - name: Run stan + run: .bin/stan report --cabal-file-path=stack.cabal + + - name: Upload HTML report + uses: actions/upload-artifact@v6 + with: + name: Stan_report + path: stan.html diff --git a/.github/workflows/tmate.yml b/.github/workflows/tmate.yml new file mode 100644 index 0000000000..f5d32c289d --- /dev/null +++ b/.github/workflows/tmate.yml @@ -0,0 +1,30 @@ +# See https://github.com/commercialhaskell/stack/issues/6252#issuecomment-1732106343 +# See https://github.com/mxschmitt/action-tmate + +name: tmate +on: + workflow_dispatch: + inputs: + debug_enabled: + type: boolean + description: 'Run with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)' + required: false + default: false + +jobs: + debug: + runs-on: ubuntu-latest + steps: + - name: Sign assets + shell: bash + env: + RELEASE_SIGNING_KEY: ${{ secrets.RELEASE_SIGNING_KEY }} + run: | + echo "$RELEASE_SIGNING_KEY"|gpg --import + # Enable tmate debugging of manually-triggered workflows if the input + # option was provided + - name: Setup tmate session + uses: mxschmitt/action-tmate@v3 + if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }} + with: + limit-access-to-actor: true diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 
a32c377b6b..fd955780a5 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -4,41 +4,36 @@ on: pull_request: push: branches: - - master - - stable - - rc/** + - master + - stable + - rc/** workflow_dispatch: +# As of 2026-03-01: +# * ubuntu-latest comes with Stack 3.9.3 and GHC 9.14.1; and +# * windows-latest comes with Stack 3.9.1 and GHC 9.14.1. +# However, macos-15-intel and macos-latest do not come with Haskell tools. + jobs: - style: - name: Style - runs-on: ubuntu-latest - steps: - - name: Clone project - uses: actions/checkout@v2 - - name: hlint - run: | - set -ex - ./etc/scripts/get-hlint.sh - export PATH="$(pwd)"/hlint:$PATH - hlint src/ - hlint src/ --cpp-define=WINDOWS=1 - hlint test/ --cpp-simple pedantic: name: Pedantic runs-on: ubuntu-latest steps: - - name: Clone project - uses: actions/checkout@v2 - - name: Cache dependencies - uses: actions/cache@v1 - with: - path: ~/.stack - key: ${{ runner.os }}-${{ hashFiles('stack.yaml') }} - restore-keys: | - ${{ runner.os }}- - - name: Pedantic build - run: stack --system-ghc build --pedantic + - name: Clone project + uses: actions/checkout@v6 + - name: Cache dependencies + uses: actions/cache@v5 + with: + path: ~/.stack + key: ${{ runner.os }}-${{ hashFiles('stack.yaml') }} + restore-keys: | + ${{ runner.os }}- + - name: Pedantic build + run: | + stack build --pedantic + - name: disable-git-info build + run: | + stack build --pedantic --flag stack:disable-git-info unit-tests: name: Unit tests runs-on: ${{ matrix.os }} @@ -47,60 +42,84 @@ jobs: matrix: include: - os: ubuntu-latest - stack-yaml: stack-ghc-84.yaml - extra-suffix: "" - stack-args: "" - - os: ubuntu-latest - stack-yaml: stack-ghc-86.yaml - extra-suffix: "" - stack-args: "" - - os: ubuntu-latest - stack-yaml: stack-ghc-88.yaml extra-suffix: "" stack-args: "" - os: ubuntu-latest - stack-yaml: stack-ghc-810.yaml - extra-suffix: "" - stack-args: "" - - os: ubuntu-latest - stack-yaml: stack.yaml extra-suffix: 
"alpine" stack-args: "--docker --system-ghc --no-install-ghc --flag stack:static" - os: windows-latest - stack-yaml: stack.yaml extra-suffix: "" stack-args: "" - os: macos-latest - stack-yaml: stack.yaml extra-suffix: "" stack-args: "" steps: - - name: Clone project - uses: actions/checkout@v2 - - name: Cache dependencies - uses: actions/cache@v1 - with: - path: ~/.stack - key: ${{ runner.os }}-${{ hashFiles('${{ matrix.stack-yaml }}') }}-${{ matrix.extra-suffix }} - - shell: bash - run: | - set -ex - stack upgrade || curl -sSL https://get.haskellstack.org/ | sh -s - -f - stack test ${{ matrix.stack-args }} --haddock --no-haddock-deps --ghc-options="-Werror" --copy-bins --local-bin-path bin + - name: Clone project + uses: actions/checkout@v6 + - name: Cache dependencies on Unix-like OS + if: startsWith(runner.os, 'Linux') || startsWith(runner.os, 'macOS') + uses: actions/cache@v5 + with: + path: ~/.stack + key: ${{ runner.os }}-${{ hashFiles('stack.yaml') }}-${{ matrix.extra-suffix }} + - name: Cache dependencies on Windows + if: startsWith(runner.os, 'Windows') + uses: actions/cache@v5 + with: + path: | + ~\AppData\Roaming\stack + ~\AppData\Local\Programs\stack + key: ${{ runner.os }}-${{ hashFiles('stack.yaml') }}-${{ matrix.extra-suffix }} + + - name: Run tests + shell: bash + run: | + set -ex + + if [[ "${{ matrix.os }}" == "macos-latest" ]] + then + # As at 2026-03-01: + # + # * macos-latest does not include Haskell tools. + curl -sSL https://get.haskellstack.org/ | sh + fi + + if [[ "${{ matrix.os }}" == "windows-latest" ]] + then + # As at 2026-03-01: + # + # * windows-latest does not include Stack 3.9.3. 
+ stack upgrade + fi + + if [[ "${{ matrix.extra-suffix }}" == "alpine" ]] + then + mkdir -p ~/.stack + touch ~/.stack/config.yaml + cat > ~/.stack/config.yaml <\build\stack\autogen\Paths_stack.hs +[[check]] + id = "STAN-0004" + directory = ".stack-work\\dist" + type = "Exclude" + +# Partial: base/last +# On Unix-like operating systems +# To exclude .stack-work/dist//build/stack/autogen/Paths_stack.hs +[[check]] + id = "STAN-0004" + directory = ".stack-work/dist" + type = "Exclude" + +# Partial: ghc-internal/succ +# Usage of partial function 'succ' for enumerable types +# Stack uses Enum in many places +[[check]] + id = "STAN-0010" + scope = "all" + type = "Exclude" + +# Partial: ghc-internal/pred +# Usage of partial function 'pred' for enumerable types +# Stack uses Enum in many places +[[check]] + id = "STAN-0011" + scope = "all" + type = "Exclude" + +# Partial: ghc-internal/toEnum +# Usage of partial function 'toEnum' for enumerable types +# Stack uses Enum in many places +[[check]] + id = "STAN-0012" + scope = "all" + type = "Exclude" + +# Infinite: base/reverse +# Usage of the 'reverse' function that hangs on infinite lists +# Stack uses Data.List.reverse in many places +[[check]] + id = "STAN-0101" + scope = "all" + type = "Exclude" + +# Infinite: base/isSuffixOf +# Usage of the 'isSuffixOf' function that hangs on infinite lists +[[ignore]] + id = "OBS-STAN-0102-luLR/n-540:30" +# ✦ Category: #Infinite #List +# ✦ File: src\Stack\New.hs +# +# 539 ┃ +# 540 ┃ let isPkgSpec f = ".cabal" `L.isSuffixOf` f || "package.yaml" `L.isSuffixOf` f +# 541 ┃ ^^^^^^^^^^^^^^ + +# Infinite: base/isSuffixOf +# Usage of the 'isSuffixOf' function that hangs on infinite lists +[[ignore]] + id = "OBS-STAN-0102-luLR/n-540:65" +# ✦ Category: #Infinite #List +# ✦ File: src\Stack\New.hs +# +# 539 ┃ +# 540 ┃ let isPkgSpec f = ".cabal" `L.isSuffixOf` f || "package.yaml" `L.isSuffixOf` f +# 541 ┃ ^^^^^^^^^^^^^^ + +# Infinite: ghc-internal/isSuffixOf +# Usage of the 'isSuffixOf' function 
that hangs on infinite lists +[[ignore]] + id = "OBS-STAN-0102-8cspI6-404:41" +# ✦ Category: #Infinite #List +# ✦ File: src\Stack\Coverage.hs +# +# 403 ┃ +# 404 ┃ pure (filter ((".tix" `L.isSuffixOf`) . toFilePath) files) +# 405 ┃ ^^^^^^^^^^^^^^ + +# Infinite: ghc-internal/isSuffixOf +# Usage of the 'isSuffixOf' function that hangs on infinite lists +[[ignore]] + id = "OBS-STAN-0102-8cspI6-437:31" +# ✦ Category: #Infinite #List +# ✦ File: src\Stack\Coverage.hs +# +# 436 ┃ +# 437 ┃ pure (filter ((".tix" `L.isSuffixOf`) . toFilePath) files) +# 438 ┃ ^^^^^^^^^^^^^^ + +# Infinite: ghc-internal/isSuffixOf +# Usage of the 'isSuffixOf' function that hangs on infinite lists +[[ignore]] + id = "OBS-STAN-0102-8cspI6-668:30" +# ✦ Category: #Infinite #List +# ✦ File: src\Stack\Coverage.hs +# +# 667 ┃ +# 668 ┃ pure (filter ((".tix" `L.isSuffixOf`) . toFilePath) files) +# 669 ┃ ^^^^^^^^^^^^^^ + +# Infinite: ghc-internal/isSuffixOf +# Usage of the 'isSuffixOf' function that hangs on infinite lists +[[ignore]] + id = "OBS-STAN-0102-hTeu0Y-664:23" +# ✦ Category: #Infinite #List +# ✦ File: src\Stack\Init.hs +# +# 663 ┃ +# 664 ┃ isCabal = (".cabal" `isSuffixOf`) . toFilePath +# 665 ┃ ^^^^^^^^^^^^ + +# Infinite: base/length +# Usage of the 'length' function that hangs on infinite lists +# Stack uses Data.List.length in many places +[[check]] + id = "STAN-0103" + scope = "all" + type = "Exclude" + +# Anti-pattern: Data.ByteString.Char8.pack +[[ignore]] + id = "OBS-STAN-0203-erw24B-1079:3" +# ✦ Description: Usage of 'pack' function that doesn't handle Unicode characters +# ✦ Category: #AntiPattern +# ✦ File: src\Stack\Build\ExecuteEnv.hs +# +# 1078 ┃ +# 1079 ┃ S8.pack . 
formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%S%6Q" +# 1080 ┃ ^^^^^^^ + +# Anti-pattern: Data.ByteString.Char8.pack +[[ignore]] + id = "OBS-STAN-0203-tuE+RG-252:24" +# ✦ Description: Usage of 'pack' function that doesn't handle Unicode characters +# ✦ Category: #AntiPattern +# ✦ File: src\Stack\Build\ExecutePackage.hs +# +# 249 ┃ +# 250 ┃ newConfigFileRoot <- S8.pack . toFilePath <$> view configFileRootL +# 251 ┃ ^^^^^^^ + +# Anti-pattern: Data.ByteString.Char8.pack +[[ignore]] + id = "OBS-STAN-0203-hTeu0Y-395:17" +# ✦ Description: Usage of 'pack' function that doesn't handle Unicode characters +# ✦ Category: #AntiPattern +# ✦ File: src\Stack\Init.hs +# +# 394 ┃ +# 395 ┃ commentHelp = BC.pack . intercalate "\n" . map commentLine +# 396 ┃ ^^^^^^^ + +# Anti-pattern: Data.ByteString.Char8.pack +[[ignore]] + id = "OBS-STAN-0203-hTeu0Y-412:26" +# ✦ Description: Usage of 'pack' function that doesn't handle Unicode characters +# ✦ Category: #AntiPattern +# ✦ File: src\Stack\Init.hs +# +# 411 ┃ +# 412 ┃ <> B.byteString (BC.pack $ concat +# 413 ┃ ^^^^^^^ + +# Anti-pattern: Data.ByteString.Char8.pack +[[ignore]] + id = "OBS-STAN-0203-axv1UG-351:30" +# ✦ Description: Usage of 'pack' function that doesn't handle Unicode characters +# ✦ Category: #AntiPattern +# ✦ File: src\Stack\Docker.hs +# +# 350 ┃ +# 351 ┃ hashRepoName = Hash.hash . BS.pack . 
takeWhile (\c -> c /= ':' && c /= '@') +# 352 ┃ ^^^^^^^ + +# Data types with non-strict fields +# Defining lazy fields in data types can lead to unexpected space leaks +# Stack uses lazy fields in many places +[[check]] + id = "STAN-0206" + scope = "all" + type = "Exclude" + +# Anti-pattern: Slow 'length' for Text +# On Windows +[[check]] + id = "STAN-0208" + file = "src\\Stack\\Build\\ConstructPlan.hs" + type = "Exclude" + +# Anti-pattern: Slow 'length' for Text +# On Unix-like operating systems +[[check]] + id = "STAN-0208" + file = "src/Stack/Build/ConstructPlan.hs" + type = "Exclude" + +# Anti-pattern: Slow 'length' for Text +# On Windows +[[check]] + id = "STAN-0208" + file = "src\\Stack\\Build\\Execute.hs" + type = "Exclude" + +# Anti-pattern: Slow 'length' for Text +# On Unix-like operating systems +[[check]] + id = "STAN-0208" + file = "src/Stack/Build/Execute.hs" + type = "Exclude" + +# Anti-pattern: Slow 'length' for Text +# On Windows +[[check]] + id = "STAN-0208" + file = "src\\Stack\\PackageDump.hs" + type = "Exclude" + +# Anti-pattern: Slow 'length' for Text +# On Unix-like operating systems +[[check]] + id = "STAN-0208" + file = "src/Stack/PackageDump.hs" + type = "Exclude" + +# Anti-pattern: unsafe functions +[[ignore]] + id = "OBS-STAN-0212-5rtOmw-499:33" +# ✦ Description: Usage of unsafe functions breaks referential transparency +# ✦ Category: #Unsafe #AntiPattern +# ✦ File: src\Stack\Constants.hs +# +# 498 ┃ +# 499 ┃ setupGhciShimCode = byteString $(do +# 500 ┃ path <- makeRelativeToProject "src/setup-shim/StackSetupShim.hs" +# 501 ┃ embedFile path) +# 502 ┃ + +# Anti-pattern: unsafe functions +[[ignore]] + id = "OBS-STAN-0212-FNS1cF-68:17" +# ✦ Description: Usage of unsafe functions breaks referential transparency +# ✦ Category: #Unsafe #AntiPattern +# ✦ File: src\Stack\BuildOpts.hs +# +# 67 ┃ +# 68 ┃ buildMonoid = undefined :: BuildOptsMonoid +# 69 ┃ ^^^^^^^^^ + +# Anti-pattern: unsafe functions +[[ignore]] + id = "OBS-STAN-0212-FNS1cF-81:14" 
+# ✦ Description: Usage of unsafe functions breaks referential transparency +# ✦ Category: #Unsafe #AntiPattern +# ✦ File: src\Stack\BuildOpts.hs +# +# 79 ┃ +# 80 ┃ toMonoid = undefined :: TestOptsMonoid +# 81 ┃ ^^^^^^^^^ + +# Anti-pattern: unsafe functions +[[ignore]] + id = "OBS-STAN-0212-FNS1cF-92:15" +# ✦ Description: Usage of unsafe functions breaks referential transparency +# ✦ Category: #Unsafe #AntiPattern +# ✦ File: src/Stack/BuildOpts.hs +# +# 90 ┃ +# 91 ┃ beoMonoid = undefined :: BenchmarkOptsMonoid +# 92 ┃ ^^^^^^^^^ + +# Anti-pattern: Pattern matching on '_' +# Pattern matching on '_' for sum types can create maintainability issues +# Stack uses pattern matching on '_' in many places. +[[check]] + id = "STAN-0213" + scope = "all" + type = "Exclude" + +# Big tuples +# Using tuples of big size (>= 4) can decrease code readability +# In several places Stack uses 4-tuples and in one place Stack uses a +# 5-tuple. +[[check]] + id = "STAN-0302" + scope = "all" + type = "Exclude" diff --git a/.yamllint.yaml b/.yamllint.yaml new file mode 100644 index 0000000000..7c4b3d092b --- /dev/null +++ b/.yamllint.yaml @@ -0,0 +1,18 @@ +# Configuration file for yamllint +extends: default +rules: + comments: + min-spaces-from-content: 1 + document-start: disable + indentation: + spaces: 2 + indent-sequences: false + line-length: disable + new-lines: + type: platform + truthy: + check-keys: false + +# .github/workflows/arm64-release.yml +# .github/workflows/integration-tests.yml +# .github/workflows/unit-tests.yml diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 46632811fa..bc4af1a287 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,5 +1,113 @@ # Contributors Guide +Thank you for considering contributing to the maintenance or development of +Stack, or otherwise supporting users of Stack! We hope that the following +information will encourage and assist you. We start with some advice about +Stack's goals and governance, and approach to supporting users. 
+ +## Stack's goals + +Stack's current goals are: + +* To provide easy to use tooling for Haskell development +* To provide complete support for at least the following three development + environments: Linux, macOS, and Windows +* To address the needs of industrial users, open source maintainers, and other + people +* To focus on the 'curated package set' use case +* To prioritize reproducible build plans + +The goals above are not set in stone. However, any major changes to them +should involve significant public discussion and a public vote by the Stack +maintainer team. + +## Stack's governance + +People involved in maintaining or developing Stack with rights to make commits +to the repository can be classified into two groups: 'committers' and +'maintainers'. + +### Stack's committers + +We encourage a wide range of people to be granted rights to make commits to the +repository. + +People are encouraged to take initiative to make non-controversial +changes, such as documentation improvements, bug fixes, performance +improvements, and feature enhancements. + +Maintainers should be included in discussions of controversial changes and +tricky code changes. + +Our general approach is **"it is easier to ask forgiveness than permission"**. +If there is ever a bad change, it can always be rolled back. + +### Stack's maintainers + +Stack's maintainers are long-term contributors to the project. Michael Snoyman +(@snoyberg) was the founder of Stack, and its initial maintainer - and he has +added others. Michael's current interests and priorities mean that he is no +longer actively involved in adding new features to Stack. 
+ +Maintainers are recognized for their contributions including: + +* Direct code contribution +* Review of pull requests +* Interactions on the GitHub issue tracker +* Documentation management +* External support - for example, hosting or training + +The maintainer team make certain decisions when that is necessary, specifically: + +* How to proceed, if there is disagreement on how to do so on a specific topic +* Whether to add or remove (see further below) a maintainer + +Generally, maintainers are only removed due to non-participation or actions +unhealthy to the project. Removal due to non-participation is not a punishment, +simply a recognition that maintainership is for active participants only. + +We hope that removal due to unhealthy actions will never be necessary, but would +include protection for cases of: + +* Disruptive behavior in public channels related to Stack +* Impairing the codebase through bad commits/merges + +Like committers, maintainers are broadly encouraged to make autonomous +decisions. Each maintainer is empowered to make a unilateral decision. However, +maintainers should favor getting consensus first if: + +* They are uncertain what is the best course of action +* They anticipate that other maintainers or users of Stack will disagree on the + decision + +## Stack's support + +A large part of the general discussion around Stack is on support-related +topics, and that is reflected in the current issue tracker content. Assistance +in responding to such matters is greatly appreciated. + +While support-related matters can be posted here as an 'issue', we encourage the +use of other forums, in particular the +[Haskell Community](https://discourse.haskell.org/) forum. See its 'Learn' +category. We also recommend that forum for general discussions about Stack's +current or desired features. 
+ +Stack is also discussed: + +* in the Haskell + [Stack and Stackage](https://matrix.to/#/#haskell-stack:matrix.org) room + (address `#haskell-stack:matrix.org`) on [Matrix](https://matrix.org/); and + +* on Reddit's [Haskell community](https://www.reddit.com/r/haskell/). + +We encourage use of those other forums because support-related discussions can +clog up the issue tracker and make it more difficult to maintain the project. +People needing support may also get a faster and fuller response on other +forums. + +Additions to the issue tracker are better suited to concrete feature proposals, +bug reports, and other code base discussions (for example, refactorings). + ## Bug Reports Please [open an issue](https://github.com/commercialhaskell/stack/issues/new) @@ -12,35 +120,209 @@ you. Once you have tested and confirmed that the issue is resolved, close the issue. If you are not a member of the project, you will be asked for confirmation and we will close it. - ## Documentation -If you would like to help with documentation, please note that for most cases -the Wiki has been deprecated in favor of markdown files placed in a new `/doc` -subdirectory of the repository itself. Please submit a +Consistent with its goal of being easy to use, Stack aims to maintain a high +quality of in-tool and online documentation. + +The in-tool documentation includes the output when the `--help` flag is +specified and the content of Stack's warning and error messages. + +When drafting documentation it is helpful to have in mind the intended reader +and what they are assumed to know, and not know, already. In that regard, +documentation should aim to meet, at least, the needs of a person who is about +to begin to study computing as an undergraduate but who has not previously +coded using Haskell. That person may be familiar with one popular operating +system but may not be familiar with others. 
+ +The files which make up Stack's online documentation are located in the `doc` +directory of the repository. They are formatted in the +[Markdown syntax](https://daringfireball.net/projects/markdown/), with some +extensions. + +Those files are rendered on [haskellstack.org](http://haskellstack.org) by +[Read the Docs](https://readthedocs.org/) using +[MkDocs](https://www.mkdocs.org/) and the +[Material for MkDocs](https://squidfunk.github.io/mkdocs-material/) theme. The +`stable` branch of the repository provides the 'stable' version of the online +documentation. The `master` branch provides the 'latest' version of the +documentation. + +The 'stable' version of the online documentation is intended to be applicable to +the latest released version of Stack. If you would like to help with that +documentation, please submit a [pull request](https://help.github.com/articles/using-pull-requests/) with your -changes/additions based off the [the stable branch](https://github.com/commercialhaskell/stack/tree/stable). +changes/additions based off the +[stable branch](https://github.com/commercialhaskell/stack/tree/stable). + +The Markdown files are organised into the navigation menu (the table of +contents) in the file `mkdocs.yml`, the configuration file for MkDocs. The +description of a file in the menu can differ from the file's name. The +navigation menu allows files to be organised in a hierarchy. Currently, up to +three levels are used. The top level is: + +* **Welcome!:** The introduction to Stack. This page aims to be no longer than + necessary but also to not assume much existing knowledge on the part of the + reader. It provides a 'quick start' guide to getting and using Stack. +* **How to get & use Stack:** This includes Stack's user's guide, answers to + frequently asked questions, and more thorough explanations of aspects of + Stack. The user's guide is divided into two parts. The first part is + 'introductory', and has the style of a tutorial. 
The second part is + 'advanced', and has more of a reference style. +* **How Stack works (advanced):** Many users will not need to consult this + advanced documentation. +* **Stack's code (advanced):** Other information useful to people contributing + to, or maintaining, Stack's code, documentation, and other files. +* **Signing key:** How Stack's released executables are signed. +* **Glossary:** A glossary of terms used throughout Stack's in-tool and online + documentation. We aim to describe the same things in the same way in different + places. +* **Version history:** The log of changes to Stack between versions. + +The specific versions of the online documentation (eg `v: v2.9.1`) are generated +from the content of files at the point in the repository's history specified by +the corresponding release tag. Consequently, that content is fixed once +released. + +If the names of Markdown files do not change between versions, then people can +use the flyout on the online documentation to move between different versions of +the same page. For that reason, the names of new Markdown files should be chosen +with care and existing Markdown files should not be deleted or renamed without +due consideration of the consequences. + +The Markdown syntax supported by MkDocs and the Material for MkDocs theme can +differ from the GitHub Flavored Markdown ([GFM](https://github.github.com/gfm/)) +supported for content on GitHub.com. Please refer to the +[MkDocs documentation](https://www.mkdocs.org/user-guide/writing-your-docs/#writing-with-markdown) +and the +[Material for MkDocs reference](https://squidfunk.github.io/mkdocs-material/reference/) +to ensure your pull request will achieve the desired rendering. 
+ +The extensions to the basic Markdown syntax used are set out in `mkdocs.yml` and +include: + +* admonitions +* code blocks, with syntax highlighting provided by + [Pygments](https://pygments.org/) +* content tabs, which can be nested +* icons and emojis + +The files in the `doc` directory of the repository include two symbolic links +(symlinks), `ChangeLog.md` and `CONTRIBUTING.md`. Users of Git on Windows should +be aware of its approach to symbolic links. See the +[Git for Windows Wiki](https://github.com/git-for-windows/git/wiki/Symbolic-Links). +If `git config --show-scope --show-origin core.symlinks` is `false` in a local +repository on Windows, then the files will be checked out as small plain files +that contain the link text. See the +[Git documentation](https://git-scm.com/docs/git-config#Documentation/git-config.txt-coresymlinks). + +The online documentation can be previewed using the `mkdocs` tool, as described +in [Getting Started with MkDocs](https://www.mkdocs.org/getting-started/). The +prerequisites are: + +* [Python](https://www.python.org/); and +* the required Python packages set out in `doc/requirements.txt`. They can be + installed using Python's package manager `pip` with: + + ~~~text + pip install --requirement doc/requirements.txt + ~~~ + +Once the required version of `mkdocs` is installed, command `mkdocs serve` in +the same directory as the `mkdocs.yml` file to start a web server. The command +will, eventually, output the URL at which the documentation is being served. + +Command `mkdocs build` to build the documentation. + +=== "Windows" + + With the correct prerequisites (see further below), users of the `make` tool + in the Stack-supplied MSYS2 environment can automate some of these steps + from Stack's project directory with: + + * preview: `stack exec -- make docs-serve`; and + * build: `stack exec -- make _site/index.html`. + + However, Windows and the Stack-supplied MSYS2 environment do not come with + Python or `make` by default. 
Further, Python on Windows does not use the
+    `python3` command (used on Unix-like operating systems) to invoke Python.
+    Further still, in the MSYS2 environment, development versions of packages
+    `libxml2` and `libxslt` are necessary dependencies. Consequently, the
+    automation requires the following command to install requirements into
+    the MSYS2 environment:
+
+    ~~~text
+    stack exec -- pacman --sync python make libxml2-devel libxslt-devel
+    ~~~
+
+    !!! note
+
+        If the automation fails before the `mkdocs` tool etc is installed, the
+        directory `.python-doc-virtualenv` created by the automation will need
+        to be deleted before the automation will work again.
+
+    For most users, the automation will be less convenient than simply using the
+    `mkdocs serve` command directly.
+
+=== "Unix-like"
+
+    With `python3` and `make` available on the PATH, users of the `make` tool
+    can automate some of these steps from Stack's project directory with:
+
+    * preview: `make docs-serve`; and
+    * build: `make _site/index.html`.
+
+## Error messages
+
+Stack catches exceptions thrown by its dependencies or by Stack itself in
+`Stack.main`. In addition to exceptions that halt Stack's execution, Stack logs
+certain other matters as 'errors'.
+
+To support the Haskell Foundation's
+[Haskell Error Index](https://errors.haskell.org/) initiative, all Stack
+error messages generated by Stack itself should have a unique initial line:
+
+~~~text
+Error: [S-nnnn]
+~~~
+
+where `nnnn` is a four-digit number in the range 1000 to 9999.
+
+If you create a new Stack error, select a number using a random number generator
+(see, for example, [RANDOM.ORG](https://www.random.org/)) and check that number
+is not already in use in Stack's code. If it is, pick another until the number
+is unique.
+
+All exceptions generated by Stack itself are implemented using data constructors
+of closed sum types. Typically, there is one such type for each module that
+exports functions that throw exceptions.
This type and the related `instance` +definitions are usually located at the top of the relevant module. -The documentation is rendered on [haskellstack.org](http://haskellstack.org) by -readthedocs.org using Sphinx and CommonMark. Since links and formatting vary -from GFM, please check the documentation there before submitting a PR to fix -those. +Stack supports two types of exceptions: 'pretty' exceptions that are instances +of class `RIO.PrettyPrint.Pretty`, which provides `pretty :: e -> StyleDoc`, and +thrown as expressions of type `RIO.PrettyPrint.PrettyException.PrettyException`; +and other 'plain' exceptions that are simply instances of class +`Control.Exception.Exception` and, hence, instances of class `Show`. These types +and classes are re-exported by `Stack.Prelude`. -If your changes move or rename files, or subsume Wiki content, please continue -to leave a file/page in the old location temporarily, in addition to the new -location. This will allow users time to update any shared links to the old -location. Please also update any links in other files, or on the Wiki, to point -to the new file location. +Stack throws exceptions in parts of the code that should, in principle, be +unreachable. The functions `Stack.Prelude.bugReport` and +`Stack.Prelude.bugPrettyReport` are used to give the messages a consistent +format. The names of the data constructors for those exceptions usually end in +`Bug`. +In a few cases, Stack may throw an exception in 'pure' code. The function +`RIO.impureThrow :: Exception e => e -> a`, re-exported by `Stack.Prelude`, is +used for that purpose. ## Code If you would like to contribute code to fix a bug, add a new feature, or -otherwise improve `stack`, pull requests are most welcome. It's a good idea to +otherwise improve `stack`, pull requests are most welcome. It is a good idea to [submit an issue](https://github.com/commercialhaskell/stack/issues/new) to discuss the change before plowing into writing code. 
-If you'd like to help out but aren't sure what to work on, look for issues with +If you'd like to help out but are not sure what to work on, look for issues with the [awaiting pull request](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22) label. Issues that are suitable for newcomers to the codebase have the @@ -48,156 +330,544 @@ label. Issues that are suitable for newcomers to the codebase have the label. Best to post a comment to the issue before you start work, in case anyone has already started. -Please include a -[ChangeLog](https://github.com/commercialhaskell/stack/blob/master/ChangeLog.md) -entry and -[documentation](https://github.com/commercialhaskell/stack/tree/master/doc/) -updates with your pull request. +Please include with your pull request: + +* a + [ChangeLog](https://github.com/commercialhaskell/stack/blob/master/ChangeLog.md) + entry; and + +* [documentation](https://github.com/commercialhaskell/stack/tree/master/doc/) + updates. + +## Backwards Compatibility + +The Stack package provides a library and an executable (`stack`) that depends on +the library. The library is intended for use only by the executable. + +Consequently, the Stack package does not need to, and does not, strive for the +compatibility with a range of versions of GHC that a library package (such as +`pantry`) would seek. + +Stack aims to depend on well-known packages. The specific versions on which it +depends at any time are specified by `package.yaml` and `stack.yaml`. It does +not aim to be compatible with more than one version of the `Cabal` package at +any time. 
At the time of writing (January 2026) the package versions are
+primarily ones in Stackage snapshot LTS Haskell 24.37 (for GHC 9.10.3), the
+latest version of `Cabal` released on Hackage (`Cabal-3.16.0.0`),
+`pantry-0.11.2`, `persistent-2.18.0.0` and the latest version of packages in
+the `tls` family (which reduce dependencies on unmaintained packages).
+
+A Stack executable makes use of Cabal (the library) through a small 'Setup'
+executable that it compiles from Haskell source code. The executable compiles
+that code with a dependency on the version of Cabal that ships with the
+specified GHC compiler. Each release of Stack will normally aim to support all
+versions of GHC and the Cabal package in Stackage LTS Haskell snapshots
+published within seven years of the release. For example, snapshot LTS Haskell
+13.0, published on 23 December 2018, was the first LTS Haskell snapshot to
+provide GHC 8.6.3 which comes with `base-4.12.0.0` and `Cabal-2.4.0.1`.
+Normally, until, at least, 23 December 2025, Stack releases would aim to support
+the immediate predecessor, GHC 8.4.4 and `base-4.11.1.0`, `Cabal-2.2.0.1` and
+Haddock 2.20.0.
+
+When a version of the Stack executable actually ceases to support a version of
+GHC and `Cabal`, that should be recorded in Stack's
+[ChangeLog](https://github.com/commercialhaskell/stack/blob/master/ChangeLog.md).
 
 ## Code Quality
 
-The Stack projects uses [HLint](https://github.com/ndmitchell/hlint) as a code
-quality tool.
+The Stack project uses [yamllint](https://github.com/adrienverge/yamllint) as a
+YAML file quality tool and [HLint](https://github.com/ndmitchell/hlint) as a
+code quality tool.
+
+### Linting of YAML files
+
+The yamllint configuration extends the tool's default and is set out in
+`.yamllint.yaml`. In particular, indentation is set at 2 spaces and `- ` in
+sequences is treated as part of the indentation.
-Note that stack contributors need not dogmatically follow the suggested hints
-but are encouraged to debate their usefulness. If you find a hint is not useful
-and detracts from readability, consider marking it in the [configuration
-file](https://github.com/commercialhaskell/stack/blob/master/.hlint.yaml) to
-be ignored. Please refer to the [HLint manual](https://github.com/ndmitchell/hlint#readme)
+### Linting of Haskell source code
+
+The HLint configuration is set out in `.hlint.yaml`.
+
+Stack contributors need not follow dogmatically the suggested HLint hints but
+are encouraged to debate their usefulness. If you find a HLint hint is not
+useful and detracts from readability of code, consider marking it in the
+[configuration file](https://github.com/commercialhaskell/stack/blob/master/.hlint.yaml)
+to be ignored. Please refer to the
+[HLint manual](https://github.com/ndmitchell/hlint#readme)
 for configuration syntax.
 
-Quoting [@mgsloan](https://github.com/commercialhaskell/stack/pulls?utf8=%E2%9C%93&q=is%3Apr%20author%3Amgsloan):
+Quoting
+[@mgsloan](https://github.com/commercialhaskell/stack/pulls?utf8=%E2%9C%93&q=is%3Apr%20author%3Amgsloan):
 
 > We are optimizing for code clarity, not code concision or what HLint thinks.
 
-You can install HLint with stack. You might want to install it in the global
+You can install HLint with Stack. You might want to install it in the global
 project in case you run into dependency conflicts. HLint can report hints in
 your favourite text editor. Refer to the HLint repository for more details.
 
-To install:
+To install, command:
 
-```
+~~~text
 stack install hlint
-```
+~~~
+
+Once installed, you can check your changes with command:
+
+~~~text
+stack exec -- sh ./etc/scripts/hlint.sh
+~~~
+
+## Code syntax
+
+Stack makes use of GHC's `GHC2024` collection of language extensions. That is
+set using the `language` key in the `package.yaml` file.
+
+The `GHC2024` edition enables the `MonoLocalBinds` language extension.
Enabling +the extension can change how GHC infers the types of local bindings (that is, +let-bound or where-bound variables). When enabled it means that the types of a +group of local bindings are not generalised unless: + +* any of its binders has a partial type signature; or + +* all of its free variables are closed. If a variable is closed then its type + definitely has no free type variables. A variable imported from another module + is closed. A variable that is let-bound and has an explicit type signature + with no free type variables is closed. + +Stack makes use of single-constructor types where the constructor has a large +number of fields. Some of those fields have similar types, and so on. Given +that, Stack makes use of `OverloadedRecordDot`, introduced in GHC 9.2.1. It also +makes use of `NoFieldSelectors`, also introduced in GHC 9.2.1, and, where +necessary, `DuplicateRecordFields`. Together, these language extensions enable +the removal from the names of fields of the prefixes that were used historically +to indicate the type and make field names unique. This is because the names of +fields no longer need to be unique in situations where the intended field is +unambiguous. This allows for a terser syntax without loss of expressiveness. +For example: + +~~~haskell +let cliTargets = (boptsCLITargets . bcoBuildOptsCLI) bco +~~~ + +can become: + +~~~haskell +let cliTargets = bco.buildOptsCLI.targets +~~~ + +The intended field is unambiguous in almost all cases. In the case of a few +record updates it is ambiguous. The name of the field needs to be qualified in +those cases. For example: + +~~~haskell +import qualified Stack.Types.Build as ConfigCache ( ConfigCache (..) ) +... +let ignoreComponents :: ConfigCache -> ConfigCache + ignoreComponents cc = cc { ConfigCache.components = Set.empty } +~~~ + +## Code Style + +A single code style is not applied consistently to Stack's code and Stack is not +Procrustean about matters of style. 
Rules of thumb, however, are:
+
+* keep pull requests that simply reformat code separate from those that make
+  other changes to code; and
+* when making changes to code other than reformatting, follow the existing style
+  of the function(s) or module(s) in question.
+
+That said, the following may help:
+
+* Stack's code generally avoids the use of C preprocessor (CPP) directives.
+  Windows and non-Windows code is separated in separate source code directories
+  and distinguished in Stack's Cabal file. `Stack.Constants.osIsWindows :: Bool`
+  is provided. Multi-line strings are generally formatted on the assumption that
+  GHC's `CPP` language pragma is not being used.
+* Language pragmas usually start with `NoImplicitPrelude`, where applicable, and
+  then all others are listed alphabetically. The closing `#-}` are aligned, for
+  purely aesthetic reasons.
+* Stack is compiled with GHC's `-Wall` enabled, which includes `-Wtabs` (no tabs
+  in source code). Most modules are based on two spaces (with one space for a
+  `where`) for indentation but older and larger modules are still based on four
+  spaces.
+* Stack's code and documentation tends to be based on lines of no more than 80
+  characters or, if longer, no longer than necessary.
+* Stack uses export lists.
+* Stack's imports are listed alphabetically, including `Stack.Prelude`, where
+  applicable. The module names are left aligned, with space left for `qualified`
+  where it is absent.
+* Stack's code is sufficiently stable that explicit import lists can sensibly be
+  used. The exception is the import of `Stack.Prelude`. Not all modules have
+  comprehensive explicit import lists.
+* Short explicit import lists follow the module name. Longer lists start on the
+  line below the module name. Spaces are used to separate listed items from
+  their enclosing parentheses.
+* As noted above, the types used to implement Stack's exceptions and the related + `instance` definitions are usually located at the top of the relevant module. +* In function type signatures, the `::` is kept on the same line as the + function's name. This format is Haskell syntax highlighter-friendly. +* If `where` is used, the declarations follow on a separate line. -Once installed, you can check your changes with: +## Testing -``` -$ ./etc/scripts/hlint.sh -``` +The Stack code has both unit tests and integration tests. -## Testing +### Working with Unit Tests -The Stack code has both unit tests and integration tests. Integration tests can -be found in the [test/integration](https://github.com/commercialhaskell/stack/tree/master/test/integration) -folder and unit tests, in the [src/test](https://github.com/commercialhaskell/stack/tree/master/src/test) -folder. Tests are written using the [Hspec](https://hspec.github.io/) framework. In -order to run the full test suite, you can simply do: +Unit tests can be found in the +[tests/unit](https://github.com/commercialhaskell/stack/tree/master/tests/unit) +directory. Tests are written using the [Hspec](https://hspec.github.io/) +framework. In order to run the full test suite, you can simply command: -```bash -$ stack test -``` +~~~text +stack test +~~~ The `--file-watch` is a very useful option to get quick feedback. However, -running the entire test suite after each file change will slow you down. You'll -need to specify which test suite (unit test or integration) and pass arguments -to specify which module you'd specifically like to run to get quick feedback. A -description of this follows below. +running the entire test suite after each file change will slow you down. You +will need to specify which test suite (unit test or integration) and pass +arguments to specify which module you'd specifically like to run to get quick +feedback. A description of this follows below. 
-### Working with Unit Tests
-If you would like to run the unit tests on their own, you can:
+If you would like to run the unit tests on their own, you can command:
 
-```bash
-$ stack test stack:stack-test
-```
+~~~text
+stack test stack:stack-unit-test
+~~~
 
-Running an individual module works like this:
+Running an individual module works with a command like this:
 
-```bash
-$ stack test stack:stack-test --ta "-m <module>"
-```
+~~~text
+stack test stack:stack-unit-test --ta "-m <module>"
+~~~
 
 Where `<module>` is the name of the module without `Spec.hs`.
 
-You may also load tests into GHCi and run them with:
+You may also load tests into GHCi and run them with this command:
 
-```bash
-$ stack ghci stack:stack-test --only-main
+~~~text
+stack ghci stack:stack-unit-test --only-main
 # GHCi starting up output ...
 > :main -m "<module>"
-```
+~~~
 
 Where again, `<module>` is the name of the module without `Spec.hs`.
 
 ### Working with Integration Tests
 
-Running the integration tests is a little involved, you'll need to:
+Integration tests can be found in the
+[tests/integration](https://github.com/commercialhaskell/stack/tree/master/tests/integration)
+folder.
+
+Running the integration tests is a little involved, you will need to command:
 
-```bash
-$ stack build --flag stack:integration-tests stack --exec stack-integration-test
-```
+~~~text
+stack build --flag stack:integration-tests stack --exec stack-integration-test
+~~~
 
-Running an individual module works like this:
+Running an individual module works with a command like this:
 
-```bash
-$ stack build --flag stack:integration-tests stack --exec "stack-integration-test -m <pattern>"
-```
+~~~text
+stack build --flag stack:integration-tests stack --exec "stack-integration-test -m <pattern>"
+~~~
 
 Where `<pattern>` is the name of the folder listed in the
 [test/integration/tests/](https://github.com/commercialhaskell/stack/tree/master/test/integration/tests)
-folder.
+directory.
-You may also achieve this through GHCi with: +You may also achieve this through GHCi with this command: -```bash -$ stack ghci stack:stack-integration-test +~~~text +stack ghci stack:stack-integration-test # GHCi starting up output ... > :main -m "" -``` +~~~ Where again, `` is the name of the folder listed in the [test/integration/tests/](https://github.com/commercialhaskell/stack/tree/master/test/integration/tests) -folder. +directory. + +You can disable a few integration tests through the -n option : + +~~~text +stack build --flag stack:integration-tests stack --exec "stack-integration-test -n -n " +~~~ + +to disable folders named after `` and ``. It is especially +useful when some tests are taking a while to complete. + +On Linux, the `stack-integration-test` executable uses the `lld` linker and +expects it to be on the PATH. The integration tests complete significantly +quicker with `lld` than with the `ld.bfd` linker. + +## Continuous integration (CI) + +We use [GitHub Actions](https://docs.github.com/en/actions) to do CI on Stack. +The configuration of the workflows is in the YAML files in `.github/workflows`. +The current active workflows are: + +### Linting - `lint.yml` -## CI Build rules +This workflow will run if: -We use [Azure](https://dev.azure.com/commercialhaskell/stack/_build) -to do CI builds on Stack. There are two types of build which happens -there: +* there is a pull request +* commits are pushed to these branches: `master`, `stable` and `rc/**` -### Test suite build +The workflow has one job (`style`). It runs on `ubuntu` only and applies +yamllint and Hlint. -This builds the code with `--pedantic`, performs hlint checks and it -runs all test suites on multiple GHC/OS configuration. These are the -rules for triggering it: +### Test suite - `unit-tests.yml` -* CI will run this if commits are pushed to stable, master branch -* CI will run this for any branches starting with `ci/` -* CI will run this for all new PR's. 
+This workflow will run if: -### Integration based build +* there is a pull request +* commits are pushed to these branches: `master`, `stable` and `rc/**`. +* requested -This build runs the integration tests in the Stack codebase. This is -scheduled to run daily once for both the stable and master branches. +The workflow has two jobs: `pedantic` and `unit-tests`. -Also, you can manually run this on a specific branch from the Azure UI -if you have the appropriate permissions. If you'd specifically like a -branch or PR to run integration tests, add a comment in the PR and we -can queue one up. +The `pedantic` job runs on `ubuntu` only and builds Stack with the +`--pedantic` flag. +The `unit-tests` job runs on a matrix of operating systems and Stack +project-level configuration files (`stack.yaml`, by default). It builds and +tests Stack with the following flags: `--haddock --no-haddock-deps`. -### Skipping build +Its approach to creating a cache depends on the operating system. Its 'Cache +dependencies on Unix-like OS' step caches the Stack root on Unix-like operating +systems. Its 'Cache dependencies on Windows' step caches the same information +on Windows, but takes into account that a relevant directory is located outside +of the Stack root. -There are times (like a minor type fix) where you don't want the CI to -run. For those cases, you can add `[skip ci]` or `[ci skip]` in your -commit message to skip the builds. For more details, [refer -here](https://github.com/Microsoft/azure-pipelines-agent/issues/858#issuecomment-475768046). +### Integration-based - `integration-tests.yml` + +This workflow will run if: + +* there is a pull request +* commits are pushed to these branches: `master`, `stable` and `rc/**` +* any tag is created +* requested + +The workflow has three jobs: `integration-tests`, `linux-arm64` and +`github-release`. 
+ +The `integration-tests` job runs on a matrix of operating systems (`ubuntu`, +`windows` and `macos`) and makes use of the `release.hs` script at +`etc/scripts`. Its approach to creating a cache is the same as for +`unit-tests.yml`, described above. + +Its 'Install deps and run checks' step uses `release.hs check`. + +Its 'Build bindist' step uses `release.hs build`. + +Its 'Upload bindist' step uploads artifacts using the name of the runner's +operating system (`Linux`, `Windows` or `macOS`) as the name for the artifacts. + +The `linux-arm64` job runs on a self-hosted runner for Linux and ARM64. It makes +use of Docker and a Docker file at `etc/dockerfiles/arm64.Dockerfile`. + +Its 'Build bindist' step makes use of a compiled version of `release.hs` script +at `etc/scripts` to command `release build`. + +Its 'Upload bindist' step uploads artifacts using `Linux-ARM64` as the name for +the artifacts. + +The `github-release` job needs `integration-tests` and `linux-arm64`. It only +takes effect if the trigger for the workflow was the creation of a tag. + +Its four steps `Download Linux/Windows/macOS/Linux-ARM64 artifact` download the +named artifacts to path `_release`. + +Its step 'Hash and sign assets' makes use of a 'secret' environment variable +`RELEASE_SIGNING_KEY` established by the owner of the Stack repository. The +variable contains the private key for the GPG key with ID 0x575159689BEFB442. +That key is imported into GPG and then used by GPG to create a detached signature +for each file. + +### Stan tool - `stan.yml` + +[Stan](https://hackage.haskell.org/package/stan) is a Haskell static analysis +tool. As of `stan-0.1.0.1`, it supports GHC >= 9.6.3 and Stack is built with +GHC 9.10.3. The tool is configured by the contents of the `.stan.toml` file. 
+
+This workflow will run if:
+
+* there is a pull request
+* requested
+
+## Change log
+
+Stack seeks to maintain a comprehensive and useful change log, that captures all
+changes that could be relevant to users of Stack. The change log is also
+published at <https://docs.haskellstack.org/> under menu item
+"More/Version history".
+
+The change log for each release or the current, unreleased, version of Stack is
+organised under one or more of the headings "Major changes", "Behavior changes",
+"Other enhancements" and "Bug fixes". Major changes are those anticipated to be
+important to many Stack users. The heading "Release notes" can also be used to
+communicate other important information about a release not categorised under
+those headings.
+
+Change log entries aim to be succinct and clear. The change log is not intended
+to be a substitute for high quality in-tool and online documentation.
+
+If you consider that the existing change log is incomplete, inaccurate or
+ambiguous, please
+[open an issue](https://github.com/commercialhaskell/stack/issues/new) at
+Stack's GitHub repository.
+
+## Haskell Language Server
+
+You may be using [Visual Studio Code](https://code.visualstudio.com/) (VS Code)
+with its
+[Haskell extension](https://marketplace.visualstudio.com/items?itemName=haskell.haskell),
+which is powered by the
+[Haskell Language Server](https://github.com/haskell/haskell-language-server)
+(HLS).
+
+Stack can be built with Stack (which is recommended) or with Cabal (the tool).
+
+=== "Stack"
+
+    If you use Stack to build Stack, command `stack ghci` in the root directory
+    of the Stack project should work as expected, if you have first commanded
+    `stack build` once. `stack build` causes Cabal (the library) to create the
+    automatically generated module `Stack_build`.
+ + If `ghc` is not on your PATH, then Haskell Language Server may report the + following error about `Stack.Constants.ghcShowOptionsOutput`: + ~~~text + • Exception when trying to run compile-time code: + ghc: readCreateProcess: does not exist (No such file or directory) + Code: (TH.runIO (readProcess "ghc" ["--show-options"] "") + >>= TH.lift . lines) + • In the untyped splice: + $(TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift + . lines) + ~~~ + + `ghc` should be on the PATH if you run VS Code itself in the Stack + environment: + ~~~text + stack exec -- code . + ~~~ + + The following [cradle (`hie.yaml`)](https://github.com/haskell/hie-bios) + should suffice to configure Haskell Language Server (HLS) explicitly for + `./Setup.hs` and each of the buildable components in Stack's Cabal file: + ~~~yaml + cradle: + multi: + - path: "./Setup.hs" + config: + cradle: + direct: + arguments: [] + - path: "./" + config: + cradle: + stack: + - path: "./src" + component: "stack:lib" + - path: "./app" + component: "stack:exe:stack" + - path: "./tests/integration" + component: "stack:exe:stack-integration-test" + - path: "./tests/unit" + component: "stack:test:stack-unit-test" + ~~~ + +=== "Cabal (the tool)" + + If you use Cabal (the tool) to build Stack, command `cabal repl` in the root + directory of the Stack project should work as expected, if you have GHC and + (on Windows) MSYS2 on the PATH. Stack's custom `./Setup.hs` causes + `cabal repl` to cause Cabal (the library) to create the automatically + generated module `Stack_build`. + + If `ghc` is not on your PATH, then Haskell Language Server may report the + following error about `Stack.Constants.ghcShowOptionsOutput`: + ~~~text + • Exception when trying to run compile-time code: + ghc: readCreateProcess: does not exist (No such file or directory) + Code: (TH.runIO (readProcess "ghc" ["--show-options"] "") + >>= TH.lift . 
lines) + • In the untyped splice: + $(TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift + . lines) + ~~~ + + `ghc` and (on Windows) MSYS2 should be on the PATH if you run commands + (including `cabal`) in the Stack environment: + ~~~text + stack exec --no-ghc-package-path -- cabal repl + ~~~ + + or + ~~~text + stack exec --no-ghc-package-path -- code . + ~~~ + + Use of GHC's environment variable `GHC_PACKAGE_PATH` is not compatible with + Cabal (the tool). That is why the `--no-ghc-package-path` flag must be + specified with `stack exec` when relying on Cabal (the tool). + + The following [cradle (`hie.yaml`)](https://github.com/haskell/hie-bios) + should suffice to configure Haskell Language Server (HLS) explicitly for + `./Setup.hs` and each of the buildable components in Stack's Cabal file: + ~~~yaml + cradle: + multi: + - path: "./Setup.hs" + config: + cradle: + direct: + arguments: [] + - path: "./" + config: + cradle: + cabal: + - path: "./src" + component: "lib:stack" + - path: "./app" + component: "exe:stack" + - path: "./tests/integration" + component: "exe:stack-integration-test" + - path: "./tests/unit" + component: "test:stack-unit-test" + ~~~ + +A cradle is not committed to Stack's repository because it imposes a choice of +tool used for building. + +## Dev Containers + +A [Development Container](https://containers.dev) (or Dev Container for short) +allows you to use a container as a full‑featured development environment. + +You can run Dev Containers locally/remotely (with VS Code), or create a +[Codespace](https://github.com/features/codespaces) for a branch in a +repository to develop online. + +Stack's default Dev Container is intended for use with its default +project‑level configuration (`stack.yaml`). But there are also Dev Containers +for the experimental project‑level configurations. + +For further information, see the documentation for +[Dev Containers](dev_containers.md). 
 ## Slack channel
 
-If you're making deep changes and real-time communcation with the Stack team
-would be helpful, we have a `#stack-collaborators` Slack channel. Please
-contact [@borsboom](https://github.com/borsboom) (manny@fpcomplete.com) or
-[@snoyberg](https://github.com/snoyberg) (michael@fpcomplete.com) for an
-invite.
+If you are making deep changes and real-time communication with the Stack team
+would be helpful, we have a `#stack-collaborators` Slack channel in the
+Haskell Foundation workspace. To join the workspace, follow this
+[link](https://haskell-foundation.slack.com/join/shared_invite/zt-z45o9x38-8L55P27r12YO0YeEufcO2w#/shared-invite/email).
+
+## Matrix room
+
+There is also a
+[Haskell Stack room](https://matrix.to/#/#haskell-stack:matrix.org)
+at address `#haskell-stack:matrix.org` on [Matrix](https://matrix.org/).
diff --git a/ChangeLog.md b/ChangeLog.md
index adff519513..438070e61a 100644
--- a/ChangeLog.md
+++ b/ChangeLog.md
@@ -1,21 +1,867 @@
 # Changelog
-
 ## Unreleased changes
 
 Release notes:
 
-**Changes since v2.5.1.1:**
+**Changes since v3.9.3:**
 
 Major changes:
 
 Behavior changes:
 
+* Stack's default Nix integration now includes the `cacert` Nix package, in
+  order to support Stack's use of `crypton-x509-system >= 1.6.8`.
+* Following a change to the Stackage project's server API, the default value of
+  the `urls` key includes
+  `recent-snapshots: https://stackage.org/api/v1/snapshots`.
+
+Other enhancements:
+
+* Bump to Hpack 0.39.5.
+* Experimental: Add flag `--[no-]semaphore` (default: disabled) to Stack's
+  `build` command, to allow GHC to use a system semaphore to perform compilation
+  in parallel when possible. Supported, by default, by GHC 9.10.1 or later. The
+  option is considered experimental because, on Linux only, musl and non-musl
+  semaphores are incompatible.
+* Add option `--reach <PACKAGE>` to Stack's `dot` and `ls dependencies`
+  commands, to prune packages that cannot reach any of the specified packages in
+  the dependency graph.
+* Add option `--test-suite-timeout-grace=SECONDS` to Stack's `build` command to
+  request termination of a timed-out test suite process and, after the specified
+  grace period, force termination. Used together with the existing
+  `--test-suite-timeout=SECONDS` option.
+* In YAML configuration files, the `recent-snapshots` key is introduced (under
+  the `urls` key), to specify the URL used by Stack's `ls snapshots remote`
+  command.
+* In YAML configuration files (`stack.yaml` and `config.yaml`), an
+  `!include <file>` directive is now supported. This allows common
+  configuration to be shared across multiple files. For example, a project that
+  maintains multiple project-level configuration files for testing against
+  different snapshots can use `!include` to avoid duplicating shared settings.
+* Stack's `config set` command raises an error if the target configuration file
+  excludes the key being set and includes an `!include` directive.
+* Stack's `config set snapshot` command now works with other snapshot values
+  in addition to snapshot synonyms.
+
+Bug fixes:
+
+* Stack's `dot` and `ls dependencies` commands no longer prune a package with
+  dependencies only because all its direct dependencies are to be pruned.
+* After March 2026, Hackage requires Stack's user agent to be set when applying
+  digest authentication to a request. Stack's `upload` command now does that,
+  re-establishing authentication by Hackage username and password.
+* Stack 3.9.3 and earlier fail to construct a build plan if project package A
+  depends on project package B and package B's executables (only) depend on
+  package A and the name of A is before that of B, alphabetically. That bug is
+  fixed.
+
+## v3.9.3 - 2026-02-19
+
+Release notes:
+
+* This release fixes a potential bug for users of Stack's Docker integration.
+ +**Changes since v3.9.1:** + +Other enhancements: + +* The `resolver` synonym for `snapshot`, informally deprecated from Stack 3.1.1, + is formally deprecated in online and in-app documentation. + +Bug fixes: + +* Stack's Docker integration supports Docker client versions 29.0.0 and greater. + +## v3.9.1 - 2026-01-04 + +**Changes since v3.7.1:** + +Behavior changes: + +* Where applicable and Stack supports the GHC version, only the wired-in + packages of the actual version of GHC used are treated as wired-in packages. +* Stack now recognises `ghc-internal` as a GHC wired-in package. +* The configuration option `package-index` has a new default value: the `keyids` + key lists the keys of the Hackage root key holders applicable from 2025-07-24. +* Stack's `dot` command now treats `--depth` the same way as the + `ls dependencies` command, so that the nodes of + `stack dot --external --depth 0` are the same as the packages listed by + `stack ls dependencies --depth 0`. +* When building GHC from source, on Windows, the default Hadrian build target is + `reloc-binary-dist` and the default path to the GHC built by Hadrian is + `_build/reloc-bindist`. +* Stack's `haddock` command no longer requires a package to have a main library + that exposes modules. +* On Windows, the path segment _platform_\\_hash_\\_ghc version_, under + `.stack-work\install` and `.stack-work\hoogle`, is hashed only once, rather + than twice. + +Other enhancements: + +* Bump to Hpack 0.39.1. +* Consider GHC 9.14 to be a tested compiler and remove warnings. +* Consider Cabal 3.16 to be a tested library and remove warnings. +* From GHC 9.12.1, `base` is not a GHC wired-in package. In configuration files, + the `notify-if-base-not-boot` key is introduced, to allow the existing + notification to be muted if unwanted when using such GHC versions. 
+* Add flag `--[no-]omit-this` (default: disabled) to Stack's `clean` command to + omit directories currently in use from cleaning (when `--full` is not + specified). +* Add option `-w` as synonym for `--stack-yaml`. +* `stack new` now allows `codeberg:` as a service for template downloads +* In YAML configuration files, the `compiler-target` and + `compiler-bindist-path` keys are introduced to allow, when building GHC from + source, the Hadrian build target and Hadrian path to the built GHC to be + specified. + +Bug fixes: + +* `--PROG-option=` passes `--PROG-option=` (and not + `--PROG-option=""`) to Cabal (the library). +* The message S-7151 now presents as an error, with advice, and not as a bug. +* Stack's `dot` command now uses a box to identify all GHC wired-in packages, + not just those with no dependencies (being only `rts`). +* Stack's `dot` command now gives all nodes with no dependencies in the graph + the maximum rank, not just those nodes with no relevant dependencies at all + (being only `rts`, when `--external` is specified). +* Improved error messages for S-4634 and S-8215. +* Improved in-app help for the `--hpack-force` flag. + +## v3.7.1 - 2025-06-28 + +**Changes since v3.5.1:** + +Other enhancements: + +* Bump to Hpack 0.38.1. +* The `--extra-dep` option of Stack's `script` command now accepts a YAML value + specifying any immutable extra-dep. Previously only an extra-dep in the + package index that could be specified by a YAML string (for example, + `acme-missiles-0.3@rev:0`) was accepted. + +Bug fixes: + +* `stack script --package ` now uses GHC's `-package-id` option to + expose the installed package, rather than GHC's `-package` option. For + packages with public sub-libraries, `-package ` can expose an + installed package other than one listed by `ghc-pkg list `. +* Work around `ghc-pkg` bug where, on Windows only, it cannot register a package + into a package database that is also listed in the `GHC_PACKAGE_PATH` + environment variable. 
In previous versions of Stack, this affected + `stack script` when copying a pre-compiled package from another package + database. +* On Windows, when decompressing, and extracting, tools from archive files, + Stack uses the system temporary directory, rather than the root of the + destination drive, if the former is on the destination drive. + +## v3.5.1 - 2025-03-29 + +**Changes since v3.3.1:** + +Behavior changes: + +* Stack will also warn (message S-8432) if there is any non-ISO/IEC 8859-1 + (Latin-1) character in Stack's 'programs' path, as `hsc2hs` does not work if + there is such a character in the path to its default template + `template-hsc.h`. +* Stack customizes setup using `Cabal`, so if a `setup-depends` field does not + mention it as a dependency, Stack warns and adds the GHC boot package as a + dependency. Previously, Stack would not do so but only warn that build errors + were likely. + +Other enhancements: + +* Bump to Hpack 0.38.0. +* In YAML configuration files, the `install-msys` key is introduced, to enable + or disable the download and installation of Stack-supplied MSYS2 when + necessary (subject to `skip-msys: false`). The default is the same as the + `install-ghc` setting (including if that is set at the command line). + Consequently, the default behaviour of Stack is unaffected. +* Add the `stack config set install-msys` command to configure the + `install-msys` option in YAML configuration files. +* Option `allow-newer-deps` is no longer classified as experimental in + documentation. +* `stack sdist` and `stack upload` report the version of Cabal (the library) + being used to check packages. +* Add the `stack config build-files` command to generate (when applicable) a + Cabal file from a package description in the Hpack format and/or a lock file + for Stack's project-level configuration, without taking any other build steps. 
+ +## v3.3.1 - 2024-12-28 + +**Changes since v3.1.1:** + +Behavior changes: + +* Stack interprets consecutive line ends in the value of the `user-message` + project-specific configuration option as a single blank line. Previously all + line ends were interpreted as white space. +* Stack no longer supports Docker versions before Docker 1.9.1 and, + consequently, if a Docker container is not being run 'detached', its standard + input channel will always be kept open. (Before Docker 1.9.1 the use of an + interactive container could hang in certain circumstances.) +* On Windows, Stack will always warn (message S-8432) if there is a space + character in Stack's 'programs' path, as GHC 9.4.1 and later do not work if + there is a space in the path to the `ghc` executable. S-8432 now presents as a + warning and not an error. +* Stack respects the `--no-run-tests` and `--no-run-benchmarks` flags when + determining build actions. Previously Stack respected the flags when executing + the run test suites or run benchmarks actions for each targeted project + package. + +Other enhancements: + +* Consider GHC 9.10 to be a tested compiler and remove warnings. +* Consider Cabal 3.12 to be a tested library and remove warnings. +* Add flags `--run-tests` and `--run-benchmarks` (the existing defaults) to + Stack's `build` command, which take precedence over the existing + `no-run-tests` and `no-run-benchmarks` configuration options, respectively. +* In configuration files, the `notify-if-no-run-tests` and + `notify-if-no-run-benchmarks` keys are introduced, to allow the existing + notification to be muted if unwanted. + +Bug fixes: + +* Stack's in-app messages refer to https://haskellstack.org as currently + structured. (Most URLs in older Stack versions are redirected.) +* Stack's `upgrade` command only treats the current running Stack executable + as '`stack`' if the executable file is named `stack` or, on Windows, + `stack.exe`. Previously only how it was invoked was considered. 
+* `stack test --no-run-tests --dry-run` no longer reports that Stack would test + project packages with test suites and + `stack bench --no-run-benchmarks --dry-run` no longer reports that Stack + would benchmark project packages with benchmarks. +* `StackSetupShim` compiles with `Cabal >= 3.14.0.0`. + +## v3.1.1 - 2024-07-28 + +Release notes: + +* The change in major version from 2.x to 3.1 marks the dropping of support for + versions of GHC before 8.4, deprecated in Stack 2.15.1. + +**Changes since v2.15.7:** + +Behavior changes: + +* Stack uses the version of the Cabal package that comes with the specified + version of GHC. Stack no longer supports such Cabal versions before 2.2, which + came with versions of GHC before 8.4. Consequently, the `init` command will + not try LTS Haskell before 12.0. +* The `init` command initialises `stack.yaml` with a `snapshot` key rather than + a `resolver` key. +* After installing GHC or another tool, Stack deletes the archive file which + provided the tool. +* Remove hidden flag `--skip-intermediate-deps`, effectively deprecated since + Stack 1.3.0, from `ghci` and `repl` commands. +* The `haddock --haddock-for-hackage` command only seeks to create an archive of + the `-docs` directory for build targets and if flags + excluding the building of project packages are not set. +* The predecessor of configuration option `package-index`, `package-indices` + (deprecated in Stack 2.9.3) has been removed as an alternative option. +* If a build target is a package identifier, and the package version is not in + the snapshot or the package index, Stack will report an error when the target + is parsed. Previously, if another version of the package was in the snapshot, + Stack would construct the build plan with that other version or, if it was + not, Stack would defer an error to the construction of the build plan. 
+* The `list` command, with a specified snapshot and package, also reports the + version of the package included indirectly in the snapshot (as a boot package + of the compiler specified by the snapshot). +* `stack build --flag *:[-]` now only applies the flag setting to + packages for which the Cabal flag is defined, as opposed to all packages. +* On Unix-like operating systems, drop support for `/etc/stack/config`, + deprecated in Stack 0.1.6.0. +* Drop support for, in the Stack root, directory `global` and file `stack.yaml`, + both deprecated in Stack 0.1.6.0. + +Other enhancements: + +* Bump to Hpack 0.37.0. +* In YAML configuration files, the `msys-environment` key is introduced to + allow, on Windows, the MSYS2 environment to be specified. The default + environment is still `MINGW64` on 64-bit Windows and `MINGW32` on 32-bit + Windows. +* In YAML configuration files, the `default-init-snapshot` key is introduced to + allow a default snapshot to be specified for use with the `stack init` + command, as if it had been specified at the command line. +* Add flags `--haddock-executables`, `--haddock-tests` and + `--haddock-benchmarks` to Stack's `build` command (including the `haddock` + synonym for `build --haddock`) to enable also building Haddock + documentation for executables, test suites and benchmarks. Due to a bug in + Cabal (the library), Stack will ignore the flags with a warning for GHC + versions before 9.4. +* Add flag `--[no-]save-hackage-creds` to Stack's `upload` command, which takes + precedence over the existing `save-hackage-creds` configuration option. +* In YAML configuration files, the `global-hints-location` key is introduced to + allow the location of the global hints YAML specification file to be + specified. +* By default, Hpack 0.12.0 or later will decline to overwrite a Cabal file that + was created by a more recent version of Hpack and Hpack 0.20.0 or later will + decline to overwrite a Cabal file that was modified manually. 
In YAML + configuration files, the `hpack-force` key is introduced to allow Hpack to + overwrite such a Cabal file. The corresponding `--hpack-force` flag is also + added. +* Add the `stack config set recommend-stack-upgrade` command to configure + whether or not Stack should notify the user if it identifies a new version of + Stack is available in YAML configuration files. +* Add the `ls globals` command to list all global packages for the version of + GHC specified by the snapshot. +* Add `stack -h` (equivalent to `stack --help`). +* In YAML configuration files, the `file-watch-hook` key is introduced to allow + `--file-watch` post-processing to be customised with an executable or `sh` + shell script. +* Add flag `--[no-]allow-newer` to Stack's `build` command, which takes + precedence over the existing `allow-newer` configuration option. + +Bug fixes: + +* The `config set snapshot` and `config set resolver` commands now respect the + presence of a synonymous key. +* The `config set` commands support existing keys only in the form `key: value` + on a single line. The commands now recognise that a line `key:` does not have + that form. +* On Unix-like operating systems, the `test --coverage` command now finds + package keys even for very long package names. +* The Error S-6362 message now acknowledges when the wanted compiler has been + specified at the command line. +* Fix a regression, introduced in Stack 2.11.1, that caused the `script` command + to parse an (otherwise ignored) project-level configuration file. +* Stack no longer makes recommendations about a project-level configuration file + when only a global configuration file is in use. +* Fix a regression, introduced in Stack 2.15.7, that caused GHC 8.10.7 or + earlier to fail to build a package with a `Custom` build type, if GHC option + `-haddock` was specified. + +## v2.15.7 - 2024-05-12 + +Release notes: + +* This release fixes potential bugs. 
+* The hash that Stack uses to distinguish one build plan from another has + changed for plans that set (as opposed to unset) manually Cabal flags for + immutable dependencies. This will cause Stack to rebuild dependencies for such + plans. + +**Changes since v2.15.5:** + +Major changes: + +* Stack 2.15.5 and earlier cannot build with Cabal (the library) version + `3.12.0.0`. Stack can now build with that Cabal version. + +Behavior changes: + +* Stack's `StackSetupShim` executable, when called with `repl` and + `stack-initial-build-steps`, no longer uses Cabal's `replHook` to apply + `initialBuildSteps` but takes a more direct approach. + +Bug fixes: + +* Fix a regression introduced in Stack 2.15.1 that caused a 'no operation' + `stack build` to be slower than previously. +* The hashes that Stack uses to distinguish one build plan from another now + include the Cabal flags for immutable dependencies set manually. Previously, + in error, only such flags that were unset manually were included. + +## v2.15.5 - 2024-03-28 + +Release notes: + +* This release fixes potential bugs. + +**Changes since v2.15.3:** + +Behavior changes: + +* Following the handover of the Stackage project to the Haskell Foundation, the + default value of the `urls` key is + `latest-snapshot: https://stackage-haddock.haskell.org/snapshots.json`. +* Stack no longer includes the snapshot package database when compiling the + setup executable for a package with `build-type: Configure`. + +## v2.15.3 - 2024-03-07 + +Release notes: + +* With one exception, this release fixes bugs. + +**Changes since v2.15.1:** + +Behavior changes: + +* `stack path --global-config`, `--programs`, and `--local-bin` no longer set + up Stack's environment. + +Bug fixes: + +* Due to a bug, Stack 2.15.1 did not support versions of GHC before 8.2. Stack + now supports GHC versions from 8.0. +* `--haddock-for-hackage` does not ignore `--haddock-arguments`. 
+* On Windows, package locations that are Git repositories with submodules now + work as intended. +* The `ghc`, `runghc` and `runhaskell` commands accept `--package` values that + are a list of package names or package identifiers separated by spaces and, in + the case of package identifiers, in the same way as if they were specified as + targets to `stack build`. + +## v2.15.1 - 2024-02-09 + +Release notes: + +* After an upgrade from an earlier version of Stack, on first use only, + Stack 2.15.1 may warn that it had trouble loading the CompilerPaths cache. +* The hash used as a key for Stack's pre-compiled package cache has changed, + following the dropping of support for Cabal versions older than `1.24.0.0`. + +**Changes since v2.13.1:** + +Behavior changes: + +* Stack does not leave `*.hi` or `*.o` files in the `setup-exe-src` directory of + the Stack root, and deletes any corresponding to a `setup-.hs` or + `setup-shim-.hs` file, to avoid GHC issue + [#21250](https://gitlab.haskell.org/ghc/ghc/-/issues/21250). +* If Stack's Nix integration is not enabled, Stack will notify the user if a + `nix` executable is on the PATH. This usually indicates the Nix package + manager is available. In YAML configuration files, the `notify-if-nix-on-path` + key is introduced, to allow the notification to be muted if unwanted. +* Drop support for Intero (end of life in November 2019). +* `stack path --stack-root` no longer sets up Stack's environment and does not + load Stack's configuration. +* Stack no longer locks on configuration, so packages (remote and local) can + be configured in parallel. This increases the effective concurrency of builds + that before would use fewer threads. Reconsider your `--jobs` setting + accordingly. See [#84](https://github.com/commercialhaskell/stack/issues/84). +* Stack warns that its support for Cabal versions before `2.2.0.0` is deprecated + and may be removed in the next version of Stack. 
Removal would mean that + projects using snapshots earlier than `lts-12.0` or `nightly-2018-03-18` + (GHC 8.4.1) might no longer build. See + [#6377](https://github.com/commercialhaskell/stack/issues/6377). +* If Stack's `--resolver` option is not specified, Stack's `unpack` command with + a package name will seek to update the package index before seeking to + download the most recent version of the package in the index. +* If the version of Cabal (the library) provided with the specified GHC can copy + specific components, Stack will copy only the components built and will not + build all executable components at least once. + +Other enhancements: + +* Consider GHC 9.8 to be a tested compiler and remove warnings. +* Stack can build packages with dependencies on public sub-libraries of other + packages. +* Add flag `--no-init` to Stack's `new` command to skip the initialisation of + the newly-created project for use with Stack. +* The HTML file paths produced at the end of `stack haddock` are printed on + separate lines and without a trailing dot. +* Add option of the form `--doctest-option=` to `stack build`, where + `doctest` is a program recognised by versions of the Cabal library from + `1.24.0.0`. +* Experimental: Add flag `--haddock-for-hackage` to Stack's `build` command + (including the `haddock` synonym for `build --haddock`) to enable building + project packages with flags to generate Haddock documentation, and an archive + file, suitable for upload to Hackage. The form of the Haddock documentation + generated for other packages is unaffected. +* Experimental: Add flag `--documentation` (`-d` for short) to Stack's `upload` + command to allow uploading of documentation for packages to Hackage. +* `stack new` no longer rejects project templates that specify a `package.yaml` + in a subdirectory of the project directory. 
+* Stack will notify the user if Stack has not been tested with the version of + GHC that is being used or a version of Cabal (the library) that has been + found. In YAML configuration files, the `notify-if-ghc-untested` and + `notify-if-cabal-untested` keys are introduced, to allow the notification to + be muted if unwanted. +* The compiler version is included in Stack's build message (e.g. + `stack> build (lib + exe + test) with ghc-9.6.4`). +* Add flag `--candidate` to Stack's `unpack` command, to allow package + candidates to be unpacked locally. +* Stack will notify the user if a specified architecture value is unknown to + Cabal (the library). In YAML configuration files, the `notify-if-arch-unknown` + key is introduced, to allow the notification to be muted if unwanted. +* Add option `--filter ` to Stack's `ls dependencies text` command to + filter out an item from the results, if present. The item can be `$locals` for + all project packages. +* Add option `--snapshot` as synonym for `--resolver`. +* Add the `config set snapshot` command, corresponding to the + `config set resolver` command. + +Bug fixes: + +* Fix the `Curator` instance of `ToJSON`, as regards `expect-haddock-failure`. +* Better error message if a `resolver:` or `snapshot:` value is, in error, a + YAML number. +* Stack accepts all package names that are, in fact, acceptable to Cabal. +* Stack's `sdist` command can check packages with names that include non-ASCII + characters. + +## v2.13.1 - 2023-09-29 + +Release notes: + +* Further to the release notes for Stack 2.3.1, the `-static` suffix has been + removed from the statically-linked Linux/x86_64 executables. +* The executables for Linux/Aarch64 are now statically-linked. +* Executables are now provided for macOS/AArch64. + +**Changes since v2.11.1:** + +Behavior changes: + +* Build artefacts are placed in `.stack-work/dist//` + (hashed to a shorter path on Windows), rather than + `.stack-work/dist//`. 
This allows build artifacts to + be distinguished by GHC version. +* By default, the `stack build` progress bar is capped to a length equal to the + terminal width. +* When building GHC from source, Stack no longer uses Hadrian's deprecated + `--configure`\\`-c` flag and, instead, seeks to run GHC's Python `boot` and + sh `configure` scripts, and ensure that the `happy` and `alex` executables are + on the PATH. +* When auto-detecting `--ghc-build` on Linux, the `musl` GHC build only is + considered a possible GHC build if `libc.musl-x86_64.so.1` is found in `\lib` + or `\lib64`. +* No longer supports Cabal versions older than `1.24.0.0`. This means projects + using snapshots earlier than `lts-7.0` or `nightly-2016-05-26` (GHC 8.0.1) + will no longer build. GHC 8.0.1 comes with Haddock 2.17.2. +* When unregistering many packages in a single step, Stack can now do that + efficiently. Stack no longer uses GHC-supplied `ghc-pkg unregister` (which is, + currently, slower). +* `stack hpc report`, `stack list`, `stack templates` and `stack uninstall` + output their information to the standard output stream rather than to the + standard error stream. Logging is still to the standard error stream. +* `stack upgrade` no longer assumes that binary upgrade is not supported on a + AArch64 machine architecture. + +Other enhancements: + +* Consider GHC 9.6 to be a tested compiler and remove warnings. +* Consider Cabal 3.10 to be a tested library and remove warnings. +* Bump to Hpack 0.36.0. +* Depend on `pantry-0.9.2`, for support for long filenames and directory names + in archives created by `git archive`. +* Avoid the duplicate resolving of usage files when parsing `*.hi` files into a + set of modules and a collection of resolved usage files. See + [#6123](https://github.com/commercialhaskell/stack/pull/6123). +* Add composable component type flags `--exes`, `--tests` and `--benchmarks` to + Stack's `ide targets` command, to list only those components. 
+* `stack --verbose` excludes lengthy information about build plan construction + in the debug output by default. The new `stack --[no-]plan-in-log` flag + enables or disables the inclusion of the information in the debug output. +* In YAML configuration files, the `casa` key is introduced, which takes + precedence over the existing `casa-repo-prefix` key. The latter is deprecated. + The new key also allows Stack's use of a Casa (content-addressable storage + archive) server to be disabled and the maximum number of keys per request to + be configured. The default Casa prefix references https://casa.stackage.org, + instead of https://casa.fpcomplete.com. +* Add option `--progress-bar=` to Stack's `build` command to configure + the format of the progress bar, where `` is one of `none`, + `count-only` (only the package count), `capped` (capped to a length equal to + the terminal width) and `full` (the previous format). + +Bug fixes: + +* Restore `stack sdist --pvp-bounds lower` (broken with Stack 2.9.1). +* Restore building of Stack with Cabal flag `disable-git-info` (broken with + Stack 2.11.1). +* With `stack hoogle`, avoid the message + `Minimum version is hoogle-5.0. Found acceptable hoogle- in your index, requiring its installation.` + when a `hoogle` executable has already been found on the `PATH`. +* Stack's sanity check on a selected GHC now passes GHC flag + `-hide-all-packages`, stopping GHC from looking for a package environment in + default locations. +* Restore Stack script files without extensions (broken with Stack 2.11.1). +* Restore message suffix `due to warnings` with `dump-logs: warning` (broken + with Stack 2.11.1). +* On Windows, the `local-programs-path` directory can now be on a different + drive to the system temporary directory and MSYS2 will still be installed. + +## v2.11.1 - 2023-05-18 + +**Changes since v2.9.3:** + +Behavior changes: + +* Add flag `--[no-]-only-local-bin` to Stack's `upgrade` command for a binary + upgrade. 
If the Stack executable is `my-stack`, the default is + `my-stack upgrade --only-local-bin` where previously it was, effectively, + `my-stack upgrade --no-only-local-bin`. If the Stack executable is `stack`, + the default is `stack upgrade --no-only-local-bin`, the same behaviour as + previously. +* Use `$XDG_CACHE_HOME/stack/ghci-script`, rather than + `/haskell-stack-ghci` (where `` is the directory yielded by the + `temporary` package's `System.IO.Temp.getCanonicalTemporaryDirectory`), as the + base location for GHCi script files generated by `stack ghci` or `stack repl`. + See [#5203](https://github.com/commercialhaskell/stack/issues/5203) +* Drop support for `Cabal` versions before 1.22 and, consequently, GHC versions + before 7.10. +* `stack ghci` and `stack repl` now take into account the values of + `default-language` fields in Cabal files, like they take into account the + values of `default-extensions` fields. +* Removed `--ghc-paths`, `--global-stack-root` and `--local-bin-path` flags for + `stack path`, deprecated in Stack 1.1.0 in favour of `--programs`, + `--stack-root` and `local-bin` respectively. +* On Windows, `stack upgrade` always renames the file of the running Stack + executable (adding extension `.old`) before attempting to write to the + original file name. +* On Windows, `stack upgrade` does not offer `sudo` command alternatives if + attempting to write to the original file name of the running Stack executable + results in a 'Permission' error. +* On Linux, Stack's `setup` command now distinguishes GHC build + `tinfo6-libc6-pre232` from existing `tinfo6`. The former refers to systems + where the version of `libc6` (the GNU C Library) is not compatible with + version 2.32. `tinfo6-libc6-pre232` is now a possible value for the + `ghc-build` configuration option. 
+ +Other enhancements: + +* Add options of the form `--PROG-option=` to `stack build`, where + `PROG` is a program recognised by the Cabal library and one of `alex`, `ar`, + `c2hs`, `cpphs`, `gcc`, `greencard`, `happy`, `hsc2hs`, `hscolour`, `ld`, + `pkg-config`, `strip` and `tar`. If Cabal uses the program during the + configuration step, the argument is passed to it. +* By default all `--PROG-option` options are applied to all project packages. + This behaviour can be changed with new configuration option + `apply-prog-options`. +* Add flag `--[no-]use-root` to `stack script` (default disabled). Used with + `--compile` or `--optimize`, when enabled all compilation outputs (including + the executable) are written to a script-specific location in the `scripts` + directory of the Stack root rather than the script's directory, avoiding + clutter of the latter directory. +* Better error message if the value of the `STACK_WORK` environment variable or + `--work-dir` option is not a valid relative path. +* Stack will use the value of the `GH_TOKEN`, or `GITHUB_TOKEN`, environment + variable as credentials to authenticate its GitHub REST API requests. +* `stack uninstall` also shows how to uninstall Stack-supplied tools. + +Bug fixes: + +* Fix incorrect warning if `allow-newer-deps` are specified but `allow-newer` is + `false`. See + [#6068](https://github.com/commercialhaskell/stack/issues/6086). +* `stack build` with `--file-watch` or `--file-watch-poll` outputs 'pretty' + error messages, as intended. See + [#5978](https://github.com/commercialhaskell/stack/issues/5978). +* `stack build` unregisters any project packages for the sub libraries of a + project package that is to be unregistered. See + [#6046](https://github.com/commercialhaskell/stack/issues/6046). +* The warning that sublibrary dependency is not supported is no longer triggered + by internal libraries. 
+ +## v2.9.3.1 - 2023-06-22 + +Hackage-only release of the `stack` package: + +* Supports building against snapshot Stackage LTS Haskell 21.0 (GHC 9.4.5), + without extra-deps. +* Supports build with `persistent-2.14.5.0`, using CPP directives. +* Supports build with `unix-compat-0.7`, by removing reliance on the module + `System.PosixCompat.User` removed in that package. +* Includes `cabal.project` and `cabal.config` files in the package. + +## v2.9.3 - 2022-12-16 + +**Changes since v2.9.1:** + +Behavior changes: + +* In YAML configuration files, the `package-index` key is introduced which takes + precedence over the existing `package-indices` key. The latter is deprecated. +* In YAML configuration files, the `hackage-security` key of the `package-index` + key or the `package-indices` item can be omitted, and the Hackage Security + configuration for the item will default to that for the official Hackage + server. See [#5870](https://github.com/commercialhaskell/stack/issues/5870). +* Add the `stack config set package-index download-prefix` command to set the + location of Stack's package index in YAML configuration files. +* `stack setup` with the `--no-install-ghc` flag warns that the flag and the + command are inconsistent and now takes no action. Previously the flag was + silently ignored. +* To support the Haskell Foundation's + [Haskell Error Index](https://errors.haskell.org/) initiative, all Stack + error messages generated by Stack itself begin with a unique code in the + form `[S-nnnn]`, where `nnnn` is a four-digit number. +* Test suite executables that seek input on the standard input stream (`stdin`) + will not throw an exception. Previously, they would throw an exception, + consistent with Cabal's 'exitcode-stdio-1.0' test suite interface + specification. Pass the flag `--no-tests-allow-stdin` to `stack build` to + enforce Cabal's specification. 
See + [#5897](https://github.com/commercialhaskell/stack/issues/5897) + +Other enhancements: + +* Help documentation for `stack upgrade` warns that if GHCup is used to install + Stack, only GHCup should be used to upgrade Stack. That is because GHCup uses + an executable named `stack` to manage versions of Stack, that Stack will + likely overwrite on upgrade. +* Add `stack ls dependencies cabal` command, which lists dependencies in the + format of exact Cabal constraints. +* Add `STACK_XDG` environment variable to use the XDG Base Directory + Specification for the Stack root and Stack's global YAML configuration file, + if the Stack root location is not set on the command line or by using the + `STACK_ROOT` environment variable. +* Add `stack path --global-config`, to yield the full path of Stack's + user-specific global YAML configuration file (`config.yaml`). +* Experimental: Add option `allow-newer-deps`, which allows users to specify a + subset of dependencies for which version bounds should be ignored + (`allow-newer-deps: ['foo', 'bar']`). This key has no effect unless + `allow-newer` is enabled. + +Bug fixes: + +* Fix ambiguous module name `Distribution.PackageDescription`, if compiling + `StackSetupShim` with `Cabal-syntax-3.8.1.0` in package database. See + [#5886](https://github.com/commercialhaskell/stack/pull/5886). +* In YAML configuration files, if the `package-indices` key (or the + `hackage-security` key of its item) is omitted, the expiration of timestamps + is now ignored, as intended. See Pantry + [#63](https://github.com/commercialhaskell/pantry/pull/63) + +## v2.9.1 - 2022-09-19 + +**Changes since v2.7.5:** + +Release notes: + +* After an upgrade from an earlier version of Stack, on first use only, + Stack 2.9.1 may warn that it had trouble loading the CompilerPaths cache. 
+
+* The support from the Stack team for executable releases now includes
+  Linux/AArch64 and is limited to:
+
+  * Linux 64-bit/x86_64 (statically-linked)
+  * Linux AArch64 (dynamically-linked)
+  * macOS x86_64
+  * Windows 64-bit/x86_64
+
+Behavior changes:
+
+* `stack build --coverage` will generate a unified coverage report, even if
+  there is only one `*.tix` file, in case a package has tested the library of
+  another package that has not tested its own library. See
+  [#5713](https://github.com/commercialhaskell/stack/issues/5713)
+* `stack --verbose` no longer includes the lengthy raw snapshot layer (rsl) in
+  the debug output by default. The new `stack --[no-]rsl-in-log` flag enables or
+  disables the inclusion of the rsl in the debug output.
+
+Other enhancements:
+
+* Consider GHC 9.2 and 9.4 to be tested compilers and remove warnings.
+* Consider Cabal 3.6 and 3.8 to be tested libraries and remove warnings.
+* Bump to Hpack 0.35.0.
+* On Windows, the installer now sets `DisplayVersion` in the registry, enabling
+  tools like `winget` to properly read the version number.
+* Adds flag `--script-no-run-compile` (disabled by default) that uses the
+  `--no-run` option with `stack script` (and forces the `--compile` option).
+  This enables a command like `stack --script-no-run-compile Script.hs` to
+  behave like `stack script --no-run --compile -- Script.hs` but
+  without having to list all the `<arguments>` in the Stack interpreter options
+  comment in `Script.hs` on the command line. That may help test that scripts
+  compile in CI (continuous integration). See
+  [#5755](https://github.com/commercialhaskell/stack/issues/5755)
+* Fuller help is provided at the command line if a subcommand is missing (for
+  example, `stack ls` now yields the equivalent of `stack ls --help`).
See
+  [#809](https://github.com/commercialhaskell/stack/issues/809)
+* Add build option `--cabal-verbosity=VERBOSITY` to specify the Cabal verbosity
+  level (the option accepts Cabal's numerical and extended syntax).
+  See [#1369](https://github.com/commercialhaskell/stack/issues/1369)
+* Add the possibility of a `sh` script to customise fully GHC installation. See
+  [#5585](https://github.com/commercialhaskell/stack/pull/5585)
+* `tools` subcommand added to `stack ls`, to list stack's installed tools.
+* `stack uninstall` shows how to uninstall Stack.
+* `--ghc-variant` accepts `int-native` as a variant.
+
+Bug fixes:
+
+* Fix `stack clean --full`, so that the files to be deleted are not in use. See
+  [#5714](https://github.com/commercialhaskell/stack/issues/5714)
+* Fix an inconsistency in the pretty formatting of the output of
+  `stack build --coverage`
+* Fix repeated warning about missing parameters when using `stack new`
+* Include `pantry-0.5.6`: Remove operational and mirror keys from bootstrap key
+  set [#53](https://github.com/commercialhaskell/pantry/pull/53)
+* Pass any CPP options specified via `cpp-options:` in the Cabal file to GHCi
+  using GHC's `-optP` flag. See
+  [#5608](https://github.com/commercialhaskell/stack/pull/5608)
+* On Unix-like operating systems, respect the `with-gcc` option when installing
+  GHC. See [#5609](https://github.com/commercialhaskell/stack/pull/5609)
+* Fixed logic in `get_isa()` in `get-stack.sh` to exclude systems that do not
+  have x86 in their `uname -m` output. See
+  [#5792](https://github.com/commercialhaskell/stack/issues/5792).
+* Fixed output of `stack ls snapshots local` on Windows, to behave like that on
+  Unix-like operating systems.
+* Fix non-deterministic test failures when executing a test suite for a
+  multi-project repository with parallelism enabled.
See + [#5024](https://github.com/commercialhaskell/stack/issues/5024) + +## v2.7.5 - 2022-03-06 + +**Changes since v2.7.3:** + +Behavior changes: + +* Cloning git repositories is not per sub-directory anymore, see + [#5411](https://github.com/commercialhaskell/stack/issues/5411) + +Other enhancements: + +* `stack setup` supports installing GHC for macOS/AArch64 (M1) +* `stack upload` supports authentication with a Hackage API key (via + `HACKAGE_KEY` environment variable). + +Bug fixes: + +* Ensure that `extra-path` works for case-insensitive `PATH`s on Windows. + See [rio#237](https://github.com/commercialhaskell/rio/pull/237) +* Fix handling of overwritten `ghc` and `ghc-pkg` locations. + [#5597](https://github.com/commercialhaskell/stack/pull/5597) +* Fix failure to find package when a dependency is shared between projects. + [#5680](https://github.com/commercialhaskell/stack/issues/5680) +* `stack ghci` now uses package flags in `stack.yaml` + [#5434](https://github.com/commercialhaskell/stack/issues/5434) + + +## v2.7.3 - 2021-07-20 + +**Changes since v2.7.1:** + +Other enhancements: + +* `stack upgrade` will download from `haskellstack.org` before trying + `github.com`. See + [#5288](https://github.com/commercialhaskell/stack/issues/5288) +* `stack upgrade` makes less assumptions about archive format. See + [#5288](https://github.com/commercialhaskell/stack/issues/5288) +* Add a `--no-run` flag to the `script` command when compiling. + +Bug fixes: + +* GHC source builds work properly for recent GHC versions again. See + [#5528](https://github.com/commercialhaskell/stack/issues/5528) +* `stack setup` always looks for the unpacked directory name to support + different tar file naming conventions. See + [#5545](https://github.com/commercialhaskell/stack/issues/5545) +* Bump `pantry` version for better OS support. 
See + [pantry#33](https://github.com/commercialhaskell/pantry/issues/33) +* When building the sanity check for a new GHC install, make sure to clear + `GHC_PACKAGE_PATH`. +* Specifying GHC RTS flags in the `stack.yaml` no longer fails with an error. + [#5568](https://github.com/commercialhaskell/stack/pull/5568) +* `stack setup` will look in sandboxed directories for executables, not + relying on `findExecutables. See + [GHC issue 20074](https://gitlab.haskell.org/ghc/ghc/-/issues/20074) +* Track changes to `setup-config` properly to avoid reconfiguring on every + change. See + [#5578](https://github.com/commercialhaskell/stack/issues/5578) + + +## v2.7.1 - 2021-05-07 + +**Changes since v2.5.1.1:** + +Behavior changes: + * `stack repl` now always warns about GHCi problems with loading multiple packages. It also sets now proper working directory when invoked with one package. See [#5421](https://github.com/commercialhaskell/stack/issues/5421) - * `custom-setup` dependencies are now properly initialized for `stack dist`. This makes `explicit-setup-deps` no longer required and that option was removed. See @@ -23,30 +869,52 @@ Behavior changes: Other enhancements: -* Nix integration now passes `ghcVersion` (in addition to existing `ghc`) to - `shell-file` as an identifier that can be looked up in a compiler attribute set. - +* Consider GHC 9.0 to be a tested compiler and remove warnings. +* Consider Cabal 3.6 to be a tested library and remove warnings. +* Nix integration now passes `ghcVersion` (in addition to existing `ghc`) to + `shell-file` as an identifier that can be looked up in a compiler attribute + set. +* Nix integration now allows Nix integration if the user is ready in nix-shell. + This gets rid of "In Nix shell but reExecL is False" error. * `stack list` is a new command to list package versions in a snapshot. 
See [#5431](https://github.com/commercialhaskell/stack/pull/5431) +* `custom-preprocessor-extensions` is a new configuration option for allowing + Stack to be aware of any custom preprocessors you have added to `Setup.hs`. + See [#3491](https://github.com/commercialhaskell/stack/issues/3491) +* Added `--candidate` flag to `upload` command to upload a package candidate + rather than publishing the package. +* Error output using `--no-interleaved-output` no longer prepends indenting + whitespace. This allows emacs compilation-mode and vim quickfix to locate + and track errors. See + [#5523](https://github.com/commercialhaskell/stack/pull/5523) Bug fixes: -* `stack new` now suppports branches other than `master` as default for - GitHub repositories. See +* `stack new` now supports branches other than `master` as default for GitHub + repositories. See [#5422](https://github.com/commercialhaskell/stack/issues/5422) +* Ignore all errors from `hi-file-parser`. See + [#5445](https://github.com/commercialhaskell/stack/issues/5445) and + [#5486](https://github.com/commercialhaskell/stack/issues/5486). +* Support basic auth in package-indices. See + [#5509](https://github.com/commercialhaskell/stack/issues/5509). +* Add support for parsing `.hi`. files from GHC 8.10 and 9.0. See + [hi-file-parser#2](https://github.com/commercialhaskell/hi-file-parser/pull/2). + -## v2.5.1.1 +## v2.5.1.1 - 2020-12-09 Hackage-only release: * Support build with persistent-2.11.x and optparse-applicative-0.16.x -## v2.5.1 +## v2.5.1 - 2020-10-15 **Changes since v2.3.3** Major changes: + * Add the `snapshot-location-base` yaml configuration option, which allows to override the default location of snapshot configuration files. 
This option affects how snapshot synonyms (LTS/Nightly) are expanded to URLs by the @@ -55,14 +923,14 @@ Major changes: Behavior changes: -* File watching now takes into account specified targets, old behavior could - be restored using the new flag `--watch-all` +* File watching now takes into account specified targets, old behavior could be + restored using the new flag `--watch-all` [#5310](https://github.com/commercialhaskell/stack/issues/5310) Other enhancements: -* `stack ls dependencies json` now includes fields `sha256` and `size` for - dependencies of `type` `archive` in `location`. +* The output of `stack ls dependencies json` now includes keys `sha256` and + `size` for dependencies of `type` `archive` in `location`. [#5280](https://github.com/commercialhaskell/stack/issues/5280) * Build failures now show a hint to scroll up to the corresponding section [#5279](https://github.com/commercialhaskell/stack/issues/5279) @@ -82,7 +950,7 @@ Bug fixes: [#5125](https://github.com/commercialhaskell/stack/issues/5125) -## v2.3.3 +## v2.3.3 - 2020-08-06 **Changes since v2.3.1** @@ -93,68 +961,78 @@ Other enhancements: Bug fixes: * When using the `STACK_YAML` env var with Docker, make the path absolute. -* Fix the problem of `stack repl foo:test:bar` failing without a project - build before that. See +* Fix the problem of `stack repl foo:test:bar` failing without a project build + before that. See [#5213](https://github.com/commercialhaskell/stack/issues/5213) -* Fix `stack sdist` introducing unneded sublibrary syntax when using +* Fix `stack sdist` introducing unnecessary sublibrary syntax when using pvp-bounds. See [#5289](https://github.com/commercialhaskell/stack/issues/5289) -## v2.3.1 +## v2.3.1 - 2020-04-29 Release notes: -* We have reduced the number of platforms that we support with binary releases. - The reason behind this is that we've been slowed down in our release process - until now with issues trying to build binaries for less common platforms. 
In - order to make sure we can address issues more quickly (like supporting new - GHC versions), we're limiting support from the Stack team to: +* We have reduced the number of platforms that we support with executable + releases. The reason behind this is that we have been slowed down in our + release process until now with issues trying to build executables for less + common platforms. In order to make sure we can address issues more quickly + (like supporting new GHC versions), we are limiting support from the Stack + team to: - * Linux 64-bit (static) - * macOS - * Windows 64-bit + * Linux 64-bit/x86_64 (statically-linked) + * macOS x86_64 + * Windows 64-bit/x86_64 - If others want to provide additional binaries, we will definitely be happy + If others want to provide additional executables, we will definitely be happy for the support. But since our CI system is currently able to produce these - three bindists only, that's what we will be providing with the next release. - -* Since we no longer have dynamically linked Linux binaries, we are removing - removing the `-static` suffix from the static Linux binaries. If you have - scripts to download the latest stable Linux binary, update them to use - `linux-x86_64` instead of `linux-x86_64-static` (if you are already using the - former, nothing needs to change). For this release, both are supported, but - the next release will no longer have the `-static` variant. - -* We are also deprecating the download links at https://stackage.org/stack. - See this page for the current installation instructions: + three executables only, that's what we will be providing with the next + release. + +* Since we no longer have dynamically-linked Linux executables, we are removing + the `-static` suffix from the static Linux/x86_64 executables. 
If you have + scripts to download the latest stable Linux/x86_64 executable, update them to + use `linux-x86_64` instead of `linux-x86_64-static` (if you are already using + the former, nothing needs to change). For this release, both are supported, + but the next release will no longer have the `-static` variant. + +* We are also deprecating the download links at https://stackage.org/stack. See + this page for the current installation instructions: https://docs.haskellstack.org/en/stable/install_and_upgrade/. -* These are the canonical locations to download the latest stable binaries - from, and will continue to be supported going forward: +* These are the canonical locations to download the latest stable binaries from, + and will continue to be supported going forward: - * Linux 64-bit (static): https://get.haskellstack.org/stable/linux-x86_64.tar.gz - * macOS: https://get.haskellstack.org/stable/osx-x86_64.tar.gz - * Windows 64-bit: https://get.haskellstack.org/stable/windows-x86_64.zip + * Linux 64-bit/x86_64 (static): + https://get.haskellstack.org/stable/linux-x86_64.tar.gz + * macOS x86_64: https://get.haskellstack.org/stable/osx-x86_64.tar.gz + * Windows 64-bit/x86_64: + https://get.haskellstack.org/stable/windows-x86_64.zip - As always, binaries for specific versions are available from the Github + As always, binaries for specific versions are available from the GitHub releases: https://github.com/commercialhaskell/stack/releases. **Changes since v2.1.3.1** Major changes: -* `setup-info-locations` yaml configuration now allows overwriting the default locations of `stack-setup-2.yaml`. +* `setup-info-locations` yaml configuration now allows overwriting the default + locations of `stack-setup-2.yaml`. 
[#5031](https://github.com/commercialhaskell/stack/pull/5031) [#2983](https://github.com/commercialhaskell/stack/issues/2983) [#2913](https://github.com/commercialhaskell/stack/issues/2913) -* The `setup-info` configuration key now allows overwriting parts of the default `setup-info` +* The `setup-info` configuration key now allows overwriting parts of the default + `setup-info` -* The `--setup-info-yaml` command line flag now may be used in all stack commands such as `stack build`, and not only in `stack setup` +* The `--setup-info-yaml` command line flag now may be used in all Stack + commands such as `stack build`, and not only in `stack setup` -* The `--setup-info-yaml` may specify multiple locations for `stack-setup.yaml` files. +* The `--setup-info-yaml` may specify multiple locations for `stack-setup.yaml` + files. -* The `stack upload` can read first reads environment Variable `$HACKAGE_USERNAME` and `$HACKAGE_PASSWORD` if they are missing only then asks for `username` or `password` +* The `stack upload` can read first reads environment Variable + `$HACKAGE_USERNAME` and `$HACKAGE_PASSWORD` if they are missing only then asks + for `username` or `password` * Fully remove GHCJS support. @@ -162,21 +1040,23 @@ Major changes: Behavior changes: -* Remove the deprecated `--stack-setup-yaml` command line argument in favor of `--setup-info-yaml` +* Remove the deprecated `--stack-setup-yaml` command line argument in favor of + `--setup-info-yaml`. See [#2647](https://github.com/commercialhaskell/stack/issues/2647) -* We now recommend checking in generated cabal files for repos. When generating lock files for - extra-deps that only include `package.yaml` files, a deprecation warning will be generated. - Also, those packages will no longer be included in the generated lock files. - See [#5210](https://github.com/commercialhaskell/stack/issues/5210). +* We now recommend checking in generated Cabal files for repos. 
When generating + lock files for extra-deps that only include `package.yaml` files, a + deprecation warning will be generated. Also, those packages will no longer be + included in the generated lock files. See + [#5210](https://github.com/commercialhaskell/stack/issues/5210). Other enhancements: -* Add `build-output-timestamps` flag in yaml. Setting it to true - prefixes each build log output line with a timestamp. +* Add `build-output-timestamps` flag in yaml. Setting it to true prefixes each + build log output line with a timestamp. -* Show warning about `local-programs-path` with spaces on windows - when running scripts. See +* Show warning about `local-programs-path` with spaces on windows when running + scripts. See [#5013](https://github.com/commercialhaskell/stack/pull/5013) * Add `ls dependencies json` which will print dependencies as JSON. @@ -197,12 +1077,14 @@ Bug fixes: Previously, if you SIGTERMed at the wrong time while running a script, you could end up with an inconsistent database state. -* `--resolver global` doesn't retrieve snapshots list from the internet - beause doesn't need it. See [#5103](https://github.com/commercialhaskell/stack/issues/5103) +* `--resolver global` does not retrieve snapshots list from the internet because + does not need it. See + [#5103](https://github.com/commercialhaskell/stack/issues/5103) * Fix using relative links in haddocks output. See [#4971](https://github.com/commercialhaskell/stack/issues/4971). -* Do not include generated cabal file information in lock files. See + +* Do not include generated Cabal file information in lock files. See [#5045](https://github.com/commercialhaskell/stack/issues/5045). * Use proper Hoogle executable path when installed automatically. See @@ -221,7 +1103,7 @@ Bug fixes: used in multiple projects. 
See [#5147](https://github.com/commercialhaskell/stack/issues/5147) -## v2.1.3.1 +## v2.1.3.1 - 2019-07-16 Hackage-only release: @@ -230,14 +1112,14 @@ Hackage-only release: * Add `stack.yaml` back to hackage sdist, and add `snapshot.yaml` -## v2.1.3 +## v2.1.3 - 2019-07-13 **Changes since v2.1.1** Behavior changes: -* Disable WAL mode for SQLite3 databases, to improve compatibility with - some platforms and filesystems. See +* Disable WAL mode for SQLite3 databases, to improve compatibility with some + platforms and filesystems. See [#4876](https://github.com/commercialhaskell/stack/issues/4876). * By default, do not perform expiry checks in Hackage Security. See @@ -245,13 +1127,12 @@ Behavior changes: Other enhancements: -* Do not rerun expected test failures. This is mostly a change that - will only affect the Stackage Curator use case, but there is now an - additional message letting the user know when a previously-failed - test case is being rerun. +* Do not rerun expected test failures. This is mostly a change that will only + affect the Stackage Curator use case, but there is now an additional message + letting the user know when a previously-failed test case is being rerun. -* Move configure information for local packages back to .stack-work to - improve caching. See +* Move configure information for project packages back to .stack-work to improve + caching. See [#4893](https://github.com/commercialhaskell/stack/issues/4893). Bug fixes: @@ -270,12 +1151,12 @@ Bug fixes: avoiding a SIGTERM screwing up GHC installation. See [#4888](https://github.com/commercialhaskell/stack/issues/4888). -* Use package complete locations from lock files when resolving dependencies - in `extra-deps`. See +* Use package complete locations from lock files when resolving dependencies in + `extra-deps`. See [#4887](https://github.com/commercialhaskell/stack/issues/4887). 
-* Set the `HASKELL_DIST_DIR` environment to a proper package dist - directory so `doctest` is able to load modules autogenerated by Cabal. +* Set the `HASKELL_DIST_DIR` environment to a proper package dist directory so + `doctest` is able to load modules autogenerated by Cabal. * Expose package library when running tests. @@ -290,21 +1171,21 @@ Other changes: package). -## v2.1.1.1 +## v2.1.1.1 - 2019-06-14 Hackage-only release that removes `stack.yaml` from the sdist. This is because `stack.yaml` now defines a multi-package project, whereas Hackage works on the basis on individual packages (see [#4860](https://github.com/commercialhaskell/stack/issues/4860)) -If building a `stack` executable for distribution, please download the -source code from https://github.com/commercialhaskell/stack/releases/tag/v2.1.1 -and build it using Stack itself in order to ensure identical behaviour -to official binaries. This package on Hackage is provided for convenience -and bootstrapping purposes. +If building a `stack` executable for distribution, please download the source +code from https://github.com/commercialhaskell/stack/releases/tag/v2.1.1 and +build it using Stack itself in order to ensure identical behaviour to official +binaries. This package on Hackage is provided for convenience and bootstrapping +purposes. -## v2.1.1 +## v2.1.1 - 2019-06-13 The Stack 2 release represents a series of significant changes to how Stack works internally. For the vast majority of cases, these changes are backwards @@ -319,15 +1200,14 @@ features, as listed below. Major changes: -* Switch over to pantry for managing packages. This is a major change - to Stack's internals, and affects user-visible behavior in a few - places. Some highlights: +* Switch over to pantry for managing packages. This is a major change to Stack's + internals, and affects user-visible behavior in a few places. Some highlights: * Drop support for multiple package indices and legacy `00-index.tar` style indices. 
See [#4137](https://github.com/commercialhaskell/stack/issues/4137). - * Support for archives and repos in the `packages` section has - been removed. Instead, you must use `extra-deps` for such - dependencies. `packages` now only supports local filepaths. + * Support for archives and repos in values of the `packages` key has been + removed. Instead, you must use the `extra-deps` key for such dependencies. + `packages` now only supports local filepaths. * Add support for Git repositories containing (recursive) submodules. * Addition of new configuration options for specifying a "pantry tree" key, which provides more reproducibility around builds, @@ -365,9 +1245,8 @@ Major changes: match `foo.txt`, but not `foo.2.txt`. * Remove the `stack image` command. With the advent of Docker multistage builds, this functionality is no longer useful. For an example, please see - [Building Haskell Apps with - Docker](https://www.fpcomplete.com/blog/2017/12/building-haskell-apps-with-docker). -* Support building GHC from source (experimental) + [Building Haskell Apps with Docker](https://www.fpcomplete.com/blog/2017/12/building-haskell-apps-with-docker). +* Experimental: Support building GHC from source * Stack now supports building and installing GHC from source. The built GHC is uniquely identified by a commit id and an Hadrian "flavour" (Hadrian is the newer GHC build system), hence `compiler` can be set to use a GHC @@ -391,7 +1270,7 @@ Behavior changes: * When using `stack script`, custom snapshot files will be resolved relative to the directory containing the script. * Remove the deprecated `--upgrade-cabal` flag to `stack setup`. -* Support the `drop-packages` field in `stack.yaml` +* Support the `drop-packages` key in `stack.yaml` * Remove the GPG signing code during uploads. The GPG signatures have never been used yet, and there are no plans to implement signature verification. 
@@ -409,10 +1288,11 @@ Behavior changes: * Interleaved output is now turned on by default, see [#4702](https://github.com/commercialhaskell/stack/issues/4702). In addition, the `packagename> ` prefix is no longer included in - interelaved mode when only building a single target. + interleaved mode when only building a single target. * The `-fhide-source-paths` GHC option is now enabled by default and can be disabled via the `hide-source-paths` configuration option in - `stack.yaml`. See [#3784](https://github.com/commercialhaskell/stack/issues/3784) + `stack.yaml`. See + [#3784](https://github.com/commercialhaskell/stack/issues/3784) * Stack will reconfigure a package if you modify your `PATH` environment variable. See [#3138](https://github.com/commercialhaskell/stack/issues/3138). @@ -428,7 +1308,8 @@ Behavior changes: it will piggy-back on the existing Hackage index updates. You can set `recommend-stack-upgrade: false` to bypass this. See [#1681](https://github.com/commercialhaskell/stack/issues/1681). -* `stack list-dependencies` has been removed in favour of `stack ls dependencies`. +* `stack list-dependencies` has been removed in favour of + `stack ls dependencies`. * The new default for `--docker-auto-pull` is enabled. See [#3332](https://github.com/commercialhaskell/stack/issues/3332). @@ -436,7 +1317,7 @@ Other enhancements: * Support MX Linux in get-stack.sh. Fixes [#4769](https://github.com/commercialhaskell/stack/issues/4769). -* Defer loading up of files for local packages. This allows us to get +* Defer loading up of files for project packages. This allows us to get plan construction errors much faster, and avoid some unnecessary work when only building a subset of packages. This is especially useful for the curator use case. @@ -445,11 +1326,11 @@ Other enhancements: * Adopt the standard proposed at http://no-color.org/, that color should not be added by default if the `NO_COLOR` environment variable is present. 
* New command `stack ls stack-colors` lists the styles and the associated 'ANSI' - control character sequences that stack uses to color some of its output. See + control character sequences that Stack uses to color some of its output. See `stack ls stack-colors --help` for more information. * New global option `--stack-colors=STYLES`, also available as a - non-project-specific yaml configuration parameter, allows a stack user to - redefine the default styles that stack uses to color some of its output. See + non-project-specific yaml configuration parameter, allows a Stack user to + redefine the default styles that Stack uses to color some of its output. See `stack --help` for more information. * British English spelling of 'color' (colour) accepted as an alias for `--color`, `--stack-colors`, `stack ls stack-colors` at the command line and @@ -463,7 +1344,7 @@ Other enhancements: now include package names so they can be more easily copy-pasted. * Git repos are shared across multiple projects. See [#3551](https://github.com/commercialhaskell/stack/issues/3551) -* Use en_US.UTF-8 locale by default in pure Nix mode so programs won't +* Use en_US.UTF-8 locale by default in pure Nix mode so programs will not crash because of Unicode in their output [#4095](https://github.com/commercialhaskell/stack/issues/4095) * Add `--tree` to `ls dependencies` to list dependencies as tree. @@ -485,7 +1366,7 @@ Other enhancements: with packages from Hackage, not Git repos or archives. * When using the script interpreter with `--optimize` or `--compile`, Stack will perform an optimization of checking whether a newer - executable exists, making reruns significantly faster. There's a + executable exists, making reruns significantly faster. There is a downside to this, however: if you have a multifile script, and change one of the dependency modules, Stack will not automatically detect and recompile. @@ -501,13 +1382,11 @@ Other enhancements: variables. 
See [#620](https://github.com/commercialhaskell/stack/issues/620). * Less verbose output from `stack setup` on Windows. See [#1212](https://github.com/commercialhaskell/stack/issues/1212). -* Add an optional `ignore-expiry` flag to the `hackage-security` - section of the `~/.stack/config.yaml`. It allows to disable timestamp - expiration verification just like `cabal --ignore-expiry` does. - The flag is not enabled by default so that the default functionality - is not changed. -* Include default values for most command line flags in the `--help` - output. See +* Add an optional `ignore-expiry` key to the `hackage-security` key of + `~/.stack/config.yaml`. It allows disabling of timestamp expiration + verification just like `cabal --ignore-expiry` does. The flag is not enabled + by default so that the default functionality is not changed. +* Include default values for most command line flags in the `--help` output. See [#893](https://github.com/commercialhaskell/stack/issues/893). * Set the `GHC_ENVIRONMENT` environment variable to specify dependency packages explicitly when running test. This is done to prevent @@ -529,12 +1408,11 @@ Other enhancements: * User config files are respected for the script command. See [#3705](https://github.com/commercialhaskell/stack/issues/3705), [#3887](https://github.com/commercialhaskell/stack/issues/3887). -* Set the `GHC_ENVIRONMENT` environment variable to `-` to tell GHC to - ignore any such files when GHC is new enough (>= 8.4.4), otherwise - simply unset the variable. This allows Stack to have control of - package databases when running commands like `stack exec ghci`, even - in the presence of implicit environment files created by `cabal - new-build`. See +* Set the `GHC_ENVIRONMENT` environment variable to `-` to tell GHC to ignore + any such files when GHC is new enough (>= 8.4.4), otherwise simply unset the + variable. 
This allows Stack to have control of package databases when running + commands like `stack exec ghci`, even in the presence of implicit environment + files created by `cabal new-build`. See [#4706](https://github.com/commercialhaskell/stack/issues/4706). * Use a database cache table to speed up discovery of installed GHCs * You can specify multiple `--test-arguments` options. See @@ -566,14 +1444,14 @@ Bug fixes: * Fix for git packages to update submodules to the correct state. See [#4314](https://github.com/commercialhaskell/stack/pull/4314) * Add `--cabal-files` flag to `stack ide targets` command. -* Don't download ghc when using `stack clean`. +* Do not download ghc when using `stack clean`. * Support loading in GHCi definitions from symlinked C files. Without this patch, Stack will try to find object files in the directory pointed to by symlinks, while GCC will produce the object files in the original directory. See [#4402](https://github.com/commercialhaskell/stack/pull/4402) * Fix handling of GitHub and URL templates on Windows. See - [commercialhaskell/stack#4394](https://github.com/commercialhaskell/stack/issues/4394) + [#4394](https://github.com/commercialhaskell/stack/issues/4394) * Fix `--file-watch` not responding to file modifications when running inside docker on Mac. See [#4506](https://github.com/commercialhaskell/stack/issues/4506) @@ -586,42 +1464,43 @@ Bug fixes: [#4526](https://github.com/commercialhaskell/stack/issues/4526). * Stack handles ABI changes in FreeBSD 12 by differentiating that version from previous. -* Help text for the `templates` subcommand now reflects behaviour in stack 1.9 +* Help text for the `templates` subcommand now reflects behaviour in Stack 1.9 — that it downloads and shows a help file, rather than listing available templates. -* Fix detection of aarch64 platform (this broke when we upgraded to a newer +* Fix detection of AArch64 platform (this broke when we upgraded to a newer Cabal version). 
-* Docker: fix detecting and pulling missing images with `--docker-auto-pull`, see +* Docker: fix detecting and pulling missing images with `--docker-auto-pull`. + See [#4598](https://github.com/commercialhaskell/stack/issues/4598) * Hackage credentials are not world-readable. See [#2159](https://github.com/commercialhaskell/stack/issues/2159). * Warnings are dumped from logs even when color is enabled. See [#2997](https://github.com/commercialhaskell/stack/issues/2997) -* `stack init` will now work for cabal files with sublibraries. See +* `stack init` will now work for Cabal files with sublibraries. See [#4408](https://github.com/commercialhaskell/stack/issues/4408) * When the Cabal spec version is newer than the global Cabal version, build against the snapshot's Cabal library. See [#4488](https://github.com/commercialhaskell/stack/issues/4488) * Docker: fix detection of expected subprocess failures. This fixes downloading a compatible `stack` executable when the host `stack` is not - compatible with the Docker image (on Linux), and doesn't show an unnecessary + compatible with the Docker image (on Linux), and does not show an unnecessary extra error when the in-container re-exec'ed `stack` exits with failure. * The `stack ghci` command's `--ghc-options` flag now parses multiple options. See [#3315](https://github.com/commercialhaskell/stack/issues/3315). -## v1.9.3.1 +## v1.9.3.1 - 2019-04-18 Hackage-only release with no user facing changes (added compatibility with `rio-0.1.9.2`). -## v1.9.3 +## v1.9.3 - 2018-12-02 Bug fixes: * Stack can now be compiled again inside a directory that does not - contain a `.git` directory, see + contain a `.git` directory. See [#4364](https://github.com/commercialhaskell/stack/issues/4364#issuecomment-431600841) * Handle a change in GHC's hi-dump format around `addDependentFile`, which now includes a hash. See @@ -629,21 +1508,25 @@ Bug fixes: * Allow variables to appear in template file names. 
-## v1.9.1.1 +## v1.9.1.1 - 2018-11-14 Hackage-only release with no user facing changes. * Stack can now be compiled again inside a directory that does not - contain a `.git` directory, see + contain a `.git` directory. See [#4364](https://github.com/commercialhaskell/stack/issues/4364#issuecomment-431600841) -## v1.9.1 +## v1.9.1 - 2018-10-17 Release notes: -* Statically linked Linux bindists are back again, thanks to [@nh2](https://github.com/nh2). -* We will be deleting the Ubuntu, Debian, CentOS, Fedora, and Arch package repos from `download.fpcomplete.com` soon. These have been deprecated for over a year and have not received new releases, but were left in place for compatibility with older scripts. +* Statically-linked Linux executables are back again, thanks to + [@nh2](https://github.com/nh2). +* We will be deleting the Ubuntu, Debian, CentOS, Fedora, and Arch package repos + from `download.fpcomplete.com` soon. These have been deprecated for over a + year and have not received new releases, but were left in place for + compatibility with older scripts. Major changes: @@ -652,7 +1535,8 @@ Major changes: been modified to match that of Cabal. In particular, this means that for Cabal spec versions less than 2.4, `*.txt` will match `foo.txt`, but not `foo.2.txt`. -* `GHCJS` support is being downgraded to 'experimental'. A warning notifying the user of the experimental status of `GHCJS` will be displayed. +* `GHCJS` support is being downgraded to 'experimental'. A warning notifying the + user of the experimental status of `GHCJS` will be displayed. Behavior changes: @@ -675,23 +1559,32 @@ Behavior changes: [help file](https://github.com/commercialhaskell/stack-templates/blob/master/STACK_HELP.md) with more information on how to discover templates. See: [#4039](https://github.com/commercialhaskell/stack/issues/4039) -* Build tools are now handled in a similar way to `cabal-install`. 
In - particular, for legacy `build-tools` fields, we use a hard-coded - list of build tools in place of looking up build tool packages in a +* Tools used during building ('build tools') are now handled in a similar way to + `cabal-install`. In particular, for legacy `build-tools` fields, we use a + hard-coded list of build tools in place of looking up build tool packages in a tool map. This both brings Stack's behavior closer into line with `cabal-install`, avoids some bugs, and opens up some possible optimizations/laziness. See: [#4125](https://github.com/commercialhaskell/stack/issues/4125). * Mustache templating is not applied to large files (over 50kb) to - avoid performance degredation. See: + avoid performance degradation. See: [#4133](https://github.com/commercialhaskell/stack/issues/4133). * `stack upload` signs the package by default, as documented. `--no-signature` turns the signing off. [#3739](https://github.com/commercialhaskell/stack/issues/3739) * In case there is a network connectivity issue while trying to - download a template, stack will check whether that template had + download a template, Stack will check whether that template had been downloaded before. In that case, the cached version will be used. See [#3850](https://github.com/commercialhaskell/stack/issues/3850). +* In Stack's script interpreter, `-- stack --verbosity=info script` and + `-- stack script --verbosity=info` now have the same effect and both override + the `--verbosity=error` default in the interpreter. Previously the default + meant the former was equivalent to + `-- stack --verbosity=info script --verbosity=error` and the latter was + equivalent to `-- stack --verbosity=error script --verbosity=info`, with the + subcommand's global option having precedence over the Stack command's + global option in each case. See + [#5326](https://github.com/commercialhaskell/stack/issues/5326). 
Other enhancements: @@ -702,12 +1595,12 @@ Other enhancements: `extra-deps` of `stack.yaml` * `stack build` suggests trying another GHC version should the build plan end up requiring unattainable `base` version. -* A new sub command `run` has been introduced to build and run a specified executable - similar to `cabal run`. If no executable is provided as the first argument, it - defaults to the first available executable in the project. +* A new sub command `run` has been introduced to build and run a specified + executable similar to `cabal run`. If no executable is provided as the first + argument, it defaults to the first available executable in the project. * `stack build` missing dependency suggestions (on failure to construct a valid - build plan because of missing deps) are now printed with their latest - cabal file revision hash. See + build plan because of missing deps) are now printed with their latest Cabal + file revision hash. See [#4068](https://github.com/commercialhaskell/stack/pull/4068). * Added new `--tar-dir` option to `stack sdist`, that allows to copy the resulting tarball to the specified directory. @@ -719,14 +1612,16 @@ Other enhancements: This should help with [#3510](https://github.com/commercialhaskell/stack/issues/3510). * `stack new` now allows template names of the form `username/foo` to - download from a user other than `commercialstack` on Github, and can be prefixed - with the service `github:`, `gitlab:`, or `bitbucket:`. [#4039](https://github.com/commercialhaskell/stack/issues/4039) + download from a user other than `commercialstack` on GitHub, and can be + prefixed with the service `github:`, `gitlab:`, or `bitbucket:`. 
See + [#4039](https://github.com/commercialhaskell/stack/issues/4039) * Switch to `githash` to include some unmerged bugfixes in `gitrev` Suggestion to add `'allow-newer': true` now shows path to user config - file where this flag should be put into [#3685](https://github.com/commercialhaskell/stack/issues/3685) + file where this flag should be put into + [#3685](https://github.com/commercialhaskell/stack/issues/3685) * `stack ghci` now asks which main target to load before doing the build, rather than after -* Bump to hpack 0.29.0 +* Bump to Hpack 0.29.0 * With GHC 8.4 and later, Haddock is given the `--quickjump` flag. * It is possible to specify the Hackage base URL to upload packages to, instead of the default of `https://hackage.haskell.org/`, by using `hackage-base-url` @@ -770,7 +1665,7 @@ Bug fixes: main library. See [#3787](https://github.com/commercialhaskell/stack/issues/3787). * Sublibraries are now properly considered for coverage reports when the test - suite depends on the internal library. Before, stack was erroring when + suite depends on the internal library. Before, Stack was erroring when trying to generate the coverage report, see [#4105](https://github.com/commercialhaskell/stack/issues/4105). * Sublibraries are now added to the precompiled cache and recovered from there @@ -778,9 +1673,10 @@ Bug fixes: was a package with a sublibrary in the snapshot resulted in broken builds. This is now fixed, see [#4071](https://github.com/commercialhaskell/stack/issues/4071). -* [#4114](https://github.com/commercialhaskell/stack/issues/4114) Stack pretty prints error messages with proper `error` logging - level instead of `warning` now. This also fixes self-executing scripts - not piping plan construction errors from runhaskell to terminal (issue +* [#4114](https://github.com/commercialhaskell/stack/issues/4114) Stack pretty + prints error messages with proper `error` logging level instead of `warning` + now. 
This also fixes self-executing scripts not piping plan construction + errors from runhaskell to terminal (issue [#3942](https://github.com/commercialhaskell/stack/issues/3942)). * Fix invalid "While building Setup.hs" when Cabal calls fail. See: [#3934](https://github.com/commercialhaskell/stack/issues/3934) @@ -789,13 +1685,15 @@ Bug fixes: [#3739](https://github.com/commercialhaskell/stack/issues/3739) -## v1.7.1 +## v1.7.1 - 2018-04-27 Release notes: -* aarch64 (64-bit ARM) bindists are now available for the first time. -* Statically linked Linux bindists are no longer available, due to difficulty with GHC 8.2.2 on Alpine Linux. -* 32-bit Linux GMP4 bindists for CentOS 6 are no longer available, since GHC 8.2.2 is no longer being built for that platform. +* AArch64 (64-bit ARM) executables are now available for the first time. +* Statically-linked Linux executables are no longer available, due to difficulty + with GHC 8.2.2 on Alpine Linux. +* 32-bit Linux GMP4 executables for CentOS 6 are no longer available, since + GHC 8.2.2 is no longer being built for that platform. Major changes: @@ -808,11 +1706,11 @@ Behavior changes: this itself since ghc-8.0.2, and Stack's attempted workaround for older versions caused more problems than it solved. * `stack new` no longer initializes a project if the project template contains - a stack.yaml file. + a `stack.yaml` file. Other enhancements: -* A new sub command `ls` has been introduced to stack to view +* A new sub command `ls` has been introduced to Stack to view local and remote snapshots present in the system. Use `stack ls snapshots --help` to get more details about it. * `list-dependencies` has been deprecated. The functionality has @@ -821,22 +1719,23 @@ Other enhancements: for details. * Specify User-Agent HTTP request header on every HTTP request. See [#3628](https://github.com/commercialhaskell/stack/issues/3628) for details. 
-* `stack setup` looks for GHC bindists and installations by any OS key - that is compatible (rather than only checking a single one). This is +* `stack setup` looks for GHC binary distributions and installations by any OS + key that is compatible (rather than only checking a single one). This is relevant on Linux where different distributions may have different combinations of libtinfo 5/6, ncurses 5/6, and gmp 4/5, and will allow - simpifying the setup-info metadata YAML for future GHC releases. + simplifying the setup-info metadata YAML for future GHC releases. * The build progress bar reports names of packages currently building. * `stack setup --verbose` causes verbose output of GHC configure process. See [#3716](https://github.com/commercialhaskell/stack/issues/3716) -* Improve the error message when an `extra-dep` from a path or git reference can't be found - See [#3808](https://github.com/commercialhaskell/stack/pull/3808) +* Improve the error message when an `extra-dep` from a path or git reference + cannot be found. See + [#3808](https://github.com/commercialhaskell/stack/pull/3808) * Nix integration is now disabled on windows even if explicitly enabled, - since it isn't supported. See + since it is not supported. See [#3600](https://github.com/commercialhaskell/stack/issues/3600) -* `stack build` now supports a new flag `--keep-tmp-files` to retain intermediate - files and directories for the purpose of debugging. - It is best used with ghc's equivalent flag, +* `stack build` now supports a new flag `--keep-tmp-files` to retain + intermediate files and directories for the purpose of debugging. It is best + used with ghc's equivalent flag, i.e. `stack build --keep-tmp-files --ghc-options=-keep-tmp-files`. 
See [#3857](https://github.com/commercialhaskell/stack/issues/3857) * Improved error messages for snapshot parse exceptions @@ -849,10 +1748,11 @@ Other enhancements: Bug fixes: * The script interpreter's implicit file arguments are now passed before other - arguments. See [#3658](https://github.com/commercialhaskell/stack/issues/3658). + arguments. See + [#3658](https://github.com/commercialhaskell/stack/issues/3658). In particular, this makes it possible to pass `-- +RTS ... -RTS` to specify RTS arguments used when running the script. -* Don't ignore the template `year` parameter in config files, and clarify the +* Do not ignore the template `year` parameter in config files, and clarify the surrounding documentation. See [#2275](https://github.com/commercialhaskell/stack/issues/2275). * Benchmarks used to be run concurrently with other benchmarks @@ -869,23 +1769,24 @@ Bug fixes: [#3589](https://github.com/commercialhaskell/stack/issues/3589#issuecomment) * `stack ghci` now uses correct paths for autogen files with [#3791](https://github.com/commercialhaskell/stack/issues/3791) -* When a package contained sublibraries, stack was always recompiling the +* When a package contained sublibraries, Stack was always recompiling the package. This has been fixed now, no recompilation is being done because of - sublibraries. See [#3899](https://github.com/commercialhaskell/stack/issues/3899). + sublibraries. See + [#3899](https://github.com/commercialhaskell/stack/issues/3899). * The `get-stack.sh` install script now matches manual instructions when it comes to Debian/Fedora/CentOS install dependencies. * Compile Cabal-simple with gmp when using Nix. See [#2944](https://github.com/commercialhaskell/stack/issues/2944) -* `stack ghci` now replaces the stack process with ghci. This improves +* `stack ghci` now replaces the Stack process with ghci. This improves signal handling behavior. In particular, handling of Ctrl-C. 
+  The paths are based on hashing file contents, and they are stored in the
See [#3631](https://github.com/commercialhaskell/stack/issues/3631). + Cabal bug. See + [#3631](https://github.com/commercialhaskell/stack/issues/3631). * Run the Cabal file checking in the `sdist` command more reliably by allowing the Cabal library to flatten the `GenericPackageDescription` itself. - -## v1.6.1.1 +## v1.6.1.1 - 2017-12-20 Hackage-only release with no user facing changes (updated to build with newer dependency versions). - -## v1.6.1 +## v1.6.1 - 2017-12-07 Major changes: * Complete overhaul of how snapshots are defined, the `packages` and - `extra-deps` fields, and a number of related items. For full - details, please see - [the writeup on these changes](https://www.fpcomplete.com/blog/2017/07/stacks-new-extensible-snapshots). [PR #3249](https://github.com/commercialhaskell/stack/pull/3249), + `extra-deps` keys, and a number of related items. For full + details, please see the + [writeup](https://www.fpcomplete.com/blog/2017/07/stacks-new-extensible-snapshots) + on these changes. + [PR #3249](https://github.com/commercialhaskell/stack/pull/3249), see the PR description for a number of related issues. * Upgraded to version 2.0 of the Cabal library. @@ -994,7 +1896,7 @@ Behavior changes: prompt can be avoided by using the `save-hackage-creds` setting. Please see [#2159](https://github.com/commercialhaskell/stack/issues/2159). * The `GHCRTS` environment variable will no longer be passed through to - every program stack runs. Instead, it will only be passed through + every program Stack runs. Instead, it will only be passed through commands like `exec`, `runghc`, `script`, `ghci`, etc. See [#3444](https://github.com/commercialhaskell/stack/issues/3444). * `ghc-options:` for specific packages will now come after the options @@ -1009,10 +1911,11 @@ Other enhancements: * The `with-hpack` configuration option specifies an Hpack executable to use instead of the Hpack bundled with Stack. 
Please see [#3179](https://github.com/commercialhaskell/stack/issues/3179). -* It's now possible to skip tests and benchmarks using `--skip` +* It is now possible to skip tests and benchmarks using `--skip` flag -* `GitSHA1` is now `StaticSHA256` and is implemented using the `StaticSize 64 ByteString` for improved performance. - See [#3006](https://github.com/commercialhaskell/stack/issues/3006) +* `GitSHA1` is now `StaticSHA256` and is implemented using the + `StaticSize 64 ByteString` for improved performance. See + [#3006](https://github.com/commercialhaskell/stack/issues/3006) * Dependencies via HTTP(S) archives have been generalized to allow local file path archives, as well as to support setting a cryptographic hash (SHA256) of the contents for better @@ -1038,7 +1941,7 @@ Other enhancements: Sometimes GHC's heuristics would work fine even before this change, for example in `stack ghci`, but this override's GHC's heuristics when they're broken by our collecting and processing GHC's output. -* Extended the `ghc-options` field to support `$locals`, `$targets`, +* Extended the `ghc-options` key to support `$locals`, `$targets`, and `$everything`. See: [#3329](https://github.com/commercialhaskell/stack/issues/3329) * Better error message for case that `stack ghci` file targets are @@ -1050,7 +1953,7 @@ Other enhancements: * Better descriptions are now available for `stack upgrade --help`. See: [#3070](https://github.com/commercialhaskell/stack/issues/3070) * When using Nix, nix-shell now depends always on gcc to prevent build errors - when using the FFI. As ghc depends on gcc anyway, this doesn't increase the + when using the FFI. As ghc depends on gcc anyway, this does not increase the dependency footprint. * `--cwd DIR` can now be passed to `stack exec` in order to execute the program in a different directory. See: @@ -1062,13 +1965,13 @@ Other enhancements: * Addition of `--ghc-options` to `stack script` to pass options directly to GHC. 
See: [#3454](https://github.com/commercialhaskell/stack/issues/3454) -* Add hpack `package.yaml` to build Stack itself +* Add Hpack `package.yaml` to build Stack itself * Add `ignore-revision-mismatch` setting. See: [#3520](https://github.com/commercialhaskell/stack/issues/3520). * Log when each individual test suite finishes. See: [#3552](https://github.com/commercialhaskell/stack/issues/3552). * Avoid spurious rebuilds when using `--file-watch` by not watching files for - executable, test and benchmark components that aren't a target. See: + executable, test and benchmark components that are not a target. See: [#3483](https://github.com/commercialhaskell/stack/issues/3483). * Stack will now try to detect the width of the running terminal (only on POSIX for the moment) and use that to better display @@ -1079,15 +1982,15 @@ Other enhancements: * Passing non local packages as targets to `stack ghci` will now cause them to be used as `-package` args along with package hiding. -* Detect when user changed .cabal file instead of package.yaml. This - was implemented upstream in hpack. See +* Detect when user changed Cabal file instead of `package.yaml`. This + was implemented upstream in Hpack. See [#3383](https://github.com/commercialhaskell/stack/issues/3383). * Automatically run `autoreconf -i` as necessary when a `configure` script is missing. See [#3534](https://github.com/commercialhaskell/stack/issues/3534) -* GHC bindists can now be identified by their SHA256 checksum in addition to - their SHA1 checksum, allowing for more security in download. -* For filesystem setup-info paths, it's no longer assumed that the +* GHC binary distributions can now be identified by their SHA256 checksum in + addition to their SHA1 checksum, allowing for more security in download. +* For filesystem setup-info paths, it is no longer assumed that the directory is writable, instead a temp dir is used. See [#3188](https://github.com/commercialhaskell/stack/issues/3188). 
@@ -1100,10 +2003,10 @@ Bug fixes: * `stack haddock` now includes package names for all modules in the Haddock index page. See: [#2886](https://github.com/commercialhaskell/stack/issues/2886) -* Fixed an issue where Stack wouldn't detect missing Docker images +* Fixed an issue where Stack would not detect missing Docker images properly with newer Docker versions. [#3171](https://github.com/commercialhaskell/stack/pull/3171) -* Previously, cabal files with just test-suite could cause build to fail +* Previously, Cabal files with just test-suite could cause build to fail ([#2862](https://github.com/commercialhaskell/stack/issues/2862)) * If an invalid snapshot file has been detected (usually due to mismatched hashes), Stack will delete the downloaded file and @@ -1113,7 +2016,7 @@ Bug fixes: Cabal's flag parser, which allows multiple sequential dashes. See [#3345](https://github.com/commercialhaskell/stack/issues/3345) * Now clears the hackage index cache if it is older than the - downloaded index. Fixes potential issue if stack was interrupted when + downloaded index. Fixes potential issue if Stack was interrupted when updating index. See [#3033](https://github.com/commercialhaskell/stack/issues/3033) * The Stack install script now respects the `-d` option. @@ -1121,7 +2024,7 @@ Bug fixes: * `stack script` can now handle relative paths to source files. See [#3372](https://github.com/commercialhaskell/stack/issues/3372). * Fixes explanation of why a target is needed by the build plan, when the - target is an extra dependency from the commandline. + target is an extra-dep from the commandline. See [#3378](https://github.com/commercialhaskell/stack/issues/3378). * Previously, if you delete a yaml file from ~/.stack/build-plan, it would trust the etag and not re-download. Fixed in this version. @@ -1145,12 +2048,12 @@ Bug fixes: * When `--pvp-bounds` is enabled for sdist or upload, internal dependencies could cause errors when uploaded to hackage. 
+* Fixes a bug where nonexistent Hackage versions would cause Stack to
See [#3044](https://github.com/commercialhaskell/stack/issues/3044) * Upstream packages unpacked to a temp dir are now deleted as soon as possible to avoid running out of space in `/tmp`. @@ -1208,30 +2109,29 @@ Other enhancements: of sources being included in Haddock output. See [#3099](https://github.com/commercialhaskell/stack/issues/3099) * `stack ghci` will now skip building all local targets, even if they have - downstream deps, as long as it's registered in the DB. -* The pvp-bounds feature now supports adding `-revision` to the end of - each value, e.g. `pvp-bounds: both-revision`. This means that, when - uploading to Hackage, Stack will first upload your tarball with an - unmodified `.cabal` file, and then upload a cabal file revision with - the PVP bounds added. This can be useful—especially combined - with the - [Stackage no-revisions feature](http://www.snoyman.com/blog/2017/04/stackages-no-revisions-field)—as - a method to ensure PVP compliance without having to proactively fix - bounds issues for Stackage maintenance. + downstream deps, as long as it is registered in the DB. +* The `pvp-bounds` feature now supports adding `-revision` to the end of each + value, e.g. `pvp-bounds: both-revision`. This means that, when uploading to + Hackage, Stack will first upload your tarball with an unmodified Cabal file, + and then upload a Cabal file revision with the PVP bounds added. This can be + useful - especially combined with the + [Stackage no-revisions feature](http://www.snoyman.com/blog/2017/04/stackages-no-revisions-field) - as + a method to ensure PVP compliance without having to proactively fix bounds + issues for Stackage maintenance. * Expose a `save-hackage-creds` configuration option * On GHC <= 7.8, filters out spurious linker warnings on windows See [#3127](https://github.com/commercialhaskell/stack/pull/3127) * Better error messages when creating or building packages which alias wired-in packages. 
See [#3172](https://github.com/commercialhaskell/stack/issues/3172). -* MinGW bin folder now is searched for dynamic libraries. See [#3126](https://github.com/commercialhaskell/stack/issues/3126) +* MinGW bin folder now is searched for dynamic libraries. See + [#3126](https://github.com/commercialhaskell/stack/issues/3126) * When using Nix, nix-shell now depends always on git to prevent runtime errors while fetching metadata -* The `stack unpack` command now accepts a form where an explicit - Hackage revision hash is specified, e.g. `stack unpack - foo-1.2.3@gitsha1:deadbeef`. Note that this should be considered - _experimental_, Stack will likely move towards a different hash - format in the future. +* Experimental: The `stack unpack` command now accepts a form where an explicit + Hackage revision hash is specified, e.g. + `stack unpack foo-1.2.3@gitsha1:deadbeef`. Note that Stack will likely move + towards a different hash format in the future. * Binary "stack upgrade" will now warn if the installed executable is not on the PATH or shadowed by another entry. * Allow running tests on tarball created by sdist and upload @@ -1242,7 +2142,7 @@ Bug fixes: * Fixes case where `stack build --profile` might not cause executables / tests / benchmarks to be rebuilt. See [#2984](https://github.com/commercialhaskell/stack/issues/2984) -* `stack ghci file.hs` now loads the file even if it isn't part of +* `stack ghci file.hs` now loads the file even if it is not part of your project. * `stack clean --full` now works when docker is enabled. See [#2010](https://github.com/commercialhaskell/stack/issues/2010) @@ -1258,7 +2158,7 @@ Bug fixes: groups on Ubuntu Yakkety (16.10). See [#3092](https://github.com/commercialhaskell/stack/issues/3092) * Switching a package between extra-dep and local package now forces - rebuild (previously it wouldn't if versions were the same). + rebuild (previously it would not if versions were the same). 
See [#2147](https://github.com/commercialhaskell/stack/issues/2147) * `stack upload` no longer reveals your password when you type it on MinTTY-based Windows shells, such as Cygwin and MSYS2. @@ -1267,7 +2167,7 @@ Bug fixes: have Windows-style line endings (CRLF) -## 1.4.0 +## 1.4.0 - 2017-03-15 Release notes: @@ -1295,9 +2195,9 @@ Behavior changes: * The default package metadata backend has been changed from Git to the 01-index.tar.gz file, from the hackage-security project. This is - intended to address some download speed issues from Github for + intended to address some download speed issues from GitHub for people in certain geographic regions. There is now full support for - checking out specific cabal file revisions from downloaded tarballs + checking out specific Cabal file revisions from downloaded tarballs as well. If you manually specify a package index with only a Git URL, Git will still be used. See [#2780](https://github.com/commercialhaskell/stack/issues/2780) @@ -1306,7 +2206,7 @@ Behavior changes: the given snapshot instead of taking the latest version. For example, `stack --resolver lts-7.14 unpack mtl` will get version 2.2.1 of `mtl`, regardless of the latest version available in the - package indices. This will also force the same cabal file revision + package indices. This will also force the same Cabal file revision to be used as is specified in the snapshot. Unpacking via a package identifier (e.g. `stack --resolver lts-7.14 @@ -1316,7 +2216,7 @@ Behavior changes: For backwards compatibility with tools relying on the presence of a `00-index.tar`, Stack will copy the `01-index.tar` file to `00-index.tar`. Note, however, that these files are different; most - importantly, 00-index contains only the newest revisions of cabal + importantly, 00-index contains only the newest revisions of Cabal files, while 01-index contains all versions. You may still need to update your tooling. 
+* Respects the `custom-setup` stanza introduced in Cabal 1.24. This supersedes
([#2878](https://github.com/commercialhaskell/stack/issues/2878)) -* Bump to hpack 0.17.0 to allow `custom-setup` and `!include "..."` in `package.yaml`. +* Bump to Hpack 0.17.0 to allow `custom-setup` and `!include "..."` in + `package.yaml`. * The script interpreter will now output error logging. In particular, this means it will output info about plan construction errors. ([#2879](https://github.com/commercialhaskell/stack/issues/2879)) * `stack ghci` now takes `--flag` and `--ghc-options` again (inadvertently removed in 1.3.0). ([#2986](https://github.com/commercialhaskell/stack/issues/2986)) -* `stack exec` now takes `--rts-options` which passes the given arguments inside of - `+RTS ... args .. -RTS` to the executable. This works around stack itself consuming - the RTS flags on Windows. ([#2640](https://github.com/commercialhaskell/stack/issues/2640)) +* `stack exec` now takes `--rts-options` which passes the given arguments inside + of `+RTS ... args .. -RTS` to the executable. This works around Stack itself + consuming the RTS flags on Windows. + ([#2640](https://github.com/commercialhaskell/stack/issues/2640)) * Upgraded `http-client-tls` version, which now offers support for the `socks5://` and `socks5h://` values in the `http_proxy` and `https_proxy` environment variables. Bug fixes: -* Bump to hpack 0.16.0 to avoid character encoding issues when reading and +* Bump to Hpack 0.16.0 to avoid character encoding issues when reading and writing on non-UTF8 systems. -* `stack ghci` will no longer ignore hsSourceDirs that contain `..`. ([#2895](https://github.com/commercialhaskell/stack/issues/2895)) +* `stack ghci` will no longer ignore hsSourceDirs that contain `..`. + ([#2895](https://github.com/commercialhaskell/stack/issues/2895)) * `stack list-dependencies --license` now works for wired-in-packages, like base. 
([#2871](https://github.com/commercialhaskell/stack/issues/2871)) * `stack setup` now correctly indicates when it uses system ghc @@ -1369,7 +2273,7 @@ Bug fixes: * Fix to `stack config set`, in 1.3.2 it always applied to the global project. ([#2709](https://github.com/commercialhaskell/stack/issues/2709)) -* Previously, cabal files without exe or lib would fail on the "copy" step. +* Previously, Cabal files without exe or lib would fail on the "copy" step. ([#2862](https://github.com/commercialhaskell/stack/issues/2862)) * `stack upgrade --git` now works properly. Workaround for affected versions (>= 1.3.0) is to instead run `stack upgrade --git --source-only`. @@ -1381,14 +2285,15 @@ Bug fixes: packages depending on local packages which have changed. ([#2904](https://github.com/commercialhaskell/stack/issues/2904)) -## 1.3.2 +## 1.3.2 - 2016-12-27 Bug fixes: * `stack config set` can now be used without a compiler installed [#2852](https://github.com/commercialhaskell/stack/issues/2852). -* `get-stack.sh` now installs correct binary on ARM for generic linux and raspbian, - closing [#2856](https://github.com/commercialhaskell/stack/issues/2856). +* `get-stack.sh` now installs correct executable on ARM for generic Linux and + raspbian, closing + [#2856](https://github.com/commercialhaskell/stack/issues/2856). * Correct the testing of whether a package database exists by checking for the `package.cache` file itself instead of the containing directory. @@ -1401,21 +2306,19 @@ Bug fixes: ignoring locale settings on a local machine. See [Yesod mailing list discussion](https://groups.google.com/d/msg/yesodweb/ZyWLsJOtY0c/aejf9E7rCAAJ) -## 1.3.0 +## 1.3.0 - 2016-12-12 Release notes: -* For the _next_ stack release after this one, we are planning - changes to our Linux releases, including dropping our Ubuntu, - Debian, CentOS, and Fedora package repositories and switching to - statically linked binaries. 
See +* For the _next_ Stack release after this one, we are planning changes to our + Linux releases, including dropping our Ubuntu, Debian, CentOS, and Fedora + package repositories and switching to statically-linked executables. See [#2534](https://github.com/commercialhaskell/stack/issues/2534). - Note that upgrading without a package manager has gotten easier - with new binary upgrade support in `stack upgrade` (see the Major - Changes section below for more information). In addition, the - get.haskellstack.org script no longer installs from Ubuntu, - Debian, CentOS, or Fedora package repositories. Instead it places - a generic binary in /usr/local/bin. + Note that upgrading without a package manager has gotten easier with new + binary upgrade support in `stack upgrade` (see the Major Changes section below + for more information). In addition, the get.haskellstack.org script no longer + installs from Ubuntu, Debian, CentOS, or Fedora package repositories. Instead + it places a generic executable in /usr/local/bin. Major changes: @@ -1425,9 +2328,9 @@ Major changes: Docker- and Nix-enabled projects continue to use the GHC installations in their environment by default. - NB: Scripts that previously used stack in combination with a system GHC - installation should now include a `stack setup` line or use the `--install-ghc` - flag. + NB: Scripts that previously used Stack in combination with a system GHC + installation should now include a `stack setup` line or use the + `--install-ghc` flag. [#2221](https://github.com/commercialhaskell/stack/issues/2221) * `stack ghci` now defaults to skipping the build of target packages, because @@ -1436,11 +2339,10 @@ Major changes: because it should no longer be necessary. See [#1364](https://github.com/commercialhaskell/stack/issues/1364) -* Stack is now capable of doing binary upgrades instead of always - recompiling a new version from source. 
Running `stack upgrade` will - now default to downloading a binary version of Stack from the most - recent release, if one is available. See `stack upgrade --help` for - more options. +* Stack is now capable of doing binary upgrades instead of always recompiling a + new version from source. Running `stack upgrade` will now default to + downloading a Stack executable from the most recent release, if one is + available. See `stack upgrade --help` for more options. [#1238](https://github.com/commercialhaskell/stack/issues/1238) Behavior changes: @@ -1460,12 +2362,13 @@ Other enhancements: * `stack haddock` now supports `--haddock-internal`. See [#2229](https://github.com/commercialhaskell/stack/issues/2229) -* Add support for `system-ghc` and `install-ghc` fields to `stack config set` command. +* Add support for `system-ghc` and `install-ghc` subcommands to + `stack config set` command. * Add `ghc-build` option to override autodetected GHC build to use (e.g. gmp4, tinfo6, nopie) on Linux. * `stack setup` detects systems where gcc enables PIE by default (such as Ubuntu - 16.10 and Hardened Gentoo) and adjusts the GHC `configure` options accordingly. - [#2542](https://github.com/commercialhaskell/stack/issues/2542) + 16.10 and Hardened Gentoo) and adjusts the GHC `configure` options + accordingly. [#2542](https://github.com/commercialhaskell/stack/issues/2542) * Upload to Hackage with HTTP digest instead of HTTP basic. * Make `stack list-dependencies` understand all of the `stack dot` options too. * Add the ability for `stack list-dependencies` to list dependency licenses by @@ -1477,20 +2380,21 @@ Other enhancements: console. [#426](https://github.com/commercialhaskell/stack/issues/426) * Add the `--open` option to "stack hpc report" command, causing the report to be opened in the browser. 
-* The `stack config set` command now accepts a `--global` flag for suitable fields - which causes it to modify the global user configuration (`~/.stack/config.yaml`) - instead of the project configuration. +* The `stack config set` command now accepts a `--global` flag for suitable + subcommands which causes it to modify the global user configuration + (`~/.stack/config.yaml`) instead of the project configuration. [#2675](https://github.com/commercialhaskell/stack/pull/2675) -* Information on the latest available snapshots is now downloaded from S3 instead of - stackage.org, increasing reliability in case of stackage.org outages. - [#2653](https://github.com/commercialhaskell/stack/pull/2653) +* Information on the latest available snapshots is now downloaded from S3 + instead of stackage.org, increasing reliability in case of stackage.org + outages. [#2653](https://github.com/commercialhaskell/stack/pull/2653) * `stack dot` and `stack list-dependencies` now take targets and flags. [#1919](https://github.com/commercialhaskell/stack/issues/1919) * Deprecate `stack setup --stack-setup-yaml` for `--setup-info-yaml` based - on discussion in [#2647](https://github.com/commercialhaskell/stack/issues/2647). + on discussion in + [#2647](https://github.com/commercialhaskell/stack/issues/2647). * The `--main-is` flag for GHCI now implies the TARGET, fixing [#1845](https://github.com/commercialhaskell/stack/issues/1845). -* `stack ghci` no longer takes all build options, as many weren't useful +* `stack ghci` no longer takes all build options, as many were not useful [#2199](https://github.com/commercialhaskell/stack/issues/2199) * `--no-time-in-log` option, to make verbose logs more diffable [#2727](https://github.com/commercialhaskell/stack/issues/2727) @@ -1511,9 +2415,9 @@ Other enhancements: [#2545](https://github.com/commercialhaskell/stack/issues/2545) * Docker: redirect stdout of `docker pull` to stderr so that it will not interfere with output of other commands. 
-* Nix & docker can be activated at the same time, in order to run stack in a nix-shell - in a container, preferably from an image already containing the nix dependencies - in its /nix/store +* Nix & docker can be activated at the same time, in order to run Stack in a + nix-shell in a container, preferably from an image already containing the nix + dependencies in its /nix/store * Stack/nix: Dependencies can be added as nix GC roots, so they are not removed when running `nix-collect-garbage` @@ -1522,8 +2426,8 @@ Bug fixes: * Fixed a gnarly bug where programs and package tarballs sometimes have corrupted downloads. See [#2657](https://github.com/commercialhaskell/stack/issues/2568). -* Add proper support for non-ASCII characters in file paths for the `sdist` command. - See [#2549](https://github.com/commercialhaskell/stack/issues/2549) +* Add proper support for non-ASCII characters in file paths for the `sdist` + command. See [#2549](https://github.com/commercialhaskell/stack/issues/2549) * Never treat `extra-dep` local packages as targets. This ensures things like test suites are not run for these packages, and that build output is not hidden due to their presence. @@ -1535,13 +2439,13 @@ Bug fixes: [#2568](https://github.com/commercialhaskell/stack/issues/2568) * Fixed running `stack hpc report` on package targets. [#2664](https://github.com/commercialhaskell/stack/issues/2664) -* Fix a long-standing performance regression where stack would parse the .dump-hi - files of the library components of local packages twice. +* Fix a long-standing performance regression where Stack would parse the + `.dump-hi` files of the library components of local packages twice. [#2658](https://github.com/commercialhaskell/stack/pull/2658) * Fixed a regression in "stack ghci --no-load", where it would prompt for a main module to load. 
[#2603](https://github.com/commercialhaskell/stack/pull/2603) * Build Setup.hs files with the threaded RTS, mirroring the behavior of - cabal-install and enabling more complex build systems in those files. + `cabal-install` and enabling more complex build systems in those files. * Fixed a bug in passing along `--ghc-options` to ghcjs. They were being provided as `--ghc-options` to Cabal, when it needs to be `--ghcjs-options`. [#2714](https://github.com/commercialhaskell/stack/issues/2714) @@ -1551,7 +2455,7 @@ Bug fixes: * `stack setup --reinstall` now behaves as expected. [#2554](https://github.com/commercialhaskell/stack/issues/2554) -## 1.2.0 +## 1.2.0 - 2016-09-16 Release notes: @@ -1562,9 +2466,9 @@ Release notes: * The fix for [#2175](https://github.com/commercialhaskell/stack/issues/2175) - entails that stack must perform a full clone of a large Git repo of + entails that Stack must perform a full clone of a large Git repo of Hackage meta-information. The total download size is about 200 MB. - Please be aware of this when upgrading your stack installation. + Please be aware of this when upgrading your Stack installation. * If you use Mac OS X, you may want to delay upgrading to macOS Sierra as there are reports of GHC panics when building some packages (including Stack @@ -1574,20 +2478,23 @@ Release notes: [store#37](https://github.com/fpco/store/issues/37)). Please stay with version 1.1.2 for now on those architectures. This will be rectified soon! -* We are now releasing a - [statically linked Stack binary for 64-bit Linux](https://get.haskellstack.org/stable/linux-x86_64-static.tar.gz). +* We are now releasing a statically-linked Stack executable for + [64-bit Linux](https://get.haskellstack.org/stable/linux-x86_64-static.tar.gz). Please try it and let us know if you run into any trouble on your platform. 
+ The executable is linked against a version of the GNU Multiple Precision + Arithmetic Library (GMP), which is licensed under the GNU Lesser General + Public License, Version 3, 29 June 2007 (LGPL). * We are planning some changes to our Linux releases, including dropping our Ubuntu, Debian, CentOS, and Fedora package repositories and switching to - statically linked binaries. We would value your feedback in + statically-linked executables. We would value your feedback in [#2534](https://github.com/commercialhaskell/stack/issues/2534). Major changes: * Add `stack hoogle` command. [#55](https://github.com/commercialhaskell/stack/issues/55) -* Support for absolute file path in `url` field of `setup-info` or `--ghc-bindist` +* Support for absolute file path in `url` key of `setup-info` or `--ghc-bindist` * Add support for rendering GHCi scripts targeting different GHCi like applications [#2457](https://github.com/commercialhaskell/stack/pull/2457) @@ -1601,21 +2508,23 @@ Behavior changes: * Support -ferror-spans syntax in GHC error messages. * Avoid unpacking ghc to `/tmp` [#996](https://github.com/commercialhaskell/stack/issues/996) -* The Linux `gmp4` GHC bindist is no longer considered a full-fledged GHC - variant and can no longer be specified using the `ghc-variant` option, - and instead is treated more like a slightly different platform. +* The Linux `gmp4` GHC binary distribution is no longer considered a + full-fledged GHC variant and can no longer be specified using the + `ghc-variant` option, and instead is treated more like a slightly different + platform. Other enhancements: * Use the `store` package for binary serialization of most caches. -* Only require minor version match for Docker stack exe. +* Only require minor version match for Docker Stack exe. This way, we can make patch releases for version bounds and similar build issues without needing to upload new binaries for Docker. 
-* Stack/Nix: Passes the right ghc derivation as an argument to the `shell.nix` when a - custom `shell.nix` is used - See [#2243](https://github.com/commercialhaskell/stack/issues/2243) -* Stack/Nix: Sets `LD_LIBRARY_PATH` so packages using C libs for Template Haskell can work - (See _e.g._ [this HaskellR issue](https://github.com/tweag/HaskellR/issues/253)) +* Stack/Nix: Passes the right ghc derivation as an argument to the `shell.nix` + when a custom `shell.nix` is used. See + [#2243](https://github.com/commercialhaskell/stack/issues/2243) +* Stack/Nix: Sets `LD_LIBRARY_PATH` so packages using C libs for Template + Haskell can work (See _e.g._ + [this HaskellR issue](https://github.com/tweag/HaskellR/issues/253)) * Parse CLI arguments and configuration files into less permissive types, improving error messages for bad inputs. [#2267](https://github.com/commercialhaskell/stack/issues/2267) @@ -1629,9 +2538,11 @@ Other enhancements: * `stack setup` no longer unpacks to the system temp dir on posix systems. [#996](https://github.com/commercialhaskell/stack/issues/996) * `stack setup` detects libtinfo6 and ncurses6 and can download alternate GHC - bindists [#257](https://github.com/commercialhaskell/stack/issues/257) + binary distributions + [#257](https://github.com/commercialhaskell/stack/issues/257) [#2302](https://github.com/commercialhaskell/stack/issues/2302). -* `stack setup` detects Linux ARMv7 downloads appropriate GHC bindist +* `stack setup` detects Linux ARMv7 downloads appropriate GHC binary + distribution [#2103](https://github.com/commercialhaskell/stack/issues/2103) * Custom `stack` binaries list dependency versions in output for `--version`. 
See [#2222](https://github.com/commercialhaskell/stack/issues/2222) @@ -1659,7 +2570,7 @@ Other enhancements: Bug fixes: -* Fix cabal warning about use of a deprecated cabal flag +* Fix Cabal warning about use of a deprecated Cabal flag [#2350](https://github.com/commercialhaskell/stack/issues/2350) * Support most executable extensions on Windows [#2225](https://github.com/commercialhaskell/stack/issues/2225) @@ -1676,7 +2587,7 @@ Bug fixes: [#2266](https://github.com/commercialhaskell/stack/issues/2266) * Apply ghc-options to snapshot packages [#2289](https://github.com/commercialhaskell/stack/issues/2289) -* stack sdist: Fix timestamp in tarball +* `stack sdist`: Fix timestamp in tarball [#2394](https://github.com/commercialhaskell/stack/pull/2394) * Allow global Stack arguments with a script [#2316](https://github.com/commercialhaskell/stack/issues/2316) @@ -1692,7 +2603,7 @@ Bug fixes: [#2424](https://github.com/commercialhaskell/stack/issues/2424) * When marking exe installed, remove old version [#2373](https://github.com/commercialhaskell/stack/issues/2373) -* Stop truncating all-cabal-hashes git repo +* Stop truncating `all-cabal-hashes` git repo [#2175](https://github.com/commercialhaskell/stack/issues/2175) * Handle non-ASCII filenames on Windows [#2491](https://github.com/commercialhaskell/stack/issues/2491) @@ -1701,7 +2612,7 @@ Bug fixes: [#1957](https://github.com/commercialhaskell/stack/issues/1957) * Only pre-load compiler version when using nix integration [#2459](https://github.com/commercialhaskell/stack/issues/2459) -* Solver: parse cabal errors also on Windows +* Solver: parse Cabal errors also on Windows [#2502](https://github.com/commercialhaskell/stack/issues/2502) * Allow exec and ghci commands in interpreter mode. Scripts can now automatically open in the repl by using `exec ghci` @@ -1710,7 +2621,7 @@ Bug fixes: * Now consider a package to be dirty when an extra-source-file is changed. 
See [#2040](https://github.com/commercialhaskell/stack/issues/2040) -## 1.1.2 +## 1.1.2 - 2016-05-20 Release notes: @@ -1734,10 +2645,10 @@ Behavior changes: --compiler-exe`. See [#2123](https://github.com/commercialhaskell/stack/issues/2123) * For packages specified in terms of a git or hg repo, the hash used in the - location has changed. This means that existing downloads from older stack - versions won't be used. This is a side-effect of the fix to + location has changed. This means that existing downloads from older Stack + versions will not be used. This is a side-effect of the fix to [#2133](https://github.com/commercialhaskell/stack/issues/2133) -* `stack upgrade` no longer pays attention to local stack.yaml files, just the +* `stack upgrade` no longer pays attention to local `stack.yaml` files, just the global config and CLI options. [#1392](https://github.com/commercialhaskell/stack/issues/1392) * `stack ghci` now uses `:add` instead of `:load`, making it potentially work @@ -1753,7 +2664,7 @@ Other enhancements: [#1620](https://github.com/commercialhaskell/stack/issues/1620) * `DESTDIR` is filtered from environment when installing GHC. See [#1460](https://github.com/commercialhaskell/stack/issues/1460) -* `stack haddock` now supports `--hadock-arguments`. See +* `stack haddock` now supports `--haddock-arguments`. See [#2144](https://github.com/commercialhaskell/stack/issues/2144) * Signing: warn if GPG_TTY is not set as per `man gpg-agent` @@ -1780,7 +2691,7 @@ Bug fixes: [#1982](https://github.com/commercialhaskell/stack/issues/1982) * Signing: always use `--with-fingerprints` -## 1.1.0 +## 1.1.0 - 2016-05-04 Release notes: @@ -1791,11 +2702,11 @@ Behavior changes: * Snapshot packages are no longer built with executable profiling. See [#1179](https://github.com/commercialhaskell/stack/issues/1179). -* `stack init` now ignores symlinks when searching for cabal files. It also now +* `stack init` now ignores symlinks when searching for Cabal files. 
It also now ignores any directory that begins with `.` (as well as `dist` dirs) - before it would only ignore `.git`, `.stack-work`, and `dist`. -* The stack executable is no longer built with `-rtsopts`. Before, when - `-rtsopts` was enabled, stack would process `+RTS` options even when intended +* The Stack executable is no longer built with `-rtsopts`. Before, when + `-rtsopts` was enabled, Stack would process `+RTS` options even when intended for some other program, such as when used with `stack exec -- prog +RTS`. See [#2022](https://github.com/commercialhaskell/stack/issues/2022). * The `stack path --ghc-paths` option is deprecated and renamed to `--programs`. @@ -1805,29 +2716,30 @@ Behavior changes: `stack path --global-stack-root` flag and the `global-stack-root` field in the output of `stack path` are being deprecated and replaced with the `stack-root` flag and output field. - Additionally, the stack root can now be specified via the + Additionally, the Stack root can now be specified via the `--stack-root` command-line flag. See [#1148](https://github.com/commercialhaskell/stack/issues/1148). * `stack sig` GPG-related sub-commands were removed (folded into `upload` and `sdist`) * GPG signing of packages while uploading to Hackage is now the default. Use `upload --no-signature` if you would rather not contribute your package - signature. If you don't yet have a GPG keyset, read this + signature. If you do not yet have a GPG keyset, read this [blog post on GPG keys](https://fpcomplete.com/blog/2016/05/stack-security-gnupg-keys). - We can add a stack.yaml config setting to disable signing if some people + We can add a `stack.yaml` config setting to disable signing if some people desire it. We hope that people will sign. Later we will be adding GPG signature verification options. * `stack build pkg-1.2.3` will now build even if the snapshot has a different package version - it is treated as an extra-dep. 
`stack build local-pkg-1.2.3` is an error even if the version number matches the local package [#2028](https://github.com/commercialhaskell/stack/issues/2028). -* Having a `nix:` section no longer implies enabling nix build. This allows the - user to globally configure whether nix is used (unless the project overrides - the default explicitly). See +* A `nix` key in a Stack YAML configuration file no longer implies enabling a + Nix build. This allows the user to globally configure whether Nix is used + (unless the project overrides the default explicitly). See [#1924](https://github.com/commercialhaskell/stack/issues/1924). -* Remove deprecated valid-wanted field. -* Docker: mount home directory in container [#1949](https://github.com/commercialhaskell/stack/issues/1949). -* Deprecate `--local-bin-path` instead `--local-bin`. +* Remove deprecated `valid-wanted` key. +* Docker: mount home directory in container + [#1949](https://github.com/commercialhaskell/stack/issues/1949). +* Deprecate `stack path --local-bin-path`; instead use `--local-bin`. * `stack image`: allow absolute source paths for `add`. Other enhancements: @@ -1836,18 +2748,19 @@ Other enhancements: * Fix too much rebuilding when enabling/disabling profiling flags. * `stack build pkg-1.0` will now build `pkg-1.0` even if the snapshot specifies a different version (it introduces a temporary extra-dep) -* Experimental support for `--split-objs` added +* Experimental: Support for `--split-objs` added [#1284](https://github.com/commercialhaskell/stack/issues/1284). * `git` packages with submodules are supported by passing the `--recursive` flag to `git clone`. -* When using [hpack](https://github.com/sol/hpack), only regenerate cabal files - when hpack files change. -* hpack files can now be used in templates +* When using [Hpack](https://github.com/sol/hpack), only regenerate Cabal files + when Hpack files change. 
+* Hpack files can now be used in templates * `stack ghci` now runs ghci as a separate process [#1306](https://github.com/commercialhaskell/stack/issues/1306) * Retry when downloading snapshots and package indices * Many build options are configurable now in `stack.yaml`: -``` + + ~~~yaml build: library-profiling: true executable-profiling: true @@ -1869,15 +2782,17 @@ Other enhancements: no-run-benchmarks: true reconfigure: true cabal-verbose: true -``` + ~~~ + * A number of URLs are now configurable, useful for firewalls. See [#1794](https://github.com/commercialhaskell/stack/issues/1884). * Suggest causes when executables are missing. * Allow `--omit-packages` even without `--solver`. -* Improve the generated stack.yaml. +* Improve the generated `stack.yaml`. * Improve ghci results after :load Main module collision with main file path. * Only load the hackage index if necessary - [#1883](https://github.com/commercialhaskell/stack/issues/1883), [#1892](https://github.com/commercialhaskell/stack/issues/1892). + [#1883](https://github.com/commercialhaskell/stack/issues/1883), + [#1892](https://github.com/commercialhaskell/stack/issues/1892). * init: allow local packages to be deps of deps [#1965](https://github.com/commercialhaskell/stack/issues/1965). * Always use full fingerprints from GPG @@ -1885,25 +2800,26 @@ Other enhancements: * Default to using `gpg2` and fall back to `gpg` [#1976](https://github.com/commercialhaskell/stack/issues/1976). * Add a flag for --verbosity silent. -* Add `haddock --open` flag [#1396](https://github.com/commercialhaskell/stack/issues/1396). +* Add `haddock --open` flag + [#1396](https://github.com/commercialhaskell/stack/issues/1396). Bug fixes: * Package tarballs would fail to unpack. [#1884](https://github.com/commercialhaskell/stack/issues/1884). 
* Fixed errant warnings about missing modules, after deleted and removed from - cabal file [#921](https://github.com/commercialhaskell/stack/issues/921) + Cabal file [#921](https://github.com/commercialhaskell/stack/issues/921) [#1805](https://github.com/commercialhaskell/stack/issues/1805). -* Now considers a package to dirty when the hpack file is changed +* Now considers a package to be dirty when the Hpack file is changed [#1819](https://github.com/commercialhaskell/stack/issues/1819). -* Nix: cancelling a stack build now exits properly rather than dropping into a +* Nix: cancelling a Stack build now exits properly rather than dropping into a nix-shell [#1778](https://github.com/commercialhaskell/stack/issues/1778). * `allow-newer: true` now causes `--exact-configuration` to be passed to Cabal. See [#1579](https://github.com/commercialhaskell/stack/issues/1579). * `stack solver` no longer fails with `InvalidRelFile` for relative package paths including `..`. See [#1954](https://github.com/commercialhaskell/stack/issues/1954). -* Ignore emacs lock files when finding .cabal +* Ignore emacs lock files when finding Cabal files [#1897](https://github.com/commercialhaskell/stack/issues/1897). * Use lenient UTF-8 decode for build output [#1945](https://github.com/commercialhaskell/stack/issues/1945). @@ -1915,40 +2831,41 @@ Bug fixes: * Fix: Rebuilding when disabling profiling [#2023](https://github.com/commercialhaskell/stack/issues/2023). -## 1.0.4.3 +## 1.0.4.3 - 2016-04-07 Bug fixes: -* Don't delete contents of ~/.ssh when using `stack clean --full` with Docker +* Do not delete contents of ~/.ssh when using `stack clean --full` with Docker enabled [#2000](https://github.com/commercialhaskell/stack/issues/2000) -## 1.0.4.2 +## 1.0.4.2 - 2016-03-09 -Build with path-io-1.0.0. There are no changes in behaviour from 1.0.4, -so no binaries are released for this version. +Build with `path-io-1.0.0`. 
There are no changes in behaviour from 1.0.4, so no +binaries are released for this version. -## 1.0.4.1 +## 1.0.4.1 - 2016-02-21 -Fixes build with aeson-0.11.0.0. There are no changes in behaviour from 1.0.4, +Fixes build with `aeson-0.11.0.0`. There are no changes in behaviour from 1.0.4, so no binaries are released for this version. -## 1.0.4 +## 1.0.4 - 2016-02-20 Major changes: * Some notable changes in `stack init`: - * Overall it should now be able to initialize almost all existing cabal + * Overall it should now be able to initialize almost all existing Cabal packages out of the box as long as the package itself is consistently defined. - * Choose the best possible snapshot and add extra dependencies on top - of a snapshot resolver rather than a compiler resolver - + * Choose the best possible snapshot and add extra-deps on top + of a snapshot other than a compiler snapshot - [#1583](https://github.com/commercialhaskell/stack/pull/1583) * Automatically omit a package (`--omit-packages`) when it is compiler incompatible or when there are packages with conflicting dependency - requirements - [#1674](https://github.com/commercialhaskell/stack/pull/1674). + requirements. See + [#1674](https://github.com/commercialhaskell/stack/pull/1674). * Some more changes for a better user experience. Please refer to the doc guide for details. -* Add support for hpack, alternative package description format +* Add support for Hpack, alternative package description format [#1679](https://github.com/commercialhaskell/stack/issues/1679) Other enhancements: @@ -1958,7 +2875,7 @@ Other enhancements: * Docker: strip suffix from docker --version. [#1653](https://github.com/commercialhaskell/stack/issues/1653) * Docker: pass USER and PWD environment variables into container. 
-* On each run, stack will test the stack root directory (~/.stack), and the +* On each run, Stack will test the Stack root directory (~/.stack), and the project and package work directories (.stack-work) for whether they are owned by the current user and abort if they are not. This precaution can be disabled with the `--allow-different-user` flag or `allow-different-user` @@ -1997,13 +2914,13 @@ Bug fixes: * Add space before auto-generated bench opts (makes profiling options work uniformly for applications and benchmark suites) [#1771](https://github.com/commercialhaskell/stack/issues/1771). -* Don't try to find plugin if it resembles flag. +* Do not try to find plugin if it resembles flag. * Setup.hs changes cause package dirtiness [#1711](https://github.com/commercialhaskell/stack/issues/1711). * Send "stack templates" output to stdout [#1792](https://github.com/commercialhaskell/stack/issues/1792). -## 1.0.2 +## 1.0.2 - 2016-01-18 Release notes: @@ -2029,10 +2946,10 @@ Other enhancements: - Docker: pass supplementary groups and umask into container - If git fetch fails wipe the directory and try again from scratch [#1418](https://github.com/commercialhaskell/stack/issues/1418) -- Warn if newly installed executables won't be available on the PATH +- Warn if newly installed executables will not be available on the PATH [#1362](https://github.com/commercialhaskell/stack/issues/1362) -- stack.yaml: for `stack image container`, specify multiple images to generate, - and which executables should be added to those images +- `stack.yaml`: for `stack image container`, specify multiple images to + generate, and which executables should be added to those images - GHCI: add interactive Main selection [#1068](https://github.com/commercialhaskell/stack/issues/1068) - Care less about the particular name of a GHCJS sdist folder @@ -2042,7 +2959,7 @@ Other enhancements: Bug fixes: -- Don't share precompiled packages between GHC/platform variants and Docker +- Do not share 
precompiled packages between GHC/platform variants and Docker [#1551](https://github.com/commercialhaskell/stack/issues/1551) - Properly redownload corrupted downloads with the correct file size. [Mailing list discussion](https://groups.google.com/d/msg/haskell-stack/iVGDG5OHYxs/FjUrR5JsDQAJ) @@ -2065,11 +2982,11 @@ Bug fixes: - Use globaldb path for querying Cabal version [#1647](https://github.com/commercialhaskell/stack/issues/1647) -## 1.0.0 +## 1.0.0 - 2015-12-24 Release notes: -* We're calling this version 1.0.0 in preparation for Stackage +* We are calling this version 1.0.0 in preparation for Stackage LTS 4. Note, however, that this does not mean the code's API will be stable as this is primarily an end-user tool. @@ -2094,7 +3011,7 @@ Enhancements: [#1453](https://github.com/commercialhaskell/stack/issues/1453) * Improve Unicode handling in project/package names [#1337](https://github.com/commercialhaskell/stack/issues/1337) -* Fix ambiguity between a stack command and a filename to execute (prefer +* Fix ambiguity between a Stack command and a filename to execute (prefer `stack` subcommands) [#1471](https://github.com/commercialhaskell/stack/issues/1471) * Support multi line interpreter directive comments @@ -2108,7 +3025,7 @@ Enhancements: Bug fixes: -* Nix: stack exec options are passed properly to the stack sub process +* Nix: `stack exec` options are passed properly to the Stack sub process [#1538](https://github.com/commercialhaskell/stack/issues/1538) * Nix: specifying a shell-file works in any current working directory [#1547](https://github.com/commercialhaskell/stack/issues/1547) @@ -2120,7 +3037,7 @@ Bug fixes: [#1480](https://github.com/commercialhaskell/stack/issues/1480) * Restrict commands allowed in interpreter mode [#1504](https://github.com/commercialhaskell/stack/issues/1504) -* stack ghci doesn't see preprocessed files for executables +* `stack ghci` does not see preprocessed files for executables 
[#1347](https://github.com/commercialhaskell/stack/issues/1347) * All test suites run even when only one is requested [#1550](https://github.com/commercialhaskell/stack/pull/1550) @@ -2128,14 +3045,14 @@ Bug fixes: [#1535](https://github.com/commercialhaskell/stack/issues/1535) * Fix test coverage bug on windows -## 0.1.10.1 +## 0.1.10.1 - 2015-12-13 Bug fixes: * `stack image container` did not actually build an image [#1473](https://github.com/commercialhaskell/stack/issues/1473) -## 0.1.10.0 +## 0.1.10.0 - 2015-12-04 Release notes: @@ -2193,21 +3110,21 @@ Bug fixes: * Docker-built binaries and libraries in different path [#911](https://github.com/commercialhaskell/stack/issues/911) [#1367](https://github.com/commercialhaskell/stack/issues/1367) -* Docker: `--resolver` argument didn't effect selected image tag +* Docker: `--resolver` argument did not affect selected image tag * GHCi: Spaces in filepaths caused module loading issues [#1401](https://github.com/commercialhaskell/stack/issues/1401) -* GHCi: cpp-options in cabal files weren't used +* GHCi: cpp-options in Cabal files were not used [#1419](https://github.com/commercialhaskell/stack/issues/1419) -* Benchmarks couldn't be run independently of each other +* Benchmarks could not be run independently of each other [#1412](https://github.com/commercialhaskell/stack/issues/1412) * Send output of building setup to stderr [#1410](https://github.com/commercialhaskell/stack/issues/1410) -## 0.1.8.0 +## 0.1.8.0 - 2015-11-20 Major changes: -* GHCJS can now be used with stackage snapshots via the new `compiler` field. +* GHCJS can now be used with stackage snapshots via the new `compiler` key. 
* Windows installers are now available: [download them here](http://docs.haskellstack.org/en/stable/install_and_upgrade/#windows) [#613](https://github.com/commercialhaskell/stack/issues/613) @@ -2271,16 +3188,16 @@ Bug fixes: [#1105](https://github.com/commercialhaskell/stack/issues/1105) * Fix: Global options did not work consistently after subcommand [#519](https://github.com/commercialhaskell/stack/issues/519) -* Fix: 'stack ghci' doesn't notice that a module got deleted +* Fix: 'stack ghci' does not notice that a module got deleted [#1180](https://github.com/commercialhaskell/stack/issues/1180) -* Rebuild when cabal file is changed +* Rebuild when Cabal file is changed * Fix: Paths in GHC warnings not canonicalized, nor those for packages in subdirectories or outside the project root [#1259](https://github.com/commercialhaskell/stack/issues/1259) * Fix: unlisted files in tests and benchmarks trigger extraneous second build [#838](https://github.com/commercialhaskell/stack/issues/838) -## 0.1.6.0 +## 0.1.6.0 - 2015-10-15 Major changes: @@ -2290,7 +3207,8 @@ Major changes: content. The reason is to avoid the 260 character path limitation on Windows. See [#1027](https://github.com/commercialhaskell/stack/pull/1027) -* Rename config files and clarify their purposes [#969](https://github.com/commercialhaskell/stack/issues/969) +* Rename config files and clarify their purposes + [#969](https://github.com/commercialhaskell/stack/issues/969) * `~/.stack/stack.yaml` --> `~/.stack/config.yaml` * `~/.stack/global` --> `~/.stack/global-project` * `/etc/stack/config` --> `/etc/stack/config.yaml` @@ -2303,247 +3221,405 @@ Other enhancements: [#1046](https://github.com/commercialhaskell/stack/issues/1046). You can still get this behavior by running the following yourself: `stack exec -- pacman -Sy --noconfirm git`. 
-* Typing enter during --file-watch triggers a rebuild [#1023](https://github.com/commercialhaskell/stack/pull/1023) -* Use Haddock's `--hyperlinked-source` (crosslinked source), if available [#1070](https://github.com/commercialhaskell/stack/pull/1070) -* Use Stack-installed GHCs for `stack init --solver` [#1072](https://github.com/commercialhaskell/stack/issues/1072) -* New experimental `stack query` command [#1087](https://github.com/commercialhaskell/stack/issues/1087) -* By default, stack no longer rebuilds a package due to GHC options changes. This behavior can be tweaked with the `rebuild-ghc-options` setting. [#1089](https://github.com/commercialhaskell/stack/issues/1089) -* By default, ghc-options are applied to all local packages, not just targets. This behavior can be tweaked with the `apply-ghc-options` setting. [#1089](https://github.com/commercialhaskell/stack/issues/1089) -* Docker: download or override location of stack executable to re-run in container [#974](https://github.com/commercialhaskell/stack/issues/974) -* Docker: when Docker Engine is remote, don't run containerized processes as host's UID/GID [#194](https://github.com/commercialhaskell/stack/issues/194) -* Docker: `set-user` option to enable/disable running containerized processes as host's UID/GID [#194](https://github.com/commercialhaskell/stack/issues/194) -* Custom Setup.hs files are now precompiled instead of interpreted. This should be a major performance win for certain edge cases (biggest example: [building Cabal itself](https://github.com/commercialhaskell/stack/issues/1041)) while being either neutral or a minor slowdown for more common cases. -* `stack test --coverage` now also generates a unified coverage report for multiple test-suites / packages. In the unified report, test-suites can contribute to the coverage of other packages. 
+* Typing enter during --file-watch triggers a rebuild + [#1023](https://github.com/commercialhaskell/stack/pull/1023) +* Use Haddock's `--hyperlinked-source` (crosslinked source), if available + [#1070](https://github.com/commercialhaskell/stack/pull/1070) +* Use Stack-installed GHCs for `stack init --solver` + [#1072](https://github.com/commercialhaskell/stack/issues/1072) +* Experimental: Add `stack query` command + [#1087](https://github.com/commercialhaskell/stack/issues/1087) +* By default, Stack no longer rebuilds a package due to GHC options changes. + This behavior can be tweaked with the `rebuild-ghc-options` setting. + [#1089](https://github.com/commercialhaskell/stack/issues/1089) +* By default, ghc-options are applied to all local packages, not just targets. + This behavior can be tweaked with the `apply-ghc-options` setting. + [#1089](https://github.com/commercialhaskell/stack/issues/1089) +* Docker: download or override location of Stack executable to re-run in + container [#974](https://github.com/commercialhaskell/stack/issues/974) +* Docker: when Docker Engine is remote, do not run containerized processes as + host's UID/GID [#194](https://github.com/commercialhaskell/stack/issues/194) +* Docker: `set-user` option to enable/disable running containerized processes as + host's UID/GID [#194](https://github.com/commercialhaskell/stack/issues/194) +* Custom Setup.hs files are now precompiled instead of interpreted. This should + be a major performance win for certain edge cases (biggest example: + [building Cabal itself](https://github.com/commercialhaskell/stack/issues/1041)) + while being either neutral or a minor slowdown for more common cases. +* `stack test --coverage` now also generates a unified coverage report for + multiple test-suites / packages. In the unified report, test-suites can + contribute to the coverage of other packages. 
Bug fixes: * Ignore stack-built executables named `ghc` [#1052](https://github.com/commercialhaskell/stack/issues/1052) * Fix quoting of output failed command line arguments -* Mark executable-only packages as installed when copied from cache [#1043](https://github.com/commercialhaskell/stack/pull/1043) -* Canonicalize temporary directory paths [#1047](https://github.com/commercialhaskell/stack/pull/1047) -* Put code page fix inside the build function itself [#1066](https://github.com/commercialhaskell/stack/issues/1066) -* Add `explicit-setup-deps` option [#1110](https://github.com/commercialhaskell/stack/issues/1110), and change the default to the old behavior of using any package in the global and snapshot database [#1025](https://github.com/commercialhaskell/stack/issues/1025) -* Precompiled cache checks full package IDs on Cabal < 1.22 [#1103](https://github.com/commercialhaskell/stack/issues/1103) -* Pass -package-id to ghci [#867](https://github.com/commercialhaskell/stack/issues/867) -* Ignore global packages when copying precompiled packages [#1146](https://github.com/commercialhaskell/stack/issues/1146) - -## 0.1.5.0 +* Mark executable-only packages as installed when copied from cache + [#1043](https://github.com/commercialhaskell/stack/pull/1043) +* Canonicalize temporary directory paths + [#1047](https://github.com/commercialhaskell/stack/pull/1047) +* Put code page fix inside the build function itself + [#1066](https://github.com/commercialhaskell/stack/issues/1066) +* Add `explicit-setup-deps` option + [#1110](https://github.com/commercialhaskell/stack/issues/1110), and change + the default to the old behavior of using any package in the global and + snapshot database + [#1025](https://github.com/commercialhaskell/stack/issues/1025) +* Precompiled cache checks full package IDs on Cabal < 1.22 + [#1103](https://github.com/commercialhaskell/stack/issues/1103) +* Pass -package-id to ghci + [#867](https://github.com/commercialhaskell/stack/issues/867) +* 
Ignore global packages when copying precompiled packages + [#1146](https://github.com/commercialhaskell/stack/issues/1146) + +## 0.1.5.0 - 2015-09-24 Major changes: -* On Windows, we now use a full MSYS2 installation in place of the previous PortableGit. This gives you access to the pacman package manager for more easily installing libraries. -* Support for custom GHC binary distributions [#530](https://github.com/commercialhaskell/stack/issues/530) - * `ghc-variant` option in stack.yaml to specify the variant (also +* On Windows, we now use a full MSYS2 installation in place of the previous + PortableGit. This gives you access to the pacman package manager for more + easily installing libraries. +* Support for custom GHC binary distributions + [#530](https://github.com/commercialhaskell/stack/issues/530) + * `ghc-variant` option in `stack.yaml` to specify the variant (also `--ghc-variant` command-line option) - * `setup-info` in stack.yaml, to specify where to download custom binary + * `setup-info` in `stack.yaml`, to specify where to download custom binary distributions (also `--ghc-bindist` command-line option) * Note: On systems with libgmp4 (aka `libgmp.so.3`), such as CentOS 6, you - may need to re-run `stack setup` due to the centos6 GHC bindist being - treated like a variant -* A new `--pvp-bounds` flag to the sdist and upload commands allows automatic adding of PVP upper and/or lower bounds to your dependencies + may need to re-run `stack setup` due to the centos6 GHC binary + distribution being treated like a variant +* A new `--pvp-bounds` flag to the sdist and upload commands allows automatic + adding of PVP upper and/or lower bounds to your dependencies Other enhancements: -* Adapt to upcoming Cabal installed package identifier format change [#851](https://github.com/commercialhaskell/stack/issues/851) +* Adapt to upcoming Cabal installed package identifier format change + [#851](https://github.com/commercialhaskell/stack/issues/851) * `stack setup` 
takes a `--stack-setup-yaml` argument -* `--file-watch` is more discerning about which files to rebuild for [#912](https://github.com/commercialhaskell/stack/issues/912) +* `--file-watch` is more discerning about which files to rebuild for + [#912](https://github.com/commercialhaskell/stack/issues/912) * `stack path` now supports `--global-pkg-db` and `--ghc-package-path` -* `--reconfigure` flag [#914](https://github.com/commercialhaskell/stack/issues/914) [#946](https://github.com/commercialhaskell/stack/issues/946) -* Cached data is written with a checksum of its structure [#889](https://github.com/commercialhaskell/stack/issues/889) +* `--reconfigure` flag + [#914](https://github.com/commercialhaskell/stack/issues/914) + [#946](https://github.com/commercialhaskell/stack/issues/946) +* Cached data is written with a checksum of its structure + [#889](https://github.com/commercialhaskell/stack/issues/889) * Fully removed `--optimizations` flag * Added `--cabal-verbose` flag -* Added `--file-watch-poll` flag for polling instead of using filesystem events (useful for running tests in a Docker container while modifying code in the host environment. When code is injected into the container via a volume, the container won't propagate filesystem events). 
-* Give a preemptive error message when `-prof` is given as a GHC option [#1015](https://github.com/commercialhaskell/stack/issues/1015) -* Locking is now optional, and will be turned on by setting the `STACK_LOCK` environment variable to `true` [#950](https://github.com/commercialhaskell/stack/issues/950) -* Create default stack.yaml with documentation comments and commented out options [#226](https://github.com/commercialhaskell/stack/issues/226) -* Out of memory warning if Cabal exits with -9 [#947](https://github.com/commercialhaskell/stack/issues/947) +* Added `--file-watch-poll` flag for polling instead of using filesystem events + (useful for running tests in a Docker container while modifying code in the + host environment. When code is injected into the container via a volume, the + container will not propagate filesystem events). +* Give a preemptive error message when `-prof` is given as a GHC option + [#1015](https://github.com/commercialhaskell/stack/issues/1015) +* Locking is now optional, and will be turned on by setting the `STACK_LOCK` + environment variable to `true` + [#950](https://github.com/commercialhaskell/stack/issues/950) +* Create default `stack.yaml` with documentation comments and commented out + options [#226](https://github.com/commercialhaskell/stack/issues/226) +* Out of memory warning if Cabal exits with -9 + [#947](https://github.com/commercialhaskell/stack/issues/947) Bug fixes: -* Hacky workaround for optparse-applicative issue with `stack exec --help` [#806](https://github.com/commercialhaskell/stack/issues/806) -* Build executables for local extra deps [#920](https://github.com/commercialhaskell/stack/issues/920) -* copyFile can't handle directories [#942](https://github.com/commercialhaskell/stack/pull/942) -* Support for spaces in Haddock interface files [fpco/minghc#85](https://github.com/fpco/minghc/issues/85) -* Temporarily building against a "shadowing" local package? 
[#992](https://github.com/commercialhaskell/stack/issues/992) -* Fix Setup.exe name for --upgrade-cabal on Windows [#1002](https://github.com/commercialhaskell/stack/issues/1002) -* Unlisted dependencies no longer trigger extraneous second build [#838](https://github.com/commercialhaskell/stack/issues/838) +* Hacky workaround for optparse-applicative issue with `stack exec --help` + [#806](https://github.com/commercialhaskell/stack/issues/806) +* Build executables for local extra-deps + [#920](https://github.com/commercialhaskell/stack/issues/920) +* copyFile cannot handle directories + [#942](https://github.com/commercialhaskell/stack/pull/942) +* Support for spaces in Haddock interface files + [fpco/minghc#85](https://github.com/fpco/minghc/issues/85) +* Temporarily building against a "shadowing" local package? + [#992](https://github.com/commercialhaskell/stack/issues/992) +* Fix `Setup.exe` name for `--upgrade-cabal` on Windows + [#1002](https://github.com/commercialhaskell/stack/issues/1002) +* Unlisted dependencies no longer trigger extraneous second build + [#838](https://github.com/commercialhaskell/stack/issues/838) -## 0.1.4.1 +## 0.1.4.1 - 2015-09-04 Fix stack's own Haddocks. No changes to functionality (only comments updated). -## 0.1.4.0 +## 0.1.4.0 - 2015-09-04 Major changes: -* You now have more control over how GHC versions are matched, e.g. "use exactly this version," "use the specified minor version, but allow patches," or "use the given minor version or any later minor in the given major release." The default has switched from allowing newer later minor versions to a specific minor version allowing patches. For more information, see [#736](https://github.com/commercialhaskell/stack/issues/736) and [#784](https://github.com/commercialhaskell/stack/pull/784). +* You now have more control over how GHC versions are matched, e.g. 
"use exactly + this version," "use the specified minor version, but allow patches," or "use + the given minor version or any later minor in the given major release." The + default has switched from allowing newer later minor versions to a specific + minor version allowing patches. For more information, see + [#736](https://github.com/commercialhaskell/stack/issues/736) and + [#784](https://github.com/commercialhaskell/stack/pull/784). * Support added for compiling with GHCJS -* stack can now reuse prebuilt binaries between snapshots. That means that, if you build package foo in LTS-3.1, that binary version can be reused in LTS-3.2, assuming it uses the same dependencies and flags. [#878](https://github.com/commercialhaskell/stack/issues/878) +* Stack can now reuse prebuilt binaries between snapshots. That means that, if + you build package foo in LTS-3.1, that binary version can be reused in + LTS-3.2, assuming it uses the same dependencies and flags. + [#878](https://github.com/commercialhaskell/stack/issues/878) Other enhancements: -* Added the `--docker-env` argument, to set environment variables in Docker container. 
-* Set locale environment variables to UTF-8 encoding for builds to avoid "commitBuffer: invalid argument" errors from GHC [#793](https://github.com/commercialhaskell/stack/issues/793) -* Enable transliteration for encoding on stdout and stderr [#824](https://github.com/commercialhaskell/stack/issues/824) -* By default, `stack upgrade` automatically installs GHC as necessary [#797](https://github.com/commercialhaskell/stack/issues/797) -* Added the `ghc-options` field to stack.yaml [#796](https://github.com/commercialhaskell/stack/issues/796) -* Added the `extra-path` field to stack.yaml -* Code page changes on Windows only apply to the build command (and its synonyms), and can be controlled via a command line flag (still defaults to on) [#757](https://github.com/commercialhaskell/stack/issues/757) -* Implicitly add packages to extra-deps when a flag for them is set [#807](https://github.com/commercialhaskell/stack/issues/807) -* Use a precompiled Setup.hs for simple build types [#801](https://github.com/commercialhaskell/stack/issues/801) -* Set --enable-tests and --enable-benchmarks optimistically [#805](https://github.com/commercialhaskell/stack/issues/805) -* `--only-configure` option added [#820](https://github.com/commercialhaskell/stack/issues/820) +* Added the `--docker-env` argument, to set environment variables in Docker + container. 
+* Set locale environment variables to UTF-8 encoding for builds to avoid + "commitBuffer: invalid argument" errors from GHC + [#793](https://github.com/commercialhaskell/stack/issues/793) +* Enable transliteration for encoding on stdout and stderr + [#824](https://github.com/commercialhaskell/stack/issues/824) +* By default, `stack upgrade` automatically installs GHC as necessary + [#797](https://github.com/commercialhaskell/stack/issues/797) +* Added the `ghc-options` key to `stack.yaml` + [#796](https://github.com/commercialhaskell/stack/issues/796) +* Added the `extra-path` key to `stack.yaml` +* Code page changes on Windows only apply to the build command (and its + synonyms), and can be controlled via a command line flag (still defaults to + on) [#757](https://github.com/commercialhaskell/stack/issues/757) +* Implicitly add packages to extra-deps when a flag for them is set + [#807](https://github.com/commercialhaskell/stack/issues/807) +* Use a precompiled Setup.hs for simple build types + [#801](https://github.com/commercialhaskell/stack/issues/801) +* Set --enable-tests and --enable-benchmarks optimistically + [#805](https://github.com/commercialhaskell/stack/issues/805) +* `--only-configure` option added + [#820](https://github.com/commercialhaskell/stack/issues/820) * Check for duplicate local package names -* Stop nagging people that call `stack test` [#845](https://github.com/commercialhaskell/stack/issues/845) -* `--file-watch` will ignore files that are in your VCS boring/ignore files [#703](https://github.com/commercialhaskell/stack/issues/703) +* Stop nagging people that call `stack test` + [#845](https://github.com/commercialhaskell/stack/issues/845) +* `--file-watch` will ignore files that are in your VCS boring/ignore files + [#703](https://github.com/commercialhaskell/stack/issues/703) * Add `--numeric-version` option Bug fixes: -* `stack init --solver` fails if `GHC_PACKAGE_PATH` is present 
[#860](https://github.com/commercialhaskell/stack/issues/860) -* `stack solver` and `stack init --solver` check for test suite and benchmark dependencies [#862](https://github.com/commercialhaskell/stack/issues/862) -* More intelligent logic for setting UTF-8 locale environment variables [#856](https://github.com/commercialhaskell/stack/issues/856) +* `stack init --solver` fails if `GHC_PACKAGE_PATH` is present + [#860](https://github.com/commercialhaskell/stack/issues/860) +* `stack solver` and `stack init --solver` check for test suite and benchmark + dependencies [#862](https://github.com/commercialhaskell/stack/issues/862) +* More intelligent logic for setting UTF-8 locale environment variables + [#856](https://github.com/commercialhaskell/stack/issues/856) * Create missing directories for `stack sdist` -* Don't ignore .cabal files with extra periods [#895](https://github.com/commercialhaskell/stack/issues/895) +* Do not ignore Cabal files with extra periods + [#895](https://github.com/commercialhaskell/stack/issues/895) * Deprecate unused `--optimizations` flag -* Truncated output on slow terminals [#413](https://github.com/commercialhaskell/stack/issues/413) +* Truncated output on slow terminals + [#413](https://github.com/commercialhaskell/stack/issues/413) -## 0.1.3.1 +## 0.1.3.1 - 2015-08-12 Bug fixes: -* Ignore disabled executables [#763](https://github.com/commercialhaskell/stack/issues/763) +* Ignore disabled executables + [#763](https://github.com/commercialhaskell/stack/issues/763) -## 0.1.3.0 +## 0.1.3.0 - 2015-08-12 Major changes: -* Detect when a module is compiled but not listed in the cabal file ([#32](https://github.com/commercialhaskell/stack/issues/32)) - * A warning is displayed for any modules that should be added to `other-modules` in the .cabal file - * These modules are taken into account when determining whether a package needs to be built -* Respect TemplateHaskell addDependentFile dependency changes 
([#105](https://github.com/commercialhaskell/stack/issues/105)) - * TH dependent files are taken into account when determining whether a package needs to be built. -* Overhauled target parsing, added `--test` and `--bench` options [#651](https://github.com/commercialhaskell/stack/issues/651) - * For details, see [Build commands documentation](http://docs.haskellstack.org/en/stable/build_command/) +* Detect when a module is compiled but not listed in the Cabal file + ([#32](https://github.com/commercialhaskell/stack/issues/32)) + * A warning is displayed for any modules that should be added to + `other-modules` in the Cabal file + * These modules are taken into account when determining whether a package + needs to be built +* Respect TemplateHaskell addDependentFile dependency changes + ([#105](https://github.com/commercialhaskell/stack/issues/105)) + * TH dependent files are taken into account when determining whether a + package needs to be built. +* Overhauled target parsing, added `--test` and `--bench` options + [#651](https://github.com/commercialhaskell/stack/issues/651) + * For details, see + [Build commands documentation](http://docs.haskellstack.org/en/stable/build_command/) Other enhancements: -* Set the `HASKELL_DIST_DIR` environment variable [#524](https://github.com/commercialhaskell/stack/pull/524) -* Track build status of tests and benchmarks [#525](https://github.com/commercialhaskell/stack/issues/525) +* Set the `HASKELL_DIST_DIR` environment variable + [#524](https://github.com/commercialhaskell/stack/pull/524) +* Track build status of tests and benchmarks + [#525](https://github.com/commercialhaskell/stack/issues/525) * `--no-run-tests` [#517](https://github.com/commercialhaskell/stack/pull/517) -* Targets outside of root dir don't build [#366](https://github.com/commercialhaskell/stack/issues/366) -* Upper limit on number of flag combinations to test [#543](https://github.com/commercialhaskell/stack/issues/543) -* Fuzzy matching support to give 
better error messages for close version numbers [#504](https://github.com/commercialhaskell/stack/issues/504) -* `--local-bin-path` global option. Use to change where binaries get placed on a `--copy-bins` [#342](https://github.com/commercialhaskell/stack/issues/342) +* Targets outside of root dir do not build + [#366](https://github.com/commercialhaskell/stack/issues/366) +* Upper limit on number of flag combinations to test + [#543](https://github.com/commercialhaskell/stack/issues/543) +* Fuzzy matching support to give better error messages for close version numbers + [#504](https://github.com/commercialhaskell/stack/issues/504) +* `--local-bin-path` global option. Use to change where binaries get placed on a + `--copy-bins` [#342](https://github.com/commercialhaskell/stack/issues/342) * Custom snapshots [#111](https://github.com/commercialhaskell/stack/issues/111) -* --force-dirty flag: Force treating all local packages as having dirty files (useful for cases where stack can't detect a file change) -* GHC error messages: display file paths as absolute instead of relative for better editor integration -* Add the `--copy-bins` option [#569](https://github.com/commercialhaskell/stack/issues/569) -* Give warnings on unexpected config keys [#48](https://github.com/commercialhaskell/stack/issues/48) +* --force-dirty flag: Force treating all local packages as having dirty files + (useful for cases where Stack cannot detect a file change) +* GHC error messages: display file paths as absolute instead of relative for + better editor integration +* Add the `--copy-bins` option + [#569](https://github.com/commercialhaskell/stack/issues/569) +* Give warnings on unexpected config keys + [#48](https://github.com/commercialhaskell/stack/issues/48) * Remove Docker `pass-host` option -* Don't require cabal-install to upload [#313](https://github.com/commercialhaskell/stack/issues/313) -* Generate indexes for all deps and all installed snapshot packages 
[#143](https://github.com/commercialhaskell/stack/issues/143) -* Provide `--resolver global` option [#645](https://github.com/commercialhaskell/stack/issues/645) - * Also supports `--resolver nightly`, `--resolver lts`, and `--resolver lts-X` -* Make `stack build --flag` error when flag or package is unknown [#617](https://github.com/commercialhaskell/stack/issues/617) -* Preserve file permissions when unpacking sources [#666](https://github.com/commercialhaskell/stack/pull/666) +* Do not require `cabal-install` to upload + [#313](https://github.com/commercialhaskell/stack/issues/313) +* Generate indexes for all deps and all installed snapshot packages + [#143](https://github.com/commercialhaskell/stack/issues/143) +* Provide `--resolver global` option + [#645](https://github.com/commercialhaskell/stack/issues/645) + * Also supports `--resolver nightly`, `--resolver lts`, and + `--resolver lts-X` +* Make `stack build --flag` error when flag or package is unknown + [#617](https://github.com/commercialhaskell/stack/issues/617) +* Preserve file permissions when unpacking sources + [#666](https://github.com/commercialhaskell/stack/pull/666) * `stack build` etc work outside of a project -* `list-dependencies` command [#638](https://github.com/commercialhaskell/stack/issues/638) -* `--upgrade-cabal` option to `stack setup` [#174](https://github.com/commercialhaskell/stack/issues/174) +* `list-dependencies` command + [#638](https://github.com/commercialhaskell/stack/issues/638) +* `--upgrade-cabal` option to `stack setup` + [#174](https://github.com/commercialhaskell/stack/issues/174) * `--exec` option [#651](https://github.com/commercialhaskell/stack/issues/651) -* `--only-dependencies` implemented correctly [#387](https://github.com/commercialhaskell/stack/issues/387) +* `--only-dependencies` implemented correctly + [#387](https://github.com/commercialhaskell/stack/issues/387) Bug fixes: -* Extensions from the `other-extensions` field no longer enabled by default 
[#449](https://github.com/commercialhaskell/stack/issues/449) -* Fix: haddock forces rebuild of empty packages [#452](https://github.com/commercialhaskell/stack/issues/452) -* Don't copy over executables excluded by component selection [#605](https://github.com/commercialhaskell/stack/issues/605) -* Fix: stack fails on Windows with git package in stack.yaml and no git binary on path [#712](https://github.com/commercialhaskell/stack/issues/712) +* Extensions from the Cabal `other-extensions` field no longer enabled by + default [#449](https://github.com/commercialhaskell/stack/issues/449) +* Fix: haddock forces rebuild of empty packages + [#452](https://github.com/commercialhaskell/stack/issues/452) +* Do not copy over executables excluded by component selection + [#605](https://github.com/commercialhaskell/stack/issues/605) +* Fix: Stack fails on Windows with git package in `stack.yaml` and no git + executable on the PATH + [#712](https://github.com/commercialhaskell/stack/issues/712) * Fixed GHCi issue: Specifying explicit package versions (#678) * Fixed GHCi issue: Specifying -odir and -hidir as .stack-work/odir (#529) * Fixed GHCi issue: Specifying A instead of A.ext for modules (#498) -## 0.1.2.0 +## 0.1.2.0 - 2015-07-05 -* Add `--prune` flag to `stack dot` [#487](https://github.com/commercialhaskell/stack/issues/487) -* Add `--[no-]external`,`--[no-]include-base` flags to `stack dot` [#437](https://github.com/commercialhaskell/stack/issues/437) -* Add `--ignore-subdirs` flag to init command [#435](https://github.com/commercialhaskell/stack/pull/435) -* Handle attempt to use non-existing resolver [#436](https://github.com/commercialhaskell/stack/pull/436) +* Add `--prune` flag to `stack dot` + [#487](https://github.com/commercialhaskell/stack/issues/487) +* Add `--[no-]external`,`--[no-]include-base` flags to `stack dot` + [#437](https://github.com/commercialhaskell/stack/issues/437) +* Add `--ignore-subdirs` flag to init command + 
[#435](https://github.com/commercialhaskell/stack/pull/435) +* Handle attempt to use non-existing resolver + [#436](https://github.com/commercialhaskell/stack/pull/436) * Add `--force` flag to `init` command -* exec style commands accept the `--package` option (see [Reddit discussion](http://www.reddit.com/r/haskell/comments/3bd66h/stack_runghc_turtle_as_haskell_script_solution/)) -* `stack upload` without arguments doesn't do anything [#439](https://github.com/commercialhaskell/stack/issues/439) -* Print latest version of packages on conflicts [#450](https://github.com/commercialhaskell/stack/issues/450) -* Flag to avoid rerunning tests that haven't changed [#451](https://github.com/commercialhaskell/stack/issues/451) -* stack can act as a script interpreter (see [Script interpreter] (https://github.com/commercialhaskell/stack/wiki/Script-interpreter) and [Reddit discussion](http://www.reddit.com/r/haskell/comments/3bd66h/stack_runghc_turtle_as_haskell_script_solution/)) -* Add the __`--file-watch`__ flag to auto-rebuild on file changes [#113](https://github.com/commercialhaskell/stack/issues/113) +* exec style commands accept the `--package` option (see + [Reddit discussion](http://www.reddit.com/r/haskell/comments/3bd66h/stack_runghc_turtle_as_haskell_script_solution/)) +* `stack upload` without arguments does not do anything + [#439](https://github.com/commercialhaskell/stack/issues/439) +* Print latest version of packages on conflicts + [#450](https://github.com/commercialhaskell/stack/issues/450) +* Flag to avoid rerunning tests that have not changed + [#451](https://github.com/commercialhaskell/stack/issues/451) +* Stack can act as a script interpreter (see [Script interpreter] + (https://github.com/commercialhaskell/stack/wiki/Script-interpreter) and + [Reddit discussion](http://www.reddit.com/r/haskell/comments/3bd66h/stack_runghc_turtle_as_haskell_script_solution/)) +* Add the __`--file-watch`__ flag to auto-rebuild on file changes + 
[#113](https://github.com/commercialhaskell/stack/issues/113) * Rename `stack docker exec` to `stack exec --plain` -* Add the `--skip-msys` flag [#377](https://github.com/commercialhaskell/stack/issues/377) -* `--keep-going`, turned on by default for tests and benchmarks [#478](https://github.com/commercialhaskell/stack/issues/478) -* `concurrent-tests: BOOL` [#492](https://github.com/commercialhaskell/stack/issues/492) -* Use hashes to check file dirtiness [#502](https://github.com/commercialhaskell/stack/issues/502) -* Install correct GHC build on systems with libgmp.so.3 [#465](https://github.com/commercialhaskell/stack/issues/465) -* `stack upgrade` checks version before upgrading [#447](https://github.com/commercialhaskell/stack/issues/447) - -## 0.1.1.0 - -* Remove GHC uncompressed tar file after installation [#376](https://github.com/commercialhaskell/stack/issues/376) -* Put stackage snapshots JSON on S3 [#380](https://github.com/commercialhaskell/stack/issues/380) -* Specifying flags for multiple packages [#335](https://github.com/commercialhaskell/stack/issues/335) -* single test suite failure should show entire log [#388](https://github.com/commercialhaskell/stack/issues/388) -* valid-wanted is a confusing option name [#386](https://github.com/commercialhaskell/stack/issues/386) -* stack init in multi-package project should use local packages for dependency checking [#384](https://github.com/commercialhaskell/stack/issues/384) -* Display information on why a snapshot was rejected [#381](https://github.com/commercialhaskell/stack/issues/381) -* Give a reason for unregistering packages [#389](https://github.com/commercialhaskell/stack/issues/389) +* Add the `--skip-msys` flag + [#377](https://github.com/commercialhaskell/stack/issues/377) +* `--keep-going`, turned on by default for tests and benchmarks + [#478](https://github.com/commercialhaskell/stack/issues/478) +* `concurrent-tests: BOOL` + [#492](https://github.com/commercialhaskell/stack/issues/492) 
+* Use hashes to check file dirtiness + [#502](https://github.com/commercialhaskell/stack/issues/502) +* Install correct GHC build on systems with libgmp.so.3 + [#465](https://github.com/commercialhaskell/stack/issues/465) +* `stack upgrade` checks version before upgrading + [#447](https://github.com/commercialhaskell/stack/issues/447) + +## 0.1.1.0 - 2015-06-26 + +* Remove GHC uncompressed tar file after installation + [#376](https://github.com/commercialhaskell/stack/issues/376) +* Put stackage snapshots JSON on S3 + [#380](https://github.com/commercialhaskell/stack/issues/380) +* Specifying flags for multiple packages + [#335](https://github.com/commercialhaskell/stack/issues/335) +* single test suite failure should show entire log + [#388](https://github.com/commercialhaskell/stack/issues/388) +* valid-wanted is a confusing option name + [#386](https://github.com/commercialhaskell/stack/issues/386) +* `stack init` in multi-package project should use local packages for dependency + checking [#384](https://github.com/commercialhaskell/stack/issues/384) +* Display information on why a snapshot was rejected + [#381](https://github.com/commercialhaskell/stack/issues/381) +* Give a reason for unregistering packages + [#389](https://github.com/commercialhaskell/stack/issues/389) * `stack exec` accepts the `--no-ghc-package-path` parameter -* Don't require build plan to upload [#400](https://github.com/commercialhaskell/stack/issues/400) -* Specifying test components only builds/runs those tests [#398](https://github.com/commercialhaskell/stack/issues/398) +* Do not require build plan to upload + [#400](https://github.com/commercialhaskell/stack/issues/400) +* Specifying test components only builds/runs those tests + [#398](https://github.com/commercialhaskell/stack/issues/398) * `STACK_EXE` environment variable * Add the `stack dot` command -* `stack upgrade` added [#237](https://github.com/commercialhaskell/stack/issues/237) -* `--stack-yaml` command line flag 
[#378](https://github.com/commercialhaskell/stack/issues/378) -* `--skip-ghc-check` command line flag [#423](https://github.com/commercialhaskell/stack/issues/423) +* `stack upgrade` added + [#237](https://github.com/commercialhaskell/stack/issues/237) +* `--stack-yaml` command line flag + [#378](https://github.com/commercialhaskell/stack/issues/378) +* `--skip-ghc-check` command line flag + [#423](https://github.com/commercialhaskell/stack/issues/423) Bug fixes: -* Haddock links to global packages no longer broken on Windows [#375](https://github.com/commercialhaskell/stack/issues/375) -* Make flags case-insensitive [#397](https://github.com/commercialhaskell/stack/issues/397) -* Mark packages uninstalled before rebuilding [#365](https://github.com/commercialhaskell/stack/issues/365) - -## 0.1.0.0 - -* Fall back to cabal dependency solver when a snapshot can't be found -* Basic implementation of `stack new` [#137](https://github.com/commercialhaskell/stack/issues/137) -* `stack solver` command [#364](https://github.com/commercialhaskell/stack/issues/364) -* `stack path` command [#95](https://github.com/commercialhaskell/stack/issues/95) +* Haddock links to global packages no longer broken on Windows + [#375](https://github.com/commercialhaskell/stack/issues/375) +* Make flags case-insensitive + [#397](https://github.com/commercialhaskell/stack/issues/397) +* Mark packages uninstalled before rebuilding + [#365](https://github.com/commercialhaskell/stack/issues/365) + +## 0.1.0.0 - 2015-06-23 + +* Fall back to Cabal dependency solver when a snapshot cannot be found +* Basic implementation of `stack new` + [#137](https://github.com/commercialhaskell/stack/issues/137) +* `stack solver` command + [#364](https://github.com/commercialhaskell/stack/issues/364) +* `stack path` command + [#95](https://github.com/commercialhaskell/stack/issues/95) * Haddocks [#143](https://github.com/commercialhaskell/stack/issues/143): * Build for dependencies * Use relative links * 
Generate module contents and index for all packages in project -## 0.0.3 +## 0.0.3 - 2015-06-17 -* `--prefetch` [#297](https://github.com/commercialhaskell/stack/issues/297) -* `upload` command ported from stackage-upload [#225](https://github.com/commercialhaskell/stack/issues/225) -* `--only-snapshot` [#310](https://github.com/commercialhaskell/stack/issues/310) +* `--prefetch` + [#297](https://github.com/commercialhaskell/stack/issues/297) +* `upload` command ported from stackage-upload + [#225](https://github.com/commercialhaskell/stack/issues/225) +* `--only-snapshot` + [#310](https://github.com/commercialhaskell/stack/issues/310) * `--resolver` [#224](https://github.com/commercialhaskell/stack/issues/224) * `stack init` [#253](https://github.com/commercialhaskell/stack/issues/253) -* `--extra-include-dirs` and `--extra-lib-dirs` [#333](https://github.com/commercialhaskell/stack/issues/333) -* Specify intra-package target [#201](https://github.com/commercialhaskell/stack/issues/201) - -## 0.0.2 - -* Fix some Windows specific bugs [#216](https://github.com/commercialhaskell/stack/issues/216) -* Improve output for package index updates [#227](https://github.com/commercialhaskell/stack/issues/227) -* Automatically update indices as necessary [#227](https://github.com/commercialhaskell/stack/issues/227) +* `--extra-include-dirs` and `--extra-lib-dirs` + [#333](https://github.com/commercialhaskell/stack/issues/333) +* Specify intra-package target + [#201](https://github.com/commercialhaskell/stack/issues/201) + +## 0.0.2 - 2015-06-14 + +* Fix some Windows specific bugs + [#216](https://github.com/commercialhaskell/stack/issues/216) +* Improve output for package index updates + [#227](https://github.com/commercialhaskell/stack/issues/227) +* Automatically update indices as necessary + [#227](https://github.com/commercialhaskell/stack/issues/227) * --verbose flag [#217](https://github.com/commercialhaskell/stack/issues/217) -* Remove packages (HTTPS and Git) 
[#199](https://github.com/commercialhaskell/stack/issues/199) +* Remove packages (HTTPS and Git) + [#199](https://github.com/commercialhaskell/stack/issues/199) * Config values for system-ghc and install-ghc * Merge `stack deps` functionality into `stack build` -* `install` command [#153](https://github.com/commercialhaskell/stack/issues/153) and [#272](https://github.com/commercialhaskell/stack/issues/272) -* overriding architecture value (useful to force 64-bit GHC on Windows, for example) +* `install` command + [#153](https://github.com/commercialhaskell/stack/issues/153) and + [#272](https://github.com/commercialhaskell/stack/issues/272) +* overriding architecture value (useful to force 64-bit GHC on Windows, for + example) * Overhauled test running (allows cycles, avoids unnecessary recompilation, etc) -## 0.0.1 +## 0.0.1 - 2015-06-09 -* First public release, beta quality +* First public release, as the Haskell Tool Stack (beta quality). diff --git a/LICENSE b/LICENSE index f2a24c5694..06497eb902 100644 --- a/LICENSE +++ b/LICENSE @@ -1,24 +1,824 @@ -Copyright (c) 2015-2020, Stack contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Stack nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. +Stack +===== + +Copyright (c) 2015-2026, Stack contributors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of Stack nor the names of its contributors may be used to + endorse or promote products derived from this software without specific + prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL STACK CONTRIBUTORS BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +DISCLAIMED. IN NO EVENT SHALL STACK CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +-------------------------------------------------------------------------------- + +The GNU Multiple Precision Arithmetic Library (GMP) +=================================================== + +Stack executables for Linux distributed under Releases at Stack's GitHub +repository (https://github.com/commercialhaskell/stack) may be statically-linked +(as indicated there or in Stack's Change Log) and may be linked against a version +of GMP, which is licensed under the GNU Lesser General Public License, +Version 3, 29 June 2007 (LGPL). + +In accordance with the LGPL, the full source code of Stack is provided at the +same GitHub repository, allowing you to modify and relink the GMP portion, if +desired. + +GMP source code is available at: https://gmplib.org/ + +A copy of the LGPL is reproduced below. + + + +GNU LESSER GENERAL PUBLIC LICENSE +================================= + +Version 3, 29 June 2007 + +Copyright (C) 2007 Free Software Foundation, Inc. + +Everyone is permitted to copy and distribute verbatim copies of this license +document, but changing it is not allowed. + +This version of the GNU Lesser General Public License incorporates the terms and +conditions of version 3 of the GNU General Public License, supplemented by the +additional permissions listed below. + +## 0. Additional Definitions. + +As used herein, "this License" refers to version 3 of the GNU Lesser General +Public License, and the "GNU GPL" refers to version 3 of the GNU General Public +License. + +"The Library" refers to a covered work governed by this License, other than an +Application or a Combined Work as defined below. + +An "Application" is any work that makes use of an interface provided by the +Library, but which is not otherwise based on the Library. Defining a subclass of +a class defined by the Library is deemed a mode of using an interface provided +by the Library. + +A "Combined Work" is a work produced by combining or linking an Application with +the Library. 
The particular version of the Library with which the Combined Work +was made is also called the "Linked Version". + +The "Minimal Corresponding Source" for a Combined Work means the Corresponding +Source for the Combined Work, excluding any source code for portions of the +Combined Work that, considered in isolation, are based on the Application, and +not on the Linked Version. + +The "Corresponding Application Code" for a Combined Work means the object code +and/or source code for the Application, including any data and utility programs +needed for reproducing the Combined Work from the Application, but excluding the +System Libraries of the Combined Work. + +## 1. Exception to Section 3 of the GNU GPL. + +You may convey a covered work under sections 3 and 4 of this License without +being bound by section 3 of the GNU GPL. + +## 2. Conveying Modified Versions. + +If you modify a copy of the Library, and, in your modifications, a facility +refers to a function or data to be supplied by an Application that uses the +facility (other than as an argument passed when the facility is invoked), then +you may convey a copy of the modified version: + +- a) under this License, provided that you make a good faith effort to ensure + that, in the event an Application does not supply the function or data, the + facility still operates, and performs whatever part of its purpose remains + meaningful, or +- b) under the GNU GPL, with none of the additional permissions of this + License applicable to that copy. + +## 3. Object Code Incorporating Material from Library Header Files. + +The object code form of an Application may incorporate material from a header +file that is part of the Library. 
You may convey such object code under terms of +your choice, provided that, if the incorporated material is not limited to +numerical parameters, data structure layouts and accessors, or small macros, +inline functions and templates (ten or fewer lines in length), you do both of +the following: + +- a) Give prominent notice with each copy of the object code that the Library + is used in it and that the Library and its use are covered by this License. +- b) Accompany the object code with a copy of the GNU GPL and this license + document. + +## 4. Combined Works. + +You may convey a Combined Work under terms of your choice that, taken together, +effectively do not restrict modification of the portions of the Library +contained in the Combined Work and reverse engineering for debugging such +modifications, if you also do each of the following: + +- a) Give prominent notice with each copy of the Combined Work that the + Library is used in it and that the Library and its use are covered by this + License. +- b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. +- c) For a Combined Work that displays copyright notices during execution, + include the copyright notice for the Library among these notices, as well as + a reference directing the user to the copies of the GNU GPL and this license + document. +- d) Do one of the following: + - 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form suitable for, + and under terms that permit, the user to recombine or relink the + Application with a modified version of the Linked Version to produce a + modified Combined Work, in the manner specified by section 6 of the GNU + GPL for conveying Corresponding Source. + - 1) Use a suitable shared library mechanism for linking with the Library. 
+ A suitable mechanism is one that (a) uses at run time a copy of the + Library already present on the user's computer system, and (b) will + operate properly with a modified version of the Library that is + interface-compatible with the Linked Version. +- e) Provide Installation Information, but only if you would otherwise be + required to provide such information under section 6 of the GNU GPL, and + only to the extent that such information is necessary to install and execute + a modified version of the Combined Work produced by recombining or relinking + the Application with a modified version of the Linked Version. (If you use + option 4d0, the Installation Information must accompany the Minimal + Corresponding Source and Corresponding Application Code. If you use option + 4d1, you must provide the Installation Information in the manner specified + by section 6 of the GNU GPL for conveying Corresponding Source.) + +## 5. Combined Libraries. + +You may place library facilities that are a work based on the Library side by +side in a single library together with other library facilities that are not +Applications and are not covered by this License, and convey such a combined +library under terms of your choice, if you do both of the following: + +- a) Accompany the combined library with a copy of the same work based on the + Library, uncombined with any other library facilities, conveyed under the + terms of this License. +- b) Give prominent notice with the combined library that part of it is a work + based on the Library, and explaining where to find the accompanying + uncombined form of the same work. + +## 6. Revised Versions of the GNU Lesser General Public License. + +The Free Software Foundation may publish revised and/or new versions of the GNU +Lesser General Public License from time to time. Such new versions will be +similar in spirit to the present version, but may differ in detail to address +new problems or concerns. 
+ +Each version is given a distinguishing version number. If the Library as you +received it specifies that a certain numbered version of the GNU Lesser General +Public License "or any later version" applies to it, you have the option of +following the terms and conditions either of that published version or of any +later version published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser General Public +License, you may choose any version of the GNU Lesser General Public License +ever published by the Free Software Foundation. + +If the Library as you received it specifies that a proxy can decide whether +future versions of the GNU Lesser General Public License shall apply, that +proxy's public statement of acceptance of any version is permanent authorization +for you to choose that version for the Library. + + + +GNU GENERAL PUBLIC LICENSE +========================== + +Version 3, 29 June 2007 + +Copyright (C) 2007 Free Software Foundation, Inc. + +Everyone is permitted to copy and distribute verbatim copies of this license +document, but changing it is not allowed. + +## Preamble + +The GNU General Public License is a free, copyleft license for software and +other kinds of works. + +The licenses for most software and other practical works are designed to take +away your freedom to share and change the works. By contrast, the GNU General +Public License is intended to guarantee your freedom to share and change all +versions of a program--to make sure it remains free software for all its users. +We, the Free Software Foundation, use the GNU General Public License for most of +our software; it applies also to any other work released this way by its +authors. You can apply it to your programs, too. + +When we speak of free software, we are referring to freedom, not price. 
Our +General Public Licenses are designed to make sure that you have the freedom to +distribute copies of free software (and charge for them if you wish), that you +receive source code or can get it if you want it, that you can change the +software or use pieces of it in new free programs, and that you know you can do +these things. + +To protect your rights, we need to prevent others from denying you these rights +or asking you to surrender the rights. Therefore, you have certain +responsibilities if you distribute copies of the software, or if you modify it: +responsibilities to respect the freedom of others. + +For example, if you distribute copies of such a program, whether gratis or for a +fee, you must pass on to the recipients the same freedoms that you received. You +must make sure that they, too, receive or can get the source code. And you must +show them these terms so they know their rights. + +Developers that use the GNU GPL protect your rights with two steps: (1) assert +copyright on the software, and (2) offer you this License giving you legal +permission to copy, distribute and/or modify it. + +For the developers' and authors' protection, the GPL clearly explains that there +is no warranty for this free software. For both users' and authors' sake, the +GPL requires that modified versions be marked as changed, so that their problems +will not be attributed erroneously to authors of previous versions. + +Some devices are designed to deny users access to install or run modified +versions of the software inside them, although the manufacturer can do so. This +is fundamentally incompatible with the aim of protecting users' freedom to +change the software. The systematic pattern of such abuse occurs in the area of +products for individuals to use, which is precisely where it is most +unacceptable. Therefore, we have designed this version of the GPL to prohibit +the practice for those products. 
If such problems arise substantially in other +domains, we stand ready to extend this provision to those domains in future +versions of the GPL, as needed to protect the freedom of users. + +Finally, every program is threatened constantly by software patents. States +should not allow patents to restrict development and use of software on +general-purpose computers, but in those that do, we wish to avoid the special +danger that patents applied to a free program could make it effectively +proprietary. To prevent this, the GPL assures that patents cannot be used to +render the program non-free. + +The precise terms and conditions for copying, distribution and modification +follow. + +## TERMS AND CONDITIONS + +### 0. Definitions. + +"This License" refers to version 3 of the GNU General Public License. + +"Copyright" also means copyright-like laws that apply to other kinds of works, +such as semiconductor masks. + +"The Program" refers to any copyrightable work licensed under this License. Each +licensee is addressed as "you". "Licensees" and "recipients" may be individuals +or organizations. + +To "modify" a work means to copy from or adapt all or part of the work in a +fashion requiring copyright permission, other than the making of an exact copy. +The resulting work is called a "modified version" of the earlier work or a work +"based on" the earlier work. + +A "covered work" means either the unmodified Program or a work based on the +Program. + +To "propagate" a work means to do anything with it that, without permission, +would make you directly or secondarily liable for infringement under applicable +copyright law, except executing it on a computer or modifying a private copy. +Propagation includes copying, distribution (with or without modification), +making available to the public, and in some countries other activities as well. + +To "convey" a work means any kind of propagation that enables other parties to +make or receive copies. 
Mere interaction with a user through a computer network, +with no transfer of a copy, is not conveying. + +An interactive user interface displays "Appropriate Legal Notices" to the extent +that it includes a convenient and prominently visible feature that (1) displays +an appropriate copyright notice, and (2) tells the user that there is no +warranty for the work (except to the extent that warranties are provided), that +licensees may convey the work under this License, and how to view a copy of this +License. If the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + +### 1. Source Code. + +The "source code" for a work means the preferred form of the work for making +modifications to it. "Object code" means any non-source form of a work. + +A "Standard Interface" means an interface that either is an official standard +defined by a recognized standards body, or, in the case of interfaces specified +for a particular programming language, one that is widely used among developers +working in that language. + +The "System Libraries" of an executable work include anything, other than the +work as a whole, that (a) is included in the normal form of packaging a Major +Component, but which is not part of that Major Component, and (b) serves only to +enable use of the work with that Major Component, or to implement a Standard +Interface for which an implementation is available to the public in source code +form. A "Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system (if any) on +which the executable work runs, or a compiler used to produce the work, or an +object code interpreter used to run it. 
+ +The "Corresponding Source" for a work in object code form means all the source +code needed to generate, install, and (for an executable work) run the object +code and to modify the work, including scripts to control those activities. +However, it does not include the work's System Libraries, or general-purpose +tools or generally available free programs which are used unmodified in +performing those activities but which are not part of the work. For example, +Corresponding Source includes interface definition files associated with source +files for the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, such as by +intimate data communication or control flow between those subprograms and other +parts of the work. + +The Corresponding Source need not include anything that users can regenerate +automatically from other parts of the Corresponding Source. + +The Corresponding Source for a work in source code form is that same work. + +### 2. Basic Permissions. + +All rights granted under this License are granted for the term of copyright on +the Program, and are irrevocable provided the stated conditions are met. This +License explicitly affirms your unlimited permission to run the unmodified +Program. The output from running a covered work is covered by this License only +if the output, given its content, constitutes a covered work. This License +acknowledges your rights of fair use or other equivalent, as provided by +copyright law. + +You may make, run and propagate covered works that you do not convey, without +conditions so long as your license otherwise remains in force. You may convey +covered works to others for the sole purpose of having them make modifications +exclusively for you, or provide you with facilities for running those works, +provided that you comply with the terms of this License in conveying all +material for which you do not control copyright. 
Those thus making or running +the covered works for you must do so exclusively on your behalf, under your +direction and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + +Conveying under any other circumstances is permitted solely under the conditions +stated below. Sublicensing is not allowed; section 10 makes it unnecessary. + +### 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + +No covered work shall be deemed part of an effective technological measure under +any applicable law fulfilling obligations under article 11 of the WIPO copyright +treaty adopted on 20 December 1996, or similar laws prohibiting or restricting +circumvention of such measures. + +When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention is +effected by exercising rights under this License with respect to the covered +work, and you disclaim any intention to limit operation or modification of the +work as a means of enforcing, against the work's users, your or third parties' +legal rights to forbid circumvention of technological measures. + +### 4. Conveying Verbatim Copies. + +You may convey verbatim copies of the Program's source code as you receive it, +in any medium, provided that you conspicuously and appropriately publish on each +copy an appropriate copyright notice; keep intact all notices stating that this +License and any non-permissive terms added in accord with section 7 apply to the +code; keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + +You may charge any price or no price for each copy that you convey, and you may +offer support or warranty protection for a fee. + +### 5. Conveying Modified Source Versions. 
+ +You may convey a work based on the Program, or the modifications to produce it +from the Program, in the form of source code under the terms of section 4, +provided that you also meet all of these conditions: + +- a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. +- b) The work must carry prominent notices stating that it is + released under this License and any conditions added under + section 7. This requirement modifies the requirement in section 4 + to "keep intact all notices". +- c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. +- d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + +A compilation of a covered work with other separate and independent works, which +are not by their nature extensions of the covered work, and which are not +combined with it such as to form a larger program, in or on a volume of a +storage or distribution medium, is called an "aggregate" if the compilation and +its resulting copyright are not used to limit the access or legal rights of the +compilation's users beyond what the individual works permit. Inclusion of a +covered work in an aggregate does not cause this License to apply to the other +parts of the aggregate. + +### 6. Conveying Non-Source Forms. 
+ +You may convey a covered work in object code form under the terms of sections 4 +and 5, provided that you also convey the machine-readable Corresponding Source +under the terms of this License, in one of these ways: + +- a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. +- b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the Corresponding + Source from a network server at no charge. +- c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. +- d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. +- e) Convey the object code using peer-to-peer transmission, + provided you inform other peers where the object code and + Corresponding Source of the work are being offered to the general + public at no charge under subsection 6d. + +A separable portion of the object code, whose source code is excluded from the +Corresponding Source as a System Library, need not be included in conveying the +object code work. + +A "User Product" is either (1) a "consumer product", which means any tangible +personal property which is normally used for personal, family, or household +purposes, or (2) anything designed or sold for incorporation into a dwelling. In +determining whether a product is a consumer product, doubtful cases shall be +resolved in favor of coverage. For a particular product received by a particular +user, "normally used" refers to a typical or common use of that class of +product, regardless of the status of the particular user or of the way in which +the particular user actually uses, or expects or is expected to use, the +product. A product is a consumer product regardless of whether the product has +substantial commercial, industrial or non-consumer uses, unless such uses +represent the only significant mode of use of the product. 
+ +"Installation Information" for a User Product means any methods, procedures, +authorization keys, or other information required to install and execute +modified versions of a covered work in that User Product from a modified version +of its Corresponding Source. The information must suffice to ensure that the +continued functioning of the modified object code is in no case prevented or +interfered with solely because modification has been made. + +If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as part of a +transaction in which the right of possession and use of the User Product is +transferred to the recipient in perpetuity or for a fixed term (regardless of +how the transaction is characterized), the Corresponding Source conveyed under +this section must be accompanied by the Installation Information. But this +requirement does not apply if neither you nor any third party retains the +ability to install modified object code on the User Product (for example, the +work has been installed in ROM). + +The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates for a +work that has been modified or installed by the recipient, or for the User +Product in which it has been modified or installed. Access to a network may be +denied when the modification itself materially and adversely affects the +operation of the network or violates the rules and protocols for communication +across the network. + +Corresponding Source conveyed, and Installation Information provided, in accord +with this section must be in a format that is publicly documented (and with an +implementation available to the public in source code form), and must require no +special password or key for unpacking, reading or copying. + +### 7. Additional Terms. 
+ +"Additional permissions" are terms that supplement the terms of this License by +making exceptions from one or more of its conditions. Additional permissions +that are applicable to the entire Program shall be treated as though they were +included in this License, to the extent that they are valid under applicable +law. If additional permissions apply only to part of the Program, that part may +be used separately under those permissions, but the entire Program remains +governed by this License without regard to the additional permissions. + +When you convey a copy of a covered work, you may at your option remove any +additional permissions from that copy, or from any part of it. (Additional +permissions may be written to require their own removal in certain cases when +you modify the work.) You may place additional permissions on material, added by +you to a covered work, for which you have or can give appropriate copyright +permission. + +Notwithstanding any other provision of this License, for material you add to a +covered work, you may (if authorized by the copyright holders of that material) +supplement the terms of this License with terms: + +- a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or +- b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or +- c) Prohibiting misrepresentation of the origin of that material, + or requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or +- d) Limiting the use for publicity purposes of names of licensors + or authors of the material; or +- e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or +- f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material 
(or modified versions + of it) with contractual assumptions of liability to the recipient, + for any liability that these contractual assumptions directly + impose on those licensors and authors. + +All other non-permissive additional terms are considered "further restrictions" +within the meaning of section 10. If the Program as you received it, or any part +of it, contains a notice stating that it is governed by this License along with +a term that is a further restriction, you may remove that term. If a license +document contains a further restriction but permits relicensing or conveying +under this License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does not survive +such relicensing or conveying. + +If you add terms to a covered work in accord with this section, you must place, +in the relevant source files, a statement of the additional terms that apply to +those files, or a notice indicating where to find the applicable terms. + +Additional terms, permissive or non-permissive, may be stated in the form of a +separately written license, or stated as exceptions; the above requirements +apply either way. + +### 8. Termination. + +You may not propagate or modify a covered work except as expressly provided +under this License. Any attempt otherwise to propagate or modify it is void, and +will automatically terminate your rights under this License (including any +patent licenses granted under the third paragraph of section 11). + +However, if you cease all violation of this License, then your license from a +particular copyright holder is reinstated (a) provisionally, unless and until +the copyright holder explicitly and finally terminates your license, and (b) +permanently, if the copyright holder fails to notify you of the violation by +some reasonable means prior to 60 days after the cessation. 
+ +Moreover, your license from a particular copyright holder is reinstated +permanently if the copyright holder notifies you of the violation by some +reasonable means, this is the first time you have received notice of violation +of this License (for any work) from that copyright holder, and you cure the +violation prior to 30 days after your receipt of the notice. + +Termination of your rights under this section does not terminate the licenses of +parties who have received copies or rights from you under this License. If your +rights have been terminated and not permanently reinstated, you do not qualify +to receive new licenses for the same material under section 10. + +### 9. Acceptance Not Required for Having Copies. + +You are not required to accept this License in order to receive or run a copy of +the Program. Ancillary propagation of a covered work occurring solely as a +consequence of using peer-to-peer transmission to receive a copy likewise does +not require acceptance. However, nothing other than this License grants you +permission to propagate or modify any covered work. These actions infringe +copyright if you do not accept this License. Therefore, by modifying or +propagating a covered work, you indicate your acceptance of this License to do +so. + +### 10. Automatic Licensing of Downstream Recipients. + +Each time you convey a covered work, the recipient automatically receives a +license from the original licensors, to run, modify and propagate that work, +subject to this License. You are not responsible for enforcing compliance by +third parties with this License. + +An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. 
If propagation of a covered work results +from an entity transaction, each party to that transaction who receives a copy +of the work also receives whatever licenses to the work the party's predecessor +in interest had or could give under the previous paragraph, plus a right to +possession of the Corresponding Source of the work from the predecessor in +interest, if the predecessor has it or can get it with reasonable efforts. + +You may not impose any further restrictions on the exercise of the rights +granted or affirmed under this License. For example, you may not impose a +license fee, royalty, or other charge for exercise of rights granted under this +License, and you may not initiate litigation (including a cross-claim or +counterclaim in a lawsuit) alleging that any patent claim is infringed by +making, using, selling, offering for sale, or importing the Program or any +portion of it. + +### 11. Patents. + +A "contributor" is a copyright holder who authorizes use under this License of +the Program or a work on which the Program is based. The work thus licensed is +called the contributor's "contributor version". + +A contributor's "essential patent claims" are all patent claims owned or +controlled by the contributor, whether already acquired or hereafter acquired, +that would be infringed by some manner, permitted by this License, of making, +using, or selling its contributor version, but do not include claims that would +be infringed only as a consequence of further modification of the contributor +version. For purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of this License. + +Each contributor grants you a non-exclusive, worldwide, royalty-free patent +license under the contributor's essential patent claims, to make, use, sell, +offer for sale, import and otherwise run, modify and propagate the contents of +its contributor version. 
+ +In the following three paragraphs, a "patent license" is any express agreement +or commitment, however denominated, not to enforce a patent (such as an express +permission to practice a patent or covenant not to sue for patent infringement). +To "grant" such a patent license to a party means to make such an agreement or +commitment not to enforce a patent against the party. + +If you convey a covered work, knowingly relying on a patent license, and the +Corresponding Source of the work is not available for anyone to copy, free of +charge and under the terms of this License, through a publicly available network +server or other readily accessible means, then you must either (1) cause the +Corresponding Source to be so available, or (2) arrange to deprive yourself of +the benefit of the patent license for this particular work, or (3) arrange, in a +manner consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have actual +knowledge that, but for the patent license, your conveying the covered work in a +country, or your recipient's use of the covered work in a country, would +infringe one or more identifiable patents in that country that you have reason +to believe are valid. + +If, pursuant to or in connection with a single transaction or arrangement, you +convey, or propagate by procuring conveyance of, a covered work, and grant a +patent license to some of the parties receiving the covered work authorizing +them to use, propagate, modify or convey a specific copy of the covered work, +then the patent license you grant is automatically extended to all recipients of +the covered work and works based on it. + +A patent license is "discriminatory" if it does not include within the scope of +its coverage, prohibits the exercise of, or is conditioned on the non-exercise +of one or more of the rights that are specifically granted under this License. 
+You may not convey a covered work if you are a party to an arrangement with a +third party that is in the business of distributing software, under which you +make payment to the third party based on the extent of your activity of +conveying the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory patent +license (a) in connection with copies of the covered work conveyed by you (or +copies made from those copies), or (b) primarily for and in connection with +specific products or compilations that contain the covered work, unless you +entered into that arrangement, or that patent license was granted, prior to +28 March 2007. + +Nothing in this License shall be construed as excluding or limiting any implied +license or other defenses to infringement that may otherwise be available to you +under applicable patent law. + +### 12. No Surrender of Others' Freedom. + +If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not excuse +you from the conditions of this License. If you cannot convey a covered work so +as to satisfy simultaneously your obligations under this License and any other +pertinent obligations, then as a consequence you may not convey it at all. For +example, if you agree to terms that obligate you to collect a royalty for +further conveying from those to whom you convey the Program, the only way you +could satisfy both those terms and this License would be to refrain entirely +from conveying the Program. + +### 13. Use with the GNU Affero General Public License. + +Notwithstanding any other provision of this License, you have permission to link +or combine any covered work with a work licensed under version 3 of the GNU +Affero General Public License into a single combined work, and to convey the +resulting work. 
The terms of this License will continue to apply to the part +which is the covered work, but the special requirements of the GNU Affero +General Public License, section 13, concerning interaction through a network +will apply to the combination as such. + +### 14. Revised Versions of this License. + +The Free Software Foundation may publish revised and/or new versions of the GNU +General Public License from time to time. Such new versions will be similar in +spirit to the present version, but may differ in detail to address new problems +or concerns. + +Each version is given a distinguishing version number. If the Program specifies +that a certain numbered version of the GNU General Public License "or any later +version" applies to it, you have the option of following the terms and +conditions either of that numbered version or of any later version published by +the Free Software Foundation. If the Program does not specify a version number +of the GNU General Public License, you may choose any version ever published by +the Free Software Foundation. + +If the Program specifies that a proxy can decide which future versions of the +GNU General Public License can be used, that proxy's public statement of +acceptance of a version permanently authorizes you to choose that version for +the Program. + +Later license versions may give you additional or different permissions. +However, no additional obligations are imposed on any author or copyright holder +as a result of your choosing to follow a later version. + +### 15. Disclaimer of Warranty. + +THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER +PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER +EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
THE ENTIRE RISK AS TO THE +QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE +DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +### 16. Limitation of Liability. + +IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY +COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS +PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, +INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE +THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED +INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE +PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY +HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +### 17. Interpretation of Sections 15 and 16. + +If the disclaimer of warranty and limitation of liability provided above cannot +be given local legal effect according to their terms, reviewing courts shall +apply local law that most closely approximates an absolute waiver of all civil +liability in connection with the Program, unless a warranty or assumption of +liability accompanies a copy of the Program in return for a fee. + +END OF TERMS AND CONDITIONS + +## How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible use +to the public, the best way to achieve this is to make it free software which +everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach +them to the start of each source file to most effectively state the exclusion of +warranty; and each file should have at least the "copyright" line and a pointer +to where the full notice is found. 
+
+        <one line to give the program's name and a brief idea of what it does.>
+        Copyright (C) <year>  <name of author>
+
+        This program is free software: you can redistribute it and/or modify
+        it under the terms of the GNU General Public License as published by
+        the Free Software Foundation, either version 3 of the License, or
+        (at your option) any later version.
+
+        This program is distributed in the hope that it will be useful,
+        but WITHOUT ANY WARRANTY; without even the implied warranty of
+        MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+        GNU General Public License for more details.
+
+        You should have received a copy of the GNU General Public License
+        along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program does terminal interaction, make it output a short notice like
+this when it starts in an interactive mode:
+
+        <program> Copyright (C) <year> <name of author>
+        This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+        This is free software, and you are welcome to redistribute it
+        under certain conditions; type `show c' for details.
+
+The hypothetical commands \`show w' and \`show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands might be
+different; for a GUI interface, you would use an "about box".
+
+You should also get your employer (if you work as a programmer) or school, if
+any, to sign a "copyright disclaimer" for the program, if necessary. For more
+information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+The GNU General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may consider
+it more useful to permit linking proprietary applications with the library. If
+this is what you want to do, use the GNU Lesser General Public License instead
+of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/Makefile b/Makefile new file mode 100644 index 0000000000..fab6bcf8fd --- /dev/null +++ b/Makefile @@ -0,0 +1,12 @@ +# With the other prerequisites, this file allows users of the 'make' tool to +# automate the use of the 'mkdocs' tool to preview or build Stack's online +# documentation. See CONTRIBUTING.md for more information. + +# Preview Stack's online documentation with `make docs-serve`: +.PHONY: docs-serve +docs-serve: + $(MAKE) -C doc docs-serve + +# Build Stack's online documentation with `make _site/index.html`: +_site/index.html: doc/*.md + $(MAKE) -C doc docs-build diff --git a/README.md b/README.md index 290c5c5f3c..57aa0575b9 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,28 @@ -## The Haskell Tool Stack +## Stack -![Unit tests](https://github.com/commercialhaskell/stack/workflows/Unit%20tests/badge.svg) -![Integration tests](https://github.com/commercialhaskell/stack/workflows/Integration%20tests/badge.svg) +[![Unit tests](https://github.com/commercialhaskell/stack/workflows/Unit%20tests/badge.svg)](https://github.com/commercialhaskell/stack/actions/workflows/unit-tests.yml) +[![Integration tests](https://github.com/commercialhaskell/stack/workflows/Integration%20tests/badge.svg)](https://github.com/commercialhaskell/stack/actions/workflows/integration-tests.yml) [![Release](https://img.shields.io/github/release/commercialhaskell/stack.svg)](https://github.com/commercialhaskell/stack/releases) -[![Join the chat at https://gitter.im/commercialhaskell/stack](https://badges.gitter.im/commercialhaskell/stack.svg)](https://gitter.im/commercialhaskell/stack?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -Stack is a cross-platform program for developing Haskell projects. It is intended for Haskellers both new and experienced. +Stack is a program for developing Haskell projects. It is aimed at new and +experienced users of Haskell and seeks to support them fully on Linux, macOS +and Windows. 
-See [haskellstack.org](http://haskellstack.org) or the [doc](./doc) directory for more -information. +See [haskellstack.org](http://haskellstack.org), or the [doc](./doc) directory +of this repository, for more information. + +### Learning to use Stack + +If you are learning to use Stack and have questions, a discussion at the +[Haskell Community](https://discourse.haskell.org/) forum may help. See its +'Learn' category. + +### Community + +You can participate with the Stack community in the following areas: + +* the [Haskell Community](https://discourse.haskell.org/) forum + +* the Haskell + [Stack and Stackage](https://matrix.to/#/#haskell-stack:matrix.org) + room (address `#haskell-stack:matrix.org`) on [Matrix](https://matrix.org/) diff --git a/Setup.hs b/Setup.hs index 95a2306132..f34a82df6b 100644 --- a/Setup.hs +++ b/Setup.hs @@ -1,58 +1,93 @@ -{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} -module Main (main) where +module Main + ( main + ) where -import Data.List ( nub, sortBy ) -import Data.Ord ( comparing ) -import Distribution.Package ( PackageId, UnitId, packageVersion, packageName ) -import Distribution.PackageDescription ( PackageDescription(), Executable(..) 
) -import Distribution.InstalledPackageInfo (sourcePackageId, installedUnitId) -import Distribution.Simple ( defaultMainWithHooks, UserHooks(..), simpleUserHooks ) -import Distribution.Simple.Utils ( rewriteFileEx, createDirectoryIfMissingVerbose ) -import Distribution.Simple.BuildPaths ( autogenPackageModulesDir ) -import Distribution.Simple.PackageIndex (allPackages, dependencyClosure) -import Distribution.Simple.Setup ( BuildFlags(buildVerbosity), fromFlag ) -import Distribution.Simple.LocalBuildInfo ( installedPkgs, withLibLBI, withExeLBI, LocalBuildInfo(), ComponentLocalBuildInfo(componentPackageDeps) ) -import Distribution.Types.PackageName (PackageName, unPackageName) -import Distribution.Types.UnqualComponentName (unUnqualComponentName) -import Distribution.Verbosity ( Verbosity, normal ) -import Distribution.Pretty ( prettyShow ) -import System.FilePath ( () ) +import Data.List ( nub, sortOn ) +import Distribution.InstalledPackageInfo + ( installedUnitId, sourcePackageId ) +import Distribution.Package ( UnitId, packageName, packageVersion ) +import Distribution.PackageDescription + ( Executable (..), PackageDescription ) +import Distribution.Pretty ( prettyShow ) +import Distribution.Simple + ( UserHooks(..), defaultMainWithHooks, simpleUserHooks ) +import Distribution.Simple.BuildPaths ( autogenPackageModulesDir ) +import Distribution.Simple.LocalBuildInfo + ( ComponentLocalBuildInfo (..), LocalBuildInfo, installedPkgs + , withExeLBI, withLibLBI + ) +import Distribution.Simple.PackageIndex + ( allPackages, dependencyClosure ) +import Distribution.Simple.Setup + ( BuildFlags (..), ReplFlags (..), fromFlag ) +import Distribution.Simple.Utils + ( createDirectoryIfMissingVerbose, rewriteFileEx ) +import Distribution.Types.PackageName ( unPackageName ) +import Distribution.Types.UnqualComponentName + ( unUnqualComponentName ) +import Distribution.Utils.Path ( interpretSymbolicPathCWD ) +import Distribution.Verbosity ( Verbosity, normal ) +import 
System.FilePath ( () ) main :: IO () main = defaultMainWithHooks simpleUserHooks { buildHook = \pkg lbi hooks flags -> do - generateBuildModule (fromFlag (buildVerbosity flags)) pkg lbi - buildHook simpleUserHooks pkg lbi hooks flags + generateBuildModule (fromFlag (buildVerbosity flags)) pkg lbi + buildHook simpleUserHooks pkg lbi hooks flags + -- The 'cabal repl' hook corresponds to the 'cabal build' hook and is added + -- because, with a Cabal-based cradle, Haskell Language Server makes use of + -- 'cabal repl'. + , replHook = \pkg lbi hooks flags args -> do + generateBuildModule (fromFlag (replVerbosity flags)) pkg lbi + replHook simpleUserHooks pkg lbi hooks flags args } -generateBuildModule :: Verbosity -> PackageDescription -> LocalBuildInfo -> IO () +generateBuildModule :: + Verbosity + -> PackageDescription + -> LocalBuildInfo + -> IO () generateBuildModule verbosity pkg lbi = do - let dir = autogenPackageModulesDir lbi + let dir = interpretSymbolicPathCWD (autogenPackageModulesDir lbi) createDirectoryIfMissingVerbose verbosity True dir withLibLBI pkg lbi $ \_ libcfg -> do - withExeLBI pkg lbi $ \exe clbi -> - rewriteFileEx normal (dir "Build_" ++ exeName' exe ++ ".hs") $ unlines - [ "module Build_" ++ exeName' exe ++ " where" + withExeLBI pkg lbi $ \exe clbi -> do + let name = exeName' exe + rewriteFileEx normal (dir "Build_" ++ name ++ ".hs") $ unlines + [ "{-|" + , "Module : Build_" ++ name + , "License : BSD-3-Clause" + , "-}" , "" + , "module Build_" ++ name + , " ( deps" + , " ) where" + , "" + , "-- | The dependencies against which \\'" ++ name ++ "\\' is built." , "deps :: [String]" - , "deps = " ++ (show $ formatdeps (transDeps libcfg clbi)) + , "deps = " ++ show (formatdeps (transDeps libcfg clbi)) ] where exeName' = unUnqualComponentName . exeName - formatdeps = map formatone . sortBy (comparing unPackageName') + formatdeps = map formatone . 
sortOn unPackageName' formatone p = unPackageName' p ++ "-" ++ prettyShow (packageVersion p) unPackageName' = unPackageName . packageName - transDeps xs ys = - either (map sourcePackageId . allPackages) handleDepClosureFailure $ dependencyClosure allInstPkgsIdx availInstPkgIds - where - allInstPkgsIdx = installedPkgs lbi - allInstPkgIds = map installedUnitId $ allPackages allInstPkgsIdx - -- instPkgIds includes `stack-X.X.X`, which is not a dependency hence is missing from allInstPkgsIdx. Filter that out. - availInstPkgIds = filter (`elem` allInstPkgIds) $ testDeps xs ys - handleDepClosureFailure unsatisfied = - error $ - "Computation of transitive dependencies failed." ++ - if null unsatisfied then "" else " Unresolved dependencies: " ++ show unsatisfied + transDeps xs ys = either + (map sourcePackageId . allPackages) + handleDepClosureFailure $ dependencyClosure allInstPkgsIdx availInstPkgIds + where + allInstPkgsIdx = installedPkgs lbi + allInstPkgIds = map installedUnitId $ allPackages allInstPkgsIdx + -- instPkgIds includes `stack-X.X.X`, which is not a dependency hence is + -- missing from allInstPkgsIdx. Filter that out. + availInstPkgIds = filter (`elem` allInstPkgIds) $ testDeps xs ys + handleDepClosureFailure unsatisfied = + error $ + "Computation of transitive dependencies failed." + ++ if null unsatisfied + then "" + else " Unresolved dependencies: " ++ show unsatisfied testDeps :: ComponentLocalBuildInfo -> ComponentLocalBuildInfo -> [UnitId] -testDeps xs ys = map fst $ nub $ componentPackageDeps xs ++ componentPackageDeps ys +testDeps xs ys = + map fst $ nub $ componentPackageDeps xs ++ componentPackageDeps ys diff --git a/SetupHooks.hs b/SetupHooks.hs new file mode 100644 index 0000000000..d9b152deaa --- /dev/null +++ b/SetupHooks.hs @@ -0,0 +1,103 @@ +{-# LANGUAGE OverloadedStrings #-} + +-- | See https://github.com/well-typed/hooks-build-type. 
As part of their work, +-- Well-Typed reviewed stack-2.13.1 and identified that it used a pre-build hook +-- to generate, for the stack main library component, a module that lists all +-- the dependencies of stack (both library and executable), which is used in +-- `Stack.BuildInfo` to be listed. They also wrote an experimental patch, the +-- source code of which is below (with some reformatting). +-- +-- This would be used if Stack's build type was 'Hooks' rather than 'Custom'. + +module SetupHooks + ( setupHooks + ) where + +import Data.List ( nub, sortBy ) +import Data.Ord ( comparing ) +import Distribution.InstalledPackageInfo + ( installedUnitId, sourcePackageId ) +import Distribution.Package + ( PackageId, UnitId, packageName, packageVersion ) +import Distribution.PackageDescription + ( PackageDescription (..), Executable (..), componentNameRaw + ) +import Distribution.Pretty ( prettyShow ) +import Distribution.Simple + ( UserHooks(..), defaultMainWithHooks, simpleUserHooks ) +import Distribution.Simple.BuildPaths ( autogenComponentModulesDir ) +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.PackageIndex + ( allPackages, dependencyClosure ) +import Distribution.Simple.Setup ( BuildFlags (..), fromFlag ) +import Distribution.Simple.SetupHooks +import Distribution.Simple.Utils + ( createDirectoryIfMissingVerbose, rewriteFileEx ) +import Distribution.Types.PackageName ( PackageName, unPackageName ) +import Distribution.Types.UnqualComponentName + ( unUnqualComponentName ) +import Distribution.Verbosity ( Verbosity, normal ) +import System.FilePath ( () ) + +setupHooks :: SetupHooks +setupHooks = + noSetupHooks + { buildHooks = + noBuildHooks + { preBuildComponentHook = Just preBuildHook } + } + +preBuildHook :: BuildingWhat -> LocalBuildInfo -> TargetInfo -> IO () +preBuildHook flags lbi tgt + | CLibName LMainLibName <- componentName $ targetComponent tgt = + generateBuildModule (buildingWhatVerbosity flags) (localPkgDescr lbi) + lbi tgt + 
| otherwise = pure () + +generateBuildModule :: + Verbosity + -> PackageDescription + -> LocalBuildInfo + -> TargetInfo + -> IO () +generateBuildModule verbosity pkg lbi mainLibTargetInfo = do + -- Generate a module in the stack library component that lists all the + -- dependencies of stack (both the library and the executable). + createDirectoryIfMissingVerbose verbosity True autogenDir + withExeLBI pkg lbi $ \ _ exeCLBI -> do + rewriteFileEx normal buildModulePath $ unlines + [ "module Build_" ++ pkgNm + , " ( deps" + , " ) where" + , "" + , "deps :: [String]" + , "deps = " ++ (show $ formatdeps (transDeps mainLibCLBI exeCLBI)) + ] + where + mainLibCLBI = targetCLBI mainLibTargetInfo + autogenDir = autogenComponentModulesDir lbi mainLibCLBI + pkgNm :: String + pkgNm = unPackageName' $ package pkg + buildModulePath = autogenDir "Build_" ++ pkgNm ++ ".hs" + formatdeps = map formatone . sortBy (comparing unPackageName') + formatone p = unPackageName' p ++ "-" ++ prettyShow (packageVersion p) + unPackageName' = unPackageName . packageName + transDeps xs ys = either + (map sourcePackageId . allPackages) + handleDepClosureFailure $ dependencyClosure allInstPkgsIdx availInstPkgIds + where + allInstPkgsIdx = installedPkgs lbi + allInstPkgIds = map installedUnitId $ allPackages allInstPkgsIdx + -- instPkgIds includes `stack-X.X.X`, which is not a dependency hence is + -- missing from allInstPkgsIdx. Filter that out. + availInstPkgIds = filter (`elem` allInstPkgIds) $ testDeps xs ys + handleDepClosureFailure unsatisfied = + error $ + "Computation of transitive dependencies failed." 
+ ++ if null unsatisfied + then "" + else " Unresolved dependencies: " ++ show unsatisfied + +testDeps :: ComponentLocalBuildInfo -> ComponentLocalBuildInfo -> [UnitId] +testDeps xs ys = + map fst $ nub $ componentPackageDeps xs ++ componentPackageDeps ys diff --git a/app/Main.hs b/app/Main.hs new file mode 100644 index 0000000000..151009bdf6 --- /dev/null +++ b/app/Main.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +module Main + ( main + ) where + +import RIO ( IO ) +import qualified Stack + +-- | The entry point for the Stack executable. +main :: IO () +main = Stack.main diff --git a/cabal.config b/cabal.config new file mode 100644 index 0000000000..f965ed41a0 --- /dev/null +++ b/cabal.config @@ -0,0 +1,235 @@ +constraints: + , Cabal ==3.16.0.0 + , Cabal-syntax ==3.16.0.0 + , Glob ==0.10.2 + , OneTuple ==0.4.2 + , QuickCheck ==2.15.0.1 + , StateVar ==1.2.2 + , Win32 ==2.14.1.0 + , aeson ==2.2.3.0 + , aeson-warning-parser ==0.1.1 + , alex ==3.5.4.0 + , annotated-wl-pprint ==0.7.0 + , ansi-terminal ==1.1.4 + , ansi-terminal-types ==1.1.3 + , appar ==0.1.8 + , array ==0.5.8.0 + , assoc ==1.1.1 + , async ==2.2.5 + , atomic-counter ==0.1.2.4 + , attoparsec ==0.14.4 + , attoparsec-aeson ==2.2.2.0 + , auto-update ==0.2.6 + , base ==4.20.2.0 + , base-orphans ==0.9.3 + , base16 ==1.0 + , base16-bytestring ==1.0.2.0 + , base64 ==1.0 + , base64-bytestring ==1.2.1.0 + , basement ==0.0.16 + , bifunctors ==5.6.2 + , binary ==0.8.9.3 + , bitvec ==1.1.5.0 + , blaze-builder ==0.4.4.1 + , blaze-html ==0.9.2.0 + , blaze-markup ==0.8.3.0 + , byteorder ==1.0.4 + , bytestring ==0.12.2.0 + , casa-client ==0.0.3 + , casa-types ==0.0.3 + , case-insensitive ==1.2.1.0 + , cborg ==0.2.10.0 + , cereal ==0.5.8.3 + , character-ps ==0.1 + , clock ==0.8.4 + , cmdargs ==0.10.22 + , colour ==2.3.6 + , comonad ==5.0.9 + , companion ==0.1.0 + , conduit ==1.3.6.1 + , conduit-combinators ==1.3.0 + , conduit-extra ==1.3.8 + , containers ==0.7 + , contravariant ==1.5.5 + , cookie ==0.5.1 + 
, cryptohash-sha256 ==0.11.102.1 + , crypton ==1.0.4 + , crypton-asn1-encoding ==0.10.0 + , crypton-asn1-parse ==0.10.0 + , crypton-asn1-types ==0.4.1 + , crypton-conduit ==0.2.3 + , crypton-connection ==0.4.5 + , crypton-pem ==0.3.0 + , crypton-socks ==0.6.2 + , crypton-x509 ==1.8.0 + , crypton-x509-store ==1.8.0 + , crypton-x509-system ==1.8.0 + , crypton-x509-validation ==1.8.0 + , data-default ==0.8.0.1 + , data-default-class ==0.2.0.0 + , data-fix ==0.3.4 + , deepseq ==1.5.0.0 + , digest ==0.0.2.1 + , directory ==1.3.8.5 + , directory-ospath-streaming ==0.2.2 + , distributive ==0.6.2.1 + , dlist ==1.0 + , easy-file ==0.2.5 + , ech-config ==0.0.1 + , echo ==0.1.4 + , ed25519 ==0.0.5.0 + , exceptions ==0.10.9 + , extra ==1.8.1 + , fast-logger ==3.2.6 + , file-embed ==0.0.16.0 + , file-io ==0.1.5 + , filelock ==0.1.1.8 + , filepath ==1.5.4.0 + , fsnotify ==0.4.4.0 + , generic-deriving ==1.14.6 + , generically ==0.1.1 + , ghc-bignum ==1.3 + , ghc-boot ==9.10.3 + , ghc-boot-th ==9.10.3 + , ghc-internal ==9.1003.0 + , ghc-platform ==0.1.0.0 + , ghc-prim ==0.12.0 + , githash ==0.1.7.0 + , hackage-security ==0.6.3.2 + , half ==0.3.3 + , happy ==2.1.7 + , happy-lib ==2.1.7 + , hashable ==1.5.0.0 + , haskell-src-exts ==1.23.1 + , haskell-src-meta ==0.8.15 + , hi-file-parser ==0.1.8.0 + , hpack ==0.38.3 + , hpc ==0.7.0.2 + , hpke ==0.0.0 + , http-api-data ==0.6.2 + , http-client ==0.7.19 + , http-client-tls ==0.3.6.4 + , http-conduit ==2.3.9.1 + , http-download ==0.2.1.0 + , http-types ==0.12.4 + , indexed-traversable ==0.1.4 + , indexed-traversable-instances ==0.1.2 + , infer-license ==0.2.0 + , integer-conversion ==0.1.1 + , integer-gmp ==1.1 + , integer-logarithms ==1.0.4 + , iproute ==1.7.15 + , libyaml ==0.1.4 + , libyaml-clib ==0.2.5 + , lift-type ==0.1.2.0 + , lifted-base ==0.2.3.12 + , megaparsec ==9.7.0 + , memory ==0.18.0 + , microlens ==0.4.14.0 + , microlens-mtl ==0.2.1.0 + , microlens-th ==0.4.3.17 + , mime-types ==0.1.2.0 + , mintty ==0.1.4 + , 
monad-control ==1.0.3.1 + , monad-logger ==0.3.42 + , monad-loops ==0.4.3 + , mono-traversable ==1.0.21.0 + , mtl ==2.3.1 + , mtl-compat ==0.2.2 + , mustache ==2.4.3.1 + , neat-interpolation ==0.5.1.4 + , network ==3.2.8.0 + , network-byte-order ==0.1.7 + , network-uri ==2.6.4.2 + , old-locale ==1.0.0.7 + , old-time ==1.1.0.4 + , open-browser ==0.4.0.0 + , optparse-applicative ==0.18.1.0 + , optparse-simple ==0.1.1.4 + , os-string ==2.0.7 + , pantry ==0.11.2 + , parsec ==3.1.18.0 + , parser-combinators ==1.3.0 + , path ==0.9.6 + , path-io ==1.8.2 + , path-pieces ==0.2.1 + , persistent ==2.18.0.0 + , persistent-sqlite ==2.13.3.1 + , persistent-template ==2.12.0.0 + , pretty ==1.1.3.6 + , prettyprinter ==1.7.1 + , prettyprinter-ansi-terminal ==1.1.3 + , primitive ==0.9.1.0 + , process ==1.6.26.1 + , project-template ==0.2.1.0 + , random ==1.2.1.3 + , replace-megaparsec ==1.5.0.1 + , resource-pool ==0.4.0.0 + , resourcet ==1.3.0 + , retry ==0.9.3.1 + , rio ==0.1.24.0 + , rio-orphans ==0.1.2.0 + , rio-prettyprint ==0.1.9.0 + , rts ==1.0.2 + , safe ==0.3.21 + , safe-exceptions ==0.1.7.4 + , scientific ==0.3.8.0 + , semaphore-compat ==1.0.0 + , semialign ==1.3.1 + , semigroupoids ==6.0.1 + , serialise ==0.2.6.1 + , silently ==1.2.5.4 + , split ==0.2.5 + , splitmix ==0.1.3.1 + , stack ==3.10.0 + , static-bytes ==0.1.1 + , stm ==2.5.3.1 + , stm-chans ==3.0.0.9 + , streaming-commons ==0.2.3.1 + , strict ==0.5.1 + , string-interpolate ==0.3.4.0 + , syb ==0.7.2.4 + , tagged ==0.8.9 + , tar ==0.6.4.0 + , tar-conduit ==0.4.1 + , tasty ==1.5.3 + , template-haskell ==2.22.0.0 + , temporary ==1.3 + , text ==2.1.3 + , text-conversions ==0.3.1.1 + , text-iso8601 ==0.1.1 + , text-metrics ==0.3.3 + , text-short ==0.1.6 + , th-abstraction ==0.7.1.0 + , th-compat ==0.1.6 + , th-expand-syns ==0.4.12.0 + , th-lift ==0.8.6 + , th-lift-instances ==0.1.20 + , th-orphans ==0.13.16 + , th-reify-many ==0.1.10 + , these ==1.2.1 + , time ==1.12.2 + , time-compat ==1.9.8 + , time-hourglass ==0.3.0 
+ , tls ==2.2.2 + , transformers ==0.6.1.1 + , transformers-base ==0.4.6 + , transformers-compat ==0.7.2 + , typed-process ==0.2.13.0 + , unix ==2.8.6.0 + , unix-compat ==0.7.4.1 + , unix-time ==0.4.17 + , unliftio ==0.2.25.1 + , unliftio-core ==0.2.1.0 + , unordered-containers ==0.2.20.1 + , utf8-string ==1.0.2 + , uuid-types ==1.0.6 + , vault ==0.3.1.5 + , vector ==0.13.2.0 + , vector-algorithms ==0.9.1.0 + , vector-stream ==0.1.0.1 + , witherable ==0.5 + , yaml ==0.11.11.2 + , zip-archive ==0.4.3.2 + , zlib ==0.7.1.1 + , zlib-clib ==1.3.1 diff --git a/cabal.project b/cabal.project new file mode 100644 index 0000000000..91b417ea97 --- /dev/null +++ b/cabal.project @@ -0,0 +1,40 @@ +-- This file is a configuration file for Cabal (the tool). It is provided to +-- assist some users of that tool to develop Stack. For information about +-- `cabal.project` files, see: +-- https://cabal.readthedocs.io/en/stable/cabal-project.html. +-- +-- For information about possible limitations of the `cabal.config` files +-- corresponding to Stackage package sets that are made available by Stackage +-- see: https://github.com/fpco/stackage-server/issues/232. +-- +-- `import:` is only available to users of Cabal (the tool) >= 3.8.1.0. +-- +-- The constraints in file `cabal.config` can be obtained by commanding: +-- +-- > stack ls dependencies cabal > cabal.config +-- +-- However, be aware that, in respect of the `unix` package or the `Win32` +-- package (that may come with GHC, depending on the operating system): +-- +-- * on Windows, the Stack project does not depend on `unix` but depends on +-- `Win32`; and +-- +-- * on non-Windows operating systems, the Stack project does not depend on +-- `Win32` but depends on `unix`. +-- +-- The command above will add one of the two packages to `cabal.config` but omit +-- the other. A comprehensive `cabal.config` will need to be created by editing +-- the command's output. 
+-- +-- Also be aware that there may be other packages required only on non-Windows +-- systems. For example, the `hinotify` package. +-- +-- Be sure to set `with-compiler: ghc-x.y.z` below to the version of GHC that is +-- specified by the snapshot specified in Stack's project-level YAML +-- configuration file (`stack.yaml`). The relevant version of GHC can be +-- confirmed by reviewing the snapshot on Stackage. For example, at: +-- https://www.stackage.org/lts-24.37/cabal.config. +-- +with-compiler: ghc-9.10.3 +import: cabal.config +packages: . diff --git a/doc/GUIDE.md b/doc/GUIDE.md deleted file mode 100644 index cacaaac516..0000000000 --- a/doc/GUIDE.md +++ /dev/null @@ -1,1758 +0,0 @@ -
- -# User guide - -stack is a modern, cross-platform build tool for Haskell code. - -This guide takes a new stack user through the typical workflows. This guide -will not teach Haskell or involve much code, and it requires no prior experience -with the Haskell packaging system or other build tools. - -__NOTE__ This document is probably out of date in some places and -deserves a refresh. If you find this document helpful, please drop a -note on [issue #4252](https://github.com/commercialhaskell/stack/issues/4252). - -## Stack's functions - -stack handles the management of your toolchain (including GHC — the Glasgow -Haskell Compiler — and, for Windows users, MSYS), building and registering -libraries, building build tool dependencies, and more. While it can use existing -tools on your system, stack has the capacity to be your one-stop shop for all -Haskell tooling you need. This guide will follow that stack-centric approach. - -### What makes stack special? - -The primary stack design point is __reproducible builds__. If you run `stack build` -today, you should get the same result running `stack build` tomorrow. -There are some cases that can break that rule (changes in your operating system -configuration, for example), but, overall, stack follows this design philosophy -closely. To make this a simple process, stack uses curated package sets -called __snapshots__. - -stack has also been designed from the ground up to be user friendly, with an -intuitive, discoverable command line interface. For many users, simply -downloading stack and reading `stack --help` will be enough to get up and -running. This guide provides a more gradual tour for users who prefer that -learning style. - -To build your project, stack uses a `stack.yaml` file in the root directory of -your project as a sort of blueprint. That file contains a reference, called a -__resolver__, to the snapshot which your package will be built against. 
- -Finally, stack is __isolated__: it will not make changes outside of specific -stack directories. stack-built files generally go in either the stack root -directory (default `~/.stack` or, on Windows, `%LOCALAPPDATA%\Programs\stack`) -or `./.stack-work` directories local to each project. The stack root directory -holds packages belonging to snapshots and any stack-installed versions of GHC. -Stack will not tamper with any system version of GHC or interfere with packages -installed by `cabal` or any other build tools. - -_NOTE_ In this guide, we'll use commands as run on a GNU/Linux system -(specifically Ubuntu 14.04, 64-bit) and share output from that. Output on other -systems — or with different versions of stack — will be slightly different, but -all commands work cross-platform, unless explicitly stated otherwise. - -## Downloading and Installation - -The [documentation dedicated to downloading -stack](install_and_upgrade.md) has the most -up-to-date information for a variety of operating systems, including multiple -GNU/Linux flavors. Instead of repeating that content here, please go check out -that page and come back here when you can successfully run `stack --version`. -The rest of this session will demonstrate the installation procedure on a -vanilla Ubuntu 14.04 machine. - -``` -michael@d30748af6d3d:~$ sudo apt-get install wget -# installing ... -michael@d30748af6d3d:~$ wget -qO- https://get.haskellstack.org/ | sh -# downloading ... -michael@d30748af6d3d:~$ stack --help -# help output ... -``` - -With stack now up and running, you're good to go. Though not required, we -recommend setting your PATH environment variable to include `$HOME/.local/bin`: - -``` -michael@d30748af6d3d:~$ echo 'export PATH=$HOME/.local/bin:$PATH' >> ~/.bashrc -``` - -## Hello World Example - -With stack installed, let's create a new project from a template and walk -through the most common stack commands. 
- -### stack new - -We'll start off with the `stack new` command to create a new *project*, that -will contain a Haskell *package* of the same name. So let's pick a valid -package name first: - -> A package is identified by a globally-unique package name, which consists -> of one or more alphanumeric words separated by hyphens. To avoid ambiguity, -> each of these words should contain at least one letter. - -(From the [Cabal users guide](https://www.haskell.org/cabal/users-guide/developing-packages.html#developing-packages)) - -We'll call our project `helloworld`, and we'll use the `new-template` project -template: - -``` -michael@d30748af6d3d:~$ stack new helloworld new-template -``` - -For this first stack command, there's quite a bit of initial setup it needs to -do (such as downloading the list of packages available upstream), so you'll see -a lot of output. Over the course of this guide a lot of the content will begin -to make more sense. - -We now have a project in the `helloworld` directory! - -### stack build - -Next, we'll run the most important stack command: `stack build`. - -``` -michael@d30748af6d3d:~$ cd helloworld -michael@d30748af6d3d:~/helloworld$ stack build -# installing ... building ... -``` - -stack needs a GHC in order to build your project. stack will discover that you -are missing it and will install it for you. You can do this manually by using -the `stack setup` command. - -You'll get intermediate download percentage statistics while the download is -occurring. This command may take some time, depending on download speeds. - -__NOTE__: GHC will be installed to your global stack root directory, so -calling `ghc` on the command line won't work. See the `stack exec`, -`stack ghc`, and `stack runghc` commands below for more information. - -Once a GHC is installed, stack will then build your project. 
- -### stack exec - -Looking closely at the output of the previous command, you can see that it built -both a library called "helloworld" and an executable called "helloworld-exe". -We'll explain more in the next section, but, for now, just notice that the -executables are installed in our project's `./.stack-work` directory. - -Now, Let's use `stack exec` to run our executable (which just outputs the string -"someFunc"): - -``` -michael@d30748af6d3d:~/helloworld$ stack exec helloworld-exe -someFunc -``` - -`stack exec` works by providing the same reproducible environment that was used -to build your project to the command that you are running. Thus, it knew where -to find `helloworld-exe` even though it is hidden in the `./.stack-work` -directory. - -### stack test - -Finally, like all good software, helloworld actually has a test suite. - -Let's run it with `stack test`: - -``` -michael@d30748af6d3d:~/helloworld$ stack test -# build output ... -``` - -Reading the output, you'll see that stack first builds the test suite and then -automatically runs it for us. For both the `build` and `test` command, already -built components are not built again. You can see this by running `stack build` -and `stack test` a second time: - -``` -michael@d30748af6d3d:~/helloworld$ stack build -michael@d30748af6d3d:~/helloworld$ stack test -# build output ... -``` - -## Inner Workings of stack - -In this subsection, we'll dissect the helloworld example in more detail. - -### Files in helloworld - -Before studying stack more, let's understand our project a bit better. - -``` -michael@d30748af6d3d:~/helloworld$ find * -type f -LICENSE -README.md -Setup.hs -app/Main.hs -helloworld.cabal -package.yaml -src/Lib.hs -stack.yaml -test/Spec.hs -``` - -The `app/Main.hs`, `src/Lib.hs`, and `test/Spec.hs` files are all Haskell -source files that compose the actual functionality of our project (we won't -dwell on them here). - -The `LICENSE` file and `README.md` have no impact on the -build. 
- -The `helloworld.cabal` file is updated automatically as part of the -`stack build` process and should not be modified. - -The files of interest here are `Setup.hs`, `stack.yaml`, and `package.yaml`. - -The `Setup.hs` file is a component of the Cabal build system which stack uses. -It's technically not needed by stack, but it is still considered good practice -in the Haskell world to include it. The file we're using is straight -boilerplate: - -```haskell -import Distribution.Simple -main = defaultMain -``` - -Next, let's look at our `stack.yaml` file, which gives our project-level settings. - -If you're familiar with YAML, you may recognize that the `flags` and -`extra-deps` keys have empty values. We'll see more interesting usages for these -fields later. Let's focus on the other two fields. `packages` tells stack which -local packages to build. In our simple example, we have only a single package in -our project, located in the same directory, so `'.'` suffices. However, stack -has powerful support for multi-package projects, which we'll elaborate on as -this guide progresses. - -The final field is `resolver`. This tells stack *how* to build your package: -which GHC version to use, versions of package dependencies, and so on. Our -value here says to use [LTS Haskell version -3.2](https://www.stackage.org/lts-3.2), which implies GHC 7.10.2 (which is why -`stack setup` installs that version of GHC). There are a number of values you -can use for `resolver`, which we'll cover later. - -Another file important to the build is `package.yaml`. - -Since Stack 1.6.1, the `package.yaml` is the preferred package format that is -provided built-in by stack through [the hpack tool](https://github.com/sol/hpack). -The default behaviour is to generate the `.cabal` file from this `package.yaml`, -and accordingly you should **not** modify the `.cabal` file. - -It is also important to remember that stack is built on top of the Cabal build system. 
Therefore, an -understanding of the moving parts in Cabal are necessary. In Cabal, we have individual -*packages*, each of which contains a single `.cabal` file. The `.cabal` file -can define 1 or more *components*: a library, executables, test suites, and benchmarks. -It also specifies additional information such as library dependencies, default language -pragmas, and so on. - -In this guide, we'll discuss the bare minimum necessary to understand how to -modify a `package.yaml` file. You can see a full list of the available options -at the [hpack documentation](https://github.com/sol/hpack#quick-reference). Haskell.org -has the definitive [reference for the `.cabal` file format](https://www.haskell.org/cabal/users-guide/developing-packages.html). - -### The setup command - -As we saw above, the `build` command installed GHC for us. Just for kicks, -let's manually run the `setup` command: - -``` -michael@d30748af6d3d:~/helloworld$ stack setup -stack will use a sandboxed GHC it installed -For more information on paths, see 'stack path' and 'stack exec env' -To use this GHC and packages outside of a project, consider using: -stack ghc, stack ghci, stack runghc, or stack exec -``` - -Thankfully, the command is smart enough to know not to perform an installation -twice. As the command output above indicates, you can use `stack path` -for quite a bit of path information (which we'll play with more later). - -For now, we'll just look at where GHC is installed: - -``` -michael@d30748af6d3d:~/helloworld$ stack exec -- which ghc -/home/michael/.stack/programs/x86_64-linux/ghc-7.10.2/bin/ghc -``` - -As you can see from that path (and as emphasized earlier), the installation is -placed to not interfere with any other GHC installation, whether system-wide or -even different GHC versions installed by stack. - -## Cleaning your project - -You can clean up build artifacts for your project using the `stack clean` and `stack purge` commands. 
- -### `stack clean` - -`stack clean` deletes the local working directories containing compiler output. -By default, that means the contents of directories in `.stack-work/dist`, for all the `.stack-work` directories within a project. - -Use `stack clean ` to delete the output for the package _specific-package_ only. - -### `stack purge` - -`stack purge` deletes the local stack working directories, including extra-deps, git dependencies and the compiler output (including logs). -It does not delete any snapshot packages, compilers or programs installed using `stack install`. This essentially -reverts the project to a completely fresh state, as if it had never been built. -`stack purge` is just a shortcut for `stack clean --full` - -### The build command - -The build command is the heart and soul of stack. It is the engine that powers -building your code, testing it, getting dependencies, and more. Quite a bit of -the remainder of this guide will cover more advanced `build` functions and -features, such as building test and Haddocks at the same time, or constantly -rebuilding blocking on file changes. - -*On a philosophical note:* Running the build command twice with the same -options and arguments should generally be a no-op (besides things like -rerunning test suites), and should, in general, produce a reproducible result -between different runs. - -## Adding dependencies - -Let's say we decide to modify our `helloworld` source a bit to use a new library, -perhaps the ubiquitous text package. In `src/Lib.hs`, we can, for example add: - -```haskell -{-# LANGUAGE OverloadedStrings #-} -module Lib - ( someFunc - ) where - -import qualified Data.Text.IO as T - -someFunc :: IO () -someFunc = T.putStrLn "someFunc" -``` - -When we try to build this, things don't go as expected: - -```haskell -michael@d30748af6d3d:~/helloworld$ stack build -# build failure output (abridged for clarity) ... 
-/helloworld/src/Lib.hs:5:1: error: - Could not find module `Data.Text.IO' - Use -v to see a list of the files searched for. - | -5 | import qualified Data.Text.IO as T - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -``` - -This means that the package containing the module in question is not available. To tell -stack to use [text](https://hackage.haskell.org/package/text), you need to add it to your -`package.yaml` file — specifically in your `dependencies` section, like this: - -``` -dependencies: -- base >= 4.7 && < 5 -- text # added here -``` - -Now if we rerun `stack build`, we should get a successful result: - -``` -michael@d30748af6d3d:~/helloworld$ stack build -# build output ... -``` - -This output means that the text package was downloaded, configured, built, and -locally installed. Once that was done, we moved on to building our local package -(helloworld). At no point did we need to ask stack to build dependencies — it -does so automatically. - -### Listing Dependencies - -Let's have stack add a few more dependencies to our project. First, we'll include two new packages in the -`dependencies` section for our library in our `package.yaml`: - -``` -dependencies: -- filepath -- containers -``` - -After adding these two dependencies, we can again run `stack build` to have them installed: - -``` -michael@d30748af6d3d:~/helloworld$ stack build -# build output ... -``` - -Finally, to find out which versions of these libraries stack installed, we can ask stack to `ls dependencies`: - -``` -michael@d30748af6d3d:~/helloworld$ stack ls dependencies -# dependency output ... -``` - -### extra-deps - -Let's try a more off-the-beaten-track package: the joke -[acme-missiles](http://www.stackage.org/package/acme-missiles) package. 
Our -source code is simple: - -```haskell -module Lib - ( someFunc - ) where - -import Acme.Missiles - -someFunc :: IO () -someFunc = launchMissiles -``` - -Again, we add this new dependency to the `package.yaml` file like this: - -``` -dependencies: -- base >= 4.7 && < 5 -- text -- filepath -- containers -- acme-missiles # added -``` - -However, rerunning `stack build` shows us the following error message: - -``` -michael@d30748af6d3d:~/helloworld$ stack build -# build failure output ... -``` - -It says that it was unable to construct the build plan. - -This brings us to the next major topic in using stack. - -## Curated package sets - -Remember above when `stack new` selected some [LTS resolver](https://github.com/fpco/lts-haskell#readme) -for us? That defined our build plan and available packages. When we tried using the -`text` package, it just worked, because it was part of the LTS *package set*. - -But `acme-missiles` is not part of that package set, so building failed. - -To add this new dependency, we'll use the `extra-deps` field in `stack.yaml` to -define extra dependencies not present in the resolver. You can add this like so: - -```yaml -extra-deps: -- acme-missiles-0.3 # not in the LTS -``` - -Now `stack build` will succeed. - -With that out of the way, let's dig a little bit more into these package sets, -also known as *snapshots*. We mentioned the LTS resolvers, and you can get quite a bit of -information about it at [https://www.stackage.org/lts](https://www.stackage.org/lts), including: - -* The appropriate resolver value (`resolver: lts-16.15`, as is currently the latest LTS) -* The GHC version used -* A full list of all packages available in this snapshot -* The ability to perform a Hoogle search on the packages in this snapshot -* A [list of all modules](https://www.stackage.org/lts/docs) in a snapshot, - which can be useful when trying to determine which package to add to your - `package.yaml` file. 
- -You can also see a [list of all available -snapshots](https://www.stackage.org/snapshots). You'll notice two flavors: LTS -(for "Long Term Support") and Nightly. You can read more about them on the -[LTS Haskell Github page](https://github.com/fpco/lts-haskell#readme). If you're -not sure which to use, start with LTS Haskell (which stack will lean towards by -default as well). - -## Resolvers and changing your compiler version - -Let's explore package sets a bit further. Instead of lts-16.15, let's change our -`stack.yaml` file to use [the latest nightly](https://www.stackage.org/nightly). Right now, -this is currently 2020-03-24 - please see the resolve from the link above to get the latest. - -Then, Rerunning `stack build` will produce: - -``` -michael@d30748af6d3d:~/helloworld$ stack build -Downloaded nightly-2020-03-24 build plan. -# build output ... -``` - -We can also change resolvers on the command line, which can be useful in a -Continuous Integration (CI) setting, like on Travis. For example: - -``` -michael@d30748af6d3d:~/helloworld$ stack --resolver lts-16.15 build -Downloaded lts-16.15 build plan. -# build output ... -``` - -When passed on the command line, you also get some additional "short-cut" -versions of resolvers: `--resolver nightly` will use the newest Nightly resolver -available, `--resolver lts` will use the newest LTS, and `--resolver lts-2` will -use the newest LTS in the 2.X series. The reason these are only available on the -command line and not in your `stack.yaml` file is that using them: - -1. Will slow down your build (since stack then needs to download information on - the latest available LTS each time it builds) -2. Produces unreliable results (since a build run today may proceed differently - tomorrow because of changes outside of your control) - -### Changing GHC versions - -Finally, let's try using an older LTS snapshot. 
We'll use the newest 2.X -snapshot: - -``` -michael@d30748af6d3d:~/helloworld$ stack --resolver lts-2 build -# build output ... -``` - -This succeeds, automatically installing the necessary GHC along the way. So, -we see that different LTS versions use different GHC versions and stack can -handle that. - -### Other resolver values - -We've mentioned `nightly-YYYY-MM-DD` and `lts-X.Y` values for the resolver. -There are actually other options available, and the list will grow over time. -At the time of writing: - -* `ghc-X.Y.Z`, for requiring a specific GHC version but no additional packages -* Experimental custom snapshot support - -The most up-to-date information can always be found in the -[stack.yaml documentation](yaml_configuration.md#resolver). - -## Existing projects - -Alright, enough playing around with simple projects. Let's take an open source -package and try to build it. We'll be ambitious and use -[yackage](https://www.stackage.org/package/yackage), a local package server -using [Yesod](http://www.yesodweb.com/). To get the code, we'll use the -`stack unpack` command: - -``` -cueball:~$ stack unpack yackage-0.8.0 -Unpacked yackage-0.8.0 to /var/home/harendra/yackage-0.8.0/ -cueball:~$ cd yackage-0.8.0/ -``` - -Note that you can also unpack to the directory of your liking instead of -the current one by issuing: - -``` -cueball:~$ stack unpack yackage-0.8.0 --to ~/work -``` - -This will create a `yackage-0.8.0` directory inside `~/work` - -### stack init -This new directory does not have a `stack.yaml` file, so we need to make one -first. We could do it by hand, but let's be lazy instead with the `stack init` -command: - -``` -cueball:~/yackage-0.8.0$ stack init -# init output ... 
-``` - -stack init does quite a few things for you behind the scenes: - -* Finds all of the `.cabal` files in your current directory and subdirectories - (unless you use `--ignore-subdirs`) and determines the packages and versions - they require -* Finds the best combination of snapshot and package flags that allows everything to - compile with minimum external dependencies -* It tries to look for the best matching snapshot from latest LTS, latest - nightly, other LTS versions in that order - -Assuming it finds a match, it will write your `stack.yaml` file, and everything -will work. - -(Note: yackage does not currently support hpack, but you can also use hpack-convert should you need to generate a package.yaml). - -#### Excluded Packages - -Sometimes multiple packages in your project may have conflicting requirements. -In that case `stack init` will fail, so what do you do? - -You could manually create `stack.yaml` by omitting some packages to resolve the -conflict. Alternatively you can ask `stack init` to do that for you by -specifying the `--omit-packages` flag on the command line. Let's see how that -works. - -To simulate a conflict we will use acme-missiles-0.3 in yackage and we will -also copy `yackage.cabal` to another directory and change the name of the file -and package to yackage-test. In this new package we will use acme-missiles-0.2 -instead. Let's see what happens when we re-run stack init: - -``` -cueball:~/yackage-0.8.0$ stack init --force --omit-packages -# init failure output ... -``` - -Looking at `stack.yaml`, you will see that the excluded packages have been -commented out under the `packages` field. In case wrong packages are excluded -you can uncomment the right one and comment the other one. - -Packages may get excluded due to conflicting requirements among user packages -or due to conflicting requirements between a user package and the resolver -compiler. 
If all of the packages have a conflict with the compiler then all of -them may get commented out. - -When packages are commented out you will see a warning every time you run a -command which needs the configuration file. The warning can be disabled by -editing the configuration file and removing it. - -#### Using a specific resolver - -Sometimes you may want to use a specific resolver for your project instead of -`stack init` picking one for you. You can do that by using -`stack init --resolver <resolver>`. - -You can also init with a compiler resolver if you do not want to use a -snapshot. That will result in all of your project's dependencies being put -under the `extra-deps` section. - -#### Installing the compiler - -stack will automatically install the compiler when you run `stack build` but you can -manually specify the compiler by running `stack setup <GHC-VERSION>`. - -#### Miscellaneous and diagnostics - -_Add selected packages_: If you want to use only selected packages from your -project directory you can do so by explicitly specifying the package directories -on the command line. - -_Duplicate package names_: If multiple packages under the directory tree have -the same name, stack init will report those and automatically ignore one of them. - -_Ignore subdirectories_: By default stack init searches all the subdirectories -for `.cabal` files. If you do not want that then you can use the `--ignore-subdirs` -command line switch. - -_Cabal warnings_: stack init will show warnings if there were issues in reading -a cabal package file. You may want to pay attention to the warnings as -sometimes they may result in incomprehensible errors later on during dependency -solving. - -_Package naming_: If the `Name` field defined in a cabal file does not match -with the cabal file name then `stack init` will refuse to continue. - -_Cabal install errors_: stack init uses `cabal-install` to determine external -dependencies. 
When cabal-install encounters errors, cabal errors are displayed -as is by stack init for diagnostics. - -_User warnings_: When packages are excluded or external dependencies added -stack will show warnings every time configuration file is loaded. You can -suppress the warnings by editing the config file and removing the warnings from -it. You may see something like this: - -``` -cueball:~/yackage-0.8.0$ stack build -Warning: Some packages were found to be incompatible with the resolver and have been left commented out in the packages section. -Warning: Specified resolver could not satisfy all dependencies. Some external packages have been added as dependencies. -You can suppress this message by removing it from stack.yaml -``` -## Different databases - -Time to take a short break from hands-on examples and discuss a little -architecture. stack has the concept of multiple *databases*. A database -consists of a GHC package database (which contains the compiled version of a -library), executables, and a few other things as well. To give you an idea: - -``` -michael@d30748af6d3d:~/helloworld$ ls .stack-work/install/x86_64-linux/lts-3.2/7.10.2/ -bin doc flag-cache lib pkgdb -``` - -Databases in stack are *layered*. For example, the database listing we just gave -is called a *local* database. That is layered on top of a *snapshot* database, -which contains the libraries and executables specified in the snapshot itself. -Finally, GHC itself ships with a number of libraries and executables, which -forms the *global* database. 
To get a quick idea of this, we can look at the -output of the `stack exec -- ghc-pkg list` command in our helloworld project: - -``` -/home/michael/.stack/programs/x86_64-linux/ghc-7.10.2/lib/ghc-7.10.2/package.conf.d - Cabal-1.22.4.0 - array-0.5.1.0 - base-4.8.1.0 - bin-package-db-0.0.0.0 - binary-0.7.5.0 - bytestring-0.10.6.0 - containers-0.5.6.2 - deepseq-1.4.1.1 - directory-1.2.2.0 - filepath-1.4.0.0 - ghc-7.10.2 - ghc-prim-0.4.0.0 - haskeline-0.7.2.1 - hoopl-3.10.0.2 - hpc-0.6.0.2 - integer-gmp-1.0.0.0 - pretty-1.1.2.0 - process-1.2.3.0 - rts-1.0 - template-haskell-2.10.0.0 - terminfo-0.4.0.1 - time-1.5.0.1 - transformers-0.4.2.0 - unix-2.7.1.0 - xhtml-3000.2.1 -/home/michael/.stack/snapshots/x86_64-linux/nightly-2015-08-26/7.10.2/pkgdb - stm-2.4.4 -/home/michael/helloworld/.stack-work/install/x86_64-linux/nightly-2015-08-26/7.10.2/pkgdb - acme-missiles-0.3 - helloworld-0.1.0.0 -``` - -Notice that acme-missiles ends up in the *local* database. Anything which is -not installed from a snapshot ends up in the local database. This includes: -your own code, extra-deps, and in some cases even snapshot packages, if you -modify them in some way. The reason we have this structure is that: - -* it lets multiple projects reuse the same binary builds of many snapshot - packages, -* but doesn't allow different projects to "contaminate" each other by putting - non-standard content into the shared snapshot database - -Typically, the process by which a snapshot package is marked as modified is -referred to as "promoting to an extra-dep," meaning we treat it just like a -package in the extra-deps section. 
This happens for a variety of reasons, -including: - -* changing the version of the snapshot package -* changing build flags -* one of the packages that the package depends on has been promoted to an - extra-dep - -As you probably guessed, there are multiple snapshot databases available, e.g.: - -``` -michael@d30748af6d3d:~/helloworld$ ls ~/.stack/snapshots/x86_64-linux/ -lts-2.22 lts-3.1 lts-3.2 nightly-2015-08-26 -``` - -These databases don't get layered on top of each other; they are each used -separately. - -In reality, you'll rarely — if ever — interact directly with these databases, -but it's good to have a basic understanding of how they work so you can -understand why rebuilding may occur at different points. - -## The build synonyms - -Let's look at a subset of the `stack --help` output: - -``` -build Build the package(s) in this directory/configuration -install Shortcut for 'build --copy-bins' -test Shortcut for 'build --test' -bench Shortcut for 'build --bench' -haddock Shortcut for 'build --haddock' -``` - -Note that four of these commands are just synonyms for the `build` command. They -are provided for convenience for common cases (e.g., `stack test` instead of -`stack build --test`) and so that commonly expected commands just work. - -What's so special about these commands being synonyms? It allows us to make -much more composable command lines. For example, we can have a command that -builds executables, generates Haddock documentation (Haskell API-level docs), -and builds and runs your test suites, with: - -``` -stack build --haddock --test -``` - -You can even get more inventive as you learn about other flags. 
For example, -take the following: - -``` -stack build --pedantic --haddock --test --exec "echo Yay, it succeeded" --file-watch -``` - -This will: - -* turn on all warnings and errors -* build your library and executables -* generate Haddocks -* build and run your test suite -* run the command `echo Yay, it succeeded` when that completes -* after building, watch for changes in the files used to build the project, and - kick off a new build when done - -### install and copy-bins - -It's worth calling out the behavior of the install command and `--copy-bins` -option, since this has confused a number of users (especially when compared to -behavior of other tools like cabal-install). The `install` command does -precisely one thing in addition to the build command: it copies any generated -executables to the local bin path. You may recognize the default value for that -path: - -``` -michael@d30748af6d3d:~/helloworld$ stack path --local-bin -/home/michael/.local/bin -``` - -That's why the download page recommends adding that directory to your `PATH` -environment variable. This feature is convenient, because now you can simply -run `executable-name` in your shell instead of having to run -`stack exec executable-name` from inside your project directory. - -Since it's such a point of confusion, let me list a number of things stack does -*not* do specially for the install command: - -* stack will always build any necessary dependencies for your code. The install - command is not necessary to trigger this behavior. If you just want to build a - project, run `stack build`. -* stack will *not* track which files it's copied to your local bin path nor - provide a way to automatically delete them. There are many great tools out - there for managing installation of binaries, and stack does not attempt to - replace those. -* stack will not necessarily be creating a relocatable executable. 
If your - executable hard-codes paths, copying the executable will not change those - hard-coded paths. - * At the time of writing, there's no way to change those kinds of paths with - stack, but see [issue #848 about - --prefix](https://github.com/commercialhaskell/stack/issues/848) for - future plans. - -That's really all there is to the install command: for the simplicity of what -it does, it occupies a much larger mental space than is warranted. - -## Targets, locals, and extra-deps - -We haven't discussed this too much yet, but, in addition to having a number of -synonyms *and* taking a number of options on the command line, the build command -*also* takes many arguments. These are parsed in different ways, and can be used -to achieve a high level of flexibility in telling stack exactly what you want -to build. - -We're not going to cover the full generality of these arguments here; instead, -there's [documentation covering the full build command -syntax](build_command.md). -Here, we'll just point out a few different types of arguments: - -* You can specify a *package name*, e.g. `stack build vector`. - * This will attempt to build the vector package, whether it's a local - package, in your extra-deps, in your snapshot, or just available upstream. - If it's just available upstream but not included in your locals, - extra-deps, or snapshot, the newest version is automatically promoted to - an extra-dep. -* You can also give a *package identifier*, which is a package name plus - version, e.g. `stack build yesod-bin-1.4.14`. - * This is almost identical to specifying a package name, except it will (1) - choose the given version instead of latest, and (2) error out if the given - version conflicts with the version of a local package. -* The most flexibility comes from specifying individual *components*, e.g. - `stack build helloworld:test:helloworld-test` says "build the test suite - component named helloworld-test from the helloworld package." 
- * In addition to this long form, you can also shorten it by skipping what - type of component it is, e.g. `stack build helloworld:helloworld-test`, or - even skip the package name entirely, e.g. `stack build :helloworld-test`. -* Finally, you can specify individual *directories* to build to trigger building - of any local packages included in those directories or subdirectories. - -When you give no specific arguments on the command line (e.g., `stack build`), -it's the same as specifying the names of all of your local packages. If you -just want to build the package for the directory you're currently in, you can -use `stack build .`. - -### Components, --test, and --bench - -Here's one final important yet subtle point. Consider our helloworld package: -it has a library component, an executable helloworld-exe, and a test suite -helloworld-test. When you run `stack build helloworld`, how does it know which -ones to build? By default, it will build the library (if any) and all of the -executables but ignore the test suites and benchmarks. - -This is where the `--test` and `--bench` flags come into play. If you use them, -those components will also be included. So `stack build --test helloworld` will -end up including the helloworld-test component as well. - -You can bypass this implicit adding of components by being much more explicit, -and stating the components directly. For example, the following will not build -the helloworld-exe executable: - -``` -michael@d30748af6d3d:~/helloworld$ stack clean -michael@d30748af6d3d:~/helloworld$ stack build :helloworld-test -helloworld-0.1.0.0: configure (test) -Configuring helloworld-0.1.0.0... -helloworld-0.1.0.0: build (test) -Preprocessing library helloworld-0.1.0.0... -[1 of 1] Compiling Lib ( src/Lib.hs, .stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/Lib.o ) -In-place registering helloworld-0.1.0.0... -Preprocessing test suite 'helloworld-test' for helloworld-0.1.0.0... 
-[1 of 1] Compiling Main ( test/Spec.hs, .stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/helloworld-test/helloworld-test-tmp/Main.o ) -Linking .stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/helloworld-test/helloworld-test ... -helloworld-0.1.0.0: test (suite: helloworld-test) -Test suite not yet implemented -``` - -We first cleaned our project to clear old results so we know exactly what stack -is trying to do. Notice that it builds the helloworld-test test suite, and the -helloworld library (since it's used by the test suite), but it does not build -the helloworld-exe executable. - -And now the final point: the last line shows that our command also *runs* the -test suite it just built. This may surprise some people who would expect tests -to only be run when using `stack test`, but this design decision is what allows -the `stack build` command to be as composable as it is (as described -previously). The same rule applies to benchmarks. To spell it out completely: - -* The --test and --bench flags simply state which components of a package should - be built, if no explicit set of components is given -* The default behavior for any test suite or benchmark component which has been - built is to also run it - -You can use the `--no-run-tests` and `--no-run-benchmarks` (from stack-0.1.4.0 -and on) flags to disable running of these components. You can also use -`--no-rerun-tests` to prevent running a test suite which has already passed and -has not changed. - -NOTE: stack doesn't build or run test suites and benchmarks for non-local -packages. This is done so that running a command like `stack test` doesn't need -to run 200 test suites! - -## Multi-package projects - -Until now, everything we've done with stack has used a single-package project. -However, stack's power truly shines when you're working on multi-package -projects. 
All the functionality you'd expect to work just does: dependencies -between packages are detected and respected, dependencies of all packages are -built just as one cohesive whole, and if anything fails to build, the build command -exits appropriately. - -Let's demonstrate this with the wai-app-static and yackage packages: - -``` -michael@d30748af6d3d:~$ mkdir multi -michael@d30748af6d3d:~$ cd multi/ -michael@d30748af6d3d:~/multi$ stack unpack wai-app-static-3.1.1 yackage-0.8.0 -wai-app-static-3.1.1: download -Unpacked wai-app-static-3.1.1 to /home/michael/multi/wai-app-static-3.1.1/ -Unpacked yackage-0.8.0 to /home/michael/multi/yackage-0.8.0/ -michael@d30748af6d3d:~/multi$ stack init -Writing default config file to: /home/michael/multi/stack.yaml -Basing on cabal files: -- /home/michael/multi/yackage-0.8.0/yackage.cabal -- /home/michael/multi/wai-app-static-3.1.1/wai-app-static.cabal - -Checking against build plan lts-3.2 -Selected resolver: lts-3.2 -Wrote project config to: /home/michael/multi/stack.yaml -michael@d30748af6d3d:~/multi$ stack build --haddock --test -# Goes off to build a whole bunch of packages -``` - -If you look at the `stack.yaml`, you'll see exactly what you'd expect: - -```yaml -flags: - yackage: - upload: true - wai-app-static: - print: false -packages: -- yackage-0.8.0/ -- wai-app-static-3.1.1/ -extra-deps: [] -resolver: lts-3.2 -``` - -Notice that multiple directories are listed in the `packages` key. - -In addition to local directories, you can also refer to packages available in a -Git repository or in a tarball over HTTP/HTTPS. This can be useful for using a -modified version of a dependency that hasn't yet been released upstream. - -Please note that when adding upstream packages directly to your project it is -important to distinguish _local packages_ from the upstream _dependency -packages_. Otherwise you may have trouble running `stack ghci`. See -[stack.yaml documentation](yaml_configuration.md#packages) for more details. 
- -## Flags and GHC options - -There are two common ways to alter how a package will install: with Cabal flags -and with GHC options. - -### Cabal flag management - -In the `stack.yaml` file above, you can see that `stack init` has detected that — -for the yackage package — the upload flag can be set to true, and for -wai-app-static, the print flag to false (it's chosen those values because -they're the default flag values, and their dependencies are compatible with the -snapshot we're using.) To change a flag setting, we can use the command -line `--flag` option: - - stack build --flag yackage:-upload - -This means: when compiling the yackage package, turn off the upload flag (thus -the `-`). Unlike other tools, stack is explicit about which package's flag you -want to change. It does this for two reasons: - -1. There's no global meaning for Cabal flags, and therefore two packages can - use the same flag name for completely different things. -2. By following this approach, we can avoid unnecessarily recompiling snapshot - packages that happen to use a flag that we're using. - -You can also change flag values on the command line for extra-dep and snapshot -packages. If you do this, that package will automatically be promoted to an -extra-dep, since the build plan is different than what the plan snapshot -definition would entail. - -### GHC options - -GHC options follow a similar logic as in managing Cabal flags, with a few -nuances to adjust for common use cases. Let's consider: - - stack build --ghc-options="-Wall -Werror" - -This will set the `-Wall -Werror` options for all *local targets*. Note that -this will not affect extra-dep and snapshot packages at all. This design -provides us with reproducible and fast builds. - -(By the way: the above GHC options have a special convenience flag: -`--pedantic`.) 
- -There's one extra nuance about command line GHC options: Since they only apply -to local targets, if you change your local targets, they will no longer apply -to other packages. Let's play around with an example from the wai repository, -which includes the wai and warp packages, the latter depending on the former. -If we run: - - stack build --ghc-options=-O0 wai - -It will build all of the dependencies of wai, and then build wai with all -optimizations disabled. Now let's add in warp as well: - - stack build --ghc-options=-O0 wai warp - -This builds the additional dependencies for warp, and then builds warp with -optimizations disabled. Importantly: it does not rebuild wai, since wai's -configuration has not been altered. Now the surprising case: - -``` -michael@d30748af6d3d:~/wai$ stack build --ghc-options=-O0 warp -wai-3.0.3.0-5a49351d03cba6cbaf906972d788e65d: unregistering (flags changed from ["--ghc-options","-O0"] to []) -warp-3.1.3-a91c7c3108f63376877cb3cd5dbe8a7a: unregistering (missing dependencies: wai) -wai-3.0.3.0: configure -``` - -You may expect this to be a no-op: neither wai nor warp has changed. However, -stack will instead recompile wai with optimizations enabled again, and then -rebuild warp (with optimizations disabled) against this newly built wai. The -reason: reproducible builds. If we'd never built wai or warp before, trying to -build warp would necessitate building all of its dependencies, and it would do -so with default GHC options (optimizations enabled). This dependency would -include wai. So when we run: - - stack build --ghc-options=-O0 warp - -We want its behavior to be unaffected by any previous build steps we took. -While this specific corner case does catch people by surprise, the overall goal -of reproducible builds is- in the stack maintainers' views- worth the -confusion. 
- -Final point: if you have GHC options that you'll be regularly passing to your -packages, you can add them to your `stack.yaml` file (starting with -stack-0.1.4.0). See [the documentation section on -ghc-options](yaml_configuration.md#ghc-options) -for more information. - -## path - -NOTE: That's it, the heavy content of this guide is done! Everything from here -on out is simple explanations of commands. Congratulations! - -Generally, you don't need to worry about where stack stores various files. But -some people like to know this stuff. That's when the `stack path` command is -useful. - -``` -michael@d30748af6d3d:~/wai$ stack path -global-stack-root: /home/michael/.stack -stack-root: /home/michael/.stack -project-root: /home/michael/wai -config-location: /home/michael/wai/stack.yaml -bin-path: /home/michael/.stack/snapshots/x86_64-linux/lts-2.17/7.8.4/bin:/home/michael/.stack/programs/x86_64-linux/ghc-7.8.4/bin:/home/michael/.stack/programs/x86_64-linux/ghc-7.10.2/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin -programs: /home/michael/.stack/programs/x86_64-linux -compiler: /home/michael/.stack/programs/x86_64-linux/ghc-7.8.4/bin/ghc -compiler-bin: /home/michael/.stack/programs/x86_64-linux/ghc-7.8.4/bin -local-bin-path: /home/michael/.local/bin -extra-include-dirs: -extra-library-dirs: -snapshot-pkg-db: /home/michael/.stack/snapshots/x86_64-linux/lts-2.17/7.8.4/pkgdb -local-pkg-db: /home/michael/wai/.stack-work/install/x86_64-linux/lts-2.17/7.8.4/pkgdb -global-pkg-db: /home/michael/.stack/programs/x86_64-linux/ghc-7.8.4/lib/ghc-7.8.4/package.conf.d -ghc-package-path: /home/michael/wai/.stack-work/install/x86_64-linux/lts-2.17/7.8.4/pkgdb:/home/michael/.stack/snapshots/x86_64-linux/lts-2.17/7.8.4/pkgdb:/home/michael/.stack/programs/x86_64-linux/ghc-7.8.4/lib/ghc-7.8.4/package.conf.d -snapshot-install-root: /home/michael/.stack/snapshots/x86_64-linux/lts-2.17/7.8.4 -local-install-root: 
/home/michael/wai/.stack-work/install/x86_64-linux/lts-2.17/7.8.4 -snapshot-doc-root: /home/michael/.stack/snapshots/x86_64-linux/lts-2.17/7.8.4/doc -local-doc-root: /home/michael/wai/.stack-work/install/x86_64-linux/lts-2.17/7.8.4/doc -dist-dir: .stack-work/dist/x86_64-linux/Cabal-1.18.1.5 -local-hpc-root: /home/michael/wai/.stack-work/install/x86_64-linux/lts-2.17/7.8.4/hpc -``` - -In addition, `stack path` accepts command line arguments to state which of -these keys you're interested in, which can be convenient for scripting. As a -simple example, let's find out the sandboxed versions of GHC that stack installed: - -``` -michael@d30748af6d3d:~/wai$ ls $(stack path --programs)/*.installed -/home/michael/.stack/programs/x86_64-linux/ghc-7.10.2.installed -/home/michael/.stack/programs/x86_64-linux/ghc-7.8.4.installed -``` - -(Yes, that command requires a \*nix shell, and likely won't run on Windows.) - -While we're talking about paths, to wipe our stack install completely, here's -what needs to be removed: - -1. The stack executable itself -2. The stack root, e.g. `$HOME/.stack` on non-Windows systems or, on Windows, - `%LOCALAPPDATA%\Programs\stack`. - * See `stack path --stack-root` - * On Windows, you will also need to delete `stack path --programs` -3. Any local `.stack-work` directories inside a project - -## exec - -We've already used `stack exec` multiple times in this guide. As you've -likely already guessed, it allows you to run executables, but with a slightly -modified environment. In particular: `stack exec` looks for executables on -stack's bin paths, and sets a few additional environment variables (like adding -those paths to `PATH`, and setting `GHC_PACKAGE_PATH`, which tells GHC which -package databases to use). - -If you want to see exactly what the modified environment looks like, try: - - stack exec env - -The only issue is how to distinguish flags to be passed to stack versus those -for the underlying program. 
Thanks to the optparse-applicative library, stack -follows the Unix convention of `--` to separate these, e.g.: - -``` -michael@d30748af6d3d:~$ stack exec --package stm -- echo I installed the stm package via --package stm -Run from outside a project, using implicit global project config -Using latest snapshot resolver: lts-16.15 -Writing global (non-project-specific) config file to: /home/michael/.stack/global/stack.yaml -Note: You can change the snapshot via the resolver field there. -I installed the stm package via --package stm -``` - -Flags worth mentioning: - -* `--package foo` can be used to force a package to be installed before running - the given command. -* `--no-ghc-package-path` can be used to stop the `GHC_PACKAGE_PATH` environment - variable from being set. Some tools — notably cabal-install — do not behave - well with that variable set. - -## ghci (the repl) - -GHCi is the interactive GHC environment, a.k.a. the REPL. You *could* access it -with: - - stack exec ghci - -But that won't load up locally written modules for access. For that, use the -`stack ghci` command. To then load modules from your project, use the `:m` -command (for "module") followed by the module name. - -IMPORTANT NOTE: If you have added upstream packages to your project please make -sure to mark them as *dependency package*s for faster and reliable usage of -`stack ghci`. Otherwise GHCi may have trouble due to conflicts of compilation -flags or having to unnecessarily interpret too many modules. See -[stack.yaml documentation](yaml_configuration.md#packages) to learn how to mark -a package as a *dependency package*. - -## ghc/runghc - -You'll sometimes want to just compile (or run) a single Haskell source file, -instead of creating an entire Cabal package for it. You can use `stack exec ghc` -or `stack exec runghc` for that. As simple helpers, we also provide the -`stack ghc` and `stack runghc` commands, for these common cases. 
- -## script interpreter - -stack also offers a very useful feature for running files: a script -interpreter. For too long have Haskellers felt shackled to bash or Python -because it's just too hard to create reusable source-only Haskell scripts. -stack attempts to solve that. - -You can use `stack <file name>` to execute a Haskell source file or specify -`stack` as the interpreter using a shebang line on Unix-like operating systems. -Additional stack options can be specified using a special Haskell comment in -the source file to specify dependencies and automatically install them before -running the file. - -An example will be easiest to understand: - -``` -michael@d30748af6d3d:~$ cat turtle-example.hs -#!/usr/bin/env stack --- stack --resolver lts-6.25 script --package turtle -{-# LANGUAGE OverloadedStrings #-} -import Turtle -main = echo "Hello World!" -michael@d30748af6d3d:~$ chmod +x turtle-example.hs -michael@d30748af6d3d:~$ ./turtle-example.hs -Completed 5 action(s). -Hello World! -michael@d30748af6d3d:~$ ./turtle-example.hs -Hello World! -``` - -The first run can take a while (as it has to download GHC if necessary and build -dependencies), but subsequent runs are able to reuse everything already built, -and are therefore quite fast. - -The first line in the source file is the usual "shebang" to use stack as a -script interpreter. The second line is a Haskell comment providing additional -options to stack (due to the common limitation of the "shebang" line only being -allowed a single argument). In this case, the options tell stack to use the -lts-6.25 resolver, automatically install GHC if it is not already installed, and -ensure the turtle package is available. - -If you're on Windows: you can run `stack turtle.hs` instead of `./turtle.hs`. -The shebang line is not required in that case. - -### Just-in-time compilation - -You can add the `--compile` flag to make stack compile the script, -and then run the compiled executable. 
Compilation is done quickly, -without optimization. To compile with optimization, use the `--optimize` flag -instead. Compilation is done only if needed; if the executable already exists, -and is newer than the script, stack just runs the executable directly. - -This feature can be good for speed (your script runs faster) and also -for durability (the executable remains runnable even if the script is -disturbed, eg due to changes in your installed ghc/snapshots, changes -to source files during git bisect, etc.) - -### Using multiple packages - -You can also specify multiple packages, either with multiple `--package` -arguments, or by providing a comma or space separated list. For example: - -``` -#!/usr/bin/env stack -{- stack - script - --resolver lts-6.25 - --package turtle - --package "stm async" - --package http-client,http-conduit --} -``` - -### Stack configuration for scripts - -With the `script` command, all Stack configuration files are ignored to provide a -completely reliable script running experience. However, see the example below -with `runghc` for an approach to scripts which will respect your configuration -files. When using `runghc`, if the current working directory is inside a -project then that project's stack configuration is effective when running the -script. Otherwise the script uses the global project configuration specified in -`~/.stack/global-project/stack.yaml`. - -### Specifying interpreter options - -The stack interpreter options comment must specify a single valid stack command -line, starting with `stack` as the command followed by the stack options to use -for executing this file. The comment must always be on the line immediately -following the shebang line when the shebang line is present otherwise it must -be the first line in the file. The comment must always start in the first -column of the line. 
- -When many options are needed a block style comment may be more convenient to -split the command on multiple lines for better readability. You can also -specify ghc options the same way as you would on command line i.e. by -separating the stack options and ghc options with a `--`. Here is an example of -a multi line block comment with ghc options: - -``` -#!/usr/bin/env stack -{- stack - script - --resolver lts-6.25 - --package turtle - -- - +RTS -s -RTS --} -``` - -### Writing independent and reliable scripts - -With the release of Stack 1.4.0, there is a new command, `script`, which will -automatically: - -* Install GHC and libraries if missing -* Require that all packages used be explicitly stated on the command line - -This ensures that your scripts are _independent_ of any prior deployment -specific configuration, and are _reliable_ by using exactly the same version of -all packages every time it runs so that the script does not break by -accidentally using incompatible package versions. - -In previous versions of Stack, the `runghc` command was used for scripts -instead. In order to achieve the same effect with the `runghc` command, you can -do the following: - -1. Use the `--install-ghc` option to install the compiler automatically -2. Explicitly specify all packages required by the script using the -`--package` option. Use `-hide-all-packages` ghc option to force -explicit specification of all packages. -3. Use the `--resolver` Stack option to ensure a specific GHC version and - package set is used. - -Even with this configuration, it is still possible for configuration -files to impact `stack runghc`, which is why `stack script` is strongly -recommended in general. 
For those curious, here is an example with `runghc`: - -``` -#!/usr/bin/env stack -{- stack - --resolver lts-6.25 - --install-ghc - runghc - --package base - --package turtle - -- - -hide-all-packages - -} -``` - -The `runghc` command is still very useful, especially when you're working on a -project and want to access the package databases and configurations used by -that project. See the next section for more information on configuration files. - -### Platform-specific script issues - -On Mac OSX: - -- Avoid `{-# LANGUAGE CPP #-}` in stack scripts; it breaks the hashbang line - ([GHC #6132](https://gitlab.haskell.org/ghc/ghc/issues/6132)) - -- Use a compiled executable, not another script, in the hashbang line. - Eg `#!/usr/bin/env runhaskell` will work but `#!/usr/local/bin/runhaskell` would not. - -### Loading scripts in ghci - -Sometimes you want to load your script in ghci REPL to play around with your -program. In those cases, you can use `exec ghci` option in the script to achieve -it. Here is an example: - -``` -#!/usr/bin/env stack -{- stack - --resolver lts-8.2 - --install-ghc - exec ghci - --package text --} -``` - -## Finding project configs, and the implicit global project - -Whenever you run something with stack, it needs a `stack.yaml` project file. The -algorithm stack uses to find this is: - -1. Check for a `--stack-yaml` option on the command line -2. Check for a `STACK_YAML` environment variable -3. Check the current directory and all ancestor directories for a `stack.yaml` - -The first two provide a convenient method for using an alternate configuration. -For example: `stack build --stack-yaml stack-7.8.yaml` can be used by your CI -system to check your code against GHC 7.8. Setting the `STACK_YAML` environment -variable can be convenient if you're going to be running commands like `stack ghc` -in other directories, but you want to use the configuration you defined in -a specific project. 
- -If stack does not find a `stack.yaml` in any of the three specified locations, -the *implicit global* logic kicks in. You've probably noticed that phrase a few -times in the output from commands above. Implicit global is essentially a hack -to allow stack to be useful in a non-project setting. When no implicit global -config file exists, stack creates one for you with the latest LTS snapshot as -the resolver. This allows you to do things like: - -* compile individual files easily with `stack ghc` -* build executables without starting a project, e.g. `stack install pandoc` - -Keep in mind that there's nothing magical about this implicit global -configuration. It has no impact on projects at all. Every package you install -with it is put into isolated databases just like everywhere else. The only magic -is that it's the catch-all project whenever you're running stack somewhere else. - -## Setting stack root location - -`stack path --stack-root` will tell you the location of the "stack root". Among -other things, this is where stack stores downloaded programs and snapshot -packages. This location can be configured by setting the STACK_ROOT environment -variable or passing the `--stack-root` commandline option. It is particularly -useful to do this on Windows, where filepaths are limited (MAX_PATH), and things -can break when this limit is exceeded. - -## `stack.yaml` vs `.cabal` files - -Now that we've covered a lot of stack use cases, this quick summary of -`stack.yaml` vs `.cabal` files will hopefully make sense and be a good reminder for -future uses of stack: - -* A project can have multiple packages. -* Each project has a `stack.yaml`. -* Each package has a `.cabal` file. -* The `.cabal` file specifies which packages are dependencies. -* The `stack.yaml` file specifies which packages are available to be used. 
-* `.cabal` specifies the components, modules, and build flags provided by a package -* `stack.yaml` can override the flag settings for individual packages -* `stack.yaml` specifies which packages to include - -## Comparison to other tools - -stack is not the only tool around for building Haskell code. stack came into -existence due to limitations with some of the existing tools. If you're -unaffected by those limitations and are happily building Haskell code, you may -not need stack. If you're suffering from some of the common problems in other -tools, give stack a try instead. - -If you're a new user who has no experience with other tools, we recommend going -with stack. The defaults match modern best practices in Haskell development, and -there are less corner cases you need to be aware of. You *can* develop Haskell -code with other tools, but you probably want to spend your time writing code, -not convincing a tool to do what you want. - -Before jumping into the differences, let me clarify an important similarity: - -__Same package format.__ stack, cabal-install, and presumably all other tools -share the same underlying Cabal package format, consisting of a `.cabal` file, -modules, etc. This is a Good Thing: we can share the same set of upstream -libraries, and collaboratively work on the same project with stack, -cabal-install, and NixOS. In that sense, we're sharing the same ecosystem. - -Now the differences: - -* __Curation vs dependency solving as a default__. - * stack defaults to using curation (Stackage snapshots, LTS Haskell, - Nightly, etc) as a default instead of defaulting to dependency solving, as - cabal-install does. This is just a default: as described above, stack can - use dependency solving if desired, and cabal-install can use curation. - However, most users will stick to the defaults. 
The stack team firmly - believes that the majority of users want to simply ignore dependency - resolution nightmares and get a valid build plan from day 1, which is why - we've made this selection of default behavior. -* __Reproducible__. - * stack goes to great lengths to ensure that `stack build` today does the - same thing tomorrow. cabal-install does not: build plans can be affected - by the presence of preinstalled packages, and running `cabal update` can - cause a previously successful build to fail. With stack, changing the - build plan is always an explicit decision. -* __Automatically building dependencies__. - * In cabal-install, you need to use `cabal install` to trigger dependency - building. This is somewhat necessary due to the previous point, since - building dependencies can, in some cases, break existing installed - packages. So for example, in stack, `stack test` does the same job as - `cabal install --run-tests`, though the latter *additionally* performs an - installation that you may not want. The closer command equivalent is - `cabal install --enable-tests --only-dependencies && cabal configure --enable-tests && cabal build && cabal test` - (newer versions of - cabal-install may make this command shorter). -* __Isolated by default__. - * This has been a pain point for new stack users. In cabal, the - default behavior is a non-isolated build where working on two projects can - cause the user package database to become corrupted. The cabal solution to - this is sandboxes. stack, however, provides this behavior by default via - its databases. In other words: when you use stack, there's __no need for - sandboxes__, everything is (essentially) sandboxed by default. 
- -__Other tools for comparison (including active and historical)__ - -* [cabal-dev](https://hackage.haskell.org/package/cabal-dev) (deprecated in favor of cabal-install) -* [cabal-meta](https://hackage.haskell.org/package/cabal-meta) inspired a lot of the multi-package functionality of stack. If you're still using cabal-install, cabal-meta is relevant. For stack work, the feature set is fully subsumed by stack. -* [cabal-src](https://hackage.haskell.org/package/cabal-src) is mostly irrelevant in the presence of both stack and cabal sandboxes, both of which make it easier to add additional package sources easily. The mega-sdist executable that ships with cabal-src is, however, still relevant. Its functionality may some day be folded into stack -* [stackage-cli](https://hackage.haskell.org/package/stackage-cli) was an initial attempt to make cabal-install work more easily with curated snapshots, but due to a slight impedance mismatch between cabal.config constraints and snapshots, it did not work as well as hoped. It is deprecated in favor of stack. - - -## Fun features - -This is just a quick collection of fun and useful feature stack supports. - -### Templates - -We started off using the `new` command to create a project. stack provides -multiple templates to start a new project from: - -``` -michael@d30748af6d3d:~$ stack templates -# Stack Templates - -The `stack new` command will create a new project based on a project template. -Templates can be located on the local filesystem, on Github, or arbitrary URLs. 
-For more information, please see the user guide: - -https://docs.haskellstack.org/en/stable/GUIDE/#templates - -There are many templates available, some simple examples: - - stack new myproj # uses the default template - stack new myproj2 rio # uses the rio template - stack new website yesodweb/sqlite # Yesod server with SQLite DB - -For more information and other templates, please see the `stack-templates` -Wiki: - -https://github.com/commercialhaskell/stack-templates/wiki - -Please feel free to add your own templates to the Wiki for discoverability. - -Want to improve this text? Send us a PR! - -https://github.com/commercialhaskell/stack-templates/edit/master/STACK_HELP.md -``` - -You can specify one of these templates following your project name -in the `stack new` command: - -``` -michael@d30748af6d3d:~$ stack new my-yesod-project yesodweb/simple -Downloading template "yesod-simple" to create project "my-yesod-project" in my-yesod-project/ ... -Using the following authorship configuration: -author-email: example@example.com -author-name: Example Author Name -Copy these to /home/michael/.stack/config.yaml and edit to use different values. -Writing default config file to: /home/michael/my-yesod-project/stack.yaml -Basing on cabal files: -- /home/michael/my-yesod-project/my-yesod-project.cabal - -Checking against build plan lts-3.2 -Selected resolver: lts-3.2 -Wrote project config to: /home/michael/my-yesod-project/stack.yaml -``` - -The default `stack-templates` repository is on [Github](https://github.com/commercialhaskell/stack-templates), -under the user account `commercialstack`. You can download templates from a -different Github user by prefixing the username and a slash: - -``` -stack new my-yesod-project yesodweb/simple -``` - -Then it would be downloaded from Github, user account `yesodweb`, -repo `stack-templates`, and file `yesod-simple.hsfiles`. 
- -You can even download templates from a service other that Github, such as -[Gitlab](https://gitlab.com) or [Bitbucket](https://bitbucket.com): - -``` -stack new my-project gitlab:user29/foo -``` - -That template would be downloaded from Gitlab, user account `user29`, -repo `stack-templates`, and file `foo.hsfiles`. - -If you need more flexibility, you can specify the full URL of the template: - -``` -stack new my-project https://my-site.com/content/template9.hsfiles -``` - -(The `.hsfiles` extension is optional; it will be added if it's not specified.) - -Alternatively you can use a local template by specifying the path: - -``` -stack new project ~/location/of/your/template.hsfiles -``` - -As a starting point for creating your own templates, you can use [the "simple" template](https://github.com/commercialhaskell/stack-templates/blob/master/simple.hsfiles). -An introduction into template-writing and a place for submitting official templates, -you will find at [the stack-templates repository](https://github.com/commercialhaskell/stack-templates#readme). - -### Editor integration - -For editor integration, stack has a related project called -[intero](https://github.com/commercialhaskell/intero). It is particularly well -supported by emacs, but some other editors have integration for it as well. - -### Visualizing dependencies - -If you'd like to get some insight into the dependency tree of your packages, you -can use the `stack dot` command and Graphviz. More information is -[available in the Dependency visualization documentation](dependency_visualization.md). - -### Travis with caching - -This content has been moved to a dedicated -[Travis CI document](https://docs.haskellstack.org/en/stable/travis_ci/). - -### Shell auto-completion - -Love tab-completion of commands? You're not alone. 
If you're on bash, just run -the following (or add it to `.bashrc`): - - eval "$(stack --bash-completion-script stack)" - -For more information and other shells, see [the Shell auto-completion wiki -page](https://docs.haskellstack.org/en/stable/shell_autocompletion) - -### Docker - -Stack is able to build your code inside a Docker image, which means -even more reproducibility to your builds, since you and the rest of -your team will always have the same system libraries. - -### Nix - -stack provides an integration with [Nix](http://nixos.org/nix), -providing you with the same two benefits as the first Docker -integration discussed above: - -* more reproducible builds, since fixed versions of any system - libraries and commands required to build the project are - automatically built using Nix and managed locally per-project. These - system packages never conflict with any existing versions of these - libraries on your system. That they are managed locally to the - project means that you don't need to alter your system in any way to - build any odd project pulled from the Internet. -* implicit sharing of system packages between projects, so you don't - have more copies on-disk than you need to. - -When using the Nix integration, Stack downloads and builds Haskell dependencies -as usual, but resorts on Nix to provide non-Haskell dependencies that exist in -the Nixpkgs. - -Both Docker and Nix are methods to *isolate* builds and thereby make -them more reproducible. They just differ in the means of achieving -this isolation. Nix provides slightly weaker isolation guarantees than -Docker, but is more lightweight and more portable (Linux and OS -X mainly, but also Windows). For more on Nix, its command-line -interface and its package description language, read the -[Nix manual](http://nixos.org/nix/manual). 
But keep in mind that the -point of stack's support is to obviate the need to write any Nix code -in the common case or even to learn how to use the Nix tools (they're -called under the hood). - -For more information, see -[the Nix-integration documentation](nix_integration.md). - -## Power user commands - -The following commands are a little more powerful, and won't be needed by all -users. Here's a quick rundown: - -* `stack update` will download the most recent set of packages from your package - indices (e.g. Hackage). Generally, stack runs this for you automatically - when necessary, but it can be useful to do this manually sometimes. -* `stack unpack` is a command we've already used quite a bit for examples, but - most users won't use it regularly. It does what you'd expect: downloads a - tarball and unpacks it. It accept optional `--to` argument to specify - the destination directory. -* `stack sdist` generates an uploading tarball containing your package code -* `stack upload` uploads an sdist to Hackage. As of - version [1.1.0](https://docs.haskellstack.org/en/v1.1.0/ChangeLog/) stack - will also attempt to GPG sign your packages as - per - [our blog post](https://www.fpcomplete.com/blog/2016/05/stack-security-gnupg-keys). - * `--no-signature` disables signing of packages - * `username` and `password` can be read by environment - - ```bash - export $HACKAGE_USERNAME="" - export $HACKAGE_PASSWORD="" - ``` - -* `stack upgrade` will build a new version of stack from source. - * `--git` is a convenient way to get the most recent version from master for - those testing and living on the bleeding edge. -* `stack ls snapshots` will list all the local snapshots by - default. You can also view the remote snapshots using `stack ls - snapshots remote`. It also supports option for viewing only lts - (`-l`) and nightly (`-n`) snapshots. 
-* `stack ls dependencies` lists all of the packages and versions used for a - project -* `stack list [PACKAGE]...` list the version of the specified package(s) in a - snapshot, or without an argument list all the snapshot's package versions. - If no resolver is specified the latest package version from Hackage is given. -* `stack sig` subcommand can help you with GPG signing & verification - * `sign` will sign an sdist tarball and submit the signature to - sig.commercialhaskell.org for storage in the sig-archive git repo. - (Signatures will be used later to verify package integrity.) - -## Debugging - -To profile a component of the current project, simply pass the `--profile` -flag to `stack`. The `--profile` flag turns on the `--enable-library-profiling` -and `--enable-executable-profiling` Cabal options _and_ passes the `+RTS -p` -runtime options to any testsuites and benchmarks. - -For example the following command will build the `my-tests` testsuite with -profiling options and create a `my-tests.prof` file in the current directory -as a result of the test run. - - stack test --profile my-tests - -The `my-tests.prof` file now contains time and allocation info for the test run. - -To create a profiling report for an executable, e.g. `my-exe`, you can -run - - stack exec --profile -- my-exe +RTS -p - -For more fine-grained control of compilation options there are the -`--library-profiling` and `--executable-profiling` flags which will turn on the -`--enable-library-profiling` and `--enable-executable-profiling` Cabal -options respectively. -Custom GHC options can be passed in with `--ghc-options "more options here"`. 
- -To enable compilation with profiling options by default you can add the -following snippet to your `stack.yaml` or `~/.stack/config.yaml`: - -``` -build: - library-profiling: true - executable-profiling: true -``` - -### Further reading - -For more commands and uses, see [the official GHC chapter on -profiling](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/profiling.html), -[the Haskell wiki](https://wiki.haskell.org/How_to_profile_a_Haskell_program), -and [the chapter on profiling in Real World -Haskell](http://book.realworldhaskell.org/read/profiling-and-optimization.html). - -### Tracing - -To generate a backtrace in case of exceptions during a test or benchmarks run, -use the `--trace` flag. Like `--profile` this compiles with profiling options, -but adds the `+RTS -xc` runtime option. - -### Debugging symbols - -Building with debugging symbols in the [DWARF information](https://ghc.haskell.org/trac/ghc/wiki/DWARF) is supported by `stack`. This can be done by passing the flag `--ghc-options="-g"` and also to override the default behaviour of stripping executables of debugging symbols by passing either one of the following flags: `--no-strip`, `--no-library-stripping` or `--no-executable-stripping`. - -In Windows GDB can be isntalled to debug an executable with `stack exec -- pacman -S gdb`. Windows visual studio compiler's debugging format PDB is not supported at the moment. This might be possible by [separating](https://stackoverflow.com/questions/866721/how-to-generate-gcc-debug-symbol-outside-the-build-target) debugging symbols and [converting](https://github.com/rainers/cv2pdb) their format. Or as an option when [using the LLVM backend](http://blog.llvm.org/2017/08/llvm-on-windows-now-supports-pdb-debug.html). 
- -## More resources - -There are lots of resources available for learning more about stack: - -* `stack --help` -* `stack --version` — identify the version and Git hash of the stack executable -* `--verbose` (or `-v`) — much more info about internal operations (useful for bug reports) -* The [home page](http://haskellstack.org) -* The [stack mailing list](https://groups.google.com/d/forum/haskell-stack) -* The [FAQ](faq.md) -* The [stack wiki](https://github.com/commercialhaskell/stack/wiki) -* The [haskell-stack tag on Stack Overflow](http://stackoverflow.com/questions/tagged/haskell-stack) -* [Another getting started with stack tutorial](http://seanhess.github.io/2015/08/04/practical-haskell-getting-started.html) -* [Why is stack not cabal?](https://www.fpcomplete.com/blog/2015/06/why-is-stack-not-cabal) diff --git a/doc/Makefile b/doc/Makefile new file mode 100644 index 0000000000..5cc272536a --- /dev/null +++ b/doc/Makefile @@ -0,0 +1,22 @@ +# With the necessary prerequisites, this file allows users of the 'make' tool to +# automate the use of the 'mkdocs' tool to preview or build Stack's online +# documentation. See CONTRIBUTING.md and the file 'Makefile' in Stack's project +# directory for more information. + +PYTHON_VIRTUALENV:=.python-doc-virtualenv +MK_DOCS_CMD:=$(PYTHON_VIRTUALENV)/bin/mkdocs + +../$(PYTHON_VIRTUALENV)/bin/activate: + +# Note, the python3 command is not used with Python on Windows + python3 -m venv ../$(PYTHON_VIRTUALENV) + +# Note, in the MSYS2 environment, the development versions of the libxml2 and +# libxslt packages are also required but not installed by pip + (. ../$(PYTHON_VIRTUALENV)/bin/activate && pip install -r requirements.txt) + +docs-serve: ../$(PYTHON_VIRTUALENV)/bin/activate + cd .. && $(MK_DOCS_CMD) serve + +docs-build: ../$(PYTHON_VIRTUALENV)/bin/activate + cd .. 
&& $(MK_DOCS_CMD) build diff --git a/doc/README.md b/doc/README.md index fc58398734..515208c81b 100644 --- a/doc/README.md +++ b/doc/README.md @@ -1,219 +1,431 @@
-# The Haskell Tool Stack +# Welcome to Stack -Stack is a cross-platform program for developing Haskell -projects. It is aimed at Haskellers both new and experienced. +Welcome to the [Haskell](https://www.haskell.org/) programming language and +Stack! Stack is an established program for developing Haskell projects.[^1] It +is aimed at new and experienced users of Haskell and seeks to support them fully +on Linux, macOS and Windows. - +[^1]: + The project's first public commit was on 29 April 2015. It changed its name + to the Haskell Tool Stack on 18 May 2015. It is now widely known simply as + Stack. -It features: +Haskell code is compiled by the +[Glasgow Haskell Compiler](https://www.haskell.org/ghc/) (GHC), which can also +be used interactively. -* Installing GHC automatically, in an isolated location. + + +Stack features include: + +* Installing GHC automatically. * Installing packages needed for your project. * Building your project. * Testing your project. * Benchmarking your project. +* Using GHC interactively. + +Stack is used at the command line. You will need terminal software for your +system (which will likely come with its operating system) and a program to edit +code files. There are a number of freely-available and popular code editors that +have Haskell extensions. + +## How to install Stack + +Stack can be installed on most Unix-like operating systems (including macOS) and +Windows. It will require at least about 5 GB of disk space, for use with one +version of GHC. + +Stack can be installed directly or by using the GHCup tool. + +=== "Directly" + + Stack can be installed directly on various operating systems. 
+ + === "Linux" + + For most Linux distributions, on x86_64 or AArch64 machine + architectures, the easiest way to install Stack is to command either: + + ~~~text + curl -sSL https://get.haskellstack.org/ | sh + ~~~ + + or: + + ~~~text + wget -qO- https://get.haskellstack.org/ | sh + ~~~ + + These commands download a script file and run it using `sh`. + + ??? question "Will the installation script need root access?" + + The script at [get.haskellstack.org](https://get.haskellstack.org/) + will ask for root access using `sudo`. It needs such access in order + to use your platform's package manager to install dependencies and + to install to `/usr/local/bin`. If you prefer more control, follow + the manual installation instructions in the guide to + [setting up](install_and_upgrade.md). + + === "macOS" + + From late 2020, Apple began a transition from Mac computers with Intel + processors (Intel-based Mac) to + [Mac computers with Apple silicon](https://support.apple.com/en-gb/HT211814). + + === "Intel-based" + + For most Intel-based Mac computers, the easiest way to install Stack + is to command either: + + ~~~text + curl -sSL https://get.haskellstack.org/ | sh + ~~~ + + or: + + ~~~text + wget -qO- https://get.haskellstack.org/ | sh + ~~~ + + These commands download a script file and run it using `sh`. + + ??? question "Will the installation script need root access?" + + The script at + [get.haskellstack.org](https://get.haskellstack.org/) will ask + for root access using `sudo`. It needs such access in order + to use your platform's package manager to install dependencies + and to install to `/usr/local/bin`. If you prefer more control, + follow the manual installation instructions in the guide to + [setting up](install_and_upgrade.md). + + === "Apple silicon" + + Mac computers with Apple silicon have an M series chip. These chips + use an architecture known as ARM64 or AArch64. 
+ Most machines using the Windows operating system have an x86_64 + architecture. More recently, Microsoft has provided Windows on Arm that + runs on other processors.
+ + === "Windows on Arm" + + The GHC project does not yet provide a version of GHC that runs on + Windows on Arm. + + === "Other/direct downloads" + + For other operating systems and direct downloads see the guide to + [setting up](install_and_upgrade.md). + +=== "GHCup" + + The separate [GHCup](https://www.haskell.org/ghcup/) project provides a tool + that can be used to install Stack and other Haskell-related tools, including + GHC and + [Haskell Language Server](https://github.com/haskell/haskell-language-server) + (HLS). HLS is a program that is used by Haskell extensions for popular code + editors. -#### How to install + GHCup provides Stack for some combinations of machine architecture and + operating system not provided elsewhere. -Stack can be installed on most Unix-like (Un*x) operating systems, including -macOS, and on Windows. + By default, the script to install GHCup (which can be run more than once) + also configures Stack so that if Stack needs a version of GHC, GHCup takes + over obtaining and installing that version. -For most Un*x operating systems, the easiest way to install is to run: +??? question "How do I upgrade Stack?" - curl -sSL https://get.haskellstack.org/ | sh + Follow the advice under [setting up](install_and_upgrade.md#upgrade-stack). -or: +??? question "How do I remove Stack?" - wget -qO- https://get.haskellstack.org/ | sh + For information about how to uninstall Stack, command: -On Windows, you can download and install the -[Windows 64-bit Installer](https://get.haskellstack.org/stable/windows-x86_64-installer.exe). + ~~~text + stack uninstall + ~~~ -For other operating systems and direct downloads, check out the -[install and upgrade guide](install_and_upgrade.md). 
+ To uninstall Stack, it should be sufficient to delete: -Note that the [get.haskellstack.org](https://get.haskellstack.org/) -script will ask for root access using `sudo` in order to use your -platform's package manager to install dependencies and to install to -`/usr/local/bin`. If you prefer more control, follow the manual -installation instructions in the -[install and upgrade guide](install_and_upgrade.md). + 1. the Stack root directory (see `stack path --stack-root`, before you + uninstall); + 2. if different, the directory containing Stack's global configuration file + (see `stack path --global-config`, before you uninstall); + 3. on Windows, the directory containing Stack's tools (see + `stack path --programs`, before you uninstall), which is usually located + outside of the Stack root directory; and + 4. the `stack` executable file (see `which stack`, on Unix-like operating + systems, or `where.exe stack`, on Windows). -#### How to upgrade + You may also want to delete ``.stack-work`` directories in any Haskell + projects that you have built using Stack. -If you already have `stack` installed, upgrade it to the latest version -by running: +## Quick Start guide - stack upgrade +Once Stack is installed, you can get an immediate experience of using it to +build an executable with Haskell. +### Step 1: Start your new project -#### Quick Start Guide +A complex project can have more than one package and each package can have more +than one executable (program). However, to start a new single-package project +named `my-project`, issue these four commands in a terminal (click +:material-plus-circle: to learn more about each command): -First you need to [install it (see previous section)](#how-to-install). +~~~shell +stack new my-project # (1)! +cd my-project # (2)! +stack build # (3)! +stack exec my-project-exe # (4)! +~~~ -##### Start your new project: +1. Create a new directory named `my-project`. 
It contains all the files needed + to start a project correctly, using a default template. -```bash -stack new my-project -cd my-project -stack setup -stack build -stack exec my-project-exe -``` +2. Change the current working directory to `my-project`. -- The `stack new` command will create a new directory containing all - the needed files to start a project correctly. -- The `stack setup` will download the compiler if necessary in an isolated - location (default `~/.stack`) that won't interfere with any system-level - installations. (For information on installation paths, please use the - `stack path` command.). -- The `stack build` command will build the minimal project. -- `stack exec my-project-exe` will execute the command. -- If you just want to install an executable using stack, then all you have to do - is `stack install `. +3. Build the template project and create an executable named `my-project-exe`. -If you want to launch a REPL: + First, if necessary, Stack will download a version of GHC in an isolated + location. That will not interfere with other GHC installations on your + system. (On Windows, if necessary, Stack will also download + [MSYS2](https://www.msys2.org/). MSYS2 is a project that provides popular + tools for developers on Windows.) -```bash -stack ghci -``` +4. Run (execute) the built executable, in Stack's environment. -Run `stack` for a complete list of commands. +For a complete list of Stack's commands, and flags and options common to those +commands, simply command: -##### Workflow +~~~text +stack +~~~ -The `stack new` command should have created the following files: +For help on a particular Stack command, including flags and options specific to +that command, for example `stack build`, command: -``` +~~~text +stack build --help +~~~ + +If you want to launch a run-eval-print loop (REPL) environment, then command: + +~~~shell +stack repl # (1)! +~~~ + +1. `stack ghci` can be used instead of `stack repl`. GHCi is GHC's REPL tool. 
+stack install <package-name>
Once `stack` finishes building, check the stack version with - `stack exec stack -- --version`. Make sure the version is the latest. -5. Look for issues tagged with - [newcomer friendly](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3a%22newcomer+friendly%22) - and - [awaiting pull request](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22) - labels. - -Build from source as a one-liner: - -```bash -git clone https://github.com/commercialhaskell/stack.git && \ -cd stack && \ -stack setup && \ -stack build -``` - -If you need to check your changes quickly run: - -```bash -stack ghci -λ: :main --stack-root /path/to/root/ --stack-yaml /path/to/stack.yaml COMMAND -``` - -This allows you to set a special stack root (instead of `~/.stack/` or, on -Windows, `%LOCALAPPDATA%\Programs\stack`) and to target your commands at a -particular `stack.yaml` instead of the one found in the current directory. - -#### Complete guide to stack - -This repository also contains a complete [user guide to using -stack](GUIDE.md), covering all of the most common use cases. - - -#### Questions, Feedback, Discussion - -* For frequently asked questions about detailed or specific use-cases, please - see [the FAQ](faq.md). -* For general questions, comments, feedback and support, please write - to [the stack mailing list](https://groups.google.com/d/forum/haskell-stack). -* For bugs, issues, or requests, please - [open an issue](https://github.com/commercialhaskell/stack/issues/new). -* When using Stack Overflow, please use [the haskell-stack - tag](http://stackoverflow.com/questions/tagged/haskell-stack). - -#### Why Stack? - -Stack is a build tool for Haskell designed to answer the needs of -Haskell users new and experienced alike. 
It has a strong focus on -reproducible build plans, multi-package projects, and a consistent, -easy-to-learn interface, while providing the customizability and -power experienced developers need. As a build tool, Stack does not -stand alone. It is built on the great work provided by: - -* The __Glasgow Haskell Compiler__ (GHC), the premier Haskell - compiler. Stack will manage your GHC installations and automatically - select the appropriate compiler version for your project. -* The __Cabal build system__, a specification for defining Haskell - packages, together with a library for performing builds. -* The __Hackage package repository__, providing more than ten thousand - open source libraries and applications to help you get your work - done. -* The __Stackage package collection__, a curated set of packages from - Hackage which are regularly tested for compatibility. Stack defaults - to using Stackage package sets to avoid dependency problems. - -Stack is provided by a team of volunteers and companies under the -auspices of the [Commercial Haskell](http://commercialhaskell.com/) -group. The project was spearheaded by -[FP Complete](https://www.fpcomplete.com/) to answer the needs of -commercial Haskell users, and has since become a thriving open source -project meeting the needs of Haskell users of all stripes. - -If you'd like to get involved with Stack, check out the -[newcomer friendly](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3a%22newcomer+friendly%22) -label on the Github issue tracker. - -#### How to uninstall -Removing ``~/.stack`` and ``/usr/local/bin/stack`` should be sufficient. You may want to delete ``.stack-work`` folders in any Haskell projects that you have built. +│   └── Lib.hs # (2)! +├── test +│ └── Spec.hs # (3)! +├── my-project.cabal # (4)! +├── package.yaml # (5)! +└── stack.yaml # (6)! +~~~ + +1. The Haskell source code for the executable (application). 
+4. A file describing the package in the Cabal format, including other packages
+   on which it depends. Stack generates it from the contents of the
+   `package.yaml` file.
+It also aims to provide the customisability and power that experienced
+developers need.
+ +- :fontawesome-solid-gears:{ .lg .middle } __Glasgow Haskell Compiler__ + + The premier Haskell compiler. Stack will manage your GHC + installations and automatically select the appropriate version of GHC for + your project. + + --- + + [:octicons-arrow-right-24: Learn more](https://www.haskell.org/ghc/) + +- :fontawesome-solid-trowel-bricks:{ .lg .middle } __Cabal build system__ + + A specification for defining Haskell packages and a library for performing + builds.[^2] + + [^2]: + Cabal is also the name of a tool used for building Haskell code, + provided by the `cabal-install` package. This guide distinguishes + between them by Cabal (the library) and Cabal (the tool). + + --- + + [:octicons-arrow-right-24: Learn more](https://hackage.haskell.org/package/Cabal) + +- :octicons-database-24:{ .lg .middle } __Hackage__ + + A repository of Haskell packages providing thousands of open source + libraries and applications to help you get your work done. + + --- + + [:octicons-arrow-right-24: Learn more](https://hackage.haskell.org/) + +- :fontawesome-solid-cubes-stacked:{ .lg .middle } __Stackage__ + + Sets of packages from Hackage that are chosen to work well together and + with a specific version of GHC. + + --- + + [:octicons-arrow-right-24: Learn more](https://www.stackage.org/) + +
+For general questions, please post to the
+[Haskell Community](https://discourse.haskell.org/about) forum.
For example: + +~~~text +stack-2.7.5-linux-x86_64-bin +stack-2.7.5-linux-x86_64-bin.asc +~~~ + +The signature can be verified with GPG, as follows: + +~~~text +# Receive the public key from a keyserver +gpg --keyserver keyserver.ubuntu.com --recv-keys 0x575159689BEFB442 +# Get information about the key +gpg --keyid-format long --list-keys 0x575159689BEFB442 +pub rsa2048/575159689BEFB442 2015-06-02 [SC] + C5705533DA4F78D8664B5DC0575159689BEFB442 +uid [ unknown] FPComplete +sub rsa2048/85A738994664AB89 2015-06-02 [E] + +# Attempt to verify the file using the signature file. The public key has not +# yet been certified with a trusted signature. +gpg --verify stack-2.7.5-linux-x86_64-bin.asc stack-2.7.5-linux-x86_64-bin +gpg: Signature made 06/03/2022 15:15:21 GMT Standard Time +gpg: using RSA key C5705533DA4F78D8664B5DC0575159689BEFB442 +gpg: Good signature from "FPComplete " [unknown] +gpg: WARNING: This key is not certified with a trusted signature! +gpg: There is no indication that the signature belongs to the owner. +Primary key fingerprint: C570 5533 DA4F 78D8 664B 5DC0 5751 5968 9BEF B442 +~~~ + +The GPG key with ID 0x575159689BEFB442, and keys it has signed, have been +uploaded to the +[Ubuntu Keyserver](https://keyserver.ubuntu.com/pks/lookup?search=0x575159689BEFB442&fingerprint=on&op=index). 
+ +This is the public key block for GPG key ID 0x575159689BEFB442: + +~~~text -----BEGIN PGP PUBLIC KEY BLOCK----- Version: GnuPG v1 @@ -40,4 +75,4 @@ F3mtEFEtmJ6ljSks5tECxfJFvQlkpILBbGvHfuljKMeaj+iN+bsHmV4em/ELB1ku N9Obs/bFDBMmQklIdLP7dOunDjY4FwwcFcXdNyg= =YUsC -----END PGP PUBLIC KEY BLOCK----- -``` +~~~ diff --git a/doc/appveyor.yml b/doc/appveyor.yml deleted file mode 100644 index 0eecd7f691..0000000000 --- a/doc/appveyor.yml +++ /dev/null @@ -1,40 +0,0 @@ -build: off - -before_test: -# http://help.appveyor.com/discussions/problems/6312-curl-command-not-found -- set PATH=C:\Program Files\Git\mingw64\bin;%PATH% - -- curl -sS -ostack.zip -L --insecure https://get.haskellstack.org/stable/windows-x86_64.zip -- 7z x stack.zip stack.exe - -clone_folder: "c:\\stack" -environment: - global: - STACK_ROOT: "c:\\sr" - - # Override the temp directory to avoid sed escaping issues - # See https://github.com/haskell/cabal/issues/5386 - TMP: "c:\\tmp" - - matrix: - - ARGS: "" - #- ARGS: "--resolver lts-2" - #- ARGS: "--resolver lts-3" - #- ARGS: "--resolver lts-6" - #- ARGS: "--resolver lts-7" - # - ARGS: "--resolver lts-9" - # - ARGS: "--resolver lts-11" - - ARGS: "--resolver lts-12" - - ARGS: "--resolver lts-13" - - ARGS: "--resolver lts-14" - - ARGS: "--resolver lts-15" - #- ARGS: "--resolver nightly" - -test_script: - -# Install toolchain, but do it silently due to lots of output -- stack %ARGS% setup > nul - -# The ugly echo "" hack is to avoid complaints about 0 being an invalid file -# descriptor -- echo "" | stack %ARGS% --no-terminal test diff --git a/doc/azure/azure-linux-template.yml b/doc/azure/azure-linux-template.yml index 4bcdba7acf..ceeb59e8fa 100644 --- a/doc/azure/azure-linux-template.yml +++ b/doc/azure/azure-linux-template.yml @@ -5,16 +5,12 @@ jobs: vmImage: ${{ parameters.vmImage }} strategy: matrix: - GHC 8.0: - ARGS: "--resolver lts-9" - GHC 8.2: - ARGS: "--resolver lts-11" - GHC 8.4: - ARGS: "--resolver lts-12" GHC 8.6: ARGS: "--resolver lts-14" GHC 
8.8: - ARGS: "--resolver lts-15" + ARGS: "--resolver lts-16" + GHC 8.10: + ARGS: "--resolver lts-17" nightly: ARGS: "--resolver nightly" steps: diff --git a/doc/azure/azure-osx-template.yml b/doc/azure/azure-osx-template.yml index fc3ad14b15..72d357adab 100644 --- a/doc/azure/azure-osx-template.yml +++ b/doc/azure/azure-osx-template.yml @@ -5,16 +5,12 @@ jobs: vmImage: ${{ parameters.vmImage }} strategy: matrix: - GHC 8.0: - ARGS: "--resolver lts-9" - GHC 8.2: - ARGS: "--resolver lts-11" - GHC 8.4: - ARGS: "--resolver lts-12" GHC 8.6: ARGS: "--resolver lts-14" GHC 8.8: - ARGS: "--resolver lts-15" + ARGS: "--resolver lts-16" + GHC 8.10: + ARGS: "--resolver lts-17" nightly: ARGS: "--resolver nightly" steps: diff --git a/doc/azure/azure-pipelines.yml b/doc/azure/azure-pipelines.yml index 499b2ef572..c19f2e72a8 100644 --- a/doc/azure/azure-pipelines.yml +++ b/doc/azure/azure-pipelines.yml @@ -5,10 +5,10 @@ # # https://docs.haskellstack.org/en/stable/azure_ci/ # -# Copy these contents into the root directory of your Github project in a file +# Copy these contents into the root directory of your GitHub project in a file # named azure-pipelines.yml # -# For better organization, you split various jobs into seprate parts +# For better organization, you split various jobs into separate parts # and each of them are controlled via individual file. 
jobs: - template: azure-linux-template.yml @@ -25,6 +25,6 @@ jobs: - template: azure-windows-template.yml parameters: - name: Windows - vmImage: windows-latest - os: windows + name: Windows + vmImage: windows-latest + os: windows diff --git a/doc/azure/azure-simple.yml b/doc/azure/azure-simple.yml index 4f7e8478db..dd28e02305 100644 --- a/doc/azure/azure-simple.yml +++ b/doc/azure/azure-simple.yml @@ -5,7 +5,7 @@ # # https://docs.haskellstack.org/en/stable/azure_ci/ # -# Copy these contents into the root directory of your Github project in a file +# Copy these contents into the root directory of your GitHub project in a file # named azure-pipelines.yml # Choose a agent virtual image diff --git a/doc/azure/azure-windows-template.yml b/doc/azure/azure-windows-template.yml index b60f02704d..3f15427da9 100644 --- a/doc/azure/azure-windows-template.yml +++ b/doc/azure/azure-windows-template.yml @@ -5,16 +5,12 @@ jobs: vmImage: ${{ parameters.vmImage }} strategy: matrix: - GHC 8.0: - ARGS: "--resolver lts-9" - GHC 8.2: - ARGS: "--resolver lts-11" - GHC 8.4: - ARGS: "--resolver lts-12" GHC 8.6: ARGS: "--resolver lts-14" GHC 8.8: - ARGS: "--resolver lts-15" + ARGS: "--resolver lts-16" + GHC 8.10: + ARGS: "--resolver lts-17" nightly: ARGS: "--resolver nightly" steps: diff --git a/doc/azure_ci.md b/doc/azure_ci.md deleted file mode 100644 index 3a6b5f0196..0000000000 --- a/doc/azure_ci.md +++ /dev/null @@ -1,169 +0,0 @@ -
- -# Azure CI - -This page documents how to use Stack on [Azure -CI](http://dev.azure.com/). - -## Quick Start - -Note that you have to create [azure -pipelines](#creating-azure-pipelines) for your project and then you -need to put the relevant configuration files: - -* For simple Azure configuration, copy-paste the - [azure-simple](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-simple.yml) - file into `azure-pipelines.yml`. -* For complex Azure configuration, you need to take the below linked - four files and put all of them into the `.azure` directory. - -For a more detailed explanation, you can read further. - -## Simple and Complex configuration - -We provide two fully baked configuration ready to be -used on your projects: - -* [The simple Azure configuration](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-simple.yml) - is intended for applications that do not require multiple GHC - support or cross-platform support. It builds and tests your project - with just the settings present in your `stack.yaml` file. -* The complex Azure configuration is intended for projects that need - to support multiple GHC versions and multiple OSes, such as open - source libraries to be released to Hackage. It tests against Stack - for different resolves on Linux, macOS and Windows. These are the - files for the complex configuration: - - [azure-pipelines.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-pipelines.yml) : This is the starter file used by the Azure CI. 
- - [azure-linux-template.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-linux-template.yml) : Template for Azure Linux build - - [azure-osx-template.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-osx-template.yml) : Template for Azure macOS build - - [azure-windows-template.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-windows-template.yml) : Template for Azure Windows build - - __NOTE__: It is likely going to be necessary to modify this configuration to - match the needs of your project, such as tweaking the build matrix to alter - which GHC versions you test against, or to specify GHC-version-specific - `stack.yaml` files if necessary. Don't be surprised if it doesn't work the - first time around. See the multiple GHC section below for more information. - -## Creating Azure Pipelines - -Each of these configurations is ready to be used immediately. But -before we go into where to put them, we have to create pipeline for -your project in Azure CI platform: - -* Go to [dev.azure.com](https://dev.azure.com). You have to initially - sign-in to your microsoft account there. -* Once you have logged in to your Microsoft account, you have to sign - in to [Azure - devops](https://user-images.githubusercontent.com/737477/52465678-70963080-2ba5-11e9-83d8-84112b140236.png) - from there. -* You will be [greeted with a - dashboard](https://user-images.githubusercontent.com/737477/52465677-70963080-2ba5-11e9-904a-c15c7c0524ef.png) - where you can create your projects. -* Click the "Create Project" button and fill the [relevant information - in the dialog](https://user-images.githubusercontent.com/737477/52465676-70963080-2ba5-11e9-82a4-093ee58f11c9.png) and then click the "Create" button. 
-* This will lead you to the [project - dashboard](https://user-images.githubusercontent.com/737477/52465675-6ffd9a00-2ba5-11e9-917e-3dec251fcc87.png) - page where you can create pipelines. -* Click on "Pipelines" in the left menu. This will load the [pipelines - page](https://user-images.githubusercontent.com/737477/52465673-6ffd9a00-2ba5-11e9-97a4-04e703ae1fbc.png) - on the right. -* Click on the button "New Pipeline" and you have to follow through - the wizard there. You need to choose your github repository (or - Azure repos) and follow the wizard. Note that in the [Configure - step](https://user-images.githubusercontent.com/737477/52465670-6ffd9a00-2ba5-11e9-83a3-9fffdacbf249.png) - you have to select the "Starter Pipeline". This will open up an - [editor - window](https://user-images.githubusercontent.com/737477/52465669-6f650380-2ba5-11e9-9662-e9c6fc2682b5.png). You - can leave the existing yaml configuration there as it is and click - the "Save and run" button. That will popup a - [dialog](https://user-images.githubusercontent.com/737477/52465668-6f650380-2ba5-11e9-9203-6347a609e3c4.png). Select - the relevant option and click "Save and run" button. (Note that this - step would have created `azure-pipelines.yml` in your repository, - you have replace that with the appropriate configuration file.) - -The rest of this document explains the details of common Azure -configurations for those of you who want to tweak the above -configuration files or write your own. - -*Note:* both Azure and Stack infrastructures are actively - developed. We try to document best practices at the moment. - -## Infrastructure - -Note that you need at least one agent to build your code. You can -specify which virtual image you want to choose using this configuration: - -``` yaml -pool: - vmImage: ubuntu-latest -``` - -The other popular options are `macOS-latest`, `windows-latest` for Mac -and Windows respectively. 
You can find the [complete -list](https://docs.microsoft.com/en-us/azure/devops/pipelines/agents/hosted?view=vsts&tabs=yaml) -here. You also have the option to select a specific supported ubuntu -version like `ubuntu-18.08`. - -## Installing Stack - -Currently there is only one reasonable way to install Stack: fetch precompiled -binary from the Github. - -```yaml -- script: | - mkdir -p ~/.local/bin - curl -L https://get.haskellstack.org/stable/linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack' - displayName: Install Stack -``` - -## Installing GHC - -There are two ways to install GHC: - -- Let Stack download GHC -- Install GHC using apt package manger. This method is only applicable - for Debian based images. - -But we only use the first method of using Stack to download GHC. - -### Multiple GHC - parametrised builds - -For different GHC versions, you probably want to use different -`stack.yaml` files. If you don't want to put a specific `stack.yaml` -for a particular resolver and still want to test it, you have specify -your resolver argument in `ARGS` environment variable (you will see an -example below). -``` -strategy: - matrix: - GHC 8.0: - ARGS: "--resolver lts-9" - GHC 8.2: - ARGS: "--resolver lts-11" - GHC 8.4: - ARGS: "--resolver lts-12" - GHC 8.6: - ARGS: "--resolver lts-14" - GHC 8.8: - ARGS: "--resolver lts-15" - nightly: - ARGS: "--resolver nightly" -``` - -## Running tests - -After the environment setup, actual test running is simple: - -```yaml -stack $ARGS test --bench --no-run-benchmarks --haddock --no-haddock-deps -``` - -## Other details - -Some Stack commands will run for long time. To avoid timeouts, use the [timeoutInMinutes](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/phases?tabs=yaml&view=azdevops#timeouts) for jobs. 
- -## Examples - -- [commercialhaskell/stack](https://github.com/commercialhaskell/stack/blob/master/azure-pipelines.yml) -- [psibi/tldr-hs](http://github.com/psibi/tldr-hs) -- [psibi/wai-slack-middleware](https://github.com/psibi/wai-slack-middleware) diff --git a/doc/build_command.md b/doc/build_command.md deleted file mode 100644 index c8e5765d03..0000000000 --- a/doc/build_command.md +++ /dev/null @@ -1,208 +0,0 @@ -
- -# Build command - -## Overview - -The primary command you use in stack is build. This page describes the build -command's interface. The goal of the interface is to do the right thing for -simple input, and allow a lot of flexibility for more complicated goals. See the -[build command section of the user guide](GUIDE.md#the-build-command) for info -beyond the CLI aspects of the build command. - -## Synonyms - -One potential point of confusion is the synonym commands for `build`. These are -provided to match commonly expected command line interfaces, and to make common -workflows shorter. The important thing to note is that all of these are just -the `build` command in disguise. Each of these commands are called out as -synonyms in the `--help` output. These commands are: - -* `stack test` is the same as `stack build --test` -* `stack bench` is the same as `stack build --bench` -* `stack haddock` is the same as `stack build --haddock` -* `stack install` is the same as `stack build --copy-bins` - -The advantage of the synonym commands is that they're convenient and short. The -advantage of the options is that they compose. For example, `stack build --test --copy-bins` -will build libraries, executables, and test suites, run the test -suites, and then copy the executables to your local bin path (more on this -below). - -## Components - -Components are a subtle yet important point to how build operates under the -surface. Every cabal package is made up of one or more components. It can have -0 or 1 libraries, and then 0 or more of executable, test, and benchmark -components. stack allows you to call out a specific component to be built, e.g. -`stack build mypackage:test:mytests` will build the `mytests` component of the -`mypackage` package. `mytests` must be a test suite component. - -We'll get into the details of the target syntax for how to select components in -the next section. 
In this section, the important point is: whenever you target -a test suite or a benchmark, it's built __and also run__, unless you explicitly -disable running via `--no-run-tests` or `--no-run-benchmarks`. Case in point: -the previous command will in fact build the `mytests` test suite *and* run it, -even though you haven't used the `stack test` command or the `--test` option. -(We'll get to what exactly `--test` does below.) - -This gives you a lot of flexibility in choosing what you want stack to do. You -can run a single test component from a package, run a test component from one -package and a benchmark from another package, etc. - -One final note on components: you can only control components for local -packages, not dependencies. With dependencies, stack will *always* build the -library (if present) and all executables, and ignore test suites and -benchmarks. If you want more control over a package, you must add it to your -`packages` setting in your stack.yaml file. - -## Target syntax - -In addition to a number of options (like the aforementioned `--test`), `stack build` -takes a list of zero or more *targets* to be built. There are a number -of different syntaxes supported for this list: - -* *package*, e.g. `stack build foobar`, is the most commonly used target. It - will try to find the package in the following locations: local packages, - extra dependencies, snapshots, and package index (e.g. Hackage). If it's - found in the package index, then the latest version of that package from - the index is implicitly added to your extra dependencies. - - This is where the `--test` and `--bench` flags come into play. If the - package is a local package, then all of the test suite and benchmark - components are selected to be built, respectively. In any event, the - library and executable components are also selected to be built. - -* *package identifier*, e.g. `stack build foobar-1.2.3`, is usually used to - include specific package versions from the index. 
If the version selected - conflicts with an existing local package or extra dep, then stack fails - with an error. Otherwise, this is the same as calling `stack build foobar`, - except instead of using the latest version from the index, the version - specified is used. - -* *component*. Instead of referring to an entire package and letting stack - decide which components to build, you select individual components from - inside a package. This can be done for more fine-grained control over which - test suites to run, or to have a faster compilation cycle. There are - multiple ways to refer to a specific component (provided for convenience): - - * `packagename:comptype:compname` is the most explicit. The available - comptypes are `exe`, `test`, and `bench`. - * Side note: When any `exe` component is specified, all of the package's executable components will be built. This is due to limitations in all currently released versions of Cabal. See [issue#1046](https://github.com/commercialhaskell/stack/issues/1406) - * `packagename:compname` allows you to leave off the component type, as - that will (almost?) always be redundant with the component name. For - example, `stack build mypackage:mytestsuite`. - * `:compname` is a useful shortcut, saying "find the component in all of - the local packages." This will result in an error if multiple packages - have a component with the same name. To continue the above example, - `stack build :mytestsuite`. - * Side note: the commonly requested `run` command is not available - because it's a simple combination of `stack build :exename && stack exec exename` - -* *directory*, e.g. `stack build foo/bar`, will find all local packages that - exist in the given directory hierarchy and then follow the same procedure as - passing in package names as mentioned above. There's an important caveat - here: if your directory name is parsed as one of the above target types, it - will be treated as that. 
Explicitly starting your target with `./` can be a - good way to avoid that, e.g. `stack build ./foo` - -Finally: if you provide no targets (e.g., running `stack build`), stack will -implicitly pass in all of your local packages. If you only want to target -packages in the current directory or deeper, you can pass in `.`, e.g. `stack build .`. - -To get a list of the available targets in your project, use `stack ide targets`. - -## Controlling what gets built - -Stack will automatically build the necessary -dependencies. See the -[build command section of the user guide](GUIDE.md#the-build-command) for -details of how these dependencies get specified. - -In addition to specifying targets, you can also control what gets built with the -following flags: - -* `--haddock`, to build documentation. This may cause a lot of packages to get - re-built, so that the documentation links work. - -* `--force-dirty`, to force rebuild of packages even when it doesn't seem - necessary based on file dirtiness. - -* `--reconfigure`, to force reconfiguration even when it doesn't seem necessary - based on file dirtiness. This is sometimes useful with custom Setup.hs files, - in particular when they depend on external data files. - -* `--dry-run`, to build nothing and output information about the build plan. - -* `--only-dependencies`, to skip building the targets. - -* `--only-snapshot`, to only build snapshot dependencies, which are cached and - shared with other projects. - -* `--keep-going`, to continue building packages even after some build step - fails. The packages which depend upon the failed build won't get built. - -* `--skip`, to skip building components of a local package. It allows - you to skip test suites and benchmark without specifying other components - (e.g. `stack test --skip long-test-suite` will run the tests without the - `long-test-suite` test suite). 
Be aware that skipping executables won't work - the first time the package is built due to - [an issue in cabal](https://github.com/commercialhaskell/stack/issues/3229). - This option can be specified multiple times to skip multiple components. - -## Flags - -There are a number of other flags accepted by `stack build`. Instead of listing -all of them, please use `stack build --help`. Some particularly convenient ones -worth mentioning here since they compose well with the rest of the build system -as described: - -* `--file-watch` will rebuild your project every time a file changes, by default - it will take into account all files belonging to the targets you specify, - alternatively one could specify `--watch-all` which will make Stack watch - any local files (from project packages or from local dependencies) -* `--exec "cmd [args]"` will run a command after a successful build - -To come back to the composable approach described above, consider this final -example (which uses the [wai repository](https://github.com/yesodweb/wai/)): - -``` -stack build --file-watch --test --copy-bins --haddock wai-extra :warp warp:doctest --exec 'echo Yay, it worked!' -``` - -This command will: - -* Start stack up in file watch mode, waiting for files in your project to - change. When first starting, and each time a file changes, it will do all of - the following. -* Build the wai-extra package and its test suites -* Build the `warp` executable -* Build the warp package's doctest component (which, as you may guess, is a - test site) -* Run all of the wai-extra package's test suite components and the doctest test - suite component -* If all of that succeeds: - * Copy generated executables to the local bin path - * Run the command `echo Yay, it worked!` - -## Build output - -Starting with Stack 2.1, output of all packages being built scrolls by in a -streaming fashion. The output from each package built will be prefixed by the -package name, e.g. `mtl> Building ...`. 
This will include the output from -dependencies being built, not just targets. - -To disable this behaviour, you can pass `--no-interleaved-output`, or add -`interleaved-output: false` to your `stack.yaml` file. When disabled: - - * When building a single target package (e.g., `stack build` in a project - with only one package, or `stack build package-name` in a multi-package - project), the build output from GHC will be hidden for building all - dependencies, and will be displayed for the one target package. - - * By default, when building multiple target packages, the output from these - will end up in a log file instead of on the console unless it contains - errors or warnings, to avoid problems of interleaved output and decrease - console noise. If you would like to see this content instead, you can use - the `--dump-logs` command line option, or add `dump-logs: all` to your - `stack.yaml` file. diff --git a/doc/build_overview.md b/doc/build_overview.md index 6e44863255..0795f4f9f6 100644 --- a/doc/build_overview.md +++ b/doc/build_overview.md @@ -1,15 +1,17 @@
-# Build Overview +# Build overview -__NOTE__ This document should *not be considered accurate* until this -note is removed. +!!! warning -This is a work-in-progress document covering the build process used by Stack. -It was started following the Pantry rewrite work in Stack (likely to -land as Stack 2.0), and contains some significant changes/simplifications from -how things used to work. This document will likely not fully be reflected in -the behavior of Stack itself until late in the Stack 2.0 development cycle. + This document should not be considered accurate until this warning is + removed. + + This is a work-in-progress document covering the build process used by + Stack. It was started following the Pantry rewrite work in Stack 2.1.1, and + contains some significant changes/simplifications from how things used to + work. This document will likely not fully be reflected in the behavior of + Stack itself until late in the Stack 2.0 development cycle. ## Terminology @@ -50,35 +52,36 @@ Given these inputs, Stack attempts the following process when performing a build ## Find the `stack.yaml` file -* Check for a `--stack-yaml` CLI arg, and use that -* Check for a `STACK_YAML` env var +* Check for a `--stack-yaml` or `-w` command line argument, and use that +* Check for a `STACK_YAML` environment variable * Look for a `stack.yaml` in this directory or ancestor directories * Fall back to the default global project This file is parsed to provide the following config values: -* `resolver` (required field) -* `compiler` (optional field) -* `packages` (optional field, defaults to `["."]`) -* `extra-deps` (optional field, defaults to `[]`) -* `flags` (optional field, defaults to `{}`) -* `ghc-options` (optional field, defaults to `{}`) +* `snapshot` (or, alternatively, `resolver` (deprecated)) (required key) +* `compiler` (optional key) +* `packages` (optional key, value defaults to `["."]`) +* `extra-deps` (optional key, value defaults to `[]`) +* `flags` (optional 
key, value defaults to `{}`) +* `ghc-options` (optional key, value defaults to `{}`) `flags` and `ghc-options` break down into both _by name_ (applied to a -specific package) and _general_ (general option `*` for flags is only available in CLI). +specific package) and _general_ (general option `*` for flags is only available +in CLI). ## Wanted compiler, dependencies, and project packages -* If the `--resolver` CLI is present, ignore the `resolver` and - `compiler` config values -* Load up the snapshot indicated by the `resolver` (either config - value or CLI arg). This will provide: +* If the `--snapshot` CLI is present, ignore the `snapshot` (or `resolver` + (deprecated)) and `compiler` config values +* Load up the indicated snapshot (either config value or CLI arg). This will + provide: * A map from package name to package location, flags, GHC options, and if a package should be hidden. All package locations here are immutable. * A wanted compiler version, e.g. `ghc-8.6.5` * If the `--compiler` CLI arg is set, or the `compiler` config value - is set (and `--resolver` CLI arg is not set), ignore the wanted + is set (and `--snapshot` CLI arg is not set), ignore the wanted compiler from the snapshot and use the specified wanted compiler * Parse `extra-deps` into a `Map PackageName PackageLocation`, containing both mutable and immutable package locations. Parse @@ -91,13 +94,16 @@ specific package) and _general_ (general option `*` for flags is only available snapshot packages that have been replaced. * Apply the `flags` and `ghc-options` by name to these packages overwriting any previous values coming from a snapshot. If any values are specified - but no matching package is found, it's an error. If a flag is not defined - in the corresponding package cabal file, it's an error. + but no matching package is found, it is an error. If a flag is not defined + in the corresponding package cabal file, it is an error. 
* We are now left with the following: * A wanted compiler version - * A map from package name to immutable packages with package config (flags, GHC options, hidden) - * A map from package name to mutable packages as dependencies with package config - * A map from package name to mutable packages as project packages with package config + * A map from package name to immutable packages with package config (flags, + GHC options, hidden) + * A map from package name to mutable packages as dependencies with package + config + * A map from package name to mutable packages as project packages with + package config ## Get actual compiler @@ -154,13 +160,13 @@ specific components). Named CLI flags are applied to specific packages by updating the config in one of the four maps. If a flag is specified and no package -is found, it's an error. Note that flag settings are added _on top of_ +is found, it is an error. Note that flag settings are added _on top of_ previous settings in this case, and does not replace them. That is, if previously we have `singleton (FlagName "foo") True` and now add `singleton (FlagName "bar") True`, both `foo` and `bar` will now be true. If any flags are specified but no matching package is found, -it's an error. If a flag is not defined in the corresponding package -cabal file, it's an error. +it is an error. If a flag is not defined in the corresponding package +Cabal file, it is an error. ## Apply CLI GHC options @@ -176,7 +182,7 @@ project package which uses that flag name. General options are divided into the following categories: -* `$locals` is deprecated, it's now a synonym for `$project` +* `$locals` is deprecated, it is now a synonym for `$project` * `$project` applies to all project packages, not to any dependencies * `$targets` applies to all project packages that are targets, not to any dependencies or non-target project packages. This is the default option @@ -193,17 +199,18 @@ they get prepended otherwise they get used as is. 
Use some deterministic binary serialization and SHA256 thereof to get a hash of the following information: -* Actual compiler (GHC version, path, *FIXME* probably some other - unique info from GHC, I've heard that `ghc --info` gives you - something) +* Actual compiler (GHC version, path, *FIXME* probably some other unique info + from GHC, I have heard that `ghc --info` gives you something) * Global database map * Immutable dependency map Motivation: Any package built from the immutable dependency map and installed in this database will never need to be rebuilt. -*FIXME* Caveat: do we need to take profiling settings into account -here? How about Haddock status? +!!! bug "To do" + + Caveat: do we need to take profiling settings into account here? How about + Haddock status? ## Determine actual target components @@ -222,21 +229,24 @@ here? How about Haddock status? * Apply flags, platform, and actual GHC version to resolve dependencies in any package analyzed * Include all library dependencies for all enabled components -* Include all build tool dependencies for all enabled components - (using the fun backwards compat logic for `build-tools`) +* Include all dependencies for tools used during building ('build tools') for + all enabled components (using the fun backwards compat logic for + `build-tools`) * Apply the logic recursively to come up with a full build plan -* If a task depends exclusively on immutable packages, mark it as - immutable. Otherwise, it's mutable. The former go into the snapshot - database, the latter into the local database. +* If a task depends exclusively on immutable packages, mark it as immutable. + Otherwise, it is mutable. The former go into the snapshot database, the latter + into the local database. We now have a set of tasks of packages/components to build, with full config information for each package, and dependencies that must be built first. 
-*FIXME* There's some logic to deal with cyclic dependencies between -test suites and benchmarks, where a task can be broken up into -individual components versus be kept as a single task. Need to -document this better. Currently it's the "all in one" logic. +!!! bug "To do" + + There is some logic to deal with cyclic dependencies between test suites and + benchmarks, where a task can be broken up into individual components versus + be kept as a single task. Need to document this better. Currently it is the + "all in one" logic. ## Unregister local modified packages @@ -258,7 +268,7 @@ document this better. Currently it's the "all in one" logic. * If all good: do nothing * Otherwise, for immutable tasks: check the precompiled cache for an identical package installation (same GHC, dependencies, etc). If - present: copy that over, and we're done. + present: copy that over, and we are done. * Otherwise, perform the build, register, write to the Stack specific "is installed" stuff, and (for immutable tasks) register to the precompiled cache diff --git a/doc/commands/bench_command.md b/doc/commands/bench_command.md new file mode 100644 index 0000000000..e4b014b39e --- /dev/null +++ b/doc/commands/bench_command.md @@ -0,0 +1,29 @@ +
+ +# The `stack bench` command + +~~~text +stack bench [TARGET] [--dry-run] [--pedantic] [--fast] [--ghc-options OPTIONS] + [--flag PACKAGE:[-]FLAG] [--dependencies-only | --only-snapshot | + --only-dependencies | --only-locals] [--file-watch | + --file-watch-poll] [--watch-all] [--exec COMMAND [ARGUMENT(S)]] + [--only-configure] [--trace] [--profile] [--no-strip] + [--[no-]library-profiling] [--[no-]executable-profiling] + [--[no-]library-stripping] [--[no-]executable-stripping] + [--[no-]haddock] [--haddock-arguments HADDOCK_ARGS] + [--[no-]open] [--[no-]haddock-deps] [--[no-]haddock-internal] + [--[no-]haddock-hyperlink-source] [--[no-]haddock-for-hackage] + [--[no-]copy-bins] [--[no-]copy-compiler-tool] [--[no-]prefetch] + [--[no-]keep-going] [--[no-]keep-tmp-files] [--[no-]force-dirty] + [--[no-]test] [--[no-]rerun-tests] [--ta|--test-arguments TEST_ARGS] + [--coverage] [--[no-]run-tests] [--test-suite-timeout SECONDS] + [--test-suite-timeout-grace SECONDS] [--[no-]tests-allow-stdin] + [--[no-]bench] [--ba|--benchmark-arguments BENCH_ARGS] + [--[no-]run-benchmarks] [--[no-]reconfigure] + [--cabal-verbosity VERBOSITY | --[no-]cabal-verbose] + [--[no-]split-objs] [--skip ARG] [--[no-]interleaved-output] + [--ddump-dir ARG] +~~~ + +`stack bench` is a synonym for `stack build --bench`. For further information, +see the documentation for the [`stack build`](build_command.md) command. diff --git a/doc/commands/build_command.md b/doc/commands/build_command.md new file mode 100644 index 0000000000..57df5563b1 --- /dev/null +++ b/doc/commands/build_command.md @@ -0,0 +1,1278 @@ +
+
+# The `stack build` command and its synonyms
+
+~~~text
+stack build [TARGET] [--dry-run] [--pedantic] [--fast] [--ghc-options OPTIONS]
+ [--flag PACKAGE:[-]FLAG] [--dependencies-only | --only-snapshot |
+ --only-dependencies | --only-locals] [--file-watch |
+ --file-watch-poll] [--watch-all] [--exec COMMAND [ARGUMENT(S)]]
+ [--only-configure] [--[no-]semaphore] [--trace] [--profile]
+ [--no-strip] [--[no-]library-profiling]
+ [--[no-]executable-profiling] [--[no-]library-stripping]
+ [--[no-]executable-stripping] [--[no-]haddock]
+ [--haddock-arguments HADDOCK_ARGS] [--[no-]open]
+ [--[no-]haddock-deps] [--[no-]haddock-internal]
+ [--[no-]haddock-hyperlink-source] [--[no-]haddock-for-hackage]
+ [--[no-]copy-bins] [--[no-]copy-compiler-tool] [--[no-]prefetch]
+ [--[no-]keep-going] [--[no-]keep-tmp-files] [--[no-]force-dirty]
+ [--[no-]test] [--[no-]rerun-tests] [--ta|--test-arguments TEST_ARGS]
+ [--coverage] [--[no-]run-tests] [--test-suite-timeout SECONDS]
+ [--test-suite-timeout-grace SECONDS] [--[no-]tests-allow-stdin]
+ [--[no-]bench] [--ba|--benchmark-arguments BENCH_ARGS]
+ [--[no-]run-benchmarks] [--[no-]reconfigure]
+ [--cabal-verbosity VERBOSITY | --[no-]cabal-verbose]
+ [--[no-]split-objs] [--skip ARG] [--[no-]interleaved-output]
+ [--ddump-dir ARG]
+~~~
+
+`stack build` and its synonyms (`stack test`, `stack bench`, `stack haddock` and
+`stack install`) are Stack's primary command. The command provides a simple
+interface for simple tasks and flexibility for more complicated goals.
+
+See the introductory part of Stack's
+[user's guide](../tutorial/hello_world_example.md#the-stack-build-command) for
+an introduction to the command. 
+ +## Synonyms + +The synonym commands for `stack build` are: + +|Synonym command|Equivalent `stack build` command flag| +|---------------|-------------------------------------| +|`stack test` |`stack build --test` | +|`stack bench` |`stack build --bench` | +|`stack haddock`|`stack build --haddock` | +|`stack install`|`stack build --copy-bins` | + +The advantage of the synonym commands is that they are convenient and short. The +advantage of the flags is that they compose. See the examples below. + +## Components + +Every Cabal package is made up of one or more components. It can have an +optional public library component, one or more optional executable components, +one or more optional test suite components, and one or more optional benchmark +components. + +Stack allows you to identify a specific component to be built. For example, +`stack build mypackage:test:mytests` will build (and run - see further below) +the `mytests` component of the `mypackage` package. `mytests` must be a test +suite component. + +By default, if a test suite component is targeted, the component is built and +run. The running behaviour can be disabled with the `--no-run-tests` flag. +Similarly, if a benchmark component is targeted, it is built and run unless the +running behaviour is disabled with the `--no-run-benchmarks` flag. + +This ability to specify a component applies only to a project package. With +dependencies, Stack will *always* build the library (if present) and all +executables (if any), and ignore test suites and benchmarks. If you want more +control over a package, you must add it to your `packages` setting in your +project-level configuration file (`stack.yaml`, by default). + +## Target syntax + +`stack build` takes a list of one or more optional *targets* to be built. 
The +supported syntaxes for targets are as follows: + +* no targets specified +* *package* +* *package identifier* +* project package *component* +* *local directory* + +### No targets specified + +Example: `stack build` + +`stack build` with no targets specified will build all project packages. + +### Target: *package* + +Example: `stack build foobar` + +Stack will try to find the package in the following locations: + +* project packages, +* extra-deps, +* the snapshot, and +* the package index (e.g. Hackage). + +If the package is found in the package index, then the latest version of that +package from the index is implicitly added as an extra-dep. + +If the package is a project package, the library and executable components are +selected to be built. If the `--test` and `--bench` flags are set, then all of +the test suite and benchmark components, respectively, are selected to be built. + +If *package* is a GHC boot package (packages that come with GHC and are included +in GHC's global package database), the behaviour can be complex: + +* If the boot package has not been 'replaced', then `stack build` will, + effectively, do nothing. + +* If the boot package has been 'replaced' then `stack build` will specify the + latest version of that package in the package index, which may differ from the + version provided by the version of GHC specified by the snapshot. + +A boot package will be treated as 'replaced' if the package is included directly +in the Stackage snapshot or it depends on a package included directly in the +snapshot. + +!!! note + + Stackage snapshots are not expected to include directly any boot packages + but some such snapshots may include directly some boot packages. In + particular, some snapshots include directly `Win32` (which is a boot package + on Windows) while most do not. 
+
+ For example, if `Cabal` (a boot package) is not a project package or an
+ extra-dep, then `stack build Cabal` with Stackage snapshot LTS Haskell 20.25
+ will:
+
+ * on Windows, try to build the latest version of `Cabal` in the package
+ index (because that snapshot includes `Win32` directly, and `Cabal`
+ depends on `Win32` and so is treated as 'replaced'); and
+ * on non-Windows, effectively, do nothing (because `Cabal` is not
+ 'replaced').
+
+### Target: *package identifier*
+
+Example: `stack build foobar-1.2.3`
+
+If the package name is that of a project package, then Stack fails with an
+error.
+
+If the package version is an extra-dep or in the snapshot, then Stack will use
+that version.
+
+If the package version is in the package index (e.g. Hackage) then Stack will
+use the latest revision of that version from the package index.
+
+Otherwise, Stack will fail with an error.
+
+### Target: project package *component*
+
+Examples:
+
+* `stack build my-package:lib`
+* `stack build my-package:exe:my-executable`
+* `stack build my-package:test:my-test-suite`
+* `stack build my-package:bench:my-benchmark`
+* `stack build my-package:my-test-suite`
+* `stack build :my-test-suite`
+
+You can select individual components from inside a project package to be built.
+This can be done for more fine-grained control over which test suites to run, or
+to have a faster compilation cycle.
+
+There are multiple ways to refer to a specific component:
+
+* `<package-name>:lib` or `<package-name>:<comp-type>:<comp-name>` (where the
+ component type, `<comp-type>`, is one of `exe`, `test`, or `bench`) is the
+ most explicit. The library component type (`lib`) does not have an
+ associated component name, `<comp-name>`.
+
+* `<package-name>:<comp-name>` allows you to leave out the component type, as
+ that will often be unique for a given component name.
+
+* `:<comp-name>` is a useful shortcut, saying "find the component `<comp-name>`
+ in all of the project packages". This will result in an error if more than
+ one package has a component with the specified name. 
+ +For further information about available targets, see the +[`stack ide targets` command](ide_command.md). + +### Target: *local directory* + +Examples: + +* `stack build foo/bar` +* `stack build ./foo` +* `stack build .` + +Stack will find all project packages that exist in the given directory hierarchy +and then follow the same procedure as passing in package names as mentioned +above. + +`stack build .` will target project packages in the current working directory or +its subdirectories. + +!!! note + + If the directory name is parsed as one of the other target types, it will + be treated as that. Explicitly starting the target with `./` can avoid that. + For example, `stack build ./foo`. + +## Controlling what gets built + +Stack will rebuild a targeted project package if it considers one or more of +its files to be dirty. + +Stack will consider a package to be dirty if a file is added to the +`extra-source-files` field of its Cabal file or the contents of an existing file +listed in the `extra-source-files` field is changed. + +??? note "GHC's recompilation checker and Template Haskell" + + GHC's recompilation checker (which is on by default) stops compilation early + if GHC can determine that a module does not need to be recompiled. + + For modules that use Template Haskell, when the module is compiled, GHC can + determine dependencies, or be told about dependent files, of the code + inserted by the splice. (Instances of the `Quasi` class promise to provide + `qAddDependentFile`; see package `template-haskell`.) + + However, GHC cannot be told of as yet *unknown* dependent files when a + module using Template Haskell is compiled. For example, this can affect the + `embedDir` function provided by package `file-embed`, when files are added + to the directory in question after the module is compiled. The resolution is + either to specify GHC's `-fforce-recomp` option (to turn off the + recompilation checker for the package) or to do a clean build. 
+ +Stack will automatically build the necessary dependencies. See the introductory +part of Stack's +[user's guide](../tutorial/building_your_project.md#adding-dependencies) for +information about how these dependencies get specified. + +If a package description specifies a custom build type, it must also specify a +custom setup. That should list the dependencies needed to compile `Setup.hs`. +Stack further customises the setup, using the `Cabal` package. If that package +is not listed, Stack will warn and add the GHC boot package as a dependency. + +In addition to specifying targets, you can also control what gets built, or +retained, with the flags and options listed below. You can also affect what gets +built by specifying Cabal (the library) options for the configure step +of the Cabal build process (for further information, see the documentation for +the [configure-options](../configure/yaml/non-project.md#configure-options) +configuration option). + +### `--[no-]allow-newer` flag + +[:octicons-tag-24: 3.1.1](https://github.com/commercialhaskell/stack/releases/tag/v3.1.1) + +Overrides: [`allow-newer`](../configure/yaml/non-project.md#allow-newer) +non-project specific configuration option + +Pass the flag to enable or disable the ignoring of lower and upper version +bounds in Cabal files. + +!!! info + + The name `allow-newer` was chosen to match a commonly-used Cabal option + which ignored only upper version bounds. + +### `--bench` flag + +Pass the flag to add benchmark components to the targets, if specific components +are not identified. The `stack bench` synonym sets this flag. + +### `--dependencies-only` flag + +Pass the flag to skip building the targets. The flag `--only-dependencies` has +the same effect. + +### `--[no-]dry-run` flag + +Default: Disabled + +Set the flag to build nothing and output information about the build plan. + +### `--flag` option + +The option can be specified multiple times. 
It has two forms:
+
+* `--flag <package-name>:[-]<flag-name>`; and
+
+* `--flag *:[-]<flag-name>`.
+
+`stack build --flag <package-name>:[-]<flag-name>` sets (or unsets) the
+specified Cabal flag for the specified package. Stack will report an error if:
+
+* a package of that name is not known to Stack; or
+
+* a flag of that name is not a flag of that package.
+
+This overrides:
+
+* any Cabal flag specifications for the package in the snapshot;
+
+* any Cabal flag specifications for the package in Stack's project-level
+ configuration file (`stack.yaml`); and
+
+* any use of `--flag *` (see below).
+
+`stack build --flag *:[-]<flag-name>` sets (or unsets) the specified Cabal flag
+for all packages (project packages and dependencies) for which the flag is
+defined.
+
+This overrides:
+
+* any Cabal flag specifications for the relevant packages in the snapshot; and
+
+* any Cabal flag specifications for the relevant packages in Stack's
+ project-level configuration file (`stack.yaml`).
+
+!!! info
+
+ `flag *:[-]<flag-name>` inspects the Cabal file of each package in the
+ snapshot. Consequently, its use will add a few seconds to the duration of
+ a build.
+
+!!! note
+
+ For a package included directly in the snapshot, if the Cabal flag
+ specifications differ from the Cabal flag specifications (if any) in the
+ snapshot, then the package will automatically be promoted to be an
+ [extra-dep](../configure/yaml/project.md#extra-deps).
+
+!!! note
+
+ In order to set a Cabal flag for a GHC boot package, the package must be
+ specified as an [extra-dep](../configure/yaml/project.md#extra-deps).
+
+### `--[no-]force-dirty` flag
+
+Default: Disabled
+
+Set the flag to force rebuild of packages even when it does not seem necessary
+based on file dirtiness.
+
+### `--[no-]haddock` flag
+
+Default: Disabled
+
+The [`stack haddock`](haddock_command.md) synonym sets this flag.
+
+Set the flag to build Haddock documentation. This may cause a lot of packages to
+get re-built, so that the documentation links work. 
+ +Stack applies Haddock's `--gen-contents` and `--gen-index` flags to generate a +single HTML contents and index for multiple sets of Haddock documentation. + +!!! warning + + On Windows, the values for the `haddock-interfaces` and `haddock-html` keys + in the `*.conf` files for boot packages provided with certain versions of + GHC (in its `lib\package.conf.d` directory) can be corrupt and refer to + non-existent files and directories. For example, in the case of GHC 9.0.1 + to GHC 9.8.1 the references are to + `${pkgroot}/../../docs/html/libraries/...` or + `${pkgroot}/../../doc/html/libraries/...` instead of + `${pkgroot}/../docs/html/libraries/...` or + `${pkgroot}/../doc/html/libraries/...`. Until those values are corrected, + Haddock documentation will be missing links to what those packages expose. + +### `--haddock-arguments` option + +When building Haddock documentation (see the +[`--[no-]haddock`](#-no-haddock-flag) flag), +`--haddock-arguments ` passes the specified arguments to +the Haddock tool. + +Specified arguments are separated by spaces. Arguments can be unquoted (if they +do not contain space or `"` characters) or quoted (`""`). Quoted arguments can +include 'escaped' characters, escaped with an initial `\` character. + +!!! note + + Haddock's `--latex` flag is incompatible with the Haddock flags used by + Stack to generate a single HTML contents and index. + +### `--[no-]haddock-deps` flag + +Default: Enabled (if building Haddock documentation) + +When building Haddock documentation (see the +[`--[no-]haddock`](#-no-haddock-flag) flag), unset the flag to disable building +documentation for dependencies. 
+ +### `--[no-]haddock-for-hackage` flag + +:octicons-beaker-24: Experimental + +[:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) + +Default: Disabled + +When building Haddock documentation (see the +[`--[no-]haddock`](#-no-haddock-flag) flag), set the flag to build project +packages with flags to generate documentation suitable for upload to Hackage. +The form of the documentation generated for other packages is unaffected. + +For each project package, the generated Haddock documentation files are in +directory `doc\html\-docs\`, relative to Stack's dist work +directory (see [`stack path --dist-dir`](path_command.md)). + +Unless flags are set to exclude the building of project packages, for each +targeted project package with generated documentation, an archive of the +`-docs` directory and its contents is in Stack's dist work +directory. (The flags that exclude project packages are +[`--only-dependencies`](#-only-dependencies-flag), +[`--dependencies-only`](#-dependencies-only-flag), or +[`--only-snapshot`](#-only-snapshot-flag).) + +If the flag is set: + +* the [`--[no-]haddock-hyperlink-source`](#-no-haddock-hyperlink-source-flag) + flag is ignored and `--haddock-hyperlink-source` is implied; +* the [`--[no-]haddock-deps`](#-no-haddock-deps-flag) flag is ignored and the + default value for the flag is implied; +* the [`--[no-]haddock-internal`](#-no-haddock-internal-flag) flag is + ignored and `--no-haddock-internal` is implied; +* the [`--[no-]open`](#-no-open-flag) flag is ignored and `--no-open` is + implied; and +* the [`--[no-]force-dirty`](#-no-force-dirty-flag) flag is ignored and + `--force-dirty` is implied. + +!!! info + + Stack does not distinguish the building of Haddock documentation for Hackage + from the building of Haddock documentation generally, which is why the + `--force-dirty` flag is implied. + +!!! 
note
+
+ If set, Haddock will warn that `-source-*` options are ignored when
+ `--hyperlinked-source` is enabled. That is due to a known bug in Cabal
+ (the library).
+
+!!! note
+
+ If set, Cabal (the library) will report that documentation has been created
+ in `index.html` and `<package>.txt` files. Those files do not exist.
+ That false report is due to a known bug in Cabal (the library).
+
+### `--[no-]haddock-hyperlink-source` flag
+
+Default: Enabled (if building Haddock documentation)
+
+When building Haddock documentation (see the
+[`--[no-]haddock`](#-no-haddock-flag) flag), unset the flag to disable building
+documentation with hyperlinked sources.
+
+If the [`--haddock-for-hackage`](#-no-haddock-for-hackage-flag) flag is passed,
+this flag is ignored.
+
+### `--[no-]haddock-benchmarks` flag
+
+Default: Disabled
+
+When building Haddock documentation (see the
+[`--[no-]haddock`](#-no-haddock-flag) flag), set the flag to enable building
+documentation for benchmark components of packages.
+
+If the [`--haddock-for-hackage`](#-no-haddock-for-hackage-flag) flag is passed,
+this flag is ignored.
+
+!!! note
+
+ This feature is not supported by versions of Cabal (the library) provided
+ with GHC 9.2.8 and earlier.
+
+!!! warning
+
+ Due to a bug in versions of Cabal (the library) provided with GHC 9.8.2 and
+ earlier, if there is more than one executable (including test suites and
+ benchmarks) in a project package or more than one project package with an
+ executable, the Haddock documentation for the `Main` module of one
+ executable will overwrite the Haddock documentation for others.
+
+### `--[no-]haddock-executables` flag
+
+Default: Disabled
+
+When building Haddock documentation (see the
+[`--[no-]haddock`](#-no-haddock-flag) flag), set the flag to enable building
+documentation for executable components of packages.
+
+If the [`--haddock-for-hackage`](#-no-haddock-for-hackage-flag) flag is passed,
+this flag is ignored.
+
+!!! 
note + + This feature is not supported by versions of Cabal (the library) provided + with GHC 9.2.8 and earlier. + +!!! warning + + Due to a bug in versions of Cabal (the library) provided with GHC 9.8.2 and + earlier, if there is more than one executable (including test suites and + benchmarks) in a project package or more than one project package with an + executable, the Haddock documentation for the `Main` module of one + executable will overwrite the Haddock documentation for others. + +### `--[no-]haddock-internal` flag + +Default: Disabled + +When building Haddock documentation (see the +[`--[no-]haddock`](#-no-haddock-flag) flag), set the flag to enable building +documentation for internal modules. + +If the [`--haddock-for-hackage`](#-no-haddock-for-hackage-flag) flag is passed, +this flag is ignored. + +### `--[no-]haddock-tests` flag + +Default: Disabled + +When building Haddock documentation (see the +[`--[no-]haddock`](#-no-haddock-flag) flag), set the flag to enable building +documentation for test suite components of packages. + +If the [`--haddock-for-hackage`](#-no-haddock-for-hackage-flag) flag is passed, +this flag is ignored. + +!!! note + + This feature is not supported by versions of Cabal (the library) provided + with GHC 9.2.8 and earlier. + +!!! warning + + Due to a bug in versions of Cabal (the library) provided with GHC 9.8.2 and + earlier, if there is more than one executable (including test suites and + benchmarks) in a project package or more than one project package with an + executable, the Haddock documentation for the `Main` module of one + executable will overwrite the Haddock documentation for others. + +### `--[no-]keep-going` flag + +Default (`stack build`): Disabled + +Default (`stack test` or `stack bench`): Enabled + +Set the flag to continue building packages even after some build step fails. +The packages which depend upon the failed build will not get built. 
+ +### `--[no-]keep-tmp-files` flag + +Default: Disabled + +Set the flag to keep intermediate files and build directories that would +otherwise be considered temporary and deleted. It may be useful to inspect +these, if a build fails. By default, they are not kept. + +### `--only-configure` flag + +[:octicons-tag-24: 0.1.4.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.4.0) + +Pass the flag to perform only the configure step, not any builds. This is +intended for tool usage. It may break when used on multiple packages at once. + +!!! note + + If there are downstream actions that require a package to be built then a + full build will occur, even if the flag is passed. + +### `--only-dependencies` flag + +Pass the flag to skip building the targets. The flag `--dependencies-only` has +the same effect. + +### `--only-locals` flag + +Pass the flag to build only packages in the local database. Fails if the build +plan includes packages in the snapshot database. + +### `--only-snapshot` flag + +Pass the flag to build only snapshot dependencies, which are cached and shared +with other projects. + +### `--[no-]reconfigure` flag + +Default: Disabled + +Set the flag to force reconfiguration even when it does not seem necessary based +on file dirtiness. This is sometimes useful with custom `Setup.hs` files, in +particular when they depend on external data files. + +### `--skip` option + +`stack build --skip ` skips building the specified components of a +project package. It allows you to skip test suites and benchmark without +specifying other components (e.g. `stack test --skip long-test-suite` will run +the tests without the `long-test-suite` test suite). Be aware that skipping +executables will not work the first time the package is built due to an issue in +[Cabal](https://github.com/commercialhaskell/stack/issues/3229). + +This option can be specified multiple times to skip multiple components. 
+ +### `--test` flag + +Pass the flag to add test suite components to the targets, if specific +components are not identified. The `stack test` synonym sets this flag. + +## Controlling when building occurs + +### `--file-watch` flag + +Pass the flag to rebuild your project every time a file changes. By default it +will take into account all files belonging to the targets you specify. See also +the `--watch-all` flag. + +### `--file-watch-poll` flag + +Like the `--file-watch` flag, but based on polling the file system instead of +using events to determine if a file has changed. + +### `--watch-all` flag + +[:octicons-tag-24: 2.5.1](https://github.com/commercialhaskell/stack/releases/tag/v2.5.1) + +Pass the flag to rebuild your project every time any local file changes (from +project packages or from dependencies located locally). See also the +`--file-watch` flag. + +## Controlling what happens after building + +### `--benchmark-arguments`, `--ba` option + +`stack build --bench --benchmark-arguments=` will pass the +specified argument, or arguments, to each benchmark when it is run. + +Specified arguments are separated by spaces. Arguments can be unquoted (if they +do not contain space or `"` characters) or quoted (`""`). Quoted arguments can +include 'escaped' characters, escaped with an initial `\` character. + +Account may need to be taken of the shell's approach to the processing of +command line arguments: + +=== "Unix-like (Bash or Zsh)" + + For example, to pass `word` and `words with spaces` in Bash, or Zsh: + + `stack test --benchmark-arguments 'word "words with spaces"'` + + The content of single quotes is taken literally, but cannot contain a single + quote. + + For example, to pass `'a single quoted string'`: + + In Bash, or Zsh (if `RC_QUOTES` option not set): + + `stack bench --benchmark-arguments \"\''a single quoted string'\'\"` + + Outside of single quotes, `\"` escapes a double quote and `\'` escapes a + single quote. 
The content of single quotes is taken literally, but cannot + contain a single quote. + + In Zsh (if `RC_QUOTES` option set): + + `stack bench --benchmark-arguments '"''a single quoted string''"'` + + The content of single quotes is taken literally. Within single quotes, `''` + escapes a single quote. + +=== "Windows" + + For example, to pass `word` and `words with spaces` in PowerShell: + + `stack test --benchmark-arguments 'word "words with spaces"'` + + The content of single quotes is taken literally. + + For example, to pass `'a single quoted string'` in PowerShell: + + `stack bench --benchmark-arguments '"''a single quoted string''"'` + + The content of single quotes is taken literally. Within single quotes, `''` + escapes a single quote. + +!!! note "Runtime system (RTS) options" + + RTS options must be quoted to prevent the RTS extracting them as its own + when the Stack executable is run. + +### `--exec` option + +`stack build --exec ' []'` will run the specified command +after a successful build. + +Specified arguments are separated by spaces. Arguments can be unquoted (if they +do not contain space or `"` characters) or quoted (`""`). Quoted arguments can +include 'escaped' characters, escaped with an initial `\` character. + +Account may need to be taken of the shell's approach to the processing of +command line arguments: + +=== "Unix-like (Bash or Zsh)" + + For example, to pass `word` and `words with spaces` in Bash, or Zsh: + + `stack build --exec ' word "words with spaces"'` + + The content of single quotes is taken literally, but cannot contain a single + quote. + + For example, to pass `'a single quoted string'`: + + In Bash, or Zsh (if `RC_QUOTES` option not set): + + `stack build --exec ' '\"\''a single quoted string'\'\"` + + Outside of single quotes, `\"` escapes a double quote and `\'` escapes a + single quote. The content of single quotes is taken literally, but cannot + contain a single quote. 
+ + In Zsh (if `RC_QUOTES` option set): + + `stack build --exec ' "''a single quoted string''"'` + + The content of single quotes is taken literally. Within single quotes, `''` + escapes a single quote. + +=== "Windows" + + For example, to pass `word` and `words with spaces` in PowerShell: + + `stack build --exec ' word "words with spaces"'` + + The content of single quotes is taken literally. + + For example, to pass `'a single quoted string'` in PowerShell: + + `stack build --exec ' "''a single quoted string''"'` + + The content of single quotes is taken literally. Within single quotes, `''` + escapes a single quote. + +### `--[no-]rerun-tests` flag + +Default: Enabled + +Unset the flag to disable the automatic running of targeted test-suites that +have already been successful. + +### `--[no-]run-benchmarks` flag + +Default: Enabled + +Unset the flag to disable the automatic running of targeted benchmarks. + +### `--[no-]run-tests` flag + +Default: Enabled + +Unset the flag to disable the automatic running of targeted test suites. + +### `--test-arguments`, `--ta` option + +`stack build --test --test-arguments=` will pass the specified +argument, or arguments, to each test when it is run. This option can be +specified multiple times. + +Specified arguments are separated by spaces. Arguments can be unquoted (if they +do not contain space or `"` characters) or quoted (`""`). Quoted arguments can +include 'escaped' characters, escaped with an initial `\` character. + +Account may need to be taken of the shell's approach to the processing of +command line arguments: + +=== "Unix-like (Bash or Zsh)" + + For example, to pass `word` and `words with spaces` in Bash, or Zsh: + + `stack test --test-arguments 'word "words with spaces"'` + + The content of single quotes is taken literally, but cannot contain a single + quote. 
+ + For example, to pass `'a single quoted string'`: + + In Bash, or Zsh (if `RC_QUOTES` option not set): + + `stack test --test-arguments \"\''a single quoted string'\'\"` + + Outside of single quotes, `\"` escapes a double quote and `\'` escapes a + single quote. The content of single quotes is taken literally, but cannot + contain a single quote. + + In Zsh (if `RC_QUOTES` option set): + + `stack test --test-arguments '"''a single quoted string''"'` + + The content of single quotes is taken literally. Within single quotes, `''` + escapes a single quote. + +=== "Windows" + + For example, to pass `word` and `words with spaces` in PowerShell: + + `stack test --test-arguments 'word "words with spaces"'` + + The content of single quotes is taken literally. + + For example, to pass `'a single quoted string'` in PowerShell: + + `stack test --test-arguments '"''a single quoted string''"'` + + The content of single quotes is taken literally. Within single quotes, `''` + escapes a single quote. + +!!! note "Runtime system (RTS) options" + + RTS options must be quoted to prevent the RTS extracting them as its own + when the Stack executable is run. + +### `--test-suite-timeout` option + +Default: None + +`stack build --test --test-suite-timeout=<seconds>` wraps each running test +suite in a timeout so that the test suite fails if no result is available within +the specified number of seconds. The option is ignored if the number of seconds +is not positive. + +!!! note + + The `--test-suite-timeout` option, in isolation, is not guaranteed to + terminate a timed-out test suite process. In that regard, see the + [`--test-suite-timeout-grace`](#-test-suite-timeout-grace-option). 
+ +### `--test-suite-timeout-grace` option + +Default: None + +`stack build --test --test-suite-timeout=<seconds> --test-suite-timeout-grace=<seconds>` +uses staged timeout termination for each running test suite: if +[`--test-suite-timeout`](#-test-suite-timeout-option) causes the test suite to +fail, Stack requests the termination of the test suite process and waits the +specified number of seconds as a grace period before termination of the +process is forced. The option is ignored if the number of seconds is not +positive. + +## Flags affecting GHC's behaviour + +### `--[no-]executable-profiling` flag + +Default: Disabled + +Set the flag to enable executable profiling for TARGETs and all its +dependencies. + +The flag affects the location of the local project installation directory. See +the [`stack path --local-install-root`](path_command.md) command. + +### `--[no-]executable-stripping` flag + +Default: Enabled + +Unset the flag to disable executable stripping for TARGETs and all its +dependencies. + +The flag may affect the location of the local project installation directory. +See the [`stack path --local-install-root`](path_command.md) command. + +### `--fast` flag + +GHC has many flags that specify individual optimisations of the compiler. GHC +also uses its `-O*` flags to specify convenient 'packages' of GHC optimisation +flags. GHC's flags are evaluated from left to right and later flags can override +the effect of earlier ones. + +If no GHC `-O*` type flag is specified, GHC takes that to mean "Please +compile quickly; I'm not over-bothered about compiled-code quality." GHC's `-O0` +flag reverts to the same settings as if no `-O*` flags had been specified. + +Pass Stack's `--fast` flag to add `-O0` to the flags and options passed to GHC. +The effect of `--fast` can be overridden with Stack's +[`--ghc-options`](#-ghc-options-option) command line options. + +!!! note + + With one exception, GHC's `-O` flag is always passed to GHC first (being + Cabal's default behaviour). 
The exception is if Cabal's + `--disable-optimization` flag or `--enable-optimization[=n]`, `-O[n]` + options are used during the configure step of the Cabal build process; see + Stack's + [`configure-options`](../configure/yaml/non-project.md#configure-options) + non-project specific configuration option. + +### `--ghc-options` option + +GHC command line options can be specified for a package in its Cabal file +(including one created from a `package.yaml` file). This option augments and, if +applicable (see below), overrides any such GHC command line options and those +specified in Stack's configuration files - see the +[`ghc-options`](../configure/yaml/non-project.md#ghc-options) non-project +specific configuration option. + +`stack build --ghc-options ` passes the specified command line +options to GHC, depending on Stack's +[`apply-ghc-options`](../configure/yaml/non-project.md#apply-ghc-options) +non-project specific configuration option. This option can be specified multiple +times. + +GHC's command line options are _order-dependent_ and evaluated from left to +right. Later options can override the effect of earlier ones. Any GHC command +line options for a package specified at Stack's command line are applied after +those specified in Stack's configuration files. + +### `--[no-]library-profiling` flag + +Default: Disabled + +Set the flag to enable library profiling for TARGETs and all its dependencies. + +The flag affects the location of the local project installation directory. See +the [`stack path --local-install-root`](path_command.md) command. + +### `--[no-]library-stripping` flag + +Default: Enabled + +Unset the flag to disable library stripping for TARGETs and all its +dependencies. + +The flag may affect the location of the local project installation directory. +See the [`stack path --local-install-root`](path_command.md) command. + +### `--pedantic` flag + +Pass the flag to build your project with the GHC options `-Wall` and `-Werror`. 
+`-Wall` turns on all warning options that indicate potentially suspicious code. +`-Werror` makes any warning into a fatal error. + +### `--profile` flag + +Pass the flag to enable profiling in libraries, executables, etc. for all +expressions, and generate a profiling report in tests or benchmarks. + +The flag affects the location of the local project installation directory. See +the [`stack path --local-install-root`](path_command.md) command. + +### `--[no]-semaphore` flag + +:octicons-beaker-24: Experimental + +:octicons-tag-24: UNRELEASED + +Default: Disabled + +This flag allows GHC to use a system semaphore to perform compilation in +parallel when possible. + +!!! info + + GHC 9.8.1 and later can act as a jobserver client, which enables two or more + GHC processes running at once to share system resources with each other, + communicating via a system semaphore. This GHC feature is supported by + Cabal 3.12.0.0 (a boot package of GHC 9.10.1) and later. The flag is ignored + with a warning when the feature is unsupported. + +!!! warning + + On Linux, musl and non-musl system semaphores are incompatible. That means + that a Stack executable built on Alpine Linux (such as the official Stack + for Linux) creates system semaphores that cannot be used by a GHC executable + built on non-musl Linux distributions. + +### `--[no-]split-objs` flag + +:octicons-beaker-24: Experimental + +Default: Disabled + +Set the flag to enable the GHC option `-split-objs`. This will reduce output +size (at the cost of build time). + +!!! note + + The behaviour of this feature may be changed and improved. You will need to + clean your project's Stack working directory before use. If you want to + compile all dependencies with split-objs, you will need to delete the + snapshot (and all snapshots that could reference that snapshot). + +!!! 
note + + GHC's `-split-objs` flag was deprecated in favour of `-split-sections` in + GHC 8.2.1 and was not supported by GHC on any platform from GHC 8.10.1. + +### `--no-strip` flag + +Pass the flag to disable DWARF debugging symbol stripping in libraries, +executables, etc. for all expressions, producing larger executables but allowing +the use of standard debuggers/profiling tools/other utilities that use debugging +symbols. + +The flag affects the location of the local project installation directory. See +the [`stack path --local-install-root`](path_command.md) command. + +### `--trace` flag + +Pass the flag to enable profiling in libraries, executables, etc. for all +expressions, and generate a backtrace on exception. + +The flag affects the location of the local project installation directory. See +the [`stack path --local-install-root`](path_command.md) command. + +## Flags affecting other tools' behaviour + +### `--PROG-option` options + +[:octicons-tag-24: 2.11.1](https://github.com/commercialhaskell/stack/releases/tag/v2.11.1) + +`PROG` is a program recognised by Cabal (the library) and one of `alex`, `ar`, +`c2hs`, `cpphs`, `gcc`, `greencard`, `happy`, `hsc2hs`, `hscolour`, `ld`, +`pkg-config`, `strip` and `tar`. + +`stack build --PROG-option ` passes the specified command line +argument to `PROG`, if it used by Cabal during the configuration step. This +option can be specified multiple times. For example, if the program `happy` is +used by Cabal during the configuration step, you could command +`stack build --happy-option=--ghc` or `stack build --happy-option --ghc` to pass +to `happy` its `--ghc` flag. + +By default, all and any `--PROG-option` options on Stack's command line are +applied to all project packages (targets or otherwise). This behaviour can be +changed. See the +[`apply-prog-options`](../configure/yaml/non-project.md#apply-prog-options) +configuration option. 
+ +Stack can also be configured to pass Cabal's `--PROG-option`, `--PROG-options` +or other options to Cabal during the configuration step. For further +information, see the documentation for the +[configure-options](../configure/yaml/non-project.md#configure-options) +configuration option. + +## Flags relating to build outputs + +### `--[no]-cabal-verbose` flag + +Default: Disabled + +Set the flag to enable verbose output from Cabal (the library). This flag is an +alternative to the `--cabal-verbosity` option. + +### `--[no]-cabal-verbosity` option + +`stack build --cabal-verbosity ` sets the specified verbosity +level for output from Cabal (the library). It accepts Cabal's numerical and +extended syntax. This option is an alternative to setting the `--cabal-verbose` +flag. + +### `--[no-]copy-bins` flag + +[:octicons-tag-24: 0.1.3.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.3.0) + +Default: Disabled + +Set the flag to enable copying of built executable files (binaries) of targets +to Stack's local binary directory (see `stack path --local-bin`). The +`stack install` synonym sets this flag. + +### `--[no-]copy-compiler-tool` flag + +[:octicons-tag-24: 1.6.1](https://github.com/commercialhaskell/stack/releases/tag/v1.6.1) + +Default: Disabled + +Set the flag to enable copying of built executable files (binaries) of targets +to Stack's compiler tools binary directory (see +`stack path --compiler-tools-bin`). + +### `--coverage` flag + +Pass the flag to generate a code coverage report. For further information, see +the [code coverage](hpc_command.md) documentation. + +### `--ddump-dir` option + +GHC has a number of `ddump-*` flags and options to allow dumping out of +intermediate structures produced by the compiler. They include the +`-ddump-to-file` flag that causes the output from other flags to be dumped to a +file or files. 
+ +`stack build --ddump-dir <directory>` causes Stack to copy `*.dump-*` +files to subdirectories of the specified directory, which is relative to Stack's +working directory for the project. + +For example: + +~~~text +stack build --ghc-options "-ddump-to-file -ddump-timings" --ddump-dir my-ddump-dir +~~~ + +### `--[no-]interleaved-output` flag + +[:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) + +Default: Enabled + +Set the flag for interleaved output. With interleaved output, each line of +output from each package being built (targets and dependencies) is sent to the +console as it happens and output relating to different packages can be +interleaved. Each line will be prefixed with the name of the relevant package. +The spacing between the prefix and the output will be set based on the longest +relevant package name, so that the start of the output itself aligns. For +example (extract): + +~~~text +hpack > build +mustache > configure +hpack > Preprocessing library for hpack-0.35.0.. +hpack > Building library for hpack-0.35.0.. +mustache > Configuring mustache-2.4.1... +hpack > [ 1 of 29] Compiling Data.Aeson.Config.Key +hpack > [ 2 of 29] Compiling Data.Aeson.Config.KeyMap +mustache > build +hpack > [ 3 of 29] Compiling Data.Aeson.Config.Util +mustache > Preprocessing library for mustache-2.4.1.. +mustache > Building library for mustache-2.4.1.. +hpack > [ 4 of 29] Compiling Hpack.Haskell +hpack > [ 5 of 29] Compiling Hpack.Utf8 +mustache > [1 of 8] Compiling Paths_mustache +hpack > [ 6 of 29] Compiling Imports +hpack > [ 7 of 29] Compiling Hpack.Util +mustache > [2 of 8] Compiling Text.Mustache.Internal +~~~ + +Unset the flag for non-interleaved output. With non-interleaved output, the +build output from GHC (as opposed to from Stack) in respect of dependencies is +ignored. The behaviour then depends whether there is one target package or more +than one. 
There can be one target if the project has a single package or if one +package is targeted in a multi-package project (for example, using +`stack build `). + +* **One target package:** The build output for the target package is sent to the + standard error stream of the console as it happens. + +* **More than one target package:** The build output from GHC (as opposed to + from Stack) for each target package is sent to a log file for that package, + unless an error occurs that prevents that. If color in output is in use, there + will be two files, one with extension `.log` without color codes and one with + extension `.log-color` with color codes. At the end of the build, the location + of the directory containing the log files is reported. To also output the + contents of the log files to the standard error output stream of the console + at the end of the build, use Stack's `dump-logs` option. For further + information about that option, see the + [`dump-logs](../configure/yaml/non-project.md#dump-logs) non-project + specific configuration option documentation. The default `dump-logs` mode is + to output the contents of any log files that include GHC warnings. + +### `--[no]-open` flag + +Default: Disabled + +Set the flag to enable opening the local Haddock documentation in the browser. + +## Other flags and options + +### `--[no]-prefetch` flag + +Default: Disabled + +Set the flag to enable fetching packages necessary for the build immediately. +This can be useful with `stack build --dry-run`. + +### `--progress-bar` option + +[:octicons-tag-24: 2.13.1](https://github.com/commercialhaskell/stack/releases/tag/v2.13.1) + +Default: `capped` + +`stack build --progress-bar ` sets the format of the progress bar, where +`` is one of `none` (no bar), `count-only` (only the package count is +displayed), `capped` (the bar showing package builds in progress is capped to a +length equal to the terminal width), and `full` (the bar is uncapped). 
On +terminals where 'backspace' has no effect if the cursor is in the first column, +bars longer than the terminal width will not be 'sticky' at the bottom of the +screen. + +### `--tests-allow-stdin` flag + +[:octicons-tag-24: 2.9.3](https://github.com/commercialhaskell/stack/releases/tag/v2.9.3) + +Default: Enabled + +Cabal defines a test suite interface +['exitcode-stdio-1.0'](https://hackage.haskell.org/package/Cabal-syntax-3.8.1.0/docs/Distribution-Types-TestSuiteInterface.html#v:TestSuiteExeV1.0) +where the test suite takes the form of an executable and the executable takes +nothing on the standard input stream (`stdin`). Pass this flag to override that +specification and allow the executable to receive input on that stream. If you +pass `--no-tests-allow-stdin` and the executable seeks input on the standard +input stream, an exception will be thrown. + +## Examples + +All the following examples assume that: + +* if `stack build` is commanded outside of a project directory, there is no + `stack.yaml` file in the current directory or ancestor directory and, + consequently, the project-level configuration will be determined by a + `stack.yaml` file in the `global-project` directory in the + [Stack root](../topics/stack_root.md) (for further information, see the + [configuration](../configure/yaml/index.md) documentation); and + +* if `stack build` is commanded in a project directory, there is a + `stack.yaml` file in that directory. + +Examples: + +* In the project directory, `stack build --test --copy-bins` or, equivalently, + `stack test --copy-bins` or `stack install --test`, will build libraries, + executables, and test suites, run the test suites, and then copy the + executables to Stack's local binary directory (see + `stack path --local-bin`). This is an example of the flags composing. + +* The following example uses a clone of the + `wai` [repository](https://github.com/yesodweb/wai/). 
The `wai` project + comprises a number of packages, including `wai-extra` and `warp`. In the + `wai` project directory, the command: + + ~~~text + stack build --file-watch --test --copy-bins --haddock wai-extra :warp warp:doctest --exec 'echo Yay, it worked!' + ~~~ + + will start Stack up in file watch mode, waiting for files in your project to + change. When first starting, and each time a file changes, it will do all of + the following. + + * Build the `wai-extra` package and its test suites + * Build the `warp` executable + * Build the `warp` package's `doctest` component (which is a test suite) + * Run all of the `wai-extra` package's test suite components and the + `doctest` test suite component + * If all of that succeeds: + * Copy generated executables to Stack's local binary directory (see + `stack path --local-bin`) + * Run the command `echo Yay, it worked!` + +* The following example uses the `Agda` package and assumes that `Agda-2.6.3` + is the latest version in the package index (e.g. Hackage) and is not a + version in the snapshot specified by the `stack.yaml` in the + `global-project` directory in the Stack root. + + Outside a project directory, `stack build Agda-2.6.3 --copy-bins` or, + equivalently, `stack install Agda-2.6.3`, will attempt to build the + libraries and executables of the identified version of the package in the + package index (using the `stack.yaml` file in the `global-project` + directory in the Stack root), and then copy the executables to Stack's local + binary directory (see `stack path --local-bin`). + + If a different snapshot is required to build the identified version of the + package, then that can be specified at the command line. 
For example, to use + the most recent Stackage Nightly snapshot: + + ~~~text + stack --snapshot nightly install Agda-2.6.3 + ~~~ + + Alternatively, Stack can be used to unpack the package from the package + index into a local project directory named after the package identifier (for + further information, see the [`stack unpack` command](unpack_command.md) + documentation) and, if the package does not provide its own Stack + configuration file (`stack.yaml`, by default), to attempt to initialise that + configuration (for further information, see the + [`stack init` command](init_command.md) documentation). For example: + + ~~~text + stack unpack Agda-2.6.3 + cd Agda-2.6.3 # Change to the project directory + stack init # Attempt to create a project stack.yaml file + stack install # Equivalent to stack build --copy-bins + ~~~ diff --git a/doc/commands/clean_command.md b/doc/commands/clean_command.md new file mode 100644 index 0000000000..f3f053c6a8 --- /dev/null +++ b/doc/commands/clean_command.md @@ -0,0 +1,30 @@ +
+ +# The `stack clean` command + +Either + +~~~text +stack clean [PACKAGE] [--[no-]omit-this] +~~~ + +or + +~~~text +stack clean --full +~~~ + +`stack clean` deletes build artefacts for one or more project packages. + +By default: + +* all project packages are cleaned. Pass one or more project package names to + specify individual project packages; and + +* the `dist` directory and all of its subdirectories in the Stack work directory + for each relevant project package are deleted. Pass the flag `--omit-this` to + omit, from cleaning, the `dist` work directory (see `stack path --dist-dir`) + and its subdirectories currently in use. + +`stack clean --full` deletes the Stack work directories of the project and its +project packages. diff --git a/doc/commands/config_command.md b/doc/commands/config_command.md new file mode 100644 index 0000000000..084249520a --- /dev/null +++ b/doc/commands/config_command.md @@ -0,0 +1,199 @@ +
+ +# The `stack config` commands + +~~~text +stack config COMMAND + +Available commands: + build-files Generate (when applicable) a Cabal file from a + package description in the Hpack format and/or a lock + file for Stack's project-level configuration. + env Print environment variables for use in a shell. + set Set a key in a configuration file to value. +~~~ + +The `stack config` commands provide assistance with accessing or modifying +Stack's configuration. See `stack config` for the available commands. + +## The `stack config build-files` command + +~~~text +stack config build-files +~~~ + +`stack config build-files` generates (when applicable): + +* a Cabal file from a package description in the Hpack format (`package.yaml`); + and/or + +* a [lock file](../topics/lock_files.md) for Stack's project-level + configuration (by default, `stack.yaml`); + +without taking any other build steps. + +## The `stack config env` command + +~~~text +stack config env [--[no-]locals] [--[no-]ghc-package-path] [--[no-]stack-exe] + [--[no-]locale-utf8] [--[no-]keep-ghc-rts] +~~~ + +`stack config env` outputs a script that sets or unsets environment variables +for a Stack environment. Flags modify the script that is output: + +* `--[no-]locals` (enabled by default) include/exclude project package + information +* `--[no-]ghc-package-path` (enabled by default) set `GHC_PACKAGE_PATH` + environment variable or not +* `--[no-]stack-exe` (enabled by default) set `STACK_EXE` environment variable + or not +* `--[no-]locale-utf8` (disabled by default) set the `GHC_CHARENC` + environment variable to `UTF-8` or not +* `--[no-]keep-ghc-rts` (disabled by default) keep/discard any `GHCRTS` + environment variable + +The command also accepts flags and options of the +[`stack build`](build_command.md#flags-affecting-ghcs-behaviour) command that +affect the location of the local project installation directory, such as +`--profile` and `--no-strip`. 
For further information, see the documentation of +the [project Stack work directory](../topics/stack_work.md). + +## The `stack config set` commands + +~~~text +stack config set COMMAND + +Available commands: + install-ghc Configure whether or not Stack should automatically + install GHC when necessary. + install-msys Configure whether or not Stack should automatically + install MSYS2 when necessary. + package-index Configure Stack's package index + recommend-stack-upgrade Configure whether or not Stack should notify the user + if it identifes a new version of Stack is available. + resolver Change the snapshot of the current project, using the + (deprecated) resolver key. + snapshot Change the snapshot of the current project. + system-ghc Configure whether or not Stack should use a system + GHC installation. +~~~ + +The `stack config set` commands allow the values of keys in configuration files +to be set. See `stack config set` for the available keys. + +!!! note + + The `config set` commands support an existing key only in the form + `key: value` on a single line. + +!!! warning + + The `config set` commands cannot add a new key to a configuration file that + uses [`!include`](../configure/yaml/include.md) directives. Stack will report + an error if it detects `!include` directives in the target configuration file + and the key being set is not already present. Existing keys can be modified + even in files that use `!include`. + +## The `stack config set install-ghc` command + +~~~text +stack config set install-ghc [--global] true|false +~~~ + +`stack config set install-ghc true` or `false` sets the +[`install-ghc`](../configure/yaml/non-project.md#install-ghc) non-project +specific configuration option in a configuration file, accordingly. By default, +the project-level configuration file (`stack.yaml`, by default) is altered. The +`--global` flag specifies the user-specific global configuration file +(`config.yaml`). 
+
+## The `stack config set install-msys` command
+
+[:octicons-tag-24: 3.5.1](https://github.com/commercialhaskell/stack/releases/tag/v3.5.1)
+
+~~~text
+stack config set install-msys [--global] true|false
+~~~
+
+`stack config set install-msys true` or `false` sets the
+[`install-msys`](../configure/yaml/non-project.md#install-msys) non-project
+specific configuration option in a configuration file, accordingly. By default,
+the project-level configuration file (`stack.yaml`, by default) is altered. The
+`--global` flag specifies the user-specific global configuration file
+(`config.yaml`).
+
+## The `stack config set package-index download-prefix` command
+
+[:octicons-tag-24: 2.9.3](https://github.com/commercialhaskell/stack/releases/tag/v2.9.3)
+
+~~~text
+stack config set package-index download-prefix [--global] [URL]
+~~~
+
+`stack config set package-index download-prefix <url>` sets the
+`download-prefix` key of the
+[`package-index`](../configure/yaml/non-project.md#package-index) non-project
+specific configuration option in a configuration file, accordingly. By default,
+the project-level configuration file (`stack.yaml`, by default) is altered. The
+`--global` flag specifies the user-specific global configuration file
+(`config.yaml`).
+
+## The `stack config set recommend-stack-upgrade` command
+
+~~~text
+stack config set recommend-stack-upgrade [--project] true|false
+~~~
+
+`stack config set recommend-stack-upgrade true` or `false` sets the
+[`recommend-stack-upgrade`](../configure/yaml/non-project.md#recommend-stack-upgrade)
+non-project specific configuration option in a configuration file, accordingly.
+By default, the user-specific global configuration file (`config.yaml`) is
+altered. The `--project` flag specifies the project-level configuration file
+(`stack.yaml`, by default).
+
+## The `stack config set resolver` command
+
+~~~text
+stack config set resolver SNAPSHOT
+~~~
+
+A command corresponding to the
+[`stack config set snapshot` command](#the-stack-config-set-snapshot-command)
+but using the (deprecated) `resolver` key instead of the `snapshot` key.
+
+## The `stack config set snapshot` command
+
+[:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1)
+
+~~~text
+stack config set snapshot SNAPSHOT
+~~~
+
+`stack config set snapshot <snapshot>` sets the
+[`snapshot`](../configure/yaml/project.md#snapshot) project-specific
+configuration option in the project-level configuration file (`stack.yaml`, by
+default) to the specified snapshot.
+
+A snapshot of `lts` or `nightly` will be translated into the most recent
+available. A snapshot of `lts-22` will be translated into the most recent
+available in the `lts-22` sequence.
+
+Snapshot values that are compiler versions, a URL or a local file path are also
+accepted.
+
+If a (deprecated) `resolver` key is present, it will be replaced by a `snapshot`
+key.
+
+## The `stack config set system-ghc` command
+
+~~~text
+stack config set system-ghc [--global] true|false
+~~~
+
+`stack config set system-ghc true` or `false` sets the
+[`system-ghc`](../configure/yaml/non-project.md#system-ghc) non-project
+specific configuration option in a configuration file, accordingly. By default,
+the project-level configuration file (`stack.yaml`, by default) is altered. The
+`--global` flag specifies the user-specific global configuration file
+(`config.yaml`).
diff --git a/doc/commands/docker_command.md b/doc/commands/docker_command.md
new file mode 100644
index 0000000000..9af4f4c8a8
--- /dev/null
+++ b/doc/commands/docker_command.md
@@ -0,0 +1,37 @@
+
+ +# The `stack docker` commands + +~~~text +stack docker COMMAND + +Available commands: + pull Pull latest version of Docker image from registry + reset Reset the Docker sandbox +~~~ + +Stack is able to build your code inside a Docker image, which means even more +reproducibility to your builds, since you and the rest of your team will always +have the same system libraries. + +For further information, see the +[Docker integration](../topics/docker_integration.md) documentation. + +## The `stack docker pull` command + +~~~text +stack docker pull +~~~ + +`stack docker pull` pulls the latest version of the Docker image from the +registry. + +## The `stack docker reset` command + +~~~text +stack docker reset [--keep-home] +~~~ + +`stack docker reset` resets the Docker sandbox. + +Pass the flag `--keep-home` to preserve the sandbox's home directory. diff --git a/doc/commands/dot_command.md b/doc/commands/dot_command.md new file mode 100644 index 0000000000..83975ff71a --- /dev/null +++ b/doc/commands/dot_command.md @@ -0,0 +1,126 @@ +
+
+# The `stack dot` command
+
+~~~text
+stack dot [--[no-]external] [--[no-]include-base] [--depth DEPTH]
+          [--prune PACKAGES] [--reach PACKAGES] [TARGET]
+          [--flag PACKAGE:[-]FLAG] [--test] [--bench] [--global-hints]
+~~~
+
+A package and its dependencies and the direct dependency relationships between
+them form a directed graph. [Graphviz](https://www.graphviz.org/) is open source
+software that visualises graphs. It provides the DOT language for defining
+graphs and the `dot` executable for drawing directed graphs. Graphviz is
+available to [download](https://www.graphviz.org/download/) for Linux, Windows,
+macOS and FreeBSD.
+
+`stack dot` produces output, to the standard output stream, in the DOT language
+to represent the relationships between your packages and their dependencies.
+
+By default:
+
+* external dependencies are excluded from the output. Pass the flag
+  `--external` to include external dependencies;
+* the `base` package and its dependencies are included in the output. Pass the
+  flag `--no-include-base` to exclude `base` and its dependencies;
+* there is no limit to the depth of the resolution of dependencies. Pass the
+  `--depth <depth>` option to limit the depth;
+* all relevant packages are included in the output. Pass the
+  `--prune <packages>` option to exclude the specified packages (including
+  project packages). Pass the `--reach <packages>` option to exclude packages
+  (including project packages) that cannot reach any of the specified packages
+  in the dependency graph. In both cases, `<packages>` is a list of package
+  names separated by commas;
+* for all relevant project packages, relevant dependencies are included in the
+  output. However, each project package for which dependencies are included
+  can be specified as a target argument. The argument uses the same format as
+  the [`stack build` command](build_command.md) but components of project
+  packages are ignored. 
Non-project packages are also ignored;
+* Cabal flags are as specified by the package description files and the
+  project-level configuration file (`stack.yaml`, by default). Pass the
+  option `--flag <package_name>:<flag_name>` or
+  `--flag <package_name>:-<flag_name>` to set or unset a Cabal flag. This
+  option can be specified multiple times;
+* test components of project packages are excluded from the output. Pass the
+  flag `--test` to include test components;
+* benchmark components of project packages are excluded from the output. Pass
+  the flag `--bench` to include benchmark components; and
+* global packages for the specified version of GHC are those specified by the
+  global package database of an installed GHC. Pass the flag `--global-hints`
+  to use a hint file for global packages. If a hint file is used, GHC does not
+  need to be installed.
+
+All GHC wired-in packages are identified by a rectangular box.
+
+Nodes with no dependencies in the graph are given the maximum rank in the DOT
+language (that is, the `dot` executable will place those nodes on the bottom row
+of the diagram).
+
+## Examples
+
+The following examples are based on the package
+[`wreq-0.5.4.3`](https://hackage.haskell.org/package/wreq-0.5.4.3) and the boot
+packages of GHC 9.10.3. In each case, the output from `stack dot` is piped as an
+input into Graphviz's `dot` or `twopi` executables, and the executable produces
+output in the form of a SVG file named `wreq-example*.svg`.
+ +* A simple example: + + ~~~text + stack dot | dot -Tsvg -o wreq-example1.svg + ~~~ + + [![wreq-example1.svg](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example1.svg)](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example1.svg) + +* Include external dependencies: + + ~~~text + stack dot --external | dot -Tsvg -o wreq-example2.svg + ~~~ + + [![wreq-example2.svg](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example2.svg)](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example2.svg) + +* Include external dependencies, limit the depth and save the output from + `stack dot` as an intermediate file (`wreq-example3.dot`). + + ~~~text + stack dot --external --depth 2 > wreq-example3.dot + dot -Tsvg -o wreq-example3.svg wreq-example3.dot + ~~~ + +* Include external dependencies, exclude `base` and limit the depth: + + ~~~text + stack dot --no-include-base --external --depth 2 | dot -Tsvg -o wreq-example4.svg + ~~~ + + [![wreq-example4.svg](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example4.svg)](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example4.svg) + +* Include external dependencies and prune `base` and other packages: + + ~~~text + stack dot --external --prune base,lens,wreq-examples,http-client,aeson,tls,http-client-tls,exceptions | dot -Tsvg -o wreq-example5.svg + ~~~ + + [![wreq-example5.svg](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example5.svg)](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example5.svg) + +* Include external dependencies, prune `base` and other packages, and use a + different Graphviz executable to draw the graph: + + Graphviz's `twopi` executable draws graphs in a radial layout. 
+ + ~~~text + stack dot --external --prune base,lens,wreq-examples,http-client,aeson,tls,http-client-tls,exceptions | twopi -Groot=wreq -Goverlap=false -Tsvg -o wreq-example6.svg + ~~~ + + [![wreq-example6.svg](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example6.svg)](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example6.svg) + +* Include external dependencies and prune packages that cannot reach any of + `memory` and `basement` in the dependency graph: + + ~~~text + stack dot --external --reach memory,basement | dot -Tsvg -o wreq-example7.svg + ~~~ + + [![wreq-example7.svg](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example7.svg)](https://cdn.jsdelivr.net/gh/commercialhaskell/stack@master/doc/img/dot_command/wreq-example7.svg) diff --git a/doc/commands/eval_command.md b/doc/commands/eval_command.md new file mode 100644 index 0000000000..08a0ca09ca --- /dev/null +++ b/doc/commands/eval_command.md @@ -0,0 +1,20 @@ +
+
+# The `stack eval` command
+
+~~~text
+stack eval CODE [--[no-]ghc-package-path] [--[no-]stack-exe]
+           [--package PACKAGE] [--rts-options RTSFLAG] [--cwd DIR]
+~~~
+
+GHC has an
+[expression-evaluation mode](https://downloads.haskell.org/ghc/latest/docs/users_guide/using.html#eval-mode),
+set by passing the GHC option
+`-e <code>`. Commanding `stack eval <code>` is equivalent to commanding:
+
+~~~text
+stack exec ghc -- -e <code>
+~~~
+
+For further information, see the [`stack exec` command](exec_command.md)
+documentation.
diff --git a/doc/commands/exec_command.md b/doc/commands/exec_command.md
new file mode 100644
index 0000000000..7f07d5eb63
--- /dev/null
+++ b/doc/commands/exec_command.md
@@ -0,0 +1,76 @@
+
+
+# The `stack exec` command
+
+~~~text
+stack exec COMMAND
+           [-- ARGUMENT(S) (e.g. stack exec ghc-pkg -- describe base)]
+           [--[no-]ghc-package-path] [--[no-]stack-exe] [--package PACKAGE]
+           [--rts-options RTSFLAG] [--cwd DIR]
+~~~
+
+`stack exec` executes the specified executable as a command in the Stack
+environment. If an executable is not specified, the first argument after `--` is
+taken to be the executable. Otherwise, all arguments after `--` are taken to be
+command line arguments for the specified executable.
+
+By default:
+
+* the `GHC_PACKAGE_PATH` environment variable is set for the command's process.
+  Pass the flag `--no-ghc-package-path` to not set the environment variable;
+
+* if the operating system provides a reliable way to determine it and where a
+  result was available, the `STACK_EXE` environment variable is set to the path
+  to the current Stack executable for the command's process. Pass the flag
+  `--no-stack-exe` to not set the environment variable; and
+
+* the specified executable is executed in the current directory. Pass the option
+  `--cwd <directory>` to execute the executable in the specified directory.
+
+The option `--package <package>` has no effect for the `stack exec` command. For
+further information about its use, see the [`stack ghc` command](ghc_command.md)
+documentation or the [`stack runghc` command](runghc_command.md) documentation.
+
+Pass the option `--rts-options <rts_flag>` to specify a GHC RTS flag or option.
+The option can be specified multiple times. All specified GHC RTS flags and
+options are added to the arguments for the specified executable between
+arguments `+RTS` and `-RTS`.
+
+Specified GHC RTS flags and options are separated by spaces. Items can be
+unquoted (if they do not contain space or `"` characters) or quoted (`""`).
+Quoted items can include 'escaped' characters, escaped with an initial `\`
+character.
+
+Account may need to be taken of the shell's approach to the processing of
+command line arguments. 
For example, to pass `'a single quoted string'`: + +=== "Unix-like (Bash or Zsh)" + + In Bash, or Zsh (if `RC_QUOTES` option not set): + + `stack exec -- \''a single quoted string'\'` + + Outside of single quotes, `\'` escapes a single quote. The content of single + quotes is taken literally, but cannot contain a single quote. + + In Zsh (if `RC_QUOTES` option set): + + `stack exec -- '''a single quoted string'''` + + The content of single quotes is taken literally. Within single quotes, `''` + escapes a single quote. + +=== "Windows" + + In PowerShell: + + `stack exec -- '''a single quoted string'''` + + The content of single quotes is taken literally. Within single quotes, `''` + escapes a single quote. + +The command also accepts flags and options of the +[`stack build`](build_command.md#flags-affecting-ghcs-behaviour) command that +affect the location of the local project installation directory, such as +`--profile` and `--no-strip`. For further information, see the documentation of +the [project Stack work directory](../topics/stack_work.md). diff --git a/doc/commands/ghc_command.md b/doc/commands/ghc_command.md new file mode 100644 index 0000000000..9ede8d4c8b --- /dev/null +++ b/doc/commands/ghc_command.md @@ -0,0 +1,21 @@ +
+
+# The `stack ghc` command
+
+~~~text
+stack ghc [-- ARGUMENT(S) (e.g. stack ghc -- X.hs -o x)]
+          [--[no-]ghc-package-path] [--[no-]stack-exe] [--package PACKAGE]
+          [--rts-options RTSFLAG] [--cwd DIR]
+~~~
+
+`stack ghc` has the same effect as, and is provided as a shorthand for,
+[`stack exec ghc`](exec_command.md), with the exception of the `--package`
+option.
+
+Pass the option `--package <package(s)>` to add the initial GHC argument
+`-package-id=<unit_id>`, where `<unit_id>` is the unit ID of the specified
+package in the installed package database. The option can be a list of package
+names or package identifiers separated by spaces. The option can also be
+specified multiple times. The approach taken to these packages is the same as if
+they were specified as targets to
+[`stack build`](build_command.md#target-syntax).
diff --git a/doc/commands/ghci_command.md b/doc/commands/ghci_command.md
new file mode 100644
index 0000000000..38dfb00c0c
--- /dev/null
+++ b/doc/commands/ghci_command.md
@@ -0,0 +1,165 @@
+
+
+# The `stack ghci` and `stack repl` commands
+
+~~~text
+stack ghci [TARGET/FILE] [--pedantic] [--ghci-options OPTIONS]
+           [--ghc-options OPTIONS] [--flag PACKAGE:[-]FLAG] [--with-ghc GHC]
+           [--[no-]load] [--package PACKAGE] [--main-is TARGET]
+           [--load-local-deps] [--[no-]package-hiding] [--only-main] [--trace]
+           [--profile] [--no-strip] [--[no-]test] [--[no-]bench]
+~~~
+
+A read–evaluate–print loop (REPL) environment takes single user inputs, executes
+them, and returns the result to the user. GHCi is GHC's interactive environment.
+The `stack ghci` or `stack repl` commands, which are equivalent, allow you to
+load components and files of your project into GHCi.
+
+The command accepts the same TARGET syntax as
+[`stack build`](build_command.md#target-syntax). By default:
+
+* Stack loads up GHCi with all the library and executable components of all the
+  packages in the project. Pass the flag `--test` to include test suite
+  components (unlike `stack build`, test suites will not be run). Pass the flag
+  `--bench` to include benchmark components (unlike `stack build`, benchmarks
+  will not be run).
+
+It is also possible to specify a module source code file. For example:
+
+~~~text
+stack ghci src/MyFile.hs
+~~~
+
+Stack will identify which component the file is associated with, and use the
+options from that component.
+
+Pass the `--package` option to load GHCi with an additional package that is not
+a direct dependency of your components. This option can be specified multiple
+times.
+
+Pass the option `--flag <package_name>:<flag_name>` or
+`--flag <package_name>:-<flag_name>` to set or unset a Cabal flag. This option
+can be specified multiple times. The same Cabal flag name can be set (or unset)
+for multiple packages with:
+
+~~~text
+--flag *:[-]<flag_name>
+~~~
+
+!!! note
+
+    In order to set a Cabal flag for a GHC boot package, the package must either
+    be an extra-dep or the package version must be specified with the
+    `--package` option.
+
+By default:
+
+* Stack uses the GHC specified in Stack's configuration. 
Pass the `--with-ghc` + option with a file path to the executable to specify a different GHC + executable; + +* Stack performs an inital build step. Pass the `--no-build` flag to skip the + step. Pass the `--ghc-options` option to pass flags or options to GHC. Pass + the `--profile`, `--no-strip`, `--trace` flags for the same behaviour as in + the case of the `stack build` command. + + !!! info + + Not performing the initial build step speeds up the startup of GHCi. It + only works if the dependencies of the loaded packages have already been + built. + +* Stack runs GHCi via `ghc --interactive`. Pass the `--ghc-options` option to + pass flags or options to GHC (during the initial build step) and to GHCi. + Pass the `--pedantic` flag to pass the GHC options `-Wall` and `-Werror` to + GHCi (only). Pass the `--ghci-options` option to pass flags or options to + GHCi (only). + +* Stack configures GHCi to hide unnecessary packages, unless no packages are + targeted and no additional packages are specified. Pass the + `--package-hiding` flag to hide unnecessary packages or + `--no-package-hiding` flag not to hide unnecessary packages. + +* Stack loads and imports all of the modules for each target. Pass the + `--no-load` flag to skip the loading of modules. Pass the `--only-main` flag + to skip the loading of modules other than the main module. Pass the + `--load-local-deps` flag to include all local dependencies of targets. + + !!! info + + Not loading modules speeds up the startup of GHCi. Once in GHCi, you can + use `:load myModule` to load a specific module in your project. + + !!! info + + The `--only-main` flag can be useful if: + + 1. You are loading the project in order to run it in GHCi (e.g. via + `main`), and you intend to reload while developing. Without flag, + you will need to quit and restart GHCi whenever a module gets + deleted. With the flag, reloading should work fine in this case. + + 2. 
If many of your modules have exports named the same thing, then you
+           will need to refer to them using qualified names. To avoid this,
+           use the `--only-main` flag to start with a blank slate and just
+           import the modules you are interested in.
+
+* If there are multiple definitions for the `Main` module, Stack will ask you
+  to select one from a list of options. Pass the `--main-is <target>` option
+  to specify which `Main` module to load.
+
+Stack combines all of the GHC options of components.
+
+!!! note
+
+    Combining GHC options should work out when packages share similar
+    conventions. However, conflicts may arise, such as when one component
+    defines default extensions which are not assumed by another. For example,
+    specifying `NoImplicitPrelude` in one component but not another is likely to
+    cause failures. GHCi will be run with `-XNoImplicitPrelude`, but it is
+    likely that modules in the other component assume that the `Prelude` is
+    implicitly imported.
+
+`stack ghci` configures GHCi by using a GHCi script file. Such files are located
+in subdirectories of `<XDG_CACHE_HOME>/stack/ghci-script`, where
+`<XDG_CACHE_HOME>` refers to the
+[XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html)
+for user-specific non-essential (cached) data.
+
+=== "Unix-like"
+
+    The default for `<XDG_CACHE_HOME>` is `$HOME/.cache`.
+
+=== "Windows"
+
+    On Windows, the default for `<XDG_CACHE_HOME>` is `$Env:LOCALAPPDATA`.
+
+=== "Windows (Command Prompt)"
+
+    On Windows, the default for `<XDG_CACHE_HOME>` is `%LOCALAPPDATA%`.
+
+## Running plain GHCi
+
+`stack ghci` always runs GHCi configured to load code from packages in your
+project. In particular, this means it passes in flags like `-hide-all-packages`
+and `-package-id=<unit_id>` in order to configure which packages are visible to
+GHCi. 
+ +For doing experiments which just involve packages installed in your databases, +it may be useful to run GHCi plainly like: + +~~~text +stack exec ghci +~~~ + +This will run a plain GHCi in an environment which includes `GHC_PACKAGE_PATH`, +and so will have access to your databases. + +!!! note + + Running `stack ghci` on a pristine copy of the code does not currently build + libraries + (issue [#2790](https://github.com/commercialhaskell/stack/issues/2790)) or + internal libraries + (issue [#4148](https://github.com/commercialhaskell/stack/issues/4148)). It + is recommended to always use `stack build` before using `stack ghci`, until + these two issues are closed. diff --git a/doc/commands/haddock_command.md b/doc/commands/haddock_command.md new file mode 100644 index 0000000000..519fa5a933 --- /dev/null +++ b/doc/commands/haddock_command.md @@ -0,0 +1,31 @@ +
+ +# The `stack haddock` command + +~~~text +stack haddock [TARGET] [--dry-run] [--pedantic] [--fast] [--ghc-options OPTIONS] + [--flag PACKAGE:[-]FLAG] [--dependencies-only | --only-snapshot | + --only-dependencies | --only-locals] [--file-watch | + --file-watch-poll] [--watch-all] [--exec COMMAND [ARGUMENT(S)]] + [--only-configure] [--trace] [--profile] [--no-strip] + [--[no-]library-profiling] [--[no-]executable-profiling] + [--[no-]library-stripping] [--[no-]executable-stripping] + [--[no-]haddock] [--haddock-arguments HADDOCK_ARGS] + [--[no-]open] [--[no-]haddock-deps] [--[no-]haddock-internal] + [--[no-]haddock-hyperlink-source] [--[no-]haddock-for-hackage] + [--[no-]copy-bins] [--[no-]copy-compiler-tool] [--[no-]prefetch] + [--[no-]keep-going] [--[no-]keep-tmp-files] [--[no-]force-dirty] + [--[no-]test] [--[no-]rerun-tests] + [--ta|--test-arguments TEST_ARGS] [--coverage] [--[no-]run-tests] + [--test-suite-timeout SECONDS] + [--test-suite-timeout-grace SECONDS] [--[no-]tests-allow-stdin] + [--[no-]bench] [--ba|--benchmark-arguments BENCH_ARGS] + [--[no-]run-benchmarks] [--[no-]reconfigure] + [--cabal-verbosity VERBOSITY | --[no-]cabal-verbose] + [--[no-]split-objs] [--skip ARG] [--[no-]interleaved-output] + [--ddump-dir ARG] +~~~ + +`stack haddock` is a synonym for `stack build --haddock`. For further +information, see the documentation for the +[`stack build`](build_command.md#-no-haddock-flag) command. diff --git a/doc/commands/hoogle_command.md b/doc/commands/hoogle_command.md new file mode 100644 index 0000000000..ff67e52232 --- /dev/null +++ b/doc/commands/hoogle_command.md @@ -0,0 +1,33 @@ +
+ +# The `stack hoogle` command + +~~~text +stack hoogle [-- ARGUMENT(S) (e.g. 'stack hoogle -- server --local')] + [--[no-]setup] [--rebuild] [--server] +~~~ + +Hoogle is a Haskell API search engine. `stack hoogle` runs Hoogle. Stack needs +Hoogle version 5 or greater. + +Stack will use a Hoogle database (`database.hoo`) specific to the project's +source map and the version of GHC, located in a subdirectory of subdirectory +`hoogle` of Stack's work directory for the project. + +By default: + +* if a `hoogle` executable is found on the `PATH`, Stack will try to use it. + Otherwise, Stack will try to identify an executable as a build target. If + the Hoogle database does not exist, Stack will generate it with + `hoogle generate --local`. `hoogle generate --local` queries `ghc-pkg` and + generates links for all packages which have documentation and Hoogle input + files (`*.txt`) generated. Pass the flag `--no-setup` to skip such setup; + +* the existing Hoogle database is used. Pass the flag `--rebuild` to trigger + the generation of a new Hoogle database (generated as above); and + +* `hoogle` is passed the specified arguments (if any). The arguments are + usually the subject of the search. Pass the flag `--server` to first pass + `server --local --port 8080` before those arguments. + `hoogle server --local --port 8080` starts a local Hoogle web server, using + port 8080, that allows the following of `file://` links. diff --git a/doc/commands/hpc_command.md b/doc/commands/hpc_command.md new file mode 100644 index 0000000000..a4948c083e --- /dev/null +++ b/doc/commands/hpc_command.md @@ -0,0 +1,167 @@ +
+ +# The `stack hpc` commands + +~~~text +stack hpc COMMAND + +Available commands: + report Generate unified HPC coverage report from tix files + and project targets +~~~ + +Code coverage is a measure of the degree to which the source code of a program +is executed when a test suite is run. +[Haskell Program Coverage (HPC)](https://ku-fpg.github.io/software/hpc/) is a +code coverage tool for Haskell that is provided with GHC. Code coverage is +enabled by passing the flag `--coverage` to `stack build`. + +`stack hpc` provides commands specific to HPC. Command `stack hpc` for the +available commands. + +The following refers to the local HPC root directory. Its location can be +obtained by command: + +~~~text +stack path --local-hpc-root +~~~ + +## The `stack hpc report` command + +~~~text +stack hpc report [TARGET_OR_TIX] [--all] [--destdir DIR] [--open] +~~~ + +The `stack hpc report` command generates a report for a selection of targets and +`.tix` files. + +Pass the flag `--all` for a report that uses all stored results. + +Pass the flag `--open` to open the HTML report in your browser. + +## The `extra-tix-files` directory + +During the execution of the build, you can place additional tix files in the +`extra-tix-files` subdirectory in the local HPC root directory, in order for +them to be included in the unified report. A couple caveats: + +1. These tix files must be generated by executables that are built against the + exact same library versions. Also note that, on subsequent builds with + coverage, the local HPC root directory will be recursively deleted. It + just stores the most recent coverage data. + +2. These tix files will not be considered by `stack hpc report` unless listed + explicitly by file name. + +## Examples + +If we have three different packages with test suites, packages `A`, `B`, and +`C`, the default unified report will have coverage from all three. 
If we want a +unified report with just two, we can instead command: + +~~~text +stack hpc report A B +~~~ + +This will output to the standard output stream a summary report for the combined +coverage from `A` and `B`'s test suites. It will also log the path to the HTML +for the corresponding full report. + +This command also supports taking extra `.tix` files. If you have also built an +executable, against exactly the same library versions of `A`, `B`, and `C`, then +you could command the following: + +~~~text +stack exec -- an-exe +stack hpc report A B C an-exe.tix +~~~ + +or, equivalently: + +~~~text +stack exec -- an-exe +stack hpc report --all an-exe.tix +~~~ + +This report will consider all test results as well as the newly generated +`an-exe.tix` file. + +## Usage + +`stack test --coverage` is quite streamlined for the following use-case: + +1. You have test suites which exercise your project packages. + +2. These test suites link against your library, rather than building the + library directly. Coverage information is only given for libraries, ignoring + the modules which get compiled directly into your executable. A common case + where this does not happen is when your test suite and library both have + something like `hs-source-dirs: src/`. In this case, when building your test + suite you may also be compiling your library, instead of just linking + against it. + +When your project has these properties, you will get the following: + +1. Summary coverage reports, sent to the standard output stream in the build + output, and a log of the paths to the HTML for the corresponding full + reports. + +2. A summary unified report, sent to the standard output stream, and a log of + the path to the HTML for the corresponding full report. These reports + consider the coverage on all local libraries, based on all of the tests that + were run. + +3. 
An index of all generated HTML reports, in `index.html` in the local + HPC root directory, and a log of the path to the HTML for that index. + +## Implementation details + +Most users can get away with just understanding the above documentation. +However, advanced users may want to understand exactly how `--coverage` works: + +1. The GHC option `-fhpc` gets passed to all project packages. This tells GHC to + output executables that track coverage information and output them to `.tix` + files. `the-exe-name.tix` files will get written to the working directory of + the executable. + + When switching on this flag, it will usually cause all project packages to be + rebuilt (see issue + [#1940](https://github.com/commercialhaskell/stack/issues/1940)). + +2. Before the build runs with `--coverage`, the contents of the local HPC root + directory gets deleted. This prevents old reports from getting mixed + with new reports. If you want to preserve report information from multiple + runs, copy the contents of this path to a new directory. + +3. Before a test run, if a `test-name.tix` file exists in the package directory, + it will be deleted. + +4. After a test run, it will expect a `test-name.tix` file to exist. This file + will then get loaded, modified, and outputted to + `pkg-name/test-name/test-name.tix` in the local HPC root directory. + + The `.tix` file gets modified to remove coverage file that is not associated + with a library. So, this means that you will not get coverage information for + the modules compiled in the `executable` or `test-suite` stanza of your Cabal + file. This makes it possible to directly union multiple `*.tix` files from + different executables (assuming they are using the exact same versions of the + project packages). + + If there is enough popular demand, it may be possible in the future to give + coverage information for modules that are compiled directly into the + executable. 
See issue + [#1359](https://github.com/commercialhaskell/stack/issues/1359). + +5. Once we have a `.tix` file for a test, we also generate a summary report and + a corresponding full report using HTML. The summary report is sent to the + standard output stream. The index of the test-specific HTML report is + available at `pkg-name/test-name/index.html` in the local HPC root directory. + +6. After the build completes, if there are multiple output `*.tix` files, they + get combined into a unified report. The index of this report will be + available at `combined/all/index.html` in the local HPC root directory. + +7. Finally, an index of the resulting coverage reports is generated. It links to + the individual coverage reports (one for each test-suite), as well as the + unified report. This index is available at `index.html` in the local HPC root + directory. diff --git a/doc/commands/ide_command.md b/doc/commands/ide_command.md new file mode 100644 index 0000000000..3554b19704 --- /dev/null +++ b/doc/commands/ide_command.md @@ -0,0 +1,72 @@ +
+ +# The `stack ide` commands + +~~~text +stack ide COMMAND + +Available commands: + packages List all available local loadable packages + targets List all available Stack targets +~~~ + +The `stack ide` commands provide information that may be of use in an +integrated development environment (IDE). See `stack ide` for the available +commands. + +## The `stack ide packages` command + +~~~text +stack ide packages [--stdout] [--cabal-files] +~~~ + +`stack ide packages` lists all available project packages that are loadable. + +By default: + +* its output is sent to the standard error stream. Pass the flag `--stdout` to + change to the standard output stream; and +* the output is the package name (without its version). Pass the flag + `--cabal-files` to change to the full path to the package's Cabal file. + +## The `stack ide targets` command + +~~~text +stack ide targets [--exes] [--tests] [--benchmarks] [--stdout] +~~~ + +`stack ide targets` lists all available Stack targets. Alternatively, pass one +or more of the flags `--exes`, `--tests` and `--benchmarks` to list only targets +of those component types. + +By default, its output is sent to the standard error stream. Pass the flag +`--stdout` to change to the standard output stream. + +For example, for the Stack project itself, command: + +~~~text +cd stack +stack ide targets +~~~ + +and the output from the second command is: + +~~~text +stack:lib +stack:exe:stack +stack:exe:stack-integration-test +stack:test:stack-unit-test +~~~ + +or command: + +~~~text +stack ide targets --exes +~~~ + +and the output is: + +~~~text +stack:exe:stack +stack:exe:stack-integration-test +~~~ diff --git a/doc/commands/index.md b/doc/commands/index.md new file mode 100644 index 0000000000..817dcf9b15 --- /dev/null +++ b/doc/commands/index.md @@ -0,0 +1,166 @@ +--- +title: Commands +--- +
+
+# Commands (advanced)
+
+Some of Stack's features will not be needed regularly or by all users. This part
+of the guide and the part on [configuration](../configure/index.md) provide
+information about some of those features, organised as a reference guide. Some
+of the features are complex and separate pages are dedicated to them.
+
+## stack command
+
+Stack is usually used with one of its commands (see further below). However:
+
+* `stack`, `stack --help` (or `-h`) lists Stack's commands, and flags and
+  options common to those commands;
+
+* `stack <command> --help` (or `-h`) — provides help on the particular Stack
+  command, including flags and options specific to the command; and
+
+* `stack --version` identifies the version and Git hash of the Stack executable.
+
+!!! info "Runtime system (RTS) options"
+
+    The Stack executable is built by GHC with the `-rtsopts=some` option. This
+    means that the RTS extracts command-line arguments bracketed between `+RTS`
+    and `-RTS` as its own when Stack is run. Only the following RTS
+    options are available:
+
+    * `-?` (e.g. `stack +RTS -?`) causes the RTS to output information about
+      RTS options; and
+
+    * `--info` (e.g. `stack +RTS --info`) causes the RTS to output information
+      about the RTS.
+
+    To avoid the RTS extracting such command-line arguments as its own when
+    using Stack commands such as [`exec`](exec_command.md),
+    [`test`](test_command.md) or [`bench`](bench_command.md), see the
+    documentation for those commands. 
+
+## Stack commands (thematic)
+
+### Setting up
+
+* [`setup`](setup_command.md) - get GHC for a Stack project (usually not needed)
+* [`update`](update_command.md) - update the package index (usually not needed)
+* [`new`](new_command.md) - create a new project with Stack
+* [`init`](init_command.md) - initialise Stack's project-level YAML
+  configuration file for an existing project
+
+### Building
+
+* [`build`](build_command.md) - build packages
+* [`test`](build_command.md) - a synonym for `stack build --test`
+* [`bench`](build_command.md) - a synonym for `stack build --bench`
+* [`haddock`](build_command.md) - a synonym for `stack build --haddock`
+* [`install`](build_command.md) - a synonym for `stack build --copy-bins`
+* [`run`](run_command.md) - build and run an executable
+
+### Docker-related
+
+* [`docker`](docker_command.md) - use Stack with Docker
+
+### Executing in the Stack environment
+
+* [`exec`](exec_command.md) - execute a command in the Stack environment
+* [`ghc`](ghc_command.md) - run `ghc`
+* [`eval`](eval_command.md) - evaluate some Haskell code inline
+* [`runghc`](runghc_command.md) - run `runghc`
+* [`runhaskell`](runghc_command.md) - a synonym for `stack runghc`
+
+### Using GHC interactively
+
+* [`ghci`](ghci_command.md) - run GHCi, a REPL environment
+* [`repl`](ghci_command.md) - a synonym for `stack ghci`
+
+### Down/up loading local packages from/to Hackage
+
+* [`unpack`](unpack_command.md) - unpack one or more packages locally
+* [`sdist`](sdist_command.md) - create an archive file for a package, in a form
+  accepted by Hackage
+* [`upload`](upload_command.md) - upload a package to Hackage
+
+### Cleaning-up
+
+* [`clean`](clean_command.md) - delete build artefacts for the project packages
+* [`purge`](purge_command.md) - delete the Stack working directories
+
+### Amending Stack's configuration files
+
+* [`config set`](config_command.md) - modify Stack's configuration
+
+### Using Haskell code as a script
+
+* 
[`script`](script_command.md) - run a Haskell source file as a script
+
+### Getting information
+
+* [`path`](path_command.md) - information about locations used by Stack
+* [`ls`](ls_command.md) - list information about Stack
+* [`list`](list_command.md) - list packages on Hackage or in a snapshot
+* [`ide`](ide_command.md) - information for an integrated development
+  environment (IDE)
+* [`query`](query_command.md) - information about the build
+* [`config env`](config_command.md) - output environment variables, for use in
+  a shell
+* [`templates`](templates_command.md) - information about templates for use with
+  `stack new`
+* [`uninstall`](uninstall_command.md) - information about how to uninstall Stack
+
+### Using tools in Stack's environment
+
+* [`hoogle`](hoogle_command.md) - run `hoogle`
+* [`dot`](dot_command.md) - dependency visualization
+* [`hpc`](hpc_command.md) - generate Haskell Program Coverage (HPC) code
+  coverage reports
+
+### Managing Stack versions
+
+* [`upgrade`](upgrade_command.md) - upgrade Stack
+
+## Stack commands (alphabetical)
+
+Stack's commands are listed below, in alphabetical order. 
+
+* [`bench`](build_command.md) - a synonym for `stack build --bench`
+* [`build`](build_command.md) - build packages
+* [`clean`](clean_command.md) - delete build artefacts for the project packages
+* [`config`](config_command.md) - access and modify Stack's configuration
+* [`docker`](docker_command.md) - use Stack with Docker
+* [`dot`](dot_command.md) - dependency visualization
+* [`eval`](eval_command.md) - evaluate some Haskell code inline
+* [`exec`](exec_command.md) - execute a command in the Stack environment
+* [`haddock`](build_command.md) - a synonym for `stack build --haddock`
+* [`hoogle`](hoogle_command.md) - run `hoogle`
+* [`hpc`](hpc_command.md) - generate Haskell Program Coverage (HPC) code
+  coverage reports
+* [`ghc`](ghc_command.md) - run `ghc`
+* [`ghci`](ghci_command.md) - run GHCi, a REPL environment
+* [`ide`](ide_command.md) - information for an integrated development
+  environment (IDE)
+* [`init`](init_command.md) - initialise Stack's project-level YAML
+  configuration file for an existing project
+* [`install`](build_command.md) - a synonym for `stack build --copy-bins`
+* [`list`](list_command.md) - list packages on Hackage or in a snapshot
+* [`ls`](ls_command.md) - list information about Stack
+* [`new`](new_command.md) - create a new project with Stack
+* [`path`](path_command.md) - information about locations used by Stack
+* [`purge`](purge_command.md) - delete the Stack working directories
+* [`query`](query_command.md) - information about the build
+* [`repl`](ghci_command.md) - a synonym for `stack ghci`
+* [`run`](run_command.md) - build and run an executable
+* [`runghc`](runghc_command.md) - run `runghc`
+* [`runhaskell`](runghc_command.md) - a synonym for `stack runghc`
+* [`script`](script_command.md) - run a Haskell source file as a script
+* [`sdist`](sdist_command.md) - create an archive file for a package, in a form
+  accepted by Hackage
+* [`setup`](setup_command.md) - get GHC for a Stack project
+* 
[`templates`](templates_command.md) - information about templates for use with + `stack new` +* [`test`](build_command.md) - a synonym for `stack build --test` +* [`uninstall`](uninstall_command.md) - information about how to uninstall Stack +* [`unpack`](unpack_command.md) - unpack one or more packages locally +* [`update`](update_command.md) - update the package index +* [`upgrade`](upgrade_command.md) - upgrade Stack +* [`upload`](upload_command.md) - upload a package to Hackage diff --git a/doc/commands/init_command.md b/doc/commands/init_command.md new file mode 100644 index 0000000000..cc5384be06 --- /dev/null +++ b/doc/commands/init_command.md @@ -0,0 +1,41 @@ +
+
+# The `stack init` command
+
+~~~text
+stack init [DIR(S)] [--omit-packages] [--force] [--ignore-subdirs]
+~~~
+
+`stack init` initialises Stack's default project-level configuration file
+(`stack.yaml`) for an existing project, based on the Cabal file or
+`package.yaml` file for each of its packages.
+
+By default:
+
+* Stack searches for Cabal and `package.yaml` files in the current directory.
+  Specify one or more directories as arguments to cause Stack to search them;
+
+* Stack also searches for Cabal and `package.yaml` files in subdirectories. Pass
+  the flag `--ignore-subdirs` to ignore subdirectories;
+
+* Stack will not overwrite an existing `stack.yaml` file. Pass the flag
+  `--force` to allow overwriting; and
+
+* Stack will not initialise if there are conflicting or incompatible user
+  packages. Pass the flag `--omit-packages` to cause Stack to ignore such
+  matters while initialising.
+
+If a snapshot is specified at the command line, `stack init` will try to use it.
+For further information, see the documentation for the
+[`--snapshot`](../configure/global_flags.md#-snapshot-option) option.
+
+Otherwise, `stack init` will try to use the following Stackage snapshots in
+order of preference, using the first that is compatible: the most recent LTS
+Haskell, the most recent Stackage Nightly, and other LTS Haskell (most recent
+first).
+
+!!! note
+
+    If Cabal (the tool) has been used in the directory, consider commanding
+    `cabal clean` before applying `stack init`, in case Cabal has created any
+    unintended Cabal files.
diff --git a/doc/commands/install_command.md b/doc/commands/install_command.md
new file mode 100644
index 0000000000..6e73f574ae
--- /dev/null
+++ b/doc/commands/install_command.md
@@ -0,0 +1,31 @@
+
+
+# The `stack install` command
+
+~~~text
+stack install [TARGET] [--dry-run] [--pedantic] [--fast] [--ghc-options OPTIONS]
+              [--flag PACKAGE:[-]FLAG] [--dependencies-only | --only-snapshot |
+              --only-dependencies | --only-locals] [--file-watch |
+              --file-watch-poll] [--watch-all] [--exec COMMAND [ARGUMENT(S)]]
+              [--only-configure] [--trace] [--profile] [--no-strip]
+              [--[no-]library-profiling] [--[no-]executable-profiling]
+              [--[no-]library-stripping] [--[no-]executable-stripping]
+              [--[no-]haddock] [--haddock-arguments HADDOCK_ARGS]
+              [--[no-]open] [--[no-]haddock-deps] [--[no-]haddock-internal]
+              [--[no-]haddock-hyperlink-source] [--[no-]haddock-for-hackage]
+              [--[no-]copy-bins] [--[no-]copy-compiler-tool] [--[no-]prefetch]
+              [--[no-]keep-going] [--[no-]keep-tmp-files] [--[no-]force-dirty]
+              [--[no-]test] [--[no-]rerun-tests]
+              [--ta|--test-arguments TEST_ARGS] [--coverage] [--[no-]run-tests]
+              [--test-suite-timeout SECONDS]
+              [--test-suite-timeout-grace SECONDS] [--[no-]tests-allow-stdin]
+              [--[no-]bench] [--ba|--benchmark-arguments BENCH_ARGS]
+              [--[no-]run-benchmarks] [--[no-]reconfigure]
+              [--cabal-verbosity VERBOSITY | --[no-]cabal-verbose]
+              [--[no-]split-objs] [--skip ARG] [--[no-]interleaved-output]
+              [--ddump-dir ARG]
+~~~
+
+`stack install` is a synonym for `stack build --copy-bins`. For further
+information, see the documentation for the [`stack build`](build_command.md)
+command.
diff --git a/doc/commands/list_command.md b/doc/commands/list_command.md
new file mode 100644
index 0000000000..dad5cabee5
--- /dev/null
+++ b/doc/commands/list_command.md
@@ -0,0 +1,64 @@
+
+
+# The `stack list` command
+
+[:octicons-tag-24: 2.7.1](https://github.com/commercialhaskell/stack/releases/tag/v2.7.1)
+
+~~~text
+stack list [PACKAGE]
+~~~
+
+`stack list <package_name>` will send to the standard output stream the latest
+version of the package from Hackage. If the package name cannot be found on
+Hackage, even after updating the package index, suggestions (not necessarily
+good ones) will be made about the intended package name.
+
+`stack --snapshot <snapshot> list <package_name>` will send to the standard
+output stream the version of the package included in the specified snapshot
+(either directly or indirectly, if a boot package of the compiler specified by
+the snapshot). If the package name cannot be found in the snapshot, the command
+will fail, identifying only the package(s) that did not appear in the snapshot.
+
+More than one package name can be specified.
+
+`stack --snapshot <snapshot> list` will send to the standard output stream a
+list of all the packages included directly in the specified snapshot (that is,
+excluding those included only indirectly as a boot package of the compiler
+specified by the snapshot).
+
+For example:
+
+~~~text
+stack list base unix Win32 acme-missiles pantry
+base-4.21.0.0
+unix-2.8.6.0
+Win32-2.14.1.0
+acme-missiles-0.3
+pantry-0.10.1
+
+stack list paltry
+Could not find package paltry, updating
+...
+Package index cache populated
+Error: [S-4926]
+       * Could not find package paltry on Hackage. Perhaps you meant one of:
+         tasty, retry, path, pretty, pasty, xattr, alloy, para, pappy and
+         alure.
+
+stack --snapshot lts-24.37 list base unix Win32 acme-missiles pantry
+Error: [S-4926]
+       * Package does not appear in snapshot (directly or indirectly): acme-missiles.
+
+stack --snapshot lts-24.37 list base unix Win32 pantry
+base-4.20.2.0
+unix-2.8.7.0
+Win32-2.14.1.0
+pantry-0.10.1
+
+stack --snapshot lts-24.37 list
+AC-Angle-1.0
+ALUT-2.4.0.3
+...
+zot-0.0.3 +zstd-0.1.3.0 +~~~ diff --git a/doc/commands/ls_command.md b/doc/commands/ls_command.md new file mode 100644 index 0000000000..9485b849d1 --- /dev/null +++ b/doc/commands/ls_command.md @@ -0,0 +1,280 @@ +
+ +# The `stack ls` commands + +~~~text +stack ls COMMAND + +Available commands: + dependencies View the dependencies + globals View global packages + snapshots View snapshots (local by default) + stack-colors View Stack's output styles + stack-colours View Stack's output styles (alias for 'stack-colors') + tools View Stack's installed tools +~~~ + +The `stack ls` commands list different types of information. Command `stack ls` +for the available commands. + +## The `stack ls dependencies` command + +Either + +~~~text +stack ls dependencies COMMAND + +Available commands: + cabal Print dependencies as exact Cabal constraints + json Print dependencies as JSON + text Print dependencies as text (default) + tree Print dependencies as tree +~~~ + +or + +~~~text +stack ls dependencies [--[no-]license] [--separator SEP] [--filter ITEM] + [--[no-]external] [--[no-]include-base] [--depth DEPTH] + [--prune PACKAGES] [TARGET] [--flag PACKAGE:[-]FLAG] + [--test] [--bench] [--global-hints] +~~~ + +`stack ls dependencies` lists package versions used for a project. + +By default: + +* with the `text` or `tree` subcommand (see below), the package name is + followed by its version. Pass the `--license` flag to follow the package + name with its licence. (Consistent with the Cabal package description format + specification, only the American English spelling (license) is accepted.) +* With the `text` or `tree` subcommand, the separator between the package name + and what follows is a space character. Pass the `--separator` option to + specify a different separator; +* with the `text` command, all relevant package names are included. Pass the + `--filter` option to specify an item to be filtered out from the results, if + present. An item can be `$locals` (for all project packages) or a package + name. It can be specified multiple times; + + !!! note + + The special value `$locals` will need to be enclosed with single quotes + to distinguish it from a shell variable. 
+
+* external dependencies are excluded from the output. Pass the flag
+  `--external` to include external dependencies;
+* the `base` package and its dependencies are included in the output. Pass the
+  flag `--no-include-base` to exclude `base` and its dependencies;
+* there is no limit to the depth of the resolution of dependencies. Pass the
+  `--depth <depth>` option to limit the depth;
+* all relevant packages are included in the output. Pass the
+  `--prune <packages>` option to exclude the specified packages (including
+  project packages). Pass the `--reach <packages>` option to exclude packages
+  (including project packages) that cannot reach any of the specified packages
+  in the dependency graph. In both cases, `<packages>` is a list of package
+  names separated by commas;
+* for all relevant project packages, relevant dependencies are included in the
+  output. However, each project package for which dependencies are included
+  can be specified as a target argument. The argument uses the same format as
+  the [`stack build` command](build_command.md) but components of project
+  packages are ignored. Non-project packages are also ignored;
+
+  !!! note
+
+      If the first target is one of `cabal`, `json`, `text` and `tree`, then a
+      subcommand must be specified.
+
+* test components of project packages are excluded from the output. Pass the
+  flag `--test` to include test components;
+* benchmark components of project packages are excluded from the output. Pass
+  the flag `--bench` to include benchmark components; and
+* global packages for the specified version of GHC are those specified by the
+  global package database of an installed GHC. Pass the flag `--global-hints`
+  to use a hint file for global packages. If a hint file is used, GHC does not
+  need to be installed.
+
+Subcommands specify the format of the output, as follows:
+
+* `cabal` lists the packages in the format of exact Cabal constraints. 
+ + ~~~text + stack ls dependencies cabal [--[no-]external] [--[no-]include-base] + [--depth DEPTH] [--prune PACKAGES] [TARGET] + [--flag PACKAGE:[-]FLAG] [--test] [--bench] + [--global-hints] + ~~~ + + For example (extract): + + ~~~text + constraints: + , Cabal ==3.6.3.0 + , Cabal-syntax ==3.6.0.0 + , Glob ==0.10.2 + ~~~ + +* `json` lists dependencies in JSON format (an array of objects). + + ~~~text + stack ls dependencies json [--[no-]external] [--[no-]include-base] + [--depth DEPTH] [--prune PACKAGES] [TARGET] + [--flag PACKAGE:[-]FLAG] [--test] [--bench] + [--global-hints] + ~~~ + + For example (extract): + + ~~~text + [{"dependencies":["base","bytestring"],"license":"BSD3","location":{"type":"hackage","url":"https://hackage.haskell.org/package/zlib-0.6.3.0"},"name":"zlib","version":"0.6.3.0"}, + ~~~ + + Each object has the following keys: + + ~~~json + name: zlib + version: 0.6.3.0 + location: + type: hackage + url: https://hackage.haskell.org/package/zlib-0.6.3.0 + licence: BSD3 + dependencies: + - base + - bytestring + ~~~ + +* `text` (the default) lists the packages, each on a separate line. + + ~~~text + stack ls dependencies text [--[no-]license] [--separator SEP] + [--filter ITEM] [--[no-]external] + [--[no-]include-base] [--depth DEPTH] + [--prune PACKAGES] [TARGET] + [--flag PACKAGE:[-]FLAG] [--test] [--bench] + [--global-hints] + ~~~ + + For example (extract): + + ~~~text + Cabal 3.6.3.0 + Cabal-syntax 3.6.0.0 + Glob 0.10.2 + ~~~ + +* `tree` lists dependencies in the format of a tree. 
+ + ~~~text + stack ls dependencies tree [--[no-]license] [--separator SEP] + [--[no-]external] [--[no-]include-base] + [--depth DEPTH] [--prune PACKAGES] [TARGET] + [--flag PACKAGE:[-]FLAG] [--test] [--bench] + [--global-hints] + ~~~ + + For example (extract): + + ~~~text + Packages + └─┬ stack 2.10.0 + ├─┬ Cabal 3.6.3.0 + │ ├─┬ Win32 2.12.0.1 + │ │ ├─┬ base 4.16.3.0 + │ │ │ ├─┬ ghc-bignum 1.2 + │ │ │ │ └─┬ ghc-prim 0.8.0 + │ │ │ │ └── rts 1.0.2 + │ │ │ ├─┬ ghc-prim 0.8.0 + ~~~ + +## The `stack ls globals` command + +~~~text +stack ls globals [--[no-]global-hints] +~~~ + +`stack ls globals` will list all the global packages in alphabetical order. + +By default: + +* the global packages are those for the version of GHC specified by the snapshot + according to a hints file. Pass the flag `--no-global-hints` to use the global + package database of an installed GHC. + +!!! note + + For example, on Windows, `stack ls globals` will include a version of the + `Win32` and `unix` packages but `stack ls globals --no-global-hints` will + exclude the `unix` package - and vice versa on Unix-like operating systems. + +## The `stack ls snapshots` command + +~~~text +stack ls snapshots [COMMAND] [-l|--lts] [-n|--nightly] + +Available commands: + local View local snapshots + remote View remote snapshots +~~~ + +`stack ls snapshots` will list all the local snapshots by default. You can also +view the remote snapshots using `stack ls snapshots remote`. It also supports +options for viewing only lts (`-l`) and nightly (`-n`) snapshots. + +## The `stack ls stack-colors` command + +~~~text +stack ls stack-colors [--[no-]basic] [--[no-]sgr] [--[no-]example] +~~~ + +The British English spelling is also accepted (`stack ls stack-colours`). + +`stack ls stack-colors` will list all of Stack's output styles. A number of +different formats for the output are available, see +`stack ls stack-colors --help`. 
+
+The default is a full report, with the equivalent SGR instructions and an
+example of the applied style. The latter can be disabled with flags `--no-sgr`
+and `--no-example`.
+
+The flag `--basic` specifies a more basic report, in the format that is accepted
+by Stack's command line option `--stack-colors` and the
+[`stack-colors`](../configure/yaml/non-project.md#stack-colors) non-project
+specific configuration option.
+
+## The `stack ls tools` command
+
+~~~text
+stack ls tools [--filter TOOL_NAME]
+~~~
+
+`stack ls tools` will list Stack's installed tools. On Unix-like operating
+systems, they will be one or more versions of GHC. On Windows, they will include
+MSYS2. For example, on Windows the command:
+
+~~~text
+stack ls tools
+~~~
+
+yields output like:
+
+~~~text
+ghc-9.4.1
+ghc-9.2.4
+ghc-9.0.2
+msys2-20210604
+~~~
+
+The `--filter <tool_name>` option will filter the output by a tool name (e.g.
+'ghc', 'ghc-git' or 'msys2'). The tool name is case sensitive. For example the
+command:
+
+~~~text
+stack ls tools --filter ghc
+~~~
+
+yields output like:
+
+~~~text
+ghc-9.4.1
+ghc-9.2.4
+ghc-9.0.2
+~~~
diff --git a/doc/commands/new_command.md b/doc/commands/new_command.md
new file mode 100644
index 0000000000..e9eb3ff117
--- /dev/null
+++ b/doc/commands/new_command.md
@@ -0,0 +1,147 @@
+
+ +# The `stack new` command + +~~~text +stack new PACKAGE_NAME [--bare] [--[no-]init] [TEMPLATE_NAME] + [-p|--param KEY:VALUE] [DIR(S)] [--omit-packages] [--force] + [--ignore-subdirs] +~~~ + +`stack new` creates a new project using a project template. + +By default: + +* the project is created in a new directory named after the package. Pass the + `--bare` flag to create the project in the current directory; + +* the project is initialised for use with Stack. Pass the `--no-init` flag to + skip such initialisation; and + +* the project template is the one specified by the +[default-template](../configure/yaml/non-project.md#default-template) option. + +A package name acceptable to Cabal comprises an alphanumeric 'word'; or two or +more such words, with the words separated by a hyphen/minus character (`-`). A +word cannot be comprised only of the characters `0` to `9`. + +An alphanumeric character is one in one of the Unicode Letter categories +(Lu (uppercase), Ll (lowercase), Lt (titlecase), Lm (modifier), or Lo (other)) +or Number categories (Nd (decimal), Nl (letter), or No (other)). + +!!! note + + `stack new` will decline to accept a package name that is a GHC wired-in + package for a version of GHC that is supported by Stack. + + Stack treats the following as the names of 'wired-in' packages: `base`, + `ghc-bignum`, `ghc-prim`, `ghc`, `ghc-internal`, `integer-gmp`, + `integer-simple`, `interactive`, `rts` and `template-haskell`. + +!!! note + + In the case of Hackage and acceptable package names, an alphanumeric + character is limited to one of `A` to `Z`, `a` to `z`, and `0` to `9`. + +!!! note + + The name of a project is not constrained to be an acceptable package name. A + single-package project can be renamed to differ from the name of its + package. + +The `--param :` option specifies a key-value pair to populate a key +in a template. The option can be specified multiple times. 
+
+The arguments specifying directories and the `--ignore-subdirs`, `--force` and
+`--omit-packages` flags are as for the [`stack init` command](init_command.md).
+These arguments are ignored if the `--no-init` flag is passed.
+
+If a snapshot is specified at the command line and the project is initialised
+for use with Stack, `stack new` will try to use it. For further information, see
+the documentation for the
+[`--snapshot`](../configure/global_flags.md#-snapshot-option) option.
+
+## Project templates
+
+A project template file can be located in a repository named `stack-templates`
+on GitHub, GitLab, Bitbucket or Codeberg; at a URL; or on the local file system.
+
+Project template file names have the extension `.hsfiles`. The extension does
+not need to be specified with `stack new`.
+
+A project template file `my-template.hsfiles` in a repository
+`username/stack-templates` on GitHub, GitLab, Bitbucket or Codeberg can be
+specified with `stack new` as:
+
+~~~text
+<service>:username/my-template
+~~~
+
+where `<service>` is one of `github` for [GitHub](https://github.com/),
+`gitlab` for [GitLab](https://gitlab.com), `bitbucket` for
+[Bitbucket](https://bitbucket.com), or `codeberg` for
+[Codeberg](https://codeberg.org).
+
+The default service is GitHub, the default username is `commercialhaskell` and
+the default project template name is `new-template`. 
+ +## Examples + +Create a project for package `my-project` in new directory `my-project` with the +default project template file and initialise it for use with Stack: + +~~~text +stack new my-project +~~~ + +Create a project for package `my-package` in the current directory with the +default project template file and initialise it for use with Stack: + +~~~text +stack new my-package --bare +~~~ + +Create a project with the `rio` project template at the default repository and +initialise it for use with Stack: + +~~~text +stack new my-project rio +~~~ + +Create a project with the `mysql` project template provided by the +`yesodweb/stack-templates` repository on GitHub and initialise it for use with +Stack: + +~~~text +stack new my-project yesodweb/mysql +~~~ + +Create a project with the `my-template` project template provided by the +`username/stack-templates` repository on Bitbucket and initialise it for use +with Stack: + +~~~text +stack new my-project bitbucket:username/my-template +~~~ + +Create a project with the `my-template.hsfiles` project template file at +`https://example.com` and initialise it for use with Stack: + +~~~text +stack new my-project https://example.com/my-template +~~~ + +Create a project with the local project template file +`/my-template.hsfiles` and initialise it for use with Stack: + +~~~text +stack new my-project /my-template +~~~ + +Create a project with the `simple` project template file at the default +repository (which does not use Hpack and a `package.yaml` file) and do not +initialise it for use with Stack (`stack init` could be used subsequently): + +~~~text +stack new my-project --no-init simple +~~~ diff --git a/doc/commands/path_command.md b/doc/commands/path_command.md new file mode 100644 index 0000000000..bf66a3b509 --- /dev/null +++ b/doc/commands/path_command.md @@ -0,0 +1,49 @@ +
+ +# The `stack path` command + +~~~text +stack path [--stack-root] [--global-config] [--programs] [--local-bin] + [--project-root] [--config-location] [--bin-path] [--compiler-exe] + [--compiler-bin] [--compiler-tools-bin] [--extra-include-dirs] + [--extra-library-dirs] [--snapshot-pkg-db] [--local-pkg-db] + [--global-pkg-db] [--ghc-package-path] [--snapshot-install-root] + [--local-install-root] [--snapshot-doc-root] [--local-doc-root] + [--local-hoogle-root] [--dist-dir] [--local-hpc-root] +~~~ + +`stack path` provides information about files and locations used by Stack. + +Pass the following flags for information about specific files or locations: + +|Flag |File or location | +|-----------------------|------------------------------------------------------| +|--bin-path |The PATH in the Stack environment. | +|--compiler-bin |The directory containing the GHC executable. | +|--compiler-exe |The GHC executable. | +|--compiler-tools-bin |The directory containing binaries specific to a particular compiler.| +|--config-location |Stack's project-level configuration file (`stack.yaml`, by default).| +|--dist-dir |The dist work directory, relative to the package directory.| +|--extra-include-dirs |Extra include directories. | +|--extra-library-dirs |Extra library directories. | +|--ghc-package-path |The `GHC_PACKAGE_PATH` environment variable. | +|--global-config |Stack's user-specific global configuration file (`config.yaml`).| +|--global-pkg-db |The global package database. | +|--local-bin |The directory in which Stack installs executables. | +|--local-doc-root |The root directory for local project documentation. | +|--local-hoogle-root |The root directory for local project documentation. | +|--local-hpc-root |The root directory for .tix files and HPC reports. | +|--local-install-root |The root directory for local project installation. | +|--local-pkg-db |The local package database. 
| +|--programs |The root directory for GHC and other Stack-supplied tools.| +|--project-root |The project root directory.| +|--snapshot-doc-root |The root directory for snapshot documentation. | +|--snapshot-install-root|The root directory for snapshot installation. | +|--snapshot-pkg-db |The snapshot package database. | +|--stack-root |The Stack root. | + +The command also accepts flags and options of the +[`stack build`](build_command.md#flags-affecting-ghcs-behaviour) command that +affect the location of the local project installation directory, such as +`--profile` and `--no-strip`. For further information, see the documentation of +the [project Stack work directory](../topics/stack_work.md). diff --git a/doc/commands/purge_command.md b/doc/commands/purge_command.md new file mode 100644 index 0000000000..440c21d5d7 --- /dev/null +++ b/doc/commands/purge_command.md @@ -0,0 +1,10 @@ +
+ +# The `stack purge` command + +~~~text +stack purge +~~~ + +`stack purge` has the same effect as, and is provided as a shorthand for, +[`stack clean --full`](clean_command.md). diff --git a/doc/commands/query_command.md b/doc/commands/query_command.md new file mode 100644 index 0000000000..ef64349232 --- /dev/null +++ b/doc/commands/query_command.md @@ -0,0 +1,44 @@ +
+ +# The `stack query` command + +:octicons-beaker-24: Experimental + +[:octicons-tag-24: 0.1.6.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.6.0) + +~~~text +stack query [SELECTOR...] +~~~ + +`stack query` outputs certain build information. For example, for a +multi-package project `multi` specifying snapshot `lts-19.25` (GHC 9.0.2) and +with two project packages, `my-package-A` (version 0.1.0.0) and `my-package-B` +(version 0.2.0.0), command `stack query` outputs: + +~~~text +compiler: + actual: ghc-9.0.2 + wanted: ghc-9.0.2 +locals: + my-package-A: + path: \multi\my-package-A\ + version: 0.1.0.0 + my-package-B: + path: \multi\my-package-B\ + version: 0.2.0.0 +~~~ + +The component parts of the information can be specified using 'selectors' with +the command. In the example above the selectors include `compiler`, +`compiler actual`, `locals`, `locals my-package-A`, and +`locals my-package-A version`. For example, commanding: + +~~~text +stack query locals my-package-B path +~~~ + +results in output: + +~~~text +\multi\my-package-B\ +~~~ diff --git a/doc/commands/repl_command.md b/doc/commands/repl_command.md new file mode 100644 index 0000000000..1af632c6d2 --- /dev/null +++ b/doc/commands/repl_command.md @@ -0,0 +1,14 @@ +
+ +# The `stack repl` command + +~~~text +stack repl [TARGET/FILE] [--pedantic] [--ghci-options OPTIONS] + [--ghc-options OPTIONS] [--flag PACKAGE:[-]FLAG] [--with-ghc GHC] + [--[no-]load] [--package PACKAGE] [--main-is TARGET] + [--load-local-deps] [--[no-]package-hiding] [--only-main] [--trace] + [--profile] [--no-strip] [--[no-]test] [--[no-]bench] +~~~ + +The `stack repl` command is equivalent to the `stack ghci` command. For further +information, see the [`stack ghci`](ghci_command.md) documentation. diff --git a/doc/commands/run_command.md b/doc/commands/run_command.md new file mode 100644 index 0000000000..f0aaeda298 --- /dev/null +++ b/doc/commands/run_command.md @@ -0,0 +1,46 @@ +
+ +# The `stack run` command + +~~~text +stack run [-- ARGUMENT(S) (e.g. stack run -- file.txt)] + [--[no-]ghc-package-path] [--[no-]stack-exe] + [--package PACKAGE] [--rts-options RTSFLAG] [--cwd DIR] +~~~ + +`stack run` builds a project executable and runs it. If the command has a first +argument and it is recognised as the name of an executable component of a +project package then that is built. Otherwise, the project's first executable is +built. If the project has no executables Stack reports no executables found as +an error. + +!!! note + + To identify a project's first executable, and search for the name of an + executable component, Stack lists the executable components, in order, for + each package, listed in order. For example: + + `packageA:a-exe` < `packageA:b-exe` < `packageB:a-exe` < `packageB:b-exe` + +Everything after `--` on the command line is interpreted as a command line +argument to be passed to what is run, other than a first argument recognised as +the name of an executable component of a project package. + +By default: + +* the `GHC_PACKAGE_PATH` environment variable is set for the subprocess. Pass + the `--no-ghc-package-path` flag to not set the variable; and + +* the `STACK_EXE` environment variable is set with the path to Stack. Pass the + `--no-stack-exe` flag to not set the variable. + +The `--cwd` option can be used to set the working directory before the +executable is run. + +The `--package` option (which can be specified multiple times) can be used to +add a package name to build targets. + +The `--rts-options` option (which can be specified multiple times) can be used +to pass a list of GHC's +[runtime system (RTS) options](https://downloads.haskell.org/~ghc/latest/docs/users_guide/runtime_control.html#) +to the executable when it is run. (The `+RTS` and `-RTS` must not be included.) 
diff --git a/doc/commands/runghc_command.md b/doc/commands/runghc_command.md new file mode 100644 index 0000000000..55023f070e --- /dev/null +++ b/doc/commands/runghc_command.md @@ -0,0 +1,22 @@ +
+ +# The `stack runghc` and `stack runhaskell` commands + +~~~text +stack runghc [-- ARGUMENT(S) (e.g. stack runghc -- X.hs)] + [--[no-]ghc-package-path] [--[no-]stack-exe] [--package PACKAGE] + [--rts-options RTSFLAG] [--cwd DIR] +~~~ + +`stack runhaskell` has the same effect as `stack runghc`. `stack runghc` has the +same effect as, and is provided as a shorthand for, +[`stack exec runghc`](exec_command.md), with the exception of the `--package` +option. + +Pass the option `--package ` to add the initial GHC argument +`-package-id=`, where `` is the unit ID of the specified +package in the installed package database. The option can be a list of package +names or package identifiers separated by spaces. The option can also be +specified multiple times. The approach taken to these packages is the same as if +they were specified as targets to +[`stack build`](build_command.md#target-syntax). diff --git a/doc/commands/runhaskell_command.md b/doc/commands/runhaskell_command.md new file mode 100644 index 0000000000..cf55c070b6 --- /dev/null +++ b/doc/commands/runhaskell_command.md @@ -0,0 +1,12 @@ +
+ +# The `stack runhaskell` command + +~~~text +stack runhaskell [-- ARGUMENT(S) (e.g. stack runghc -- X.hs)] + [--[no-]ghc-package-path] [--[no-]stack-exe] + [--package PACKAGE] [--rts-options RTSFLAG] [--cwd DIR] +~~~ + +The `stack runhaskell` command is equivalent to the `stack runghc` command. For +further information, see the [`stack runghc`](runghc_command.md) documentation. diff --git a/doc/commands/script_command.md b/doc/commands/script_command.md new file mode 100644 index 0000000000..92eb1c4027 --- /dev/null +++ b/doc/commands/script_command.md @@ -0,0 +1,384 @@ +
+ +# The `stack script` command + +~~~text +stack script [--package PACKAGE] FILE + [-- ARGUMENT(S) (e.g. stack script X.hs -- argument(s) to program).] + [--compile | --optimize] [--[no-]use-root] [--ghc-options OPTIONS] + [--extra-dep EXTRA-DEP] [--no-run] +~~~ + +The `stack script` command either runs a specified Haskell source file (using +GHC's `runghc`) or, optionally, compiles such a file (using GHC) and, by +default, runs it. + +## Global configuration files + +Non-project level configuration options in global configuration files +(`config.yaml`) are not ignored by the `stack script` command. + +!!! info + + Non-project level configuration options may be useful if + [`allow-newer`](../configure/yaml/non-project.md#allow-newer) and/or + [`allow-newer-deps`](../configure/yaml/non-project.md#allow-newer-deps) are + required. + +## Project-level configuration file + +The `stack script` command ignores any project-level configuration file +(`stack.yaml`, by default), including in the `global-project` directory in the +Stack root. + +!!! info + + The `stack script` command can be contrasted with the + [`stack ghc`](ghc_command.md) and [`stack runghc`](runghc_command.md) + commands, which do not ignore any project-level configuration file. + +## GHC + +The `stack script` command behaves as if the +[`--install-ghc`](../configure/global_flags.md#-no-install-ghc-flag) flag had +been passed at the command line. + +## Snapshot and extra-deps + +A snapshot must be specified on the command line, using the `--snapshot` option. +For example: + +~~~text +stack script --snapshot lts-24.37 MyScript.hs +~~~ + +An immutable extra-dep can be added to the snapshot on the command line with the +`--extra-dep` option (which can be specified multiple times). + +An extra-dep is specified using a valid YAML value. For further information, see +the [package location](../topics/package_location.md) documentation. 
Examples +are: + +~~~text +--extra-dep acme-missiles-0.3@rev:0 +--extra-dep '{git: git@github.com:yesodweb/wai, commit: '2f8a8e1b771829f4a8a77c0111352ce45a14c30f', subdirs: [auto-update, wai]} +--extra-dep acme-missiles-0.3.tar.gz +~~~ + +Relative paths to local archive files are assumed to be relative to the +directory in which the script file is located. + +GHC boot packages that have been 'replaced' (see further below) can be specified +as an `--extra-dep`. + +## Required packages + +The names of required packages can be either deduced or specified. + +The `base` package associated with the version of GHC specified by the snapshot +is always available. + +If no packages are specified, all the required packages that are in the snapshot +or are a GHC boot package (packages that come with GHC and are included in GHC's +global package database), will be deduced by reference to the `import` +statements in the source file. In that regard, Stack assumes that: + +* a line that begins `import` is an `import` statement; +* `import` may be followed by `qualified` on the same line; +* consistent with GHC's + [`PackageImports`](https://downloads.haskell.org/ghc/latest/docs/users_guide/exts/package_qualified_imports.html) + language extension, that if `import` or `import qualified` is followed by + `""` on the same line, that `` is the name of a required package; +* otherwise, `import` or `import qualified` is followed by the module name on + the same line. Stack will not deduce the names of hidden packages from + module names or the names of blacklisted packages. + +!!! note + + The first time that Stack deduces package names from module names can take + some time. Use the `--verbose` option to understand Stack's progress. + +!!! note + + The installed packages of modules exposed by public sub-libraries will not + be deduced, because those installed packages are hidden. + +!!! 
info + + Certain packages are blacklisted because they expose one or more modules + with names that are the same as modules exposed by more popular packages. + The blacklisted packages are `Glob`, `HTF`, `async-dejafu`, + `binary-ieee754`, `cipher-aes`, `cipher-blowfish`, `cipher-camellia`, + `cipher-des`, `cipher-rc4`, `control-monad-free`, `courier`, `crypto-api`, + `crypto-cipher-types`, `crypto-numbers`, `crypto-pubkey`, `crypto-random`, + `cryptohash`, `cryptohash-conduit`, `cryptohash-md5`, `cryptohash-sha1`, + `cryptohash-sha256`, `fay-base`, `gl`, `gtk3`, `hashmap`, `hledger-web`, + `hxt-unicode`, `kawhi`, `language-c`, `log`, `monad-extras`, `monads-tf`, + `nanospec`, `newtype-generics`, `objective`, `plot-gtk3`, `prompt`, + `regex-compat-tdfa`, `regex-pcre-builtin`, `rerebase`, `svg-tree` and `zip`. + +Alternatively, each required package can be specified by name on the command +line with the `--package` option (which can be specified multiple times). A +single `--package` option can also refer to a list of package names, separated +by a space or comma character. If the package is not in the snapshot, the most +recent version in the package index (e.g. Hackage) will be obtained. + +In the case of a named public sub-library of a Cabal package, the required +installed package is specified by the 'munged' package name. For example, for +public sub-library `my-library` of Cabal package `my-package` the munged name of +the installed package is `z-my-library-z-my-package`. + +If a required package is a GHC boot package, the behaviour can be complex. If +the boot package has not been 'replaced', then it will be used in Stack's build +plan. However, if the boot package has been 'replaced', the latest version of +that package in the package index will be used in Stack's build plan, which may +differ from the version provided by the version of GHC specified by the +snapshot. 
A boot package will be treated as 'replaced' if the package is +included directly in the Stackage snapshot or it depends on a package included +directly in the snapshot. Stackage snapshots do not include directly most boot +packages but some snapshots may include directly some boot packages. In +particular, some snapshots include directly `Win32` (which is a boot package on +Windows) while others do not. + +!!! info + + GHC has the concept of 'installed packages' (which differ from 'Cabal + packages') in package databases. An installed package has a name. An + installed package corresponding to the main (unnamed) library of a Cabal + package has the same name as the Cabal package. An installed package + corresponding to a sub-library of a Cabal package has a 'munged' name that + reflects the name of the Cabal package and the name of the sub-library. An + installed package corresponding to a sub-library also has a `package-name`, + which is the name of the Cabal package. + + The `--package` option of `stack script` makes use of GHC's `-package-id` + option to expose an installed package, rather than its `-package` option. + The latter option treats `package-name` (if it exists) as if it were also + the name of the installed package. That means, for a Cabal package with one + or more sub-libraries, the GHC option `-package=` cannot distinguish + between (a) the installed package `` corresponding to the main library + of Cabal package `` and (b) an installed package corresponding to a + sub-library of that Cabal package. The installed package that GHC picks to + expose is indeterminate. This can cause GHC to pick the wrong installed + package and to report that it cannot load a module because it is a member of + a hidden package. + +## Compilation + +The source file can be compiled by passing either the `--compile` flag (no +optimization) or the `--optimize` flag (compilation with optimization). 
If the +file is compiled, passing the `--no-run` flag will mean the compiled code is not +run. + +By default, all the compilation outputs (including the executable) are written +to the directory of the source file. Pass the `--use-root` flag to write such +outputs to a script-specific location in the `scripts` directory of the Stack +root. The location reflects the absolute path to the source file, but ignoring +the drive. This can avoid clutter in the source file directory. + +## GHC options + +Additional options can be passed to GHC using the `--ghc-options` option. + +## Script arguments + +Everything after `--` on the command line is interpreted as a command line +argument to be passed to what is run. + +## Examples + +### Example 1 + +A Haskell source file `MyScript.hs` at location +`Users/jane/my-project` (where `` could be `/` on Unix-like +operating systems or `C:/` or similar on Windows): + +~~~haskell +module Main (main) where + +import Data.List (intercalate) +import System.Environment (getArgs) + +import Acme.Missiles (launchMissiles) + +main :: IO () +main = do + advices <- getArgs + launchMissiles + putStrLn $ intercalate "\n" advices +~~~ + +can be compiled and run, with arguments, with: + +~~~text +stack --snapshot lts-24.37 script --package acme-missiles --compile MyScript.hs -- "Don't panic!" "Duck and cover!" +~~~ + +`acme-missiles-0.3` (the most recent version in the package index) will be used. + +All the compilation outputs (like `Main.hi`, `Main.o`, and the executable +`MyScript`) will be written to the `my-project` directory. + +If compiled and run with the additional flag `--use-root`, all the compilation +outputs will be written to a directory named `MyScript.hs` at +`Users/jane/my-project/` in the `scripts` directory of the Stack root. + +### Example 2 + +As for Example 1, but `acme-missiles-0.2` is specified by adding it to the +snapshot as an extra-dep. 
The `stack script` command is specified using Stack's +[script interpreter](../topics/scripts.md). + +~~~haskell +{- stack script + -- snapshot lts-24.37 + -- extra-dep acme-missiles-0.2 + -- package acme-missiles +-} +module Main (main) where + +import Data.List (intercalate) +import System.Environment (getArgs) + +import Acme.Missiles (launchMissiles) + +main :: IO () +main = do + advices <- getArgs + launchMissiles + putStrLn $ intercalate "\n" advices +~~~ + +~~~text +stack MyScript.hs "Don't panic!" "Duck and cover!" +~~~ + +### Example 3 + +Stackage snapshot LTS Haskell 20.25 includes GHC boot package `Win32` directly. +On Windows only, GHC boot packages `Cabal`, `directory`, `process` and `time` +all depend on `Win32` and, consequently, are all treated as 'replaced'. +Consequently, for example, Stack will: + +* on Windows, try to construct a build plan based on the latest version of + `Cabal` in the package index; and +* on non-Windows, use the boot package in the build plan (because `Cabal` is not + 'replaced'). + +Consider also the following script extract, based on snapshot Stackage + LTS Haskell 20.25, where considerations on Windows differ from non-Windows. The +`stack script` command is specified using Stack's +[script interpreter](../topics/scripts.md). + +=== "Windows" + + ~~~haskell + {- stack script + --snapshot lts-20.25 + --extra-dep acme-missiles-0.3 + --extra-dep directory-1.3.6.2 + --extra-dep process-1.6.16.0 + --extra-dep time-1.11.1.1 + -} + + import Acme.Missiles -- from acme-missiles + import Data.Time.Clock.System -- from time + import System.Time.Extra -- from extra + + ... + ~~~ + + `acme-missiles` is not in the snapshot and so needs to be specified as an + extra-dep. + + Stack can deduce that the module imports imply that the required packages + are `acme-missiles`, `time` and `extra` (which is in the snapshot). + + `extra` depends on `directory` and `process`. 
If `directory` and `process` + are not specified as extra-deps, Stack will complain that they have been + 'pruned'. + + `directory-1.3.6.2` depends on `time < 1.12`. If `time` is not specified as + an extra-dep, Stack will try to construct a build plan based on the latest + version in the package index (which will fail, as the latest version is + `>= 1.12`) + +=== "Unix-like" + + ~~~haskell + {- stack script + --snapshot lts-20.25 + --extra-dep acme-missiles-0.3 + -} + + import Acme.Missiles -- from acme-missiles + import Data.Time.Clock.System -- from time + import System.Time.Extra -- from extra + + ... + ~~~ + + `acme-missiles` is not in the snapshot and so needs to be specified as an + extra-dep. + + Stack can deduce that the module imports imply that the required packages + are `acme-missiles`, `time` and `extra` (which is in the snapshot). + + All the other dependencies required are either GHC boot packages (which have + not been 'replaced') or in the snapshot. + +### Example 4 + +A Haskell source file `MyScript.hs`, as follows: + +~~~haskell +{- stack script + --snapshot lts-24.37 +-} + +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PackageImports #-} + +module Main (main) where + +import "text" Data.Text (Text (..), unpack) + +main :: IO () +main = putStrLn $ unpack "This is text." +~~~ + +As module `Data.Text` is exposed by a number of packages that are included, +directly or indirectly, in the specified snapshot (`incipit-base`, +`incipit-core`, `relude` and `text`), `PackageImports` and `"text"` are required +to specify which module is being imported. + +### Example 5 + +Stackage snapshot LTS Haskell 23.18 specifies Cabal package `vector-0.13.2.0` +which includes public sub-library `benchmarks-O2`. The sub-library exposes +module `Bench.Vector.TestData.ParenTree` which exports `parenTree`. 
The +following is a valid script: + +~~~haskell +{- stack script + --snapshot lts-23.18 + --package z-vector-z-benchmarks-O2 +-} +{-# LANGUAGE LambdaCase #-} + +import Bench.Vector.TestData.ParenTree ( parenTree ) +import System.Environment ( getArgs ) + +main :: IO () +main = getArgs >>= \case + [] -> putStrLn "An initial argument is required." + (arg:_) -> do + let n = read arg + if n >= 0 && even n + then do + putStrLn "A balanced binary tree structure" + putStrLn $ "with " <> show n <> " parentheses positions:" + print $ parenTree n + else + putStrLn "A positive even integer argument is required." +~~~ diff --git a/doc/commands/sdist_command.md b/doc/commands/sdist_command.md new file mode 100644 index 0000000000..e0ad8c6898 --- /dev/null +++ b/doc/commands/sdist_command.md @@ -0,0 +1,47 @@ +
+ +# The `stack sdist` command + +~~~text +stack sdist [DIR] [--pvp-bounds PVP-BOUNDS] [--ignore-check] + [--[no-]test-tarball] [--tar-dir ARG] +~~~ + +Hackage only accepts packages for uploading in a standard form, a compressed +archive ('tarball') in the format produced by Cabal's `sdist` action. + +`stack sdist` generates a file for your package, in the format accepted by +Hackage for uploads. The command will report the location of the generated file. + +By default: + +* a file is generated for each project package. In the alternative, one or + more project package directories can be specified; + +* the command will check the package for common mistakes. Pass the flag + `--ignore-check` to disable the checks; + +* Stack will not test the generated file by attempting to build it. Pass the + flag `--test-tarball` to cause Stack to test the generated file; + +* the generated file will be created in the `dist` directory of the project + package directory. For information about the directory's location, command + [`stack path --dist-dir`](path_command.md). Pass the option + ``--tar-dir ` to also copy the file to the specified + directory; and + +* no PVP version bounds are added to the Cabal file of the package. Pass the + option `--pvp-bounds ` to determine whether and, if so, + how bounds should be added. The available modes for basic use are: `none`, + `lower`, `upper`, and `both`. The available modes for use with Cabal file + revisions are `lower-revision`, `upper-revision` and `both-revision`. + + For futher information, see the + [`pvp-bounds`](../configure/yaml/non-project.md#pvp-bounds) non-project + specific configuration option documentation. + +!!! note + + Stack checks a package for common mistakes using checks provided by Cabal + (the library). The version of Cabal used by Stack to check may be + older than the version used by Stack to build. 
diff --git a/doc/commands/setup_command.md b/doc/commands/setup_command.md new file mode 100644 index 0000000000..fccb980c3c --- /dev/null +++ b/doc/commands/setup_command.md @@ -0,0 +1,115 @@ +
+ +# The `stack setup` command + +~~~text +stack setup [GHC_VERSION] [--[no-]reinstall] [--ghc-bindist URL] + [--ghcjs-boot-options GHCJS_BOOT] [--[no-]ghcjs-boot-clean] +~~~ + +`stack setup` attempts to install a version of GHC and, on Windows, MSYS2. + +By default: + +* the version of GHC is the one required by the project. Specify the version of + GHC as an argument to attempt to install a different version of GHC. For + example `stack setup 9.10.3` will attempt to install GHC 9.10.3; and + +* an attempt to install is made only if the version of GHC is not already + available to Stack. Pass the flag `--reinstall` (disabled by default) to + attempt to install the version of GHC regardless of whether it is already + available to Stack. + +Pass the option `--ghc-bindist ` to specify the URL of the GHC to be +downloaded and installed. This option requires the use of the `--ghc-variant` +option specifying a custom GHC variant. For further information about the +`--ghc-variant` option, see the +[`ghc-variant`](../configure/yaml/non-project.md#ghc-variant) non-project +specific configuration option documentation. + +If Stack is configured not to install GHC (`install-ghc: false` or passing the +`--no-install-ghc` flag) or not to install MSYS2 (`install-msys: false` or +passing the `--no-install-msys` flag) then `stack setup` will warn that the flag +and the command are inconsistent and take no action. + +=== "Linux" + + A particular binary distribution of GHC will depend on certain libraries, + which need to be available. + + There are many different Linux distributions and different versions of a + particular Linux distribution. One Linux distribution/version may make + available different libraries to another Linux distribution/version. + + In attempting to identify the particular binary distribution of GHC that is + required on Linux, Stack will refer to the presence or absence of certain + libraries or the versions of those libraries. 
+ + For example, Stack 3.1.1 considers: + + * If `libc.musl-x86_64.so.1` is present. This file is provided by the + [musl libc](https://musl.libc.org/). + + [:octicons-tag-24: 2.13.1](https://github.com/commercialhaskell/stack/releases/tag/v2.13.1) + + * The version of `libc6` (if musl libc is not applicable), the + [GNU C Library](https://www.gnu.org/software/libc/) (glibc), that is + present. The GNU C Library is designed to be backwards compatible. + + [:octicons-tag-24: 2.11.1](https://github.com/commercialhaskell/stack/releases/tag/v2.11.1) + + * If `libgmp.so.3` or `libgmp.so.10` is present. These files are provided + by different versions of the + [GNU Multiple Precision Arithmetic Library](https://gmplib.org/). + + * If `libncursesw.so.6` is present. This file is provided by a shared + library for terminal handling with wide character support. + + * If `libtinfo.so.5` or `libtinfo.so.6` is present. These files are + provided by different versions of a shared low-level terminfo library + for terminal handling. + + Stack 3.1.1 uses `ghc-build`: + + * `musl` to indicate `libc.musl-x86_64.so.1` is present and Stack should use + the GHC binary distribution for Alpine Linux. + * `tinfo6` to indicate `libgmp.so.10` and `libtinfo.so.6` are present and + `libc6` is compatible with `libc6` 2.32. + * `tinfo6-libc6-pre232` to indicate `libgmp.so.10` and `libtinfo.so.6` are + present and `libc6` is not compatible with `libc6` 2.32. + * `ncurses6` to indicate `libgmp.so.10` and `libncursesw.so.6` are present + * `gmp4` to indicate `libgmp.so.3` is present + + By default, Stack associates: + + * the `tinfo6` build with the 'Fedora 33' binary distribution of GHC 9.4.1 + to 9.4.3 and 9.6.1 and later. Those binary distributions require versions + of `libc6` that are compatible with `libc6` 2.32; + + * the `tinfo6-libc6-pre232` build with the 'Debian 10' binary distribution + of GHC 9.4.1 to 9.4.3 and the 'Rocky 8' binary distribution of GHC 9.6.1 + and later. 
Those binary distributions require versions of `libc6` that are + compatible with `libc6` 2.28; and + + * the `tinfo6` and `tinfo6-libc6-pre232` builds with the 'Fedora 27' binary + distribution of GHC 9.4.4 to 9.4.8. Those binary distributions require + versions of `libc6` that are compatible with `libc6` 2.26. + +=== "Windows" + + Since Windows 10 version 1607, Windows has been 'long file paths' capable + but that capability is not enabled by default. Consequently, Stack does not + assume that it is being used on a system that is 'long file paths' enabled. + + Stack uses the 7-Zip tool to decompress, and extract tools from, downloaded + archive files. Aiming to avoid long file paths, Stack does so in a temporary + directory (named `stack-tmp-`) on the drive of the final destination + (either in the system temporary directory, where applicable, or the root of + the drive). Consequently, Stack needs permission to create a directory in + that location. + + A Windows user account will usually have permission to create a directory in + the system temporary directory. A Windows user account of type Administrator + will have permission to create a directory in the root of the system drive + (`C:\`, by convention) but a Standard user account may well not have + permission. diff --git a/doc/commands/templates_command.md b/doc/commands/templates_command.md new file mode 100644 index 0000000000..5eabfb59e1 --- /dev/null +++ b/doc/commands/templates_command.md @@ -0,0 +1,35 @@ +
+ +# The `stack templates` command + +~~~text +stack templates +~~~ + +`stack templates` provides information to the standard output stream about +project templates used with the [`stack new` command](new_command.md). + +Project templates are specified in `.hsfiles` files. The format of those files +is documented at the +[`commercialhaskell/stack-templates`](https://github.com/commercialhaskell/stack-templates#project-template-format) +repository on GitHub. + +Any GitHub, GitLab, Bitbucket or Codeberg repository named `stack-templates` +can provide project template files. For example, a template file +`username/stack-templates/my-template.hsfiles` on GitHub can be identified as +`username/my-template` when using `stack new`. The relevant service can be +specified by a prefix: `github:` for [GitHub](https://github.com/) (the default +service), `gitlab:` for [GitLab](https://gitlab.com), `bitbucket:` for +[Bitbucket](https://bitbucket.com), or `codeberg:` for +[Codeberg](https://codeberg.org). + +[`commercialhaskell/stack-templates`](https://github.com/commercialhaskell/stack-templates#project-template-format) +on GitHub is the default repository for project templates. Its username +(`commercialhaskell`) does not need to be specified when using `stack new`. + +The project template that `stack new` uses by default is named `new-template` +and provided at the default repository. + +The default repository provides 24 other project templates. Its Wiki provides +a description of some of those templates and information about the location of +other templates. diff --git a/doc/commands/test_command.md b/doc/commands/test_command.md new file mode 100644 index 0000000000..12f37f5544 --- /dev/null +++ b/doc/commands/test_command.md @@ -0,0 +1,29 @@ +
+ +# The `stack test` command + +~~~text +stack test [TARGET] [--dry-run] [--pedantic] [--fast] [--ghc-options OPTIONS] + [--flag PACKAGE:[-]FLAG] [--dependencies-only | --only-snapshot | + --only-dependencies | --only-locals] [--file-watch | + --file-watch-poll] [--watch-all] [--exec COMMAND [ARGUMENT(S)]] + [--only-configure] [--trace] [--profile] [--no-strip] + [--[no-]library-profiling] [--[no-]executable-profiling] + [--[no-]library-stripping] [--[no-]executable-stripping] + [--[no-]haddock] [--haddock-arguments HADDOCK_ARGS] + [--[no-]open] [--[no-]haddock-deps] [--[no-]haddock-internal] + [--[no-]haddock-hyperlink-source] [--[no-]haddock-for-hackage] + [--[no-]copy-bins] [--[no-]copy-compiler-tool] [--[no-]prefetch] + [--[no-]keep-going] [--[no-]keep-tmp-files] [--[no-]force-dirty] + [--[no-]test] [--[no-]rerun-tests] [--ta|--test-arguments TEST_ARGS] + [--coverage] [--[no-]run-tests] [--test-suite-timeout SECONDS] + [--test-suite-timeout-grace SECONDS] [--[no-]tests-allow-stdin] + [--[no-]bench] [--ba|--benchmark-arguments BENCH_ARGS] + [--[no-]run-benchmarks] [--[no-]reconfigure] + [--cabal-verbosity VERBOSITY | --[no-]cabal-verbose] + [--[no-]split-objs] [--skip ARG] [--[no-]interleaved-output] + [--ddump-dir ARG] +~~~ + +`stack test` is a synonym for `stack build --test`. For further information, +see the documentation for the [`stack build`](build_command.md) command. diff --git a/doc/commands/uninstall_command.md b/doc/commands/uninstall_command.md new file mode 100644 index 0000000000..411c879fb7 --- /dev/null +++ b/doc/commands/uninstall_command.md @@ -0,0 +1,11 @@ +
+ +# The `stack uninstall` command + +~~~text +stack uninstall +~~~ + +`stack uninstall` provides information to the standard output stream about how +to uninstall Stack or a Stack-supplied tool (such as GHC or, on Windows, MSYS2). +It does not itself uninstall Stack or a Stack-supplied tool. diff --git a/doc/commands/unpack_command.md b/doc/commands/unpack_command.md new file mode 100644 index 0000000000..b339cfc3fe --- /dev/null +++ b/doc/commands/unpack_command.md @@ -0,0 +1,51 @@ +
+ +# The `stack unpack` command + +~~~text +stack unpack TARGET [--candidate] [--to DIR] +~~~ + +`stack unpack` downloads an archive file for one or more specified target +packages from the package index (e.g. Hackage), or one or more specified target +package candidates, and unpacks each archive into a subdirectory named after the +package version. + +In the case of packages from the package index, a target can be a package +name only. In that case, by default: + +* if Stack's `--snapshot` option is not specified, the download is for the + most recent version of the package in the package index. Stack will first + seek to update the index; and + +* if Stack's `--snapshot` option is specified, the download is for the version + of the package included directly in the specified snapshot. + +!!! note + + Stackage snapshots are not expected to include directly GHC boot packages + (packages that come with GHC and are included in GHC's global package + database) but some such snapshots may include directly some boot packages. + In particular, some snapshots include directly `Win32` (which is a boot + package on Windows) while most do not. + +Otherwise, a target should specify a package name and version (for example, +`acme-missiles-0.3`). In the case of package versions from the package index, +optionally, a revision in the package index can be specified by appending +`@rev:` or `@sha256:` (for example, `acme-missiles-0.3@rev:0`). + +By default: + +* the download is from the package index. Pass the flag `--candidate` to + specify package candidates; and + + !!! note + + Stack assumes that a package candidate archive is a `.tar.gz` file named + after the package version and located at endpoint + `package\\candidate\`. This is true of Hackage. + +* the target is unpacked into a subdirectory of the current directory. Pass + the option `--to ` to specify an alternative destination + directory to the current directory. 
The destination directory can be an + absolute one or relative to the current directory. diff --git a/doc/commands/update_command.md b/doc/commands/update_command.md new file mode 100644 index 0000000000..b5099c65cf --- /dev/null +++ b/doc/commands/update_command.md @@ -0,0 +1,12 @@ +
+ +# The `stack update` command + +~~~text +stack update +~~~ + +Generally, Stack automatically updates the package index when necessary. + +`stack update` will download the most recent set of packages from your package +indices (e.g. Hackage). diff --git a/doc/commands/upgrade_command.md b/doc/commands/upgrade_command.md new file mode 100644 index 0000000000..59a08dadd6 --- /dev/null +++ b/doc/commands/upgrade_command.md @@ -0,0 +1,105 @@ +
+ +# The `stack upgrade` command + +Either: + +~~~text +stack upgrade [--binary-only] [--binary-platform ARG] [--force-download] + [--[no-]only-local-bin] [--binary-version ARG] [--github-org ARG] + [--github-repo ARG] +~~~ + +or: + +~~~text +stack upgrade [--source-only] [--git] [--git-repo ARG] [--git-branch ARG] +~~~ + +`stack upgrade` will get a new version of Stack. It can also get a version +before the current version (downgrade). + +!!! warning + + If you use GHCup to install Stack, use only GHCup to upgrade Stack. + +By default: + +* the new version will be from an existing binary distribution. Pass the + `--source-only` flag to specify compiling from source code. The + `--binary-only` and `--source-only` flags are alternatives; + +* the new version will not overwrite the existing version unless it is newer. + Pass the `--force-download` flag to force a download; + +* when an existing binary distribution is applicable, it will be put in Stack's + local binary directory (see `stack path --local-bin`) and named `stack` + (replacing any existing executable named `stack` there); + +* if the current running Stack executable is '`stack`' (that is, it was invoked + as `stack` or, on Windows, `stack.exe` - this is case insensitive - and the + Stack executable file is named `stack` or, on Windows, `stack.exe` - this is + case sensitive), an existing binary distribution will replace it. If the + executable is located outside of Stack's local binary directory, pass the + `--only-local-bin` flag to skip that step; + +* if the current running Stack executable is not '`stack`' (as described above), + an existing binary distribution will only be put in Stack's local binary + directory and named `stack`. Pass the `--no-only-local-bin` flag to replace + also the current running executable; + +* the new version will be the latest available. 
Pass the + `--binary-version ` option to specify the version (this implies + `--force-download`); + +* the binary distribution will be sought from the GitHub organisation/user + `commercialhaskell`. Pass the `--github-org ` option to specify a + different GitHub user; + +* the binary distribution will be sought from the GitHub repository `stack`. + Pass the `--github-repo ` option to specify a different + repository; and + +* the binary distribution will be sought for the current platform. Pass the + `--binary-platform ` option to specify a different platform + (`--`). + +When compiling from source code, by default: + +* Stack will obtain the source code for the most recent version in the package + index (eg Hackage). Pass the flag `--git` to specify the most recent version + from the `master` branch of Stack's repository (pass the option + `--git-branch ` to specify a different branch and the option + `--git-repo ` to specify a different repository). + +!!! note + + An earlier version of Stack could be inconsistent with some of the current + contents of the Stack root. For further information about the contents of + the Stack root and configuring its location, see the documentation about the + [Stack root](../topics/stack_root.md). + +## Examples + +* `stack upgrade` seeks an upgrade to the latest version of Stack available as a + binary distribution for the platform, if newer. + +* `stack upgrade --force-download` seeks an upgrade to the latest version of + Stack available as a binary distribution for the platform, even if not newer. + +* If the Stack executable is invoked as `my-stack`, `my-stack upgrade` seeks + only to put the latest version of Stack available as a binary distribution for + the platform, if newer, in Stack's local binary directory and name it `stack`. + `my-stack upgrade --no-only-local-bin` seeks also to upgrade `my-stack` to the + latest version of Stack available. 
+ +* `stack upgrade --binary-version 2.15.1` seeks an upgrade to Stack 2.15.1 if + available as a binary distribution for the platform, even if not newer. + +* `stack upgrade --source-only` seeks an upgrade by building Stack with + Stack from the latest version of the source code in the package index + (i.e. Hackage). + +* `stack upgrade --source-only --git` seeks an upgrade by building Stack with + Stack from the latest version of the source code in the `master` branch of + Stack's repository. diff --git a/doc/commands/upload_command.md b/doc/commands/upload_command.md new file mode 100644 index 0000000000..f9ab6d2a40 --- /dev/null +++ b/doc/commands/upload_command.md @@ -0,0 +1,151 @@ +
+ +# The `stack upload` command + +~~~text +stack upload [ITEM] [-d|--documentation] [--pvp-bounds PVP-BOUNDS] + [--ignore-check] [--[no-]test-tarball] [--tar-dir ARG] + [--candidate] [--[no-]save-hackage-creds] [--setup-info-yaml URL] + [--snapshot-location-base URL] +~~~ + +By default: + +* the command uploads one or more packages. Pass the flag `--documentation` + (`-d` for short) to upload documentation for one or more packages; + +* the upload is a package to be published or documentation for a published + package. Pass the flag `--candidate` to upload a + [package candidate](http://hackage.haskell.org/upload#candidates) or + documentation for a package candidate; and + +* the command prompts to save the user's Hackage username and password in a + local file. Pass the flag `--no-save-hackage-creds` to avoid the prompt. + +At least one `ITEM` must be specified. For example, if the current working +directory is a package directory: + +~~~text +stack upload . +~~~ + +## Upload one or more packages + +Hackage accepts packages for uploading in a standard form, a compressed archive +('tarball') in the format produced by Cabal's `sdist` action. + +If `ITEM` is a relative path to an sdist tarball, `stack upload` uploads the +package to Hackage. + +If `ITEM` is a relative path to a package directory, `stack upload` generates a +file for your package, in the format accepted by Hackage for uploads, and +uploads the package to Hackage. + +By default: + +* the command will check each package for common mistakes. For further + information, see the [`stack sdist` command](sdist_command.md) documentation. + Pass the flag `--ignore-check` to disable such checks; and + +* Stack will not test the resulting package archive. Pass the flag + `--test-tarball` to cause Stack to test each resulting package archive, by + attempting to build it. + +The `--pvp-bounds ` option determines whether and, if so, how +PVP version bounds should be added to the Cabal file of the package. 
The +available modes for basic use are: `none`, `lower`, `upper`, and `both`. The +available modes for use with Cabal file revisions are `lower-revision`, +`upper-revision` and `both-revision`. + +For futher information, see the +[`pvp-bounds`](../configure/yaml/non-project.md#pvp-bounds) non-project +specific configuration option documentation. + +The `--tar-dir ` option determines whether the package +archive should be copied to the specified directory. + +## Upload documentation for a package + +:octicons-beaker-24: Experimental + +[:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) + +Hackage accepts documentation for a package for uploading in a standard form and +in a compressed archive ('tarball') in the `.tar.gz` format. + +For further information about how to create such an archive file, see the +documentation for the +[`stack haddock --haddock-for-hackage`](build_command.md#-no-haddock-for-hackage-flag) +command. + +If `ITEM` is a relative path to a package directory, +`stack upload --documentation` uploads an existing archive +file of documentation for the specified package to Hackage. + +If the `--documentation` flag is passed then flags specific to package upload +are ignored. + +## The `HACKAGE_USERNAME` and `HACKAGE_PASSWORD` environment variables + +[:octicons-tag-24: 2.3.1](https://github.com/commercialhaskell/stack/releases/tag/v2.3.1) + +`stack upload` will request a Hackage username and password to authenticate. +This can be avoided by setting the `HACKAGE_USERNAME` and `HACKAGE_PASSWORD` +environment variables. For +example: + +=== "Unix-like" + + ~~~text + export $HACKAGE_USERNAME="" + export $HACKAGE_PASSWORD="" + stack upload . + ~~~ + +=== "Windows" + + ~~~text + $Env:HACKAGE_USERNAME='' + $Env:HACKAGE_PASSWORD='' + stack upload . + ~~~ + +=== "Windows (Command Prompt)" + + ~~~text + set HACKAGE_USERNAME= + set HACKAGE_PASSWORD= + stack upload . 
+ ~~~ + +## The `HACKAGE_KEY` environment variable + +[:octicons-tag-24: 2.7.5](https://github.com/commercialhaskell/stack/releases/tag/v2.7.5) + +Hackage allows its members to register an API authentification token and to +authenticate using the token. + +A Hackage API authentification token can be used with `stack upload` instead of +username and password, by setting the `HACKAGE_KEY` environment variable. For +example: + +=== "Unix-like" + + ~~~text + HACKAGE_KEY= + stack upload . + ~~~ + +=== "Windows" + + ~~~text + $Env:HACKAGE_KEY= + stack upload . + ~~~ + +=== "Windows (Command Prompt)" + + ~~~text + set HACKAGE_KEY= + stack upload . + ~~~ diff --git a/doc/community/index.md b/doc/community/index.md new file mode 100644 index 0000000000..72efd2361f --- /dev/null +++ b/doc/community/index.md @@ -0,0 +1,75 @@ +--- +title: Get involved +--- +
+ +# Get involved + +## Feedback and discussion + +* For general comments, feedback and support, please post to the + [Haskell Community](https://discourse.haskell.org/about) forum. +* For bugs, issues, or requests, please + [open an issue](https://github.com/commercialhaskell/stack/issues/new). +* When using Stack Overflow, please use the + [haskell-stack](http://stackoverflow.com/questions/tagged/haskell-stack) tag. + +## How to contribute to the maintenance or development of Stack + +A [guide](../CONTRIBUTING.md) is provided to help potential contributors to the +Stack project. + +If you have already installed a version of Stack and the +[Git application](https://git-scm.com/) the followings steps should get you +started with building Stack from source with Stack: + +1. Clone the `stack` repository from GitHub with the command: + + ~~~text + git clone https://github.com/commercialhaskell/stack.git + ~~~ + +2. Change the current working directory to the cloned `stack` directory with + the command: + + ~~~text + cd stack + ~~~ + +3. Build the `stack` executable using a preexisting installation of Stack with + the command: + + ~~~text + stack build + ~~~ + +4. Once the `stack` executable has been built, check its version with the + command: + + ~~~text + stack exec -- stack --version + ~~~ + + Make sure the version is the latest one. + +5. In the GitHub repository's issue tracker, look for issues tagged with + [newcomer friendly](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3a%22newcomer+friendly%22) + and + [awaiting pull request](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22) + labels. 
If you need to check your changes quickly, command:
+ +# Customisation scripts + +## GHC installation customisation + +[:octicons-tag-24: 2.9.1](https://github.com/commercialhaskell/stack/releases/tag/v2.9.1) + +On Unix-like operating systems and Windows, Stack's installation procedure can +be fully customised by placing a `sh` shell script (a 'hook') in the +[Stack root](../topics/stack_root.md) directory at `hooks/ghc-install.sh`. On +Unix-like operating systems, the script file must be made executable. The script +is run by the `sh` application (which is provided by MSYS2 on Windows). + +The script **must** return an exit code of `0` and the standard output **must** +be the absolute path to the GHC binary that was installed. Otherwise Stack will +ignore the script and possibly fall back to its own installation procedure. + +When `system-ghc: true`, the script is not run. That is because the two +mechanisms reflect distinct concepts, namely: + +* `system-ghc: true` causes Stack to search the PATH for a version of GHC; and + +* `hooks/ghc-install.sh` causes Stack to execute a script that is intended to + send to standard output a path to a version of GHC. The path in question may + or may not be in the PATH. The script may also do other things, including + installation. + +When `install-ghc: false`, the script is still run. That allows you to ensure +that only your script will install GHC and Stack will not default to its own +installation logic, even when the script fails. 
+ +The following environment variables are always available to the script: + +* `HOOK_GHC_TYPE = "bindist" | "git" | "ghcjs"` + +For "bindist", additional variables are: + +* `HOOK_GHC_VERSION = ` + +For "git", additional variables are: + +* `HOOK_GHC_COMMIT = ` +* `HOOK_GHC_FLAVOR = ` + +For "ghcjs", additional variables are: + +* `HOOK_GHC_VERSION = ` +* `HOOK_GHCJS_VERSION = ` + +An example script is: + +~~~sh +#!/bin/sh + +set -eu + +case $HOOK_GHC_TYPE in + bindist) + # install GHC here, not printing to stdout, e.g.: + # command install $HOOK_GHC_VERSION >/dev/null + ;; + git) + >&2 echo "Hook does not support installing from source" + exit 1 + ;; + *) + >&2 echo "Unsupported GHC installation type: $HOOK_GHC_TYPE" + exit 2 + ;; +esac + +echo "location/to/ghc/executable" +~~~ + +If the following script is installed by GHCup, GHCup makes use of it, so that if +Stack needs a version of GHC, GHCup takes over obtaining and installing that +version: + +~~~sh +#!/bin/sh + +set -eu + +case $HOOK_GHC_TYPE in + bindist) + ghc_path=$( + ghcup whereis ghc "$HOOK_GHC_VERSION" || + { + ghcup install ghc "$HOOK_GHC_VERSION" >/dev/null && + ghcup whereis ghc "$HOOK_GHC_VERSION" + } + ) || { + >&2 echo "Installing $HOOK_GHC_VERSION via ghcup failed" + exit 3 + } + printf "%s" "${ghc_path}" + ;; + git) + # TODO: should be somewhat possible + >&2 echo "Hook doesn't support installing from source" + exit 1 + ;; + *) + >&2 echo "Unsupported GHC installation type: $HOOK_GHC_TYPE" + exit 2 + ;; +esac +~~~ + +## `--file-watch` post-processing + +[:octicons-tag-24: 3.1.1](https://github.com/commercialhaskell/stack/releases/tag/v3.1.1) + +On Unix-like operating systems and Windows, Stack's `build --file-watch` +post-processing can be fully customised by specifying an executable or a `sh` +shell script (a 'hook') using the +[`file-watch-hook`](yaml/non-project.md#file-watch-hook) +non-project specific configuration option. 
On Unix-like operating systems, the +script file must be made executable. A script is run by the `sh` application +(which is provided by MSYS2 on Windows). + +The following environment variables are always available to the executable or +script: + +* `HOOK_FW_RESULT` (Equal to `""` if the build did not fail. Equal to the result + of `displayException e`, if exception `e` thown during the build.) + +An example script is: + +~~~sh +#!/bin/sh + +set -eu + +if [ -z "$HOOK_FW_RESULT" ]; then + echo "Success! Waiting for next file change." +else + echo "Build failed with exception:" + echo $HOOK_FW_RESULT +fi +~~~ diff --git a/doc/configure/environment_variables.md b/doc/configure/environment_variables.md new file mode 100644 index 0000000000..069fe01b4c --- /dev/null +++ b/doc/configure/environment_variables.md @@ -0,0 +1,173 @@ +
+ +# Stack's environment variables + +The environment variables listed in alphabetal order below can affect how Stack +behaves. + +If using Stack's +[Nix integration](../topics/nix_integration.md#pure-and-impure-nix-shells) and a +*pure* Nix build environment (or *shell*), by default, these environment +variables will not be forwarded from your user session to the Nix shell. +However, the Nix shell can be configured to keep specific environment variables. + +## `GH_TOKEN` or `GITHUB_TOKEN` + +[:octicons-tag-24: 2.11.1](https://github.com/commercialhaskell/stack/releases/tag/v2.11.1) + +Stack will use the value of the `GH_TOKEN` or, in the alternative, +`GITHUB_TOKEN` environment variable (if not an empty string) as credentials to +authenticate its requests of the GitHub REST API, using HTTP 'Basic' +authentication. + +GitHub limits the rate of unauthenticated requests to its API, although most +users of Stack will not experience that limit from the use of Stack alone. The +limit for authenticated requests is significantly higher. + +For more information about authentication of requests of the GitHub REST API, +see GitHub's REST API documentation. + +## `HACKAGE_KEY` + +[:octicons-tag-24: 2.7.5](https://github.com/commercialhaskell/stack/releases/tag/v2.7.5) + +Related command: [`stack upload`](../commands/upload_command.md) + +Hackage allows its members to register an API authentification token and to +authenticate using the token. + +A Hackage API authentification token can be used with `stack upload` instead of +username and password, by setting the `HACKAGE_KEY` environment variable. For +example: + +=== "Unix-like" + + ~~~text + HACKAGE_KEY= + stack upload . + ~~~ + +=== "Windows" + + ~~~text + $Env:HACKAGE_KEY= + stack upload . + ~~~ + +=== "Windows (Command Prompt)" + + ~~~text + set HACKAGE_KEY= + stack upload . 
+ ~~~ + +## `HACKAGE_USERNAME` and `HACKAGE_PASSWORD` + +[:octicons-tag-24: 2.3.1](https://github.com/commercialhaskell/stack/releases/tag/v2.3.1) + +Related command: [`stack upload`](../commands/upload_command.md) + +`stack upload` will request a Hackage username and password to authenticate. +This can be avoided by setting the `HACKAGE_USERNAME` and `HACKAGE_PASSWORD` +environment variables. For +example: + +=== "Unix-like" + + ~~~text + export $HACKAGE_USERNAME="" + export $HACKAGE_PASSWORD="" + stack upload . + ~~~ + +=== "Windows" + + ~~~text + $Env:HACKAGE_USERNAME='' + $Env:HACKAGE_PASSWORD='' + stack upload . + ~~~ + +=== "Windows (Command Prompt)" + + ~~~text + set HACKAGE_USERNAME= + set HACKAGE_PASSWORD= + stack upload . + ~~~ + +## `NO_COLOR` + +Related command: all commands that can produce colored output using control +character sequences. + +Stack follows the standard at http://no-color.org/. Stack checks for a +`NO_COLOR` environment variable. When it is present and not an empty string +(regardless of its value), Stack prevents the addition of control character +sequences for color to its output. + +## `STACK_CONFIG` + +Related command: all commands that make use of Stack's +[global configuration](yaml/index.md) files (`config.yaml`). + +The environment variable `STACK_CONFIG` can be used to specify an absolute path +to the user-specific global configuration file, overriding the default. + +## `STACK_GLOBAL_CONFIG` + +Related command: all commands that make use of Stack's +[global configuration](yaml/index.md) files (`config.yaml`). + +The environment variable `STACK_GLOBAL_CONFIG` can be used to specify an +absolute path to the system-wide global configuration file, overriding the +default. + +## `STACK_ROOT` + +Related command: all commands that make use of Stack's +[user-specific global configuration](yaml/index.md) file (`config.yaml`). + +Overridden by: Stack's global +[`--stack-root`](global_flags.md#-stack-root-option) option. 
+ +The environment variable `STACK_ROOT` can be used to specify the +[Stack root](../topics/stack_root.md) directory. + +## `STACK_WORK` + +Related command: all commands that make use of Stack's work directories. + +Overridden by: Stack's [`work-dir`](yaml/non-project.md#work-dir) non-project +specific configuration option, or global +[`--work-dir`](global_flags.md#-work-dir-option) option. + +The environment variable `STACK_WORK` can be used to specify the path of Stack's +work directory, within a local project or package directory, and override +Stack's default of `.stack-work`. The path must be a relative one, relative to +the root directory of the project or package. The relative path cannot include a +`..` (parent directory) component. + +## `STACK_XDG` + +Related command: all commands that make use of Stack's +[user-specific global configuration](yaml/index.md) file (`config.yaml`). + +Overridden by: the use of Stack's `STACK_ROOT` environment variable, or the use +of Stack's global +[`--stack-root`](global_flags.md#-stack-root-option) option. + +On Unix-like operating systems and Windows, Stack can be configured to follow +the XDG Base Directory Specification if the environment variable `STACK_XDG` is +set to any non-empty value. + +## `STACK_YAML` + +Related command: all commands that make use of Stack's +[project-level configuration](yaml/index.md). + +Overridden by: Stack's global +[`--stack-yaml`](global_flags.md#-stack-yaml-or-w-option) option. + +The environment variable `STACK_YAML` can be used to specify Stack's +project-level configuration file. diff --git a/doc/configure/global_flags.md b/doc/configure/global_flags.md new file mode 100644 index 0000000000..f870890cd6 --- /dev/null +++ b/doc/configure/global_flags.md @@ -0,0 +1,444 @@ +
+ +# Stack's global flags and options + +Stack can also be configured by flags and options on the command line. Global +flags and options apply to all of Stack's commands. In addition, all of Stack's +commands accept the `--setup-info-yaml` and `--snapshot-location-base` options +and the `--help` flag. + +## `--allow-different-user` flag + +Restrictions: POSIX systems only + +Default: True, if inside Docker; false otherwise + +Enable/disable permitting users other than the owner of the +[Stack root](../topics/stack_root.md) directory to use a Stack installation. For +further information, see the documentation for the corresponding non-project +specific configuration [option](yaml/non-project.md#allow-different-user). + +## `--arch` option + +Pass the option `--arch ` to specify the relevant machine +architecture. For further information, see the documentation for the +corresponding non-project specific configuration +[option](yaml/non-project.md#arch). + +## `--bash-completion-index` option + +Visibility: Hidden + +See the [shell auto-completion](../topics/shell_autocompletion.md) +documentation. + +## `--bash-completion-script` option + +Visibility: Hidden + +See the [shell auto-completion](../topics/shell_autocompletion.md) +documentation. + +## `--bash-completion-index` option + +Visibility: Hidden + +See the [shell auto-completion](../topics/shell_autocompletion.md) +documentation. + +## `--color` or `-colour` options + +Pass the option `stack --color ` to specify when to use color in output. +For further information, see the documentation for the corresponding non-project +specific configuration [option](yaml/non-project.md#color). + +## `--compiler` option + +Pass the option `--compiler ` to specify the compiler (and, +implicitly, its boot packages). For further information, see the +[`compiler`](yaml/non-project.md#compiler) non-project specific configuration +option documentation. 
+ +## `--custom-preprocessor-extensions` option + +Pass the option `--custom-preprocessor-extensions ` to specify an +extension used for a custom preprocessor. For further information, see the +documentation for the corresponding project specific configuration +[option](yaml/project.md#custom-preprocessor-extensions). + +## `--docker*` flags and options + +Stack supports automatically performing builds inside a Docker container. For +further information see `stack --docker-help` or the +[Docker integration](../topics/docker_integration.md) documentation. + +## `--[no-]dump-logs` flag + +Default: Dump warning logs + +Enables/disables the dumping of the build output logs for project packages to +the console. For further information, see the documentation for the +corresponding non-project specific configuration +[option](yaml/non-project.md#dump-logs). + +## `--extra-include-dirs` option + +Pass the option `--extra-include-dirs ` to specify an extra directory +to check for C header files. The option can be specified multiple times. For +further information, see the documentation for the corresponding non-project +specific configuration [option](yaml/non-project.md#extra-include-dirs). + +## `--extra-lib-dirs` option + +Pass the option `--extra-lib-dirs ` to specify an extra directory +to check for libraries. The option can be specified multiple times. For further +information, see the documentation for the corresponding non-project specific +configuration [option](yaml/non-project.md#extra-lib-dirs). + +## `--fish-completion-script` option + +Visibility: Hidden + +See the [shell auto-completion](../topics/shell_autocompletion.md) +documentation. + +## `--ghc-build` option + +Pass the option `--ghc-build ` to specify the relevant specialised GHC +build. For further information, see the documentation for the corresponding +non-project specific configuration [option](yaml/non-project.md#ghc-build). 
+ +## `--ghc-variant` option + +Pass the option `--ghc-variant ` to specify the relevant GHC variant. +For further information, see the documentation for the corresponding non-project +specific configuration [option](yaml/non-project.md#ghc-variant). + +## `--help` or `-h` flags + +Pass the `--help` (or `-h`) flag to cause Stack to list its commands and flags +and options common to those commands. Alternatively, command + +~~~text +stack +~~~ + +for the same information. + +## `--[no-]hpack-force` flag + +[:octicons-tag-24: 3.1.1](https://github.com/commercialhaskell/stack/releases/tag/v3.1.1) + +Default: Disabled + +By default, Hpack 0.12.0 or later will decline to overwrite a Cabal file that +was created by a more recent version of Hpack and Hpack 0.20.0 or later will +decline to overwrite a Cabal file that has been modified manually. Pass the flag +`--hpack-force` to allow Hpack to overwrite such a Cabal file. + +## `--hpack-numeric-version` flag + +Pass the flag `--hpack-numeric-version` to cause Stack to report the numeric +version of its built-in Hpack library to the standard output stream (e.g. +`0.35.0`) and quit. + +## `--[no-]install-ghc` flag + +Default: Enabled +([:octicons-tag-24: 1.6.1](https://github.com/commercialhaskell/stack/releases/tag/v1.6.1)) + +If the specified GHC version is not available, enables/disables Stack seeking to +download and install that version if it is needed. On Windows, +`--no-install-ghc` also disables the download and installation of the +Stack-supplied MSYS2 when it is needed. For further information, see the +documentation for the corresponding non-project specific configuration +[option](yaml/non-project.md#install-ghc). + +!!! note + + The `--[no-]install-ghc` flag does not specify whether Stack checks the + availability of either a 'system' GHC executable on the PATH or a + Stack-supplied GHC executable. In that regard, see the + [`system-ghc`](yaml/non-project.md#system-ghc) option. 
+ +## `--[no-]install-msys` flag + +[:octicons-tag-24: 3.5.1](https://github.com/commercialhaskell/stack/releases/tag/v3.5.1) + +Restrictions: Windows systems only + +Default: Same as the [`install-ghc`](yaml/non-project.md#install-ghc) setting +(including if that is set on the command line) + +If Stack is checking for the Stack-supplied MSYS2 when Stack is setting up the +environment, enables/disables the download and installation of MSYS2 when +necessary. For further information, see the documentation for the corresponding +non-project specific configuration [option](yaml/non-project.md#install-msys). + +To skip entirely checking for the Stack-supplied MSYS2, see the documentation +for the [`skip-msys`](yaml/non-project.md#skip-msys) configuration option. + +## `--jobs` or `-j` option + +Pass the option `--jobs ` to specify the number of concurrent +jobs (Stack actions during building) to run. + +When [building GHC from source](../topics/GHC_from_source.md), specifies the +`-j[]` flag of GHC's Hadrian build system. + +By default, Stack specifies a number of concurrent jobs equal to the number of +CPUs (cores) that the machine has. In some circumstances, that default can cause +some machines to run out of memory during building. If those circumstances +arise, specify `--jobs 1`. + +This configuration option is distinct from GHC's own `-j[]` flag, which +relates to parallel compilation of modules within a package. + +For further information, see the documentation for the corresponding non-project +specific configuration option: [`jobs`](yaml/non-project.md#jobs). + +## `--local-bin-path` option + +Pass the option `--local-bin-path ` to set the target directory for +[`stack build --copy-bins`](../commands/build_command.md#-no-copy-bins-flag) and +`stack install`. An absolute or relative path can be specified. A relative path +at the command line is always assumed to be relative to the current directory. 
+ +For further information, see the documentation for the corresponding non-project +specific configuration [option](yaml/non-project.md#local-bin-path). + +## `--lock-file` option + +Default: `read-write`, if snapshot specified in the project-level configuration +file; `read-only`, if a different snapshot is specified on the command line. + +Pass the option `--lock-file ` to specify how Stack interacts with lock +files. Valid modes are: + +* `error-on-write`: Stack reports an error, rather than write a lock file; +* `ignore`: Stack ignores lock files; +* `read-only`: Stack only reads lock files; and +* `read-write`: Stack reads and writes lock files. + +## `--[no-]modify-code-page` flag + +Restrictions: Windows systems only + +Default: Enabled + +Enables/disables setting the codepage to support UTF-8. For further information, +see the documentation for the corresponding non-project specific configuration +[option](yaml/non-project.md#modify-code-page). + +## `--nix*` flags and options + +Stack can be configured to integrate with Nix. For further information, see +`stack --nix-help` or the [Nix integration](../topics/nix_integration.md) +documentation. + +## `--numeric-version` flag + +Pass the flag `--numeric-version` to cause Stack to report its numeric version +to the standard output stream (e.g. `2.9.1`) and quit. + +## `--[no-]plan-in-log` flag + +[:octicons-tag-24: 2.13.1](https://github.com/commercialhaskell/stack/releases/tag/v2.13.1) + +Default: Disabled + +Enables/disables the logging of build plan construction in debug output. +Information about the build plan construction can be lengthy. If you do not need +it, it is best omitted from the debug output. + +## `--resolver` option + +[:octicons-thumbsdown-24: 3.9.3](https://github.com/commercialhaskell/stack/releases/tag/v3.9.3) + +The `--resolver` option (deprecated) and the [`--snapshot`](#-snapshot-option) +option are synonyms. + +More than one of these options is prohibited. 
+ +## `--[no-]rsl-in-log` flag + +[:octicons-tag-24: 2.9.1](https://github.com/commercialhaskell/stack/releases/tag/v2.9.1) + +Default: Disabled + +Enables/disables the logging of the raw snapshot layer (rsl) in debug output. +Information about the raw snapshot layer can be lengthy. If you do not need it, +it is best omitted from the debug output. + +## `--[no-]script-no-run-compile` flag + +Default: Disabled + +Enables/disables the use of options `--no-run --compile` with the +[`stack script` command](../commands/script_command.md). + +## `--silent` flag + +Equivalent to the `--verbosity silent` option. + +## `--[no-]skip-ghc-check` option + +Default: Disabled + +Enables/disables the skipping of checking the GHC version and architecture. For +further information, see the documentation for the corresponding non-project +specific configuration [option](yaml/non-project.md#skip-ghc-check). + +## `--[no-]skip-msys` option + +Restrictions: Windows systems only + +Default: Disabled + +Enables/disables the skipping of checking for the Stack-supplied MSYS2 (and +installing that MSYS2, if it is not installed) when Stack is setting up the +environment. For further information, see the documentation for the +corresponding non-project specific configuration +[option](yaml/non-project.md#skip-msys). + +To prevent installation of MSYS2, if it is not installed, see the documentation +for the [`install-msys`](yaml/non-project.md#install-msys) configuration option. + +## `--snapshot` option + +[:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) + +Pass the option `--snapshot ` to specify the snapshot. For further +information, see the [`snapshot`](yaml/project.md#snapshot) project-specific +configuration option documentation. + +## `--stack-colors` or `--stack-colours` options + +Pass the option `--stack-colors ` to specify Stack's output styles. 
For +further information, see the documentation for the corresponding non-project +specific configuration [option](yaml/non-project.md#stack-colors). + +## `--stack-root` option + +Overrides: `STACK_ROOT` environment variable + +Pass the option `--stack-root ` to specify the +path to the [Stack root](../topics/stack_root.md) directory. The path must be an +absolute one. + +## `--stack-yaml` or `-w` option + +Default: `stack.yaml` + +Overrides: `STACK_YAML` enviroment variable + +Pass the option `--stack-yaml ` to specify Stack's project-level YAML +configuration file. + +## `--[no-]system-ghc` flag + +Default: Disabled +([:octicons-tag-24: 1.3.0](https://github.com/commercialhaskell/stack/releases/tag/v1.3.0)) + +Enables/disables Stack seeking to use a 'system' GHC executable (that is, one on +the PATH) rather than a Stack-supplied GHC executable (or vice versa, if +disabled). For further information, see the documentation for the corresponding +non-project specific configuration [option](yaml/non-project.md#system-ghc). + +!!! note + + The `--[no-]system-ghc` flag does not specify Stack's behaviour if the + specified GHC version is not already available. In that regard, see the + [`install-ghc`](yaml/non-project.md#install-ghc) option. + +## `--[no-]terminal` flag + +Default: Stack is running in a terminal (as detected) + +Enables/disables whether Stack is running in a terminal. + +## `--terminal-width` option + +Default: the terminal width (if detected); otherwise `100` + +Pass the option `--terminal-width ` to specify the width of the terminal, +used by Stack's pretty printed messages. + +## `--[no-]time-in-logs` flag + +Default: Enabled + +Enables/disables the inclusion of time stamps against logging entries when the +verbosity level is 'debug'. + +## `--verbose` or `-v` flags + +Equivalent to the `--verbosity debug` option. + +## `--verbosity` option + +Default: `info` + +Pass the option `--verbosity ` to specify the level for logging. 
+Possible levels are `silent`, `error`, `warn`, `info` and `debug`, in order of +increasing amounts of information provided by logging. + +## `--version` flag + +Pass the flag `--version` to cause Stack to report its version to standard +output and quit. For versions that are release candidates, the report will list +the dependencies that Stack has been compiled with. + +## `--with-gcc` option + +Pass the option `--with-gcc ` to specify use of a GCC executable. +For further information, see the documentation for the corresponding non-project +specific configuration [option](yaml/non-project.md#with-gcc). + +## `--with-hpack` option + +Pass the option `--with-hpack ` to specify use of an Hpack executable. +For further information, see the documentation for the corresponding +non-project specific configuration [option](yaml/non-project.md#with-hpack). + +## `--work-dir` option + +Default: `.stack-work` + +Overrides: [`STACK_WORK`](environment_variables.md#stack_work) environment +variable, and [`work-dir`](yaml/non-project.md#work-dir) non-project specific +configuration option. + +Pass the option `--work-dir ` to specify the +path to Stack's work directory, within a local project or package directory. The +path must be a relative one, relative to the the root directory of the project +or package. The relative path cannot include a `..` (parent directory) +component. + +## `--zsh-completion-script` option + +Visibility: Hidden + +See the [shell auto-completion](../topics/shell_autocompletion.md) +documentation. + +## `--setup-info-yaml` command option + +Default: `https://raw.githubusercontent.com/commercialhaskell/stackage-content/master/stack/stack-setup-2.yaml` + +The `--setup-info-yaml ` command option specifies the location of a +`setup-info` dictionary. The option can be specified multiple times. 
+ +## `--snapshot-location-base` command option + +Default: `https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master` + +The `--snapshot-location-base ` command option specifies the base location +of snapshots. For further information, see the documentation for the +corresponding non-project specific configuration +[option](yaml/non-project.md#snapshot-location-base). + +## `--help` or `-h` command flags + +If Stack is passed the `--help` (or `-h`) command flag, it will output help for +the command. diff --git a/doc/configure/index.md b/doc/configure/index.md new file mode 100644 index 0000000000..c278424650 --- /dev/null +++ b/doc/configure/index.md @@ -0,0 +1,41 @@ +--- +title: Configure +--- +
+ +# Configuration (advanced) + +Some of Stack's features will not be needed regularly or by all users. This part +of the guide and the part on Stack's [commands](../commands/index.md) provide +information about those features, organised as a reference guide. Some of the +features are complex and separate pages are dedicated to them. + +The behaviour of Stack is configurable using environment variables, YAML +configuration files, global flags and options on the command line and +customisation scripts. + +## Environment variables + +The existence or content of certain environment variables can affect how Stack +behaves. For further information, see the +[environment variables](environment_variables.md) documentation. + +## Configuration files + +Stack is configured by the content of files in the YAML formal. A global +configuration file contains non-project specific options. A project-level +configuration file contains project-specific options and may contain non-project +specific options. For further information, see the +[configuration](yaml/index.md) documentation. + +## Global flags and options + +Stack can also be configured by flags and options on the command line. Global +flags and options apply to all of Stack's commands. For further information, see +the [global flags and options](global_flags.md) documentation. + +## Customisation scripts + +Stack's behaviour can also be affected by customisation scripts. For further +information, see the [customisation scripts](customisation_scripts.md) +documentation. diff --git a/doc/configure/yaml/include.md b/doc/configure/yaml/include.md new file mode 100644 index 0000000000..2e56c60f26 --- /dev/null +++ b/doc/configure/yaml/include.md @@ -0,0 +1,117 @@ +
+ +# The `!include` directive + +Stack's configuration files are in the [YAML](https://yaml.org/) format. Stack +also supports the use of an `!include` local tag together with scalar content +that represents an absolute or relative path to another file. This provides a +directive that allows the content of one YAML file to be included in another. + +The directive can be used in both +[project-level and global](index.md#project-level-and-global-configuration-files) +configuration files. + +!!! note + + An included relative file path is relative to the directory containing the + file with the `!include` directive. + +!!! warning + + The [`stack config set`](../../commands/config_command.md#the-stack-config-set-commands) + commands cannot modify a configuration file that excludes the relevant key + and uses `!include` directives. + +## Including a value + +A value for a key can be provided by an included file. For example, given a file +`snapshot.yaml` in the project directory with the content: + +~~~yaml +lts-24.37 +~~~ + +the following project-level configuration file would use `lts-24.37` as the +snapshot: + +~~~yaml +snapshot: !include snapshot.yaml +~~~ + +The included file replaces the `!include` directive with its content, so this is +equivalent to: + +~~~yaml +snapshot: lts-24.37 +~~~ + +## Including a sequence + +The value provided by an included file is not limited to scalar content. It can +be a YAML sequence. For example, given a file `extra-deps.yaml` in the project +directory with the content: + +~~~yaml +- acme-missiles-0.3 +- text-short-0.1.6 +~~~ + +the following project-level configuration file would use those as extra-deps: + +~~~yaml +snapshot: lts-24.37 +extra-deps: !include extra-deps.yaml +~~~ + +## Merging mappings + +YAML's merge key (`<<`) is used to indicate that all of the keys of one or more +specified mappings should be inserted into the current mapping. 
+ +YAML's merge key can be combined with an `!include` directive to merge the +content of an included file into the current mapping. For example, given a file +`shared-config.yaml` in the project directory with the content: + +~~~yaml +ghc-options: + "$everything": -Wall +flags: + my-package: + my-flag: true +~~~ + +the following project-level configuration file would merge those options: + +~~~yaml +snapshot: lts-24.37 +<<: !include shared-config.yaml +~~~ + +This is equivalent to: + +~~~yaml +snapshot: lts-24.37 +ghc-options: + "$everything": -Wall +flags: + my-package: + my-flag: true +~~~ + +The `!include` directive can also be placed on the line after the merge key: + +~~~yaml +snapshot: lts-24.37 +<<: + !include shared-config.yaml +~~~ + +## Nested includes + +Included files can themselves contain `!include` directives, allowing for nested +composition of configuration. + +!!! note + + A file cannot include itself or a file that has already included the file. + Stack detects and raises an error for cyclic includes. diff --git a/doc/configure/yaml/index.md b/doc/configure/yaml/index.md new file mode 100644 index 0000000000..0f9fae07b0 --- /dev/null +++ b/doc/configure/yaml/index.md @@ -0,0 +1,129 @@ +--- +title: Configuration files +--- +
+ +# Configuration files + +Stack is configured by the content of files in the [YAML](https://yaml.org/) +format. Stack also supports an [`!include` directive](include.md) that allows a +configuration file to include the contents of another file. + +## Project-specific and non-project specific options + +Stack's configuration options are each of one of two types: + +
+ +- :material-account:{ .lg .middle } __Project-specific__ + + Configured only at the project level + + --- + + [:octicons-arrow-right-24: Learn more](project.md) + +- :material-account-multiple:{ .lg .middle } __Non-project specific__ + + Configured globally or at the project level. + + --- + + [:octicons-arrow-right-24: Learn more](non-project.md) + +
+ +Most of Stack's configuration options are non-project specific. + +## Project-level and global configuration files + +Stack's configuration files are each of one of two types: + +
+ +- :material-language-haskell:{ .lg .middle } __Project-level__ + + Named `stack.yaml` by default. + + --- + + Contains [project-specific](project.md) options and may contain + [non-project-specific](non-project.md) options. + + Non-project-specific options in the project-level configuration file in the + `global-project` directory (see below) are ignored by Stack. + +- :octicons-globe-24:{ .lg .middle } __Global__ + + Named `config.yaml`. + + There is a user-specific file and there may be a system-wide one. If a + user-specific file does not exist, then Stack will create one. + + --- + + Contains [non-project-specific](non-project.md) options. + + An option set in the user-specific file will override a corresponding option + set in the system-wide file (if it exists). + +
+ +## Location of project-level configuration + +Stack obtains project-level configuration from one of the following (in order of +preference): + +1. A file specified by the `--stack-yaml` or `-w` command line option. +2. A file specified by the `STACK_YAML` environment variable. +3. A file named `stack.yaml` in the current directory or an ancestor directory. +4. A file name `stack.yaml` in the `global-project` directory in the + [Stack root](../../topics/stack_root.md). + +## Location of global configuration + +The default location of global configuration files depends on the operating +system and, in the case of the user-specific file, whether Stack is configured +to use the XDG Base Directory Specification. + +An absolute path to these files can be specified by the +[`STACK_CONFIG`](../environment_variables.md#stack_config) and +[`STACK_GLOBAL_CONFIG`](../environment_variables.md#stack_config) environment +variables, respectively. + +=== "Unix-like" + + The default locations are: + + * system-wide: `/etc/stack/config.yaml`; and + * user-specific: `config.yaml` in the + [Stack root](../../topics/stack_root.md). + + !!! note + + For compatibility with Stack 0.1.5.0 and earlier, if deprecated file + `/etc/stack/config` exists, then Stack will use it instead of + `/etc/stack/config.yaml`. + +=== "Windows" + + The default locations are: + + * system-wide: none; and + * user-specific: `config.yaml` in the + [Stack root](../../topics/stack_root.md). + +=== "XDG Base Directory Specification" + + On Unix-like operating systems and Windows, Stack can be configured to + follow the XDG Base Directory Specification if the environment variable + `STACK_XDG` is set to any non-empty value. However, Stack will ignore that + configuration if the [Stack root](../../topics/stack_root.md) location has + been set on the command line or the `STACK_ROOT` environment variable + exists. 
+ + If Stack is following the XDG Base Directory Specification, the location of + `config.yaml` (for user-specific options) is `/stack`. If + the `XDG_CONFIG_HOME` environment variable does not exist, the default is + `~/.config/stack` on Unix-like operating systems and `%APPDATA%\stack` on + Windows. diff --git a/doc/configure/yaml/non-project.md b/doc/configure/yaml/non-project.md new file mode 100644 index 0000000000..0a45b26f96 --- /dev/null +++ b/doc/configure/yaml/non-project.md @@ -0,0 +1,1649 @@ +
+ +# Non-project-specific configuration + +Non-project configuration options can be included in: + +* a [project-level configuration file](../yaml/index.md#project-level-and-global-configuration-files) + (`stack.yaml`, by default), like [project-specific](project.md) ones; or + +* [global configuration files](../yaml/index.md#project-level-and-global-configuration-files) + (`config.yaml`). + +However, non-project-specific options in the project-level configuration file in +the `global-project` directory are ignored by Stack. + +The options below are listed in alphabetic order. + +## allow-different-user + +[:octicons-tag-24: 1.0.1.0](https://github.com/commercialhaskell/stack/releases/tag/v1.0.1.0) + +Restrictions: POSIX systems only. + +Default: `false` + +Command line equivalent (takes precedence): `--[no-]allow-different-user` flag + +Allow users other than the owner of the [Stack root](../../topics/stack_root.md) +to use the Stack installation. + +~~~yaml +allow-different-user: true +~~~ + +The intention of this option is to prevent file permission problems, for example +as the result of a Stack command executed under `sudo`. + +The option is automatically enabled when Stack is re-spawned in a Docker +process. + +## allow-newer + +[:octicons-tag-24: 0.1.8.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.8.0) + +Default: `false` + +Command line equivalent (takes precedence): +[`stack build --[no-]allow-newer`](../../commands/build_command.md#-no-allow-newer-flag) +flag + +Whether to ignore lower and upper version bounds in Cabal files. + +!!! info + + The name `allow-newer` was chosen to match a commonly-used Cabal option + which ignored only upper bounds. + +~~~yaml +allow-newer: true +~~~ + +## allow-newer-deps + +[:octicons-tag-24: 2.9.3](https://github.com/commercialhaskell/stack/releases/tag/v2.9.3) + +Default: `none` + +Determines a subset of packages to which `allow-newer` should apply. 
This option +has no effect (but warns) if `allow-newer` is `false`. + +~~~yaml +allow-newer-deps: + - foo + - bar +~~~ + +## apply-ghc-options + +[:octicons-tag-24: 0.1.6.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.6.0) + +Default: `locals` + +Related command line: +[`stack build --ghc-options`](../../commands/build_command.md#-ghc-options-option) +option + +Determines to which packages any GHC command line options specified on the +command line are applied. Possible values are: `everything` (all packages, +project packages or otherwise), `locals` (all project packages, targets or +otherwise), and `targets` (all project packages that are targets). + +!!! note + + The use of `everything` can break invariants about your snapshot database. + +!!! info + + Before Stack 0.1.6.0, the default value was `targets`. + +## apply-prog-options + +[:octicons-tag-24: 2.11.1](https://github.com/commercialhaskell/stack/releases/tag/v2.11.1) + +Default: `locals` + +Related command line: +[`stack build --PROG-option`](../../commands/build_command.md#-prog-option-options) +options + +Determines to which packages all and any `--PROG-option` command line options +specified on the command line are applied. Possible values are: `everything` +(all packages, project packages or otherwise), `locals` (all project packages, +targets or otherwise), and `targets` (all project packages that are targets). + +!!! note + + The use of `everything` can break invariants about your snapshot database. + +## arch + +Default: The machine architecture on which Stack is running. + +Command line equivalent (takes precedence): +[`--arch`](../global_flags.md#-arch-option) option + +Stack identifies different GHC executables by platform (operating system and +machine architecture), (optional) GHC variant and (optional) GHC build. +See [`setup-info`](#setup-info). + +`arch` sets the machine architecture. 
Values can be those recognized by Cabal +(the library) (which are case-insensitive and include `i386`, `x86_64`, and +`aarch64` / `arm64`), or other values (which are case-sensitive and treated as +an unknown 'other' architecture of the specified name). + +By default, Stack will warn the user if the specified machine architecture is an +unknown 'other' architecture. The warning can be muted; see +[`notify-if-arch-unknown`](#notify-if-arch-unknown) + +!!! note + + The machine architecture on which Stack is running is as classified by + Cabal (the library). Cabal does not distinguish between certain + architectures. Examples are `ppc64`/`powerpc64`/`powerpc64le` (classified as + `ppc64`) and `arm`/`armel`/`armeb` (classified as `arm`). + +!!! note + + As Cabal (the library) does not distinguish between machine architectures + `powerpc64` and `powerpc64le`, the latter can be specified in Stack's + configuration as an 'other' architecture, such as `arch: ppc64le`. + +## build + +[:octicons-tag-24: 1.1.0](https://github.com/commercialhaskell/stack/releases/tag/v1.1.0) + +Default: + +~~~yaml +build: + # Experimental. Since Stack UNRELEASED. Supported by GHC 9.8.1 or later with + # Cabal 3.12.0.0 (a boot package of GHC 9.10.1) or later. Ignored with a + # warning when unsupported. + semaphore: false + + library-profiling: false + executable-profiling: false + library-stripping: true + executable-stripping: true + + # NOTE: global usage of haddock can cause build failures when documentation is + # incorrectly formatted. This could also affect scripts which use Stack. + haddock: false + haddock-arguments: + + # Additional arguments passed to haddock. The corresponding command line + # option is --haddock-arguments. Example of use: + # + # haddock-args: + # - "--css=/home/user/my-css" + haddock-args: [] + + # The corresponding command line flag is --[no-]open. + open-haddocks: false + + # If Stack is configured to build Haddock documentation, defaults to true. 
+ haddock-deps: false + + # The configuration is ignored, if haddock-for-hackage: true. + haddock-internal: false + + # The configuration is ignored, if haddock-for-hackage: true. + haddock-hyperlink-source: true + + # If specified, implies haddock-internal: false and + # haddock-hyperlink-source: true. Since Stack 2.15.1. + haddock-for-hackage: false + copy-bins: false + copy-compiler-tool: false + prefetch: false + keep-going: false + keep-tmp-files: false + + # These are inadvisable to use in your global configuration, as they make the + # Stack build command line behave quite differently. + force-dirty: false + test: false + test-arguments: + rerun-tests: true # Rerun successful tests + + # The corresponding command line option is --test-arguments. Example of use: + # + # additional-args: + # - "--fail-fast" + additional-args: [] + coverage: false + no-run-tests: false + # The option is ignored if the specified number of seconds is not positive: + test-suite-timeout: 0 + # The option is ignored if the specified number of seconds is not positive: + test-suite-timeout-grace: 0 + bench: false + benchmark-opts: + + # Example of use: + # + # benchmark-arguments: "--csv bench.csv" + benchmark-arguments: "" + no-run-benchmarks: false + reconfigure: false + cabal-verbosity: normal + cabal-verbose: false + split-objs: false + skip-components: [] # --skip + + # Since Stack 1.8. Starting with Stack 2.0, the default is true + interleaved-output: true + + # Since Stack 2.13.1. Available options are none, count-only, capped and full. + progress-bar: capped + + # Since Stack 1.10. + ddump-dir: "" +~~~ + +Command line equivalents (take precedence): Yes, see below. + +Allows setting build options which are usually specified on the command line. + +The meanings of these settings correspond directly with the command line flags +of the same name. For further information, see the +[`stack build`](../../commands/build_command.md) command documentation. 
+ +## casa + +[:octicons-tag-24: 2.13.1](https://github.com/commercialhaskell/stack/releases/tag/v2.13.1) + +Default: + +~~~yaml +casa: + enable: true # Use a Casa server? + repo-prefix: https://casa.stackage.org # Unless casa-repo-prefix is set. + max-keys-per-request: 1280 # Maximum number of keys per request. +~~~ + +This option specifies whether or not Stack should use a Casa +(content-addressable storage archive) server to cache Cabal files and all other +files in packages; and, if so, the prefix for the URL used to pull information +from the server and the maximum number of keys per request. For further +information, see this blog post about +[Casa and Stack](https://www.fpcomplete.com/blog/casa-and-stack/). + +`repo-prefix` replaces [`casa-repo-prefix`](#casa-repo-prefix) (which is +deprecated) and has precedence if both keys are set. + +## casa-repo-prefix + +[:octicons-tag-24: 2.3.1](https://github.com/commercialhaskell/stack/releases/tag/v2.3.1) + +Deprecated in favour of [`casa`](#casa), which takes precedence if present. + +Default: `https://casa.stackage.org` + +This option specifies the prefix for the URL used to pull information from the +Casa server. + +## color + +Command line equivalent (takes precedence): `--color` option + +This option specifies when to use color in output. The option is used as +`color: `, where `` is 'always', 'never', or 'auto'. On Windows +versions before Windows 10, for terminals that do not support color codes, the +default is 'never'; color may work on terminals that support color codes. + +(The British English spelling (colour) is also accepted. In configuration files, +the American spelling is the alternative that has priority.) + +## compiler + +[:octicons-tag-24: 0.1.8.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.8.0) + +Command line equivalent (takes precedence): `--compiler` option + +Overrides the version of the compiler (and, implicitly, its boot packages) in +the snapshot. 
Note that the `compiler-check` flag also applies to the version +numbers. This uses the same syntax as compiler snapshots like `ghc-9.10.3`. This +can be used to override the compiler (and, implicitly, its boot packages) for a +Stackage snapshot, like this: + +~~~yaml +snapshot: lts-24.37 +compiler: ghc-9.10.2 +compiler-check: match-exact +~~~ + +:octicons-beaker-24: Experimental + +[:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) + +Stack also supports building the GHC compiler from source. For further +information, see the [building GHC from source](../../topics/GHC_from_source.md) +documentation. + +## compiler-check + +[:octicons-tag-24: 0.1.4.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.4.0) + +Default: `match-minor` + +Specifies how the compiler version in the snapshot is matched against concrete +versions. Valid values: + +* `match-minor`: make sure that the first three components match, but allow + patch-level differences. For example< 7.8.4.1 and 7.8.4.2 would both match + 7.8.4. This is useful to allow for custom patch levels of a compiler. +* `match-exact`: the entire version number must match precisely +* `newer-minor`: the third component can be increased, e.g. if your snapshot is + `ghc-7.10.1`, then 7.10.2 will also be allowed. This was the default up + through Stack 0.1.3 + +## concurrent-tests + +[:octicons-tag-24: 0.1.2.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.2.0) + +Default: `true` + +This option specifies whether test suites should be executed concurrently with +each other. The default is `true` since this is usually fine and it often means +that tests can complete earlier. However, if some test suites require exclusive +access to some resource, or require a great deal of CPU or memory resources, +then it makes sense to set this to `false`. 
+ +~~~yaml +concurrent-tests: false +~~~ + +## configure-options + +[:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) + +Related command line (takes precedence): +[`stack build --PROG-option`](../../commands/build_command.md#-prog-option-options) +options + +`configure-options` can specify Cabal (the library) options (including +`--PROG-option` or `--PROG-options` options) for the configure step of the Cabal +build process for a named package, all project packages that are targets (using +the `$targets` key), all project packages (targets or otherwise) (using the +`$locals` key), or all packages (project packages or otherwise) (using the +`$everything` key). + +~~~yaml +configure-options: + $everything: + - --with-gcc + - /some/path + $locals: + - --happy-option=--ghc + $targets: + # Only works on platforms where GHC supports linking against shared Haskell + # libraries: + - --enable-executable-dynamic + my-package: + - --another-flag +~~~ + +On platforms where GHC supports linking against shared Haskell libraries (that +currently excludes Windows), Cabal's `--enable-executable-dynamic` flag (which +implies `--enable-shared`, unless `--disable-shared` is specified) links +dependent Haskell libraries into executables dynamically. + +## connection-count + +Default: `8` + +Integer indicating how many simultaneous downloads are allowed to happen. + +### default-init-snapshot + +[:octicons-tag-24: 3.1.1](https://github.com/commercialhaskell/stack/releases/tag/v3.1.1) + +Default: As for the [`stack init`](../../commands/init_command.md) command when +no snapshot is specified at the command line. + +Command line equivalent (takes precedence): +[`--snapshot`](../global_flags.md#-snapshot-option) or +[`--resolver`](../global_flags.md#-resolver-option) (deprecated) option + +This option specifies which snapshot to use with `stack init`, when none is +specified at the command line. 
+ +`default-init-snapshot: global` specifies the snapshot specified by the +project-level configuration file in the `global-project` directory in the +[Stack root](../../topics/stack_root.md#global-project-directory). + +## default-template + +Default: `new-template` in the +[stack-templates](https://github.com/commercialhaskell/stack-templates/) +repository. + +This option specifies which template to use with `stack new`, when none is +specified. Other templates are listed in the +[stack-templates](https://github.com/commercialhaskell/stack-templates/) +repository. See the output of `stack templates`. + +## docker + +Command line equivalents: `--docker-*` flags and options (see +`stack --docker-help` for details). + +For further information, see the +[Docker integration](../../topics/docker_integration.md#configuration) +documentation. + +## dump-logs + +[:octicons-tag-24: 1.3.0](https://github.com/commercialhaskell/stack/releases/tag/v1.3.0) + +Default: `warning` + +Command line equivalent (takes precedence): `--[no-]dump-logs` flag + +In the case of *non-interleaved* output and *more than one* target package, +Stack sends the build output from GHC for each target package to a log file, +unless an error occurs that prevents that. For further information, see the +[`stack build --[no-]interleaved-output` flag](../../commands/build_command.md#-no-interleaved-output-flag) +documentation. + +The value of the `dump-logs` key controls what, if any, log file content is sent +('dumped') to the standard error stream of the console at the end of the build. +Possible values are: + +~~~yaml +dump-logs: none # do not dump the content of any log files +dump-logs: warning # dump the content of any log files that include GHC warnings +dump-logs: all # dump the content of all log files +~~~ + +At the command line, `--no-dump-logs` is equivalent to `dump-logs: none` and +`--dump-logs` is equivalent to `dump-logs: all`. 
+ +If GHC reports an error during the build and a log file is created, that build +output will be included in the log file. Stack will also report errors during +building to the standard error stream. That stream can be piped to a file. For +example, for a file named `stderr.log`: + +~~~text +stack --no-dump-logs --color always build --no-interleaved-output 2> stderr.log +~~~ + +## extra-include-dirs + +Default: `[]` + +Command line equivalent: `--extra-include-dirs` option (repeat for each +directory) + +A list of extra paths to be searched for header files. Paths should be absolute + +~~~yaml +extra-include-dirs: +- /opt/foo/include +~~~ + +Since these are system-dependent absolute paths, it is recommended that you +specify these in your `config.yaml` file. If you control the build environment +in your project's ``stack.yaml``, perhaps through docker or other means, then it +may well make sense to include these there as well. + +## extra-lib-dirs + +Default: `[]` + +Command line equivalent: `--extra-lib-dirs` option (repeat for each directory) + +A list of extra paths to be searched for libraries. Paths should be absolute + +~~~yaml +extra-lib-dirs: +- /opt/foo/lib +~~~ + +Since these are system-dependent absolute paths, it is recommended that you +specify these in your `config.yaml` file. If you control the build environment +in your project's ``stack.yaml``, perhaps through Docker or other means, then it +may well make sense to include these there as well. + +## extra-path + +[:octicons-tag-24: 0.1.4.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.4.0) + +This option specifies additional directories to prepend to the PATH. These will +be used when resolving the location of executables, and will also be visible in +the PATH of processes run by Stack. 
+ +For example, to prepend `/path-to-some-dep/bin` to your PATH: + +~~~yaml +extra-path: +- /path-to-some-dep/bin +~~~ + +Other paths added by Stack - things like the project's binary directory and the +compiler's binary directory - will take precedence over those specified here +(the automatic paths get prepended). + +## file-watch-hook + +[:octicons-tag-24: 3.1.1](https://github.com/commercialhaskell/stack/releases/tag/v3.1.1) + +Specifies the location of an executable or `sh` shell script to be run after +each attempted build with +[`build --file-watch`](../../commands/build_command.md#-file-watch-flag). An +absolute or relative path can be specified. Changes to the configuration after +the initial `build --file-watch` command are ignored. + +If the project-level configuration is provided in the `global-project` directory +in the [Stack root](../../topics/stack_root.md), a relative path is assumed to +be relative to the current directory. Otherwise, it is assumed to be relative to +the directory of the project-level configuration file. + +## ghc-build + +[:octicons-tag-24: 1.3.0](https://github.com/commercialhaskell/stack/releases/tag/v1.3.0) + +Default: `standard` + +Command line equivalent (takes precedence): `--ghc-build` option + +Stack identifies different GHC executables by platform (operating system and +machine architecture), (optional) GHC variant and (optional) GHC build. +See [`setup-info`](#setup-info). + +`ghc-build` specifies a specialized architecture for the GHC executable. +Normally this is determined automatically, but it can be overridden. Possible +arguments include `standard`, `gmp4`, `nopie`, `tinfo6`, `tinfo6-libc6-pre232`, +`tinfo6-nopie`, `ncurses6`, `int-native` and `integersimple`. 
+
+## ghc-options
+
+[:octicons-tag-24: 0.1.4.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.4.0)
+
+Default: `{}`
+
+Related command line (takes precedence):
+[`stack build --ghc-options`](../../commands/build_command.md#-ghc-options-option)
+option
+
+GHC command line options can be specified for a package in its Cabal file (including one created from a `package.yaml` file). This option augments and, if applicable (see below), overrides any such GHC command line options.
+
+`ghc-options` can specify GHC command line options for a named package, all project packages that are targets (using the `$targets` key), all project packages (targets or otherwise) (using the `$locals` key), or all packages (project packages or otherwise) (using the `$everything` key).
+
+~~~yaml
+ghc-options:
+  "$everything": -O2
+  "$locals": -Wall
+  "$targets": -Werror
+  some-package: -DSOME_CPP_FLAG
+~~~
+
+GHC's command line options are _order-dependent_ and evaluated from left to right. Later options can override the effect of earlier ones. Stack applies options (as applicable) in the order of `$everything`, `$locals`, `$targets`, and then those for the named package. Any GHC command line options for a package specified at Stack's command line are applied after those specified in Stack's configuration files.
+
+Since Stack 1.6.1, setting GHC options for a specific package will automatically promote it to a project package (much like setting a custom package flag). However, setting options via `$everything` on all flags will not do so (see
+[GitHub discussion](https://github.com/commercialhaskell/stack/issues/849#issuecomment-320892095)
+for reasoning). This can lead to unpredictable behavior by affecting your snapshot packages.
+
+!!! info
+
+    Before Stack 1.6.1, the key `*` (then deprecated) had the same function as
+    the key `$everything`. 
+ +## ghc-variant + +[:octicons-tag-24: 0.1.5.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.5.0) + +Default: `standard` + +Command line equivalent (takes precedence): `--ghc-variant` option + +Stack identifies different GHC executables by platform (operating system and +machine architecture), (optional) GHC variant and (optional) GHC build. +See [`setup-info`](#setup-info). + +`ghc-variant` specifies a variant of the GHC executable. Known values are: + +* `standard`: Use the standard GHC binary distribution +* `int-native`: From GHC 9.4.1, use a GHC bindist that uses the Haskell-native + big-integer + [backend](https://downloads.haskell.org/~ghc/9.0.2/docs/html/users_guide/9.0.1-notes.html#highlights). + For further information, see this [article](https://iohk.io/en/blog/posts/2020/07/28/improving-haskells-big-numbers-support/). +* `integersimple`: Use a GHC bindist that uses + [integer-simple instead of GMP](https://ghc.haskell.org/trac/ghc/wiki/ReplacingGMPNotes) +* any other value: Use a custom GHC bindist. You should specify + [setup-info](#setup-info) or [setup-info-locations](#setup-info-locations) + so `stack setup` knows where to download it, + or pass the `stack setup --ghc-bindist` argument on the command-line + +This option is incompatible with [`system-ghc: true`](#system-ghc). + +## global-hints-location + +[:octicons-tag-24: 3.1.1](https://github.com/commercialhaskell/stack/releases/tag/v3.1.1) + +Default: + +~~~yaml +global-hints-location: + url: https://raw.githubusercontent.com/commercialhaskell/stackage-content/master/stack/global-hints.yaml +~~~ + +(as set in the `pantry` library) + +Sets the location of the global hints YAML file. The location can be either a +filepath of a local file or a URL. The filepath can be absolute or relative to +the Stack root. 
+ +For example: + +~~~yaml +global-hints-location: + filepath: \pantry\global-hints.yaml +~~~ + +or: + +~~~yaml +global-hints-location: + url: https://example.com/global-hints/location/global-hints.yaml +~~~ + +## hackage-base-url + +[:octicons-tag-24: 1.9.1](https://github.com/commercialhaskell/stack/releases/tag/v1.9.1) + +Default: `https://hackage.haskell.org/` + +Sets the address of the Hackage server to upload the package to. + +~~~yaml +hackage-base-url: https://hackage.example.com/ +~~~ + +## hide-source-paths + +Default: `true` +([:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1)) + +Whether to use the `-fhide-source-paths` option by default for GHC >= 8.2: + +~~~yaml +hide-source-paths: false +~~~ + +Build output when enabled: + +~~~text +... +[1 of 2] Compiling Lib +[2 of 2] Compiling Paths_test_pr +... +~~~ + +Build output when disabled: + +~~~text +... +[1 of 2] Compiling Lib ( src/Lib.hs, .stack-work/dist/x86_64-linux-tinfo6/Cabal-2.4.0.1/build/Lib.o ) +... +~~~ + +## hide-th-loading + +Default: `true` + +Strip out the "Loading ..." lines from GHC build output, produced when using +Template Haskell. + +## hpack-force + +[:octicons-tag-24: 3.1.1](https://github.com/commercialhaskell/stack/releases/tag/v3.1.1) + +Default: `false` + +Command line equivalent (takes precedence): +[`--hpack-force` option](../global_flags.md#-no-hpack-force-flag) + +Whether or not to allow Hpack to overwrite a Cabal file that was created by a +more recent version of Hpack or has been modified manually. By default, +Hpack 0.12.0 or later will decline to overwrite a Cabal file that was created by +a more recent version of Hpack and Hpack 0.20.0 or later will decline to +overwrite a Cabal file that has been modified manually. + +## ignore-revision-mismatch + +(Removed 1.11) + +This flag was introduced in Stack 1.6, and removed on the move to Pantry. You +will receive a warning if this configuration value is set. 
+ +## install-ghc + +Default: `true` +([:octicons-tag-24: 1.6.1](https://github.com/commercialhaskell/stack/releases/tag/v1.6.1)) + +Command line equivalent (takes precedence): +[`--[no-]install-ghc`](../global_flags.md#-no-install-ghc-flag) flag + +If the specified GHC version is not available, (if `true`) should Stack seek to +download and install that version when it is needed or (if `false`) fail +(reporting that the specified compiler is not available)? + +~~~yaml +# Even if needed, do not seek to download and install a Stack-supplied GHC: +install-ghc: false +~~~ + +On Windows, `install-ghc: false` also disables the download and installation of +the Stack-supplied MSYS2 when it is needed. + +!!! note + + The `install-ghc` option does not specify whether Stack checks the + availability of either a 'system' GHC executable on the PATH or a + Stack-supplied GHC executable. In that regard, see the + [`system-ghc`](#system-ghc) option. + +## install-msys + +[:octicons-tag-24: 3.5.1](https://github.com/commercialhaskell/stack/releases/tag/v3.5.1) + +Restrictions: Windows systems only + +Default: same as the [`install-ghc`](#install-ghc) setting (including if that is +set on the command line) + +Command line equivalent (takes precedence): +[`--[no-]install-msys`](../global_flags.md#-no-install-msys-flag) flag + +If Stack is checking for the Stack-supplied MSYS2 when Stack is setting up the +environment, whether or not to download and install MSYS2 when it is needed. + +To skip entirely checking for the Stack-supplied MSYS2, see the documentation +for the [`skip-msys`](#skip-msys) configuration option. + +## jobs + +Default: the number of CPUs (cores) that the machine has. + +Command line equivalent (takes precedence): +[`-j`, `--jobs` option](../global_flags.md#-jobs-or-j-option) + +Specifies the number of concurrent jobs (principally, Stack actions during +building - see further below) to run. 
+ +When [building GHC from source](../../topics/GHC_from_source.md), specifies the +`-j[]` flag of GHC's Hadrian build system. + +In some circumstances, the default can cause some machines to run out of memory +during building. If those circumstances arise, specify `jobs: 1`. + +This configuration option is distinct from GHC's own `-j[]` flag, which +relates to parallel compilation of modules within a package. + +## local-bin-path + +Default (on Unix-like operating systems): `~/.local/bin` + +Default (on Windows): `%APPDATA%\local\bin` + +Command line equivalent (takes precedence): `--local-bin-path` option + +Specifies the target directory for +[`stack build --copy-bins`](../../commands/build_command.md#-no-copy-bins-flag) +and `stack install`. An absolute or relative path can be specified. + +If the project-level configuration is provided in the `global-project` directory +in the [Stack root](../../topics/stack_root.md), a relative path is assumed to +be relative to the current directory. Otherwise, it is assumed to be relative to +the directory of the project-level configuration file. + +## local-programs-path + +[:octicons-tag-24: 1.3.0](https://github.com/commercialhaskell/stack/releases/tag/v1.3.0) + +This overrides the location of the Stack 'programs' directory, where tools like +GHC get installed. The path must be an absolute one. + +Stack's defaults differ between Unix-like operating systems and Windows. + +=== "Unix-like" + + Default: `programs` directory in the + [Stack root](../../topics/stack_root.md). + +=== "Windows" + + Default: `$Env:LOCALAPPDATA\Programs\stack`, if the `LOCALAPPDATA` + environment variable exists. Otherwise, the `programs` directory in the + [Stack root](../../topics/stack_root.md). + + The MSYS2 tool is also installed in the Stack 'programs' directory. + + !!! 
warning "Space character in the path to Stack's 'programs' directory" + + If there is a space character in the path to Stack's 'programs' + directory this may cause problems: + + * with building packages that make use of the GNU project's `autoconf` + package and `configure` shell script files. That may be the case + particularly if there is no corresponding short name ('8 dot 3' + name) for the directory in the path with the space (which may be the + case if '8 dot 3' names have been stripped or their creation not + enabled by default). Examples of packages on Hackage that make use + of `configure` are `network` and `process`; and + + * building with GHC 9.4.1 and later. These versions of GHC have a bug + which means they do not work if the path to the `ghc` executable has + a space in it. + + The default location for Stack's 'programs' directory will have a space + in the path if the value of the `USERNAME` environment variable includes + a space. + + If there are problems building, it will be necessary to specify an + alternative path that does not contain spaces. For example, the + `programs` directory in the [Stack root](../../topics/stack_root.md) + (assuming that path is space-free). For example, if the relevant + directory is `C:\sr\programs`, add: + ~~~yaml + local-programs-path: C:\sr\programs + ~~~ + + to Stack's [global configuration](https://docs.haskellstack.org/en/stable/configure/yaml/#location-of-global-configuration) + file (`config.yaml`). + + If that global configuration file does not yet exist, command: + ~~~text + stack --no-install-ghc setup + ~~~ + + to cause Stack to create it (without also installing GHC). + +=== "Windows (Command Prompt)" + + Default: `%LOCALAPPDATA%\Programs\stack`, if the `LOCALAPPDATA` + environment variable exists. Otherwise, the `programs` directory in the + [Stack root](../../topics/stack_root.md). + + The MSYS2 tool is also installed in the Stack 'programs' directory. + + !!! 
warning "Space character in the path to Stack's 'programs' directory" + + If there is a space character in the path to Stack's 'programs' + directory this may cause problems: + + * with building packages that make use of the GNU project's `autoconf` + package and `configure` shell script files. That may be the case + particularly if there is no corresponding short name ('8 dot 3' + name) for the directory in the path with the space (which may be the + case if '8 dot 3' names have been stripped or their creation not + enabled by default). Examples of packages on Hackage that make use + of `configure` are `network` and `process`; and + + * building with GHC 9.4.1 and later. These versions of GHC have a bug + which means they do not work if the path to the `ghc` executable has + a space in it. + + The default location for Stack's 'programs' directory will have a space + in the path if the value of the `USERNAME` environment variable includes + a space. + + If there are problems building, it will be necessary to specify an + alternative path that does not contain spaces. For example, the + `programs` directory in the [Stack root](../../topics/stack_root.md) + (assuming that path is space-free). For example, if the relevant + directory is `C:\sr\programs`, add: + ~~~yaml + local-programs-path: C:\sr\programs + ~~~ + + to Stack's [global configuration](https://docs.haskellstack.org/en/stable/configure/yaml/#location-of-global-configuration) + file (`config.yaml`). + + If that global configuration file does not yet exist, command: + ~~~text + stack --no-install-ghc setup + ~~~ + + to cause Stack to create it (without also installing GHC). + +## modify-code-page + +[:octicons-tag-24: 0.1.6.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.6.0) + +Restrictions: Windows systems only. + +Default: `true` + +Command line equivalent (takes precedence): `--[no-]modify-code-page` flag + +Whether to modify the code page for UTF-8 output. 
+ +~~~yaml +modify-code-page: false +~~~ + +## msys-environment + +[:octicons-tag-24: 3.1.1](https://github.com/commercialhaskell/stack/releases/tag/v3.1.1) + +Restrictions: Windows systems only. + +Default: `MINGW64` (64-bit Windows) or `MINGW32` (32-bit Windows) + +The name of the MSYS2 environment (case-sensitive) used in the Stack +environment. Valid environments are `CLANG32`, `CLANG64`, `CLANGARM64`, +`MINGW32`, `MINGW64`, and `UCRT64`. + +## nix + +[:octicons-tag-24: 0.1.10.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.10.0) + +Default: + +~~~yaml +nix: + enable: false # Except on NixOS, where `enable: true` + pure: true + packages: [] + shell-file: + nix-shell-options: [] + path: [] + add-gc-roots: false +~~~ + +Command line equivalents: `--nix-*` flags and options (see `stack --nix-help` +for details). + +For further information, see the +[Nix integration](../../topics/nix_integration.md#non-project-specific-configuration) documentation. + +## notify-if-arch-unknown + +[:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) + +Default: `true` + +If the specified machine architecture value is unknown to Cabal (the library), +should Stack notify the user of that? + +## notify-if-base-not-boot + +[:octicons-tag-24: 3.9.1](https://github.com/commercialhaskell/stack/releases/tag/v3.9.1) + +Default: `true` + +From GHC 9.12.1, `base` is not a GHC wired-in package. When using such GHC +versions, if the specified `base` package is other than the GHC boot package, +should Stack notify the user of that? + +When using GHC versions where `base` is a GHC wired-in package, Stack will +always notify the user if the specified `base` package is other than the GHC +boot package. 
+ +## notify-if-cabal-untested + +[:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) + +Default: `true` + +If Stack has not been tested with the version of Cabal (the library) that has +been found, should Stack notify the user of that? + +## notify-if-ghc-untested + +[:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) + +Default: `true` + +If Stack has not been tested with the version of GHC that is being used, should +Stack notify the user of that? + +## notify-if-nix-on-path + +[:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) + +Default: `true` + +If Stack's integration with the Nix package manager is not enabled, should Stack +notify the user if a `nix` executable is on the PATH? + +## notify-if-no-run-benchmarks + +[:octicons-tag-24: 3.3.1](https://github.com/commercialhaskell/stack/releases/tag/v3.3.1) + +Default: `true` + +Should Stack notify the user if the automatic running of a benchmark is +prevented by the `--no-run-benchmarks` flag? + +## notify-if-no-run-tests + +[:octicons-tag-24: 3.3.1](https://github.com/commercialhaskell/stack/releases/tag/v3.3.1) + +Default: `true` + +Should Stack notify the user if the automatic running of a test suite is +prevented by the `--no-run-tests` flag? + +## package-index + +[:octicons-tag-24: 2.9.3](https://github.com/commercialhaskell/stack/releases/tag/v2.9.3) + +Default: + +~~~yaml +package-index: + download-prefix: https://hackage.haskell.org/ + hackage-security: + keyids: + - 0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d + - 1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42 + - 51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921 + - c7de58fc6a224b92b5b513f26fbb8b370f2d97c7cfe0075a951314a55734be93 + - fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0 + key-threshold: 3 + ignore-expiry: true +~~~ + +Specify the package index. 
The index must use the +[Hackage Security](https://hackage.haskell.org/package/hackage-security) format. +This setting is most useful for providing a mirror of the official Hackage +server for + +* bypassing a firewall; or +* faster downloads. + +If the setting specifies an index that does not mirror Hackage, it is likely +that will result in significant breakage, including most snapshots failing to +work. + +In the case of Hackage, the keys of its root key holders are contained in the +`haskell-infra/hackage-root-keys` +[repository](https://github.com/haskell-infra/hackage-root-keys). The Hackage +package index is signed. A signature is valid when three key holders have +signed. The Hackage timestamp is also signed. A signature is valid when one key +holder has signed. + +If the `hackage-security` key is absent, the Hackage Security configuration will +default to that applicable to the official Hackage server since 2025-07-24. + +`key-threshold` specifies the minimum number of keyholders that must have signed +the package index for it to be considered valid. + +`ignore-expiry` specifies whether or not the expiration of timestamps should be +ignored. + +!!! info + + In the case of Stack 3.7.1 and before, if the `hackage-security` key is + absent, the Hackage Security configuration will default to that applicable + to the official Hackage server before 2025-07-24. + + Before Stack 2.1.3, the default for `ignore-expiry` was `false`. For more + information, see + [issue #4928](https://github.com/commercialhaskell/stack/issues/4928). + +## pvp-bounds + +[:octicons-tag-24: 0.1.5.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.5.0) + +Default: `none` + +Command line equivalent (takes precedence): +[`stack sdist --pvp-bounds`](../../commands/sdist_command.md) option or +[`stack upload --pvp-bounds`](../../commands/upload_command.md) option + +!!! 
warning
+
+    As of Stack 1.6.0, this feature does not reliably work, due to issues with
+    the Cabal library's printer. Stack will generate a warning when a lossy
+    conversion occurs, in which case you may need to disable this setting. For
+    further information, see issue
+    [#3550](https://github.com/commercialhaskell/stack/issues/3550).
+
+When using the `sdist` and `upload` commands, this setting determines whether the Cabal file's dependencies should be modified to reflect PVP lower and upper bounds.
+
+### Basic use
+
+Values are `none` (unchanged), `upper` (add upper bounds), `lower` (add lower bounds), and `both` (add upper and lower bounds). The algorithm Stack follows is:
+
+* If an upper or lower bound (other than `>= 0` - 'any version') already exists
+  on a dependency, it is left alone
+* When adding a lower bound, Stack looks at the current version specified by
+  `stack.yaml`, and sets it as the lower bound (e.g., `foo >= 1.2.3`)
+* When adding an upper bound, Stack sets it as less than the next major version
+  (e.g., `foo < 1.3`)
+
+~~~yaml
+pvp-bounds: none
+~~~
+
+For further information, see the announcement
+[blog post](https://www.fpcomplete.com/blog/2015/09/stack-pvp).
+
+### Use with Cabal file revisions
+
+[:octicons-tag-24: 1.5.0](https://github.com/commercialhaskell/stack/releases/tag/v1.5.0)
+
+Each of the values listed above supports adding `-revision` to the end of the value, e.g. `pvp-bounds: both-revision`. This means that, when uploading to Hackage, Stack will first upload your tarball with an unmodified Cabal file, and then upload a Cabal file revision with the PVP bounds added.
+
+This can be useful - especially combined with the
+[Stackage no-revisions feature](http://www.snoyman.com/blog/2017/04/stackages-no-revisions-field) -
+as a method to ensure PVP compliance without having to proactively fix bounds issues for Stackage maintenance. 
+ +## recommend-stack-upgrade + +[:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) + +Default: `true` + +When Stack notices that a new version of Stack is available, should it notify +the user? + +## rebuild-ghc-options + +[:octicons-tag-24: 0.1.6.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.6.0) + +Default: `false` + +Should Stack rebuild a package when its GHC options change? + +The default value reflects that, in most cases, GHC options are used to affect +optimization levels and warning behavior, for which GHC does not recompile the +modules. + +!!! info + + Before Stack 0.1.6.0, Stack rebuilt a package when its GHC options changed. + +## require-stack-version + +Default: `"-any"` + +Require a version of Stack within the specified range +([cabal-style](https://www.haskell.org/cabal/users-guide/developing-packages.html#build-information)) +to be used for this project. Example: `require-stack-version: "== 0.1.*"` + +## save-hackage-creds + +[:octicons-tag-24: 1.5.0](https://github.com/commercialhaskell/stack/releases/tag/v1.5.0) + +Default: `true` + +Command line equivalent (takes precedence): +[`stack upload --[no]-save-hackage-creds`](../../commands/upload_command.md) +option + +Controls whether, when using `stack upload`, the user's Hackage username and +password are stored in a local file. + +~~~yaml +save-hackage-creds: true +~~~ + +## setup-info + +[:octicons-tag-24: 0.1.5.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.5.0) + +The `setup-info` dictionary specifies download locations for tools to be +installed during set-up, such as GHC or, on Windows, 7z and MSYS2. The +dictionary maps `('Tool', 'Platform', 'Version')` to the location where it can +be obtained. For example, mapping `(GHC, 64-bit Windows, 9.2.3)` to the URL +hosting the archive file for GHC's installation. + +Possible usages of this configuration option are: + +1. Using Stack offline or behind a firewall. +2. 
Extending the tools known to Stack, such as cutting-edge versions of GHC or + builds for custom Linux distributions (for use with the + [ghc-variant](#ghc-variant) option). + +By default, Stack obtains the dictionary from +[stack-setup-2.yaml](https://github.com/commercialhaskell/stackage-content/raw/master/stack/stack-setup-2.yaml). + +The `setup-info` dictionary is constructed in the following order: + +1. `setup-info` in the configuration - inline configuration +2. `--setup-info-yaml` command line arguments - URLs or paths. Multiple + locations may be specified. +3. `setup-info-locations` in the configuration - URLs or paths. See further + below. + +The format of this key is the same as in the default +[stack-setup-2.yaml](https://github.com/commercialhaskell/stackage-content/raw/master/stack/stack-setup-2.yaml). +For example, GHC 9.2.3 of custom variant `myvariant` (see further below) on +64-bit Windows: + +~~~yaml +setup-info: + ghc: + windows64-custom-myvariant: + 9.2.3: + url: "https://example.com/ghc-9.2.3-x86_64-unknown-mingw32-myvariant.tar.xz" +~~~ + +'Platforms' are pairs of an operating system and a machine architecture (for +example, 32-bit i386 or 64-bit x86-64) (represented by the +`Cabal.Distribution.Systems.Platform` type). 
Stack currently (version 2.15.1) +supports the following pairs in the format of the `setup-info` key: + +|Operating system|I386 arch|X86_64 arch|Other machine architectures | +|----------------|---------|-----------|------------------------------------------------------------| +|Linux |linux32 |linux64 |AArch64: linux-aarch64, Arm: linux-armv7, Sparc: linux-sparc| +|OSX |macosx |macosx | | +|Windows |windows32|windows64 | | +|FreeBSD |freebsd32|freebsd64 |AArch64: freebsd-aarch64 | +|OpenBSD |openbsd32|openbsd64 | | + +For GHC, the distinguishing 'Version' in the key format includes a 'tag' for +any (optional) GHC variant (see [ghc-variant](#ghc-variant)) and a further 'tag' +for any (optional) specialised GHC build (see [ghc-build](#ghc-build)). + +The optional variant 'tag' is either `-integersimple` or +`-custom-`. + +For example, for GHC 9.0.2 of specialised GHC build `tinfo6` on x86_64 Linux: +~~~yaml +setup-info: + ghc: + linux64-tinfo6: + 9.0.2: + url: "http://downloads.haskell.org/~ghc/9.0.2/ghc-9.0.2a-x86_64-fedora27-linux.tar.xz" + content-length: 237286244 + sha1: affc2aaa3e6a1c446698a884f56a0a13e57f00b4 + sha256: b2670e9f278e10355b0475c2cc3b8842490f1bca3c70c306f104aa60caff37b0 +~~~ + +On Windows, the required 7z executable and DLL tools are represented in the +format of the `setup-info` key simply by `sevenzexe-info` and `sevenzdll-info`. + +This configuration **adds** the specified setup information metadata to the +default. Specifying this configuration **does not** prevent the default +[stack-setup-2.yaml](https://github.com/commercialhaskell/stackage-content/raw/master/stack/stack-setup-2.yaml) +from being consulted as a fallback. 
If, however, you need to **replace** the +default `setup-info` dictionary, use the following: + +~~~yaml +setup-info-locations: [] +~~~ + +## setup-info-locations + +[:octicons-tag-24: 2.3.1](https://github.com/commercialhaskell/stack/releases/tag/v2.3.1) + +Command line equivalent (takes precedence): `--setup-info-yaml` option + +By way of introduction, see the [`setup-info`](#setup-info) option. This option +specifies the location(s) of `setup-info` dictionaries. + +The first location which provides a dictionary that specifies the location of a +tool - `('Tool', 'Platform', 'Version')` - takes precedence. For example, you +can extend the default tools, with a fallback to the default `setup-info` +location, as follows: + +~~~yaml +setup-info-locations: +- C:/stack-offline/my-stack-setup.yaml +- relative/inside/my/project/setup-info.yaml +- \\smbShare\stack\my-stack-setup.yaml +- http://stack-mirror.com/stack-setup.yaml +# Fallback to the default location +- https://github.com/commercialhaskell/stackage-content/raw/master/stack/stack-setup-2.yaml +~~~ + +Stack only refers to the default `setup-info` location if no locations are +specified in the `setup-info-locations` configuration or on the command line +using the `--setup-info-yaml` option. + +For example, both of the following will cause `stack setup` not to consult the +default `setup-info` location: + +~~~yaml +setup-info-locations: +- C:/stack-offline/my-stack-setup.yaml +~~~ + +and + +~~~yaml +setup-info-locations: [] +~~~ + +Relative paths are resolved relative to the `stack.yaml` file (either the one in +the local project or the global `stack.yaml`). + +Relative paths may also be used for the installation paths to tools (such as GHC +or 7z). This allows vendoring the tools inside a monorepo (a single repository +storing many projects). 
For example: + +Directory structure: + +~~~text +- src/ +- installs/ + - my-stack-setup.yaml + - 7z.exe + - 7z.dll + - ghc-9.2.3.tar.xz +- stack.yaml +~~~ + +In the project's `stack.yaml`: + +~~~yaml +setup-info-locations: +- installs/my-stack-setup.yaml +~~~ + +In `installs/my-stack-setup.yaml`: + +~~~yaml +sevenzexe-info: + url: "installs/7z.exe" + +sevenzdll-info: + url: "installs/7z.dll" + +ghc: + windows64: + 9.2.3: + url: "installs/ghc-9.2.3.tar.xz" +~~~ + +## skip-ghc-check + +Default: `false` + +Command line equivalent (takes precedence): `--[no-]skip-ghc-check` flag + +Should we skip the check to confirm that your system GHC version (on the PATH) +matches what your project expects? + +## skip-msys + +[:octicons-tag-24: 0.1.2.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.2.0) + +Restrictions: Windows systems only + +Default: `false` + +Command line equivalent (takes precedence): +[`--[no-]skip-msys`](../global_flags.md#-no-skip-msys-option) flag + +Skips checking for the Stack-supplied MSYS2 (and installing that MSYS2, if it is +not installed) when Stack is setting up the environment. + +To prevent installation of MSYS2, if it is not installed, see the documentation +for the [`install-msys`](#install-msys) configuration option. + +~~~yaml +skip-msys: true +~~~ + +!!! note + + Usually, the use of this option does not make sense in project-level + configuration and it is used only in global configuration. + +## snapshot-location-base + +[:octicons-tag-24: 2.5.1](https://github.com/commercialhaskell/stack/releases/tag/v2.5.1) + +Default: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/ +(as set in the `pantry` library) + +Command line equivalent (takes precedence): +[`--snapshot-location-base`](../global_flags.md#-snapshot-location-base-command-option) +option + +Sets the base location of the LTS Haskell or Stackage Nightly snapshots. 
+
+For example:
+
+~~~yaml
+snapshot-location-base: https://example.com/snapshots/location/
+~~~
+
+has the following effect:
+
+* `lts-X.Y` expands to `https://example.com/snapshots/location/lts/X/Y.yaml`
+* `nightly-YYYY-MM-DD` expands to
+  `https://example.com/snapshots/location/nightly/YYYY/M/D.yaml`
+
+This key is convenient in setups that restrict access to GitHub, for instance closed corporate setups. In this setting, it is common for the development environment to have general access to the internet, but not for testing/building environments. To avoid the firewall, one can run a local snapshots mirror and then use a custom `snapshot-location-base` in the closed environments only.
+
+## stack-colors
+
+Command line equivalent (takes precedence): `--stack-colors` option
+
+Stack uses styles to format some of its output. The default styles do not work well with every terminal theme. This option specifies Stack's output styles, allowing new styles to replace the defaults. The option is used as `stack-colors: <STYLES>`, where `<STYLES>` is a colon-delimited sequence of key=value, 'key' is a style name and 'value' is a semicolon-delimited list of 'ANSI' SGR (Select Graphic Rendition) control codes (in decimal). Use the command `stack ls stack-colors --basic` to see the current sequence.
+
+The 'ANSI' standards refer to (1) standard ECMA-48 'Control Functions for Coded Character Sets' (5th edition, 1991); (2) extensions in ITU-T Recommendation (previously CCITT Recommendation) T.416 (03/93) 'Information Technology – Open Document Architecture (ODA) and Interchange Format: Character Content Architectures' (also published as ISO/IEC International Standard 8613-6); and (3) further extensions used by 'XTerm', a terminal emulator for the X Window
System. 
The 'ANSI' SGR codes are described in a +[Wikipedia article](http://en.wikipedia.org/wiki/ANSI_escape_code) +and those codes supported on current versions of Windows in +[Microsoft's documentation](https://docs.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences). + +For example, users of the popular +[Solarized Dark](https://ethanschoonover.com/solarized/) +terminal theme might wish to set the styles as follows: + +~~~yaml +stack-colors: error=31:good=32:shell=35:dir=34:recommendation=32:target=95:module=35:package-component=95:secondary=92:highlight=32 +~~~ +In respect of styles used in verbose output, some of that output occurs before +the configuration file is processed. + +(The British English spelling (colour) is also accepted. In configuration files, +the American spelling is the alternative that has priority.) + +## stack-developer-mode + +[:octicons-tag-24: 2.3.3](https://github.com/commercialhaskell/stack/releases/tag/v2.3.3) + +Default (official distributed binaries): `false` + +Default (built from source): `true` + +Turns on a mode where some messages are printed at WARN level instead of DEBUG +level, especially useful for developers of Stack itself. + +~~~yaml +stack-developer-mode: false +~~~ + +## system-ghc + +Default: `false` +([:octicons-tag-24: 1.3.0](https://github.com/commercialhaskell/stack/releases/tag/v1.3.0)), +unless the [Docker](../../topics/docker_integration.md) or +[Nix](../../topics/nix_integration.md) integration is enabled. + +Command line equivalent (takes precedence): `--[no-]system-ghc` flag + +Should Stack seek to use (if `true`) a 'system' GHC executable (that is, one on +the PATH) or (if `false`) a Stack-supplied GHC executable? + +~~~yaml +# Seek to use a 'system' GHC on the PATH rather than a Stack-supplied GHC: +system-ghc: true +~~~ + +Stack's [Nix integration](../../topics/nix_integration.md), when enabled, is +incompatible with `system-ghc: false`. + +!!! 
note

    The `system-ghc` option does not specify Stack's behaviour if the specified
    GHC version is not already available. In that regard, see the
    [`install-ghc`](#install-ghc) option.

## templates

Command line equivalent (takes precedence): `stack new --param <key>:<value>`
(or `-p`) option

Templates used with `stack new` have a number of parameters that affect the
generated code. These can be set for all new projects you create. The result of
them can be observed in the generated LICENSE and Cabal files. The value for all
of these parameters must be strings.

The parameters are: `author-email`, `author-name`, `category`, `copyright`,
`year` and `github-username`.

* _author-email_ - sets the `maintainer` property in Cabal
* _author-name_ - sets the `author` property in Cabal and the name used in
  LICENSE
* _category_ - sets the `category` property in Cabal. This is used in Hackage.
  For examples of categories see
  [Packages by category](https://hackage.haskell.org/packages/). It makes sense
  for `category` to be set on a per project basis because it is uncommon for all
  projects a user creates to belong to the same category. The category can be
  set per project by passing `-p "category:value"` to the `stack new` command.
* _copyright_ - sets the `copyright` property in Cabal. It is typically the
  name of the holder of the copyright on the package and the year(s) from which
  copyright is claimed. For example: `Copyright (c) 2023-2025 Joe Bloggs`
* _year_ - if `copyright` is not specified, `year` and `author-name` are used
  to generate the copyright property in Cabal. If `year` is not specified, it
  defaults to the current year.
* _github-username_ - used to generate `homepage` and `source-repository` in
  Cabal. 
For instance `github-username: myusername` and + `stack new my-project new-template` would result: + +~~~yaml +homepage: http://github.com/myusername/my-project#readme + +source-repository head + type: git + location: https://github.com/myusername/my-project +~~~ + +These properties can be set in `config.yaml` as follows: +~~~yaml +templates: + params: + author-name: Your Name + author-email: youremail@example.com + category: Your Projects Category + copyright: 'Copyright (c) 2025 Your Name' + github-username: yourusername +~~~ + +Additionally, `stack new` can automatically initialize source control +repositories in the directories it creates. Source control tools can be +specified with the `scm-init` option. At the moment, only `git` is supported. + +~~~yaml +templates: + scm-init: git +~~~ + +## urls + +Default: + +~~~yaml +urls: + latest-snapshot: https://stackage-haddock.haskell.org/snapshots.json + recent-snapshots: https://www.stackage.org/api/v1/snapshots +~~~ + +Customize the URLs where Stack looks for information about available snapshots, +either +([:octicons-tag-24: 1.1.0](https://github.com/commercialhaskell/stack/releases/tag/v1.1.0)) +the latest LTS and Nightly snapshots (`latest-snapshots`) or +(:octicons-tag-24: UNRELEASED) recently-published snapshots (`recent-snapshots`). + +!!! note + + The default for `latest-snapshots` for Stack 1.3.0 to 2.15.3 was + https://s3.amazonaws.com/haddock.stackage.org/snapshots.json. Following the + handover of the Stackage project to the Haskell Foundation in + early 2024, the file at that URL may not be up to date. Users of those + versions of Stack should configure the URL to be the default above. + +!!! note + + The default for `recent-snapshots` for Stack 1.7.1 to 3.9.3 was + https://www.stackage.org/snapshots. The Stackage server API may cease to use + that end point in the future. 
+ +## with-gcc + +Command line equivalent (takes precedence): `--with-gcc` option + +Specify a path to GCC explicitly, rather than relying on the normal path +resolution. + +~~~yaml +with-gcc: /usr/local/bin/gcc-5 +~~~ + +## with-hpack + +Command line equivalent (takes precedence): `--with-hpack` option + +Use an [Hpack](https://github.com/sol/hpack) executable, rather than Stack's +in-built version of the Hpack functionality. + +~~~yaml +with-hpack: /usr/local/bin/hpack +~~~ + +## work-dir + +[:octicons-tag-24: 0.1.10.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.10.0) + +Default: `.stack-work` + +Command line equivalent (takes precedence): +[`--work-dir`](../global_flags.md#-work-dir-option) option + +Environment variable alternative (lowest precedence): +[`STACK_WORK`](../environment_variables.md#stack_work) + +`work-dir` specifies the path of Stack's work directory, within a local project +or package directory. The path must be a relative one, relative to the +root directory of the project or package. The relative path cannot include a +`..` (parent directory) component. diff --git a/doc/configure/yaml/project.md b/doc/configure/yaml/project.md new file mode 100644 index 0000000000..7eea35ce90 --- /dev/null +++ b/doc/configure/yaml/project.md @@ -0,0 +1,465 @@ +
+ +# Project-specific configuration + +Project-specific configuration options are valid only in a project-level +configuration file (`stack.yaml`, by default). Most of Stack's configuration +options are [non-project specific](non-project.md). + +Each of the Haskell packages to which a Stack project relates is either a +**project package** that is part of the project and located locally or a package +on which one or more of the project packages depends (directly or indirectly). +The latter is referred to as a **dependency** and it may be located locally or +elsewhere. + +!!! info + + Project packages are built by default. Dependencies are only built when + needed. Building can target individual components of a project package. The + individual components of dependencies cannot be targeted. Test suite and + benchmark components of a project package can be built and run. The library + and executable components of a dependency, and only those components, are + built when the dependency is needed. + +In your project-specific options, you specify both **which project packages** to +build and **which dependencies to use** when building these packages. + +A dependency specified as an [extra-dep](#extra-deps) will shadow a package of +the same name specified in a [snapshot](#snapshot). A project package will +shadow a dependency of the same name. + +## snapshot + +Command line equivalent (takes precedence): +[`--snapshot`](../global_flags.md#-snapshot-option) or +[`--resolver`](../global_flags.md#-resolver-option) (deprecated) option + +The `snapshot` key specifies which snapshot is to be used for this project. A +snapshot defines a GHC version, the package version of packages available for +installation, and various settings like build flags. 
For example: + +~~~yaml +snapshot: lts-24.37 # A Stackage LTS Haskell snapshot +~~~ + +or + +~~~yaml +snapshot: nightly-2026-04-18 # A Stackage Nightly snapshot +~~~ + +For further information about how to specify the location of a snapshot, see the +[snapshot location](../../topics/snapshot_location.md) documentation. + +The choice of snapshot determines what constraints are placed on the compiler +version. For further information about additional control over the compiler +version, see the [compiler-check](non-project.md#compiler-check) option +documentation. + +A package version specified in a snapshot can be shadowed by an +[extra-dep](#extra-deps) of the same name or a [project package](#packages) of +the same name. + +A snapshot was formerly called a resolver since it states how dependencies are +resolved. + +## packages + +Default: + +~~~yaml +packages: +- . +~~~ + +The `packages` key specifies a list of the project packages that are part of +your project. These are specified via paths to local directories. A path is +considered relative to the directory containing the project-level configuration +file (`stack.yaml`, by default). For example, if the `stack.yaml` file is +located at `/dir1/dir2/stack.yaml`, and has: + +~~~yaml +packages: +- my-package +- dir3/my-other-package +~~~ + +the configuration means "project packages in directories `/dir1/dir2/my-package` +and `/dir1/dir2/dir3/my-other-package`". + +The `packages` key is optional. The default value, '`.`', means that the +project has a single project package located in the current directory. + +A project package will shadow a dependency of the same name. + +A package version specified in a snapshot can be shadowed by an +[extra-dep](#extra-deps) of the same name or a [project package](#packages) of +the same name. + +Each specified project package directory must have a valid Cabal file or Hpack +`package.yaml` file present. Any subdirectories of the directory are not +searched for Cabal files. 
A subdirectory has to be specified as an independent
+item in the list of project packages.
+
+A project package is different from a dependency (located locally or elsewhere)
+specified as an [extra-dep](#extra-deps) or via a [snapshot](#snapshot). For
+example:
+
+* a project package will be built by default by commanding
+  [`stack build`](../../commands/build_command.md) without specific targets. A
+  dependency will only be built if it is needed; and
+* test suites and benchmarks may be built and run for a project package. They
+  are never run for a dependency.
+
+## extra-deps
+
+Default: `[]`
+
+The `extra-deps` key specifies a list of extra dependencies on top of what is
+defined in the [snapshot](#snapshot). A dependency may come from either a Pantry
+package location or a local file path.
+
+A Pantry package location is one of three different kinds of sources:
+
+* the package index (Hackage);
+* an archive (a tarball or zip file, either local or over HTTP or HTTPS); or
+* a Git or Mercurial repository.
+
+For further information on the format for specifying a Pantry package location,
+see the [package location](../../topics/package_location.md) documentation. For
+example:
+
+~~~yaml
+extra-deps:
+# The latest revision of a package in the package index (Hackage):
+- acme-missiles-0.3
+# A specific revision of a package in the package index (Hackage):
+- acme-missiles-0.3@rev:0
+# An *.tar.gz archive file over HTTPS:
+- url: https://github.com/example-user/my-repo/archive/08c9b4cdf977d5bcd1baba046a007940c1940758.tar.gz
+  subdirs:
+  - my-package
+# A Git repository at a specific commit:
+- git: https://github.com/example-user/my-repo.git
+  commit: '08c9b4cdf977d5bcd1baba046a007940c1940758'
+# An archive of files at a point in the history of a GitHub repository
+# (identified by a specific commit):
+- github: example-user/my-repo
+  commit: '08c9b4cdf977d5bcd1baba046a007940c1940758'
+  subdirs:
+  - my-package
+~~~
+
+!!! note
+
+    GHC boot packages are special. 
An extra-dep with the same package name and
+    version as a GHC boot package will be ignored.
+
+!!! note
+
+    The `commit:` key expects a YAML string. A commit hash, or partial hash,
+    comprised only of digits represents a YAML number, unless it is enclosed in
+    quotation marks.
+
+For a local file path source, the path is considered relative to the directory
+containing the `stack.yaml` file. For example, if the `stack.yaml` is located
+at `/dir1/dir2/stack.yaml`, and has:
+
+~~~yaml
+extra-deps:
+- my-package
+- dir3/my-other-package
+~~~
+
+the configuration means "extra-deps packages in directories
+`/dir1/dir2/my-package` and `/dir1/dir2/dir3/my-other-package`".
+
+!!! note
+
+    A local file path that has the format of a package identifier will be
+    interpreted as a reference to a package on Hackage. Prefix it with `./` to
+    avoid that confusion.
+
+!!! note
+
+    A specified extra-dep that does not have the format of a valid Pantry
+    package location (for example, a reference to a package on Hackage that
+    omits the package's version) will be interpreted as a local file path.
+
+An extra-dep will shadow a dependency specified in a [snapshot](#snapshot) of
+the same name. An extra-dep can be shadowed by a [project package](#packages) of
+the same name.
+
+!!! info "GHC wired-in packages"
+
+    Some Haskell packages published on Hackage, for example `base < 4.21.0.0`
+    and `ghc`, are referred to as 'wired-in' to one or more versions of GHC or
+    as 'magic'. They can be distinguished from normal packages by the contents
+    of their Cabal files: GHC's `-this-unit-id` option is set as the name of the
+    package without a version. For example, the `base.cabal` for `base-4.20.1.0`
+    includes:
+
+    ~~~yaml
+    -- We need to set the unit id to base (without a version number)
+    -- as it's magic.
+    ghc-options: -this-unit-id base
+    ~~~
+
+    The GHC boot packages that are 'wired-in' cannot be shadowed with different
+    versions of the same package. 
Given their dependencies, the use of these + boot packages in a build plan may limit what can be specified as an + extra-dep. + + For example, GHC boot package `ghc-9.10.3` has a dependency on `process`. Its + `*.conf` file identifies the dependency as `process-1.6.26.1-10ef`. If + package `ghc-9.10.3` is part of a build plan and a different version of + `process` is specified as an extra-dep, during a build, Stack will identify + that the build plan refers to two versions of `process` and warn that the + build is likely to fail. + +## flags + +Default: `{}` + +Command line equivalent (takes precedence): +[`stack build --flag`](../../commands/build_command.md#-flag-option) option + +Cabal flags can be set for each package separately. For example: + +~~~yaml +flags: + package-name: + flag-name: true +~~~ + +This overrides all Cabal flag specifications (if any) for the specified packages +in the snapshot. + +!!! note + + For a package included directly in the snapshot, if the Cabal flag + specifications differ from the Cabal flag specifications (if any) in the + snapshot, then the package will automatically be promoted to be an + [extra-dep](#extra-deps). + +!!! note + + In order to set a Cabal flag for a GHC boot package, the package must be + specified as an [extra-dep](#extra-deps). + +## drop-packages + +[:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) + +Default: `[]` + +Packages which, when present in the snapshot specified in the +[`snapshot`](#snapshot) or (deprecated) [`resolver`](#resolver) key, should not +be included in our project. This can be used for a few different purposes, e.g.: + +* Ensure that packages you do not want used in your project cannot be used in a + `package.yaml` file (e.g., for license reasons) +* When using a custom GHC build, avoid incompatible packages (see this + [comment](https://github.com/commercialhaskell/stack/pull/4655#issuecomment-477954429)). 
+
+~~~yaml
+drop-packages:
+- buggy-package
+- package-with-unacceptable-license
+~~~
+
+!!! info
+
+    Stackage snapshots LTS Haskell 14.27 (GHC 8.6.5) and earlier, and Nightly
+    2022-02-08 (GHC 8.8.2) and earlier, included directly the `Cabal` package.
+    Later snapshots do not include directly that package (which is a GHC boot
+    package).
+
+    For the older Stackage snapshots, it could be handy to drop the
+    snapshot-specified `Cabal` package, to avoid building that version of the
+    package. For the later snapshots, there is no package version to drop.
+
+## user-message
+
+If present, specifies a message to be displayed every time the configuration is
+loaded by Stack. It can serve as a reminder for the user to review the
+configuration and make any changes if needed. The user can delete this message
+if the generated configuration is acceptable.
+
+Consecutive line ends in the message are interpreted as a single blank line.
+
+For example, a user-message is inserted by `stack init` when it omits packages
+or adds external dependencies, namely:
+
+~~~yaml
+user-message: |
+  Warning (added by new or init): Some packages were found to be incompatible
+  with the snapshot and have been left commented out in the packages section.
+
+  Warning (added by new or init): Specified snapshot could not satisfy all
+  dependencies. Some external packages have been added as dependencies.
+
+  You can omit this message by removing it from the project-level configuration
+  file.
+~~~
+
+## custom-preprocessor-extensions
+
+Default: `[]`
+
+Command line equivalent: `--custom-preprocessor-extensions` option
+
+In order for Stack to be aware of any custom preprocessors you are using, add
+their extensions here.
+
+~~~yaml
+custom-preprocessor-extensions:
+- erb
+~~~
+
+??? example "Use of a custom preprocessor"
+
+    The [Ruby](https://www.ruby-lang.org/en/) programming language provides
+    [`erb`](https://docs.ruby-lang.org/en/master/ERB.html) at the command line. 
+ `erb` provides a templating system for Ruby. The following example uses + `erb` as a custom preprocessor. + + The example is a single-package project with a customised `Setup.hs`, which + Stack will use to build: + ~~~haskell + {-# LANGUAGE CPP #-} + + module Main + ( main + ) where + + import Distribution.Simple ( defaultMainWithHooks, simpleUserHooks ) + import Distribution.Simple.PreProcess + ( PreProcessor (..), mkSimplePreProcessor, unsorted ) + import Distribution.Simple.UserHooks ( UserHooks (..) ) + import Distribution.Types.BuildInfo ( BuildInfo ) + import Distribution.Types.ComponentLocalBuildInfo + ( ComponentLocalBuildInfo ) + import Distribution.Types.LocalBuildInfo ( LocalBuildInfo ) + import System.Process ( readCreateProcess, proc, shell ) + + main :: IO () + main = defaultMainWithHooks simpleUserHooks + { hookedPreProcessors = [("erb", runRuby)] + } + + runRuby :: + BuildInfo + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> PreProcessor + runRuby _ _ _ = PreProcessor + { platformIndependent = True + , ppOrdering = unsorted + , runPreProcessor = mkSimplePreProcessor $ \erbFile fout verbosity -> + readCreateProcess (erbProcess erbFile) "" >>= writeFile fout + } + where + erbProcess erbFile = + #if defined(mingw32_HOST_OS) + shell $ "erb " <> erbFile + #else + proc "erb" [erbFile] + #endif + ~~~ + + The example has a package description file (`package.yaml`) that specifies a + `Custom` build type: + ~~~yaml + spec-version: 0.36.0 + name: my-package + version: 0.1.0.0 + build-type: Custom + + dependencies: base + + custom-setup: + dependencies: + - base + - Cabal + - process + + library: + source-dirs: src + generated-exposed-modules: MyModule + ~~~ + + The example has a `src/MyModule.erb` file that will be preprocessed to + create Haskell source code: + ~~~text + module MyModule where + + <% (1..5).each do |i| %> + test<%= i %> :: Int + test<%= i %> = <%= i %> + <% end %> + ~~~ + + The example has a project-level configuration file 
(`stack.yaml`): + ~~~yaml + snapshot: lts-22.30 + custom-preprocessor-extensions: + - erb + ~~~ + +## extra-package-dbs + +[:octicons-tag-24: 0.1.6.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.6.0) + +Default: `[]` + +A list of relative or absolute paths to package databases. These databases will +be added on top of GHC's global package database before the addition of other +package databases. + +!!! warning + + Use of this feature may result in builds that are not reproducible, as Stack + has no control over the contents of the extra package databases. + +## resolver + +[:octicons-thumbsdown-24: 3.9.3](https://github.com/commercialhaskell/stack/releases/tag/v3.9.3) + +`resolver` (deprecated) and [`snapshot`](#snapshot) are synonyms. + +One of these keys is required. More than one is prohibited. + +## curator + +:octicons-beaker-24: Experimental + +[:octicons-tag-24: 2.1.0.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.0.1) + +Default: `{}` + +Configuration intended for use only by the +[`curator` tool](https://github.com/commercialhaskell/curator), which uses Stack +to build packages. For given package names (which need not exist in the +project), Stack can be configured to ignore (skip) silently building test +suites, building benchmarks and/or creating Haddock documentation or to expect +that building test suites, building benchmarks and/or creating Haddock +documentation will fail. + +For example: + +~~~yaml +curator: + skip-test: + - my-package1 + expect-test-failure: + - my-package2 + skip-bench: + - my-package3 + expect-benchmark-failure: + - my-package4 + skip-haddock: + - my-package5 + expect-haddock-failure: + - my-package6 +~~~ diff --git a/doc/coverage.md b/doc/coverage.md deleted file mode 100644 index 488cd9a880..0000000000 --- a/doc/coverage.md +++ /dev/null @@ -1,121 +0,0 @@ -
- -# Code Coverage - -Code coverage is enabled by passing the `--coverage` flag to `stack build`. - -## Usage - -`stack test --coverage` is quite streamlined for the following use-case: - -1. You have test-suites which exercise your local packages. - -2. These test-suites link against your library, rather than building the library - directly. Coverage information is only given for libraries, ignoring the - modules which get compiled directly into your executable. A common case where - this doesn't happen is when your test-suite and library both have something - like `hs-source-dirs: src/`. In this case, when building your test-suite you - may also be compiling your library, instead of just linking against it. - -When your project has these properties, you will get the following: - -1) Textual coverage reports in the build output. - -2) A unified textual and HTML report, considering the coverage on all local - libraries, based on all of the tests that were run. - -3) An index of all generated HTML reports, at - `$(stack path --local-hpc-root)/index.html`. - -## "stack hpc report" command - -The `stack hpc report` command generates a report for a selection of targets and -`.tix` files. For example, if we have 3 different packages with test-suites, -packages `A`, `B`, and `C`, the default unified report will have coverage from -all 3. If we want a unified report with just two, we can instead run: - -``` -$ stack hpc report A B -``` - -This will output a textual report for the combined coverage from `A` and `B`'s -test-suites, along with a path to the HTML for the report. To further -streamline this process, you can pass the `--open` option, to open the report in -your browser. - -This command also supports taking extra `.tix` files. 
If you've also built an -executable, against exactly the same library versions of `A`, `B`, and `C`, then -you could do the following: - -``` -$ stack exec -- an-exe -$ stack hpc report A B C an-exe.tix -``` - -This report will consider all test results as well as the newly generated -`an-exe.tix` file. Since this is a common use-case, there is a convenient flag -to use all stored results - `stack hpc report --all an-exe.tix`. - -## "extra-tix-files" directory - -During the execution of the build, you can place additional tix files in -`$(stack path --local-hpc-root)/extra-tix-files/` in order for them to be -included in the unified report. A couple caveats: - -* These tix files must be generated by executables that are built against the -exact same library versions. Also note that, on subsequent builds with coverage, -the `$(stack path --local-hpc-root)` directory will be recursively deleted. It -just stores the most recent coverage data. - -* These tix files will not be considered by `stack hpc report` unless listed -explicitly by file name. - -## Implementation details - -Most users can get away with just understanding the above documentation. -However, advanced users may want to understand exactly how `--coverage` works: - -1. The GHC option `-fhpc` gets passed to all local packages. This tells GHC to - output executables that track coverage information and output them to `.tix` - files. `the-exe-name.tix` files will get written to the working directory of - the executable. - - When switching on this flag, it will usually cause all local packages to be - rebuilt (see [#1940](https://github.com/commercialhaskell/stack/issues/1940)). - -2. Before the build runs with `--coverage`, the contents of `stack path --local-hpc-root` - gets deleted. This prevents old reports from getting mixed - with new reports. If you want to preserve report information from multiple - runs, copy the contents of this path to a new folder. - -3. 
Before a test run, if a `test-name.tix` file exists in the package directory, - it will be deleted. - -4. After a test run, it will expect a `test-name.tix` file to exist. This file - will then get loaded, modified, and outputted to - `$(stack path --local-hpc-root)/pkg-name/test-name/test-name.tix)`. - - The `.tix` file gets modified to remove coverage file that isn't associated - with a library. So, this means that you won't get coverage information for - the modules compiled in the `executable` or `test-suite` stanza of your cabal - file. This makes it possible to directly union multiple `*.tix` files from - different executables (assuming they are using the exact same versions of the - local packages). - - If there is enough popular demand, it may be possible in the future to give - coverage information for modules that are compiled directly into the - executable. See - [#1359](https://github.com/commercialhaskell/stack/issues/1359). - -5. Once we have a `.tix` file for a test, we also generate a textual and HTML - report for it. The textual report is sent to the terminal. The index of the - test-specific HTML report is available at - `$(stack path --local-hpc-root)/pkg-name/test-name/index.html` - -6. After the build completes, if there are multiple output `*.tix` files, they - get combined into a unified report. The index of this report will be - available at `$(stack path --local-hpc-root)/combined/all/index.html` - -7. Finally, an index of the resulting coverage reports is generated. It links to - the individual coverage reports (one for each test-suite), as well as the - unified report. This index is available at `$(stack path --local-hpc-root)/index.html` diff --git a/doc/css/extra.css b/doc/css/extra.css index e13323b9a3..93a2601b5f 100644 --- a/doc/css/extra.css +++ b/doc/css/extra.css @@ -1,3 +1,12 @@ .hidden-warning { display: none } + +/* The Read the Docs flyout is formatted with a font-size that is 90% of the +body's. 
Material for MkDocs has a body font-size that is 0.5rem. This body +font-size will result in the flyout having a font-size of 0.7rem, consistent +with the font-size of other elements in the theme. +*/ +body { + font-size: 0.777778rem; +} diff --git a/doc/custom_snapshot.md b/doc/custom_snapshot.md deleted file mode 100644 index c6cd74143f..0000000000 --- a/doc/custom_snapshot.md +++ /dev/null @@ -1,5 +0,0 @@ -
- -# Custom Snapshots - -This content has been moved to the [docs on pantry](pantry.md). diff --git a/doc/dependency_visualization.md b/doc/dependency_visualization.md deleted file mode 100644 index 2dbedf1941..0000000000 --- a/doc/dependency_visualization.md +++ /dev/null @@ -1,59 +0,0 @@ -
- -# Dependency visualization - -You can use stack to visualize the dependencies between your packages and -optionally also external dependencies. - -First, you need [Graphviz](https://www.graphviz.org/). You can [get it here](https://www.graphviz.org/download/). - -As an example, let's look at `wreq`: - -``` -$ stack dot | dot -Tpng -o wreq.png -``` -[![wreq](https://cloud.githubusercontent.com/assets/591567/8478591/ae10a418-20d2-11e5-8945-55246dcfac62.png)](https://cloud.githubusercontent.com/assets/591567/8478591/ae10a418-20d2-11e5-8945-55246dcfac62.png) - -Okay that is a little boring, let's also look at external dependencies: -``` -$ stack dot --external | dot -Tpng -o wreq.png -``` -[![wreq_ext](https://cloud.githubusercontent.com/assets/591567/8478621/d247247e-20d2-11e5-993d-79096e382abd.png)](https://cloud.githubusercontent.com/assets/591567/8478621/d247247e-20d2-11e5-993d-79096e382abd.png) - -Well that is certainly a lot. As a start we can exclude `base` and then -depending on our needs we can either limit the depth: - -``` -$ stack dot --no-include-base --external --depth 1 | dot -Tpng -o wreq.png -``` -[![wreq_depth](https://cloud.githubusercontent.com/assets/591567/8484310/45b399a0-20f7-11e5-8068-031c2b352961.png)](https://cloud.githubusercontent.com/assets/591567/8484310/45b399a0-20f7-11e5-8068-031c2b352961.png) - -or prune packages explicitly: - -``` -$ stack dot --external --prune base,lens,wreq-examples,http-client,aeson,tls,http-client-tls,exceptions | dot -Tpng -o wreq_pruned.png -``` -[![wreq_pruned](https://cloud.githubusercontent.com/assets/591567/8478768/adbad280-20d3-11e5-9992-914dc24fe569.png)](https://cloud.githubusercontent.com/assets/591567/8478768/adbad280-20d3-11e5-9992-914dc24fe569.png) - -Keep in mind that you can also save the dot file: -``` -$ stack dot --external --depth 1 > wreq.dot -$ dot -Tpng -o wreq.png wreq.dot -``` - -and pass in options to `dot` or use another graph layout engine like `twopi`: - -``` -$ stack dot --external 
--prune base,lens,wreq-examples,http-client,aeson,tls,http-client-tls,exceptions | twopi -Groot=wreq -Goverlap=false -Tpng -o wreq_pruned.png -``` -[![wreq_pruned](https://cloud.githubusercontent.com/assets/591567/8495538/9fae1184-216e-11e5-9931-99e6147f8aed.png)](https://cloud.githubusercontent.com/assets/591567/8495538/9fae1184-216e-11e5-9931-99e6147f8aed.png) - -## Specifying local targets and flags - -The `dot` and `list-dependencies` commands both also accept the following -options which affect how local packages are considered: - -* `TARGET`, same as the targets passed to `build` -* `--test`, specifying that test components should be considered -* `--bench`, specifying that benchmark components should be considered -* `--flag`, specifying flags which may affect cabal file `build-depends` diff --git a/doc/dev_containers.md b/doc/dev_containers.md new file mode 100644 index 0000000000..6f48d52bf0 --- /dev/null +++ b/doc/dev_containers.md @@ -0,0 +1,160 @@ +
+ +# Dev Containers + +A *container* refers to an isolated area of memory where application software +and some drivers execute. A [Development Container](https://containers.dev) (or +Dev Container for short) allows a container to be used as a full‑featured +development environment. + +Stack provides the following Dev Containers: + +* a default Dev Container, intended for use with Stack's default project‑level + configuration file (`stack.yaml`); and +* alternative Dev Containers, intended for use with Stack's experimental + project‑level configurations (in anticipation of building Stack with more + recent versions of GHC). + +Stack's Dev Containers provide the following tools: + +1. The + [Haskell Toolchain](https://www.haskell.org/ghcup/install/#supported-tools) + ([GHC](https://www.haskell.org/ghc), Stack, + [Cabal (the tool)](https://cabal.readthedocs.io) and + [HLS](https://haskell-language-server.readthedocs.io)) +2. [Git](https://git-scm.com) +3. [HLint](https://hackage.haskell.org/package/hlint) +4. [yamllint](https://yamllint.readthedocs.io) +5. [ShellCheck](https://www.shellcheck.net) +6. [hadolint](https://github.com/hadolint/hadolint) + +The tools in the Haskell Toolchain are installed at `/usr/local/bin`. HLS is +provided in the default Dev Container only. + +!!! info + + The PATH is + `$HOME/.cabal/bin:$HOME/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin`. + Consequently, executables installed with Cabal (the tool) (at + `$HOME/.cabal/bin` or `$HOME/.local/bin`) or Stack or Pip (at + `$HOME/.local/bin`) take precedence over the same executable installed at + `/usr/local/sbin`, `/usr/local/bin`, etc. 
+ +[VS Code](https://code.visualstudio.com) is used as IDE, with the following +extensions pre‑installed: + +* [Haskell](https://marketplace.visualstudio.com/items?itemName=haskell.haskell) + (Default Dev Container only) +* [GitHub Pull Requests and Issues](https://marketplace.visualstudio.com/items?itemName=GitHub.vscode-pull-request-github) +* [GitLens — Git supercharged](https://marketplace.visualstudio.com/items?itemName=eamodio.gitlens) + * Pinned to version 11.7.0 due to unsolicited AI content in recent versions +* [Git Graph](https://marketplace.visualstudio.com/items?itemName=mhutchie.git-graph) +* [ShellCheck](https://marketplace.visualstudio.com/items?itemName=timonwong.shellcheck) +* [hadolint](https://marketplace.visualstudio.com/items?itemName=exiasr.hadolint) +* [Resource Monitor](https://marketplace.visualstudio.com/items?itemName=mutantdino.resourcemonitor) + +## Parent images + +Stack's Dev Containers are derived from Docker images that are used to build +the *statically linked* Linux/x86_64 and Linux/AArch64 binary distributions of +Stack. + +These Docker images are multi‑architecture (`linux/amd64`, `linux/arm64/v8`) +*GHC musl* images. They are based on Alpine Linux (that is +[musl libc](https://musl.libc.org) and [BusyBox](https://www.busybox.net)). + +The images contain *unofficial* and *untested* binary distributions of GHC (that +is, ones not released by the GHC developers). That is because: + +1. the official GHC binary distributions for Alpine Linux/x86_64 have + [known](https://gitlab.haskell.org/ghc/ghc/-/issues/23043) + [bugs](https://gitlab.haskell.org/ghc/ghc/-/issues/25093) ~~; and~~ +1. ~~there are no official binary distributions for Alpine Linux/AArch64.~~ + +Stack's global configuration (`/etc/stack/config.yaml`) sets +`system-ghc: true` and `install-ghc: false`. That +ensures that only the GHC available in the Dev Containers is used. 
+ +## Usage + +You can run Dev Containers locally/remotely with VS Code, or create a +[GitHub Codespace](https://github.com/features/codespaces) for a branch in a +repository to develop online. + +=== "VS Code" + + Follow the instructions at + [Developing inside a Container](https://code.visualstudio.com/docs/devcontainers/containers). + +=== "GitHub Codespaces" + + For use with GitHub Codespaces, follow the instructions at + [Creating a codespace for a repository](https://docs.github.com/en/codespaces/developing-in-codespaces/creating-a-codespace-for-a-repository#creating-a-codespace-for-a-repository). + +## Build Stack + +Stack can be built with Stack (which is recommended) or with Cabal (the tool). + +=== "Stack" + + Command `stack build` to build the `stack` executable. + + Append `--flag=stack:static` to build a *statically linked* + `stack` executable that can run on any Linux machine of the same + architecture. + + Append `--stack-yaml stack-ghc-$GHC_VERSION.yaml` if you want + to use an experimental project‑level configuration with the appropriate Dev + Container. + +=== "Cabal (the tool)" + + !!! info + + Default Dev Container only. + + Command `cabal build` to build the `stack` executable. + + Append `--flag=static` to build a *statically linked* `stack` + executable that can run on any Linux machine of the same architecture. + +## Haskell Language Server (HLS) + +The +[Haskell Language Server](https://github.com/haskell/haskell-language-server) +and the +[Haskell extension](https://marketplace.visualstudio.com/items?itemName=haskell.haskell) +are only available in the default Dev Container. In order to use the Haskell +extension, you must first configure the project for the tool used for building +of your choice. 
+ + + +See the documentation at +[Contributing: Haskell Language Server](CONTRIBUTING.md#haskell-language-server) +for cradles (`hie.yaml` files) that should suffice to configure the HLS +explicitly for `./Setup.hs` and each of the buildable components in Stack's +Cabal file. + +### Haskell extension + +Choose `Manually via PATH` when asked the following question: + +Manage HLS + +## Issues + +If there is a problem with a Dev Container, please +[open an issue](https://github.com/benz0li/ghc-musl/issues/new) at its +[parent images](#parent-images)' repository at +[https://github.com/benz0li/ghc-musl](https://github.com/benz0li/ghc-musl). diff --git a/doc/developing_on_windows.md b/doc/developing_on_windows.md deleted file mode 100644 index 05345be091..0000000000 --- a/doc/developing_on_windows.md +++ /dev/null @@ -1,63 +0,0 @@ -
- -# Developing on windows # - -On Windows, Stack comes with an installation of -[msys2](https://www.msys2.org/). Msys2 will be used by Stack to -provide a unix-like shell for Stack. This may be necessary for installing some Haskell packages, such as those which use `configure` scripts. -No -matter which terminal you choose (cmd.exe, powershell, git bash or any -other) you can use this environment too by executing all programs -through `stack exec -- program`. This is especially useful if your -project needs some additional tools during the build phase. - -Executables and libraries can be installed with Pacman. All tools can -be found [in the package -list](https://github.com/msys2/msys2/wiki/Packages). A [list of -commands](https://github.com/msys2/msys2/wiki/Using-packages) that -work with Pacman is also available. Just remember that -pacman—like all other tools—should be started with `stack -exec -- pacman`. - -## Setup.hs ## - -`Setup.hs` is automatically run inside the stack environment. So when -you need to launch another tool you don't need to prefix the command -with `stack exec --` within the custom `Setup.hs` file. - -## Pacman packages to install for common Haskell packages ## - -The following lists Pacman packages known to allow the installation of -some common Haskell packages on Windows. Feel free to submit -additional entries via a pull request. - -* For [text-icu](https://github.com/bos/text-icu) install `mingw64/mingw-w64-x86_64-icu` - -## Cmake ## - -Cmake has trouble finding other tools even if they are available on -the `PATH`. Likely this is not a cmake problem but one of the -environment not fully integrating. For example GHC comes with a copy -of GCC which is not installed by msys itself. If you want to use this -GCC you can provide a full path to it, or find it first with -`System.Directory.findExecutable` if you want to launch GCC from a -Haskell file such as `Setup.hs`. 
- -Experience tells that the `mingw-w64` versions of make and cmake are -most likely to work. Though there are other versions available through -pacman, so have a look to see what works for you. Both tools can be -installed with the commands: - -``` -stack exec -- pacman -S mingw-w64-x86_64-make -stack exec -- pacman -S mingw-w64-x86_64-cmake -``` - -Even though make and cmake are then both installed into the same -environment. Cmake still seems to have trouble to find make. To help -cmake find GCC and make supply the following flags: - -``` --DCMAKE_C_COMPILER=path --DCMAKE_MAKE_PROGRAM=path -``` diff --git a/doc/faq.md b/doc/faq.md index 6af66ce64a..d6ef86d970 100644 --- a/doc/faq.md +++ b/doc/faq.md @@ -1,586 +1,652 @@
-# FAQ - -So that this doesn't become repetitive: for the reasons behind the answers -below, see the [Build overview](build_overview.md) page. The goal of the answers -here is to be as helpful and concise as possible. - -## What version of GHC is used when I run something like `stack ghci`? - -The version of GHC, as well as which packages can be installed, are -specified by the _resolver_. This may be something like `lts-8.12`, -which is from the -[Long Term Support (LTS) Haskell](https://github.com/fpco/lts-haskell/) -project. The [user guide](GUIDE.md) discusses resolvers in more -detail. - -Which resolver is used is determined by finding the relevant -`stack.yaml` configuration file for the directory you're running the -command from. This essentially works by: - -1. Check for a `STACK_YAML` environment variable or the `--stack-yaml` - command line argument -2. If none present, check for a `stack.yaml` file in the current - directory or any parents -3. If no `stack.yaml` was found, use the _implicit global_ - -The implicit global is a shared project used whenever you're outside -of another project. It's a sort of "mutable shared state" that you -should be aware of when working with Stack. The most recent request -when working with the implicit global is how to move to a more recent -LTS snapshot. You can do this by running the following from outside of -a project: - - stack config set resolver lts - -## Where is stack installed and will it interfere with `ghc` (etc) I already have installed? - -Stack itself is installed in normal system locations based on the mechanism you -used (see the [Install and upgrade](install_and_upgrade.md) page). Stack -installs the Stackage libraries in `~/.stack` and any project libraries or -extra dependencies in a `.stack-work` directory within each project's -directory. None of this should affect any existing Haskell tools at all. - -## What is the relationship between stack and cabal? 
- -* Cabal-the-library is used by stack to build your Haskell code. See the - [Architecture: Plan Execution](architecture.md#plan-execution) section for - more detail, including how the Cabal version is chosen. -* A .cabal file is provided for each package, and defines all package-level - metadata just like it does in the cabal-install world: modules, executables, - test suites, etc. No change at all on this front. -* A stack.yaml file references 1 or more packages, and provides information on - where dependencies come from. -* `stack build` currently initializes a stack.yaml from the existing .cabal - file. Project initialization is something that is still being discussed and - there may be more options here for new projects in the future (see issue - [253](https://github.com/commercialhaskell/stack/issues/253)) - -For detail on the differences between a `stack.yaml` and Cabal package file, see -[stack.yaml vs cabal package file](stack_yaml_vs_cabal_package_file.md). - -## I need to use a different version of a package than what is provided by the LTS Haskell snapshot I'm using, what should I do? - -You can make tweaks to a snapshot by modifying the `extra-deps` configuration value in your `stack.yaml` file, e.g.: - -```yaml -resolver: lts-2.9 -packages: -- '.' -extra-deps: -- text-1.2.1.1 -``` - -## I need to use a package (or version of a package) that is not available on hackage, what should I do? - -Add it to the -[`extra-deps`](yaml_configuration.md#extra-deps) list in your project's -`stack.yaml`, specifying the package's source code location relative to the -directory where your `stack.yaml` file lives, e.g. - -```yaml -resolver: lts-2.10 -packages: -- '.' 
-extra-deps: -- third-party/proprietary-dep -- github-version-of/conduit -- patched/diagrams -``` - -The above example specifies that the `proprietary-dep` package is found in the -project's `third-party` folder, that the `conduit` package is found in the -project's `github-version-of` folder, and that the `diagrams` package is found -in the project's `patched` folder. This autodetects changes and reinstalls the -package. - -To install packages directly from a Git repository, use e.g.: - -```yaml -extra-deps: - - git: https://github.com/githubuser/reponame.git - commit: somecommitID -``` - -## What is the meaning of the arguments given to stack build, test, etc? - -Those are the targets of the build, and can have one of three formats: - -* A package name (e.g., `my-package`) will mean that the `my-package` package - must be built -* A package identifier (e.g., `my-package-1.2.3`), which includes a specific - version. This is useful for passing to `stack install` for getting a specific - version from upstream -* A directory (e.g., `./my-package`) for including a local directory's package, - including any packages in subdirectories - -## I need to modify an upstream package, how should I do it? - -Typically, you will want to get the source for the package and then add it to -your `packages` list in stack.yaml. (See the previous question.) -`stack unpack` is one approach for getting the source. -Another would be to add the upstream package as a submodule to your -project. - -## How do I use this with sandboxes? - -Explicit sandboxing on the part of the user is not required by stack. All -builds are automatically isolated into separate package databases without any -user interaction. This ensures that you won't accidentally corrupt your -installed packages with actions taken in other projects. - -## Can I run `cabal` commands inside `stack exec`? - -With a recent enough version of cabal-install (>= 1.22), you can. 
For older -versions, due to -[haskell/cabal#1800](https://github.com/haskell/cabal/issues/1800), this does -not work. Note that even with recent versions, for some commands you may need -this extra level of indirection: -``` -$ stack exec -- cabal exec -- cabal -``` - -However, virtually all `cabal` commands have an equivalent in stack, so this -should not be necessary. In particular, `cabal` users may be accustomed to the -`cabal run` command. In stack: -``` -$ stack build && stack exec -``` -Or, if you want to install the binaries in a shared location: -``` -$ stack install -$ -``` -assuming your `$PATH` has been set appropriately. +# Frequently asked questions -## Using custom preprocessors +## Stack and Cabal -If you have a custom preprocessor, for example, Ruby, you may have a -file like: +??? question "What is the relationship between Stack and Cabal?" -***B.erb*** - -``` haskell -module B where + 'Cabal' can refer to Cabal (the library) or to Cabal (the tool). -<% (1..5).each do |i| %> -test<%= i %> :: Int -test<%= i %> = <%= i %> -<% end %> -``` + === "Cabal (the library)" -To ensure that Stack picks up changes to this file for rebuilds, add -the following line to your .cabal file: + Cabal (the library) is used by Stack to build your Haskell code. - extra-source-files: B.erb + A Haskell package is described by a Cabal file, which file is part of + the package. The file is named `.cabal`. -## I already have GHC installed, can I still use stack? + Stack requires a project-level configuration file (`stack.yaml`, by + default). -Yes. In its default configuration, stack will simply ignore any system GHC -installation and use a sandboxed GHC that it has installed itself (typically -via the `stack setup` command). You can find these sandboxed GHC installations -in `~/.stack/programs/$platform/ghc-$version/`. 
+ For further information about the difference between a Cabal file and + a project-level configuration file, see the + [stack.yaml vs a Cabal file](topics/stack_yaml_vs_cabal_package_file.md) + documentation. -If you would like stack to use your system GHC installation, use the -[`--system-ghc` flag](yaml_configuration.md#system-ghc) or run -`stack config set system-ghc --global true` to make stack check your -`PATH` for a suitable GHC by default. + The [`stack init`](commands/init_command.md) command initializes a + project-level configuration file from package description files. -Note that stack can only use a system GHC installation if its version is -compatible with the configuration of the current project, particularly the -[`resolver` setting](yaml_configuration.md#resolver). + Stack uses Cabal (the library) via an executable. For + `build-type: Simple` (the most common case), Stack builds that + executable using the version of Cabal which came with GHC. Stack caches + such executables, in the [Stack root](topics/stack_root.md) under + directory `setup-exe-cache`. -Note that GHC installation doesn't work for all OSes, so in some cases you -will need to use `system-ghc` and install GHC yourself. + In rare or complex cases, a different version of Cabal to the one that + came with GHC may be needed. `build-type: Custom` and a `setup-custom` + stanza in the Cabal file, and a `Setup.hs` file in the package + directory, can be specified. Stack's project-level configuration file + can then specify the version of Cabal that Stack will use to build the + executable (named `setup`) from `Setup.hs`. Stack will use Cabal via + `setup`. -## How does stack determine what GHC to use? + === "Cabal (the tool)" -In its default configuration, stack determines from the current project which -GHC version, architecture etc. it needs. It then looks in -`~/.stack/programs/$platform/ghc-$version/` for a compatible GHC, requesting -to install one via `stack setup` if none is found. 
+ Cabal (the tool) is a tool provided by the + [`cabal-install`](https://hackage.haskell.org/package/cabal-install) + Haskell package. It aims to simplify the process of managing Haskell + software by automating the fetching, configuration, compilation and + installation of Haskell libraries and programs. These are goals that + Stack shares. Stack can be used independently of Cabal (the tool) but + users can also use both, if they wish. -If you are using the [`--system-ghc` flag](yaml_configuration.md/#system-ghc) or -have configured `system-ghc: true` either in the project `stack.yaml` -or the global `~/.stack/config.yaml`, stack will use the first GHC that it finds -on your `PATH`, falling back on its sandboxed installations only if the found GHC -doesn't comply with the various requirements (version, architecture) that your -project needs. +??? question "How do I use Stack with sandboxes?" -See [this issue](https://github.com/commercialhaskell/stack/issues/420) for a -detailed discussion of stack's behavior when `system-ghc` is enabled. + A 'sandbox' is a development environment that is isolated from other parts + of the system. The concept of sandboxing is built into Stack. All builds are + automatically isolated into separate package databases. -## How do I upgrade to GHC 7.10.2 with stack? +??? question "Can I run `cabal` commands inside `stack exec`?" -If you already have a prior version of GHC use `stack --resolver ghc-7.10 setup --reinstall`. -If you don't have any GHC installed, you can skip the `--reinstall`. + Yes. Some `cabal` commands are inconsistent with the `GHC_PACKAGE_PATH` + environment variable in the Stack environment. Command, for example: -## How do I get extra build tools? + ~~~text + stack exec --no-ghc-package-path -- cabal build + ~~~ -stack will automatically install build tools required by your packages or their -dependencies, in particular alex and happy. 
+## GHC or GHCi-related -__NOTE__: This works when using lts or nightly resolvers, not with ghc or -custom resolvers. You can manually install build tools by running, e.g., -`stack build alex happy`. +??? question "Will Stack interfere with the GHC I already have installed?" -## How does stack choose which snapshot to use when creating a new config file? + No. -It checks the two most recent LTS Haskell major versions and the most recent -Stackage Nightly for a snapshot that is compatible with all of the version -bounds in your .cabal file, favoring the most recent LTS. For more information, -see the snapshot auto-detection section in the architecture document. +??? question "I already have GHC installed. Can I still use Stack?" -## I'd like to use my installed packages in a different directory. How do I tell stack where to find my packages? + Yes. In its default configuration, Stack will simply ignore any system GHC + installation and use a sandboxed GHC that it has installed itself. You can + find these sandboxed GHC installations in the `ghc-*` directories in the + `stack path --programs` directory. -Set the `STACK_YAML` environment variable to point to the `stack.yaml` config -file for your project. Then you can run `stack exec`, `stack ghc`, etc., from -any directory and still use your packages. + If you would like Stack to use your system GHC installation, use the + [`--system-ghc`](configure/yaml/non-project.md#system-ghc) flag or run + `stack config set system-ghc --global true` to make Stack check your PATH + for a suitable GHC by default. -## My tests are failing. What should I do? + Stack can only use a system GHC installation if its version is compatible + with the configuration of the current project, particularly the snapshot + specified by the [`snapshot`](configure/yaml/project.md#snapshot) key. -Like all other targets, `stack test` runs test suites in parallel by default. 
-This can cause problems with test suites that depend on global resources such -as a database or binding to a fixed port number. A quick hack is to force stack -to run all test suites in sequence, using `stack test --jobs=1`. For test -suites to run in parallel developers should ensure that their test suites do -not depend on global resources (e.g. by asking the OS for a random port to bind -to) and where unavoidable, add a lock in order to serialize access to shared -resources. + GHC installation does not work for all operating systems, so in some cases + you will need to use `system-ghc` and install GHC yourself. -## Can I get bash autocompletion? +??? question "When I command `stack ghci` what version of GHC is used?" -Yes, see the [shell-autocompletion documentation](shell_autocompletion.md) + The version of GHC is specified by the snapshot in the relevant Stack + project-level configuration file. This may be the file in the + `global-project` directory in the [Stack root](topics/stack_root.md). -## How do I update my package index? + For further information, see the [configuration](configure/yaml/index.md) + documentation. -Users of cabal are used to running `cabal update` regularly. You can do the -same with stack by running `stack update`. But generally, it's not necessary: -if the package index is missing, or if a snapshot refers to package/version -that isn't available, stack will automatically update and then try again. If -you run into a situation where stack doesn't automatically do the update for -you, please report it as a bug. +??? question "How does Stack determine what GHC to use?" -## Isn't it dangerous to automatically update the index? Can't that corrupt build plans? + In its default configuration, Stack determines from the current project which + GHC version, architecture etc it needs. 
It then looks in the `ghc-` + subdirectory of the `stack path --programs` directory for a compatible GHC, + requesting to install one via `stack setup` if none is found. -No, stack is very explicit about which packages it's going to build for you. -There are three sources of information to tell it which packages to install: -the selected snapshot, the `extra-deps` configuration value, and your local -packages. The only way to get stack to change its build plan is to modify one -of those three. Updating the index will have no impact on stack's behavior. + If you are using the + [`--system-ghc`](configure/yaml/non-project.md#system-ghc) flag or have + configured `system-ghc: true` either in the project `stack.yaml` or the + global `config.yaml`, Stack will use the first GHC that it finds on your + PATH, falling back on its sandboxed installations only if the found GHC does + not comply with the various requirements (version, architecture) that your + project needs. -## I have a custom package index I'd like to use, how do I do so? + See issue [#420](https://github.com/commercialhaskell/stack/issues/420) for + a detailed discussion of Stack's behavior when `system-ghc` is enabled. -You can configure this in your stack.yaml. See [YAML configuration](yaml_configuration.md). +??? question "How can I test that different GHC versions can build my project?" -## How can I make sure my project builds against multiple ghc versions? + You can create multiple project-level configuration files for your project, + one for each build plan. For example, you might set up your project + directory like so: -You can create multiple yaml files for your project, -one for each build plan. For example, you might set up your project directory like so: + ~~~text + myproject/ + stack-ghc-9.10.3.yaml + stack-ghc-9.8.4.yaml + stack.yaml --> symlink to stack-ghc-9.10.3.yaml + myproject.cabal + src/ + ... 
+ ~~~ -``` -myproject/ - stack-7.8.yaml - stack-7.10.yaml - stack.yaml --> symlink to stack-7.8.yaml - myproject.cabal - src/ - ... -``` + When you run `stack build`, you can set the `STACK_YAML` environment + variable to indicate which build plan to use. Command: -When you run `stack build`, you can set the -`STACK_YAML` environment variable to indicate which build plan to use. + === "Unix-like" -``` -$ stack build # builds using the default stack.yaml -$ STACK_YAML=stack-7.10.yaml stack build # builds using the given yaml file -``` + ~~~text + STACK_YAML=stack-ghc-9.10.3.yaml + stack build + ~~~ -## I heard you can use this with Docker? + === "Windows" -Yes, stack supports using Docker with images that contain preinstalled Stackage -packages and the tools. See [Docker integration](docker_integration.md) for details. + ~~~text + $Env:STACK_YAML='stack-ghc-9.10.3.yaml' + stack build + ~~~ -## How do I use this with Travis CI? + === "Windows (Command Prompt)" -See the [Travis CI instructions](travis_ci.md) + ~~~text + set STACK_YAML=stack-ghc-9.10.3.yaml + stack build + ~~~ -## How do I use this with Azure CI? +## Setup-related -See the [Azure CI instructions](azure_ci.md) +??? question "Where is Stack installed?" -## What is licensing restrictions on Windows? + Command: -Currently on Windows GHC produces binaries linked statically with [GNU Multiple -Precision Arithmetic Library](https://gmplib.org/) (GMP), which is used by -[integer-gmp](https://hackage.haskell.org/package/integer-gmp) library to -provide big integer implementation for Haskell. Contrary to the majority of -Haskell code licensed under permissive BSD3 license, GMP library is licensed -under LGPL, which means resulting binaries [have to be provided with source -code or object files](http://www.gnu.org/licenses/gpl-faq.html#LGPLStaticVsDynamic). -That may or may not be acceptable for your situation. 
Current workaround is to -use GHC built with alternative big integer implementation called -integer-simple, which is free from LGPL limitations as it's pure Haskell and -does not use GMP. Unfortunately it has yet to be available out of the box with -stack. See [issue #399](https://github.com/commercialhaskell/stack/issues/399) -for the ongoing effort and information on workarounds. + ~~~text + stack uninstall + ~~~ -## How to get a working executable on Windows? + for information about where Stack is installed. -When executing a binary after building with `stack build` (e.g. for target -"foo"), the command `foo.exe` might complain about missing runtime libraries -(whereas `stack exec foo` works). +??? question "Can I change Stack's default temporary directory?" -Windows is not able to find the necessary C++ libraries from the standard -prompt because they're not in the PATH environment variable. `stack exec` works -because it's modifying PATH to include extra things. + Stack downloads and extracts files to `$STACK_ROOT/programs` on most platforms, + which defaults to `~/.stack/programs`. On Windows `$LOCALAPPDATA\Programs\stack` + is used. If there is not enough free space in this directory, Stack may fail. + For instance, `stack setup` with a GHC installation requires roughly 1GB free. + If this is an issue, you can set `local-programs-path` in your + `~/.stack/config.yaml` to a directory on a file system with more free space. -Those libraries are shipped with GHC (and, theoretically in some cases, MSYS). -The easiest way to find them is `stack exec which`. E.g. + If you use Stack with Nix integration, be aware that Nix uses a `TMPDIR` + variable, and if it is not set Nix sets it to some subdirectory of `/run`, + which on most Linuxes is a Ramdir. Nix will run the builds in `TMPDIR`, + therefore if you do not have enough RAM you will get errors about disk + space. 
If this happens to you, please _manually_ set `TMPDIR` before + launching Stack to some directory on the disk. - >stack exec which libstdc++-6.dll - /c/Users/Michael/AppData/Local/Programs/stack/i386-windows/ghc-7.8.4/mingw/bin/libstdc++-6.dll +??? question "On Windows, `stack setup` tells me to add certain paths to the PATH instead of doing it?" + + In PowerShell, it is easy to automate even that step. Command: + + ~~~text + $Env:Path = ( stack setup | %{ $_ -replace '[^ ]+ ', ''} ), $Env:Path -join ";" + ~~~ + +??? question "Does Stack install the system/C libraries that some Cabal packages depend on?" + + No. This is currently out of the scope of Stack's target set of features. + Instead of attempting to automate the installation of 3rd party dependencies, we + have the following approaches for handling system dependencies: + + * Nix and docker help make your build and execution environment deterministic + and predictable. This way, you can install system dependencies into a + container, and share this container with all developers. + + * If you have installed some libraries into a non-standard location, use the + [`extra-lib-dirs`](configure/yaml/non-project.md#extra-lib-dirs) option or the + [`extra-include-dirs`](configure/yaml/non-project.md#extra-include-dirs) + option to specify it. + + In the future, Stack might give operating system-specific suggestions for how to + install system libraries. + +??? question "How can I make Stack aware of my custom SSL certificates?" + + === "Linux" + + Use the `SYSTEM_CERTIFICATE_PATH` environment variable to point at the directory + where you keep your SSL certificates. 
+ + + === "macOS" + + In principle, you can use the following command to add a certificate to your + system certificate keychain: + + ~~~bash + sudo security add-trusted-cert -d -r trustRoot -k /Library/Keychains/System.keychain + ~~~ + + Some users have reported issues with this approach, see issue + [#907](https://github.com/commercialhaskell/stack/issues/907) for more + information. + +## Package description format-related + +??? question "How does Stack support the Hpack specification?" + + The [Hpack](https://github.com/sol/hpack) package description format is an + alternative to that used in a Cabal file. + + If a package directory contains an package description file in the Hpack + format (`package.yaml`), Stack will use that file to create the + corresponding Cabal file. + + [`stack init`](commands/init_command.md) will use Hpack format package + description files, if they are present. + + The [`with-hpack`](configure/yaml/non-project.md#with-hpack) non-project + specific configuration option or the + [`--with-hpack`](configure/global_flags.md#-with-hpack-option) global flag + can be used to specify an Hpack executable to use instead of Stack's + built-in Hpack functionality. + +## Package index-related + +??? question "How do I update my package index?" + + Command: + + ~~~text + stack update + ~~~ + + However, generally, it is not necessary with Stack: if the package index is + missing, or if a snapshot refers to package version that is not available, + Stack will automatically update the package index and then try again. + + If you run into a situation where Stack does not automatically update the + package index, please report it as a bug. + +??? question "Is it dangerous to update the package index automatically? Can that corrupt build plans?" + + No. Stack is explicit about which packages it is going to build. 
There are + three sources of information to tell Stack which packages to install: the + selected snapshot, the `extra-deps` configuration value, and your project + packages. The only way to get Stack to change its build plan is to modify + one of those three. Updating the index will have no effect on Stack's + behavior. + +??? question "How do I use a custom package index?" + + See the [`package-index`](configure/yaml/non-project.md#package-index) + non-project specific configuration option documentation. + +## Package-related + +??? question "How do I use a package version on Hackage not in a snapshot?" + + Add the package version to the [`extra-deps`](configure/yaml/project.md) + project-specific configuration option in the + [project-level configuration file](configure/yaml/index.md). -A quick workaround is adding this path to the PATH environment variable or -copying the files somewhere Windows finds them (cf. https://msdn.microsoft.com/de-de/library/7d83bc18.aspx). +??? question "How do I use a package version not on Hackage?" -Cf. issue [#425](https://github.com/commercialhaskell/stack/issues/425). + Add the location of the package version to the + [`extra-deps`](configure/yaml/project.md) project-specific configuration + option in the [project-level configuration file](configure/yaml/index.md). -Another issue that may arise with building on Windows is as follows. The default -location of stack's programs folder is `%LOCALAPPDATA\Programs\stack`. If there -is a space character in the `%LOCALAPPDATA%` path this may, in some -circumstances, cause problems with building packages that make use of the GNU -project's `autoconf` package and `configure` shell script files. It may be -necessary to override the default location of stack's programs folder. See -[Non-project-specific config option, local-programs-path](yaml_configuration.md) -for more informaton. + For further information, see the + [package location](topics/package_location.md) documentation. -Cf. 
issue [#4726](https://github.com/commercialhaskell/stack/issues/4726) +??? question "How do I use a modified version of a package?" -## Can I change stack's default temporary directory? + Typically, a modified version of a package is used as a project package. + Add the location of the package to the + [`packages`](configure/yaml/project.md#packages) project-specific + configuration option in the + [project-level configuration file](configure/yaml/index.md). -Stack downloads and extracts files to `$STACK_ROOT/programs` on most platforms, -which defaults to `~/.stack/programs`. On Windows `$LOCALAPPDATA\Programs\stack` -is used. If there is not enough free space in this directory, Stack may fail. -For instance, `stack setup` with a GHC installation requires roughly 1GB free. -If this is an issue, you can set `local-programs-path` in your -`~/.stack/config.yaml` to a directory on a file system with more free space. + One way to get the source code for the unmodified package version is to use + the [`stack unpack`](commands/unpack_command.md). -If you use Stack with Nix integration, be aware that Nix uses a `TMPDIR` -variable, and if it is not set Nix sets it to some subdirectory of `/run`, which -on most Linuxes is a Ramdir. Nix will run the builds in `TMPDIR`, therefore if -you don't have enough RAM you will get errors about disk space. If this happens -to you, please _manually_ set `TMPDIR` before launching Stack to some directory -on the disk. +??? question "I'd like to use my installed packages in a different directory. How do I tell Stack where to find my packages?" -## Why doesn't stack rebuild my project when I specify `--ghc-options` on the command line? + Set the `STACK_YAML` environment variable to point to the `stack.yaml` + configuration file for your project. Then you can run `stack exec`, `stack ghc`, + etc., from any directory and still use your packages. 
-Because GHC options often only affect optimization levels and warning behavior, stack doesn't recompile -when it detects an option change by default. This behavior can be changed though by setting the -[`rebuild-ghc-options` option](yaml_configuration.md#rebuild-ghc-options) to `true`. +## `stack build`-related -To force recompilation manually, use the `--force-dirty` flag. If this still doesn't lead to a rebuild, -add the `-fforce-recomp` flag to your `--ghc-options`. +??? question "Why does `stack build` fail with GHC 9.8.1 and 9.8.2 only?" -## Why doesn't stack apply my `--ghc-options` to my dependencies? + If the Cabal file of the relevant package makes use of a `c-sources` field, + the failure may be due to `Cabal-3.10.2.0` enforcing that the field can + specify only `*.c` files. Earlier and later versions of Cabal (the library) + tolerate other files. -By default, stack applies command line GHC options only to local packages (these are all -the packages that are specified in the `packages` section of your `stack.yaml`). -For an explanation of this choice see [this discussion on the issue tracker](https://github.com/commercialhaskell/stack/issues/827#issuecomment-133263678). + When the Cabal build type is not `Custom`, Stack builds with the `Cabal` + boot package of the specified version of GHC. The boot package of GHC 9.8.1 + and 9.8.2 is `Cabal-3.10.2.0`. -If you still want to set specific GHC options for a dependency, use the [`ghc-options` option](yaml_configuration.md#ghc-options) in your -`stack.yaml` or global `~/.stack/config.yaml`. +??? question "What causes dependency on multiple versions of the same package?" -To change the set of packages that command line GHC options apply to, use the [`apply-ghc-options` option](yaml_configuration.md#apply-ghc-options). 
+ When building a package, during its configuration, Stack may warn: -## stack setup on a windows system only tells me to add certain paths to the PATH variable instead of doing it + ~~~text + This package indirectly depends on multiple versions of the same package. + This is very likely to cause a compile failure. + ~~~ -If you are using a powershell session, it is easy to automate even that step: + and the build subsequently fails. - $env:Path = ( stack setup | %{ $_ -replace '[^ ]+ ', ''} ), $env:Path -join ";" + Often the cause is that: (1) the package depends, directly or indirectly, on + a GHC wired-in package (for example, the `ghc` package); and (2) a direct or + indirect dependency of that wired-in package is also specified as an + extra-dep but the versions differ. -## How do I reset / remove Stack (such as to do a completely fresh build)? +??? question "Why does `stack test` trigger a rebuild of other components?" -The first thing to remove is project-specific `.stack-work` directory within -the project's directory. Next, remove `~/.stack` directory overall. You may -have errors if you remove the latter but leave the former. Removing Stack -itself will relate to how it was installed, and if you used GHC installed -outside of Stack, that would need to be removed separately. + If the set of dependencies of a project package to be built are not a + subset of the set of dependencies when it was last built, then that will + trigger a rebuild of components that were previously built. -## How does stack handle parallel builds? What exactly does it run in parallel? + The command: -See [issue #644](https://github.com/commercialhaskell/stack/issues/644) for more details. + ~~~text + stack build + ~~~ -## I get strange `ld` errors about recompiling with "-fPIC" + will build the library and executable components of project packages and the + build will take into account the dependencies of those components. 
-(Updated in January 2019) + If you then command: -This is related to more recent versions of Linux distributions that have GCC -with PIE enabled by default. The continuously-updated distros like Arch, in -particular, had been in flux with this change and the upgrading -libtinfo6/ncurses6, and there were some workarounds attempted in Stack that -ended up causing trouble as these distros evolved. + ~~~text + stack test + ~~~ -GHC added official support for this setup in 8.0.2, so if you are using an -older version your best bet is to upgrade. You may be able to work around it -for older versions by editing `~/.stack/programs/x86_64-osx/ghc-VER/lib/ghc- -VER/settings` (replace `VER` with the GHC version) and adding `-no-pie` (or -`--no-pie` in the case of Gentoo, at least as of December 2017) to the __C -compiler link flags__. + or, equivalently: -If `stack setup` complains that there is no `linuxNN-*-nopie` bindist available, -try adding `ghc-build: *` (replacing the `*` with the actual value that -precedes `-nopie`, which may be empty) to your `~/.stack/config.yaml` (this -will no longer be necessary for stack >= 1.7). + ~~~text + stack build --test + ~~~ -If you are experiencing this with GHC >= 8.0.2, try running `stack setup ---reinstall` if you've upgraded your Linux distribution or you set up GHC -before late December 2017. + the test suite components of project packages are added to the build + targets. -If GHC doesn't recognize your C compiler as being able to use `-no-pie`, -this can happen even with GCC and Clang, it might be necessary to enable -this feature manually. To do this, just change -`("C compiler supports -no-pie", "NO"),` to `("C compiler supports -no-pie", "YES"),` -in the file `~/.stack/programs/x86_64-osx/ghc-VER/lib/ghc-VER/settings`. + That can add dependencies to a project package, if its test suite + components have dependencies that are not dependencies of its library + and executable components. 
-If you are still having trouble after trying the above, check the following -for more possible workarounds: + What is true of test suite components applies equally to benchmark + components. - * [Previous version of this FAQ entry](https://docs.haskellstack.org/en/v1.6.3/faq/#i-get-strange-ld-errors-about-recompiling-with-fpic) - * Related issues: - [#3518](https://github.com/commercialhaskell/stack/issues/3518), - [#2712](https://github.com/commercialhaskell/stack/issues/2712), - [#3630](https://github.com/commercialhaskell/stack/issues/3630), - [#3648](https://github.com/commercialhaskell/stack/issues/3648) + If that behaviour is undesirable, a way to avoid it is to change the + description of each project package so that adding its test suite (or + benchmark) components does not add dependencies to the package. That is, + to specify, in the package description, the dependencies as common to all + the components that you are switching between from one build to another. -## Where does the output from `--ghc-options=-ddump-splices` (and other `-ddump*` options) go? + For example, if you are using `package.yaml`, add the dependencies to its + top-level `dependencies` key. -These are written to `*.dump-*` files inside the package's `.stack-work` -directory. Specifically, they will be available at -`PKG-DIR/$(stack path --dist-dir)/build/SOURCE-PATH`, where `SOURCE-PATH` is the path to the source -file, relative to the location of the `*.cabal` file. When building named -components such as test-suites, `SOURCE-PATH` will also include -`COMPONENT/COMPONENT-tmp`, where `COMPONENT` is the name of the component. + Alternatively, build all components of project packages without running + tests or benchmarks once built. Add the following to a configuration file: -## Why is DYLD_LIBRARY_PATH ignored? 
+ ~~~yaml + build: + test: true + test-arguments: + no-run-tests: true + bench: true + benchmark-opts: + no-run-benchmarks: true + ~~~ -If you are on Mac OS X 10.11 ("El Capitan") or later, there is an -[upstream GHC issue](https://ghc.haskell.org/trac/ghc/ticket/11617) -which -[prevents the `DYLD_LIBRARY_PATH` environment variable from being passed to GHC](https://github.com/commercialhaskell/stack/issues/1161) -when System Integrity Protection (a.k.a. "rootless") is enabled. There are two -known workarounds: + or command: - 1. Known to work in all cases: [disable System Integrity Protection](http://osxdaily.com/2015/10/05/disable-rootless-system-integrity-protection-mac-os-x/). **WARNING: Disabling SIP will severely reduce the security of your system, so only do this if absolutely necessary!** - 2. Experimental: [modify GHC's shell script wrappers to use a shell outside the protected directories](https://github.com/commercialhaskell/stack/issues/1161#issuecomment-186690904). + ~~~text + stack build --test --no-run-tests --bench --no-run-benchmarks + ~~~ -## Why do I get a `/usr/bin/ar: permission denied` error? +??? question "How do I use a custom preprocessor?" -If you are on OS X 10.11 ("El Capitan") or -later, GHC 7.8.4 is -[incompatible with System Integrity Protection (a.k.a. "rootless")](https://github.com/commercialhaskell/stack/issues/563). -GHC 7.10.2 includes a fix, so this only affects users of GHC 7.8.4. If you -cannot upgrade to GHC 7.10.2, you can work around it by -[disabling System Integrity Protection](http://osxdaily.com/2015/10/05/disable-rootless-system-integrity-protection-mac-os-x/). **WARNING: Disabling SIP will severely reduce the security of your system, so only do this if absolutely necessary!** + See the + [`customer-prepocessor-extensions`](configure/yaml/project.md#custom-preprocessor-extensions) + project-specific configuration option documentation. -## Why is the `--` argument separator ignored in Windows PowerShell +??? 
question "How do I get extra tools used during building?" -Some versions of Windows PowerShell -[don't pass the `--` to programs](https://github.com/commercialhaskell/stack/issues/813). -The workaround is to quote the `"--"`, e.g.: + Stack will automatically install tools used during building required by your + packages or their dependencies, in particular + [Alex](https://hackage.haskell.org/package/alex) and + [Happy](https://hackage.haskell.org/package/happy). - stack exec "--" cabal --version + !!! note -This is known to be a problem on Windows 7, but seems to be fixed on Windows 10. + This works when using LTS or nightly snapshots, not with GHC or custom + snapshots. You can manually install tools used during building by running, + e.g., `stack build alex happy`. -## Does stack also install the system/C libraries that some Cabal packages depend on? +??? question "My tests are failing. What should I do?" -No, this is currently out of the scope of stack's target set of features. -Instead of attempting to automate the installation of 3rd party dependencies, we -have the following approaches for handling system dependencies: + Like all other targets, `stack test` runs test suites in parallel by default. + This can cause problems with test suites that depend on global resources such + as a database or binding to a fixed port number. A quick hack is to force stack + to run all test suites in sequence, using `stack test --jobs=1`. For test + suites to run in parallel developers should ensure that their test suites do + not depend on global resources (e.g. by asking the operating system for a random + port to bind to) and where unavoidable, add a lock in order to serialize access + to shared resources. -* Nix and docker help make your build and execution environment deterministic - and predictable. This way, you can install system dependencies into a - container, and share this container with all developers. +??? question "How do I use Stack with Docker?" 
-* If you have installed some libraries into a non-standard location, - [`extra-lib-dirs` / `extra-include-dirs`](yaml_configuration.md#extra-include-dirsextra-lib-dirs) - to specify it. + See the [Docker integration](topics/docker_integration.md) documentation. -In the future, stack might give OS specific suggestions for how to install -system libraries. +??? question "How do I build a statically-linked executable on Linux?" -## How can I make `stack` aware of my custom SSL certificates? + The way that Stack itself builds statically-linked Stack executables for Linux + is as follows: -### macOS + * In the Cabal file, the following + [`ld-options`](https://cabal.readthedocs.io/en/stable/cabal-package-description-file.html#pkg-field-ld-options) + are set: `-static` and `-pthread`. -In principle, you can use the following command to add a certificate to your system certificate keychain: + * The Stack command is run in a Docker container based on Alpine Linux. The + relevant Docker image repository is set out in Stack's `stack.yaml` file. See + also Olivier Benz's [GHC musl project](https://gitlab.com/benz0li/ghc-musl). - sudo security add-trusted-cert -d -r trustRoot -k /Library/Keychains/System.keychain + * Stack's configuration includes: -Some users have reported issues with this approach, see -[#907](https://github.com/commercialhaskell/stack/issues/907) for more -information. + ~~~yaml + extra-include-dirs: + - /usr/include + extra-lib-dirs: + - /lib + - /usr/lib + ~~~ -### Other *NIX OSs + * The build command is `stack build --docker --system-ghc --no-install-ghc` (on + x86_64) or + `stack build --docker --docker-stack-exe=image --system-ghc --no-install-ghc` + (on AArch64; the host Stack and the image Stack must have the same version + number). -Use the `SYSTEM_CERTIFICATE_PATH` environment variable to point at the directory -where you keep your SSL certificates. +??? 
question "Why does Stack not rebuild my project when I specify `--ghc-options` on the command line?" -## How do I get `verbose` output from GHC when I build with cabal? + Because GHC options often only affect optimization levels and warning + behavior, Stack does not recompile when it detects an option change by + default. This behavior can be changed though by setting the + [`rebuild-ghc-options` option](configure/yaml/non-project.md#rebuild-ghc-options) + to `true`. -Unfortunately `stack build` does not have an obvious equivalent to `cabal build -vN` which shows verbose output from GHC when building. The easiest workaround is to add `ghc-options: -vN` to the .cabal file or pass it via `stack build --ghc-options="-v"`. + To force recompilation manually, use the `--force-dirty` flag. If this still + does not lead to a rebuild, add the `-fforce-recomp` flag to your + `--ghc-options`. -## Does Stack support the Hpack specification? +??? question "Why does Stack not apply my `--ghc-options` to my dependencies?" -Yes: + By default, Stack applies command line GHC options only to + [project packages](configure/yaml/project.md#packages). For an explanation of + this choice see this discussion on issue + [#827](https://github.com/commercialhaskell/stack/issues/827#issuecomment-133263678). -* If a package directory contains an Hpack `package.yaml` file, then Stack will use it to generate a `.cabal` file when building the package. -* You can run `stack init` to initialize a `stack.yaml` file regardless of whether your packages are declared with `.cabal` files or with Hpack `package.yaml` files. -* You can use the `with-hpack` configuration option to specify an Hpack executable to use instead of the Hpack bundled with Stack. + If you still want to set specific GHC options for a dependency, use the + [`ghc-options`](configure/yaml/non-project.md#ghc-options) option in your YAML + configuration file. 
-## How do I resolve linker errors when running `stack setup` or `stack build` on macOS? + To change the set of packages that command line GHC options apply to, use the [`apply-ghc-options`](configure/yaml/non-project.md#apply-ghc-options) option. -This is likely to be caused by having a LLVM installation and default Apple -Clang compiler both under the `PATH`. The symptom of this issue is a linker -error "bad relocation (Invalid pointer diff)". The compiler picks up -inconsistent versions of binaries and the mysterious error occurs. +??? question "How does Stack handle parallel builds?" -The workaround is to remove LLVM binaries from the `PATH`. + See issue [#644](https://github.com/commercialhaskell/stack/issues/644) for more + details. -## How do I suppress `'-nopie'` warnings with `stack build` on macOS? +??? question "Where does the output from `--ghc-options=-ddump-splices` (and other `-ddump*` options) go?" -``` -clang: warning: argument unused during compilation: '-nopie' - [-Wunused-command-line-argument] -``` + These are written to `*.dump-*` files inside the package's `.stack-work` + directory. Specifically, they will be available at + `PKG-DIR/$(stack path --dist-dir)/build/SOURCE-PATH`, where `SOURCE-PATH` is the + path to the source file, relative to the location of the Cabal file. When + building named components such as test-suites, `SOURCE-PATH` will also include + `COMPONENT/COMPONENT-tmp`, where `COMPONENT` is the name of the component. -This warning is shown when compiler support of `-no-pie` is expected but unavailable. -It's possible to bypass the warning for a specific version of GHC by modifying a global setting: +??? question "Why is DYLD_LIBRARY_PATH ignored?" 
-``` -# ~/.stack/programs/x86_64-osx/ghc-8.2.2/lib/ghc-8.2.2/settings --- ("C compiler supports -no-pie", "YES"), -++ ("C compiler supports -no-pie", "NO"), -``` + If you are on Mac OS X 10.11 ("El Capitan") or later, there is a GHC issue + [#11617](https://ghc.haskell.org/trac/ghc/ticket/11617) which prevents the + `DYLD_LIBRARY_PATH` environment variable from being passed to GHC (see issue + [#1161](https://github.com/commercialhaskell/stack/issues/1161)) when System + Integrity Protection (a.k.a. "rootless") is enabled. There are two known + workarounds: -**Note that we're fixing `ghc-8.2.2` in this case; repeat for other versions as necessary.** You should apply this fix for the version of GHC that matches your resolver. + 1. Known to work in all cases: + [disable System Integrity Protection](http://osxdaily.com/2015/10/05/disable-rootless-system-integrity-protection-mac-os-x/). + **WARNING: Disabling SIP will severely reduce the security of your system, so only do this if absolutely necessary!** + 2. Experimental: modify GHC's shell script wrappers to use a shell outside the + protected directories (see issue + [#1161](https://github.com/commercialhaskell/stack/issues/1161#issuecomment-186690904)). -Issue [#4009](https://github.com/commercialhaskell/stack/issues/4009) on GitHub goes into further detail. +??? question "How do I get `verbose` output from GHC when I build?" -## How do I install ghc in stack when it fails with the error: Missing ghc bindist for "linux64-ncurses6"? + Set the [`--ghc-options`](commands/build_command.md#-ghc-options-option) + option of `stack build` to `-v`. -Example Error: -``` -No setup information found for ghc-8.6.4 on your platform. -This probably means a GHC bindist has not yet been added for OS key 'linux64-ncurses6'. -Supported versions: ghc-7.10.3, ghc-8.0.1, ghc-8.0.2, ghc-8.2.1, ghc-8.2.2 -``` +## Snapshot-related + +??? 
question "How does Stack choose which snapshot to use when creating a project-level configuration file?" + + See the [`stack init`](commands/init_command.md) command documentation. + +## CI-related + +??? question "How do I use Stack with Travis CI?" + + See the [Travis CI](topics/travis_ci.md) documentation. + +??? question "How do I use Stack with Azure CI?" + + See the [Azure CI](topics/azure_ci.md) documentation. + +## Linux-related + +??? question "How do fix error [S-9443] for 'linux64-ncurses6'?" + + Most Linux distributions have standardized on providing `libtinfo.so.6`, + either directly or as a symbolic link to `libncursesw.so.6`. As such, there + are no GHC binary distributions that link to `libncursesw.so.6` after + GHC 8.2.2. + + This error can be prevented by creating a symbolic link to + `libncursesw.so.6` using name `libtinfo.so.6`. Command: + + ~~~bash + ln -s /usr/lib/libncursesw.so.6 /usr/lib/libtinfo.so.6 + ~~~ + + Root privileges may be required. + +## macOS-related + +??? question "On macOS, how do I resolve linker errors when running `stack setup` or `stack build`?" + + This is likely to be caused by having both a LLVM installation and default + Apple Clang compiler on the PATH. The symptom of this issue is a linker + error "bad relocation (Invalid pointer diff)". The compiler picks up + inconsistent versions of binaries and the mysterious error occurs. + + The workaround is to remove LLVM binaries from the PATH. + +??? question "On macOS, how do I suppress `'-nopie'` warnings with `stack build`?" + + ~~~bash + clang: warning: argument unused during compilation: '-nopie' + [-Wunused-command-line-argument] + ~~~ + + This warning is shown when compiler support of `-no-pie` is expected but + unavailable. 
It is possible to bypass the warning for a specific version of + GHC by modifying a global setting: + + ~~~bash + # ~/.stack/programs/x86_64-osx/ghc-8.2.2/lib/ghc-8.2.2/settings + -- ("C compiler supports -no-pie", "YES"), + ++ ("C compiler supports -no-pie", "NO"), + ~~~ + + **Note that we are fixing `ghc-8.2.2` in this case; repeat for other + versions as necessary.** + You should apply this fix for the version of GHC that matches your snapshot. + + Issue [#4009](https://github.com/commercialhaskell/stack/issues/4009) goes into + further detail. + +## Windows-related + +??? question "What is licensing restrictions on Windows?" + + Currently, on Windows, GHC produces binaries linked statically with + [GNU Multiple Precision Arithmetic Library](https://gmplib.org/) (GMP), which is + used by [integer-gmp](https://hackage.haskell.org/package/integer-gmp) library + to provide big integer implementation for Haskell. Contrary to the majority of + Haskell code licensed under permissive BSD3 license, GMP library is licensed + under LGPL, which means resulting binaries + [have to be provided with source code or object files](http://www.gnu.org/licenses/gpl-faq.html#LGPLStaticVsDynamic). + That may or may not be acceptable for your situation. The current workaround + is to use GHC built with alternative big integer implementation called + `integer-simple`, which is free from LGPL limitations as it is pure Haskell + and does not use GMP. Unfortunately it has yet to be available out of the + box with Stack. See issue + [#399](https://github.com/commercialhaskell/stack/issues/399) + for the ongoing effort and information on workarounds. + +??? question "I have a Windows username with a space in it and problems building" + + See the [`local-programs-path`](configure/yaml/non-project.md#local-programs-path) + non-project specific configuration option documentation for advice. + +??? question "How to get a working executable on Windows?" 
+ + When executing a binary after building with `stack build` (e.g. for target + "foo"), the command `foo.exe` might complain about missing runtime libraries + (whereas `stack exec foo` works). + + Windows is not able to find the necessary C++ libraries from the standard + prompt because they're not in the PATH environment variable. `stack exec` works + because it is modifying PATH to include extra things. + + Those libraries are shipped with GHC (and, theoretically in some cases, MSYS2). + The easiest way to find them is `stack exec which`. For example, command: + + ~~~text + stack exec -- which libstdc++-6.dll + /c/Users/Michael/AppData/Local/Programs/stack/i386-windows/ghc-7.8.4/mingw/bin/libstdc++-6.dll + ~~~ -Most Linux distributions have standardized on providing libtinfo.so.6 (either directly or as a symlink to libncursesw.so.6). As such, there aren't GHC 8.6.* bindists that link to libncursesw.so.6 available. + A quick workaround is adding this path to the PATH environment variable or + copying the files somewhere Windows finds them (see + https://msdn.microsoft.com/de-de/library/7d83bc18.aspx). -So creating a symlink to libncursesw.so.6 as libtinfo.so.6 can prevent this error (root privileges might be required). -``` -ln -s /usr/lib/libncursesw.so.6 /usr/lib/libtinfo.so.6 -``` + See issue [#425](https://github.com/commercialhaskell/stack/issues/425). diff --git a/doc/ghci.md b/doc/ghci.md deleted file mode 100644 index 1391d3d0b8..0000000000 --- a/doc/ghci.md +++ /dev/null @@ -1,84 +0,0 @@ -
- -# GHCi - -`stack ghci` allows you to load components and files of your project into -`ghci`. It uses the same TARGET syntax as `stack build`, and can also take -options like `--test`, `--bench`, and `--flag`. Similarly to `stack build`, the -default is to load up ghci with all libraries and executables in the project. - -In order to load multiple components, `stack ghci` combines all of the ghc options -together. This doesn't work in the general case, but when the packages being -loaded share similar conventions, it should work out. A common source of issues -is when one component defines default extensions which aren't assumed by another -component. For example, specifying `NoImplicitPrelude` in one component but -not another is quite likely to cause failures. `ghci` will be run with -`-XNoImplicitPrelude`, but it is likely that modules in the other component -assume that the Prelude is implicitly imported. - -## Selecting Main module - -When loading multiple packages, there may be multiple definitions for the `Main` -module. You can specify which Main module to load by passing in the -`--main-is TARGET` flag. If no selection is made and there are multiple `Main` -modules, you will be asked to select from a list of options. - -## Speeding up initial load - -There are two ways to speed up the initial startup of ghci: - -* `--no-build`, to skip an initial build step. This only works if the - dependencies have already been built. - -* `--no-load`, to skip loading all defined modules into ghci. You can then - directly use `:load MyModule` to load a specific module in your project. - -## Loading just the main module - -By default, `stack ghci` loads and imports all of the modules in the package. -This allows you to easily use anything exported by your package. This is -usually quite convenient, but in some cases it makes sense to only load one -module, or no modules at all. The `--only-main` flag allows this. 
It specifies -that only the main module will be loaded, if any. This is particularly useful -in the following circumstances: - -1. You're loading the project in order to run it in ghci (e.g. via `main`), and - you intend to reload while developing. Without the `--only-main` flag, you - will need to quit and restart ghci whenever a module gets deleted. With the - flag, reloading should work fine in this case. - -2. If many of your modules have exports named the same thing, then you'll need to - refer to them using qualified names. To avoid this, it may be easier to use - `--only-main` to start with a blank slate and just import the modules you are - interested in. - -## Loading a filepath directly - -Instead of the `TARGET` syntax, it is also possible to directly run -`stack ghci src/MyFile.hs`. This will figure out which component the file is -associated with, and use the options from that component. - -## Specifying extra packages to build / depend on - -Sometimes you want to load ghci with an additional package, that isn't a direct -dependency of your components. This can be achieved by using the `--package` flag. -For example, if I want to experiment with the lens library, I can run -`stack ghci --package lens`. - -## Running plain ghci - -`stack ghci` always runs ghci configured to load code from packages in your -project. In particular, this means it passes in flags like `-hide-all-packages` -and `-package-id=` in order to configure which packages are visible to ghci. - -For doing experiments which just involve packages installed in your databases, -it may be useful to run ghci plainly like `stack exec ghci`. This will run a -plain `ghci` in an environment which includes `GHC_PACKAGE_PATH`, and so will -have access to your databases. 
- -*Note*: Running `stack ghci` on a pristine copy of the code doesn't currently -build libraries -([#2790](https://github.com/commercialhaskell/stack/issues/2790)) or internal -libraries ([#4148](https://github.com/commercialhaskell/stack/issues/4148)). -It is recommended to always run a `stack build` before running `stack ghci`, -until these two issues are closed. diff --git a/doc/ghcjs.md b/doc/ghcjs.md deleted file mode 100644 index 5d2fbfe6ea..0000000000 --- a/doc/ghcjs.md +++ /dev/null @@ -1,5 +0,0 @@ -
- -# GHCJS (removed) - -GHCJS support is no longer included in Stack as of version 2.3. diff --git a/doc/glossary.md b/doc/glossary.md new file mode 100644 index 0000000000..8c0874d514 --- /dev/null +++ b/doc/glossary.md @@ -0,0 +1,63 @@ +
+ +# Glossary + +The following terms are used in Stack's documentation. + +|Term |Meaning | +|--------------------|---------------------------------------------------------| +|Cabal |The Haskell Common Architecture for Building Applications and Libraries, provided by the [`Cabal` package](https://hackage.haskell.org/package/Cabal). Also referred to as Cabal (the library) to distinguish it from Cabal (the tool).| +|Cabal file |A file containing a [package description](https://cabal.readthedocs.io/en/stable/cabal-package-description-file.html) used by Cabal, named `.cabal`. A Cabal file specifies properties in the form of field/value pairs.| +|Cabal (the tool) |The Haskell tool used for building provided by the [`cabal-install` package](https://hackage.haskell.org/package/cabal-install).| +|CI |Continuous integration. | +|CMake |A [system](https://cmake.org/) for managing build processes.| +|`config.yaml` |A global and non-project-specific configuration file used by Stack.| +|dependency |A Haskell package other than a project package and on which a project package depends (directly or indirectly), located locally or elsewhere.| +|Docker |A [platform](https://www.docker.com/) for developing, shipping, and running applications. It can package and run an application in a loosely isolated environment called a _container_.| +|Emacs |[GNU Emacs](https://www.gnu.org/software/emacs/), an extensible, customisable text editor.| +|extra-deps |Extra dependencies (one version of each) that add to, or shadow, those specified in a snapshot.| +|FreeBSD |A Unix-like operating system. | +|GCC |The [GNU Compiler Collection](https://gcc.gnu.org/) or its executable `gcc`.| +|GHC |The [Glasgow Haskell Compiler](https://www.haskell.org/ghc/).| +|GHC boot package |A package that comes with GHC, is included in GHC's global package database, and is not specified directly in a Stackage snapshot. 
See the output of command `stack exec -- ghc-pkg list --global`.| +|GHC wired-in package|A GHC boot package that cannot be shaddowed with different versions of the same package. Also referred to as 'magic'.| +|GHCi |GHC's [interactive environment](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/ghci.html).| +|GHCJS |A Haskell to JavaScript [compiler](https://github.com/ghcjs/ghcjs).| +|GHCup |An [installer](https://www.haskell.org/ghcup/) for Haskell. +|Git |A [distributed version control system](https://git-scm.com/).| +|GPG |The [GNU Privacy Guard](https://gnupg.org/) or GnuPG, software that allows you to encrypt or sign your data and communications.| +|Hackage |The [Haskell Package Repository](https://hackage.haskell.org/). +|Haddock |The [document generation tool](https://hackage.haskell.org/package/haddock) for Haskell libraries.| +|'Haskell' extension |The ['Haskell' extension]() for VS Code. | +|HLS |[Haskell Language Server](https://haskell-language-server.readthedocs.io/en/latest/), an implementation of the Language Server Protocol for Haskell.| +|Homebrew |A [package manager](https://brew.sh/) for macOS or Linux, or its executable `brew`.| +|Hoogle |A Haskell API [search engine](https://hoogle.haskell.org/).| +|Hpack |A [format](https://github.com/sol/hpack) for Haskell packages or the executable `hpack` that produces a Cabal file from `package.yaml`.| +|Linux |A family of operating systems based on the Linux kernel. | +|macOS |The primary operating system for Apple's Mac computers. 
Previously known as Mac OS X or OS X.|
|Make |A [build automation tool](https://www.gnu.org/software/make/).|
|Markdown |A plain text [formatting syntax](https://daringfireball.net/projects/markdown/) or software used to convert files in that format to HTML.|
|MSYS2 |The [MSYS2](https://www.msys2.org/) software distribution and building platform for Windows.|
|Nix |A purely functional [package manager](https://nixos.org/), available for Linux and macOS.|
|package |A Haskell package is an organised collection of Haskell code and related files. It is described by a Cabal file or a `package.yaml` file, which is itself part of the package.|
|`package.yaml` |A file that describes a package in the Hpack format. |
|Pantry |A library for content-addressable Haskell package management, provided by the [`pantry` package](https://hackage.haskell.org/package/pantry). A dependency of Stack.|
|PATH |The `PATH` environment variable, specifying a list of directories searched for executable files.|
|project |A Stack project is a local directory that contains a project-level configuration file (`stack.yaml`, by default). A project may relate to more than one project package.|
|project package |A Haskell package that is part of a project and located locally. Distinct from a dependency located locally.|
|PVP |The Haskell [Package Versioning Policy](https://pvp.haskell.org/), which tells developers of libraries how to set their version numbers.|
|REPL |An interactive (read-eval-print loop) programming environment.|
|resolver |A synonym for snapshot, now deprecated. 
| +|`Setup.hs` |A project-specific file used by Cabal to perform setup tasks.| +|snapshot |A snapshot defines a version of GHC (and, implicitly, its boot packages), a set of packages (one version of each), Cabal flags and GHC options.| +|Stack |The Stack project or its executable `stack`.| +|`stack.yaml` |A project-level configuration file used by Stack, which may also contain non-project-specific options.| +|Stackage |A [distribution](https://www.stackage.org/) of compatible Haskell packages.| +|Stack root |A directory in which Stack stores important files. See `stack path --stack-root`. On Windows, or if Stack is configured to use the XDG Base Directory Specification, Stack also stores important files outside of the Stack root.| +|Stack work directory|A directory within a local project or package directory in which Stack stores files created during the build process. Named `.stack-work`, by default.| +|Unix-like operating systems|Linux, FreeBSD and macOS. | +|VS Code |[Visual Studio Code](https://code.visualstudio.com/), a source code editor.| +|Windows |A group of operating systems developed by Microsoft. | +|WSL |[Windows Subsystem for Linux](https://docs.microsoft.com/en-us/windows/wsl/). Provides a Linux environment on Windows.| +|XDG Base Directory Specification|A [specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) of directories relative to which files should be located.| +|YAML |A human-friendly [data serialization language](https://yaml.org/). 
The YAML syntax allows for key/value pairs.| diff --git a/doc/img/dot_command/wreq-example1.svg b/doc/img/dot_command/wreq-example1.svg new file mode 100644 index 0000000000..15996e6a98 --- /dev/null +++ b/doc/img/dot_command/wreq-example1.svg @@ -0,0 +1,31 @@ + + + + + + +deps + + + +wreq + +wreq + + + +wreq-examples + +wreq-examples + + + +wreq-examples->wreq + + + + + diff --git a/doc/img/dot_command/wreq-example2.svg b/doc/img/dot_command/wreq-example2.svg new file mode 100644 index 0000000000..a0d8853137 --- /dev/null +++ b/doc/img/dot_command/wreq-example2.svg @@ -0,0 +1,6145 @@ + + + + + + +deps + + + +wreq + +wreq + + + +base + +base + + + +wreq->base + + + + + +ghc-prim + +ghc-prim + + + +wreq->ghc-prim + + + + + +template-haskell + +template-haskell + + + +wreq->template-haskell + + + + + +Cabal + +Cabal + + + +wreq->Cabal + + + + + +bytestring + +bytestring + + + +wreq->bytestring + + + + + +containers + +containers + + + +wreq->containers + + + + + +time + +time + + + +wreq->time + + + + + +text + +text + + + +wreq->text + + + + + +aeson + +aeson + + + +wreq->aeson + + + + + +exceptions + +exceptions + + + +wreq->exceptions + + + + + +hashable + +hashable + + + +wreq->hashable + + + + + +unordered-containers + +unordered-containers + + + +wreq->unordered-containers + + + + + +memory + +memory + + + +wreq->memory + + + + + +attoparsec + +attoparsec + + + +wreq->attoparsec + + + + + +authenticate-oauth + +authenticate-oauth + + + +wreq->authenticate-oauth + + + + + +http-client + +http-client + + + +wreq->http-client + + + + + +http-types + +http-types + + + +wreq->http-types + + + + + +base16-bytestring + +base16-bytestring + + + +wreq->base16-bytestring + + + + + +cabal-doctest + +cabal-doctest + + + +wreq->cabal-doctest + + + + + +case-insensitive + +case-insensitive + + + +wreq->case-insensitive + + + + + +crypton + +crypton + + + +wreq->crypton + + + + + +mime-types + +mime-types + + + +wreq->mime-types + + + + + +http-client-tls + +http-client-tls + 
+ + +wreq->http-client-tls + + + + + +lens + +lens + + + +wreq->lens + + + + + +lens-aeson + +lens-aeson + + + +wreq->lens-aeson + + + + + +psqueues + +psqueues + + + +wreq->psqueues + + + + + +time-locale-compat + +time-locale-compat + + + +wreq->time-locale-compat + + + + + +wreq-examples + +wreq-examples + + + +wreq-examples->wreq + + + + + +wreq-examples->base + + + + + +wreq-examples->ghc-prim + + + + + +wreq-examples->bytestring + + + + + +wreq-examples->containers + + + + + +filepath + +filepath + + + +wreq-examples->filepath + + + + + +mtl + +mtl + + + +wreq-examples->mtl + + + + + +wreq-examples->text + + + + + +wreq-examples->aeson + + + + + +ansi-wl-pprint + +ansi-wl-pprint + + + +wreq-examples->ansi-wl-pprint + + + + + +wreq-examples->lens + + + + + +wreq-examples->lens-aeson + + + + + +optparse-applicative + +optparse-applicative + + + +wreq-examples->optparse-applicative + + + + + +tagsoup + +tagsoup + + + +wreq-examples->tagsoup + + + + + +ghc-internal + +ghc-internal + + + +base->ghc-internal + + + + + +base->ghc-prim + + + + + +ghc-bignum + +ghc-bignum + + + +ghc-bignum->ghc-prim + + + + + +ghc-internal->ghc-bignum + + + + + +ghc-internal->ghc-prim + + + + + +rts + +rts + + + +ghc-internal->rts + + + + + +ghc-prim->rts + + + + + +template-haskell->base + + + + + +template-haskell->ghc-prim + + + + + +pretty + +pretty + + + +template-haskell->pretty + + + + + +ghc-boot-th + +ghc-boot-th + + + +template-haskell->ghc-boot-th + + + + + +zlib-clib + +zlib-clib + + + +Cabal->base + + + + + +Cabal-syntax + +Cabal-syntax + + + +Cabal->Cabal-syntax + + + + + +Win32 + +Win32 + + + +Cabal->Win32 + + + + + +array + +array + + + +Cabal->array + + + + + +Cabal->bytestring + + + + + +Cabal->containers + + + + + +deepseq + +deepseq + + + +Cabal->deepseq + + + + + +directory + +directory + + + +Cabal->directory + + + + + +Cabal->filepath + + + + + +Cabal->mtl + + + + + +parsec + +parsec + + + +Cabal->parsec + + + + + +Cabal->pretty + + + + + +process + +process + + 
+ +Cabal->process + + + + + +Cabal->time + + + + + +transformers + +transformers + + + +Cabal->transformers + + + + + +Cabal-syntax->base + + + + + +Cabal-syntax->array + + + + + +Cabal-syntax->bytestring + + + + + +Cabal-syntax->containers + + + + + +Cabal-syntax->deepseq + + + + + +Cabal-syntax->directory + + + + + +Cabal-syntax->filepath + + + + + +Cabal-syntax->mtl + + + + + +Cabal-syntax->parsec + + + + + +Cabal-syntax->pretty + + + + + +Cabal-syntax->time + + + + + +Cabal-syntax->transformers + + + + + +binary + +binary + + + +Cabal-syntax->binary + + + + + +Cabal-syntax->text + + + + + +Win32->base + + + + + +Win32->filepath + + + + + +os-string + +os-string + + + +Win32->os-string + + + + + +array->base + + + + + +bytestring->base + + + + + +bytestring->ghc-prim + + + + + +bytestring->template-haskell + + + + + +bytestring->deepseq + + + + + +containers->base + + + + + +containers->template-haskell + + + + + +containers->array + + + + + +containers->deepseq + + + + + +deepseq->base + + + + + +deepseq->ghc-prim + + + + + +deepseq->array + + + + + +directory->base + + + + + +directory->Win32 + + + + + +directory->filepath + + + + + +directory->time + + + + + +directory->os-string + + + + + +filepath->base + + + + + +filepath->template-haskell + + + + + +filepath->bytestring + + + + + +filepath->deepseq + + + + + +filepath->os-string + + + + + +filepath->exceptions + + + + + +mtl->base + + + + + +mtl->transformers + + + + + +parsec->base + + + + + +parsec->bytestring + + + + + +parsec->mtl + + + + + +parsec->text + + + + + +pretty->base + + + + + +pretty->ghc-prim + + + + + +pretty->deepseq + + + + + +process->base + + + + + +process->Win32 + + + + + +process->deepseq + + + + + +process->directory + + + + + +process->filepath + + + + + +time->base + + + + + +time->Win32 + + + + + +time->deepseq + + + + + +transformers->base + + + + + +binary->base + + + + + +binary->array + + + + + +binary->bytestring + + + + + +binary->containers + + + + + +text->base + + + + 
+ +text->ghc-prim + + + + + +text->template-haskell + + + + + +text->array + + + + + +text->bytestring + + + + + +text->deepseq + + + + + +text->binary + + + + + +OneTuple + +OneTuple + + + +OneTuple->base + + + + + +OneTuple->ghc-prim + + + + + +OneTuple->template-haskell + + + + + +QuickCheck + +QuickCheck + + + +QuickCheck->base + + + + + +QuickCheck->template-haskell + + + + + +QuickCheck->containers + + + + + +QuickCheck->deepseq + + + + + +QuickCheck->transformers + + + + + +random + +random + + + +QuickCheck->random + + + + + +splitmix + +splitmix + + + +QuickCheck->splitmix + + + + + +random->base + + + + + +random->bytestring + + + + + +random->deepseq + + + + + +random->mtl + + + + + +random->splitmix + + + + + +splitmix->base + + + + + +splitmix->deepseq + + + + + +RSA + +RSA + + + +RSA->base + + + + + +RSA->bytestring + + + + + +RSA->binary + + + + + +SHA + +SHA + + + +RSA->SHA + + + + + +crypto-api + +crypto-api + + + +RSA->crypto-api + + + + + +crypto-pubkey-types + +crypto-pubkey-types + + + +RSA->crypto-pubkey-types + + + + + +SHA->base + + + + + +SHA->array + + + + + +SHA->bytestring + + + + + +SHA->binary + + + + + +crypto-api->base + + + + + +crypto-api->bytestring + + + + + +crypto-api->transformers + + + + + +tagged + +tagged + + + +crypto-api->tagged + + + + + +cereal + +cereal + + + +crypto-api->cereal + + + + + +entropy + +entropy + + + +crypto-api->entropy + + + + + +crypto-pubkey-types->base + + + + + +asn1-encoding + +asn1-encoding + + + +crypto-pubkey-types->asn1-encoding + + + + + +asn1-types + +asn1-types + + + +crypto-pubkey-types->asn1-types + + + + + +StateVar + +StateVar + + + +StateVar->base + + + + + +StateVar->transformers + + + + + +stm + +stm + + + +StateVar->stm + + + + + +stm->base + + + + + +stm->array + + + + + +os-string->base + + + + + +os-string->template-haskell + + + + + +os-string->bytestring + + + + + +os-string->deepseq + + + + + +os-string->exceptions + + + + + +adjunctions + +adjunctions + + + +adjunctions->base 
+ + + + + +adjunctions->array + + + + + +adjunctions->containers + + + + + +adjunctions->mtl + + + + + +adjunctions->transformers + + + + + +comonad + +comonad + + + +adjunctions->comonad + + + + + +contravariant + +contravariant + + + +adjunctions->contravariant + + + + + +distributive + +distributive + + + +adjunctions->distributive + + + + + +free + +free + + + +adjunctions->free + + + + + +profunctors + +profunctors + + + +adjunctions->profunctors + + + + + +semigroupoids + +semigroupoids + + + +adjunctions->semigroupoids + + + + + +semigroups + +semigroups + + + +adjunctions->semigroups + + + + + +adjunctions->tagged + + + + + +transformers-compat + +transformers-compat + + + +adjunctions->transformers-compat + + + + + +void + +void + + + +adjunctions->void + + + + + +comonad->base + + + + + +comonad->containers + + + + + +comonad->transformers + + + + + +comonad->distributive + + + + + +comonad->tagged + + + + + +comonad->transformers-compat + + + + + +indexed-traversable + +indexed-traversable + + + +comonad->indexed-traversable + + + + + +contravariant->base + + + + + +contravariant->transformers + + + + + +contravariant->StateVar + + + + + +distributive->base + + + + + +distributive->transformers + + + + + +distributive->tagged + + + + + +base-orphans + +base-orphans + + + +distributive->base-orphans + + + + + +free->base + + + + + +free->template-haskell + + + + + +free->containers + + + + + +free->mtl + + + + + +free->transformers + + + + + +free->comonad + + + + + +free->distributive + + + + + +free->profunctors + + + + + +free->semigroupoids + + + + + +free->exceptions + + + + + +free->indexed-traversable + + + + + +th-abstraction + +th-abstraction + + + +free->th-abstraction + + + + + +transformers-base + +transformers-base + + + +free->transformers-base + + + + + +profunctors->base + + + + + +profunctors->transformers + + + + + +profunctors->comonad + + + + + +profunctors->contravariant + + + + + +profunctors->distributive + + + + + 
+profunctors->tagged + + + + + +profunctors->base-orphans + + + + + +bifunctors + +bifunctors + + + +profunctors->bifunctors + + + + + +semigroupoids->base + + + + + +semigroupoids->template-haskell + + + + + +semigroupoids->containers + + + + + +semigroupoids->transformers + + + + + +semigroupoids->comonad + + + + + +semigroupoids->contravariant + + + + + +semigroupoids->distributive + + + + + +semigroupoids->tagged + + + + + +semigroupoids->transformers-compat + + + + + +semigroupoids->hashable + + + + + +semigroupoids->unordered-containers + + + + + +semigroupoids->base-orphans + + + + + +semigroupoids->bifunctors + + + + + +semigroups->base + + + + + +tagged->base + + + + + +tagged->template-haskell + + + + + +tagged->deepseq + + + + + +tagged->transformers + + + + + +transformers-compat->base + + + + + +transformers-compat->ghc-prim + + + + + +transformers-compat->transformers + + + + + +void->base + + + + + +aeson->base + + + + + +aeson->ghc-prim + + + + + +aeson->template-haskell + + + + + +aeson->bytestring + + + + + +aeson->containers + + + + + +aeson->deepseq + + + + + +aeson->time + + + + + +aeson->text + + + + + +aeson->OneTuple + + + + + +aeson->QuickCheck + + + + + +aeson->tagged + + + + + +character-ps + +character-ps + + + +aeson->character-ps + + + + + +data-fix + +data-fix + + + +aeson->data-fix + + + + + +dlist + +dlist + + + +aeson->dlist + + + + + +aeson->exceptions + + + + + +generically + +generically + + + +aeson->generically + + + + + +aeson->hashable + + + + + +aeson->indexed-traversable + + + + + +integer-conversion + +integer-conversion + + + +aeson->integer-conversion + + + + + +integer-logarithms + +integer-logarithms + + + +aeson->integer-logarithms + + + + + +network-uri + +network-uri + + + +aeson->network-uri + + + + + +primitive + +primitive + + + +aeson->primitive + + + + + +scientific + +scientific + + + +aeson->scientific + + + + + +semialign + +semialign + + + +aeson->semialign + + + + + +strict + +strict + + + +aeson->strict 
+ + + + + +text-iso8601 + +text-iso8601 + + + +aeson->text-iso8601 + + + + + +text-short + +text-short + + + +aeson->text-short + + + + + +aeson->th-abstraction + + + + + +these + +these + + + +aeson->these + + + + + +time-compat + +time-compat + + + +aeson->time-compat + + + + + +aeson->unordered-containers + + + + + +uuid-types + +uuid-types + + + +aeson->uuid-types + + + + + +vector + +vector + + + +aeson->vector + + + + + +witherable + +witherable + + + +aeson->witherable + + + + + +character-ps->base + + + + + +data-fix->base + + + + + +data-fix->deepseq + + + + + +data-fix->hashable + + + + + +dlist->base + + + + + +dlist->deepseq + + + + + +exceptions->base + + + + + +exceptions->template-haskell + + + + + +exceptions->mtl + + + + + +exceptions->transformers + + + + + +exceptions->stm + + + + + +generically->base + + + + + +hashable->base + + + + + +hashable->ghc-bignum + + + + + +hashable->ghc-prim + + + + + +hashable->bytestring + + + + + +hashable->containers + + + + + +hashable->deepseq + + + + + +hashable->filepath + + + + + +hashable->text + + + + + +hashable->os-string + + + + + +indexed-traversable->base + + + + + +indexed-traversable->array + + + + + +indexed-traversable->containers + + + + + +indexed-traversable->transformers + + + + + +integer-conversion->base + + + + + +integer-conversion->bytestring + + + + + +integer-conversion->text + + + + + +integer-conversion->primitive + + + + + +integer-logarithms->base + + + + + +integer-logarithms->ghc-bignum + + + + + +integer-logarithms->ghc-prim + + + + + +integer-logarithms->array + + + + + +network-uri->base + + + + + +network-uri->template-haskell + + + + + +network-uri->deepseq + + + + + +network-uri->parsec + + + + + +th-compat + +th-compat + + + +network-uri->th-compat + + + + + +primitive->base + + + + + +primitive->template-haskell + + + + + +primitive->deepseq + + + + + +primitive->transformers + + + + + +scientific->base + + + + + +scientific->template-haskell + + + + + 
+scientific->bytestring + + + + + +scientific->containers + + + + + +scientific->deepseq + + + + + +scientific->binary + + + + + +scientific->text + + + + + +scientific->hashable + + + + + +scientific->integer-logarithms + + + + + +scientific->primitive + + + + + +semialign->base + + + + + +semialign->containers + + + + + +semialign->transformers + + + + + +semialign->semigroupoids + + + + + +semialign->tagged + + + + + +semialign->hashable + + + + + +semialign->indexed-traversable + + + + + +semialign->these + + + + + +semialign->unordered-containers + + + + + +semialign->vector + + + + + +indexed-traversable-instances + +indexed-traversable-instances + + + +semialign->indexed-traversable-instances + + + + + +strict->base + + + + + +strict->ghc-prim + + + + + +strict->bytestring + + + + + +strict->deepseq + + + + + +strict->transformers + + + + + +strict->binary + + + + + +strict->text + + + + + +strict->hashable + + + + + +strict->these + + + + + +assoc + +assoc + + + +strict->assoc + + + + + +text-iso8601->base + + + + + +text-iso8601->time + + + + + +text-iso8601->text + + + + + +text-iso8601->integer-conversion + + + + + +text-iso8601->time-compat + + + + + +text-short->base + + + + + +text-short->ghc-prim + + + + + +text-short->template-haskell + + + + + +text-short->bytestring + + + + + +text-short->deepseq + + + + + +text-short->binary + + + + + +text-short->text + + + + + +text-short->hashable + + + + + +th-abstraction->base + + + + + +th-abstraction->ghc-prim + + + + + +th-abstraction->template-haskell + + + + + +th-abstraction->containers + + + + + +these->base + + + + + +these->deepseq + + + + + +these->binary + + + + + +these->hashable + + + + + +these->assoc + + + + + +time-compat->base + + + + + +time-compat->template-haskell + + + + + +time-compat->deepseq + + + + + +time-compat->time + + + + + +time-compat->hashable + + + + + +time-compat->base-orphans + + + + + +unordered-containers->base + + + + + +unordered-containers->template-haskell + + + + + 
+unordered-containers->deepseq + + + + + +unordered-containers->hashable + + + + + +uuid-types->base + + + + + +uuid-types->template-haskell + + + + + +uuid-types->bytestring + + + + + +uuid-types->deepseq + + + + + +uuid-types->binary + + + + + +uuid-types->text + + + + + +uuid-types->random + + + + + +uuid-types->hashable + + + + + +vector->base + + + + + +vector->deepseq + + + + + +vector->random + + + + + +vector->primitive + + + + + +tasty + +tasty + + + +vector->tasty + + + + + +vector-stream + +vector-stream + + + +vector->vector-stream + + + + + +witherable->base + + + + + +witherable->containers + + + + + +witherable->transformers + + + + + +witherable->hashable + + + + + +witherable->indexed-traversable + + + + + +witherable->unordered-containers + + + + + +witherable->vector + + + + + +witherable->base-orphans + + + + + +witherable->indexed-traversable-instances + + + + + +ansi-terminal + +ansi-terminal + + + +ansi-terminal->base + + + + + +ansi-terminal-types + +ansi-terminal-types + + + +ansi-terminal->ansi-terminal-types + + + + + +colour + +colour + + + +ansi-terminal->colour + + + + + +ansi-terminal-types->base + + + + + +ansi-terminal-types->colour + + + + + +colour->base + + + + + +ansi-wl-pprint->base + + + + + +prettyprinter-compat-ansi-wl-pprint + +prettyprinter-compat-ansi-wl-pprint + + + +ansi-wl-pprint->prettyprinter-compat-ansi-wl-pprint + + + + + +prettyprinter-compat-ansi-wl-pprint->base + + + + + +prettyprinter-compat-ansi-wl-pprint->text + + + + + +prettyprinter + +prettyprinter + + + +prettyprinter-compat-ansi-wl-pprint->prettyprinter + + + + + +prettyprinter-ansi-terminal + +prettyprinter-ansi-terminal + + + +prettyprinter-compat-ansi-wl-pprint->prettyprinter-ansi-terminal + + + + + +appar + +appar + + + +appar->base + + + + + +appar->bytestring + + + + + +asn1-encoding->base + + + + + +asn1-encoding->bytestring + + + + + +asn1-encoding->asn1-types + + + + + +hourglass + +hourglass + + + +asn1-encoding->hourglass + + + + + 
+asn1-types->base + + + + + +asn1-types->bytestring + + + + + +asn1-types->hourglass + + + + + +asn1-types->memory + + + + + +hourglass->base + + + + + +hourglass->Win32 + + + + + +hourglass->deepseq + + + + + +asn1-parse + +asn1-parse + + + +asn1-parse->base + + + + + +asn1-parse->bytestring + + + + + +asn1-parse->asn1-encoding + + + + + +asn1-parse->asn1-types + + + + + +memory->base + + + + + +memory->ghc-prim + + + + + +memory->bytestring + + + + + +memory->deepseq + + + + + +basement + +basement + + + +memory->basement + + + + + +assoc->base + + + + + +async + +async + + + +async->base + + + + + +async->stm + + + + + +async->hashable + + + + + +attoparsec->base + + + + + +attoparsec->ghc-prim + + + + + +attoparsec->array + + + + + +attoparsec->bytestring + + + + + +attoparsec->containers + + + + + +attoparsec->deepseq + + + + + +attoparsec->transformers + + + + + +attoparsec->text + + + + + +attoparsec->scientific + + + + + +authenticate-oauth->base + + + + + +authenticate-oauth->bytestring + + + + + +authenticate-oauth->time + + + + + +authenticate-oauth->transformers + + + + + +authenticate-oauth->random + + + + + +authenticate-oauth->RSA + + + + + +authenticate-oauth->SHA + + + + + +authenticate-oauth->crypto-pubkey-types + + + + + +authenticate-oauth->transformers-compat + + + + + +base64-bytestring + +base64-bytestring + + + +authenticate-oauth->base64-bytestring + + + + + +blaze-builder + +blaze-builder + + + +authenticate-oauth->blaze-builder + + + + + +data-default + +data-default + + + +authenticate-oauth->data-default + + + + + +authenticate-oauth->http-client + + + + + +authenticate-oauth->http-types + + + + + +base64-bytestring->base + + + + + +base64-bytestring->bytestring + + + + + +blaze-builder->base + + + + + +blaze-builder->ghc-prim + + + + + +blaze-builder->bytestring + + + + + +blaze-builder->deepseq + + + + + +blaze-builder->text + + + + + +data-default->base + + + + + +data-default->containers + + + + + +http-client->base + + + + + 
+http-client->ghc-prim + + + + + +http-client->Win32 + + + + + +http-client->array + + + + + +http-client->bytestring + + + + + +http-client->containers + + + + + +http-client->deepseq + + + + + +http-client->filepath + + + + + +http-client->time + + + + + +http-client->transformers + + + + + +http-client->text + + + + + +http-client->random + + + + + +http-client->stm + + + + + +http-client->exceptions + + + + + +http-client->network-uri + + + + + +http-client->async + + + + + +http-client->base64-bytestring + + + + + +http-client->blaze-builder + + + + + +http-client->http-types + + + + + +http-client->case-insensitive + + + + + +cookie + +cookie + + + +http-client->cookie + + + + + +network + +network + + + +http-client->network + + + + + +iproute + +iproute + + + +http-client->iproute + + + + + +http-client->mime-types + + + + + +safe + +safe + + + +http-client->safe + + + + + +streaming-commons + +streaming-commons + + + +http-client->streaming-commons + + + + + +http-types->base + + + + + +http-types->array + + + + + +http-types->bytestring + + + + + +http-types->text + + + + + +http-types->case-insensitive + + + + + +base-orphans->base + + + + + +base-orphans->ghc-prim + + + + + +base16-bytestring->base + + + + + +base16-bytestring->bytestring + + + + + +basement->base + + + + + +basement->ghc-prim + + + + + +basement->Win32 + + + + + +bifunctors->base + + + + + +bifunctors->template-haskell + + + + + +bifunctors->containers + + + + + +bifunctors->transformers + + + + + +bifunctors->comonad + + + + + +bifunctors->tagged + + + + + +bifunctors->th-abstraction + + + + + +bifunctors->assoc + + + + + +byteorder + +byteorder + + + +byteorder->base + + + + + +cabal-doctest->base + + + + + +cabal-doctest->Cabal + + + + + +cabal-doctest->directory + + + + + +cabal-doctest->filepath + + + + + +call-stack + +call-stack + + + +call-stack->base + + + + + +call-stack->filepath + + + + + +case-insensitive->base + + + + + +case-insensitive->bytestring + + + + + 
+case-insensitive->deepseq + + + + + +case-insensitive->text + + + + + +case-insensitive->hashable + + + + + +cborg + +cborg + + + +cborg->base + + + + + +cborg->ghc-bignum + + + + + +cborg->ghc-prim + + + + + +cborg->array + + + + + +cborg->bytestring + + + + + +cborg->containers + + + + + +cborg->deepseq + + + + + +cborg->text + + + + + +cborg->primitive + + + + + +half + +half + + + +cborg->half + + + + + +half->base + + + + + +half->template-haskell + + + + + +half->deepseq + + + + + +half->binary + + + + + +cereal->base + + + + + +cereal->ghc-prim + + + + + +cereal->array + + + + + +cereal->bytestring + + + + + +cereal->containers + + + + + +cookie->base + + + + + +cookie->bytestring + + + + + +cookie->deepseq + + + + + +cookie->time + + + + + +cookie->text + + + + + +data-default-class + +data-default-class + + + +cookie->data-default-class + + + + + +data-default-class->data-default + + + + + +entropy->base + + + + + +entropy->Cabal + + + + + +entropy->Win32 + + + + + +entropy->bytestring + + + + + +entropy->directory + + + + + +entropy->filepath + + + + + +entropy->process + + + + + +crypton->base + + + + + +crypton->ghc-prim + + + + + +crypton->Win32 + + + + + +crypton->bytestring + + + + + +crypton->deepseq + + + + + +crypton->memory + + + + + +crypton->basement + + + + + +integer-gmp + +integer-gmp + + + +crypton->integer-gmp + + + + + +integer-gmp->base + + + + + +integer-gmp->ghc-bignum + + + + + +integer-gmp->ghc-internal + + + + + +integer-gmp->ghc-prim + + + + + +crypton-connection + +crypton-connection + + + +crypton-connection->base + + + + + +crypton-connection->bytestring + + + + + +crypton-connection->containers + + + + + +crypton-connection->data-default + + + + + +crypton-socks + +crypton-socks + + + +crypton-connection->crypton-socks + + + + + +crypton-x509-store + +crypton-x509-store + + + +crypton-connection->crypton-x509-store + + + + + +crypton-x509-system + +crypton-x509-system + + + +crypton-connection->crypton-x509-system + + + + + 
+crypton-connection->network + + + + + +tls + +tls + + + +crypton-connection->tls + + + + + +crypton-socks->base + + + + + +crypton-socks->bytestring + + + + + +crypton-socks->cereal + + + + + +crypton-socks->network + + + + + +crypton-x509-store->base + + + + + +crypton-x509-store->bytestring + + + + + +crypton-x509-store->containers + + + + + +crypton-x509-store->directory + + + + + +crypton-x509-store->filepath + + + + + +crypton-x509-store->mtl + + + + + +crypton-x509-store->asn1-encoding + + + + + +crypton-x509-store->asn1-types + + + + + +crypton-x509-store->crypton + + + + + +crypton-x509 + +crypton-x509 + + + +crypton-x509-store->crypton-x509 + + + + + +pem + +pem + + + +crypton-x509-store->pem + + + + + +crypton-x509-system->base + + + + + +crypton-x509-system->Win32 + + + + + +crypton-x509-system->bytestring + + + + + +crypton-x509-system->containers + + + + + +crypton-x509-system->directory + + + + + +crypton-x509-system->filepath + + + + + +crypton-x509-system->mtl + + + + + +crypton-x509-system->process + + + + + +crypton-x509-system->asn1-encoding + + + + + +crypton-x509-system->crypton-x509-store + + + + + +crypton-x509-system->crypton-x509 + + + + + +crypton-x509-system->pem + + + + + +network->base + + + + + +network->bytestring + + + + + +network->deepseq + + + + + +network->directory + + + + + +network->stm + + + + + +temporary + +temporary + + + +network->temporary + + + + + +tls->base + + + + + +tls->bytestring + + + + + +tls->mtl + + + + + +tls->transformers + + + + + +tls->asn1-encoding + + + + + +tls->asn1-types + + + + + +tls->memory + + + + + +tls->data-default + + + + + +tls->base16-bytestring + + + + + +tls->cereal + + + + + +tls->crypton + + + + + +tls->crypton-x509-store + + + + + +tls->network + + + + + +tls->crypton-x509 + + + + + +crypton-x509-validation + +crypton-x509-validation + + + +tls->crypton-x509-validation + + + + + +serialise + +serialise + + + +tls->serialise + + + + + +zlib + +zlib + + + +tls->zlib + + + + + +unix-time 
+ +unix-time + + + +tls->unix-time + + + + + +crypton-x509->base + + + + + +crypton-x509->bytestring + + + + + +crypton-x509->containers + + + + + +crypton-x509->transformers + + + + + +crypton-x509->asn1-encoding + + + + + +crypton-x509->asn1-types + + + + + +crypton-x509->hourglass + + + + + +crypton-x509->asn1-parse + + + + + +crypton-x509->memory + + + + + +crypton-x509->crypton + + + + + +crypton-x509->pem + + + + + +pem->base + + + + + +pem->bytestring + + + + + +pem->memory + + + + + +pem->basement + + + + + +crypton-x509-validation->base + + + + + +crypton-x509-validation->bytestring + + + + + +crypton-x509-validation->containers + + + + + +crypton-x509-validation->mtl + + + + + +crypton-x509-validation->asn1-encoding + + + + + +crypton-x509-validation->asn1-types + + + + + +crypton-x509-validation->hourglass + + + + + +crypton-x509-validation->memory + + + + + +crypton-x509-validation->data-default + + + + + +crypton-x509-validation->crypton + + + + + +crypton-x509-validation->crypton-x509-store + + + + + +crypton-x509-validation->crypton-x509 + + + + + +crypton-x509-validation->pem + + + + + +crypton-x509-validation->iproute + + + + + +iproute->base + + + + + +iproute->bytestring + + + + + +iproute->containers + + + + + +iproute->appar + + + + + +iproute->byteorder + + + + + +iproute->network + + + + + +transformers-base->base + + + + + +transformers-base->transformers + + + + + +transformers-base->stm + + + + + +transformers-base->transformers-compat + + + + + +transformers-base->base-orphans + + + + + +ghc-boot-th->base + + + + + +mime-types->base + + + + + +mime-types->bytestring + + + + + +mime-types->containers + + + + + +mime-types->text + + + + + +safe->base + + + + + +streaming-commons->base + + + + + +streaming-commons->Win32 + + + + + +streaming-commons->array + + + + + +streaming-commons->bytestring + + + + + +streaming-commons->directory + + + + + +streaming-commons->filepath + + + + + +streaming-commons->process + + + + + 
+streaming-commons->transformers + + + + + +streaming-commons->text + + + + + +streaming-commons->random + + + + + +streaming-commons->stm + + + + + +streaming-commons->async + + + + + +streaming-commons->network + + + + + +streaming-commons->zlib + + + + + +http-client-tls->base + + + + + +http-client-tls->bytestring + + + + + +http-client-tls->containers + + + + + +http-client-tls->transformers + + + + + +http-client-tls->text + + + + + +http-client-tls->exceptions + + + + + +http-client-tls->network-uri + + + + + +http-client-tls->memory + + + + + +http-client-tls->data-default + + + + + +http-client-tls->http-client + + + + + +http-client-tls->http-types + + + + + +http-client-tls->case-insensitive + + + + + +http-client-tls->crypton + + + + + +http-client-tls->crypton-connection + + + + + +http-client-tls->network + + + + + +http-client-tls->tls + + + + + +indexed-traversable-instances->base + + + + + +indexed-traversable-instances->OneTuple + + + + + +indexed-traversable-instances->tagged + + + + + +indexed-traversable-instances->indexed-traversable + + + + + +indexed-traversable-instances->unordered-containers + + + + + +indexed-traversable-instances->vector + + + + + +invariant + +invariant + + + +invariant->base + + + + + +invariant->ghc-prim + + + + + +invariant->template-haskell + + + + + +invariant->array + + + + + +invariant->containers + + + + + +invariant->transformers + + + + + +invariant->StateVar + + + + + +invariant->stm + + + + + +invariant->comonad + + + + + +invariant->contravariant + + + + + +invariant->profunctors + + + + + +invariant->tagged + + + + + +invariant->transformers-compat + + + + + +invariant->th-abstraction + + + + + +invariant->unordered-containers + + + + + +invariant->bifunctors + + + + + +kan-extensions + +kan-extensions + + + +kan-extensions->base + + + + + +kan-extensions->array + + + + + +kan-extensions->containers + + + + + +kan-extensions->mtl + + + + + +kan-extensions->transformers + + + + + 
+kan-extensions->adjunctions + + + + + +kan-extensions->comonad + + + + + +kan-extensions->contravariant + + + + + +kan-extensions->distributive + + + + + +kan-extensions->free + + + + + +kan-extensions->profunctors + + + + + +kan-extensions->semigroupoids + + + + + +kan-extensions->tagged + + + + + +kan-extensions->invariant + + + + + +lens->base + + + + + +lens->ghc-prim + + + + + +lens->template-haskell + + + + + +lens->array + + + + + +lens->bytestring + + + + + +lens->containers + + + + + +lens->filepath + + + + + +lens->mtl + + + + + +lens->transformers + + + + + +lens->text + + + + + +lens->comonad + + + + + +lens->contravariant + + + + + +lens->distributive + + + + + +lens->free + + + + + +lens->profunctors + + + + + +lens->semigroupoids + + + + + +lens->tagged + + + + + +lens->transformers-compat + + + + + +lens->exceptions + + + + + +lens->hashable + + + + + +lens->indexed-traversable + + + + + +lens->strict + + + + + +lens->th-abstraction + + + + + +lens->these + + + + + +lens->unordered-containers + + + + + +lens->vector + + + + + +lens->assoc + + + + + +lens->base-orphans + + + + + +lens->bifunctors + + + + + +lens->call-stack + + + + + +lens->indexed-traversable-instances + + + + + +lens->kan-extensions + + + + + +parallel + +parallel + + + +lens->parallel + + + + + +reflection + +reflection + + + +lens->reflection + + + + + +parallel->base + + + + + +parallel->ghc-prim + + + + + +parallel->array + + + + + +parallel->containers + + + + + +parallel->deepseq + + + + + +reflection->base + + + + + +reflection->template-haskell + + + + + +lens-aeson->base + + + + + +lens-aeson->bytestring + + + + + +lens-aeson->text + + + + + +lens-aeson->aeson + + + + + +lens-aeson->scientific + + + + + +lens-aeson->text-short + + + + + +lens-aeson->unordered-containers + + + + + +lens-aeson->vector + + + + + +lens-aeson->lens + + + + + +temporary->base + + + + + +temporary->directory + + + + + +temporary->filepath + + + + + +temporary->transformers + + + + + 
+temporary->random + + + + + +temporary->exceptions + + + + + +th-compat->base + + + + + +th-compat->template-haskell + + + + + +old-locale + +old-locale + + + +old-locale->base + + + + + +old-time + +old-time + + + +old-time->base + + + + + +old-time->old-locale + + + + + +optparse-applicative->base + + + + + +optparse-applicative->process + + + + + +optparse-applicative->transformers + + + + + +optparse-applicative->text + + + + + +optparse-applicative->transformers-compat + + + + + +optparse-applicative->prettyprinter + + + + + +optparse-applicative->prettyprinter-ansi-terminal + + + + + +prettyprinter->base + + + + + +prettyprinter->text + + + + + +prettyprinter-ansi-terminal->base + + + + + +prettyprinter-ansi-terminal->text + + + + + +prettyprinter-ansi-terminal->ansi-terminal + + + + + +prettyprinter-ansi-terminal->prettyprinter + + + + + +psqueues->base + + + + + +psqueues->ghc-prim + + + + + +psqueues->deepseq + + + + + +psqueues->hashable + + + + + +serialise->base + + + + + +serialise->ghc-prim + + + + + +serialise->array + + + + + +serialise->bytestring + + + + + +serialise->containers + + + + + +serialise->time + + + + + +serialise->text + + + + + +serialise->hashable + + + + + +serialise->primitive + + + + + +serialise->strict + + + + + +serialise->these + + + + + +serialise->unordered-containers + + + + + +serialise->vector + + + + + +serialise->cborg + + + + + +serialise->half + + + + + +zlib->base + + + + + +zlib->zlib-clib + + + + + +zlib->bytestring + + + + + +tagsoup->base + + + + + +tagsoup->bytestring + + + + + +tagsoup->containers + + + + + +tagsoup->text + + + + + +tasty->base + + + + + +tasty->containers + + + + + +tasty->transformers + + + + + +tasty->stm + + + + + +tasty->tagged + + + + + +tasty->ansi-terminal + + + + + +tasty->optparse-applicative + + + + + +time-locale-compat->base + + + + + +time-locale-compat->time + + + + + +unix-time->base + + + + + +unix-time->bytestring + + + + + +unix-time->binary + + + + + +unix-time->old-time + 
+ + + + +vector-stream->base + + + + + +vector-stream->ghc-prim + + + + + diff --git a/doc/img/dot_command/wreq-example4.svg b/doc/img/dot_command/wreq-example4.svg new file mode 100644 index 0000000000..9554d2ccb1 --- /dev/null +++ b/doc/img/dot_command/wreq-example4.svg @@ -0,0 +1,3553 @@ + + + + + + +deps + + + +wreq + +wreq + + + +ghc-prim + +ghc-prim + + + +wreq->ghc-prim + + + + + +template-haskell + +template-haskell + + + +wreq->template-haskell + + + + + +Cabal + +Cabal + + + +wreq->Cabal + + + + + +bytestring + +bytestring + + + +wreq->bytestring + + + + + +containers + +containers + + + +wreq->containers + + + + + +time + +time + + + +wreq->time + + + + + +text + +text + + + +wreq->text + + + + + +aeson + +aeson + + + +wreq->aeson + + + + + +exceptions + +exceptions + + + +wreq->exceptions + + + + + +hashable + +hashable + + + +wreq->hashable + + + + + +unordered-containers + +unordered-containers + + + +wreq->unordered-containers + + + + + +attoparsec + +attoparsec + + + +wreq->attoparsec + + + + + +authenticate-oauth + +authenticate-oauth + + + +wreq->authenticate-oauth + + + + + +http-client + +http-client + + + +wreq->http-client + + + + + +http-types + +http-types + + + +wreq->http-types + + + + + +base16-bytestring + +base16-bytestring + + + +wreq->base16-bytestring + + + + + +cabal-doctest + +cabal-doctest + + + +wreq->cabal-doctest + + + + + +case-insensitive + +case-insensitive + + + +wreq->case-insensitive + + + + + +crypton + +crypton + + + +wreq->crypton + + + + + +memory + +memory + + + +wreq->memory + + + + + +mime-types + +mime-types + + + +wreq->mime-types + + + + + +http-client-tls + +http-client-tls + + + +wreq->http-client-tls + + + + + +lens + +lens + + + +wreq->lens + + + + + +lens-aeson + +lens-aeson + + + +wreq->lens-aeson + + + + + +psqueues + +psqueues + + + +wreq->psqueues + + + + + +time-locale-compat + +time-locale-compat + + + +wreq->time-locale-compat + + + + + +wreq-examples + +wreq-examples + + + +wreq-examples->wreq + + + 
+ + +wreq-examples->ghc-prim + + + + + +mtl + +mtl + + + +wreq-examples->mtl + + + + + +wreq-examples->bytestring + + + + + +wreq-examples->containers + + + + + +filepath + +filepath + + + +wreq-examples->filepath + + + + + +wreq-examples->text + + + + + +wreq-examples->aeson + + + + + +ansi-wl-pprint + +ansi-wl-pprint + + + +wreq-examples->ansi-wl-pprint + + + + + +wreq-examples->lens + + + + + +wreq-examples->lens-aeson + + + + + +optparse-applicative + +optparse-applicative + + + +wreq-examples->optparse-applicative + + + + + +tagsoup + +tagsoup + + + +wreq-examples->tagsoup + + + + + +contravariant + +contravariant + + + +crypto-pubkey-types + +crypto-pubkey-types + + + +ghc-bignum + +ghc-bignum + + + +ghc-bignum->ghc-prim + + + + + +ghc-internal + +ghc-internal + + + +ghc-internal->ghc-bignum + + + + + +ghc-internal->ghc-prim + + + + + +rts + +rts + + + +ghc-internal->rts + + + + + +ghc-prim->rts + + + + + +stm + +stm + + + +template-haskell->ghc-prim + + + + + +pretty + +pretty + + + +template-haskell->pretty + + + + + +Cabal->mtl + + + + + +Cabal-syntax + +Cabal-syntax + + + +Cabal->Cabal-syntax + + + + + +Win32 + +Win32 + + + +Cabal->Win32 + + + + + +Cabal->bytestring + + + + + +Cabal->containers + + + + + +deepseq + +deepseq + + + +Cabal->deepseq + + + + + +directory + +directory + + + +Cabal->directory + + + + + +Cabal->filepath + + + + + +parsec + +parsec + + + +Cabal->parsec + + + + + +Cabal->pretty + + + + + +process + +process + + + +Cabal->process + + + + + +Cabal->time + + + + + +Cabal-syntax->mtl + + + + + +Cabal-syntax->bytestring + + + + + +Cabal-syntax->containers + + + + + +Cabal-syntax->deepseq + + + + + +Cabal-syntax->directory + + + + + +Cabal-syntax->filepath + + + + + +Cabal-syntax->parsec + + + + + +Cabal-syntax->pretty + + + + + +Cabal-syntax->time + + + + + +binary + +binary + + + +Cabal-syntax->binary + + + + + +Cabal-syntax->text + + + + + +Win32->filepath + + + + + +os-string + +os-string + + + +Win32->os-string + + + + + 
+bytestring->ghc-prim + + + + + +bytestring->template-haskell + + + + + +bytestring->deepseq + + + + + +containers->template-haskell + + + + + +containers->deepseq + + + + + +deepseq->ghc-prim + + + + + +directory->Win32 + + + + + +directory->filepath + + + + + +directory->time + + + + + +directory->os-string + + + + + +filepath->template-haskell + + + + + +filepath->bytestring + + + + + +filepath->deepseq + + + + + +filepath->os-string + + + + + +filepath->exceptions + + + + + +parsec->mtl + + + + + +parsec->bytestring + + + + + +parsec->text + + + + + +pretty->ghc-prim + + + + + +pretty->deepseq + + + + + +process->Win32 + + + + + +process->deepseq + + + + + +process->directory + + + + + +process->filepath + + + + + +time->Win32 + + + + + +time->deepseq + + + + + +binary->bytestring + + + + + +binary->containers + + + + + +text->ghc-prim + + + + + +text->template-haskell + + + + + +text->bytestring + + + + + +text->deepseq + + + + + +text->binary + + + + + +OneTuple + +OneTuple + + + +OneTuple->ghc-prim + + + + + +OneTuple->template-haskell + + + + + +QuickCheck + +QuickCheck + + + +QuickCheck->template-haskell + + + + + +QuickCheck->containers + + + + + +QuickCheck->deepseq + + + + + +random + +random + + + +QuickCheck->random + + + + + +random->mtl + + + + + +random->bytestring + + + + + +random->deepseq + + + + + +RSA + +RSA + + + +RSA->crypto-pubkey-types + + + + + +RSA->bytestring + + + + + +RSA->binary + + + + + +SHA + +SHA + + + +RSA->SHA + + + + + +SHA->bytestring + + + + + +SHA->binary + + + + + +os-string->template-haskell + + + + + +os-string->bytestring + + + + + +os-string->deepseq + + + + + +os-string->exceptions + + + + + +aeson->ghc-prim + + + + + +aeson->template-haskell + + + + + +aeson->bytestring + + + + + +aeson->containers + + + + + +aeson->deepseq + + + + + +aeson->time + + + + + +aeson->text + + + + + +aeson->OneTuple + + + + + +aeson->QuickCheck + + + + + +data-fix + +data-fix + + + +aeson->data-fix + + + + + +dlist + +dlist + + + 
+aeson->dlist + + + + + +aeson->exceptions + + + + + +aeson->hashable + + + + + +indexed-traversable + +indexed-traversable + + + +aeson->indexed-traversable + + + + + +integer-conversion + +integer-conversion + + + +aeson->integer-conversion + + + + + +integer-logarithms + +integer-logarithms + + + +aeson->integer-logarithms + + + + + +network-uri + +network-uri + + + +aeson->network-uri + + + + + +primitive + +primitive + + + +aeson->primitive + + + + + +scientific + +scientific + + + +aeson->scientific + + + + + +semialign + +semialign + + + +aeson->semialign + + + + + +strict + +strict + + + +aeson->strict + + + + + +tagged + +tagged + + + +aeson->tagged + + + + + +text-iso8601 + +text-iso8601 + + + +aeson->text-iso8601 + + + + + +text-short + +text-short + + + +aeson->text-short + + + + + +th-abstraction + +th-abstraction + + + +aeson->th-abstraction + + + + + +these + +these + + + +aeson->these + + + + + +time-compat + +time-compat + + + +aeson->time-compat + + + + + +aeson->unordered-containers + + + + + +uuid-types + +uuid-types + + + +aeson->uuid-types + + + + + +vector + +vector + + + +aeson->vector + + + + + +witherable + +witherable + + + +aeson->witherable + + + + + +data-fix->deepseq + + + + + +data-fix->hashable + + + + + +dlist->deepseq + + + + + +exceptions->mtl + + + + + +exceptions->stm + + + + + +exceptions->template-haskell + + + + + +hashable->ghc-bignum + + + + + +hashable->ghc-prim + + + + + +hashable->bytestring + + + + + +hashable->containers + + + + + +hashable->deepseq + + + + + +hashable->filepath + + + + + +hashable->text + + + + + +hashable->os-string + + + + + +indexed-traversable->containers + + + + + +integer-conversion->bytestring + + + + + +integer-conversion->text + + + + + +integer-conversion->primitive + + + + + +integer-logarithms->ghc-bignum + + + + + +integer-logarithms->ghc-prim + + + + + +network-uri->template-haskell + + + + + +network-uri->deepseq + + + + + +network-uri->parsec + + + + + +primitive->template-haskell + + 
+ + + +primitive->deepseq + + + + + +scientific->template-haskell + + + + + +scientific->bytestring + + + + + +scientific->containers + + + + + +scientific->deepseq + + + + + +scientific->binary + + + + + +scientific->text + + + + + +scientific->hashable + + + + + +scientific->integer-logarithms + + + + + +scientific->primitive + + + + + +semialign->containers + + + + + +semialign->hashable + + + + + +semialign->indexed-traversable + + + + + +semialign->tagged + + + + + +semialign->these + + + + + +semialign->unordered-containers + + + + + +semialign->vector + + + + + +semigroupoids + +semigroupoids + + + +semialign->semigroupoids + + + + + +indexed-traversable-instances + +indexed-traversable-instances + + + +semialign->indexed-traversable-instances + + + + + +strict->ghc-prim + + + + + +strict->bytestring + + + + + +strict->deepseq + + + + + +strict->binary + + + + + +strict->text + + + + + +strict->hashable + + + + + +strict->these + + + + + +tagged->template-haskell + + + + + +tagged->deepseq + + + + + +text-iso8601->time + + + + + +text-iso8601->text + + + + + +text-iso8601->integer-conversion + + + + + +text-iso8601->time-compat + + + + + +text-short->ghc-prim + + + + + +text-short->template-haskell + + + + + +text-short->bytestring + + + + + +text-short->deepseq + + + + + +text-short->binary + + + + + +text-short->text + + + + + +text-short->hashable + + + + + +th-abstraction->ghc-prim + + + + + +th-abstraction->template-haskell + + + + + +th-abstraction->containers + + + + + +these->deepseq + + + + + +these->binary + + + + + +these->hashable + + + + + +time-compat->template-haskell + + + + + +time-compat->deepseq + + + + + +time-compat->time + + + + + +time-compat->hashable + + + + + +base-orphans + +base-orphans + + + +time-compat->base-orphans + + + + + +unordered-containers->template-haskell + + + + + +unordered-containers->deepseq + + + + + +unordered-containers->hashable + + + + + +uuid-types->template-haskell + + + + + +uuid-types->bytestring + + + + 
+ +uuid-types->deepseq + + + + + +uuid-types->binary + + + + + +uuid-types->text + + + + + +uuid-types->random + + + + + +uuid-types->hashable + + + + + +vector->deepseq + + + + + +vector->random + + + + + +vector->primitive + + + + + +witherable->containers + + + + + +witherable->hashable + + + + + +witherable->indexed-traversable + + + + + +witherable->unordered-containers + + + + + +witherable->vector + + + + + +witherable->base-orphans + + + + + +witherable->indexed-traversable-instances + + + + + +prettyprinter-compat-ansi-wl-pprint + +prettyprinter-compat-ansi-wl-pprint + + + +ansi-wl-pprint->prettyprinter-compat-ansi-wl-pprint + + + + + +prettyprinter-compat-ansi-wl-pprint->text + + + + + +prettyprinter + +prettyprinter + + + +prettyprinter-compat-ansi-wl-pprint->prettyprinter + + + + + +prettyprinter-ansi-terminal + +prettyprinter-ansi-terminal + + + +prettyprinter-compat-ansi-wl-pprint->prettyprinter-ansi-terminal + + + + + +async + +async + + + +async->stm + + + + + +async->hashable + + + + + +attoparsec->ghc-prim + + + + + +attoparsec->bytestring + + + + + +attoparsec->containers + + + + + +attoparsec->deepseq + + + + + +attoparsec->text + + + + + +attoparsec->scientific + + + + + +authenticate-oauth->crypto-pubkey-types + + + + + +authenticate-oauth->bytestring + + + + + +authenticate-oauth->time + + + + + +authenticate-oauth->random + + + + + +authenticate-oauth->RSA + + + + + +authenticate-oauth->SHA + + + + + +base64-bytestring + +base64-bytestring + + + +authenticate-oauth->base64-bytestring + + + + + +blaze-builder + +blaze-builder + + + +authenticate-oauth->blaze-builder + + + + + +data-default + +data-default + + + +authenticate-oauth->data-default + + + + + +authenticate-oauth->http-client + + + + + +authenticate-oauth->http-types + + + + + +transformers-compat + +transformers-compat + + + +authenticate-oauth->transformers-compat + + + + + +base64-bytestring->bytestring + + + + + +blaze-builder->ghc-prim + + + + + +blaze-builder->bytestring + + 
+ + + +blaze-builder->deepseq + + + + + +blaze-builder->text + + + + + +data-default->containers + + + + + +http-client->ghc-prim + + + + + +http-client->stm + + + + + +http-client->Win32 + + + + + +http-client->bytestring + + + + + +http-client->containers + + + + + +http-client->deepseq + + + + + +http-client->filepath + + + + + +http-client->time + + + + + +http-client->text + + + + + +http-client->random + + + + + +http-client->exceptions + + + + + +http-client->network-uri + + + + + +http-client->async + + + + + +http-client->base64-bytestring + + + + + +http-client->blaze-builder + + + + + +http-client->http-types + + + + + +http-client->case-insensitive + + + + + +cookie + +cookie + + + +http-client->cookie + + + + + +network + +network + + + +http-client->network + + + + + +iproute + +iproute + + + +http-client->iproute + + + + + +http-client->mime-types + + + + + +streaming-commons + +streaming-commons + + + +http-client->streaming-commons + + + + + +http-types->bytestring + + + + + +http-types->text + + + + + +http-types->case-insensitive + + + + + +transformers-compat->ghc-prim + + + + + +base-orphans->ghc-prim + + + + + +base16-bytestring->bytestring + + + + + +basement + +basement + + + +basement->ghc-prim + + + + + +basement->Win32 + + + + + +bifunctors + +bifunctors + + + +bifunctors->template-haskell + + + + + +bifunctors->containers + + + + + +bifunctors->tagged + + + + + +bifunctors->th-abstraction + + + + + +comonad + +comonad + + + +bifunctors->comonad + + + + + +comonad->containers + + + + + +comonad->indexed-traversable + + + + + +comonad->tagged + + + + + +comonad->transformers-compat + + + + + +distributive + +distributive + + + +comonad->distributive + + + + + +cabal-doctest->Cabal + + + + + +cabal-doctest->directory + + + + + +cabal-doctest->filepath + + + + + +call-stack + +call-stack + + + +call-stack->filepath + + + + + +case-insensitive->bytestring + + + + + +case-insensitive->deepseq + + + + + +case-insensitive->text + + + + + 
+case-insensitive->hashable + + + + + +distributive->tagged + + + + + +distributive->base-orphans + + + + + +cookie->bytestring + + + + + +cookie->deepseq + + + + + +cookie->time + + + + + +cookie->text + + + + + +crypton->ghc-prim + + + + + +crypton->Win32 + + + + + +crypton->bytestring + + + + + +crypton->deepseq + + + + + +crypton->basement + + + + + +integer-gmp + +integer-gmp + + + +crypton->integer-gmp + + + + + +crypton->memory + + + + + +integer-gmp->ghc-bignum + + + + + +integer-gmp->ghc-internal + + + + + +integer-gmp->ghc-prim + + + + + +memory->ghc-prim + + + + + +memory->bytestring + + + + + +memory->deepseq + + + + + +memory->basement + + + + + +crypton-connection + +crypton-connection + + + +crypton-connection->bytestring + + + + + +crypton-connection->containers + + + + + +crypton-connection->data-default + + + + + +crypton-connection->network + + + + + +tls + +tls + + + +crypton-connection->tls + + + + + +network->stm + + + + + +network->bytestring + + + + + +network->deepseq + + + + + +network->directory + + + + + +tls->mtl + + + + + +tls->bytestring + + + + + +tls->data-default + + + + + +tls->base16-bytestring + + + + + +tls->crypton + + + + + +tls->memory + + + + + +tls->network + + + + + +free + +free + + + +free->mtl + + + + + +free->template-haskell + + + + + +free->containers + + + + + +free->exceptions + + + + + +free->indexed-traversable + + + + + +free->th-abstraction + + + + + +free->comonad + + + + + +free->distributive + + + + + +profunctors + +profunctors + + + +free->profunctors + + + + + +free->semigroupoids + + + + + +profunctors->contravariant + + + + + +profunctors->tagged + + + + + +profunctors->base-orphans + + + + + +profunctors->bifunctors + + + + + +profunctors->comonad + + + + + +profunctors->distributive + + + + + +semigroupoids->contravariant + + + + + +semigroupoids->template-haskell + + + + + +semigroupoids->containers + + + + + +semigroupoids->hashable + + + + + +semigroupoids->tagged + + + + + 
+semigroupoids->unordered-containers + + + + + +semigroupoids->transformers-compat + + + + + +semigroupoids->base-orphans + + + + + +semigroupoids->bifunctors + + + + + +semigroupoids->comonad + + + + + +semigroupoids->distributive + + + + + +iproute->bytestring + + + + + +iproute->containers + + + + + +iproute->network + + + + + +mime-types->bytestring + + + + + +mime-types->containers + + + + + +mime-types->text + + + + + +streaming-commons->stm + + + + + +streaming-commons->Win32 + + + + + +streaming-commons->bytestring + + + + + +streaming-commons->directory + + + + + +streaming-commons->filepath + + + + + +streaming-commons->process + + + + + +streaming-commons->text + + + + + +streaming-commons->random + + + + + +streaming-commons->async + + + + + +streaming-commons->network + + + + + +http-client-tls->bytestring + + + + + +http-client-tls->containers + + + + + +http-client-tls->text + + + + + +http-client-tls->exceptions + + + + + +http-client-tls->network-uri + + + + + +http-client-tls->data-default + + + + + +http-client-tls->http-client + + + + + +http-client-tls->http-types + + + + + +http-client-tls->case-insensitive + + + + + +http-client-tls->crypton + + + + + +http-client-tls->memory + + + + + +http-client-tls->crypton-connection + + + + + +http-client-tls->network + + + + + +http-client-tls->tls + + + + + +indexed-traversable-instances->OneTuple + + + + + +indexed-traversable-instances->indexed-traversable + + + + + +indexed-traversable-instances->tagged + + + + + +indexed-traversable-instances->unordered-containers + + + + + +indexed-traversable-instances->vector + + + + + +kan-extensions + +kan-extensions + + + +kan-extensions->contravariant + + + + + +kan-extensions->mtl + + + + + +kan-extensions->containers + + + + + +kan-extensions->tagged + + + + + +kan-extensions->comonad + + + + + +kan-extensions->distributive + + + + + +kan-extensions->free + + + + + +kan-extensions->profunctors + + + + + +kan-extensions->semigroupoids + + + + + 
+lens->contravariant + + + + + +lens->ghc-prim + + + + + +lens->mtl + + + + + +lens->template-haskell + + + + + +lens->bytestring + + + + + +lens->containers + + + + + +lens->filepath + + + + + +lens->text + + + + + +lens->exceptions + + + + + +lens->hashable + + + + + +lens->indexed-traversable + + + + + +lens->strict + + + + + +lens->tagged + + + + + +lens->th-abstraction + + + + + +lens->these + + + + + +lens->unordered-containers + + + + + +lens->vector + + + + + +lens->transformers-compat + + + + + +lens->base-orphans + + + + + +lens->bifunctors + + + + + +lens->comonad + + + + + +lens->call-stack + + + + + +lens->distributive + + + + + +lens->free + + + + + +lens->profunctors + + + + + +lens->semigroupoids + + + + + +lens->indexed-traversable-instances + + + + + +lens->kan-extensions + + + + + +parallel + +parallel + + + +lens->parallel + + + + + +reflection + +reflection + + + +lens->reflection + + + + + +parallel->ghc-prim + + + + + +parallel->containers + + + + + +parallel->deepseq + + + + + +reflection->template-haskell + + + + + +lens-aeson->bytestring + + + + + +lens-aeson->text + + + + + +lens-aeson->aeson + + + + + +lens-aeson->scientific + + + + + +lens-aeson->text-short + + + + + +lens-aeson->unordered-containers + + + + + +lens-aeson->vector + + + + + +lens-aeson->lens + + + + + +optparse-applicative->process + + + + + +optparse-applicative->text + + + + + +optparse-applicative->transformers-compat + + + + + +optparse-applicative->prettyprinter + + + + + +optparse-applicative->prettyprinter-ansi-terminal + + + + + +prettyprinter->text + + + + + +prettyprinter-ansi-terminal->text + + + + + +prettyprinter-ansi-terminal->prettyprinter + + + + + +psqueues->ghc-prim + + + + + +psqueues->deepseq + + + + + +psqueues->hashable + + + + + +tagsoup->bytestring + + + + + +tagsoup->containers + + + + + +tagsoup->text + + + + + +time-locale-compat->time + + + + + diff --git a/doc/img/dot_command/wreq-example5.svg b/doc/img/dot_command/wreq-example5.svg new file 
mode 100644 index 0000000000..ff54025103 --- /dev/null +++ b/doc/img/dot_command/wreq-example5.svg @@ -0,0 +1,1861 @@ + + + + + + +deps + + + +wreq + +wreq + + + +ghc-prim + +ghc-prim + + + +wreq->ghc-prim + + + + + +template-haskell + +template-haskell + + + +wreq->template-haskell + + + + + +Cabal + +Cabal + + + +wreq->Cabal + + + + + +bytestring + +bytestring + + + +wreq->bytestring + + + + + +containers + +containers + + + +wreq->containers + + + + + +time + +time + + + +wreq->time + + + + + +text + +text + + + +wreq->text + + + + + +memory + +memory + + + +wreq->memory + + + + + +attoparsec + +attoparsec + + + +wreq->attoparsec + + + + + +authenticate-oauth + +authenticate-oauth + + + +wreq->authenticate-oauth + + + + + +http-types + +http-types + + + +wreq->http-types + + + + + +base16-bytestring + +base16-bytestring + + + +wreq->base16-bytestring + + + + + +cabal-doctest + +cabal-doctest + + + +wreq->cabal-doctest + + + + + +case-insensitive + +case-insensitive + + + +wreq->case-insensitive + + + + + +hashable + +hashable + + + +wreq->hashable + + + + + +crypton + +crypton + + + +wreq->crypton + + + + + +lens-aeson + +lens-aeson + + + +wreq->lens-aeson + + + + + +unordered-containers + +unordered-containers + + + +wreq->unordered-containers + + + + + +mime-types + +mime-types + + + +wreq->mime-types + + + + + +psqueues + +psqueues + + + +wreq->psqueues + + + + + +time-locale-compat + +time-locale-compat + + + +wreq->time-locale-compat + + + + + +ansi-terminal-types + +ansi-terminal-types + + + +ghc-bignum + +ghc-bignum + + + +ghc-bignum->ghc-prim + + + + + +ghc-internal + +ghc-internal + + + +ghc-internal->ghc-bignum + + + + + +ghc-internal->ghc-prim + + + + + +rts + +rts + + + +ghc-internal->rts + + + + + +ghc-prim->rts + + + + + +mtl + +mtl + + + +stm + +stm + + + +template-haskell->ghc-prim + + + + + +pretty + +pretty + + + +template-haskell->pretty + + + + + +Cabal->mtl + + + + + +Cabal-syntax + +Cabal-syntax + + + +Cabal->Cabal-syntax + + + + + +Win32 + 
+Win32 + + + +Cabal->Win32 + + + + + +Cabal->bytestring + + + + + +Cabal->containers + + + + + +deepseq + +deepseq + + + +Cabal->deepseq + + + + + +directory + +directory + + + +Cabal->directory + + + + + +filepath + +filepath + + + +Cabal->filepath + + + + + +parsec + +parsec + + + +Cabal->parsec + + + + + +Cabal->pretty + + + + + +process + +process + + + +Cabal->process + + + + + +Cabal->time + + + + + +Cabal-syntax->mtl + + + + + +Cabal-syntax->bytestring + + + + + +Cabal-syntax->containers + + + + + +Cabal-syntax->deepseq + + + + + +Cabal-syntax->directory + + + + + +Cabal-syntax->filepath + + + + + +Cabal-syntax->parsec + + + + + +Cabal-syntax->pretty + + + + + +Cabal-syntax->time + + + + + +binary + +binary + + + +Cabal-syntax->binary + + + + + +Cabal-syntax->text + + + + + +Win32->filepath + + + + + +os-string + +os-string + + + +Win32->os-string + + + + + +bytestring->ghc-prim + + + + + +bytestring->template-haskell + + + + + +bytestring->deepseq + + + + + +containers->template-haskell + + + + + +containers->deepseq + + + + + +deepseq->ghc-prim + + + + + +directory->Win32 + + + + + +directory->filepath + + + + + +directory->time + + + + + +directory->os-string + + + + + +filepath->template-haskell + + + + + +filepath->bytestring + + + + + +filepath->deepseq + + + + + +filepath->os-string + + + + + +parsec->mtl + + + + + +parsec->bytestring + + + + + +parsec->text + + + + + +pretty->ghc-prim + + + + + +pretty->deepseq + + + + + +process->Win32 + + + + + +process->deepseq + + + + + +process->directory + + + + + +process->filepath + + + + + +time->Win32 + + + + + +time->deepseq + + + + + +binary->bytestring + + + + + +binary->containers + + + + + +text->ghc-prim + + + + + +text->template-haskell + + + + + +text->bytestring + + + + + +text->deepseq + + + + + +text->binary + + + + + +RSA + +RSA + + + +RSA->bytestring + + + + + +RSA->binary + + + + + +SHA + +SHA + + + +RSA->SHA + + + + + +crypto-api + +crypto-api + + + +RSA->crypto-api + + + + + 
+crypto-pubkey-types + +crypto-pubkey-types + + + +RSA->crypto-pubkey-types + + + + + +SHA->bytestring + + + + + +SHA->binary + + + + + +crypto-api->bytestring + + + + + +cereal + +cereal + + + +crypto-api->cereal + + + + + +entropy + +entropy + + + +crypto-api->entropy + + + + + +tagged + +tagged + + + +crypto-api->tagged + + + + + +asn1-encoding + +asn1-encoding + + + +crypto-pubkey-types->asn1-encoding + + + + + +asn1-types + +asn1-types + + + +crypto-pubkey-types->asn1-types + + + + + +os-string->template-haskell + + + + + +os-string->bytestring + + + + + +os-string->deepseq + + + + + +ansi-terminal + +ansi-terminal + + + +ansi-terminal->ansi-terminal-types + + + + + +asn1-encoding->bytestring + + + + + +asn1-encoding->asn1-types + + + + + +hourglass + +hourglass + + + +asn1-encoding->hourglass + + + + + +asn1-types->bytestring + + + + + +asn1-types->hourglass + + + + + +asn1-types->memory + + + + + +hourglass->Win32 + + + + + +hourglass->deepseq + + + + + +memory->ghc-prim + + + + + +memory->bytestring + + + + + +memory->deepseq + + + + + +basement + +basement + + + +memory->basement + + + + + +attoparsec->ghc-prim + + + + + +attoparsec->bytestring + + + + + +attoparsec->containers + + + + + +attoparsec->deepseq + + + + + +attoparsec->text + + + + + +scientific + +scientific + + + +attoparsec->scientific + + + + + +scientific->template-haskell + + + + + +scientific->bytestring + + + + + +scientific->containers + + + + + +scientific->deepseq + + + + + +scientific->binary + + + + + +scientific->text + + + + + +scientific->hashable + + + + + +integer-logarithms + +integer-logarithms + + + +scientific->integer-logarithms + + + + + +primitive + +primitive + + + +scientific->primitive + + + + + +authenticate-oauth->bytestring + + + + + +authenticate-oauth->time + + + + + +authenticate-oauth->RSA + + + + + +authenticate-oauth->SHA + + + + + +authenticate-oauth->crypto-pubkey-types + + + + + +base64-bytestring + +base64-bytestring + + + 
+authenticate-oauth->base64-bytestring + + + + + +blaze-builder + +blaze-builder + + + +authenticate-oauth->blaze-builder + + + + + +data-default + +data-default + + + +authenticate-oauth->data-default + + + + + +authenticate-oauth->http-types + + + + + +random + +random + + + +authenticate-oauth->random + + + + + +transformers-compat + +transformers-compat + + + +authenticate-oauth->transformers-compat + + + + + +base64-bytestring->bytestring + + + + + +blaze-builder->ghc-prim + + + + + +blaze-builder->bytestring + + + + + +blaze-builder->deepseq + + + + + +blaze-builder->text + + + + + +data-default->containers + + + + + +http-types->bytestring + + + + + +http-types->text + + + + + +http-types->case-insensitive + + + + + +random->mtl + + + + + +random->bytestring + + + + + +random->deepseq + + + + + +splitmix + +splitmix + + + +random->splitmix + + + + + +transformers-compat->ghc-prim + + + + + +base16-bytestring->bytestring + + + + + +basement->ghc-prim + + + + + +basement->Win32 + + + + + +cabal-doctest->Cabal + + + + + +cabal-doctest->directory + + + + + +cabal-doctest->filepath + + + + + +case-insensitive->bytestring + + + + + +case-insensitive->deepseq + + + + + +case-insensitive->text + + + + + +case-insensitive->hashable + + + + + +hashable->ghc-bignum + + + + + +hashable->ghc-prim + + + + + +hashable->bytestring + + + + + +hashable->containers + + + + + +hashable->deepseq + + + + + +hashable->filepath + + + + + +hashable->text + + + + + +hashable->os-string + + + + + +cereal->ghc-prim + + + + + +cereal->bytestring + + + + + +cereal->containers + + + + + +entropy->Cabal + + + + + +entropy->Win32 + + + + + +entropy->bytestring + + + + + +entropy->directory + + + + + +entropy->filepath + + + + + +entropy->process + + + + + +tagged->template-haskell + + + + + +tagged->deepseq + + + + + +crypton->ghc-prim + + + + + +crypton->Win32 + + + + + +crypton->bytestring + + + + + +crypton->deepseq + + + + + +crypton->memory + + + + + +crypton->basement + + + + + 
+integer-gmp + +integer-gmp + + + +crypton->integer-gmp + + + + + +integer-gmp->ghc-bignum + + + + + +integer-gmp->ghc-internal + + + + + +integer-gmp->ghc-prim + + + + + +integer-logarithms->ghc-bignum + + + + + +integer-logarithms->ghc-prim + + + + + +lens-aeson->bytestring + + + + + +lens-aeson->text + + + + + +lens-aeson->scientific + + + + + +text-short + +text-short + + + +lens-aeson->text-short + + + + + +lens-aeson->unordered-containers + + + + + +vector + +vector + + + +lens-aeson->vector + + + + + +text-short->ghc-prim + + + + + +text-short->template-haskell + + + + + +text-short->bytestring + + + + + +text-short->deepseq + + + + + +text-short->binary + + + + + +text-short->text + + + + + +text-short->hashable + + + + + +unordered-containers->template-haskell + + + + + +unordered-containers->deepseq + + + + + +unordered-containers->hashable + + + + + +vector->deepseq + + + + + +vector->random + + + + + +vector->primitive + + + + + +tasty + +tasty + + + +vector->tasty + + + + + +vector-stream + +vector-stream + + + +vector->vector-stream + + + + + +mime-types->bytestring + + + + + +mime-types->containers + + + + + +mime-types->text + + + + + +optparse-applicative + +optparse-applicative + + + +optparse-applicative->process + + + + + +optparse-applicative->text + + + + + +optparse-applicative->transformers-compat + + + + + +prettyprinter + +prettyprinter + + + +optparse-applicative->prettyprinter + + + + + +prettyprinter-ansi-terminal + +prettyprinter-ansi-terminal + + + +optparse-applicative->prettyprinter-ansi-terminal + + + + + +prettyprinter->text + + + + + +prettyprinter-ansi-terminal->text + + + + + +prettyprinter-ansi-terminal->ansi-terminal + + + + + +prettyprinter-ansi-terminal->prettyprinter + + + + + +primitive->template-haskell + + + + + +primitive->deepseq + + + + + +psqueues->ghc-prim + + + + + +psqueues->deepseq + + + + + +psqueues->hashable + + + + + +splitmix->deepseq + + + + + +tasty->stm + + + + + +tasty->containers + + + + + 
+tasty->ansi-terminal + + + + + +tasty->tagged + + + + + +tasty->optparse-applicative + + + + + +time-locale-compat->time + + + + + +vector-stream->ghc-prim + + + + + diff --git a/doc/img/dot_command/wreq-example6.svg b/doc/img/dot_command/wreq-example6.svg new file mode 100644 index 0000000000..45d0129511 --- /dev/null +++ b/doc/img/dot_command/wreq-example6.svg @@ -0,0 +1,1861 @@ + + + + + + +deps + + + +wreq + +wreq + + + +ghc-prim + +ghc-prim + + + +wreq->ghc-prim + + + + + +template-haskell + +template-haskell + + + +wreq->template-haskell + + + + + +Cabal + +Cabal + + + +wreq->Cabal + + + + + +bytestring + +bytestring + + + +wreq->bytestring + + + + + +containers + +containers + + + +wreq->containers + + + + + +time + +time + + + +wreq->time + + + + + +text + +text + + + +wreq->text + + + + + +memory + +memory + + + +wreq->memory + + + + + +attoparsec + +attoparsec + + + +wreq->attoparsec + + + + + +authenticate-oauth + +authenticate-oauth + + + +wreq->authenticate-oauth + + + + + +http-types + +http-types + + + +wreq->http-types + + + + + +base16-bytestring + +base16-bytestring + + + +wreq->base16-bytestring + + + + + +cabal-doctest + +cabal-doctest + + + +wreq->cabal-doctest + + + + + +case-insensitive + +case-insensitive + + + +wreq->case-insensitive + + + + + +hashable + +hashable + + + +wreq->hashable + + + + + +crypton + +crypton + + + +wreq->crypton + + + + + +lens-aeson + +lens-aeson + + + +wreq->lens-aeson + + + + + +unordered-containers + +unordered-containers + + + +wreq->unordered-containers + + + + + +mime-types + +mime-types + + + +wreq->mime-types + + + + + +psqueues + +psqueues + + + +wreq->psqueues + + + + + +time-locale-compat + +time-locale-compat + + + +wreq->time-locale-compat + + + + + +ansi-terminal-types + +ansi-terminal-types + + + +ghc-bignum + +ghc-bignum + + + +ghc-bignum->ghc-prim + + + + + +ghc-internal + +ghc-internal + + + +ghc-internal->ghc-bignum + + + + + +ghc-internal->ghc-prim + + + + + +rts + +rts + + + +ghc-internal->rts 
+ + + + + +ghc-prim->rts + + + + + +mtl + +mtl + + + +stm + +stm + + + +template-haskell->ghc-prim + + + + + +pretty + +pretty + + + +template-haskell->pretty + + + + + +Cabal->mtl + + + + + +Cabal-syntax + +Cabal-syntax + + + +Cabal->Cabal-syntax + + + + + +Win32 + +Win32 + + + +Cabal->Win32 + + + + + +Cabal->bytestring + + + + + +Cabal->containers + + + + + +deepseq + +deepseq + + + +Cabal->deepseq + + + + + +directory + +directory + + + +Cabal->directory + + + + + +filepath + +filepath + + + +Cabal->filepath + + + + + +parsec + +parsec + + + +Cabal->parsec + + + + + +Cabal->pretty + + + + + +process + +process + + + +Cabal->process + + + + + +Cabal->time + + + + + +Cabal-syntax->mtl + + + + + +Cabal-syntax->bytestring + + + + + +Cabal-syntax->containers + + + + + +Cabal-syntax->deepseq + + + + + +Cabal-syntax->directory + + + + + +Cabal-syntax->filepath + + + + + +Cabal-syntax->parsec + + + + + +Cabal-syntax->pretty + + + + + +Cabal-syntax->time + + + + + +binary + +binary + + + +Cabal-syntax->binary + + + + + +Cabal-syntax->text + + + + + +Win32->filepath + + + + + +os-string + +os-string + + + +Win32->os-string + + + + + +bytestring->ghc-prim + + + + + +bytestring->template-haskell + + + + + +bytestring->deepseq + + + + + +containers->template-haskell + + + + + +containers->deepseq + + + + + +deepseq->ghc-prim + + + + + +directory->Win32 + + + + + +directory->filepath + + + + + +directory->time + + + + + +directory->os-string + + + + + +filepath->template-haskell + + + + + +filepath->bytestring + + + + + +filepath->deepseq + + + + + +filepath->os-string + + + + + +parsec->mtl + + + + + +parsec->bytestring + + + + + +parsec->text + + + + + +pretty->ghc-prim + + + + + +pretty->deepseq + + + + + +process->Win32 + + + + + +process->deepseq + + + + + +process->directory + + + + + +process->filepath + + + + + +time->Win32 + + + + + +time->deepseq + + + + + +binary->bytestring + + + + + +binary->containers + + + + + +text->ghc-prim + + + + + +text->template-haskell + 
+ + + + +text->bytestring + + + + + +text->deepseq + + + + + +text->binary + + + + + +RSA + +RSA + + + +RSA->bytestring + + + + + +RSA->binary + + + + + +SHA + +SHA + + + +RSA->SHA + + + + + +crypto-api + +crypto-api + + + +RSA->crypto-api + + + + + +crypto-pubkey-types + +crypto-pubkey-types + + + +RSA->crypto-pubkey-types + + + + + +SHA->bytestring + + + + + +SHA->binary + + + + + +crypto-api->bytestring + + + + + +cereal + +cereal + + + +crypto-api->cereal + + + + + +entropy + +entropy + + + +crypto-api->entropy + + + + + +tagged + +tagged + + + +crypto-api->tagged + + + + + +asn1-encoding + +asn1-encoding + + + +crypto-pubkey-types->asn1-encoding + + + + + +asn1-types + +asn1-types + + + +crypto-pubkey-types->asn1-types + + + + + +os-string->template-haskell + + + + + +os-string->bytestring + + + + + +os-string->deepseq + + + + + +ansi-terminal + +ansi-terminal + + + +ansi-terminal->ansi-terminal-types + + + + + +asn1-encoding->bytestring + + + + + +asn1-encoding->asn1-types + + + + + +hourglass + +hourglass + + + +asn1-encoding->hourglass + + + + + +asn1-types->bytestring + + + + + +asn1-types->hourglass + + + + + +asn1-types->memory + + + + + +hourglass->Win32 + + + + + +hourglass->deepseq + + + + + +memory->ghc-prim + + + + + +memory->bytestring + + + + + +memory->deepseq + + + + + +basement + +basement + + + +memory->basement + + + + + +attoparsec->ghc-prim + + + + + +attoparsec->bytestring + + + + + +attoparsec->containers + + + + + +attoparsec->deepseq + + + + + +attoparsec->text + + + + + +scientific + +scientific + + + +attoparsec->scientific + + + + + +scientific->template-haskell + + + + + +scientific->bytestring + + + + + +scientific->containers + + + + + +scientific->deepseq + + + + + +scientific->binary + + + + + +scientific->text + + + + + +scientific->hashable + + + + + +integer-logarithms + +integer-logarithms + + + +scientific->integer-logarithms + + + + + +primitive + +primitive + + + +scientific->primitive + + + + + 
+authenticate-oauth->bytestring + + + + + +authenticate-oauth->time + + + + + +authenticate-oauth->RSA + + + + + +authenticate-oauth->SHA + + + + + +authenticate-oauth->crypto-pubkey-types + + + + + +base64-bytestring + +base64-bytestring + + + +authenticate-oauth->base64-bytestring + + + + + +blaze-builder + +blaze-builder + + + +authenticate-oauth->blaze-builder + + + + + +data-default + +data-default + + + +authenticate-oauth->data-default + + + + + +authenticate-oauth->http-types + + + + + +random + +random + + + +authenticate-oauth->random + + + + + +transformers-compat + +transformers-compat + + + +authenticate-oauth->transformers-compat + + + + + +base64-bytestring->bytestring + + + + + +blaze-builder->ghc-prim + + + + + +blaze-builder->bytestring + + + + + +blaze-builder->deepseq + + + + + +blaze-builder->text + + + + + +data-default->containers + + + + + +http-types->bytestring + + + + + +http-types->text + + + + + +http-types->case-insensitive + + + + + +random->mtl + + + + + +random->bytestring + + + + + +random->deepseq + + + + + +splitmix + +splitmix + + + +random->splitmix + + + + + +transformers-compat->ghc-prim + + + + + +base16-bytestring->bytestring + + + + + +basement->ghc-prim + + + + + +basement->Win32 + + + + + +cabal-doctest->Cabal + + + + + +cabal-doctest->directory + + + + + +cabal-doctest->filepath + + + + + +case-insensitive->bytestring + + + + + +case-insensitive->deepseq + + + + + +case-insensitive->text + + + + + +case-insensitive->hashable + + + + + +hashable->ghc-bignum + + + + + +hashable->ghc-prim + + + + + +hashable->bytestring + + + + + +hashable->containers + + + + + +hashable->deepseq + + + + + +hashable->filepath + + + + + +hashable->text + + + + + +hashable->os-string + + + + + +cereal->ghc-prim + + + + + +cereal->bytestring + + + + + +cereal->containers + + + + + +entropy->Cabal + + + + + +entropy->Win32 + + + + + +entropy->bytestring + + + + + +entropy->directory + + + + + +entropy->filepath + + + + + +entropy->process + + 
+ + + +tagged->template-haskell + + + + + +tagged->deepseq + + + + + +crypton->ghc-prim + + + + + +crypton->Win32 + + + + + +crypton->bytestring + + + + + +crypton->deepseq + + + + + +crypton->memory + + + + + +crypton->basement + + + + + +integer-gmp + +integer-gmp + + + +crypton->integer-gmp + + + + + +integer-gmp->ghc-bignum + + + + + +integer-gmp->ghc-internal + + + + + +integer-gmp->ghc-prim + + + + + +integer-logarithms->ghc-bignum + + + + + +integer-logarithms->ghc-prim + + + + + +lens-aeson->bytestring + + + + + +lens-aeson->text + + + + + +lens-aeson->scientific + + + + + +text-short + +text-short + + + +lens-aeson->text-short + + + + + +lens-aeson->unordered-containers + + + + + +vector + +vector + + + +lens-aeson->vector + + + + + +text-short->ghc-prim + + + + + +text-short->template-haskell + + + + + +text-short->bytestring + + + + + +text-short->deepseq + + + + + +text-short->binary + + + + + +text-short->text + + + + + +text-short->hashable + + + + + +unordered-containers->template-haskell + + + + + +unordered-containers->deepseq + + + + + +unordered-containers->hashable + + + + + +vector->deepseq + + + + + +vector->random + + + + + +vector->primitive + + + + + +tasty + +tasty + + + +vector->tasty + + + + + +vector-stream + +vector-stream + + + +vector->vector-stream + + + + + +mime-types->bytestring + + + + + +mime-types->containers + + + + + +mime-types->text + + + + + +optparse-applicative + +optparse-applicative + + + +optparse-applicative->process + + + + + +optparse-applicative->text + + + + + +optparse-applicative->transformers-compat + + + + + +prettyprinter + +prettyprinter + + + +optparse-applicative->prettyprinter + + + + + +prettyprinter-ansi-terminal + +prettyprinter-ansi-terminal + + + +optparse-applicative->prettyprinter-ansi-terminal + + + + + +prettyprinter->text + + + + + +prettyprinter-ansi-terminal->text + + + + + +prettyprinter-ansi-terminal->ansi-terminal + + + + + +prettyprinter-ansi-terminal->prettyprinter + + + + + 
+primitive->template-haskell + + + + + +primitive->deepseq + + + + + +psqueues->ghc-prim + + + + + +psqueues->deepseq + + + + + +psqueues->hashable + + + + + +splitmix->deepseq + + + + + +tasty->stm + + + + + +tasty->containers + + + + + +tasty->ansi-terminal + + + + + +tasty->tagged + + + + + +tasty->optparse-applicative + + + + + +time-locale-compat->time + + + + + +vector-stream->ghc-prim + + + + + diff --git a/doc/img/dot_command/wreq-example7.svg b/doc/img/dot_command/wreq-example7.svg new file mode 100644 index 0000000000..d78d538314 --- /dev/null +++ b/doc/img/dot_command/wreq-example7.svg @@ -0,0 +1,457 @@ + + + + + + +deps + + + +wreq + +wreq + + + +memory + +memory + + + +wreq->memory + + + + + +authenticate-oauth + +authenticate-oauth + + + +wreq->authenticate-oauth + + + + + +crypton + +crypton + + + +wreq->crypton + + + + + +http-client-tls + +http-client-tls + + + +wreq->http-client-tls + + + + + +wreq-examples + +wreq-examples + + + +wreq-examples->wreq + + + + + +basement + +basement + + + +RSA + +RSA + + + +crypto-pubkey-types + +crypto-pubkey-types + + + +RSA->crypto-pubkey-types + + + + + +asn1-encoding + +asn1-encoding + + + +crypto-pubkey-types->asn1-encoding + + + + + +asn1-types + +asn1-types + + + +crypto-pubkey-types->asn1-types + + + + + +asn1-encoding->asn1-types + + + + + +asn1-types->memory + + + + + +asn1-parse + +asn1-parse + + + +asn1-parse->asn1-encoding + + + + + +asn1-parse->asn1-types + + + + + +memory->basement + + + + + +authenticate-oauth->RSA + + + + + +authenticate-oauth->crypto-pubkey-types + + + + + +crypton->basement + + + + + +crypton->memory + + + + + +crypton-connection + +crypton-connection + + + +crypton-x509-store + +crypton-x509-store + + + +crypton-connection->crypton-x509-store + + + + + +crypton-x509-system + +crypton-x509-system + + + +crypton-connection->crypton-x509-system + + + + + +tls + +tls + + + +crypton-connection->tls + + + + + +crypton-x509-store->asn1-encoding + + + + + 
+crypton-x509-store->asn1-types + + + + + +crypton-x509-store->crypton + + + + + +crypton-x509 + +crypton-x509 + + + +crypton-x509-store->crypton-x509 + + + + + +pem + +pem + + + +crypton-x509-store->pem + + + + + +crypton-x509-system->asn1-encoding + + + + + +crypton-x509-system->crypton-x509-store + + + + + +crypton-x509-system->crypton-x509 + + + + + +crypton-x509-system->pem + + + + + +tls->asn1-encoding + + + + + +tls->asn1-types + + + + + +tls->memory + + + + + +tls->crypton + + + + + +tls->crypton-x509-store + + + + + +tls->crypton-x509 + + + + + +crypton-x509-validation + +crypton-x509-validation + + + +tls->crypton-x509-validation + + + + + +crypton-x509->asn1-encoding + + + + + +crypton-x509->asn1-types + + + + + +crypton-x509->asn1-parse + + + + + +crypton-x509->memory + + + + + +crypton-x509->crypton + + + + + +crypton-x509->pem + + + + + +pem->basement + + + + + +pem->memory + + + + + +crypton-x509-validation->asn1-encoding + + + + + +crypton-x509-validation->asn1-types + + + + + +crypton-x509-validation->memory + + + + + +crypton-x509-validation->crypton + + + + + +crypton-x509-validation->crypton-x509-store + + + + + +crypton-x509-validation->crypton-x509 + + + + + +crypton-x509-validation->pem + + + + + +http-client-tls->memory + + + + + +http-client-tls->crypton + + + + + +http-client-tls->crypton-connection + + + + + +http-client-tls->tls + + + + + diff --git a/doc/img/manageHLS.png b/doc/img/manageHLS.png new file mode 100644 index 0000000000..d6589d593a Binary files /dev/null and b/doc/img/manageHLS.png differ diff --git a/doc/img/stack-favicon.svg b/doc/img/stack-favicon.svg new file mode 100644 index 0000000000..cf1049d03a --- /dev/null +++ b/doc/img/stack-favicon.svg @@ -0,0 +1,42 @@ + + + + + + + + + + + diff --git a/doc/img/stack-logo-white.svg b/doc/img/stack-logo-white.svg new file mode 100644 index 0000000000..da33a9f595 --- /dev/null +++ b/doc/img/stack-logo-white.svg @@ -0,0 +1,38 @@ + + + + + + + + + + diff --git 
a/doc/img/stack-open-graph.png b/doc/img/stack-open-graph.png new file mode 100644 index 0000000000..a329f5ab45 Binary files /dev/null and b/doc/img/stack-open-graph.png differ diff --git a/doc/img/stack-open-graph.svg b/doc/img/stack-open-graph.svg new file mode 100644 index 0000000000..497940d734 --- /dev/null +++ b/doc/img/stack-open-graph.svg @@ -0,0 +1,81 @@ + + + + + + + + + + + + + The Haskell Tool + STACK + https://haskellstack.org + + diff --git a/doc/img/stack-welcome.gif b/doc/img/stack-welcome.gif new file mode 100644 index 0000000000..c6a2c8bb86 Binary files /dev/null and b/doc/img/stack-welcome.gif differ diff --git a/doc/install_and_upgrade.md b/doc/install_and_upgrade.md index 2cb8aa58f3..d900db1118 100644 --- a/doc/install_and_upgrade.md +++ b/doc/install_and_upgrade.md @@ -1,393 +1,882 @@
-# Install/upgrade +# Setting up -For common Un\*x operating systems (including macOS), all you need to do is run: +The goal of setting up is a `stack` executable file on the PATH. When Stack is +used, it sets other things up as needed. - curl -sSL https://get.haskellstack.org/ | sh +*[PATH]: An environment variable that specifies a list of directories searched for executable files. -or: +??? question "How do I know if Stack is on the PATH?" - wget -qO- https://get.haskellstack.org/ | sh + Command `stack`. If Stack is available, that should output information about + how to use it. +??? question "How do I find where Stack is located?" -Note that this script will ask for root access using `sudo` in order -to use your platform's package manager to install dependencies and to -install to `/usr/local/bin`. If you prefer more control, follow the -manual installation instructions for your platform below. + === "Unix-like" -Binaries for other operating systems are listed below, and available on -[the GitHub release page](https://github.com/fpco/stack/releases). For the -future, we are open to supporting more OSes (to request one, please -[submit an issue](https://github.com/commercialhaskell/stack/issues/new)). + Command `which -a stack`. -Binary packages are signed with this [signing key](SIGNING_KEY.md). + === "Windows" -If you are writing a script that needs to download the latest binary, you can -use URLs like `https://get.haskellstack.org/stable/.` -(e.g. https://get.haskellstack.org/stable/linux-x86_64.tar.gz) that always -point to the latest stable release. + Command `where.exe stack`. -## Windows +??? question "How do I find what version of Stack is available?" -We recommend installing to the default location with these installers, as that -will make `stack install` and `stack upgrade` work correctly out of the box. + Command `stack --version` or `stack --numeric-version`. 
- * [Windows 64-bit Installer](https://get.haskellstack.org/stable/windows-x86_64-installer.exe) +??? question "If I do not use GHCup, is there a preferred location for Stack?" -If in doubt: you should prefer the 64-bit installer. + You can put the `stack` executable file anywhere on your PATH. However, a + good location is the directory where Stack itself will install executables. + That location depends on the operating system: -You may see a "Windows Defender SmartScreen prevented an unrecognized app from -starting" warning when you try to run the installer. If so, click on -**More info**, and then click on the **Run anyway** button that appears. + === "Unix-like" -### Manual download + Stack installs executables to: -* Download the latest release: + ~~~text + $HOME/.local/bin + ~~~ - * [Windows 64-bit](https://get.haskellstack.org/stable/windows-x86_64.zip) + If you do not have that directory in your PATH, you may need to update + your PATH. That can be done by editing the `~/.bashrc` file. -* Unpack the archive and place `stack.exe` somewhere on your `%PATH%` (see - [Path section below](#path)) and you can then run `stack` on the command line. + === "Windows" -* Now you can run `stack` from the terminal. + Stack installs executables to: -## macOS + ~~~text + $Env:APPDATA\local\bin + ~~~ -We generally test on the current version of macOS and do our best to keep it compatible with the three most recent major versions. Stack may also work on older versions (YMMV). + For example: `C:\Users\\AppData\Roaming\local\bin`. -### Installer script + If you do not have that directory in your PATH, you may need to update + your PATH. That can be done by searching for 'Edit Environment variables + for your account' under Start. 
-Run: + === "Windows (Command Prompt)" - curl -sSL https://get.haskellstack.org/ | sh + Stack installs executables to: -### Manual download + ~~~text + %APPDATA%\local\bin + ~~~ -* Download the latest release: - * [macOS 64-bit](https://get.haskellstack.org/stable/osx-x86_64.tar.gz) -* Extract the archive and place `stack` somewhere on your `$PATH` (see - [Path section below](#path)) -* Now you can run `stack` from the terminal. + For example: `C:\Users\\AppData\Roaming\local\bin`. -### Using Homebrew + If you do not have that directory in your PATH, you may need to update + your PATH. That can be done by searching for 'Edit Environment variables + for your account' under Start. -If you have the popular [brew](https://brew.sh/) tool installed, you can just do: + !!! note - brew install haskell-stack + If you used GHCup to install Stack, GHCup puts executable files in the + `bin` directory in the GHCup root directory. -* The Homebrew formula and bottles are **unofficial** and lag slightly behind new Stack releases, -but tend to be updated within a day or two. -* Normally, Homebrew will install from a pre-built binary (aka "pour from a -bottle"), but if `brew` starts trying to build everything from source (which -will take hours), see -[their FAQ on the topic](https://github.com/Homebrew/brew/blob/master/docs/FAQ.md#why-do-you-compile-everything). +To get and use Stack, some other things need to be in place first: -### Notes +
-After installation, running `stack setup` might fail with `configure: error: cannot run C compiled programs.` in which case you should run: +- :material-laptop:{ .lg .middle } __A computer__ - xcode-select --install + --- -Starting with macOs 10.14 (Mojave) running `xcode-select --install` [might not be enough](https://forums.developer.apple.com/thread/104296). You will need to install additional headers by running: + Stack will need at least about 5 GB of disk space[^1]. It will help to know + what platform your computer provides. - cd /Library/Developer/CommandLineTools/Packages/ - open macOS_SDK_headers_for_macOS_10.14.pkg + [^1]: + About 3 GB for a single version of GHC and about 2 GB for a local copy + of the Hackage package index. -If you are on OS X 10.11 ("El Capitan") and encounter either of these -problems, see the linked FAQ entries: + *[platform]: Machine architecture (eg x86_64, AArch64) and operating system (eg Linux distribution, macOS, Windows). - * [GHC 7.8.4 fails with `/usr/bin/ar: permission denied`](faq.md#usr-bin-ar-permission-denied) - * [DYLD_LIBRARY_PATH is ignored](faq.md#dyld-library-path-ignored) +- :material-wifi:{ .lg .middle } __Access to the Internet__ + --- -If you are on OS X 10.12 ("Sierra") and encounter [GHC panic while building, see this issue](https://github.com/commercialhaskell/stack/issues/2577) + Stack will need to fetch files from remote locations. -## Ubuntu +- :octicons-terminal-24:{ .lg .middle } __Terminal software__ -Use the [generic Linux option](#linux). + --- -There is also a [Ubuntu -package](http://packages.ubuntu.com/search?keywords=haskell-stack&searchon=names&suite=all§ion=all) -for Ubuntu 16.10 and up, but the distribution's Stack version lags behind, so we -recommend running `stack upgrade --binary-only` after installing it. For older stack -versions which do not support `--binary-only`, just `stack upgrade` may work too. 
The -version in Ubuntu 16.04 is too old to upgrade successfully, and so in that case -stack should be installed from a [release -tarball](https://github.com/commercialhaskell/stack/releases). + Stack is used at the command line. Your operating system likely provides + terminal software and alternatives may be available. -## Debian +- :material-text-box-edit-outline:{ .lg .middle } __A code editor__ -Use the [generic Linux option](#linux). + --- -There is also a [Debian -package](https://packages.debian.org/search?keywords=haskell-stack&searchon=names&suite=all§ion=all) -for Stretch and up, but the distribution's Stack version lags behind, so running -`stack upgrade --binary-only` is recommended after installing it. For older stack -versions which do not support `--binary-only`, just `stack upgrade` may work too. + You can use any editor program that can edit text files but code editors + with extensions for Haskell code files are recommended. -## CentOS / Red Hat / Amazon Linux +
-Use the [generic Linux option](#linux). +## Install Stack -There is also an unofficial -[Copr repo](https://copr.fedoraproject.org/coprs/petersen/stack/). -Note that this Stack version may lag behind, -so we recommend running `stack upgrade` after installing it. +Stack can be installed on most Linux distributions, macOS and Windows. -## Fedora +??? question "What about other operating systems?" -Use the [generic Linux option](#linux). + Stack is open to supporting more operating systems. To request support for + an operating system, please submit an + [issue](https://github.com/commercialhaskell/stack/issues/new) at Stack's + GitHub repository. -Fedora includes builds of stack, but the version may lag behind, -so we recommend running `stack upgrade` after installing it. +Stack can be installed directly or by using the GHCup tool. -## openSUSE / SUSE Linux Enterprise +=== "Directly" -Use the [generic Linux option](#linux). + Stack can be installed on various operating systems. -There is also an unofficial SUSE package. Note that this Stack -version may lag behind, so we recommend running `stack upgrade` after installing -it. To install it: + ??? question "Where can binary distributions for Stack be found?" - 1. Add the appropriate OBS repository: + Stack executables are available on the + [releases](https://github.com/commercialhaskell/stack/releases) page of + Stack's GitHub repository. - * openSUSE Tumbleweed + URLs with the format + `https://get.haskellstack.org/stable/.` point to + the latest stable release. The manual download links use those URLs. - all needed is in distribution + ??? question "Does the `sh` installation script have flags and options?" 
- * openSUSE Leap + The `sh` installation script recognises the following optional flags and + options: `-q` suppresses output and specifies non-intervention (likely a + prerequisite for the use of the script in CI environments); `-f` forces + installation, even if an existing Stack executable is detected; and + `-d ` specifies a destination directory for the Stack + executable. - sudo zypper ar http://download.opensuse.org/repositories/devel:/languages:/haskell/openSUSE_Leap_42.1/devel:languages:haskell.repo + === "Linux" - * SUSE Linux Enterprise 12 + For most Linux distributions, on x86_64 or AArch64 machine + architectures, the easiest way to install Stack is to command either: - sudo zypper ar http://download.opensuse.org/repositories/devel:/languages:/haskell/SLE_12/devel:languages:haskell.repo + ~~~text + curl -sSL https://get.haskellstack.org/ | sh + ~~~ - 2. Install: + or: - sudo zypper in stack + ~~~text + wget -qO- https://get.haskellstack.org/ | sh + ~~~ -## Arch Linux + These commands download a script file and run it using `sh`. -There is an official package in the Arch community repository. So you can -install it by simply doing: + ??? question "Will the installation script need root access?" - sudo pacman -S stack + The script at [get.haskellstack.org](https://get.haskellstack.org/) + will ask for root access using `sudo`. It needs such access in order + to use your platform's package manager to install dependencies and + to install to `/usr/local/bin`. If you prefer more control, follow + the manual installation instructions for your platform below. -Note that this version may slightly lag behind, but it should be updated within -the day. The package is also always rebuilt and updated when one of its -dependencies gets an update. + ??? question "Can I download Stack manually?" 
- - [stack](https://www.archlinux.org/packages/community/x86_64/stack/) _latest stable version_ - - [haskell-stack-git](https://aur.archlinux.org/packages/haskell-stack-git/) _git version_ + Yes. Manual download for Linux distributions depends on your machine + architecture, x86_64 or AArch64/ARM64. -In order to use `stack setup` with older versions of GHC or on a 32-bit system, -you may need the -[ncurses5-compat-libs](https://aur.archlinux.org/packages/ncurses5-compat-libs/) -AUR package installed. If this package is not installed, Stack may not be able -to install older (< 7.10.3) or 32-bit GHC versions. + === "x86_64" -If you use the -[ArchHaskell repository](https://wiki.archlinux.org/index.php/ArchHaskell), you -can also get the `haskell-stack-tool` package from there. + * Click + [:material-cloud-download-outline:](https://get.haskellstack.org/stable/linux-x86_64.tar.gz) + to download an archive file with the latest release. -## NixOS + * Extract the archive and place the `stack` executable file + somewhere on your PATH. -Users who follow the `nixos-unstable` channel or the Nixpkgs `master` branch can install the latest `stack` release into their profile by running: + * Ensure you have the required system dependencies installed. + These include GCC, GNU Make, xz, perl, libgmp, libffi, and + zlib. We also recommend Git and GPG. - nix-env -f "" -iA stack + The installation of system dependencies will depend on the + package manager for your Linux distribution. Notes are provided + for Arch Linux, CentOS, Debian, Fedora, Gentoo and Ubuntu. -Alternatively, the package can be built from source as follows. + === "Arch Linux" - 1. Clone the git repo: + ~~~text + sudo pacman -S make gcc ncurses git gnupg xz zlib gmp libffi zlib + ~~~ - git clone https://github.com/commercialhaskell/stack.git + === "CentOS" - 2. 
Create a `shell.nix` file: + ~~~text + sudo yum install perl make automake gcc gmp-devel libffi zlib zlib-devel xz tar git gnupg + ~~~ - cabal2nix --shell ./. --no-check --no-haddock > shell.nix + === "Debian" - Note that the tests fail on NixOS, so disable them with `--no-check`. Also, haddock currently doesn't work for stack, so `--no-haddock` disables it. + ~~~text + sudo apt-get install g++ gcc libc6-dev libffi-dev libgmp-dev make xz-utils zlib1g-dev git gnupg netbase + ~~~ - 3. Install stack to your user profile: + === "Fedora" - nix-env -i -f shell.nix + ~~~text + sudo dnf install perl make automake gcc gmp-devel libffi zlib zlib-devel xz tar git gnupg + ~~~ -For more information on using Stack together with Nix, please see [the NixOS -manual section on -Stack](http://nixos.org/nixpkgs/manual/#how-to-build-a-haskell-project-using-stack). + === "Gentoo" -## Linux (generic) + Ensure you have the `ncurses` package with `USE=tinfo`. Without + it, Stack will not be able to install GHC. -### Installer script + === "Ubuntu" -Run: + ~~~text + sudo apt-get install g++ gcc libc6-dev libffi-dev libgmp-dev make xz-utils zlib1g-dev git gnupg netbase + ~~~ - curl -sSL https://get.haskellstack.org/ | sh + === "AArch64" -or: + * Click + [:material-cloud-download-outline:](https://get.haskellstack.org/stable/linux-aarch64.tar.gz) + to download an archive file with the latest release. - wget -qO- https://get.haskellstack.org/ | sh + * Extract the archive and place the `stack` executable file + somewhere on your PATH. -### Manual download + * Ensure you have the required system dependencies installed. + These include GCC, GNU Make, xz, perl, libgmp, libffi, and + zlib. We also recommend Git and GPG. -* Download the latest release: + The installation of system dependencies will depend on the + package manager for your Linux distribution. Notes are provided + for Arch Linux, CentOS, Debian, Fedora, Gentoo and Ubuntu. 
- * [Linux 64-bit (static)](https://get.haskellstack.org/stable/linux-x86_64.tar.gz) + === "Arch Linux" - + ~~~text + sudo pacman -S make gcc ncurses git gnupg xz zlib gmp libffi zlib + ~~~ - + === "CentOS" - + ~~~text + sudo yum install perl make automake gcc gmp-devel libffi zlib zlib-devel xz tar git gnupg + ~~~ -* Extract the archive and place `stack` somewhere on your `$PATH` (see [Path section below](#path)) + === "Debian" -* Ensure you have required system dependencies installed. These include GCC, GNU make, xz, perl, libgmp, libffi, and zlib. We also recommend Git and GPG. To install these using your package manager: - * Debian / Ubuntu: `sudo apt-get install g++ gcc libc6-dev libffi-dev libgmp-dev make xz-utils zlib1g-dev git gnupg netbase` - * Fedora / CentOS: `sudo dnf install perl make automake gcc gmp-devel libffi zlib zlib-devel xz tar git gnupg` (use `yum` instead of `dnf` on CentOS and Fedora <= 21) - * Fedora 24: In order to use `stack setup` on a 32-bit system, you may - need to run `sudo dnf install ncurses-compat-libs`. If this package is - not installed, Stack may not be able to install 32-bit GHC versions. - Also `sudo dnf install ncurses-compat-libs` if you nee - * Arch Linux: `sudo pacman -S make gcc ncurses git gnupg xz zlib gmp libffi zlib` + ~~~text + sudo apt-get install g++ gcc libc6-dev libffi-dev libgmp-dev make xz-utils zlib1g-dev git gnupg netbase + ~~~ - * In order to use `stack setup` with older versions of GHC or on a - 32-bit system, you may need the - [ncurses5-compat-libs](https://aur.archlinux.org/packages/ncurses5-compat-libs/) - AUR package installed. If this package is not installed, Stack may not - be able to install older (< 7.10.3) or 32-bit GHC versions. - * Gentoo users, make sure to have the `ncurses` package with `USE=tinfo` (without it, stack will not be able to install GHC). + === "Fedora" -* Now you can run `stack` from the terminal. 
+ ~~~text + sudo dnf install perl make automake gcc gmp-devel libffi zlib zlib-devel xz tar git gnupg + ~~~ - + The Arch User Repository (AUR) also provides: -## Path + * a [`stack-bin` package](https://aur.archlinux.org/packages/stack-bin); + and -You can install stack by copying it anywhere on your PATH environment variable. A good place to install is the same directory where stack itself will install executables. On Windows, that directory is `%APPDATA%\local\bin`, e.g. `c:\Users\Michael\AppData\Roaming\local\bin`. For other systems, it's `$HOME/.local/bin`. + * a [`stack-static` package](https://aur.archlinux.org/packages/stack-static) -If you don't have that directory in your PATH, you may need to update your PATH (such as by editing `~/.bashrc`). + === "Debian" -If you're curious about the choice of these paths, see [issue #153](https://github.com/commercialhaskell/stack/issues/153) + There are Debian + [packages](https://packages.debian.org/search?keywords=haskell-stack&searchon=names&suite=all§ion=all) + for Buster and up. However, the distribution's Stack version + lags behind. -## Shell auto-completion + === "Fedora" -To get tab-completion of commands on bash, just run the following (or add it to -`.bashrc`): + Fedora includes Stack, but its Stack version may lag behind. - eval "$(stack --bash-completion-script stack)" + === "NixOS" -For more information and other shells, see [the shell auto-completion page](shell_autocompletion.md) + Users who follow the `nixos-unstable` channel or the Nixpkgs + `master` branch can install the latest Stack release into their + profile with the command: -## China-based users + ~~~text + nix-env -f "" -iA stack + ~~~ -If you're attempting to install stack from within China: + Alternatively, the package can be built from source as follows. -* As of 2020-02-24, the download link has limited connectivity from within mainland China. 
If this is the case, please proceed by manually downloading (ideally via a VPN) and installing stack per the instructions found on this page pertinent to your OS. + 1. Clone the git repo, with the command: -* After install, your ~/.stack/config.yaml will need to be configured before stack can download large files consistently from within China (without reliance on a VPN). Please add the following to the bottom of the ~/.stack/config.yaml file (for Windows: use the %STACK_ROOT%\config.yaml): + ~~~text + git clone https://github.com/commercialhaskell/stack.git + ~~~ -``` -###ADD THIS IF YOU LIVE IN CHINA -setup-info-locations: -- "http://mirrors.tuna.tsinghua.edu.cn/stackage/stack-setup.yaml" -urls: - latest-snapshot: http://mirrors.tuna.tsinghua.edu.cn/stackage/snapshots.json + 2. Create a `shell.nix` file with the command: -package-indices: - - download-prefix: http://mirrors.tuna.tsinghua.edu.cn/hackage/ - hackage-security: - keyids: - - 0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d - - 1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42 - - 280b10153a522681163658cb49f632cde3f38d768b736ddbc901d99a1a772833 - - 2a96b1889dc221c17296fcc2bb34b908ca9734376f0f361660200935916ef201 - - 2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3 - - 51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921 - - 772e9f4c7db33d251d5c6e357199c819e569d130857dc225549b40845ff0890d - - aa315286e6ad281ad61182235533c41e806e5a787e0b6d1e7eef3f09d137d2e9 - - fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0 - key-threshold: 3 - ignore-expiry: no -``` + ~~~text + cabal2nix --shell ./. --no-check --no-haddock > shell.nix + ~~~ -## Using an http proxy + Note that the tests fail on NixOS, so disable them with + `--no-check`. Also, Haddock currently does not work for + Stack, so `--no-haddock` disables it. 
-To use `stack` behind a http proxy with ip address *IP* and port *PORT*, first set up an environment variable `http_proxy` and then run the stack command. _e.g._ + 3. Install Stack to your user profile with the command: -``` -$ export http_proxy=IP:PORT -$ stack install -``` + ~~~text + nix-env -i -f shell.nix + ~~~ -Note that on most operating systems, it is not mandatory for programs to follow the "system-wide" http proxy. Some programs, such as browsers, do honor this "system-wide" http proxy setting, while other programs, including bash, do not. That means configuring "http proxy setting" in your Control Panel (Windows) or System Preferences (Mac) would not result in `stack` traffic going through the proxy. + For more information on using Stack together with Nix, please + see the + [NixOS manual section on Stack](http://nixos.org/nixpkgs/manual/#how-to-build-a-haskell-project-using-stack). -## Upgrade + === "SUSE" -There are essentially four different approaches to upgrade: + There is also an unofficial package for openSUSE or SUSE Linux + Enterprise. Its Stack version may lag behind. To install it: -* The `stack` tool itself ships with an `upgrade` command, which download a `stack` binary or build it from source and install it to the default install path (e.g. `~/.local/bin` or `%APPDATA%\local\bin`; see the [Path](#Path) section above). You can use `stack upgrade` to get the latest official release, and `stack upgrade --git` to install from Git and live on the bleeding edge. Make sure the default install directory is on your `PATH` and takes precedence over the system installed `stack`, or copy `stack` from that directory to the system location afterward. For more information, see [this discussion](https://github.com/commercialhaskell/stack/issues/237#issuecomment-126793301). 
+ === "openSUSE Tumbleweed" -* If you're using a package manager and are happy with sticking with the officially released binaries from the distribution (which may the lag behind latest version of Stack significantly), simply follow your normal package manager strategies for upgrading (e.g. `apt-get update && apt-get upgrade`). + ~~~text + sudo zypper in stack + ~~~ -* The get.haskellstack.org script supports the `-f` argument to over-write the current stack executable. For example: + === "openSUSE Leap" - curl -sSL https://get.haskellstack.org/ | sh -s - -f + ~~~text + sudo zypper ar http://download.opensuse.org/repositories/devel:/languages:/haskell/openSUSE_Leap_42.1/devel:languages:haskell.repo + sudo zypper in stack + ~~~ - or: + === "SUSE Linux Enterprise 12" - wget -qO- https://get.haskellstack.org/ | sh -s - -f + ~~~text + sudo zypper ar http://download.opensuse.org/repositories/devel:/languages:/haskell/SLE_12/devel:languages:haskell.repo + sudo zypper in stack + ~~~ -* Manually follow the steps above to download the newest binaries from the release page and replace the old binary. + === "Ubuntu" -## Install Older Versions + There are Ubuntu + [packages](http://packages.ubuntu.com/search?keywords=haskell-stack&searchon=names&suite=all§ion=all) + for Ubuntu 20.04 and up. -To install a specific version of stack, navigate to the desired version on -[the GitHub release page](https://github.com/fpco/stack/releases), -and click the appropriate link under its "Assets" drop-down menu. + ??? question "Can I set up auto-completion of Stack commands?" -Alternatively, use the URL -`https://github.com/commercialhaskell/stack/releases/download/vVERSION/stack-VERSION-PLATFORM.EXTENSION`. -For example, the tarball for stack 2.1.0.1, osx-x86_64 is at -`https://github.com/commercialhaskell/stack/releases/download/v2.1.0.1/stack-2.1.0.1-osx-x86_64.tar.gz`. + Yes. For further information, see the + [shell auto-completion](topics/shell_autocompletion.md) + documentation. 
-Here's a snippet for `appveyor.yml` files, borrowed from `dhall`'s -[`appveyor.yml`](https://github.com/dhall-lang/dhall-haskell/blob/1079b7a3a7a6922f72a373e47daf6f1b74f128b1/appveyor.yml). -Change the values of PATH and VERSION as needed. + === "macOS" - install: - - set PATH=C:\Program Files\Git\mingw64\bin;%PATH% - - curl --silent --show-error --output stack.zip --location "https://github.com/commercialhaskell/stack/releases/download/v%STACK_VERSION%/stack-%STACK_VERSION%-windows-x86_64.zip" - - 7z x stack.zip stack.exe - - stack setup > nul - - git submodule update --init --recursive + Most users of Stack on macOS will also have up to date tools for + software development. + + ??? question "What if I am not sure that I have those tools?" + + macOS does not come with all the tools required for software + development but a collection of useful tools, known as the Xcode + Command Line Tools, is readily available. A version of that + collection is provided with each version of Xcode (Apple’s + integrated development environment) and can also be obtained from + Apple separately from Xcode. The collection also includes the macOS + SDK (software development kit). The macOS SDK provides header files + for macOS APIs. + + If you use a command that refers to a common Xcode Command Line Tool + and the Xcode Command Line Tools are not installed, macOS may prompt + you to install the tools. + + macOS also comes with a command line tool, `xcode-select`, that can + be used to obtain the Xcode Command Line Tools. Command + `xcode-select --print-path` to print the path to the currently + selected (active) developer directory. If the directory does not + exist, or is empty, then the Xcode Command Line Tools are not + installed. + + If the Xcode Command Line Tools are not installed, command + `xcode-select --install` to open a user interface dialog to request + automatic installation of the tools. 
+ + An upgrade of macOS may sometimes require the existing Xcode Command + Line Tools to be uninstalled and an updated version of the tools to + be installed. The existing tools can be uninstalled by deleting the + directory reported by `xcode-select --print-path`. + + If, after the installation of Stack, running `stack setup` fails + with: + ~~~text + configure: error: cannot run C compiled programs. + ~~~ + + that indicates that the Xcode Command Line Tools are not installed. + + If building fails with messages that `*.h` files are not found, that + may also indicate that Xcode Command Line Tools are not up to date. + + Xcode 10 provided an SDK for macOS 10.14 (Mojave) and + [changed the location](https://developer.apple.com/documentation/xcode-release-notes/xcode-10-release-notes#Command-Line-Tools) + of the macOS system headers. As a workaround, an extra package was + provided by Apple which installed the headers to the base system + under `/usr/include`. + + ??? question "What versions of the LLVM compiler and toolchain are supported?" + + The documentation for each version of GHC identifies the versions of + LLVM that are supported. That is summarised in the table below for + recent versions of GHC: + + |GHC version|LLVM versions| + |-----------|-------------| + |9.12.2 |11 to 15 | + |9.10.3 |11 to 15 | + |9.8.4 |11 to 15 | + |9.6.7 |11 to 15 | + |9.4.8 |10 to 14 | + |9.2.8 |9 to 12 | + |9.0.2 |9, 10 or 12 | + |8.10.7 |9 to 12 | + |8.8.4 |7 | + |8.6.5 |6 | + |8.4.4 |5 | + + From late 2020, Apple began a transition from Mac computers with Intel + processors (Intel-based Mac) to + [Mac computers with Apple silicon](https://support.apple.com/en-gb/HT211814). + + === "Intel-based" + + Intel-based Mac computers have processors with x86_64 architectures. 
+ For most Intel-based Mac computers, the easiest way to install Stack + is to command either: + + ~~~text + curl -sSL https://get.haskellstack.org/ | sh + ~~~ + + or: + + ~~~text + wget -qO- https://get.haskellstack.org/ | sh + ~~~ + + These commands download a script file and run it using `sh`. + + ??? question "Will the installation script need root access?" + + The script at + [get.haskellstack.org](https://get.haskellstack.org/) + will ask for root access using `sudo`. It needs such access in + order to use your platform's package manager to install + dependencies and to install to `/usr/local/bin`. If you prefer + more control, follow the manual installation instructions below. + + ??? question "Can I download Stack manually?" + + Yes: + + * Click + [:material-cloud-download-outline:](https://get.haskellstack.org/stable/osx-x86_64.tar.gz) + to download an archive file with the latest release for x86_64 + architectures. + + * Extract the archive and place `stack` somewhere on your PATH. + + Now you can run Stack from the command line in a terminal. + + === "Apple silicon" + + Mac computers with Apple silicon have an M series chip. These chips + use an architecture known as ARM64 or AArch64. + + For Mac computers with Apple silicon, the easiest way to install + Stack is to command either: + + ~~~text + curl -sSL https://get.haskellstack.org/ | sh + ~~~ + + or: + + ~~~text + wget -qO- https://get.haskellstack.org/ | sh + ~~~ + + These commands download a script file and run it using `sh`. + + ??? question "Will the installation script need root access?" + + The script at + [get.haskellstack.org](https://get.haskellstack.org/) + will ask for root access using `sudo`. It needs such access in + order to use your platform's package manager to install + dependencies and to install to `/usr/local/bin`. If you prefer + more control, follow the manual installation instructions below. + + ??? question "What if I get error `C compiler cannot build executables`?" 
+ + The installation of Stack or some packages (e.g. `network`) + requiring C source compilation might fail with: + + ~~~text + configure: error: C compiler cannot build executables + ~~~ + + In that case you should pass `-arch arm64` as part of the + `CFLAGS` environment variable. This setting will be picked up by + the C compiler of your choice. + + ~~~bash + # Assuming BASH below + + # passing CFLAGS in-line with the command giving rise to the error + CFLAGS="-arch arm64 ${CFLAGS:-}" some_command_to_install_stack + CFLAGS="-arch arm64 ${CFLAGS:-}" stack [build|install] + + # -- OR -- + + # ~/.bash_profile + # NOTE: only do this if you do not have to cross-compile, or remember to unset + # CFLAGS when needed + export CFLAGS="-arch arm64 ${CFLAGS:-}" + ~~~ + + The setting instructs the C compiler to compile objects for + ARM64. These can then be linked with libraries built for ARM64. + Without the instruction, the C compiler, invoked by Cabal + running in x86-64, would compile x86-64 objects and attempt to + link them with existing ARM64 libraries, resulting in the error + above. + + ??? question "Can I download Stack manually?" + + Yes: + + * Click + [:material-cloud-download-outline:](https://get.haskellstack.org/stable/osx-aarch64.tar.gz) + to download an archive file with the latest release for + AArch64 architectures. + + * Extract the archive and place `stack` somewhere on your PATH. + + Now you can run Stack from the command line in a terminal. + + ??? question "Can I use the Homebrew package manager to get Stack?" + + [Homebrew](https://brew.sh/) is a popular package manager for macOS. + If you have its `brew` tool installed, you can just command: + + ~~~text + brew install haskell-stack + ~~~ + + * The Homebrew formula and bottles are **unofficial** and lag + slightly behind new Stack releases, but tend to be updated within + a day or two. 
+ + * Normally, Homebrew will install from a pre-built binary (aka "pour + from a bottle"), but if it starts trying to build everything from + source (which will take hours), see + [their FAQ on the topic](https://github.com/Homebrew/brew/blob/master/docs/FAQ.md#why-do-you-compile-everything). + + ??? question "Can I set up auto-completion of Stack commands?" + + Yes. For further information, see the + [shell auto-completion](topics/shell_autocompletion.md) + documentation. + + === "Windows" + + Most computers using Windows have a x86_64 machine architecture. More + recently, Microsoft has provided Windows on Arm that runs on other + processors. + + === "x86_64" + + On 64-bit Windows, the easiest way to install Stack is to download + and use the + [Windows installer](https://get.haskellstack.org/stable/windows-x86_64-installer.exe). + + !!! info "Stack root" + + By default, the Windows installer will set the + [Stack root](topics/stack_root.md) by setting the `STACK_ROOT` + environment variable to `C:\sr`. + + !!! note "Anti-virus software" + + Systems with antivirus software may need to add Stack to the + list of 'trusted' applications. + + You may see a "Windows Defender SmartScreen prevented an + unrecognized app from starting" warning when you try to run the + installer. If so, click on **More info**, and then click on the + **Run anyway** button that appears. + + ??? warning "I have a Windows username with a space in it" + + GHC 9.4.1 and later have a bug which means they do not work if + the path to the `ghc` executable has a space character in it. + The default location for Stack's 'programs' directory will have + a space in the path if the value of the `USERNAME` environment + variable includes a space. + + A solution is to configure Stack to use a different location for + its 'programs' directory. 
For further information, see the + [`local-programs-path`](configure/yaml/non-project.md#local-programs-path) + non-project specific configuration option documentation. + + ??? warning "Stack 2.9.1, 2.9.3 and 2.11.1: Long user PATH environment variable" + + The Windows installer for Stack 2.9.1, 2.9.3 and 2.11.1 (only) + will replace the user `PATH` environment variable (rather than + append to it) if a 1024 character limit is exceeded. If the + content of your existing user `PATH` is long, preserve it before + running the installer. + + We recommend installing to the default location with the installer, + as that will make `stack install` and `stack upgrade` work correctly + out of the box. + + ??? question "Can I download Stack manually?" + + Yes: + + * Click + [:material-cloud-download-outline:](https://get.haskellstack.org/stable/windows-x86_64.zip) + to download an archive file with the latest release. + + * Unpack the archive and place `stack.exe` somewhere on your + PATH. + + Now you can run Stack from the command line in a terminal. + + === "Windows on Arm" + + The GHC project does not yet provide a version of GHC that runs on + Windows on Arm. + + ??? note "China-based users: download" + + As of 24 February 2020, the download link has limited connectivity from + within mainland China. If you experience this, please proceed by + manually downloading (ideally via a VPN) and installing Stack following + the instructions on this page that apply to your operating system. + +=== "GHCup" + + The separate [GHCup](https://www.haskell.org/ghcup/) project provides a tool + that can be used to install Stack and other Haskell-related tools, including + GHC and + [Haskell Language Server](https://github.com/haskell/haskell-language-server) + (HLS). HLS is a program that is used by Haskell extensions for popular code + editors. + + GHCup provides Stack for some combinations of machine architecture and + operating system not provided elsewhere. 
+ + By default, the script to install GHCup (which can be run more than once) + also configures Stack so that if Stack needs a version of GHC, GHCup takes + over obtaining and installing that version. + +??? note "China-based users: configuration" + + After installation, Stack will need to be configured before it can download + large files consistently from within China (without reliance on a VPN). + Please add the following to the bottom of the + [global configuration file](configure/yaml/index.md) (`config.yaml`): + + ~~~yaml + ###ADD THIS IF YOU LIVE IN CHINA + setup-info-locations: + - "http://mirrors.tuna.tsinghua.edu.cn/stackage/stack-setup.yaml" + urls: + latest-snapshot: http://mirrors.tuna.tsinghua.edu.cn/stackage/snapshots.json + + package-index: + download-prefix: http://mirrors.tuna.tsinghua.edu.cn/hackage/ + ~~~ + +??? question "What if I am using an HTTP proxy?" + + To use Stack behind a HTTP proxy with IP address *IP* and port *PORT*, first + set up an environment variable `http_proxy` and then run the Stack command. + For example: + + === "Unix-like" + + ~~~text + export http_proxy=IP:PORT + stack install + ~~~ + + On most operating systems, it is not mandatory for programs to follow + the 'system-wide' HTTP proxy. Some programs, such as browsers, do honor + this 'system-wide' HTTP proxy setting, while other programs, including + Bash, do not. That means configuring 'http proxy setting' in your System + Preferences (macOS) would not result in Stack traffic going through the + proxy. + + === "Windows" + + ~~~text + $Env:http_proxy=IP:PORT + stack install + ~~~ + + It is not mandatory for programs to follow the 'system-wide' HTTP proxy. + Some programs, such as browsers, do honor this 'system-wide' HTTP proxy + setting, while other programs do not. That means configuring + 'http proxy setting' in your Control Panel would not result in Stack + traffic going through the proxy. 
+ + === "Windows (Command Prompt)" + + ~~~text + set http_proxy=IP:PORT + stack install + ~~~ + + It is not mandatory for programs to follow the 'system-wide' HTTP proxy. + Some programs, such as browsers, do honor this 'system-wide' HTTP proxy + setting, while other programs do not. That means configuring + 'http proxy setting' in your Control Panel would not result in Stack + traffic going through the proxy. + +## Upgrade Stack + +The Stack project recommends the use of the latest released version of Stack. + +If Stack is already installed, upgrading it depends on whether you are using +Stack or GHCup to manage versions of Stack. + +=== "Stack" + + ??? warning "If you use GHCup to manage versions of Stack, use it consistently" + + If you used GHCup to install Stack, you should also use GHCup to upgrade + Stack. + + GHCup uses an executable named `stack` to manage versions of Stack, + through a file `stack.shim`. Stack will likely overwrite the executable + on upgrade. + + There are different approaches to upgrading Stack, which vary as between + Unix-like operating systems (including macOS) and Windows. + + === "Unix-like" + + There are essentially four different approaches: + +
+ + - __Use the `stack upgrade` command__ + + --- + + For further information, see the + [`stack upgrade`](commands/upgrade_command.md) documentation. + + - __Manual download__ + + --- + + Follow the steps above to download manually the newest executable + and replace the old executable. + + - __Use the `sh` installation script__ + + --- + + Use the `get.haskellstack.org` script with its `-f` flag to + overwrite the current Stack executable. For example, command + either: + + ~~~text + curl -sSL https://get.haskellstack.org/ | sh -s - -f + ~~~ + + or: + + ~~~text + wget -qO- https://get.haskellstack.org/ | sh -s - -f + ~~~ + + - __Use a package manager__ + + --- + + Follow your normal package manager approach to upgrading. For + example: + + ~~~text + apt-get update + apt-get upgrade + ~~~ + + Be aware that officially released binaries from the distribution may + lag behind the latest version of Stack significantly. + +
+ + === "Windows" + + There are essentially two different approaches: + +
+ + - __Use the `stack upgrade` command__ + + --- + + For further information, see the + [`stack upgrade`](commands/upgrade_command.md) documentation. + + - __Manual download__ + + --- + + Follow the steps above to download manually the newest executable + and replace the old executable. + +
+ +=== "GHCup" + + The separate [GHCup](https://www.haskell.org/ghcup/) project provides + guidance about how to use GHCup to manage versions of tools such as Stack. diff --git a/doc/lock_files.md b/doc/lock_files.md deleted file mode 100644 index 3202019805..0000000000 --- a/doc/lock_files.md +++ /dev/null @@ -1,179 +0,0 @@ -
- -# Lock Files - -Stack attempts to provide reproducible build plans. This involves -reproducibly getting the exact same contents of source packages and -configuration options (like cabal flags and GHC options) for a given -set of input files. There are a few problems with making this work: - -* Entering all of the information to fully provide reproducibility is - tedious. This would include things like Hackage revisions, hashes of - remote tarballs, etc. Users don't want to enter this information. -* Many operations in Stack rely upon a "snapshot hash," which - transitively includes the completed information for all of these - dependencies. If any of that information is missing when parsing the - `stack.yaml` file or snapshot files, it could be expensive for Stack - to calculate it. - -To address this, we follow the (fairly standard) approach of having a -_lock file_. The goal of the lock file is to cache completed -locations of project, snapshot packages and snapshots themselves so that: - -* These files can be stored in source control -* Users on other machines can reuse these lock files and get identical - build plans given that the used local packages and local snapshots are - the same on those machines -* Rerunning `stack build` in the future is deterministic in the build - plan, not depending on mutable state in the world like Hackage - revisions - * **NOTE** If, for example, a tarball available remotely is - deleted or the hash changes, it will not be possible for Stack - to perform the build. However, by deterministic, we mean it - either performs the same build or fails, never accidentally - doing something different. - -This document explains the contents of a lock file, how they are used, -and how they are created and updated. 
- -## stack.yaml and snapshot files - -Relevant to this discussion, the `stack.yaml` file specifies: - -* Resolver (the parent snapshot) -* `extra-deps` - -The resolver can either specify a compiler version or another snapshot -file. This snapshot file can contain the same information referenced -above for a `stack.yaml`, with the following differences: - -* The `extra-deps` are called `packages` -* Drop packages can be included - -Some information in these files can be incomplete. Consider: - -```yaml -resolver: lts-13.9 -packages: [] -extra-deps: -- https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz -``` - -This information is _incomplete_, since the contents of that URL may -change in the future. Instead, you could specify enough information in -the `stack.yaml` file to fully resolve that package. That looks like: - -```yaml -extra-deps: -- size: 1442 - url: https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz - name: acme-missiles - version: '0.3' - sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b - pantry-tree: - size: 226 - sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 -``` - -Users don't particularly feel like writing all of that. Therefore, -it's common to see _incomplete_ information in a `stack.yaml` file. - -Additionally, the `lts-13.9` information is _also_ incomplete. While -we assume in general that LTS snapshots never change, there's nothing -that technically prohibits that from happening. Instead, the complete -version of that field is: - -```yaml -resolver: - size: 496662 - url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/13/9.yaml - sha256: 83de9017d911cf7795f19353dba4d04bd24cd40622b7567ff61fc3f7223aa3ea -``` - -Also something people don't feel like writing by hand. - -## Recursive snapshot layers - -Snapshot files can be _recursive_, where `stack.yaml` refers to -`foo.yaml`, which refers to `bar.yaml`, which refers to `baz.yaml`. 
A -local snapshot file can refer to a remote snapshot file (available via -an HTTP(S) URL). - -We need to encode information from _all_ of these snapshot layers and -the `stack.yaml` file in the lock file, to ensure that we can detect -if anything changes. - -## Performance - -In addition to acting as a pure correctness mechanism, the design of a -lock file given here also works as a performance improvement. Instead -of requiring that all snapshot files be fully parsed on each Stack -invocation, we can store information in the lock file and bypass -parsing of the additional files in the common case of no changes. - -## Lock file contents - -The lock file contains the following information: - -* Completed package locations for both `extra-deps` and packages in - snapshot files - * **NOTE** This only applies to _immutable_ packages. Mutable - packages are not included in the lock file. -* Completed information for the snapshot locations - -It looks like the following: - -```yaml -# Lock file, some message about the file being auto-generated -snapshots: - # Starts with the snapshot specified in stack.yaml, - # then continues with the snapshot specified in each - # subsequent snapshot file - - original: - foo.yaml # raw content specified in a snapshot file - completed: - file: foo.yaml - sha256: XXXX - size: XXXX - - original: - lts-13.9 - completed: - size: 496662 - url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/13/9.yaml - sha256: 83de9017d911cf7795f19353dba4d04bd24cd40622b7567ff61fc3f7223aa3ea - -packages: -- original: https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz - completed: - size: 1442 - url: https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz - name: acme-missiles - version: '0.3' - sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b - pantry-tree: - size: 226 - sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 -``` - -## Creation - -Whenever a 
`stack.yaml` file is loaded, Stack checks for a lock file -in the same file path, with a `.lock` extension added. For example, if -you run `stack build --stack-yaml stack-11.yaml`, it will use a lock -file in the location `stack-11.yaml.lock`. For the rest of this -document, we'll assume that the files are simply `stack.yaml` and -`stack.yaml.lock`. - -If the lock file does not exist, it will be created by: - -* Loading the `stack.yaml` -* Loading all snapshot files -* Completing all missing information -* Writing out the new `stack.yaml.lock` file - -## Update procedure - -When loading a Stack project all completed package or snapshot locations -(even when they were completed using information from a lock file) get -collected to form a new lock file in memory and compare against the one -on disk, writing if there are any differences. diff --git a/doc/maintainers/7zip.md b/doc/maintainers/7zip.md new file mode 100644 index 0000000000..19d1149b2f --- /dev/null +++ b/doc/maintainers/7zip.md @@ -0,0 +1,92 @@ +
+ +# Upgrading 7-Zip + +When installing GHC or MSYS2 on Windows, Stack will also install +[7-Zip](https://www.7-zip.org/). 7-Zip is a file archiver and is used by Stack +to extract files from archives. This section explains the steps required to +upgrade the 7-Zip version used by Stack. The 7-Zip functionality used by Stack +is mature and stable. It is anticipated that the Stack-supplied 7-Zip will not +need to be updated frequently. On 10 September 2022, it was updated from 7-Zip +9.20 (released on 18 November 2010) to 7-Zip 22.01 (released on 15 July 2022). + +1. Download the latest installer for 64-bit x64 Windows from 7-Zip's website. + +2. Run the installer and install to the default location + (`C:\Program Files\7-Zip`). The four relevant files from those installed + will be: + + ~~~text + 7z.exe # 7-Zip Console + 7z.dll # 7-Zip Engine + license.txt # 7-Zip License + readme.txt # 7-Zip Overview + ~~~ + +3. In the + [commercialhaskell/stackage-content](https://github.com/commercialhaskell/stackage-content) + GitHub repository, create a new draft release tagged and named `7z-XX.YY`, + where `XX.YY` is the 7-Zip version number. + +4. Upload the four relevant files in step 2 above into the draft release. + +5. Provide a description for the release. For example: + + ~~~text + 7-Zip 22.01 (2022-07-15) for Windows 64-bit x64. + ~~~ + +6. Publish the release. + +7. Changes need to be made to the + [stackage-content/stack/stack-setup-2.yaml](https://github.com/commercialhaskell/stackage-content/blob/master/stack/stack-setup-2.yaml) + file, to switch over to using the newly uploaded files. 
For example + (extract): + + ~~~yaml + sevenzexe-info: + url: "https://github.com/commercialhaskell/stackage-content/releases/download/7z-22.01/7z.exe" + content-length: 545280 + sha256: 254cf6411d38903b2440819f7e0a847f0cfee7f8096cfad9e90fea62f42b0c23 + + sevenzdll-info: + url: "https://github.com/commercialhaskell/stackage-content/releases/download/7z-22.01/7z.dll" + content-length: 1814016 + sha256: 73578f14d50f747efa82527a503f1ad542f9db170e2901eddb54d6bce93fc00e + ~~~ + + The `content-length:` key's value is the size of the file in bytes. It can + be obtained from the `Length` field of the `dir` command. The `sha256:` + key's value can be obtained from the commands (in PowerShell): + + ~~~text + (Get-FileHash 7z.exe -Algorithm SHA256).Hash.ToLower() + (Get-FileHash 7z.dll -Algorithm SHA256).Hash.ToLower() + ~~~ + + The `sha256:` key only accepts lowercase hash results as values. + +8. The changed `stack-setup-2.yaml` file should be tested locally. This can be + done by: + + * temporarily disabling the existing local copy of 7-Zip by changing the + name of the `7z.exe` and `7z.dll` files in the `stack path --programs` + directory; + + * identifying a version of GHC not already installed in the + `stack path --programs` directory; and + + * executing the command: + + ~~~text + stack --snapshot <snapshot> setup --setup-info-yaml <path to stack-setup-2.yaml> + ~~~ + + where `<snapshot>` requires the missing version of GHC. + + If all is well, the command should proceed to download the missing version + of GHC, download the `7z.exe` and `7z.dll` files, and use the 7-Zip version + to extract files from the GHC archive. + +9. Raise a pull request on `commercialhaskell/stackage-content` for the + changes to the locally-tested `stack-setup-2.yaml` file. 
diff --git a/doc/maintainers/archive/README.md b/doc/maintainers/archive/README.md new file mode 100644 index 0000000000..60a66d31fe --- /dev/null +++ b/doc/maintainers/archive/README.md @@ -0,0 +1,4 @@ +# README + +The `doc/maintainers/archive` directory contains documentation that appears to +have been superseded but is preserved in case it is, in fact, useful. diff --git a/doc/maintainers/archive/docker.md b/doc/maintainers/archive/docker.md new file mode 100644 index 0000000000..5550b38956 --- /dev/null +++ b/doc/maintainers/archive/docker.md @@ -0,0 +1,180 @@ +
+ +# Docker images + +Each Stackage LTS release has two corresponding docker images in the +[fpco/stack-build](https://hub.docker.com/r/fpco/stack-build/) and +[fpco/stack-build-small](https://hub.docker.com/r/fpco/stack-build-small/) +repositories. The former contains every system library needed to build any +package in the snapshot, while the latter only contains a minimal set of system +libraries for basic programs. + +The Dockerfiles for building these images are in +[stackage/automated/dockerfiles](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/). +There is also a +[build.sh](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/build.sh) +script to help with building and pushing the images (see the +[README](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/README.md) +for usage instructions). + +## Build images for new minor LTS snapshot + +In most cases, a new minor LTS snapshot just needs the previous LTS image to be +re-tagged and pushed. If the image needs a patch for the new minor LTS snapshot, +see the next section. + +Below, replace `.` with the minor LTS snapshot version. + +- Check out the `stable` branch of the + [Stack repository](https://github.com/commercialhaskell/stack/). + +- Build and push the images (both standard and `small` variants) using the + [build.sh](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/build.sh) + script: + + ~~~text + ./build.sh --push lts-. + ./build.sh --push --small lts-. + ~~~ + +## Patch images for new minor LTS snapshot + +Below, replace `.` with the minor LTS snapshot version. and `.` +with the previous minor LTS snapshot version. + +- Check out the `stable` branch of the + [Stack repository](https://github.com/commercialhaskell/stack/). + +- In `stackage/automated/dockerfiles`, create a new `lts-.` directory. 
+ +- Create `lts-./Dockerfile`, starting with: + + ~~~dockerfile + FROM $DOCKER_REPO:lts-. + ~~~ + +- Add layers for any changes that need to be made to the image. + +- Build the new image using the + [build.sh](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/build.sh) + script: + + ~~~text + ./build.sh lts-. + ./build.sh --small lts-. + ~~~ + +- Test the new image. For example, command: + + ~~~text + stack --snapshot=lts-. new image-test + cd image-test + stack --docker build + ~~~ + + This should use the image you just built. Make sure you test that the new + image actually contains the desired changes. + +- Follow the process in the previous section to push the images. + +## Build images for new major LTS snapshot release + +### Test a Dockerfile prior to new major LTS snapshot release + +Replace `` with major version of new LTS snapshot, and `` with previous +major LTS snapshot version. + +- Check out the `stable` branch of the + [Stack repository](https://github.com/commercialhaskell/stack/). + +- In `stackage/automated/dockerfiles`, create a new `lts-.0` directory. + +- Copy `lts-.0/Dockerfile` to `lts-.0/Dockerfile`. + +- Check the `FROM` statement, make sure the Ubuntu version matches the Ubuntu + version used in the + [Stackage Dockerfile](https://github.com/commercialhaskell/stackage/blob/master/Dockerfile). + +- Update `GHC_VERSION` to match the version used by the + [latest nightly snapshot](https://www.stackage.org/nightly). + +- Set `LTS_SLUG` to the + [latest nightly snapshot](https://www.stackage.org/nightly) (this will be + temporary until the major LTS snapshot is actually released, at which point it + will be updated to `lts-.0`). + +- Update `PID1_VERSION` and `STACK_VERSION` to the latest versions of those + tools. + +- Make sure `CUDA_VERSION` and `JVM_PATH` match what + [debian-bootstrap.sh](https://github.com/commercialhaskell/stackage/blob/master/debian-bootstrap.sh) + uses. 
+ +- Update `LLVM_PATH` to the version required for the GHC version. This will be + shown on the download page for the GHC version, which you can reach from + https://www.haskell.org/ghc/. It should match the base directory used in + `CLANG_PURE_LLVM_INCLUDE_DIR` in + [debian-bootstrap.sh](https://github.com/commercialhaskell/stackage/blob/master/debian-bootstrap.sh) + (leaving off the `/include` suffix). + +- Update `BOOTSTRAP_COMMIT` to the Git commit ID of the latest + [debian-bootstrap.sh](https://github.com/commercialhaskell/stackage/blob/master/debian-bootstrap.sh). + +- Check for any other `lts-.*/Dockerfile`s and make sure + `lts-.0/Dockerfile` includes anything that was updated in those, if they're + still relevant for LTS-15 (note that a newer + [debian-bootstrap.sh](https://github.com/commercialhaskell/stackage/blob/master/debian-bootstrap.sh) + may already include those changes, so check there first). + +### Perform basic tests + +- Build the image: `docker build -t local/stack-build lts-.0/`. + +- Ensure that all the directories listed in `PATH`, `CUDA_PATH`, and `CPATH` and + any other path-like environment variables actually exist in the image. + +- Try building a test package with the new image. Command: + + ~~~text + stack --snapshot=nightly new image-test` + cd image-test + stack --docker --docker-image=local/stack-build build + ~~~ + + This should build without needing to install GHC. + +- Build the "small" variant. Command: + + ~~~text + docker build -t local/stack-build-small --build-arg "VARIANT=small" lts-.0/ + ~~~ + +- Try building a test package with the new small image. Command: + + ~~~text + stack --snapshot=nightly new small-image-test + cd small-image-test + stack --docker --docker-image=local/stack-build-small build + ~~~ + + This should build without needing to install GHC. 
+ +### Build real image once major LTS snapshot has been released + +- Update `LTS_SLUG` to `lts-.0` + +- Update `BOOTSTRAP_COMMIT` to the git commit ID of the latest + [debian-bootstrap.sh](https://github.com/commercialhaskell/stackage/blob/master/debian-bootstrap.sh). + +- Repeat the tests above, except use `lts-.0` instead of `nightly`. + +- Build and push the real images (both standard and `small` variants) using the + [build.sh](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/build.sh) + script: + + ~~~text + ./build.sh --push lts-.0 + ./build.sh --push --small lts-.0 + ~~~ + +- Commit and push the new Dockerfile to the `stable` branch. diff --git a/doc/maintainers/archive/docker_images.md b/doc/maintainers/archive/docker_images.md new file mode 100644 index 0000000000..e5f9a933b9 --- /dev/null +++ b/doc/maintainers/archive/docker_images.md @@ -0,0 +1,34 @@ +
 + +# Docker images + +Docker Hub includes Docker images under +[`fpco/stack-build`](https://hub.docker.com/r/fpco/stack-build). + +To update those images with a new version of Stack: + +1. Under + [commercialhaskell/stackage/automated/dockerfiles](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/), + add `lts-X.Y/Dockerfile` (where `X.Y` is the latest Stackage Haskell LTS + version), containing (where `X.Z` is the previous Haskell LTS version, + and `X.Y.Z` is the newly released Stack version): + + ~~~dockerfile + FROM $DOCKER_REPO:lts-X.Z + ARG STACK_VERSION=X.Y.Z + RUN wget -qO- https://github.com/commercialhaskell/stack/releases/download/v$STACK_VERSION/stack-$STACK_VERSION-linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C /usr/local/bin '*/stack' + ~~~ + +2. Run `./build.sh lts-X.Y`. Then test that the new image has the new + version of Stack. For example, command: + + ~~~text + docker run --rm fpco/stack-build:lts-X.Y stack --version + ~~~ + +3. Use the following commands to push the new image to the registry: + + ~~~text + ./build.sh --push lts-X.Y + ./build.sh --push --small lts-X.Y + ~~~ diff --git a/doc/maintainers/archive/releases.md b/doc/maintainers/archive/releases.md new file mode 100644 index 0000000000..5cc3e0b936 --- /dev/null +++ b/doc/maintainers/archive/releases.md @@ -0,0 +1,308 @@ +# Archive - from releases.md + +## Build Linux static binary distribution with Nix + +**NOTE: We have switched back to Alpine Linux for building static binaries, done by CI. Leaving this section for future reference.** + +These instructions are tested on Ubuntu 16.04, but theoretically should work on +any Linux distribution. 
+ +- Install nix (tested with v2.0.4 and v2.1.2, but should work with any) + + ~~~sh + curl https://nixos.org/nix/install | sh + ~~~ + +- Install and authenticate cachix (first two steps at https://cachix.org/ after + signing up) + + +- Add nh2's cache: + + ~~~sh + cachix use static-haskell-nix + ~~~ + + !!! note + + To clear cache index, use `rm $HOME/.cache/nix/binary-cache-v5.sqlite*` + (useful if someone else uploads new stuff to the cache and you want to use + it right away). The recent `narinfo-cache-positive`/`negative-ttl` options + might also help. + +- Check out Stack commit to be released to `~/stack-release` (or elsewhere, in + which case adjust following instructions) + +- `rm -f ~/stack-release/*.cabal`, to ensure it is regenerated + +- clone https://github.com/nh2/static-haskell-nix recursively (last known to + work with commit 725ceb2479637b3b3ab29298a1bc0e48c54984c9) + +- in `static-stack` directory, run (from `static-stack/README.md`): + + ~~~sh + $(nix-build --no-link -A run-stack2nix-and-static-build-script --argstr stackDir ~/stack-release) + ~~~ + +- Run integration tests against the static binary [TODO: improve this process by + adding full support in `release.hs` or the integration tests for testing a + binary built elsewhere] + + - In `~/stack-release`, run + `stack build --flag stack:integration-tests stack:stack-integration-test` + - Copy binary built above to place where `stack build` normally puts the + `stack binary` (e.g. 
+ `cp /nix/store/7vl1xvlbbqjvf864inz5vw7z2z1k4nmw-stack-2.1.0.1/bin/stack /home/vagrant/stack-release/.stack-work/install/x86_64-linux/custom-snapshot-for-building-stack-with-ghc-8.2.2-PyNP5UoO8Ott/8.2.2/bin/stack`; + figure it out using `stack exec which stack`) + - Run `stack exec stack-integration-test` + +- Copy the binary built above (in `/nix/store/XXX-stack-X.Y.Z/bin/stack`) to + `~/stack-release/_release/bin/stack-X.Y.Z-linux-x86_64/stack` (replace `X.Y.Z` + with the version, and the `/nix/store/*` path with that output at the end of + the previous command) + +- Package, sign, and upload to GitHub using Stack's release script in the stack + directory: + + ~~~sh + cd ~/stack-release + stack etc/scripts/release.hs --no-test-haddocks --binary-variant=static --build-args=--dry-run upload + ~~~ + + (adding `--build-args=--dry-run` ensures the binary you copied will be used rather than building a new one) + +- Download the bindist from GitHub and double check that the `stack` in it is + actually static (use `ldd /path/to/stack`) and that `--version` reports + correctly (and not dirty). + +## Setting up a Windows VM for releases + +These instructions are a bit rough, but has the steps to get the Windows machine +set up. + +## Using Virtualbox + + 1. Download Virtualbox VM image: + https://developer.microsoft.com/en-us/microsoft-edge/tools/vms/mac/ + + 2. Launch the VM using Virtualbox and the image downloaded + + 3. Adjust settings: + * Number of CPUs: at least half the host's + * Memory: at least 3 GB + * Video RAM: the minimum recommended by Virtualbox + * Enable 3D and 2D accelerated mode (this makes programs with lots of + console output much faster) + * Enabled shared clipboard (in VM window, Devices->Shared + Clipboard->Both Directions) + +Now continue to the **General Windows setup** subsection below. + +## Using ESXi + +1. Download the **MSEdge on Win10** VM for **VMWare (Windows, Mac)**. +2. Unzip the file downloaded file +3. 
Upload the VMDK file to the ESXi datastore +4. SSH into ESXi CLI and run: + - `vmkfstools -i /vmfs/volumes/datastore1/win10-msedge/MSEdge-Win10-VMWare-disk1-ORIG.vmdk /vmfs/volumes/datastore1/win10-msedge/MSEdge-Win10-VMWare-disk1.vmdk -d thin`. + This converts the disk to a format that is compatible with ESXi. You may + have to run `esxcli system module load -m multiextent` first (see + https://www.virtuallyghetto.com/2012/09/2gbsparse-disk-format-no-longer-working.html). + - `vmkfstools -X 80G /vmfs/volumes/datastore1/win10-msedge/MSEdge-Win10-VMWare-disk1.vmdk`. + This makes the disk twice as large, which helps avoid running out of disk + space. +5. In the ESXi web UI: + - Create a new VM + - Give is 8192 MB of memory + - Give it 4 virtual CPUs + - Remove the default hard disk + - Add an **Existing hard disk** + - Select `/datastore1/win10-msedge/MSEdge-Win10-VMWare-disk1.vmdk` + - Power on the VM + - In Windows settings: + - Search for "disk management" + - Extend the partition to take the whole disk. + - In all likelihood, you will want to search for "remote desktop" and enable + remote desktop. Then you can connect to the VM using Microsoft Remote + Desktop instead of using it from within the ESXi web UI. + +Now continue to the **General Windows setup** subsection below. + +## General Windows setup + + 5. In **Settings**->**Update & Security**->**Windows Update**->**Advanced options**: + * Change **Choose how updates are installed** to **Notify to schedule restart** + * Check **Defer upgrades** (this avoids rebooting in the middle of the stack + build) + + 6. In **Settings**->**System**->**Power & sleep** + + * Disable turning off the screen or going to sleep when plugged in + + 7. Install msysgit: https://msysgit.github.io/ + + 8. Install TortoiseHG: https://tortoisehg.bitbucket.io/download/index.html + + 9. Install nsis-2.46.5-Unicode-setup.exe from http://www.scratchpaper.com/ + +10. Install Stack using the Windows 64-bit installer + + a. 
Restart any command prompts to ensure they get new `%STACK_ROOT%` value. + +11. Visit https://hackage.haskell.org/ in Edge to ensure system has correct CA + certificates + +13. Run in command prompt: + + ~~~text + md C:\p + md C:\tmp + cd /d C:\p + ~~~ + +14. Create `C:\p\env.bat`: + + ~~~text + SET TEMP=C:\tmp + SET TMP=C:\tmp + SET PATH=C:\Users\IEUser\AppData\Roaming\local\bin;"c:\Program Files\Git\usr\bin";"C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin";%PATH% + ~~~ + +15. Run `C:\p\env.bat` (do this every time you open a new command prompt) + +16. `stack exec -- gpg --import`, and paste in the your GPG secret key (must be + done using `stack exec` because that uses the right keyring for the embedded + MSYS2 GPG; you can get the key from another machine with + `gpg --export-secret-keys --armor `) + +17. Run in command prompt (adjust the `user.email` and `user.name` settings): + + ~~~text + git config --global user.email manny@fpcomplete.com + git config --global user.name "Emanuel Borsboom" + git config --global push.default simple + git config --global core.autocrlf true + git clone https://github.com/borsboom/stack-installer.git + git clone -b stable --reference C:\p\stack-release https://github.com/commercialhaskell/stack.git stack-release + cd stack-release + stack install cabal-install + ~~~ + +## Setting up an ARM VM for releases + +1. Use Scaleway to start ARMv7 and ARM64 VMs. + +2. Select Ubuntu Xenial as the operating system + +3. Install the correct version of LLVM: `sudo apt-get install -y llvm-3.9` + (appropriate for GHC 8.2, might need different version for other GHCs) + +4. Symlink opt-3.X to `opt`: `sudo ln -s opt-3.9 /usr/bin/opt` (adjust the + version if you installed a different one above) + +5. Switch to gold linker: + + ~~~sh + update-alternatives --install "/usr/bin/ld" "ld" "/usr/bin/ld.gold" 20 + update-alternatives --install "/usr/bin/ld" "ld" "/usr/bin/ld.bfd" 10 + update-alternatives --config ld + ~~~ + +6. 
Add swap space: + + ~~~sh + dd if=/dev/zero of=/swapfile1 bs=1024 count=4194304 + mkswap /swapfile1 + swapon /swapfile1 + echo '/swapfile1 none swap sw 0 0' >>/etc/fstab + ~~~ + +7. Install additional tools: + + ~~~Sh + apt-get update && apt-get install -y unzip gpg + ~~~ + +8. Import your GPG key (`gpg --import` and paste the private key) + +9. Git settings (adjust for your preferences/email/name) + + ~~~text + git config --global push.default simple + git config --global user.email "manny@fpcomplete.com" + git config --global user.name "Emanuel Borsboom" + ~~~ + +10. Install tools used during building and dependencies packages + + ~~~text + sudo apt-get install -y g++ gcc libc6-dev libffi-dev libgmp-dev make xz-utils zlib1g-dev git gnupg + ~~~ + +11. Install clang+llvm + + NOTE: the Debian jessie `llvm` package does not work (executables built with + it just exit with "schedule: re-entered unsafely."). + + The version of LLVM needed depends on the version of GHC you need. + + * GHC 8.2.2 (the standard for building Stack) + + ~~~sh + wget http://llvm.org/releases/3.9.1/clang+llvm-3.9.1-armv7a-linux-gnueabihf.tar.xz && \ + sudo tar xvf clang+llvm-3.9.1-armv7a-linux-gnueabihf.tar.xz -C /opt + ~~~ + + Run this now and add it to the `.profile`: + + ~~~sh + export PATH="$HOME/.local/bin:/opt/clang+llvm-3.9.1-armv7a-linux-gnueabihf/bin:$PATH" + ~~~ + + * GHC 7.10.3 + + ~~~sh + wget http://llvm.org/releases/3.5.2/clang+llvm-3.5.2-armv7a-linux-gnueabihf.tar.xz && \ + sudo tar xvf clang+llvm-3.5.2-armv7a-linux-gnueabihf.tar.xz -C /opt + ~~~ + + Run this now and add it to the `.profile`: + + ~~~sh + export PATH="$HOME/.local/bin:/opt/clang+llvm-3.5.2-armv7a-linux-gnueabihf/bin:$PATH" + ~~~ + +12. Install Stack + + Binary: get an + [existing `stack` binary](https://github.com/commercialhaskell/stack/releases) + and put it in `~/.local/bin`. 
 + + From source, using Cabal (the tool): + + ~~~sh + wget http://downloads.haskell.org/~ghc/7.10.3/ghc-7.10.3-armv7-deb8-linux.tar.xz && \ + tar xvf ghc-7.10.3-armv7-deb8-linux.tar.xz && \ + cd ghc-7.10.3 && \ + ./configure --prefix=/opt/ghc-7.10.3 && \ + sudo make install && \ + cd .. + export PATH="/opt/ghc-7.10.3/bin:$PATH" + wget https://www.haskell.org/cabal/release/cabal-install-1.24.0.0/cabal-install-1.24.0.0.tar.gz && \ + tar xvf cabal-install-1.24.0.0.tar.gz && \ + cd cabal-install-1.24.0.0 && \ + EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh && \ + cd .. && \ + export PATH="$HOME/.cabal/bin:$PATH" && \ + cabal update + ~~~ + + Edit `~/.cabal/config`, and set `executable-stripping: False` and + `library-stripping: False`. + + ~~~sh + cabal unpack stack && \ + cd stack-* && \ + cabal install && \ + mv ~/.cabal/bin/stack ~/.local/bin + ~~~ diff --git a/doc/maintainers/docker.md b/doc/maintainers/docker.md deleted file mode 100644 index 40f2966a49..0000000000 --- a/doc/maintainers/docker.md +++ /dev/null @@ -1,93 +0,0 @@ -
- -# Docker images - -Each Stackage LTS release has two corresponding docker images in the [fpco/stack-build](https://hub.docker.com/r/fpco/stack-build/) and [fpco/stack-build-small](https://hub.docker.com/r/fpco/stack-build-small/) repositories. The former contains every system library needed to build any package in the snapshot, while the latter only contains a minimal set of system libraries for basic programs. - -The Dockerfiles for building these images are in [stackage/automated/dockerfiles](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/). There is also a [build.sh](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/build.sh) script to help with building and pushing the images (see the [README](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/README.md) for usage instructions). - - -## Build images for new minor LTS snapshot - -In most cases, a new minor LTS snapshot just needs the previous LTS image to be re-tagged and pushed. If the image needs a patch for the new minor LTS snapshot, see the next section. - -Below, replace `.` with the minor LTS snapshot version. - -- Check out the `stable` branch of the [stack repo](https://github.com/commercialhaskell/stack/). - -- Build and push the images (both standard and `small` variants) using the [build.sh](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/build.sh) script: `./build.sh --push lts-. && ./build.sh --push --small lts-.`. - - -## Patch images for new minor LTS snapshot - -Below, replace `.` with the minor LTS snapshot version. and `.` with the previous minor LTS snapshot version. - -- Check out the `stable` branch of the [stack repo](https://github.com/commercialhaskell/stack/). - -- In `stackage/automated/dockerfiles`, create a new `lts-.` directory. - -- Create `lts-./Dockerfile`, starting with: - - FROM $DOCKER_REPO:lts-. 
- -- Add layers for any changes that need to be made to the image. - -- Build the new image using the [build.sh](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/build.sh) script: `./build.sh lts-. && ./build.sh --small lts-.` - -- Test the new image. For example, `(stack --resolver=lts-. new image-test && cd image-test && stack --docker build)` (this should use the image you just built). Make sure you test that the new image actually contains the desired changes. - -- Follow the process in the previous section to push the images. - - -## Build images for new major LTS snapshot release - -### Test a Dockerfile prior to new major LTS snapshot release - -Replace `` with major version of new LTS snapshot, and `` with previous major LTS snapshot version. - -- Check out the `stable` branch of the [stack repo](https://github.com/commercialhaskell/stack/). - -- In `stackage/automated/dockerfiles`, create a new `lts-.0` directory. - -- Copy `lts-.0/Dockerfile` to `lts-.0/Dockerfile`. - -- Check the `FROM` statement, make sure the Ubuntu version matches the Ubuntu version used in the [Stackage Dockerfile](https://github.com/commercialhaskell/stackage/blob/master/Dockerfile). - -- Update `GHC_VERSION` to match the version used by the [latest nightly snapshot](https://www.stackage.org/nightly). - -- Set `LTS_SLUG` to the [latest nightly snapshot](https://www.stackage.org/nightly) (this will be temporary until the major LTS snapshot is actually released, at which point it will be updated to `lts-.0`). - -- Update `PID1_VERSION` and `STACK_VERSION` to the latest versions of those tools. - -- Make sure `CUDA_VERSION` and `JVM_PATH` match what [debian-bootstrap.sh](https://github.com/commercialhaskell/stackage/blob/master/debian-bootstrap.sh) uses. - -- Update `LLVM_PATH` to the version required for the GHC version. This will be shown on the download page for the GHC version, which you can reach from https://www.haskell.org/ghc/. 
It should match the base directory used in `CLANG_PURE_LLVM_INCLUDE_DIR` in [debian-bootstrap.sh](https://github.com/commercialhaskell/stackage/blob/master/debian-bootstrap.sh) (leaving off the `/include` suffix). - -- Update `BOOTSTRAP_COMMIT` to the git commit ID of the latest [debian-bootstrap.sh](https://github.com/commercialhaskell/stackage/blob/master/debian-bootstrap.sh). - -- Check for any other `lts-.*/Dockerfile`s and make sure `lts-.0/Dockerfile` includes anything that was updated in those, if they're still relevant for LTS-15 (note that a newer [debian-bootstrap.sh](https://github.com/commercialhaskell/stackage/blob/master/debian-bootstrap.sh) may already include those changes, so check there first). - - -### Perform basic tests - -- Build the image: `docker build -t local/stack-build lts-.0/`. - -- Ensure that all the directories listed in `PATH`, `CUDA_PATH`, and `CPATH` and any other path-like environment variables actually exist in the image. - -- Try building a test package with the new image: `(stack --resolver=nightly new image-test && cd image-test && stack --docker --docker-image=local/stack-build build)`. This should build without needing to install GHC. - -- Build the "small" variant: `docker build -t local/stack-build-small --build-arg "VARIANT=small" lts-.0/`. - -- Try building a test package with the new small image: `(stack --resolver=nightly new small-image-test && cd small-image-test && stack --docker --docker-image=local/stack-build-small build)`. This should build without needing to install GHC. - -### Build real image once major LTS snapshot has been released - -- Update `LTS_SLUG` to `lts-.0` - -- Update `BOOTSTRAP_COMMIT` to the git commit ID of the latest [debian-bootstrap.sh](https://github.com/commercialhaskell/stackage/blob/master/debian-bootstrap.sh). - -- Repeat the tests above, except use `lts-.0` instead of `nightly`. 
- -- Build and push the real images (both standard and `small` variants) using the [build.sh](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/build.sh) script: `./build.sh --push lts-.0 && ./build.sh --push --small lts-.0` - -- Commit and push the new Dockerfile to the `stable` branch. diff --git a/doc/maintainers/ghc.md b/doc/maintainers/ghc.md index f97790ba70..30ed59cc52 100644 --- a/doc/maintainers/ghc.md +++ b/doc/maintainers/ghc.md @@ -2,98 +2,123 @@ # Adding a new GHC version - * Push new tag to our fork: +* Push new tag to our fork. Command: - git clone git@github.com:commercialhaskell/ghc.git - cd ghc - git remote add upstream https://gitlab.haskell.org/ghc/ghc.git - git fetch upstream - git push origin ghc-X.Y.Z-release + ~~~text + git clone git@github.com:commercialhaskell/ghc.git + cd ghc + git remote add upstream https://gitlab.haskell.org/ghc/ghc.git + git fetch upstream + git push origin ghc-X.Y.Z-release + ~~~ - * [Publish a new Github release](https://github.com/commercialhaskell/ghc/releases/new) - with tag `ghc-X.Y.Z-release` and same name, with description noting where the binidsts are mirrored from. E.g. +* [Publish a new GitHub release](https://github.com/commercialhaskell/ghc/releases/new) + with tag `ghc-X.Y.Z-release` and same name, with description noting where the + binidsts are mirrored from. 
For example: - Unless otherwise indicated, bindists are mirrored from https://downloads.haskell.org/~ghc/ - * FreeBSD bindists are mirrored from http://distcache.FreeBSD.org/local-distfiles/arrowd/stack-bindists - * musl bindists are mirrored from https://github.com/redneb/ghc-alt-libc/releases + ~~~text + Unless otherwise indicated, bindists are mirrored from https://downloads.haskell.org/~ghc/ + * FreeBSD bindists are mirrored from http://distcache.FreeBSD.org/local-distfiles/arrowd/stack-bindists + * musl bindists are mirrored from https://github.com/redneb/ghc-alt-libc/releases + ~~~ - * Download all the relevant GHC bindists from their sources, and upload them to the just-created Github release (see - [stack-setup-2.yaml](https://github.com/fpco/stackage-content/blob/master/stack/stack-setup-2.yaml) - for the ones we used in the last GHC release). +* Download all the relevant GHC bindists from their sources, and upload them to + the just-created GitHub release (see + [stack-setup-2.yaml](https://github.com/fpco/stackage-content/blob/master/stack/stack-setup-2.yaml) + for the ones we used in the last GHC release). - In the case of macOS, repackage the `.xz` bindist as a `.bz2`, since macOS does - not include `xz` by default or provide an easy way to install it. + In the case of macOS, repackage the `.xz` bindist as a `.bz2`, since macOS + does not include `xz` by default or provide an easy way to install it. - The script at `etc/scripts/mirror-ghc-bindists-to-github.sh` will help with - this. See the comments within the script. + The script at `etc/scripts/mirror-ghc-bindists-to-github.sh` will help with + this. See the comments within the script. - * [Edit stack-setup-2.yaml](https://github.com/fpco/stackage-content/edit/master/stack/stack-setup-2.yaml) - and add the new bindists, pointing to the Github release version. Be sure to - update the `content-length` and `sha1` values. 
+* [Edit stack-setup-2.yaml](https://github.com/fpco/stackage-content/edit/master/stack/stack-setup-2.yaml) + and add the new bindists, pointing to the GitHub release version. Be sure to + update the `content-length` and `sha1` values. - Before committing, test using a command like: + Before committing, test using a command like: - stack --resolver=ghc-X.Y.Z setup --setup-info-yaml=path/to/stackage-content/stack/stack-setup-2.yaml + ~~~text + stack --snapshot=ghc-X.Y.Z setup --setup-info-yaml=path/to/stackage-content/stack/stack-setup-2.yaml + ~~~ - * In [stackage-content](https://github.com/fpco/stackage-content), run +* In [stackage-content](https://github.com/fpco/stackage-content), command: - cd stack && ./update-global-hints.hs ghc-X.Y.Z - - and commit the changes. + ~~~text + cd stack + ./update-global-hints.hs ghc-X.Y.Z + ~~~ + and commit the changes. ## Building GHC **NOTE: We are no longer building custom GHC bindists. This section remains for future reference, but GHC's build system has changed substantially since it was written.** -TODO: look into using https://github.com/bgamari/ghc-utils/blob/master/rel-eng/bin-release.sh, which is the script used to official bindists. +TODO: look into using +https://github.com/bgamari/ghc-utils/blob/master/rel-eng/bin-release.sh, which +is the script used to official bindists. On systems with a small `/tmp`, you should set TMP and TEMP to an alternate location. -Setup the system based on [these instructions](https://gitlab.haskell.org/ghc/ghc/wikis/building/preparation/linux). 
On Ubuntu (`docker run -ti --rm ubuntu:16.04`): - - apt-get update && apt-get install -y ghc alex happy make autoconf g++ git vim xz-utils automake libtool gcc libgmp-dev ncurses-dev libtinfo-dev python3 - -on Void Linux (`docker run -ti --rm voidlinux/voidlinux bash`): - - xbps-install -S curl gcc make xz ghc autoconf git vim automake gmp-devel ncurses-devel python3 cabal-install && \ - cabal update && \ - cabal install alex happy - -For GHC >= 7.10.2, set the `GHC_VERSION` environment variable to the version to build: - - * `export GHC_VERSION=8.2.2` - * `export GHC_VERSION=8.2.1` - * `export GHC_VERSION=8.0.2` - * `export GHC_VERSION=8.0.1` - * `export GHC_VERSION=7.10.3a` - * `export GHC_VERSION=7.10.2` - -then, run (from [here](https://gitlab.haskell.org/ghc/ghc/wikis/building/quick-start)): - - git config --global url."git://github.com/ghc/packages-".insteadOf git://github.com/ghc/packages/ && \ - git clone -b ghc-${GHC_VERSION}-release --recursive https://gitlab.haskell.org/ghc/ghc.git ghc-${GHC_VERSION} && \ - cd ghc-${GHC_VERSION}/ && \ - cp mk/build.mk.sample mk/build.mk && \ - sed -i 's/^#BuildFlavour *= *perf$/BuildFlavour = perf/' mk/build.mk && \ - ./boot && \ - ./configure --enable-tarballs-autodownload && \ - sed -i 's/^TAR_COMP *= *bzip2$/TAR_COMP = xz/' mk/config.mk && \ - make -j$(cat /proc/cpuinfo|grep processor|wc -l) && \ - make binary-dist - -GHC 7.8.4 is slightly different: - - export GHC_VERSION=7.8.4 && \ - git config --global url."git://github.com/ghc/packages-".insteadOf git://github.com/ghc/packages/ && \ - git clone -b ghc-${GHC_VERSION}-release --recursive https://gitlab.haskell.org/ghc/ghc.git ghc-${GHC_VERSION} && \ - cd ghc-${GHC_VERSION}/ && \ - ./sync-all --extra --nofib -r git://git.haskell.org get -b ghc-7.8 && \ - cp mk/build.mk.sample mk/build.mk && \ - sed -i 's/^#BuildFlavour *= *perf$/BuildFlavour = perf/' mk/build.mk && \ - perl boot && \ - ./configure && \ - sed -i 's/^TAR_COMP *= *bzip2$/TAR_COMP = xz/' mk/config.mk && 
\ - make -j$(cat /proc/cpuinfo|grep processor|wc -l) && \ - make binary-dist +Setup the system based on these +[instructions](https://gitlab.haskell.org/ghc/ghc/wikis/building/preparation/linux). +On Ubuntu (`docker run -ti --rm ubuntu:16.04`): + +~~~text +apt-get update +apt-get install -y ghc alex happy make autoconf g++ git vim xz-utils automake libtool gcc libgmp-dev ncurses-dev libtinfo-dev python3 +~~~ + +On Void Linux (`docker run -ti --rm voidlinux/voidlinux bash`) command: + +~~~text +xbps-install -S curl gcc make xz ghc autoconf git vim automake gmp-devel ncurses-devel python3 cabal-install +cabal update +cabal install alex happy +~~~ + +For GHC >= 7.10.2, set the `GHC_VERSION` environment variable to the version to +build: + +* `export GHC_VERSION=8.2.2` +* `export GHC_VERSION=8.2.1` +* `export GHC_VERSION=8.0.2` +* `export GHC_VERSION=8.0.1` +* `export GHC_VERSION=7.10.3a` +* `export GHC_VERSION=7.10.2` + +then, from +[here](https://gitlab.haskell.org/ghc/ghc/wikis/building/quick-start), command: + +~~~text +git config --global url."git://github.com/ghc/packages-".insteadOf git://github.com/ghc/packages/ +git clone -b ghc-${GHC_VERSION}-release --recursive https://gitlab.haskell.org/ghc/ghc.git ghc-${GHC_VERSION} +cd ghc-${GHC_VERSION}/ +cp mk/build.mk.sample mk/build.mk +sed -i 's/^#BuildFlavour *= *perf$/BuildFlavour = perf/' mk/build.mk +./boot +./configure --enable-tarballs-autodownload +sed -i 's/^TAR_COMP *= *bzip2$/TAR_COMP = xz/' mk/config.mk +make -j$(cat /proc/cpuinfo|grep processor|wc -l) +make binary-dist +~~~ + +GHC 7.8.4 is slightly different. 
Command: + +~~~text +export GHC_VERSION=7.8.4 +git config --global url."git://github.com/ghc/packages-".insteadOf git://github.com/ghc/packages/ +git clone -b ghc-${GHC_VERSION}-release --recursive https://gitlab.haskell.org/ghc/ghc.git ghc-${GHC_VERSION} +cd ghc-${GHC_VERSION}/ +./sync-all --extra --nofib -r git://git.haskell.org get -b ghc-7.8 +cp mk/build.mk.sample mk/build.mk +sed -i 's/^#BuildFlavour *= *perf$/BuildFlavour = perf/' mk/build.mk +perl boot +./configure +sed -i 's/^TAR_COMP *= *bzip2$/TAR_COMP = xz/' mk/config.mk +make -j$(cat /proc/cpuinfo|grep processor|wc -l) +make binary-dist +~~~ diff --git a/doc/maintainers/haskellstack.org.md b/doc/maintainers/haskellstack.org.md new file mode 100644 index 0000000000..4062d85e52 --- /dev/null +++ b/doc/maintainers/haskellstack.org.md @@ -0,0 +1,179 @@ +
+ +# HaskellStack.org + +The domain https://docs.haskellstack.org hosts online documentation for the +Stack project, using [Read the Docs](https://readthedocs.org/) with +[MkDocs](https://www.mkdocs.org/) and the Material for MkDocs +[theme](https://squidfunk.github.io/mkdocs-material/). + +The domain https://get.haskellstack.org provides URLs that redirect to URLs +used to install the Stack executable. + +## Read the Docs + +The Read the Docs project is named +['The Haskell Tool Stack'](https://readthedocs.org/projects/stack/). + +The set up on the Read the Docs web site involves two page redirects when there +are HTTP 404 Not Found errors: + + / -> /README/ + /README/ -> / + +The 'Home' MkDocs page is `doc/README.md`. + +The `/ -> /README/` redirect ensures that +https://docs.haskellstack.org/en/stable/ (for example) will, if not found, +redirect to https://docs.haskellstack.org/en/stable/README/. + +The `/README/ -> /` redirect ensures that +https://docs.haskellstack.org/en/latest/README/ (for example) will, if not +found, redirect to https://docs.haskellstack.org/en/latest/. + +MkDocs rendering of `README.md` differed before and after MkDocs 1.0. Prior to +MkDocs 1.0, `README.md` rendered to `/README/index.html`. From MkDocs 1.0, +`README.md` rendered to `/index.html`. The two redirects above ensure that the +Read the Docs flyout works when moving between different versions of the home +page using the flyout. + +Stack moved from MkDocs 0.17.3 to MkDocs 1.3.1 after publishing the +documentation for Stack 2.7.5. + +A configuration file, `.readthedocs.yaml` is included in the repository root +directory. See https://docs.readthedocs.io/en/stable/config-file/v2.html. It +specifies a Python requirements file in `doc/requirements.txt`. + +## MkDocs + +The `doc/requirements.txt` file pins the version of MkDocs. As at +13 April 2025 it is set to: + + mkdocs==1.6.0 + +A configuration file, `mkdocs.yml` is included in the repository root directory. 
+See https://www.mkdocs.org/user-guide/configuration/. + +`site_dir: _site` specifies the directory where the output HTML and other files +are created. This directory is added to the `.gitignore` file. + +## Material for MkDocs + +Stack moved from the default `readthedocs` theme to Material for MkDocs after +publishing the documentation for Stack 2.7.5. The new theme has extensive online +documentation and features that the default theme lacked. + +The Material for MkDocs theme is loaded in the `doc/requirements.txt` file: + + mkdocs-material + +The theme is specified in the `mkdocs.yml` file: + +~~~yaml +theme: + name: material + palette: + primary: 'deep purple' + accent: 'deep purple' + logo: img/stack-logo-white.svg + favicon: img/stack-favicon.svg + features: + - content.code.annotate + - content.code.copy + - content.code.select + - content.tabs.link + - navigation.indexes + - navigation.tabs + - navigation.top +~~~ + +Read the Docs requires [JQuery](https://jquery.com/) for its JavaScript code to +inject the flyout menu. Material for MkDocs does not come with JQuery. So, the +following is required in the `mkdocs.yml` file: + +~~~yaml +extra_javascript: +- 'https://code.jquery.com/jquery-3.6.1.min.js' +~~~ + +The Read the Docs flyout is formatted with a `font-size` that is 90% of the +`body` `font-size`. Material for MkDocs has a `body` `font-size` that is +`0.5rem`, which is small. A little additional CSS is added to the `extra.css` +file, to force the final `font-size` to be `0.7rem`. That size is consistent +with that of other elements in the theme. + +~~~css +body { + font-size: 0.777778rem; +} +~~~ + +Material for MkDocs default suggestions for syntax highlighting in code blocks +are applied. 
They are specified in the `mkdocs.yml` file as: + +~~~yaml +markdown_extensions: +- pymdownx.highlight: + anchor_linenums: true +- pymdownx.inlinehilite +- pymdownx.snippets +- pymdownx.superfences +~~~ + +Other extensions to the basic Markdown syntax used include: + +* Admonitions + + !!! info + + This is an example of an 'info' admonition. + +* Content tabs, which can be nested + + !!! info + + Content tabs are used so that users of different operating systems, or + different distributions of Linux, can be presented with content specific + to their needs. + +* icons and emojis + + !!! info + + The `octicons-tag-24` icon (:octicons-tag-24:) is used to refer to + versions of Stack. The `material-cloud-download-outline` icon + (:material-cloud-download-outline:) is used to signify a download link. + The `octicons-beaker-24` icon (:octicons-beaker-24:) is used with + 'Experimental' to signify that a feature is experimental. + +## Testing online documentation + +Online documentation can be tested by establishing a branch on the repository +that is then configured on the Read the Docs web site as 'Active' but +'Hidden' - for example branch `mkdocs-test`. As the branch is 'Hidden' it does +not appear in the Read the Docs flyout or search results. + +## get.haskellstack.org redirects + +The https://get.haskellstack.org redirects are implemented with +[CloudFlare Pages](https://developers.cloudflare.com/pages/platform/redirects/) +and a `_redirects` file in the root of the +`commercialhaskell/get-haskellstack-org` GitHub +[repository](https://github.com/commercialhaskell/get-haskellstack-org). + +Each redirect is defined as a line in the file with format: + +~~~text +[source] [destination] +~~~ + +'Splats' are used in redirects. On matching, a splat (asterisk, `*`) will greedily match all characters and the matched value can be used in the redirect location with `:splat`. 
+ +For example, for Stack 2.9.1: + +~~~text +/stable/* https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-:splat +/upgrade/linux-x86_64-static.tar.gz https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz +/upgrade/* https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-:splat +/ https://raw.githubusercontent.com/commercialhaskell/stack/stable/etc/scripts/get-stack.sh +~~~ diff --git a/doc/maintainers/msys.md b/doc/maintainers/msys.md index 5ff7209bba..7e184860ff 100644 --- a/doc/maintainers/msys.md +++ b/doc/maintainers/msys.md @@ -1,31 +1,115 @@
-# Upgrading msys +# Upgrading MSYS2 When installing GHC on Windows, Stack will also install -[msys2](http://www.msys2.org/) to provide a Unix shell and environment, -necessary for such things as running configure scripts. This section explains -the steps required to upgrade the msys2 version used by Stack. +[MSYS2](http://www.msys2.org/). MSYS2 provides a Unix shell and environment, and +is necessary for such things as running configure scripts. This section explains +the steps required to upgrade the MSYS2 version used by Stack. -1. Download latest installers from msys2's website. These installers are - executables, versioned by date (YYYYMMDD), and are separate for `x86_64` - and `i686`. You'll usually be upgrading both at the same time, which we'll - assume here. +1. Download latest installer(s) from MSYS2's website. Historically, there were + separate installers for 32 bit (`i686`) and 64 bit (`x86_64`). On + 17 May 2020, the MSYS2 project announced it did not plan to release any + further `i686` installers. An installer is an executable, versioned by a + date in the format YYYYMMDD - for example, `msys2-x86_64-20220503.exe`. -2. Run the installer and install to the default location (`c:\msys64` and - `c:\msys32`, respectively). +2. Run the installer and install to the default location (`C:\msys64` for the + 64 bit version; the location for the 32 bit version was `C:\msys32`). Do not + use the installed version; it will create a `.bash_history` file if you do. -3. Create tarballs for each directory: +3. Create an `.tar.xz` archive file for each relevant directory (eg + `C:\msys64`). That is best done using the same `7z` executable in Stack's + 'programs' directory (`stack path --programs`) that will be used to extract + files from the archive. That can be done in two steps: the first to create a + `.tar` archive, and the second to create a `.tar.xz` archive. 
If the current + working directory is Stack's 'programs' directory: - ``` - $ cd /c/ - $ tar cJf msys2-YYYYMMDD-x86_64.tar.xz msys64 - $ tar cJf msys2-YYYYMMDD-i686.tar.xz msys32 - ``` + ~~~text + ./7z a msys2-YYYYMMDD-x86_64.tar C:\msys64 + ./7z a msys2-YYYYMMDD-x86_64.tar.xz msys2-YYYYMMDD-x86_64.tar + rm msys2-YYYYMMDD-x86_64.tar # Tidy up + ~~~ -4. Create a new release named `msys2-YYYYMMDD` on the - [fpco/stackage-content](https://github.com/fpco/stackage-content) - repo, and upload these two files. + !!! note -5. Create a PR for the [stack-setup-2.yaml file](https://github.com/fpco/stackage-content/blob/master/stack/stack-setup-2.yaml) - to switch over to using the newly uploaded files. You should test this file locally first. + Previously, the advice was that creating the archive file required a + version of [`tar`](https://www.gnu.org/software/tar/tar.html) that + supported the compression option `--xz`. The version of `tar` that is + supplied with Windows (`C:\Windows\System32\tar.exe`) does not support + that option, but MSYS2 can supply a + [version](https://packages.msys2.org/package/tar) that does (using its + `pacman` tool). Using the existing Stack-supplied MSYS2, in PowerShell + and located in a folder with write permissions (so the `.tar.xz` file + can be created), it was advised to command: + + ~~~text + stack exec -- pacman -S tar + stack exec -- tar cJf msys2-YYYYMMDD-x86_64.tar.xz C:\msys64 + ~~~ + + However, in the case of `msys2-20220503` that resulted in an archive + that could not extracted on a terminal that did not have elevated rights + ('Run as administrator') due to errors + `ERROR: Cannot create symbolic link : A required privilege is not held by the client`. + +4. Test that the Stack-supplied `7z` executable can extract the files in the + archive that has been created without error: + + ~~~test + ./7z x msys2-YYYYMMDD-x86_64.tar.xz + ./7z x msys2-YYYYMMDD-x86_64.tar + ~~~ + +5. 
Create a new release tagged and named `msys2-YYYYMMDD` in the `master` + branch of the + [commercialhaskell/stackage-content](https://github.com/commercialhaskell/stackage-content) + GitHub repository, uploading the tarball file(s) into that release. + +6. Changes need to be made to the + [stackage-content/stack/stack-setup-2.yaml](https://github.com/commercialhaskell/stackage-content/blob/master/stack/stack-setup-2.yaml) + file, to switch over to using the newly uploaded files. For example + (extract): + + ~~~yaml + # For upgrade instructions, see: https://github.com/commercialhaskell/stack/blob/stable/doc/maintainers/msys.md + msys2: + windows32: + version: "20200517" + url: "https://github.com/fpco/stackage-content/releases/download/20200517/msys2-20200517-i686.tar.xz" + content-length: 79049224 + sha256: 9152ddf50c6bacfae33c1436338235f8db4b10d73aaea63adefd96731fb0bceb + windows64: + version: "20220503" + url: "https://github.com/commercialhaskell/stackage-content/releases/download/msys2-20220503/msys2-20220503-x86_64.tar.xz" + content-length: 93835868 + sha256: c918f66e984f70add313ee3a5c5b101132cd93d5a3f8e3555e129e2d3dcb3718 + ~~~ + + The `content-length:` key's value is the size of the file in bytes. It can + be obtained from the `Length` field of the `dir` command. The `sha256:` + key's value can be obtained from the command (in PowerShell): + + ~~~text + (Get-FileHash msys2-YYYYMMDD-x86_64.tar.xz -Algorithm SHA256).Hash.ToLower() + ~~~ + + The `sha256:` key only accepts lowercase hash results as values. + +7. The changed `stack-setup-2.yaml` file should be tested locally. This can be + done by: + + * temporarily disabling the existing local copy of MSYS2 by changing the + name of the `msys2-YYYYMMDD.installed` file in the `stack path --programs` + directory; and + + * executing the command: + + ~~~text + stack setup --setup-info-yaml + ~~~ + + If all is well, the command should proceed to download the updated version + of MSYS2 that has been specified. + +8. 
Raise a pull request on `commercialhaskell/stackage-content` for the
- -Examples: - -* `1.7.0.0`: v1.7.x series pre-release branch (`v1.7` branch) -* `1.7.0.1`: release candidate for first release of v1.7.x series (`v1.7` branch) -* `1.7.0.2`: continuing development on pre-release branch -* `1.7.0.3`: second release candidate for first release of v1.7.x series (`v1.7` branch) -* `1.7.1`: first release of the 1.7.x series (`release` branch) -* `1.7.2.0`: development for second release of 1.7.x series (`stable` branch) -* `1.7.2.1`: release candidate for second release of 1.7.x series (`stable` branch) -* `1.7.3`: second release of 1.7.x series (`release` branch) -* `1.7.3.1`: first hackage-only patch of 1.7.3 (`release` branch) -* `1.7.3.2`: second hackage-only patch of 1.7.3 (`release` branch) -* `1.8.0`: unstable development code (`master` branch) -* `1.8.0.20181004`: pre-release snapshot of unstable version (`master` branch) +## Branches + +* The `release` branch is intended to preserve the most recent release + (including, if applicable, Hackage-only dependency compatibility patch + releases). + +* The `stable` branch is intended to be a copy of the `release` branch together + with any subsequent commits that (only) revise the documentation for the most + recent release. That documentation is presented at . + +* The `master` branch is the current development branch. It is intended that a + working version of Stack can be built from the branch. The release process + begins with a copy of the branch. + +* A `rc/vX.Y` branch (named after a release in the Stack X.Y.* series) is + intended to be for release candidates and final releases. It begins as a copy + of the `master` branch. ## Pre-release checks -* Check for any P0 and P1 issues that should be dealt with before release -* Check for un-merged pull requests that should be merged before release -* Ensure `release` and `stable` branches merged to `master` -* Ensure CI matrices in docs (travis-complex, appveyor, azure) have current stackage snapshots and GHC versions (e.g. 
https://github.com/commercialhaskell/stack/pull/4565/files) -* Update the `stack-*.yaml` that uses a `nightly` snapshot to the latest nightly (go over the extra-deps too) and ensure the project builds and tests pass (e.g. `stack build --stack-yaml=… --haddock --test --bench --no-run-benchmarks`) -* Ensure integration tests pass on a Windows, macOS, and Linux. Do so by checking that the latest nightly build for the `master` branch succeeded in Azure DevOps (or kick one off manually if any significant changes were made since the last automated build). +1. Check that Stack is built against the most recent version of `hpack` + released on Hackage. If not, create an issue and/or raise a pull request + accordingly. + +2. Check for any important issues that should be dealt with before release. + +3. Check for un-merged pull requests that should be merged before release. + +4. Ensure the `release` and `stable` branches are merged to the `master` + branch. + +5. Check the copyright dates, and update if needed. + +6. Check the backwards compatibility section of `CONTRIBUTING.md` is up to + date. + +7. Ensure CI matrices in docs (travis-complex, appveyor, azure) have current + stackage snapshots and GHC versions (e.g. + https://github.com/commercialhaskell/stack/pull/4565/files) + +8. Update any `stack-*.yaml` that uses a `nightly` snapshot to the latest + nightly (go over the extra-deps too) and ensure the project builds and tests + pass. For example, command: + + ~~~text + stack build --stack-yaml=… --haddock --test --bench --no-run-benchmarks + ~~~ + +9. The Windows installer is built using an + [NSIS compiler](https://nsis.sourceforge.io/Main_Page). Check that the NSIS + compiler that will be used is capable of handling + [large strings](https://nsis.sourceforge.io/Special_Builds). + +10. Ensure the integration tests pass on Linux, macOS and Windows. + +11. Some people prefer, or need, to build Stack with Cabal (the tool). 
Check + that `cabal.project` is up to date (the specified `with-compiler:`). Check + that `cabal.config` is up to date and is not missing dependencies relevant + on Windows and non-Windows operating systems, following the instructions in + `cabal.project`. ## Release preparation -* In master branch: - * `package.yaml`: bump to next release candidate version (bump second - component to next odd number, ensure third component is `0`, and add - patchlevel `0`; e.g. from `1.8.0` to `1.9.0.0`). Be sure to also update - `stack.cabal` (e.g. by running `stack build`). - * `ChangeLog.md` - * Check for any entries that snuck into the previous version's changes - due to merges (`git diff origin/stable HEAD ChangeLog.md`) - -* Cut a release candidate branch `rc/vX.Y` from master - -* In master branch: - * `package.yaml`: bump version to next unstable version (next even second - component with `.0` third component (e.g. from 1.9.0 to 1.10.0). Be sure - to also update `stack.cabal` (e.g. by running `stack build`). - * `Changelog.md`: - * Change the title of the existing **Unreleased changes** section to what will be the next final (non-RC) release (e.g. `v2.1.1`). - * add new "Unreleased changes" section: - ``` +### A: In the `master` branch + +1. `package.yaml`: bump to the next release candidate version (bump the second + component to the next odd number, ensure the third component is `0`, and add + patchlevel `0`; e.g. from `3.4.0` to `3.5.0.0`). + + !!! attention + + Be sure to update also `stack.cabal` (for example by using + `stack build --dry-run`). + +2. `cabal.config`: Ensure the `stack` constraint is set to the same version as + in the `package.yaml`. + +3. `ChangeLog.md`: Check for any entries that snuck into the previous version's + changes due to merges (`git diff origin/stable HEAD ChangeLog.md`) + +4. Commit the changes to the `master` branch. 
+ +### B: Create a new release candidate branch + +From the `master` branch, checkout a new release candidate (RC) branch named +`rc/vX.Y` (replacing `X.Y` with the first and second components of the release +version). + +~~~text +git checkout -b rc/vX.Y +~~~ + +### C: Return to the `master` branch + +1. `package.yaml`: bump version to the next unstable version (bump the second + component to the next even number, ensure the third component is `0`; e.g. + from `3.5.0` to `3.6.0`). + + !!! attention + + Be sure to update also `stack.cabal` (for example by using + `stack build --dry-run`). + +2. `cabal.config`: Ensure the `stack` constraint is set to the same version as + in the `package.yaml`. + +3. `Changelog.md`: + + * Change the title of the existing **Unreleased changes** section to what + will be the next final (non-RC) release (e.g. `v3.5.1`). + + * Add new "Unreleased changes" section: + + ~~~markdown ## Unreleased changes Release notes: @@ -78,387 +129,522 @@ Examples: Other enhancements: Bug fixes: + ~~~ + +4. Commit the changes to the `master` branch. + +### D: For each release candidate, in the release candidate branch + +1. Review documentation for any changes that need to be made: + + * Ensure all the documentation pages are listed in the `mkdocs.yaml` file. + Use `git diff --stat origin/stable..HEAD doc/` to look for new or + deleted files. + + * Any new documentation pages should have the "may not be correct for the + released version of Stack" warning at the top. + + * Search for old Stack version, unstable Stack version, and the next + "obvious" possible versions in sequence, and `UNRELEASED` and replace + with next release version (`X.Y.1`, where Y is odd). + + !!! attention + + Do **NOT** update the repository's issue and pull request templates + (in the `.github` directory) to point at the new release version + yet! + + * Search for old snapshots, set to latest snapshot (e.g. 
in documentation + where it references the "currently the latest LTS") + + * Look for any links to "latest" (`latest/`) documentation, replace with + version tag + +2. Check for any platform entries that need to be added to (or removed from): + + * [releases.yaml](https://github.com/fpco/stackage-content/blob/master/stack/releases.yaml), + + * [install_and_upgrade.md](https://github.com/commercialhaskell/stack/blob/master/doc/install_and_upgrade.md), + + * [get-stack.sh](https://github.com/commercialhaskell/stack/blob/master/etc/scripts/get-stack.sh), + + * [doc/README.md](https://github.com/commercialhaskell/stack/blob/master/doc/README.md), + and + + * `get.haskellstack.org` redirects. - ``` - -* In RC branch: - * Review documentation for any changes that need to be made - * Ensure all documentation pages listed in `mkdocs.yaml` (use `git diff - --stat origin/stable..HEAD doc/` to look for new/deleted files) - * Any new documentation pages should have the "may not be correct for - the released version of Stack" warning at the top. - * Search for old Stack version, unstable stack version, and the next - "obvious" possible versions in sequence, and - `UNRELEASED` and replace with next release version (`X.Y.1`, where Y is odd). - * Do **NOT** update the Dockerfiles in [stackage/automated/dockerfiles](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/) yet; that will come later) - * Do **NOT** update templates in `.github` to point at the new release version yet! - * Search for old resolvers, set to latest resolver (e.g. 
in `doc/GUIDE.md` where it references the "currently the latest LTS") - * Look for any links to "latest" (`latest/`) documentation, replace with version tag - * Check that for any platform entries that need to be added to (or removed from) - [releases.yaml](https://github.com/fpco/stackage-content/blob/master/stack/releases.yaml), - [install_and_upgrade.md](https://github.com/commercialhaskell/stack/blob/master/doc/install_and_upgrade.md), [get-stack.sh](https://github.com/commercialhaskell/stack/blob/master/etc/scripts/get-stack.sh), and [doc/README.md](https://github.com/commercialhaskell/stack/blob/master/doc/README.md), and get.haskellstack.org redirects. - -* For first release candidate: - * Re-do the pre-release checks (above section) - * `package.yaml`: bump to first odd patchlevel version (e.g. `X.Y.0.1`). Be sure to also update `stack.cabal` (e.g. by running `stack build`). - * `ChangeLog.md` - - Rename the “Unreleased changes” section to the same version as - package.yaml, and mark it clearly as a release candidate (e.g. - `vX.Y.0.1 (release candidate)`). Remove any empty sections. - * Follow steps in *Release process* below tagged with `[RC]` to make a release candidate - -* For subsequent release candidates: - * Re-do the pre-release checks (above section) - * `package.yaml`: bump to next odd patchlevel version (e.g. `X.Y.0.3`). Be - sure to also update `stack.cabal` (e.g. by running `stack build`). - * `ChangeLog.md`: Rename the "Unreleased changes" section to the new version, clearly marked as a release candidate (e.g. `vX.Y.0.3 (release candidate)`). Remove any empty sections. - * Follow steps in *Release process* below tagged with `[RC]` to make a release candidate - -* For final release: - * Re-do the pre-release checks (above section) - * `package.yaml`: bump version to odd last component and no patchlevel - (e.g. from `X.Y.0.2` to `X.Y.1`). Be sure to also update `stack.cabal` - (e.g. by running `stack build`). 
- * `ChangeLog.md`: consolidate all the RC changes into a single section for the release version - * Follow all steps in the *Release process* section below. +3. Re-do the pre-release checks (see the section above). +4. Update `package.yaml` and `ChangeLog.md`. This step differs between a first, + second etc release candidate and a final release. + + === "First RC" + + * `package.yaml`: bump to first odd patchlevel version (e.g. + `X.Y.0.1`). + + * `ChangeLog.md`: Rename the “Unreleased changes” section to the same + version as `package.yaml`, and mark it clearly as a release + candidate (e.g. `vX.Y.0.1 (release candidate)`). Remove any empty + sections. + + === "Second, third etc RC" + + * `package.yaml`: bump to next odd patchlevel version (e.g. + `X.Y.0.3`). + + * `ChangeLog.md`: Rename the “Unreleased changes” section to the same + version as `package.yaml`, and mark it clearly as a release + candidate (e.g. `vX.Y.0.3 (release candidate)`). Remove any empty + sections. + + === "Final Release" + + * `package.yaml`: bump version to odd last component and no patchlevel + (e.g. from `X.Y.0.2` to `X.Y.1`). + + * `ChangeLog.md`: consolidate all the release candidate changes into a + single section for the final release version. + + !!! attention + + After updating `package.yaml`, be sure to update also `stack.cabal` (for + example by using `stack build --dry-run`). + +5. Ensure the `stack ==` constraint in `cabal.config` is set to be equal to the + same version as `package.yaml`. + +6. Commit the changes to the release candidate branch. + +7. Follow the steps in the *Release process* section below that apply. ## Release process -* Ensure that the [Integration Tests workflow on Github Actions](https://github.com/commercialhaskell/stack/actions?query=workflow%3A%22Integration+tests%22) passes the branch you are releasing. This will run automatically for `master`, `stable`, and `rc/*` branches (if another branch, you can run it manually). 
`[RC]` +The release process differs between a first, second etc release candidate and a +final release. + +=== "First, second etc RC" + + ### A: Integration tests workflow passes + + Ensure that the GitHub + [Integration Tests workflow](https://github.com/commercialhaskell/stack/actions?query=workflow%3A%22Integration+tests%22) + passes on the branch that you are releasing. + + This workflow will run automatically for the `rc/*` branch. + + ### B: Push a Git tag + + Push a Git tag. The tag should be `rc/vX.Y.Z.A`, with `X.Y.Z.A` matching the + version in `package.yaml`. + + For example, command: + + ~~~text + git tag -m rc/vX.Y.Z.A rc/vX.Y.Z.A + git push origin rc/vX.Y.Z.A + ~~~ + + ### C: Edit the draft GitHub release, and publish it + + Wait for the GitHub + [Integration Tests workflow](https://github.com/commercialhaskell/stack/actions?query=workflow%3A%22Integration+tests%22) + to complete for the branch you just created. This will create a draft GitHub + release and upload the bindists (plus signatures and hashes) to it. + + Edit the draft + [GitHub release](https://github.com/commercialhaskell/stack/releases/): -* Push signed Git tag. For final releases the tag should be `vX.Y.Z` (where X.Y.Z matches the version in `package.yaml` from the previous step); for release candidates it should be `rc/vX.Y.Z.A`. e.g.: `git tag -u -m vX.Y.Z vX.Y.Z && git push origin vX.Y.Z`. `[RC]` + * Add `(release candidate)` to the name field and ensure that + *This is a pre-release* is checked. -* Wait for [Integration Tests workflow on Github Actions](https://github.com/commercialhaskell/stack/actions?query=workflow%3A%22Integration+tests%22) to complete for the branch you just created. This will create a draft Github release and upload the bindists (plus signatures and hashes) to it. + * Add the ChangeLog to the description. 
-* Edit the draft - [Github release](https://github.com/commercialhaskell/stack/releases/), and `[RC]` - * In the case of a release candidate, add `(release candidate)` to the name field and ensure that *This is a pre-release* is checked. - * Add the Changelog to the description. - * For final releases (**not** release candidates), use e.g. `git shortlog -s origin/release..HEAD|sed $'s/^[0-9 \t]*/* /'|grep -v azure-pipelines|LC_ALL=C sort -f` to get the list of contributors and add it to the description. - * Publish the Github release. `[RC]` + Publish the GitHub release. -* Upload `stack` package to Hackage: `stack upload . --pvp-bounds=lower`. + ### D: Consider adding other platforms to the GitHub release -* Reset the `release` branch to the released commit, e.g.: `git checkout release && git merge --ff-only vX.Y.Z && git push origin release` + The + [Integration Tests workflow](https://github.com/commercialhaskell/stack/actions?query=workflow%3A%22Integration+tests%22) + is limited to the platforms supported by the GitHub-hosted runners + (currently, x86_64 and macOS/AArch64) and any self-hosted runners + (currently, only Linux/AArch64). However, it is possible to edit the GitHub + release to include binary distributions for other platforms. The + prerequisites are: -* Update the `stable` branch similarly + * a computer with that platform (operating system, machine architecture); -* Activate version for new release tag, on - [readthedocs.org](https://readthedocs.org/projects/stack/versions/), and - ensure that stable documentation has updated. + * a sufficiently-recent existing version of Stack for that platform; -* Update [get.haskellstack.org /stable and /upgrade rewrite rules](https://gitlab.com/fpco/operations/kube/fpcomplete-sites-project/-/blob/master/fpcomplete-redirects/get-haskellstack_virtualservice.yaml) with the new version and [sync the application in ArgoCD](https://v5.fpcomplete.com/argocd/applications/fpcomplete-redirects). 
+ * a tool to print SHA checksums, such as `shasum` on Linux and macOS; and - * Test with `curl -vL https://get.haskellstack.org/stable/linux-x86_64.tar.gz >/dev/null`, make sure it redirects to the new version + * the GNU Privacy Guard tool (`gpg`), which has had imported the private key + used to sign Stack executables (see further below). -* In the `stable` or, in the case of a release candidate, `rc/vX.Y` branch: - - `package.yaml`: bump the version number even third component (e.g. from - 1.6.1 to 1.6.2) or, in the case of a release candidate even _fourth_ - component (e.g. from 1.7.0.1 to 1.7.0.2). Be sure to also update - `stack.cabal` (e.g. by running `stack build`). `[RC]` + The steps are similar to those in the workflow: - - `ChangeLog.md`: Add an “Unreleased changes” section (update “changes since” version):`[RC]` + 1. Change to the root directory of the Stack project. - ``` - ## Unreleased changes + 2. `stack etc/scripts/release.hs check`, to check before building. - Release notes: + 3. `stack etc/scripts/release.hs build`, to build. The output 'assets' + (`stack--- ...`) will be in + the `_release` directory in the root directory of the Stack project. - **Changes since vX.Y.Z:** + 4. For each of the output assets, create a corresponding SHA 256 file with + a `.sha256` extension. For example (where `` is the name of the + file): - Major changes: + ~~~text + shasum -a 256 > .sha256 + ~~~ - Behavior changes: + 5. For each of the output assets, create a corresponding ASCII-armored + signature file with an `.asc` extension using `gpg`. For example (where + `` is the name of the file): - Other enhancements: + ~~~text + gpg --digest-algo=sha512 --detach-sig --armor -u 0x575159689BEFB442 + ~~~ - Bug fixes: + 6. Edit the GitHub release to include the output assets and their + corresponding `.sha256` and `.asc` files. 
- ``` + The private key used to sign Stack executables can be exported from a + version of `gpg` to which it has previously been imported with: - - Update templates in `.github` to point at the new release version (`X.Y.1`). + ~~~text + gpg --armor --export-secret-key 0x575159689BEFB442 + ~~~ -* Delete the RC branch (locally and on origin). E.g. `git branch -d rc/vX.Y; git push origin :rc/vX.Y`. + The private key, so obtained, can be imported into `gpg` by: + + 1. Commanding `gpg --import`. + + 2. Pasting the private key. + + 3. Entering Ctrl+D and Enter. + + ### E: Update versions and `ChangeLog.md` for 'unreleased' + + In the `rc/vX.Y` branch: + + * `package.yaml`: bump the version number. Bump the fourth component to an + even number (e.g. from `3.5.0.1` to `3.5.0.2`). + + !!! attention + + Be sure to update also `stack.cabal` (for example by using + `stack build --dry-run`). + + * `cabal.config`: Ensure the `stack` constraint is set to the same version + as in the `package.yaml`. + + * `ChangeLog.md`: Add an “Unreleased changes” section (update the “changes + since” version): + + ~~~markdown + ## Unreleased changes + + Release notes: + + **Changes since vX.Y.Z:** + + Major changes: + + Behavior changes: + + Other enhancements: + + Bug fixes: + ~~~ -* Merge any changes made in the RC/release/stable branches to master (be careful about version and changelog). It is best to do this by making a `ci/merge-stable-to-master` branch and waiting for CI to pass, then merging. If anything is complicated to merge, consider making it a PR and getting it reviewed rather than merging immediately. + ### F: Request update of GHCup's metadata -* Announce to haskell-cafe@haskell.org; haskell-stack@googlegroups.com; commercialhaskell@googlegroups.com mailing lists, subject `ANN: stack-X.Y.Z` (or `ANN: first release candidate for stack-X.Y.x`), containing the release description from Github. 
`[RC]` + Raise a pull request at the + [`haskell/ghcup-metadata`](https://github.com/haskell/ghcup-metadata) GitHub + repository to request an addition to GHCup's latest metadata configuration + file for prereleases, tagged as the latest prerelease. In the metadata, + change the tags for any past Stack prereleases to indicate that they are no + longer the latest prerelease. - * For release candidates, also include a link to the Github Release (`https://github.com/commercialhaskell/stack/releases/tag/vX.Y.Z`) to download it. `[RC]` + ### G: Announce the release candidate -* Update fpco/stack-build Docker images with new version + Announce the release candidate: - * Under [commercialhaskell/stackage/automated/dockerfiles](https://github.com/commercialhaskell/stackage/tree/master/automated/dockerfiles/), add `lts-X.Y/Dockerfile` (where `X.Y` is the latest stackage LTS version), containing (note where X.Z is the previous LTS version, and X.Y.Z is the newly released stack version) + * on the + [Haskell Community](https://discourse.haskell.org/c/announcements/10/l/latest) + forum; - ``` - FROM $DOCKER_REPO:lts-X.Z - ARG STACK_VERSION=X.Y.Z - RUN wget -qO- https://github.com/commercialhaskell/stack/releases/download/v$STACK_VERSION/stack-$STACK_VERSION-linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C /usr/local/bin '*/stack' - ``` + * in the Haskell Foundation's + [general](https://matrix.to/#/#haskell-foundation-general:matrix.org) room + (address `#haskell-foundation-general:matrix.org`) on + [Matrix](https://matrix.org/); - * Run `./build.sh lts-X.Y` and test that the new image has the new version of Stack (e.g. `docker run --rm fpco/stack-build:lts stack --version`). + * in the Haskell + [Stack and Stackage](https://matrix.to/#/#haskell-stack:matrix.org) room + (address `#haskell-stack:matrix.org`) on [Matrix](https://matrix.org/); + and - * Run `./build.sh --push lts-X.Y && ./build.sh --push --small lts-X.Y` to push the new image to the registry. 
+ * in Reddit's [Haskell](https://www.reddit.com/r/haskell/) community. + In each case, use the subject (change 'first' to 'second' etc for subsequent + release candidates): -## Build Linux static binary distribution with Nix + * `[ANN] First release candidate for Stack X.Y.Z` -**NOTE: We have switched back to Alpine Linux for building static binaries, done by CI. Leaving this section for future reference.** + In the message, include: -These instructions are tested on Ubuntu 16.04, but theoretically should work on any Linux distribution. + * a link to the release on GitHub + (`https://github.com/commercialhaskell/stack/releases/tag/rc/vX.Y.Z.A`) to + download it -- Install nix (tested with v2.0.4 and v2.1.2, but should work with any) + * the release description from Github. - ``` - curl https://nixos.org/nix/install | sh - ``` +=== "Final Release" -- Install and authenticate cachix (first two steps at https://cachix.org/ after signing up) + ### A: Integration tests workflow passes + Ensure that the GitHub + [Integration Tests workflow](https://github.com/commercialhaskell/stack/actions?query=workflow%3A%22Integration+tests%22) + passes on the branch that you are releasing. -- Add nh2's cache: + This workflow will run automatically for `rc/*` branches. - ``` - cachix use static-haskell-nix - ``` + ### B: Push a Git tag - NOTE: to clear cache index, use `rm $HOME/.cache/nix/binary-cache-v5.sqlite*` (useful if someone else uploads new stuff to the cache and you want to use it right away). The recent `narinfo-cache-positive`/`negative-ttl` options might also help. + Push a Git tag. The tag should be `vX.Y.Z`, where `X.Y.Z` matches the + version in `package.yaml`. 
-- Check out stack commit to be released to `~/stack-release` (or elsewhere, in which case adjust following instructions) + For example, command: -- `rm -f ~/stack-release/*.cabal`, to ensure it's regenerated + ~~~text + git tag -m vX.Y.Z vX.Y.Z + git push origin vX.Y.Z + ~~~ -- clone https://github.com/nh2/static-haskell-nix recursively (last known to work with commit 725ceb2479637b3b3ab29298a1bc0e48c54984c9) + ### C: Edit the draft GitHub release, and publish it -- in `static-stack` directory, run (from `static-stack/README.md`): + Wait for the GitHub + [Integration Tests workflow](https://github.com/commercialhaskell/stack/actions?query=workflow%3A%22Integration+tests%22) + to complete for the tag you just created. This will create a draft GitHub + release and upload the bindists (plus signatures and hashes) to it. - ``` - $(nix-build --no-link -A run-stack2nix-and-static-build-script --argstr stackDir ~/stack-release) - ``` + Edit the draft + [GitHub release](https://github.com/commercialhaskell/stack/releases/): -- Run integration tests against the static binary [TODO: improve this process by adding full support in `release.hs` or the integration tests for testing a binary built elsewhere] + * Add the ChangeLog to the description. + * Get the list of contributors to the release and add it to the + description. For example, command: - - In `~/stack-release`, run `stack build --flag stack:integration-tests stack:stack-integration-test` - - Copy binary built above to place where `stack build` normally puts the `stack binary` (e.g. 
`cp /nix/store/7vl1xvlbbqjvf864inz5vw7z2z1k4nmw-stack-2.1.0.1/bin/stack /home/vagrant/stack-release/.stack-work/install/x86_64-linux/custom-snapshot-for-building-stack-with-ghc-8.2.2-PyNP5UoO8Ott/8.2.2/bin/stack`; figure it out using `stack exec which stack`) - - Run `stack exec stack-integration-test` + === "Unix-like" -- Copy the binary built above (in `/nix/store/XXX-stack-X.Y.Z/bin/stack`) to `~/stack-release/_release/bin/stack-X.Y.Z-linux-x86_64/stack` (replace `X.Y.Z` with the version, and the `/nix/store/*` path with that output at the end of the previous command) + ~~~text + git shortlog -s origin/release..HEAD|sed 's/^[0-9 \t]*/* /'|LC_ALL=C sort -f + ~~~ -- Package, sign, and upload to Github using stack's release script in the stack directory: + === "Windows" - ``` - cd ~/stack-release - stack etc/scripts/release.hs --no-test-haddocks --binary-variant=static --build-args=--dry-run upload - ``` + ~~~text + (git shortlog -s origin/release..HEAD) -Replace '^[0-9 \t]*', '* ' | Sort-Object + ~~~ - (adding `--build-args=--dry-run` ensures the binary you copied will be used rather than building a new one) + in PowerShell. -- Download the bindist from github and double check that the `stack` in it is actually static (use `ldd /path/to/stack`) and that `--version` reports correctly (and not dirty). + Publish the GitHub release. + ### D: Consider adding other platforms to the GitHub release -## Setting up a Windows VM for releases + The + [Integration Tests workflow](https://github.com/commercialhaskell/stack/actions?query=workflow%3A%22Integration+tests%22) + is limited to the platforms supported by the GitHub-hosted runners + (currently, x86_64 and macOS/AArch64) and any self-hosted runners + (currently, only Linux/AArch64). However, it is possible to edit the GitHub + release to include binary distributions for other platforms. The + prerequisites are: -These instructions are a bit rough, but has the steps to get the Windows machine -set up. 
+ * a computer with that platform (operating system, machine architecture); -## Using Virtualbox + * a sufficiently-recent existing version of Stack for that platform; - 1. Download Virtualbox VM image: - https://developer.microsoft.com/en-us/microsoft-edge/tools/vms/mac/ + * a tool to print SHA checksums, such as `shasum` on Linux and macOS; and - 2. Launch the VM using Virtualbox and the image downloaded + * the GNU Privacy Guard tool (`gpg`), which has had imported the private key + used to sign Stack executables (see further below). - 3. Adjust settings: - * Number of CPUs: at least half the host's - * Memory: at least 3 GB - * Video RAM: the minimum recommended by Virtualbox - * Enable 3D and 2D accelerated mode (this makes programs with lots of console output much faster) - * Enabled shared clipboard (in VM window, Devices->Shared Clipboard->Both Directions) + The steps are similar to those in the workflow: -Now continue to the **General Windows setup** subsection below. + 1. Change to the root directory of the Stack project. -## Using ESXi + 2. `stack etc/scripts/release.hs check`, to check before building. -1. Download the **MSEdge on Win10** VM for **VMWare (Windows, Mac)**. -2. Unzip the file downloaded file -3. Upload the VMDK file to the ESXi datastore -4. SSH into ESXi CLI and run: - - `vmkfstools -i /vmfs/volumes/datastore1/win10-msedge/MSEdge-Win10-VMWare-disk1-ORIG.vmdk /vmfs/volumes/datastore1/win10-msedge/MSEdge-Win10-VMWare-disk1.vmdk -d thin`. This converts the disk to a format that is compatible with ESXi. You may have to run `esxcli system module load -m multiextent` first (see https://www.virtuallyghetto.com/2012/09/2gbsparse-disk-format-no-longer-working.html). - - `vmkfstools -X 80G /vmfs/volumes/datastore1/win10-msedge/MSEdge-Win10-VMWare-disk1.vmdk`. This makes the disk twice as large, which helps avoid running out of disk space. -5. 
In the ESXi web UI: - - Create a new VM - - Give is 8192 MB of memory - - Give it 4 virtual CPUs - - Remove the default hard disk - - Add an **Existing hard disk** - - Select `/datastore1/win10-msedge/MSEdge-Win10-VMWare-disk1.vmdk` - - Power on the VM - - In Windows settings: - - Search for "disk management" - - Extend the partition to take the whole disk. - - In all likelihood, you will want to search for "remote desktop" and enable remote desktop. Then you can connect to the VM using Microsoft Remote Desktop instead of using it from within the ESXi web UI. + 3. `stack etc/scripts/release.hs build`, to build. The output 'assets' + (`stack--- ...`) will be in + the `_release` directory in the root directory of the Stack project. -Now continue to the **General Windows setup** subsection below. + 4. For each of the output assets, create a corresponding SHA 256 file with + a `.sha256` extension. For example (where `` is the name of the + file): -## General Windows setup + ~~~text + shasum -a 256 > .sha256 + ~~~ - 5. In **Settings**->**Update & Security**->**Windows Update**->**Advanced options**: - * Change **Choose how updates are installed** to **Notify to schedule restart** - * Check **Defer upgrades** (this avoids rebooting in the middle of the stack build) + 5. For each of the output assets, create a corresponding ASCII-armored + signature file with an `.asc` extension using `gpg`. For example (where + `` is the name of the file): - 6. In **Settings**->**System**->**Power & sleep** + ~~~text + gpg --digest-algo=sha512 --detach-sig --armor -u 0x575159689BEFB442 + ~~~ - * Disable turning off the screen or going to sleep when plugged in + 6. Edit the GitHub release to include the output assets and their + corresponding `.sha256` and `.asc` files. - 7. Install msysgit: https://msysgit.github.io/ + The private key used to sign Stack executables can be exported from a + version of `gpg` to which it has previously been imported with: - 8. 
Install TortoiseHG: https://tortoisehg.bitbucket.io/download/index.html + ~~~text + gpg --armor --export-secret-key 0x575159689BEFB442 + ~~~ - 9. Install nsis-2.46.5-Unicode-setup.exe from http://www.scratchpaper.com/ + The private key, so obtained, can be imported into `gpg` by: -10. Install Stack using the Windows 64-bit installer + 1. Commanding `gpg --import`. - a. Restart any command prompts to ensure they get new `%STACK_ROOT%` value. + 2. Pasting the private key. -11. Visit https://hackage.haskell.org/ in Edge to ensure system has correct CA - certificates + 3. Entering Ctrl+D and Enter. -13. Run in command prompt: + ### E: Upload to Hackage and reset branches - md C:\p - md C:\tmp - cd /d C:\p + Upload the `stack` package to Hackage with the command: -14. Create `C:\p\env.bat`: + ~~~text + stack upload . --pvp-bounds=lower + ~~~ - SET TEMP=C:\tmp - SET TMP=C:\tmp - SET PATH=C:\Users\IEUser\AppData\Roaming\local\bin;"c:\Program Files\Git\usr\bin";"C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin";%PATH% + Reset the `release` branch to the released commit. For example, with the + commands: -15. Run `C:\p\env.bat` (do this every time you open a new command prompt) + ~~~text + git checkout release + git merge --ff-only vX.Y.Z + git push origin release + ~~~ -16. `stack exec -- gpg --import`, and paste in the your GPG secret key (must be done using `stack exec` because that uses the right keyring for the embedded msys GPG; you can get the key from another machine with `gpg --export-secret-keys --armor `) + Update the `stable` branch to the released commit. For example, with the + commands: -17. 
Run in command prompt (adjust the `user.email` and `user.name` settings): + ~~~text + git checkout stable + git merge --ff-only vX.Y.Z + git push origin stable + ~~~ - git config --global user.email manny@fpcomplete.com - git config --global user.name "Emanuel Borsboom" - git config --global push.default simple - git config --global core.autocrlf true - git clone https://github.com/borsboom/stack-installer.git - git clone -b stable --reference C:\p\stack-release https://github.com/commercialhaskell/stack.git stack-release - cd stack-release - stack install cabal-install + Merge any changes made in the RC, `release` or `stable` branches to the + `master` branch. Be careful about version and `ChangeLog.md`. It is best to + do this by making a `ci/merge-stable-to-master` branch and waiting for CI to + pass, then merging. If anything is complicated to merge, consider making it + a pull request and getting it reviewed rather than merging immediately. + Delete the RC branch, both locally and on the remote. For example with the + commands: -## Setting up an ARM VM for releases + ~~~text + git branch -d rc/vX.Y + git push origin :rc/vX.Y + ~~~ -1. Use Scaleway to start ARMv7 and ARM64 VMs. + ### F: Activate the version on Read The Docs -2. Select Ubuntu Xenial as the operating system + Activate the version for new release tag, on + [readthedocs.org](https://readthedocs.org/projects/stack/versions/). -3. Install the correct version of LLVM: `sudo apt-get install -y llvm-3.9` (appropriate for GHC 8.2, might need different version for other GHCs) + Ensure that the `stable` documentation has updated. -4. Symlink opt-3.X to `opt`: `sudo ln -s opt-3.9 /usr/bin/opt` (adjust the version if you installed a different one above) + ### G: Update get.haskellstack.org redirects -5. 
Switch to gold linker: + Update the https://get.haskellstack.org redirects by updating the + `_redirects` file in the root of the + `commercialhaskell/get-haskellstack-org` GitHub + [repository](https://github.com/commercialhaskell/get-haskellstack-org). - ``` - update-alternatives --install "/usr/bin/ld" "ld" "/usr/bin/ld.gold" 20 - update-alternatives --install "/usr/bin/ld" "ld" "/usr/bin/ld.bfd" 10 - update-alternatives --config ld - ``` + For further information, see the + [get.haskellstack.org redirects](haskellstack.org.md#gethaskellstackorg-redirects) + documentation. -6. Add swap space: + Test with the commands: - ``` - dd if=/dev/zero of=/swapfile1 bs=1024 count=4194304 - mkswap /swapfile1 - swapon /swapfile1 - echo '/swapfile1 none swap sw 0 0' >>/etc/fstab - ``` + === "Unix-like" -7. Install additional tools: + ~~~text + curl -vL https://get.haskellstack.org/stable/linux-x86_64.tar.gz >/dev/null + curl -vL https://get.haskellstack.org/upgrade/linux-x86_64.tar.gz >/dev/null + ~~~ - ``` - apt-get update && apt-get install -y unzip gpg - ``` + === "Windows" -8. Import your GPG key (`gpg --import` and paste the private key) + ~~~text + curl -vL https://get.haskellstack.org/stable/linux-x86_64.tar.gz >NUL + curl -vL https://get.haskellstack.org/upgrade/linux-x86_64.tar.gz >NUL + ~~~ -9. Git settings (adjust for your preferences/email/name) + and make sure it redirects to the new version. - git config --global push.default simple - git config --global user.email "manny@fpcomplete.com" - git config --global user.name "Emanuel Borsboom" + ### H: Update versions and `ChangeLog.md` for 'unreleased' -10. Install build tools and dependencies packages + In the `stable` branch: - sudo apt-get install -y g++ gcc libc6-dev libffi-dev libgmp-dev make xz-utils zlib1g-dev git gnupg + * `package.yaml`: bump the version number. Bump the third component to an + even number (e.g. from `3.5.1` to `3.5.2`). -11. Install clang+llvm + !!! 
attention - NOTE: the Debian jessie `llvm` package does not work (executables built with it - just exit with "schedule: re-entered unsafely."). + Be sure to update also `stack.cabal` (for example by using + `stack build --dry-run`). - The version of LLVM needed depends on the version of GHC you need. + * `cabal.config`: Ensure the `stack` constraint is set to the same version + as in the `package.yaml`. - * GHC 8.2.2 (the standard for building Stack) + ### I: Update the repository's issue and pull request templates - ``` - wget http://llvm.org/releases/3.9.1/clang+llvm-3.9.1-armv7a-linux-gnueabihf.tar.xz && \ - sudo tar xvf clang+llvm-3.9.1-armv7a-linux-gnueabihf.tar.xz -C /opt - ``` + The repository's issue and pull request templates are the `.github` + directory. Update them to refer to the new release version (`X.Y.Z`). - Run this now and add it to the `.profile`: + ### J: Request update of GHCup's metadata - ``` - export PATH="$HOME/.local/bin:/opt/clang+llvm-3.9.1-armv7a-linux-gnueabihf/bin:$PATH" - ``` + Raise a pull request at the + [`haskell/ghcup-metadata`](https://github.com/haskell/ghcup-metadata) GitHub + repository to request an addition to GHCup's latest metadata configuration + files for releases and 'vanilla' releases, tagged as the latest release. + (The GHCup project will decide whether, and when, to recommend the release.) + In the metadata, change the tags for any past Stack releases to indicate + that they are no longer the latest release. - * GHC 7.10.3 + ### K: Announce the release - ``` - wget http://llvm.org/releases/3.5.2/clang+llvm-3.5.2-armv7a-linux-gnueabihf.tar.xz && \ - sudo tar xvf clang+llvm-3.5.2-armv7a-linux-gnueabihf.tar.xz -C /opt - ``` + Announce the release: - Run this now and add it to the `.profile`: + * on the + [Haskell Community](https://discourse.haskell.org/c/announcements/10/l/latest) + forum. 
- ``` - export PATH="$HOME/.local/bin:/opt/clang+llvm-3.5.2-armv7a-linux-gnueabihf/bin:$PATH" - ``` + * in the Haskell Foundation's + [general](https://matrix.to/#/#haskell-foundation-general:matrix.org) + room (address `#haskell-foundation-general:matrix.org`) on + [Matrix](https://matrix.org/). -12. Install Stack + * in the Haskell + [Stack and Stackage](https://matrix.to/#/#haskell-stack:matrix.org) room + (address `#haskell-stack:matrix.org`) on [Matrix](https://matrix.org/). - Binary: get an [existing `stack` binary](https://github.com/commercialhaskell/stack/releases) -and put it in `~/.local/bin`. + * in Reddit's [Haskell](https://www.reddit.com/r/haskell/) community. - From source (using cabal-install): + In each case, use the subject: - ``` - wget http://downloads.haskell.org/~ghc/7.10.3/ghc-7.10.3-armv7-deb8-linux.tar.xz && \ - tar xvf ghc-7.10.3-armv7-deb8-linux.tar.xz && \ - cd ghc-7.10.3 && \ - ./configure --prefix=/opt/ghc-7.10.3 && \ - sudo make install && \ - cd .. - export PATH="/opt/ghc-7.10.3/bin:$PATH" - wget https://www.haskell.org/cabal/release/cabal-install-1.24.0.0/cabal-install-1.24.0.0.tar.gz &&&&& \ - tar xvf cabal-install-1.24.0.0.tar.gz && \ - cd cabal-install-1.24.0.0 && \ - EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh && \ - cd .. && \ - export PATH="$HOME/.cabal/bin:$PATH" && \ - cabal update - ``` + * `[ANN] Stack X.Y.Z` - Edit `~/.cabal/config`, and set `executable-stripping: False` and `library-stripping: False`. + In the message, include: - ``` - cabal unpack stack && \ - cd stack-* && \ - cabal install && \ - mv ~/.cabal/bin/stack ~/.local/bin - ``` + * the release description from Github. diff --git a/doc/maintainers/self-hosted_runners.md b/doc/maintainers/self-hosted_runners.md new file mode 100644 index 0000000000..7b11eb1d59 --- /dev/null +++ b/doc/maintainers/self-hosted_runners.md @@ -0,0 +1,111 @@ +
+ +# Self-hosted runners + +[GitHub Actions](https://docs.github.com/en/actions) is used to do CI on Stack. + +In the case of the Linux/AArch64 platform, since 8 February 2025, that is done +with a GitHub-hosted runner (currently in public preview). Previously, that was +done in a separate `linux-arm64` job of the `integration-tests.yml` workflow +that ran on a self-hosted runner for Linux and ARM64. Given that the +GitHub-hosted runner is only in preview, the documentation of the self-hosted +runner is being preserved for the time being. + +## Introduction + +The current basic setup was: + +* FP Complete has an Oracle Cloud account that provides a free tier that + includes a really powerful ARM64 machine; +* within Oracle Cloud, FP Complete are running an Ubuntu/ARM64 instance; and +* on that instance, FP Complete are running the GitHub Runner software. + +The runner name is `stack-github-action3` and the machine name is +`stack-github-action3`. + +Occasionally Oracle will turn off the machine because: + +* Oracle thinks it is not being used (because of the free tier); and/or +* other things, like disk space filling up. + +The disk space can fill up due to obsolete Docker images, versions of GHC +installed by Stack or obsolete snapshots created by Stack. + +## Managing the `stack-github-action3` runner + +With the appropriate authority installed on the server, a maintainer can manage +the runner remotely using SSH, with command +`ssh ubuntu@arm-runner.stackage.org`. + +!!! info + + The authority is the addition of the maintainer's SSH public key to the end + of file `~/.ssh/authorized_keys` on the server. + +This is best done using [`tmux`](https://github.com/tmux/tmux/wiki), a terminal +multiplexer, as follows: +~~~sh +$ # In a shell, command tmux to create a new session with a single window with a +$ # single pane (a pseudo terminal). 
The session will be displayed on the screen +$ # by a client: +$ tmux new-session +$ # Send the following command to that pseudo terminal, to connect to the +$ # remote host: +$ ssh ubuntu@arm-runner.stackage.org +$ # In the remote host, change to the actions-runner directory: +$ cd actions-runner +$ # In the remote host, start the runner: +$ ./run.sh +$ # Detach the current client from the session by the key combination of +$ # 'C-b' 'd' (where 'C-b' is CTRL+b). The session will continue to run in the +$ # background: +$ C-b d +~~~ + +The available disk space can be queried with command `df -h`; the relevant entry +is for filesystem `/dev/sda1`. + +If the available space is low, that may be due to unnecessary GHC versions +installed in Stack's `programs` directory. + +## Re-configuring a removed `stack-github-action3` runner + +A self-hosted runner is automatically removed from GitHub if it has not +connected to GitHub Actions for more than 14 days. + +Re-adding a self-hosted runner to GitHub requires it to be configured, and +GitHub will suggest a configuration command like: + +~~~text +./config.sh --url https://github.com/commercialhaskell/stack --token  +~~~ + +However, to re-use and configure the runner machine, you first have to delete +the `.runner` file inside the `actions-runner` directory. This allows the runner +to be registered without having to re-download the self-hosted runner +application. + +## The `ghc-arm-5` runner + +From 9 February 2024, the Haskell Foundation sought to provide an alternative +runner named `ghc-arm-5` but that was based on NixOS and proved to be +incompatible. + +## Alternatives to the self-hosted runners + +One alternative to the self-hosted runners is to build statically-linked Stack +executables for Linux/AArch64 on macOS/AArch64. This can be done thanks to +the multi-architecture Docker images built and published by Olivier Benz, at +https://gitlab.com/benz0li/ghc-musl. 
+ +GitHub provides a GitHub-hosted macOS/AArch64 runner (`macOS-14`). +Unfortunately, that is macOS/M1 and the M1 machine architecture does not support +nested virtualisation. This rules out using Docker, as the runner is itself a +virtual machine. + +However, this solution can be applied locally and the build outputs for the +Linux/AArch64 platform added manually to the result of the GitHub workflow. + +The experimental Haskell script `etc/scripts/release-linux-aarch64.hs` is +intended to facilitate the building of statically-linked Stack executables for +Linux/AArch64 on macOS/AArch64. diff --git a/doc/maintainers/stack_errors.md b/doc/maintainers/stack_errors.md new file mode 100644 index 0000000000..ac3086639c --- /dev/null +++ b/doc/maintainers/stack_errors.md @@ -0,0 +1,698 @@ +
+ +## Stack's errors + +In connection with considering Stack's support of the +[Haskell Error Index](https://errors.haskell.org/) initiative, this page seeks +to take stock of the errors that Stack itself can raise, by reference to the +`master` branch of the Stack repository. Last updated: 2026-04-24. + +* `Stack.main`: catches exceptions from action `commandLineHandler`. + + - `ExitCode` + - `throwIO` + +* `Stack.main`: catches exceptions from action `run`: + + - `ExitCode` (`exitWith`) + - `PrettyException` (`exitFailure`) + - `SomeException` (`exitFailure`) + + The following types are instances of `Control.Exception.Exception` and + `Show`. Some are instances of `Stack.Prelude.PrettyException`. Some data + constructors have strict fields but that is not documented below: + + - `Control.Concurrent.ExecuteException` + + ~~~haskell + [S-2816] = InconsistentDependenciesBug + ~~~ + + - `Options.Applicative.Builder.Extra.OptionsApplicativeExtraException` + + ~~~haskell + [S-2797] = FlagNotFoundBug + ~~~ + + - `Stack.Build.CabalVersionPrettyException` + + ~~~haskell + [S-5973] = CabalVersionNotSupported Version + ~~~ + + - `Stack.Build.ConstructPlan.NotOnlyLocal` + + ~~~haskell + [S-1727] = NotOnlyLocal [PackageName] [Text] + ~~~ + + - `Stack.Build.ExecutePackage` + + `[S-4541]` used in `copyPreCompiled` + + - `Stack.BuildPlan.BuildPlanException` + + ~~~haskell + [S-7571] = UnknownPackages (Path Abs File) (Map PackageName (Maybe Version, Set PackageName)) (Map PackageName (Set PackageIdentifier)) + [S-2045] | SnapshotNotFound SnapName + [S-8559] | NeitherCompilerOrSnapshotSpecified Text + [S-5743] | DuplicatePackagesBug + ~~~ + + - `Stack.CLI.CliPrettyException` + + ~~~haskell + [S-4639] = NoArgumentsBug + ~~~ + + - `Stack.Clean.CleanPrettyException` + + ~~~haskell + [S-9463] = NonLocalPackages [PackageName] + [S-6321] | DeletionFailures [(Path Abs Dir, SomeException)] + ~~~ + + - `Stack.Config.Docker.ConfigDockerException` + + ~~~haskell + [S-8575] = 
SnapshotNotSupportedException (Maybe Project) (Maybe AbstractSnapshot) + ~~~ + + - `Stack.Config.Nix.ConfigNixPrettyException` + + ~~~haskell + [S-2726] = NixCannotUseShellFileAndPackagesException FilePath [Text] + [S-9317] | GHCMajorVersionUnspecified + [S-8605] | OnlyGHCSupported + ~~~ + + - `Stack.ConfigCmd.ConfigCmdPrettyException` + + ~~~haskell + [S-3136] = NoProjectConfigAvailable + [S-6088] | ConfigFileContainsIncludes (Path Abs File) + ~~~ + + - `Stack.Constants.ConstantsException` + + ~~~haskell + [S-6057] = WiredInPackagesNotParsedBug + ~~~ + + - `Stack.Coverage.CoveragePrettyException` + + ~~~haskell + [S-6361] = NonTestSuiteTarget PackageName + [S-2321] | NoTargetsOrTixSpecified + [S-9975] | NotLocalPackage PackageName + ~~~ + + - `Stack.DependencyGraph.DependencyGraphException` + + ~~~haskell + [S-7071] = DependencyNotFoundBug GhcPkgId + ~~~ + + - `Stack.DependencyGraph.DependencyGraphPrettyException` + + ~~~haskell + [S-7151] | PackageNotFound PackageName + ~~~ + + - `Stack.Exec.ExecException` + + ~~~haskell + [S-1541] = InvalidPathForExec FilePath + ~~~ + + - `Stack.Exec.ExecPrettyException` + + ~~~haskell + [S-8251] = PackageIdNotFoundBug String + [S-2483] | ExecutableToRunNotFound + [S-8600] | NoPackageIdReportedBug + [S-7371] | InvalidExecTargets [Text] + ~~~ + + - `Stack.GhcPkg` + + `[S-6716]` used in `unregisterGhcPkgIds` + + - `Stack.Ghci.GhciPrettyException` + + ~~~haskell + [S-6948] = GhciTargetParseException [StyleDoc] + [S-1939] | CandidatesIndexOutOfRangeBug + [S-6716] | InvalidPackageOption String + [S-3600] | FileTargetIsInvalidAbsFile String + [S-9906] | Can'tSpecifyFilesAndTargets + [S-5188] | Can'tSpecifyFilesAndMainIs + ~~~ + + - `Stack.Hoogle.HoogleException` + + ~~~haskell + [S-9669] = HoogleOnPathNotFoundBug + ~~~ + + - `Stack.Hoogle.HooglePrettyException` + + ~~~haskell + [S-1329] = HoogleNotFound StyleDoc + [S-3025] | HoogleDatabaseNotFound + ~~~ + + - `Stack.Init.InitException` + + ~~~haskell + [S-2747] | NoPackagesToIgnoreBug 
+ ~~~ + + - `Stack.Init.InitPrettyException` + + ~~~haskell + [S-8332] = SnapshotDownloadFailure SomeException + [S-8009] | ConfigFileAlreadyExists FilePath + [S-5267] | PackageNameInvalid [FilePath] + [S-1833] | NoMatchingSnapshot (NonEmpty SnapName) + [S-6395] | SnapshotMismatch RawSnapshotLocation String + [S-2422] | SnapshotPartial RawSnapshotLocation String + ~~~ + + - `Stack.List.ListPrettyException` + + ~~~haskell + [S-4926] = CouldNotParsePackageSelectors [StyleDoc] + ~~~ + + - `Stack.Lock.LockPrettyException` + + ~~~haskell + [S-1353] = WritingLockFileError (Path Abs File) Locked + ~~~ + + - `Stack.Ls.LsPrettyException` + + ~~~haskell + [S-3421] = ParseFailure [Value] + [S-9131] | ParseRecentSnapshotsUrlFailed HttpException + ~~~ + + - `Stack.New.NewPrettyException` + + ~~~haskell + [S-2135] = ProjectDirAlreadyExists String (Path Abs Dir) + [S-1688] | DownloadTemplateFailed Text String VerifiedDownloadException + [S-3650] | LoadTemplateFailed TemplateName FilePath + [S-9582] | ExtractTemplateFailed TemplateName FilePath String + [S-9490] | TemplateInvalid TemplateName StyleDoc + [S-5682] | MagicPackageNameInvalid String + [S-3113] | AttemptedOverwrites [Path Abs File] + ~~~ + + - `Stack.Nix.NixPrettyException` + + ~~~haskell + [S-7384] = CannotDetermineProjectRoot + ~~~ + + - `Stack.PackageDump.PackageDumpException` + + ~~~haskell + [S-4257] = MissingSingleField Text (Map Text [Line]) + [S-2016] | Couldn'tParseField Text [Line] + ~~~ + + - `Stack.Query.QueryException` + + ~~~haskell + [S-4419] = SelectorNotFound [Text] + [S-8422] | IndexOutOfRange [Text] + [S-4360] | NoNumericSelector [Text] + [S-1711] | CannotApplySelector Value [Text] + ~~~ + + - `Stack.Runners.RunnersException` + + ~~~haskell + [S-7144] = CommandInvalid + [S-8314] | DockerAndNixInvalid + [S-8641] | NixWithinDockerInvalid + [S-5107] | DockerWithinNixInvalid + ~~~ + + - `Stack.SDist.SDistPrettyException` + + ~~~haskell + [S-6439] = CheckException (NonEmpty PackageCheck) + [S-9595] | 
CabalFilePathsInconsistentBug (Path Abs File) (Path Abs File) + [S-7875] | ToTarPathException + ~~~ + + - `Stack.Script.ScriptException` + + ~~~haskell + [S-4994] = MutableDependenciesForScript [PackageName] + [S-1691] | AmbiguousModuleName ModuleName [PackageName] + [S-5067] | ArgumentsWithNoRunInvalid + [S-9469] | NoRunWithoutCompilationInvalid + [S-5055] | FailedToParseScriptFileAsDirBug (Path Rel File) + [S-9464] | FailedToParseFileAsDirBug (Path Abs Dir) + ~~~ + + - `Stack.Setup.PerformPathCheckingException` + + ~~~haskell + [S-1991] = ProcessExited ExitCode String [String] + ~~~ + + - `Stack.Setup.SetupException` + + ~~~haskell + [S-2076] = WorkingDirectoryInvalidBug + [S-3967] | StackBinaryArchiveZipUnsupportedBug + ~~~ + + - `Stack.Setup.SetupPrettyException` + + ~~~haskell + [S-7441] = GHCInstallFailed SomeException StyleDoc String [String] (Path Abs Dir) (Path Abs Dir) (Path Abs Dir) + [S-2476] | InvalidGhcAt (Path Abs File) SomeException + [S-4764] | ExecutableNotFound [Path Abs File] + [S-9953] | SandboxedCompilerNotFound [String] [Path Abs Dir] + [S-1852] | UnsupportedSetupCombo OS Arch + [S-2126] | MissingDependencies [String] + [S-9443] | UnknownCompilerVersion (Set Text) WantedCompiler (Set ActualCompiler) + [S-6810] | UnknownOSKey Text + [S-5159] | GHCSanityCheckCompileFailed SomeException (Path Abs File) + [S-8948] | RequireCustomGHCVariant + [S-2905] | ProblemWhileDecompressing (Path Abs File) + [S-9561] | SetupInfoMissingSevenz + [S-7748] | UnsupportedSetupConfiguration + [S-5308] | MSYS2NotFound Text + [S-5127] | UnwantedCompilerVersion + [S-1540] | UnwantedArchitecture + [S-8668] | GHCInfoNotValidUTF8 UnicodeException + [S-4878] | GHCInfoNotListOfPairs + [S-2965] | GHCInfoMissingGlobalPackageDB + [S-5219] | GHCInfoMissingTargetPlatform + [S-8299] | GHCInfoTargetPlatformInvalid String + [S-2574] | CabalNotFound (Path Abs File) + [S-8488] | GhcBootScriptNotFound + [S-1128] | HadrianScriptNotFound + [S-1906] | URLInvalid String + [S-1648] | 
UnknownArchiveExtension String + [S-4509] | Unsupported7z + [S-3158] | TarballInvalid String + [S-5252] | TarballFileInvalid String (Path Abs File) + [S-1827] | UnknownArchiveStructure (Path Abs File) + [S-9476] | StackReleaseInfoNotFound String + [S-4461] | StackBinaryArchiveNotFound [String] + [S-6617] | HadrianBindistNotFound + [S-7227] | DownloadAndInstallCompilerError + [S-6636] | StackBinaryArchiveUnsupported Text + [S-7871] | StackBinaryNotInArchive String Text + [S-5046] | FileTypeInArchiveInvalid Entry Text + [S-4132] | BinaryUpgradeOnOSUnsupported OS + [S-3249] | BinaryUpgradeOnArchUnsupported Arch + [S-4230] | ExistingMSYS2NotDeleted (Path Abs Dir) IOException + ~~~ + + - `Stack.StackException` + + ~~~haskell + [S-2186] = InvalidReExecVersion String String + ~~~ + + - `Stack.Storage.User.StorageUserException` + + ~~~haskell + [S-8196] = CompilerFileMetadataMismatch + [S-5378] | GlobalPackageCacheFileMetadataMismatch + [S-2673] | GlobalDumpParseFailure + [S-8441] | CompilerCacheArchitectureInvalid Text + ~~~ + + - `Stack.Templates.TemplatesPrettyException` + + ~~~haskell + [S-8143] = DownloadTemplatesHelpFailed HttpException + [S-6670] | TemplatesHelpEncodingInvalid String UnicodeException + ~~~ + + - `Stack.Types.Build.BuildException` + + ~~~haskell + [S-7178] = Couldn'tFindPkgId PackageName + [S-3127] | Couldn'tParseTargets [Text] + [S-2154] | UnknownTargets (Set PackageName) (Map PackageName Version) (Path Abs File) + [S-1995] | TestSuiteFailure PackageIdentifier (Map Text (Maybe ExitCode)) (Maybe (Path Abs File)) ByteString + [S-3819] | TestSuiteTypeUnsupported TestSuiteInterface + [S-5797] | LocalPackageDoesn'tMatchTarget PackageName Version Version + [S-3118] | NoSetupHsFound (Path Abs Dir) + [S-4925] | InvalidGhcOptionsSpecification [PackageName] + [S-5510] | LocalPackagesPresent [PackageIdentifier] + [S-7168] | CouldNotLockDistDir (Path Abs File) + [S-7868] | TaskCycleBug PackageIdentifier + [S-8923] | PackageIdMissingBug PackageIdentifier + 
[S-7371] | AllInOneBuildBug + [S-6739] | MultipleResultsBug PackageName [DumpPackage] + [S-3121] | TemplateHaskellNotFoundBug + [S-6901] | HaddockIndexNotFound + [S-5452] | ShowBuildErrorBug + [S-2696] | CallStackEmptyBug + ~~~ + + - `Stack.Types.Build.BuildPrettyException` + + ~~~haskell + [S-4804] = ConstructPlanFailed [ConstructPlanException] (Path Abs File) (Path Abs Dir) ParentMap (Set PackageName) (Map PackageName [PackageName]) + [S-7282] | ExecutionFailure [SomeException] + [S-7011] | CabalExitedUnsuccessfully ExitCode PackageIdentifier (Path Abs File) [String] (Maybe (Path Abs File)) [Text] + [S-6374] | SetupHsBuildFailure ExitCode (Maybe PackageIdentifier) (Path Abs File) [String] (Maybe (Path Abs File)) [Text] + [S-8506] | TargetParseException [StyleDoc] + [S-7086] | SomeTargetsNotBuildable [(PackageName, NamedComponent)] + [S-8664] | InvalidFlagSpecification [UnusedFlags] + [S-8100] | GHCProfOptionInvalid + [S-1727] | NotOnlyLocal [PackageName] [Text] + [S-6362] | CompilerVersionMismatch (Maybe (ActualCompiler, Arch)) (WantedCompiler, Arch) GHCVariant CompilerBuild VersionCheck WantedCompilerSetter Text + [S-4660] | ActionNotFilteredBug StyleDoc + [S-7987] | TestSuiteExeMissing Bool String PackageName StackUnqualCompName + [S-8027] | CabalCopyFailed Bool BuildPrettyException + ~~~ + + - `Stack.Types.Compiler.CompilerException` + + ~~~haskell + [S-7903] = GhcjsNotSupported + [S-7972] | PantryException PantryException + ~~~ + + - `Stack.Types.Config.Exception.ConfigException` + + ~~~haskell + [S-8981] | ParseCustomSnapshotException Text ParseException + [S-2206] | NoProjectConfigFound (Path Abs Dir) (Maybe Text) + [S-4964] | UnexpectedArchiveContents [Path Abs Dir] [Path Abs File] + [S-2040] | UnableToExtractArchive Text (Path Abs File) + [S-1641] | BadStackVersionException VersionRange + [S-8773] | NoSuchDirectory FilePath + [S-4335] | NoSuchFile FilePath + [S-3938] | ParseGHCVariantException String + [S-8530] | BadStackRoot (Path Abs Dir) + [S-7613] | 
Won'tCreateStackRootInDirectoryOwnedByDifferentUser (Path Abs Dir) (Path Abs Dir) + [S-8707] | UserDoesn'tOwnDirectory (Path Abs Dir) + [S-3605] | ManualGHCVariantSettingsAreIncompatibleWithSystemGHC + [S-6816] | NixRequiresSystemGhc + [S-5027] | NoSnapshotWhenUsingNoProject + [S-3803] | NoLTSWithMajorVersion Int + [S-5472] | NoLTSFound + ~~~ + + - `Stack.Types.Config.Exception.ConfigPrettyException` + + ~~~haskell + [S-6602] = ParseConfigFileException (Path Abs File) ParseException + [S-7462] | StackWorkEnvNotRelativeDir String + [S-5470] | DuplicateLocalPackageNames [(PackageName, [PackageLocation])] + [S-6854] | BadMsysEnvironment MsysEnvironment Arch + [S-5006] | NoDefaultMsysEnvironmentBug + [S-8398] | ConfigFileNotProjectLevelBug + [S-6890] | NoExecutablePath String + ~~~ + + - `Stack.Types.Config.ParseAbsolutePathException` + + ~~~haskell + [S-9437] = ParseAbsolutePathException String String + ~~~ + + - `Stack.Types.Docker.DockerException` + + ~~~haskell + [S-3223] = DockerMustBeEnabledException + [S-9779] | OnlyOnHostException + [S-9105] | InspectFailedException String + [S-6626] | NotPulledException String + [S-5841] | InvalidImagesOutputException String + [S-9608] | InvalidPSOutputException String + [S-2240] | InvalidInspectOutputException String + [S-6092] | PullFailedException String + [S-6218] | DockerTooOldException Version Version + [S-8252] | DockerVersionProhibitedException [Version] Version + [S-6170] | BadDockerVersionException VersionRange Version + [S-5827] | InvalidVersionOutputException + [S-7112] | HostStackTooOldException Version (Maybe Version) + [S-5832] | ContainerStackTooOldException Version Version + [S-4078] | CannotDetermineProjectRootException + [S-7058] | DockerNotInstalledException + [S-6894] | UnsupportedStackExeHostPlatformException + [S-1512] | DockerStackExeParseException String + ~~~ + + - `Stack.Types.GhcPkgExe` + + ~~~haskell + [S-6512] = CannotParse String String String + [S-3384] | CannotOpenDBForModification FilePath 
IOException + [S-1430] | SingleFileDBUnsupported FilePath + [S-5996] | ParsePackageInfoExceptions String + [S-3189] | CannotFindPackage PackageArg (Maybe FilePath) + [S-9323] | CannotParseRelFileBug String + [S-7651] | CannotParseDirectoryWithDBug String + [S-6590] | CannotRecacheAfterUnregister (Path Abs Dir) SomeException + ~~~ + + - `Stack.Types.GhcPkgId.GhcPkgIdParseFail` + + ~~~haskell + [S-5359] = GhcPkgIdParseFail Text + ~~~ + + - `Stack.Types.Package.PackageException` + + ~~~haskell + [S-8072] = PackageInvalidCabalFile (Either PackageIdentifierRevision (Path Abs File)) (Maybe Version) [PError] [PWarning] + [S-5394] | MismatchedCabalIdentifier PackageIdentifierRevision PackageIdentifier + [S-2203] | CabalFileNameParseFail FilePath + [S-8854] | CabalFileNameInvalidPackageName FilePath + [S-4623] | ComponentNotParsedBug + ~~~ + + - `Stack.Types.Snapshot.TypesSnapshotException` + + ~~~haskell + [S-8787] = ParseSnapshotException Text + [S-4865] | FilepathInDownloadedSnapshot Text + ~~~ + + - `Stack.Types.Storage.StoragePrettyException` + + ~~~haskell + [S-8835] = StorageMigrationFailure Text (Path Abs File) SomeException + ~~~ + + - `Stack.Types.TemplateName.TypesTemplateNameException` + + ~~~haskell + [S-7410] = DefaultTemplateNameNotParsedBug String + ~~~ + + - `Stack.Unpack.UnpackPrettyException` + + ~~~haskell + [S-3515] = UnpackDirectoryAlreadyExists (Set (Path Abs Dir)) + [S-2628] | CouldNotParsePackageSelectors [String] + ~~~ + + - `Stack.Upgrade.UpgradePrettyException` + + ~~~haskell + [S-8761] = SnapshotOptionInvalid + [S-3642] | NeitherBinaryOrSourceSpecified + [S-8716] | ExecutableFailure + [S-7114] | CommitsNotFound String String + [S-9668] | StackInPackageIndexNotFound + [S-6648] | VersionWithNoRevision + ~~~ + + - `Stack.Upload.UploadPrettyException` + + ~~~haskell + [S-2256] = AuthenticationFailure + [S-6108] | ArchiveUploadFailure Int [String] String + [S-2837] | DocsTarballInvalid [(String, Path Abs File)] + [S-3179] | ItemsInvalid [FilePath] + 
[S-3030] | NoItemSpecified String + [S-5908] | PackageDirectoryInvalid [FilePath] + [S-7274] | PackageIdNotSpecifiedForDocsUploadBug + [S-5860] | PackageIdSpecifiedForPackageUploadBug + [S-5955] | TarGzFileNameInvalidBug String + ~~~ + + - `System.Process.Pager.PagerException` + + ~~~haskell + [S-9392] = PagerExitFailure CmdSpec Int + ~~~ + + \* The instance of `Show` is derived. + +* `Stack.Build.Execute.singleBuild`: catches exceptions in `cabal ...` + + `throwM` + +* `Stack.Build.Source.getFileDigestMaybe`: catches exceptions in + `liftM Just . withSourceFile fp $ getDigest` + + `throwM` + +* `Stack.Config.configFromConfigMonoid`: + + Presented as a warning rather than as an error: + + ~~~text + [S-8432] + Stack's 'programs' path is . It contains a space character. This will + prevent building with GHC 9.4.1 or later. It also has has no alternative + short ('8 dot 3') name. This will cause problems with packages that use the + GNU project's 'configure' shell script. + + To avoid such problems, use the local-programs-path non-project specific + configuration option to specify an alternative space-free path. + ~~~ + + or + + ~~~text + [S-8432] + Stack's 'programs' path is . It contains a space character. This will + prevent building with GHC 9.4.1 or later. + + To avoid such problems, use the local-programs-path non-project specific + configuration option to specify an alternative space-free path. + ~~~ + +* `Stack.Coverage.generateHpcReport`: catches exceptions from + `findPackageFieldForBuiltPackage` + + ~~~text + + ~~~ + +* `Stack.Coverage.generateHpcReportInternal`: + + ~~~text + [S-4634] Didn't find .tix for - expected to find it at . + ~~~ + +* `Stack.Coverage.generateHpcReportInternal`: + + ~~~text + [S-8215] + Error occurred while producing " + ~~~ + +* `Stack.Coverage.generateHpcReportInternal`: + + ~~~text + [S-6829] Error: The did not consider any code. One possible cause of this is + if your test-suite builds the library code (see Stack issue #1008). 
It may + also indicate a bug in Stack or the hpc program. Please report this issue if + you think your coverage report should have meaningful results. + ~~~ + +* `Stack.Coverage.readTixOrlog`: + + ~~~text + [S-3521] Error while reading tix: + ~~~ + +* `Stack.Coverage.readTixOrlog`: + + ~~~text + [S-7786] Failed to read tix file + ~~~ + +* `Stack.Coverage.updateTixFile`: + + ~~~text + [S-2887] Failed to read + ~~~ + +* `Stack.Ghci.buildDepsAndInitialSteps`: catches exception from + `buildLocalTargets` + + ~~~text + + ~~~ + +* `Stack.GhcPkg.createDatabase`: + + ~~~text + [S-9735] Unable to create package database at + ~~~ + +* `Stack.Lock.loadYamlThrow`: + + `Data.Yaml.AesonException` + + `throwIO` + +* `Stack.Package.resolveGlobFiles`: + + `Control.Exception.Base.IOException` + + `throwIO` + +* `Stack.Runners.withConfig`: + + ~~~text + [S-7353] Error when running shouldUpgradeCheck: + ~~~ + +* `Stack.Script.scriptCmd`: + + Error used because warnings are suppressed. + + ~~~text + Ignoring override stack.yaml file for script command: + ~~~ + +* `Stack.Script.scriptCmd`: + + Error used because warnings are suppressed. 
+ + ~~~text + Ignoring SYLGlobalProject for script command + ~~~ + +* `Stack.SDist.getSDistTarball`: + + ~~~text + [S-8399] Error building custom-setup dependencies: + ~~~ + +* `Stack.Setup.downloadStackExe`: catches exceptions from `performPathChecking` + + ~~~text + + ~~~ + +* `Stack.Upload.uploadBytes`: + + ~~~text + [S-2804] forbidden upload + Usually means: you have already uploaded this package/version combination + Ignoring error and continuing, full message from Hackage below: + + ~~~ + +* `Stack.Upload.uploadBytes`: + + ~~~text + [S-4444] service unavailable + This error some times gets sent even though the upload succeeded + Check on Hackage to see if your package is present + + ~~~ diff --git a/doc/maintainers/team_process.md b/doc/maintainers/team_process.md index d9da00ff0b..65c7325df4 100644 --- a/doc/maintainers/team_process.md +++ b/doc/maintainers/team_process.md @@ -5,7 +5,7 @@ ## Purpose of this document This guide is intended to formalize the process by which the `Stack` maintainer -team operates. We expect it'll evolve over time as experience makes clearer +team operates. We expect it will evolve over time as experience makes clearer what works best in practice. Having a well-documented set of processes and responsibilities has been @@ -15,7 +15,6 @@ distributed team ](https://github.com/commercialhaskell/stackage/blob/master/CURATORS.md)) and we hope to replicate that success here. - ## Goals The goals of the `Stack` maintainer team are to: @@ -39,11 +38,10 @@ The goals of the `Stack` maintainer team are to: The sections below detail various activities by the `Stack` team to realize these goals. - ## Issue triage The maintainer team provides ongoing review and responses to newly-filed GitHub -issues and pull requests. From experience, we find it's easiest to have a +issues and pull requests. From experience, we find it is easiest to have a single person "on call" at any given time. Therefore, the team rotates shifts on a weekly basis. 
The "on call" triager is responsible for: @@ -65,7 +63,6 @@ This is one path to getting a lot of experience with the codebase, plus great interaction with the rest of the maintainer team, without necessarily taking on major coding tasks. - ## Time commitment Someone considering joining the issue triager team may be wondering: @@ -84,8 +81,7 @@ etc) that amounts to about one or two hours spent per week being on-call. Again, these estimates will evolve over time as we settle into an optimal process, but for now we anticipate growing the team to about eight members (which is the size of the `Stackage` team as well), each of whom is likely to -spend about two hours in total on upkeep work every eight weeks. - +spend about two hours in total on upkeep work every eight weeks. ## Issue and pull request interaction guidelines @@ -103,7 +99,6 @@ These guidelines apply to all members of the maintainer team. - If you know you’ll be unable to answer for a significant period of time, say so. Even better: ask someone else to take over from you. - ## Assessing feature requests and enhancement PRs **NB: this section is very likely to evolve over time as well.** @@ -123,22 +118,22 @@ Oftentimes though, the request is larger and more far-reaching in nature. Perhaps the requester is unaware of the extent to which his or her change would impact other people's workflows or related components in the code. Conversely, they may have a deep understanding of its implications and feel strongly that -it would be a valuable improvement, whereas it's not so clear to the triager. +it would be a valuable improvement, whereas it is not so clear to the triager. Discerning which requests should be considered "small" and which warrant broader collaboration is admittedly an inexact science. Use your best judgment -and don't sweat the occasional mistake if you approve something you thought was -small but ultimately wasn't. 
+and do not sweat the occasional mistake if you approve something you thought was +small but ultimately was not. -In the case of medium-to-large sized feature requests, it's best to solicit +In the case of medium-to-large sized feature requests, it is best to solicit feedback from at least one or two of the core `Stack` developers. You may use GitHub @mentions to draw the relevant contributors' attention to the issue in -question. If you're not sure who's best to consult you should ask on +question. If you are not sure who's best to consult you should ask on `#stack-collaborators`. -Try to be clear to the requester that you're opening the discussion up to more +Try to be clear to the requester that you are opening the discussion up to more participants and that the proposal will require thoughtful consideration -(probably a majority vote) before any decision is made. +(probably a majority vote) before any decision is made. Also remember that busy schedules, lack of complete familiarity with a given subject, strong-yet-opposing opinions held by equally rational people, and many @@ -150,7 +145,7 @@ on-track and concrete. respectful "no".* *If feasible, propose alternative solutions or educate the user in preference to -complicating `Stack` or accepting scope-creep.* +complicating `Stack` or accepting scope-creep.* With respect to assessing a PR's code, @snoyberg has outlined some [tips for successfully getting one's work merged @@ -158,14 +153,12 @@ With respect to assessing a PR's code, @snoyberg has outlined some into his projects which may be helpful to `Stack` triagers when performing reviews, as well. - ## Real-time communications At present, the maintainer team mostly communicates via a rather quiet channel -called [`#stack-collaborators`](../CONTRIBUTING/#slack-channel) on FP -Complete's Slack workspace, although we may migrate to some other platform in -the near future. 
- +called [`#stack-collaborators`](../CONTRIBUTING.md#slack-channel) in the +Haskell Foundation's Slack workspace, although we may migrate to some other +platform in the future. ## Dealing with support issues @@ -177,7 +170,6 @@ maintainers team makes no guarantees about regularly checking them. *We may decide to offload support questions elsewhere in the future, but for now the most important thing is to direct users to a single destination.* - ## Issue vs pull request Sometimes it is ambiguous whether something should be opened as an issue to @@ -188,7 +180,6 @@ open a PR, it will _definitely_ take less time than opening an issue and describing the change you'd like to make. This logic can sometimes apply to minor code changes. Use your best judgement. - ## Issue closing policy We need to strike a balance in issue handling between keeping a maintainable @@ -198,18 +189,20 @@ of this is: any open issue indicates "this deserves attention in the near future." Before this policy existed, issues actually meant "maybe someone will deal with this someday." -The policy for closing an issue depends entirely on the type of issue we're +The policy for closing an issue depends entirely on the type of issue we are looking at. When closing an issue, please provide a brief explanation for why the issue was closed, such as a reference to a PR, a comment about lack of clarification, etc. -__NOTE__ The following sections establish rules under which a bug report will -be closed by the maintainer team. The goal is to avoid a situation where issues -linger in an indeterminate state. The maintainer team is allowed to disregard -these "rules" at any point. In other words: the goal isn't to allow people to -"lawyer" issues. +!!! note + + The following sections establish rules under which a bug report will be + closed by the maintainer team. The goal is to avoid a situation where issues + linger in an indeterminate state. 
The maintainer team is allowed to + disregard these "rules" at any point. In other words: the goal is not to + allow people to "lawyer" issues. ### Discussion diff --git a/doc/maintainers/version_scheme.md b/doc/maintainers/version_scheme.md new file mode 100644 index 0000000000..d409cfae3c --- /dev/null +++ b/doc/maintainers/version_scheme.md @@ -0,0 +1,63 @@ +
+ +# Version scheme + +A Stack package or executable may have a version with three or four components: +`X.Y.Z` or `X.Y.Z.A`. + +## Development or stable versions + +* Versions with an _even_ `Y` component are development versions (the `master` + branch) + +* Versions with an _odd_ `Y` component are stable versions (the `stable` branch, + or in a `rc/vX.Y` release candidate branch for not-yet-released versions) + +## Unreleased or released versions + +* Versions with an _even_ `Z` component are unreleased versions (including + release candidates) + +* Versions with an _odd_ `Z` component are released versions + +* Except for the `release` branch, all branches must have an even `Z` component + +* Except for the `release`, `stable` and `rc/vX.Y` release candidate branches, + all branches will have a `0` `Z` component + +## Use of a fourth component + +* Release candidate binaries will be released with an odd `A` component + +* Hackage-only dependency compatibility patch releases add an `A` component + (e.g. `v2.5.1.1`, in the `release` branch) + +* Pre-release unstable binaries will be released with the date as the `A` + component (e.g. 
`3.6.0.20241228`) + +## Examples + +* `3.5.0.0`: `v3.5.*` series pre-release branch (`rc/v3.5` branch) + +* `3.5.0.1`: first release candidate for first release of `v3.5.*` series + (`rc/v3.5` branch) + +* `3.5.0.2`: continuing development on pre-release branch + +* `3.5.0.3`: second release candidate for first release of `v3.5.*` series + (`rc/v3.5` branch) + +* `3.5.1`: first release of the `3.5.*` series (`release` branch) + +* `3.5.2.1`: first release candidate for second release of `3.5.*` series + (`rc/v3.5` branch) + +* `3.5.3`: second release of `3.5.*` series (`release` branch) + +* `3.5.3.1`: first Hackage-only patch of `3.5.3` (`release` branch) + +* `3.5.3.2`: second Hackage-only patch of `3.5.3` (`release` branch) + +* `3.6.0`: unstable development code (`master` branch) + +* `3.6.0.20241228`: pre-release snapshot of unstable version (`master` branch) diff --git a/doc/nix_integration.md b/doc/nix_integration.md deleted file mode 100644 index 22c7c8c468..0000000000 --- a/doc/nix_integration.md +++ /dev/null @@ -1,268 +0,0 @@ -
- -# Nix integration - -(since 0.1.10.0) - -When using the Nix integration, Haskell dependencies are handled as usual: They -are downloaded from Stackage and built locally by Stack. Nix is used by Stack to -provide the _non-Haskell_ dependencies needed by these Haskell packages. - -`stack` can automatically create a build environment (the equivalent -of a "container" in Docker parlance) using `nix-shell`, provided Nix -is already installed on your system. To do so, please visit the -[Nix download page](http://nixos.org/nix/download.html). - -There are two ways to create a build environment: - -- providing a list of packages (by "attribute name") from - [Nixpkgs](http://nixos.org/nixos/packages.html), or -- providing a custom `shell.nix` file containing a Nix expression that - determines a *derivation*, i.e. a specification of what resources - are available inside the shell. - -The second requires writing code in Nix's custom language. So use this -option only if you already know Nix and have special requirements, -such as using custom Nix packages that override the standard ones or -using system libraries with special requirements. - -### Checking Nix installation - -Follow the instructions on the -[Nix download page](http://nixos.org/nix/download.html) to install Nix. After -doing so, when opening a terminal, the nix commands (`nix-build`, `nix-shell`, -etc) should be available. If they are not, it should be because the file -located at `$HOME/.nix-profile/etc/profile.d/nix.sh` is not sourced by your shell. - -You should either run `source ~/.nix-profile/etc/profile.d/nix.sh` manually -every time you open a terminal and need Nix or add this command to your -`~/.bashrc` or `~/.bash_profile`. 
- -### Additions to your `stack.yaml` - -Add a section to your `stack.yaml` as follows: -```yaml -nix: - enable: true - packages: [glpk, pcre] -``` - -This will instruct `stack` to build inside a local build environment -that will have the `glpk` and `pcre` libraries installed and -available. Further, the build environment will implicitly also include -a version of GHC matching the configured resolver. Enabling Nix -support means packages will always be built using a GHC available -inside the shell, rather than your globally installed one if any. - -Note that in this mode `stack` can use only GHC versions that have -already been mirrored into the Nix package repository. -The [Nixpkgs master branch](https://github.com/NixOS/nixpkgs/tree/master/pkgs/development/haskell-modules) -usually picks up new versions quickly, but it takes two or three -days before those updates arrive in the `unstable` channel. Release -channels, like `nixos-15.09`, receive those updates only -occasionally -- say, every two or three months --, so you should not -expect them to have the latest compiler available. Fresh NixOS installs -use a release version by default. - -To know for sure whether a given compiler is available on your system, -you can use the command - -```sh -$ nix-env -f "" -qaP -A haskell.compiler.ghc801 -haskell.compiler.ghc801 ghc-8.0.1 -``` - -to check whether it's available. 
If Nix doesn't know that resolver -yet, then you'll see the following error message instead: - -```sh -$ nix-env -f "" -qaP -A haskell.compiler.ghc999 -error: attribute ‘ghc999’ in selection path ‘haskell.compiler.ghc999’ not found -``` - -You can list all known Haskell compilers in Nix with the following: - -```sh -$ nix-instantiate --eval -E "with import {}; lib.attrNames haskell.compiler" -``` - -Alternatively, use `nix repl`, a convenient tool to explore -nixpkgs: - -```sh -$ nix repl -``` - -In the REPL, load nixpkgs and get the same information through -autocomplete: - -```sh -nix-repl> :l -nix-repl> haskell.compiler.ghc -``` - -You can type and evaluate any nix expression in the nix repl, such as -the one we gave to `nix-instantiate` earlier. - -**Note:** currently, stack only discovers dynamic and static libraries -in the `lib/` folder of any nix package, and likewise header files in -the `include/` folder. If you're dealing with a package that doesn't -follow this standard layout, you'll have to deal with that using -a custom shell file (see below). - -### Use stack as normal - -With Nix enabled, `stack build` and `stack exec` will automatically -launch themselves in a local build environment (using `nix-shell` -behind the scenes). - -`stack setup` will start a nix-shell, so it will gather all the required -packages, but given nix handles GHC installation, instead of stack, this will -happen when running `stack build` if no setup has been performed -before. Therefore it is not longer necessary to run `stack setup` unless you -want to cache a GHC installation before running the build. - -If `enable:` is omitted or set to `false`, you can still build in a nix-shell by -passing the `--nix` flag to stack, for instance `stack --nix build`. Passing -any `--nix*` option to the command line will do the same. - -**Known limitation on macOS:** currently, `stack --nix ghci` fails on -macOS, due to a bug in GHCi when working with external shared -libraries. 
- -### The Nix shell - -By default, stack will run the build in a *pure* Nix build environment (or -*shell*), which means two important things: - -- basically **no environment variable will be forwarded** from your user session - to the nix-shell (variables like `HTTP_PROXY` or `PATH` notably will not be - available), -- the build should fail if you haven't specified all the dependencies in the - `packages:` section of the `stack.yaml` file, even if these dependencies are - installed elsewhere on your system. This behaviour enforces a complete - description of the build environment to facilitate reproducibility. - -To override this behaviour, add `pure: false` to your `stack.yaml` or pass the -`--no-nix-pure` option to the command line. - -**Note:** On macOS shells are non-pure by default currently. This is -due soon to be resolved locale issues. So on macOS you'll need to be -a bit more careful to check that you really have listed all -dependencies. - -### Package sources - -By default, `nix-shell` will look for the nixpkgs package set located -by your `NIX_PATH` environment variable. - -You can override this by passing -`--nix-path="nixpkgs=/my/own/nixpkgs/clone"` to ask Nix to use your -own local checkout of the nixpkgs repository. You could in this way -use a bleeding edge nixpkgs, cloned from the -[nixpkgs](http://www.github.com/NixOS/nixpkgs) `master` branch, or -edit the nix descriptions of some packages. Setting - -```yml -nix: - path: [nixpkgs=/my/own/nixpkgs/clone] -``` - -in your `stack.yaml` will do the same. - -## Command-line options - -The configuration present in your `stack.yaml` can be overridden on the -command-line. See `stack --nix-help` for a list of all Nix options. - -## Configuration - -`stack.yaml` contains a `nix:` section with Nix settings. -Without this section, Nix will not be used. - -Here is a commented configuration file, showing the default values: - -```yaml -nix: - - # false by default. 
Must be present and set to `true` to enable Nix, except on - # NixOS where it is enabled by default (see #3938). You can set set it in your - # `$HOME/.stack/config.yaml` to enable Nix for all your projects without having - # to repeat it - # enable: true - - # true by default. Tells Nix whether to run in a pure shell or not. - pure: true - - # Empty by default. The list of packages you want to be - # available in the nix-shell at build time (with `stack - # build`) and run time (with `stack exec`). - packages: [] - - # Unset by default. You cannot set this option if `packages:` - # is already present and not empty. - shell-file: shell.nix - - # A list of strings, empty by default. Additional options that - # will be passed verbatim to the `nix-shell` command. - nix-shell-options: [] - - # A list of strings, empty by default, such as - # `[nixpkgs=/my/local/nixpkgs/clone]` that will be used to override - # NIX_PATH. - path: [] - - # false by default. Whether to add your nix dependencies as nix garbage - # collection roots. This way, calling nix-collect-garbage will not remove - # those packages from the nix store, saving you some time when running - # stack build again with nix support activated. - # This creates a `nix-gc-symlinks` directory in the project `.stack-work`. - # To revert that, just delete this `nix-gc-symlinks` directory. - add-gc-roots: false -``` - -## Using a custom shell.nix file - -Nix is also a programming language, and as specified -[here](#nix-integration) if you know it you can provide to the shell -a fully customized derivation as an environment to use. 
Here is the -equivalent of the configuration used in -[this section](#additions-to-your-stackyaml), but with an explicit -`shell.nix` file (make sure you're using a nixpkgs version later than -2015-03-05): - -```nix -{ghc}: -with (import {}); - -haskell.lib.buildStackProject { - inherit ghc; - name = "myEnv"; - buildInputs = [ glpk pcre ]; -} -``` - -Defining manually a `shell.nix` file gives you the possibility to override some -Nix derivations ("packages"), for instance to change some build options of the -libraries you use, or to set additional environment variables. See the -[Nix manual][nix-manual-exprs] for more. The `buildStackProject` utility -function is documented in the [Nixpkgs manual][nixpkgs-manual-haskell]. In such -case, stack expect this file to define a function of exactly one argument that -should be called `ghc` (as arguments within a set are non-positional), which you -should give to `buildStackProject`. This is the ghc from the resolver you set in -the `stack.yaml`. - -And now for the `stack.yaml` file: - -```yaml -nix: - enable: true - shell-file: shell.nix -``` - -The `stack build` command will behave exactly the same as above. Note -that specifying both `packages:` and a `shell-file:` results in an -error. (Comment one out before adding the other.) - -[nix-manual-exprs]: http://nixos.org/nix/manual/#chap-writing-nix-expressions -[nixpkgs-manual-haskell]: https://nixos.org/nixpkgs/manual/#users-guide-to-the-haskell-infrastructure diff --git a/doc/nonstandard_project_init.md b/doc/nonstandard_project_init.md deleted file mode 100644 index 475c149171..0000000000 --- a/doc/nonstandard_project_init.md +++ /dev/null @@ -1,132 +0,0 @@ -
- -# Non-standard project initialization - -## Introduction -The purpose of this page is to collect information about issues that arise when -users either have an existing cabal project or another nonstandard setup such -as a private hackage database. - -## Using a Cabal File - -New users may be confused by the fact that you must add -dependencies to the package's cabal file, even in the case when you have -already listed the package in the `stack.yaml`. In most cases, dependencies for -your package that are in the Stackage snapshot need *only* be added to the -cabal file. stack makes heavy use of Cabal the library under the hood. In -general, your stack packages should also end up being valid cabal-install -packages. - -### Issues Referenced - - - -## Passing Flags to Cabal - -Any build command, `bench`, `install`, `haddock`, `test`, etc. takes a `--flag` -option which passes flags to cabal. Another way to do this is using the flags -field in a `stack.yaml`, with the option to specify flags on a per package -basis. - -As an example, in a `stack.yaml` for multi-package project with packages `foo`, -`bar`, `baz`: - -``` -flags: - foo: - release: true - bar: - default: true - baz: - manual: true -``` - -It is also possible to pass the same flag to multiple packages, i.e. -`stack build --flag *:necessary` - -Currently one needs to list all of your modules that interpret flags in the -`other-modules` section of a cabal file. `cabal-install` has a different -behavior currently and doesn't require that the modules be listed. This may -change in a future release. - - -### Issues Referenced - - - - - - - - - - - - - -## Selecting a Resolver - -`stack init` or `stack new` will try to default to the current Haskell LTS -present on `https://www.stackage.org/snapshots` if no snapshot has been -previously used locally, and to the latest LTS snapshot locally used for a -build otherwise. 
Using an incorrect resolver can cause a build to fail if the -version of GHC it requires is not present. - -In order to override the resolver entry at project initialization one can pass -`--prefer-lts` or `--prefer-nightly`. These options will choose the latest LTS -or nightly versions locally used. Alternatively the `--resolver` option can be -used with the name of any snapshots on Stackage, or with `lts` or `nightly` to -select the latest versions, disregarding previously used ones. This is not the -default so as to avoid unnecessary recompilation time. - -:TODO: Document `--solver` - -### Issues Referenced - - - - - -## Using git Repositories - -Stack has support for packages that reside in remote git locations. Please see -the [YAML configuration -documentation](yaml_configuration.md#git-and-mercurial-repos) for more -information. - -### Issues Referenced - - - - - -## Private Hackage -Working with a private Hackage is currently supported in certain situations. -There exist special entries in `stack.yaml` that may help you. In a -`stack.yaml` file, it is possible to add lines for packages in your database -referencing the sdist locations via an `http` entry, or to use a `Hackage` -entry. - -The recommended stack workflow is to use git submodules instead of a private -Hackage. Either by using git submodules and listing the directories in the -packages section of `stack.yaml`, or by adding the private dependencies as git -URIs with a commit SHA to the `stack.yaml`. This has the large benefit of -eliminating the need to manage a Hackage database and pointless version bumps. - -For further information see [YAML configuration](yaml_configuration.md) - -### Issues Referenced - - - - - -## Custom Snapshots -See [Custom Snapshots](custom_snapshot.md). - -### Issues Referenced - - - - - - - -## Intra-package Targets -Stack supports intra-package targets, similar to `cabal build COMPONENTS` for -situations when you don't want to build every target inside your package. 
- -Example: -``` -stack build stack:lib:stack -stack test stack:test:stack-integration-test -``` - -Note: this does require prefixing the component name with the package name. - -### Issues referenced - - diff --git a/doc/other_resources.md b/doc/other_resources.md new file mode 100644 index 0000000000..31887f32b3 --- /dev/null +++ b/doc/other_resources.md @@ -0,0 +1,26 @@ +
+ +# Other resources + +There are lots of resources available for learning more about Stack: + +* `stack`, `stack --help` (or `-h`) — lists Stack's commands, and flags and + options common to those commands +* `stack COMMAND --help` (or `-h`) — provides help on the particular Stack + command, including flags and options specific to the command +* `stack --version` — identifies the version and Git hash of the Stack executable +* `--verbose` (or `-v`) — much more info about internal operations (useful for + bug reports) +* The [home page](http://haskellstack.org) +* The [Stack mailing list](https://groups.google.com/d/forum/haskell-stack) +* The [FAQ](faq.md) +* The [haskell-stack tag on Stack Overflow](http://stackoverflow.com/questions/tagged/haskell-stack) +* [Another getting started with Stack tutorial](http://seanhess.github.io/2015/08/04/practical-haskell-getting-started.html) +* [Why is Stack not Cabal?](https://www.fpcomplete.com/blog/2015/06/why-is-stack-not-cabal) + +Package description format specifications supported by Stack: + +* Cabal's: a + [Cabal file](https://cabal.readthedocs.io/en/stable/cabal-package-description-file.html) +* Hpack's: a + [`package.yaml` file](https://github.com/sol/hpack?tab=readme-ov-file#documentation) diff --git a/doc/pantry.md b/doc/pantry.md deleted file mode 100644 index 770e6253b7..0000000000 --- a/doc/pantry.md +++ /dev/null @@ -1,319 +0,0 @@ -
- -# Pantry in Stack - -Beginning with Stack 1.11, Stack uses the Pantry library for its -specification of snapshots and package locations. Under the surface, -Pantry is geared towards reproducible build plans with -cryptographically secure specification of packages and snapshots. - -There are three user-visible components to Pantry's configuration which affect usage of Stack: - -* Snapshot location specification (in the `resolver` field) -* Package location specification (in the `extra-deps` field and inside snapshots) -* Snapshot specification, for creating custom snapshots - -## Snapshot location - -There are essentially four different ways of specifying a snapshot -location: - -* Via a compiler version, which is a "compiler only" snapshot. This - could be, e.g., `resolver: ghc-8.6.5`. -* Via a URL pointing to a snapshot configuration file, e.g. `resolver: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/nightly/2018/8/21.yaml` -* Via a local file path pointing to a snapshot configuration file, e.g. `resolver: my-local-snapshot.yaml` -* Via a _convenience synonym_, which provides a short form for some - common URLs. These are: - * Github: `github:user/repo:path` is treated as `https://raw.githubusercontent.com/user/repo/master/path` - * LTS Haskell: `lts-X.Y` is treated by default as `github:commercialhaskell/stackage-snapshots:lts/X/Y.yaml` - * Stackage Nightly: `nightly-YYYY-MM-DD` is treated by default as `github:commercialhaskell/stackage-snapshots:nightly/YYYY/M/D.yaml` - -By default, LTS Haskell/Stackage Nightly snapshot configurations are retrieved from `commercialhaskell`'s GitHub repository. You can set a custom location in the [snapshot-location-base](yaml_configuration.md#snapshot-location-base) yaml configuration field. 
- -For safer, more reproducible builds, you can optionally specify a URL -together with a cryptographic hash of its content, e.g.: - -```yaml -resolver: - size: 499143 - url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/12/0.yaml - sha256: 781ea577595dff08b9c8794761ba1321020e3e1ec3297fb833fe951cce1bee11 -``` - -Where the `size` is the number of bytes in the file, and `sha256` is its SHA256 -hash. If not provided, the information will automatically be generated and -stored in a lock file. - -## Package location - -Pantry supports three types of package locations: - -* Hackage packages -* Repositories -* Archives - -All three of these formats support optional tree metadata to be added, -which can be used for reproducibility and faster downloads. This -information can automatically be generated in a lock file. - -### Hackage - -Packages can be stated by a name/version combination. The basic syntax -for this is: - -```yaml -extra-deps: -- acme-missiles-0.3 -``` - -Using this syntax, the most recent Cabal file revision available will -be used. For more reproducibility of builds, it is recommended to -state the SHA256 hash of the cabal file contents as well, like this: - -```yaml -extra-deps: -- acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1 -``` - -Or, better yet, including the cabal file size too: - -```yaml -extra-deps: -- acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1,631 -``` - -Or a specific revision number, with `0` being the original file: - -```yaml -extra-deps: -- acme-missiles-0.3@rev:0 -``` - -Note that specifying via SHA256 is slightly more resilient in that it -does not rely on correct ordering in the package index, while revision -number is likely simpler to use. In practice, both should guarantee -equally reproducible build plans. - -Finally, you can include the Pantry tree information. 
The following -would be generated and stored in the lock file: - -```yaml -- hackage: acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1,613 - pantry-tree: - size: 226 - sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 -``` - -### Git and Mercurial repos - -You can give a Git or Mercurial repo at a specific commit, and Stack -will clone that repo. - -```yaml -extra-deps: -- git: git@github.com:commercialhaskell/stack.git - commit: 6a86ee32e5b869a877151f74064572225e1a0398 -- git: git@github.com:snoyberg/http-client.git - commit: "a5f4f3" -- hg: https://example.com/hg/repo - commit: da39a3ee5e6b4b0d3255bfef95601890afd80709 -``` - -__NOTE__ It is highly recommended that you only use SHA1 values for a -Git or Mercurial commit. Other values may work, but they are not -officially supported, and may result in unexpected behavior (namely, -Stack will not automatically pull to update to new versions). -Another problem with this is that your build will not be deterministic, -because when someone else tries to build the project they can get a -different checkout of the package. - -A common practice in the Haskell world is to use "megarepos", or -repositories with multiple packages in various subdirectories. Some -common examples include [wai](https://github.com/yesodweb/wai/) and -[digestive-functors](https://github.com/jaspervdj/digestive-functors). To -support this, you may also specify `subdirs` for repositories, e.g.: - -```yaml -extra-deps: -- git: git@github.com:yesodweb/wai - commit: 2f8a8e1b771829f4a8a77c0111352ce45a14c30f - subdirs: - - auto-update - - wai -``` - -Since v1.7.1, you can specify packages from GitHub repository name using `github`: - -```yaml -extra-deps: -- github: snoyberg/http-client - commit: a5f4f30f01366738f913968163d856366d7e0342 -``` - -If unspecified, `subdirs` defaults to `['.']` meaning looking for a -package in the root of the repo. 
Note that if you specify a value of -`subdirs`, then `'.'` is _not_ included by default and needs to be -explicitly specified if a required package is found in the top-level -directory of the repository. - -#### Limited [git-annex](https://git-annex.branchable.com) support - -Pantry does not support [git-annex](https://git-annex.branchable.com). This is -because `git archive` does not handle symbolic links outside the work tree. It -is still possible to use repositories which use git-annex but do not require the -annex files for the package to be built. - -To do so, ensure that any files or directories stored by git-annex are marked -[export-ignore](https://git-scm.com/docs/git-archive#Documentation/git-archive.txt-export-ignore) -in the `.gitattributes` file in the repository. See -[#4579](https://github.com/commercialhaskell/stack/issues/4579) for more -information. - -For example, if the directory `fonts/` is controlled by git-annex, use the -following line. - -```gitattributes -fonts export-ignore -``` - -### Archives (HTTP(S) or local filepath) - -You can use HTTP and HTTPS URLs and local filepaths referring to -either tarballs or ZIP files. - -__NOTE__ Stack assumes that these files never change after downloading -to avoid needing to make an HTTP request on each build. Use hashes to -provide more security. - -```yaml -extra-deps: -- https://example.com/foo/bar/baz-0.0.2.tar.gz -- archive: http://github.com/yesodweb/wai/archive/2f8a8e1b771829f4a8a77c0111352ce45a14c30f.zip - subdirs: - - wai - - warp -- archive: ../acme-missiles-0.3.tar.gz - sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b -``` - -## Snapshots - -_NOTE_ Stack has supported custom snapshots properly since version -1.6. In version 1.11, the support for snapshots was moved to Pantry, -and Stackage snapshots have moved over to using the same -format. Therefore, there is no longer such a thing as "custom -snapshots," there are simply "snapshots." 
Pantry snapshots follow the -same format as Stack 1.6 "custom snapshots." - -Snapshots provide a list of packages to use, along with flags, -ghc-options, and a few other settings. Snapshots may extend any other -snapshot that can be specified in a `resolver` field. The packages -specified follow the same syntax mentioned above for -dependencies. Unlike `extra-deps`, however, no support for local -directories is available in snapshots to ensure reproducibility. - -```yaml -resolver: lts-8.21 # Inherits GHC version and package set -compiler: ghc-8.0.1 # Overwrites GHC version in the resolver, optional - -# Additional packages, follows extra-deps syntax -packages: -- unordered-containers-0.2.7.1 -- hashable-1.2.4.0 -- text-1.2.2.1 - -# Override flags, can also override flags in the parent snapshot -flags: - unordered-containers: - debug: true - -# Packages from the parent snapshot to ignore -drop-packages: -- wai-extra - -# Packages which should be hidden (affects script command's import -# parser -hidden: - wai: true - warp: false - -# Set GHC options for specific packages -ghc-options: - warp: - - -O2 -``` - -If you put this in a `snapshot.yaml` file in the same directory as your project, -you can now use the custom snapshot like this: - -```yaml -resolver: snapshot.yaml -``` - -This is an example of a custom snapshot stored in the filesystem. They are -assumed to be mutable, so you are free to modify it. We detect that the snapshot -has changed by hashing the contents of the involved files, and using it to -identify the snapshot internally. It is often reasonably efficient to modify a -custom snapshot, due to stack sharing snapshot packages whenever possible. 
- -### Overriding the compiler - -The following snapshot specification will be identical to `lts-7.1`, but instead -use `ghc-7.10.3` instead of `ghc-8.0.1`: - -```yaml -resolver: lts-7.1 -compiler: ghc-7.10.3 -``` - -### Dropping packages - -The following snapshot specification will be identical to `lts-7.1`, but without -the `text` package in our snapshot. Removing this package will cause all the -packages that depend on `text` to be unbuildable, but they will still be present -in the snapshot. - -```yaml -resolver: lts-7.1 -drop-packages: - - text -``` - -### Specifying ghc-options - -In order to specify ghc-options for a package, you use the same syntax as the -[ghc-options](yaml_configuration.md#ghc-options) field for build configuration. -The following snapshot specification will be identical to `lts-7.1`, but -provides `-O1` as a ghc-option for `text`: - -```yaml -resolver: lts-7.1 -packages: - - text-1.2.2.1 -ghc-options: - text: -O1 -``` - -This works somewhat differently than the stack.yaml `ghc-options` field, in that -options can only be specified for packages that are mentioned in the custom -snapshot's `packages` list. It sets the ghc-options, rather than extending those -specified in the snapshot being extended. - -Another difference is that the `*` entry for `ghc-options` applies to all -packages in the `packages` list, rather than all packages in the snapshot. - -### Specifying flags - -In order to specify flags for a package, you use the same syntax as the -[flags](yaml_configuration.md#flags) field for build configuration. 
The -following snapshot specification will be identical to `lts-7.1`, but -it enables the `developer` cabal flag: - -```yaml -resolver: lts-7.1 -packages: - - text-1.2.2.1 -flags: - text: - developer: true -``` diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 0000000000..b904b68ee2 --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1,20 @@ +# pip requirements file for MkDocs, used by Read The Docs +# +# See: +# +# The Read The Docs recommendation to 'pin' the MkDocs version used to build +# documentation: +# https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html#pinning-dependencies +# +# The pip guide to its requirements file format: +# https://pip.pypa.io/en/stable/reference/requirements-file-format/ +# +# Current version as at 20 July 2024 +mkdocs==1.6.0 +# Added as the structure of the online documentation was reorganised around the +# time of the release of Stack 3.1.1. See: +# +# https://github.com/mkdocs/catalog?tab=readme-ov-file#-links--references +# https://github.com/mkdocs/mkdocs-redirects +mkdocs-redirects +mkdocs-material[imaging] diff --git a/doc/resources/appveyor.yml b/doc/resources/appveyor.yml new file mode 100644 index 0000000000..445d734606 --- /dev/null +++ b/doc/resources/appveyor.yml @@ -0,0 +1,32 @@ +build: false + +before_test: +# http://help.appveyor.com/discussions/problems/6312-curl-command-not-found +- set PATH=C:\Program Files\Git\mingw64\bin;%PATH% + +- curl -sS -ostack.zip -L --insecure https://get.haskellstack.org/stable/windows-x86_64.zip +- 7z x stack.zip stack.exe + +clone_folder: "c:\\stack" +environment: + global: + STACK_ROOT: "c:\\sr" + + # Override the temp directory to avoid sed escaping issues + # See https://github.com/haskell/cabal/issues/5386 + TMP: "c:\\tmp" + + matrix: + - ARGS: "" + - ARGS: "--resolver lts-14" + - ARGS: "--resolver lts-16" + - ARGS: "--resolver lts-17" + +test_script: + +# Install toolchain, but do it silently due to lots of output +- stack %ARGS% 
setup > nul + +# The ugly echo "" hack is to avoid complaints about 0 being an invalid file +# descriptor +- echo "" | stack %ARGS% --no-terminal test diff --git a/doc/resources/stack-and-stackage-avatar.svg b/doc/resources/stack-and-stackage-avatar.svg new file mode 100644 index 0000000000..7dd3165eb9 --- /dev/null +++ b/doc/resources/stack-and-stackage-avatar.svg @@ -0,0 +1,56 @@ + + + + diff --git a/doc/resources/travis-complex.yml b/doc/resources/travis-complex.yml new file mode 100644 index 0000000000..5b52ea082a --- /dev/null +++ b/doc/resources/travis-complex.yml @@ -0,0 +1,196 @@ +# This is the complex Travis configuration, which is intended for use +# on open source libraries which need compatibility across multiple GHC +# versions, must work with cabal-install, and should be +# cross-platform. For more information and other options, see: +# +# https://docs.haskellstack.org/en/stable/travis_ci/ +# +# Copy these contents into the root directory of your GitHub project in a file +# named .travis.yml + +# Run jobs on Linux unless "os" is specified explicitly. +os: linux + +# Do not choose a language; we provide our own build tools. +language: generic + +# Caching so the next build will be fast too. +cache: + directories: + - $HOME/.ghc + - $HOME/.cabal + - $HOME/.stack + - $TRAVIS_BUILD_DIR/.stack-work + +# The different configurations we want to test. We have BUILD=cabal which uses +# cabal-install, and BUILD=stack which uses Stack. More documentation on each +# of those below. +# +# We set the compiler values here to tell Travis to use a different +# cache file per set of arguments. +# +# If you need to have different apt packages for each combination in the +# job matrix, you can use a line such as: +# addons: {apt: {packages: [libfcgi-dev,libgmp-dev]}} +jobs: + include: + # We grab the appropriate GHC and cabal-install versions from hvr's PPA. 
See: + # https://github.com/hvr/multi-ghc-travis + - env: BUILD=cabal GHCVER=8.6.5 CABALVER=2.4 HAPPYVER=1.19.5 ALEXVER=3.1.7 + compiler: ": #GHC 8.6.5" + addons: {apt: {packages: [cabal-install-2.4, ghc-8.6.5, happy-1.19.5, alex-3.1.7], sources: [hvr-ghc]}} + - env: BUILD=cabal GHCVER=8.8.4 CABALVER=3.0 HAPPYVER=1.19.12 ALEXVER=3.2.6 + compiler: ": #GHC 8.8.4" + addons: {apt: {packages: [cabal-install-3.0, ghc-8.8.4, happy-1.19.12, alex-3.2.6], sources: [hvr-ghc]}} + - env: BUILD=cabal GHCVER=8.10.4 CABALVER=3.2 HAPPYVER=1.20.0 ALEXVER=3.2.6 + compiler: ": #GHC 8.10.4" + addons: {apt: {packages: [cabal-install-3.2, ghc-8.10.4, happy-1.20.0, alex-3.2.6], sources: [hvr-ghc]}} + + # Build with the newest GHC and cabal-install. This is an accepted failure, + # see below. + - env: BUILD=cabal GHCVER=head CABALVER=head HAPPYVER=1.19.5 ALEXVER=3.1.7 + compiler: ": #GHC HEAD" + addons: {apt: {packages: [cabal-install-head, ghc-head, happy-1.19.5, alex-3.1.7], sources: [hvr-ghc]}} + + # The Stack builds. We can pass in arbitrary Stack arguments via the ARGS + # variable, such as using --stack-yaml to point to a different file. 
+ - env: BUILD=stack ARGS="" + compiler: ": #stack default" + addons: {apt: {packages: [libgmp-dev]}} + + - env: BUILD=stack ARGS="--resolver lts-14" + compiler: ": #stack 8.6.5" + addons: {apt: {packages: [libgmp-dev]}} + + - env: BUILD=stack ARGS="--resolver lts-16" + compiler: ": #stack 8.8.4" + addons: {apt: {packages: [libgmp-dev]}} + + - env: BUILD=stack ARGS="--resolver lts-17" + compiler: ": #stack 8.10.4" + addons: {apt: {packages: [libgmp-dev]}} + + # Nightly builds are allowed to fail + - env: BUILD=stack ARGS="--resolver nightly" + compiler: ": #stack nightly" + addons: {apt: {packages: [libgmp-dev]}} + + # Build on macOS in addition to Linux + - env: BUILD=stack ARGS="" + compiler: ": #stack default osx" + os: osx + + # Travis includes a macOS image which is incompatible with GHC 7.8.4 + + - env: BUILD=stack ARGS="--resolver lts-14" + compiler: ": #stack 8.6.5 osx" + os: osx + + - env: BUILD=stack ARGS="--resolver lts-16" + compiler: ": #stack 8.8.4 osx" + os: osx + + - env: BUILD=stack ARGS="--resolver lts-17" + compiler: ": #stack 8.10.4 osx" + os: osx + + - env: BUILD=stack ARGS="--resolver nightly" + compiler: ": #stack nightly osx" + os: osx + + allow_failures: + - env: BUILD=cabal GHCVER=head CABALVER=head HAPPYVER=1.19.5 ALEXVER=3.1.7 + - env: BUILD=stack ARGS="--resolver nightly" + +before_install: +# Using compiler above sets CC to an invalid value, so unset it +- unset CC + +# We want to always allow newer versions of packages when building on GHC HEAD +- CABALARGS="" +- if [ "x$GHCVER" = "xhead" ]; then CABALARGS=--allow-newer; fi + +# Download and unpack the stack executable +- export PATH=/opt/ghc/$GHCVER/bin:/opt/cabal/$CABALVER/bin:$HOME/.local/bin:/opt/alex/$ALEXVER/bin:/opt/happy/$HAPPYVER/bin:$HOME/.cabal/bin:$PATH +- mkdir -p ~/.local/bin +- | + if [ `uname` = "Darwin" ] + then + travis_retry curl --insecure -L https://get.haskellstack.org/stable/osx-x86_64.tar.gz | tar xz --strip-components=1 --include '*/stack' -C ~/.local/bin + else + 
travis_retry curl -L https://get.haskellstack.org/stable/linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack' + fi + + # Use the more reliable S3 mirror of Hackage + mkdir -p $HOME/.cabal + echo 'remote-repo: hackage.haskell.org:http://hackage.fpcomplete.com/' > $HOME/.cabal/config + echo 'remote-repo-cache: $HOME/.cabal/packages' >> $HOME/.cabal/config + + +install: +- echo "$(ghc --version) [$(ghc --print-project-git-commit-id 2> /dev/null || echo '?')]" +- if [ -f configure.ac ]; then autoreconf -i; fi +- | + set -ex + case "$BUILD" in + stack) + # Add in extra-deps for older snapshots, as necessary + # + # This is disabled by default, as relying on the solver like this can + # make builds unreliable. Instead, if you have this situation, it's + # recommended that you maintain multiple stack-lts-X.yaml files. + + #stack --no-terminal --install-ghc $ARGS test --bench --dry-run || ( \ + # stack --no-terminal $ARGS build cabal-install && \ + # stack --no-terminal $ARGS solver --update-config) + + # Build the dependencies + stack --no-terminal --install-ghc $ARGS test --bench --only-dependencies + ;; + cabal) + cabal --version + travis_retry cabal update + + # Get the list of packages from the stack.yaml file. Note that + # this will also implicitly run hpack as necessary to generate + # the .cabal files needed by cabal-install. 
+ PACKAGES=$(stack --install-ghc query locals | grep '^ *path' | sed 's@^ *path:@@') + + cabal install --only-dependencies --enable-tests --enable-benchmarks --force-reinstalls --ghc-options=-O0 --reorder-goals --max-backjumps=-1 $CABALARGS $PACKAGES + ;; + esac + set +ex + +script: +- | + set -ex + case "$BUILD" in + stack) + stack --no-terminal $ARGS test --bench --no-run-benchmarks --haddock --no-haddock-deps + ;; + cabal) + cabal install --enable-tests --enable-benchmarks --force-reinstalls --ghc-options=-O0 --reorder-goals --max-backjumps=-1 $CABALARGS $PACKAGES + + ORIGDIR=$(pwd) + for dir in $PACKAGES + do + cd $dir + cabal check || [ "$CABALVER" == "1.16" ] + cabal sdist + PKGVER=$(cabal info . | awk '{print $2;exit}') + SRC_TGZ=$PKGVER.tar.gz + cd dist + tar zxfv "$SRC_TGZ" + cd "$PKGVER" + cabal configure --enable-tests --ghc-options -O0 + cabal build + if [ "$CABALVER" = "1.16" ] || [ "$CABALVER" = "1.18" ]; then + cabal test + else + cabal test --show-details=streaming --log=/dev/stdout + fi + cd $ORIGDIR + done + ;; + esac + set +ex diff --git a/doc/travis-simple.yml b/doc/resources/travis-simple.yml similarity index 92% rename from doc/travis-simple.yml rename to doc/resources/travis-simple.yml index d6fa9bdc35..612299fbb2 100644 --- a/doc/travis-simple.yml +++ b/doc/resources/travis-simple.yml @@ -5,7 +5,7 @@ # # https://docs.haskellstack.org/en/stable/travis_ci/ # -# Copy these contents into the root directory of your Github project in a file +# Copy these contents into the root directory of your GitHub project in a file # named .travis.yml # Choose a build environment @@ -23,7 +23,7 @@ cache: addons: apt: packages: - - libgmp-dev + - libgmp-dev before_install: # Download and unpack the stack executable diff --git a/doc/shell_autocompletion.md b/doc/shell_autocompletion.md deleted file mode 100644 index fce764cd51..0000000000 --- a/doc/shell_autocompletion.md +++ /dev/null @@ -1,48 +0,0 @@ -
- -# Shell Auto-completion - -Note: if you installed a package for you Linux distribution, the bash -completion file was automatically installed (you may need the `bash-completion` -package to have it take effect). - -The following adds support for shell tab completion for standard Stack -arguments, although completion for filenames and executables etc. within stack -is still lacking (see [issue -823](https://github.com/commercialhaskell/stack/issues/832)). - -## for bash users - -you need to run following command -``` -eval "$(stack --bash-completion-script stack)" -``` -You can also add it to your `.bashrc` file if you want. - -## for ZSH users - -documentation says: -> Zsh can handle bash completions functions. The latest development version of -> zsh has a function bashcompinit, that when run will allow zsh to read bash -> completion specifications and functions. This is documented in the zshcompsys -> man page. To use it all **you need to do is run bashcompinit at any time -> after compinit**. It will define complete and compgen functions corresponding -> to the bash builtins. - -You must so: - 1. launch compinint - 2. launch bashcompinit - 3. eval stack bash completion script - -```shell -autoload -U +X compinit && compinit -autoload -U +X bashcompinit && bashcompinit -eval "$(stack --bash-completion-script stack)" -``` - -:information_source: If you already have quite a large zshrc, or if you use -oh-my-zsh, **compinit** will probably already be loaded. If you have a blank -zsh config, all of the 3 lines above are necessary. - -:gem: tip: instead of running those 3 lines from your shell every time you want -to use stack, you can add those 3 lines in your $HOME/.zshrc file diff --git a/doc/stack_yaml_vs_cabal_package_file.md b/doc/stack_yaml_vs_cabal_package_file.md deleted file mode 100644 index e206721fe7..0000000000 --- a/doc/stack_yaml_vs_cabal_package_file.md +++ /dev/null @@ -1,140 +0,0 @@ -
- -# stack.yaml vs cabal package file - -Due to their apparent overlap, the purpose of the following three files can be -unclear: - -* `stack.yaml` -* A cabal package file, e.g. `my-package.cabal` -* `package.yaml` - -The last two are easy to explain: `package.yaml` is a file format supported by -[hpack](https://github.com/sol/hpack#readme). It adds some niceties on top of -cabal. For example, hpack has YAML syntax support and will automatically -generate of `exposed-modules` lists. However, it's just a frontend to cabal -package files. So for this document, we're instead going to focus on the first -two and try to answer: - -_What's the difference between a `stack.yaml` file and a cabal package file?_ - -## Package versus project - -Cabal is a build system, which is used by Stack. Cabal defines the concept of a -_package_. A package has: - -* A name and version -* 0 or 1 libraries -* 0 or more executables -* A cabal file (or, as mentioned above, an hpack `package.yaml` that - generates a cabal file) -* And a bunch more - -The second to last bullet bears repeating: there's a 1-to-1 correspondence between -packages and cabal files. - -Stack is a build tool that works on top of the Cabal build system, and defines -a new concept called a _project_. A project has: - -* A _resolver_, which tells it about a snapshot (more on this later) -* Extra dependencies on top of the snapshot -* 0 or more local Cabal packages -* Flag and GHC options configurations -* And a bunch more Stack configuration - -A source of confusion is that, often, you'll have a project that defines -exactly one package you're working on, and in that situation it's unclear why, -for example, you need to specify an extra depedency in both your `stack.yaml` -_and_ cabal file. To explain, let's take a quick detour to talk about snapshots -and how Stack resolves dependencies. 
- -## Resolvers and snapshots - -Stack follows a rule that says, for any projects, there is precisely 1 version -of each package available. Obviously there are _many_ versions of many -different packages available in the world. But when resolving a `stack.yaml` -file, Stack requires that you have chosen a specific version for each package -available. - -The most common means by which this set of packages is defined is via a -Stackage Snapshot. For example, if you go to the page -, you will see a list of 2,666 packages at -specific version numbers. When you then specify `resolver: lts-10.2`, you're -telling Stack to use those package versions in resolving dependencies down to -concrete version numbers. - -Sometimes a snapshot doesn't have all of the packages you want. Or you want a -different version. Or you want to work on a local modification of a package. In -all of those cases, you can add more configuration data to your `stack.yaml` to -override the values it received from your `resolver` setting. At the end of the -day, each of your projects will end up with some way of resolving a package -name into a concrete version number. - -## Why specify deps twice? - -When you add something like this to your `stack.yaml` file: - -```yaml -extra-deps: -- acme-missiles-0.3 -``` - -What you're saying to Stack is: if at any point you find that you need to build -the `acme-missiles` package, please use version `0.3`. You are _not_ saying -"please build `acme-missiles` now." You are also not saying "my package depends -on `acme-missiles`." You are simply making it available should the need arise. - -When you add `build-depends: acme-missiles` to your cabal file or -`dependencies: [acme-missiles]` to your `package.yaml` file, you're saying -"this package requires that `acme-missiles` be available." 
Since -`acme-missiles` doesn't appear in your snapshot, without also modifying your -`stack.yaml` to mention it via `extra-deps`, Stack will complain about the -dependency being unavailable. - -You may challenge: but why go through all of that annoyance? Stack knows what -package I want, why not just go grab it? The answer is that, if Stack just -grabbed `acme-missiles` for you without it being specified in the `stack.yaml` -somehow, you'd lose reproducibility. How would Stack know which version to use? -It may elect to use the newest version, but if a new version is available in -the future, will it automatically switch to that? - -Stack's baseline philosophy is that build plans are always reproducible\*. The -purpose of the `stack.yaml` file is to define an immutable set of packages. No -matter when in time you use it, and no matter how many new release happen in -the interim, the build plan generated should be the same. - -\* There's at least one hole in this theory today, which is Hackage revisions. -When you specify `extra-deps: [acme-missiles-0.3]`, it doesn't specify which -revision of the cabal file to use, and Stack will just choose the latest. Stack -version 1.6 added the ability to specify exact revisions of cabal files, but -this isn't enforced as a requirement as it's so different from the way most -people work with packages. - -And now, how about the other side: why doesn't Stack automatically add -`acme-missiles` to `build-depends` in your cabal file if you add it as an -extra-dep? There are a surprising number reasons actually: - -* The cabal spec doesn't support anything like that -* There can be multiple packages in a project, and how do we know which package - actually needs the dependency? -* There can be multiple components (libraries, executable, etc) in a package, - and how do we know which of those actually needs the dependency? -* The dependency may only be conditionally needed, based on flags, OS, or - architecture. 
As an extreme example, we wouldn't want a Linux-only package to - be force-built on Windows. - -While for simple use cases it seems like automatically adding dependencies from -the cabal file to the `stack.yaml` file or vice-versa would be a good thing, it -breaks down immediately for any semi-difficult case. Therefore, Stack requires -you to add it to both places. - -And a final note, in case it wasn't clear. The example I gave above used -`acme-missiles`, which is not in Stackage snapshots. If, however, you want to -depend on a package already present in the snapshot you've selected, there's no -need to add it explicitly to your `stack.yaml` file: it's already there -implicitly via the `resolver` setting. This is what you do the majority of the -time, such as when you add `vector` or `mtl` as a `build-depends` value. - -## Should I check in generated cabal files? - -Yes, you should. This recommendation was changed in [issue #5210](https://github.com/commercialhaskell/stack/issues/5210), please see the discussion there. diff --git a/doc/topics/CI.md b/doc/topics/CI.md new file mode 100644 index 0000000000..5ab088ee64 --- /dev/null +++ b/doc/topics/CI.md @@ -0,0 +1,17 @@ +
+ +# Continuous integration (CI) + +## GitHub Actions + +The Stack repository uses GitHub Actions for its own CI. For further +information, see the guide to +[contributing](../CONTRIBUTING.md#continuous-integration-ci). + +## Azure + +For further information, see the [Azure CI](azure_ci.md) documentation. + +## Travis + +For further information, see the [Travis CI](travis_ci.md) documentation. diff --git a/doc/topics/GHC_from_source.md b/doc/topics/GHC_from_source.md new file mode 100644 index 0000000000..2c66e07f81 --- /dev/null +++ b/doc/topics/GHC_from_source.md @@ -0,0 +1,237 @@ +
+ +# Building GHC from source + +:octicons-beaker-24: Experimental + +[:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) + +Stack supports building the GHC compiler from source, using +[Hadrian](https://gitlab.haskell.org/ghc/ghc/blob/master/hadrian/README.md) (the +build system for GHC). The GHC version to be built and used is defined by a +Git commit ID and a Hadrian "flavour", with the following syntax in a YAML +configuration file: + +~~~yaml +compiler: ghc-git-<commit-id>-<flavour> +~~~ + +In the following example the commit ID is "5be7ad..." and the flavour is +"quick": + +~~~yaml +compiler: ghc-git-5be7ad7861c8d39f60b7101fd8d8e816ff50353a-quick +~~~ + +The [`-j`, `--jobs` option](../configure/global_flags.md#-jobs-or-j-option) at +the command line or the [`jobs`](../configure/yaml/non-project.md#jobs) option +in a YAML configuration file can be used to specify Hadrian's `-j[<n>]` flag. + +By default, the code is retrieved from the main GHC repository. If you want to +select another repository, use the `compiler-repository` option in a YAML +configuration file: + +~~~yaml +compiler-repository: git://my/ghc/repository +# default +# compiler-repository: https://gitlab.haskell.org/ghc/ghc.git +~~~ + +By default, the Hadrian build target is `reloc-binary-dist` on Windows and +`binary-dist` on other operating systems. If you want to specify another +Hadrian build target, use the `compiler-target` option in a YAML configuration +file: + +~~~yaml +compiler-target: binary-dist +# default (Windows) +# compiler-target: reloc-binary-dist +# default (non-Windows) +# compiler-target: binary-dist +~~~ + +By default, Stack assumes that the path to the binary distribution built by +Hadrian is `_build/reloc-bindist` on Windows and `_build/bindist` on other +operating systems. 
If you want to specify another path, use the +`compiler-bindist-path` option in a YAML configuration file: + +~~~yaml +compiler-bindist-path: _build/bindist +# default (Windows) +# compiler-bindist-path: _build/reloc-bindist +# default (non-Windows) +# compiler-bindist-path: _build/bindist +~~~ + +!!! note + + The Hadrian build target `reloc-binary-dist` was introduced with Git commit + id + [`fe23629b147d419053052e6e881f6e8ddfbf3bae`](https://gitlab.haskell.org/ghc/ghc/-/commit/fe23629b147d419053052e6e881f6e8ddfbf3bae). + + Once introduced, the target must be used on Windows. + +Stack does not check the compiler version when it uses a compiler built from +source. It is assumed that the built compiler is recent enough as Stack does not +enable any known workaround to make older compilers work. + +Building the compiler can take a very long time (more than one hour). For faster +build times, use Hadrian flavours that disable documentation generation. + +!!! note + + The building of the compiler can require the creation of symbolic links + (symlinks). On Windows, symlinks can only be created by processes with + Administrator privileges unless Windows' Developer Mode has been set. + +### Bootstrap compiler + +Building GHC from source requires a working GHC (known as the bootstrap +compiler). As we use a Stack based version of Hadrian (`hadrian/build-stack` in +GHC sources), the bootstrap compiler is configured into `hadrian/stack.yaml` and +fully managed by Stack. + +!!! note + + For some commit IDs, the snapshot specified in `hadrian/stack.yaml` + specifies a version of GHC that cannot be used to build GHC. This results in + GHC's `configure` script reporting messages similar to the following before + aborting: + + ~~~text + checking version of ghc... 9.0.2 + configure: error: GHC version 9.2 or later is required to compile GHC. + ~~~ + + The resolution is: + + 1. 
to specify an alternative snapshot (one that specifies a sufficiently + recent version of GHC) on the command line, using Stack's option + `--snapshot <snapshot>`. Stack will use that snapshot when running GHC's + `configure` script; and + + 2. to set the contents of the `STACK` environment variable to be + `stack --snapshot <snapshot>`. If `<snapshot>` is a path to a local YAML + file, it needs to be an absolute one. Hadrian's `build-stack` script + will refer to that environment variable for the Stack command it uses. + +### Hadrian prerequisites + +The Hadrian build system has certain +[prerequisites](https://gitlab.haskell.org/ghc/ghc/-/wikis/building/preparation). +It requires certain versions of the `happy` and `alex` executables on the PATH. +Stack will build and install `happy` and `alex`, if not already on the PATH. + +!!! note + + If `happy` and/or `alex` is already on the PATH, it may not be a version + that Hadrian supports. In that case, you may wish to remove that version + from the PATH and ensure that the package version specified by + `hadrian/stack.yaml` or, if applicable, via the contents of the `STACK` environment variable is an appropriate one. + +!!! note + + `alex-3.5.0.0` removed `-v` as an alias for `--version` and added `-v` as an + alias for `--verbose` (a flag that is not implemented as of `alex-3.5.4.0`). + + Hadrian may expect `-v` to be an alias for `--version`. + +=== "macOS" + + Hadrian requires, or can use, certain tools or Python packages that do not + come with macOS by default and that need to be installed using `brew` or + `pip3` (Python). Hadrian's LaTeX documentation also requires the + [DejaVu fonts](https://dejavu-fonts.github.io/) to be installed. + + ~~~zsh + brew install python@3.11 + # GHC uses a Python script named `boot`. + brew install automake + # Tool for generating GNU Standards-compliant Makefiles. + brew install texinfo + # Official documentation format of the GNU project. 
+ pip3 install -U sphinx + # Sphinx is the Python documentation generator. + brew install --cask mactex + # MacTeX: Full TeX Live distribution with GUI applications + ~~~ + +=== "Windows" + + Hadrian requires, or can use, certain MSYS2 or Python packages that do not + come with the Stack-supplied MSYS2 by default and need to be installed + using `pacman` (MSYS2) or `pip` (Python). Hadrian's LaTeX documentation also + requires the [DejaVu fonts](https://dejavu-fonts.github.io/) to be + installed. + + ~~~pwsh + stack exec -- pacman --sync --refresh + # Synchronize MSYS2 package databases + stack exec -- pacman --sync mingw-w64-x86_64-python-pip + # The PyPA recommended tool (pip) for installing Python packages. Also + # installs Python as a dependency. GHC uses a Python script named `boot`. + # The package must be the one from the `mingw64` MSYS2 repository, as Python + # from the `msys` repository cannot interpret Windows file paths correctly. + stack exec -- pacman --sync mingw-w64-x86_64-autotools + # The GNU autotools build system, including `autoreconf`, `aclocal` + # and `make`. GHC uses a sh script named `configure` which is itself created + # from a file named `configure.ac`. + stack exec -- pacman --sync patch + # A utility to apply patch files to original sources. + stack exec -- pacman --sync texinfo + # Utilities to work with and produce manuals, ASCII text, and on-line + # documentation from a single source file, including `makeinfo`. + stack exec -- pacman --sync mingw-w64-x86_64-ca-certificates + # Common CA (certificate authority) certificates. + stack exec -- pacman --sync mingw-w64-x86_64-python-sphinx + # Sphinx is the Python documentation generator. + stack exec -- pacman --sync mingw-w64-x86_64-texlive-full + # The TeX Live distribution. + ~~~ + + Hadrian may require certain LaTeX packages and may prompt for these to be + installed during the build process. + + !!! 
note + + Before commit + [cdddeb0f1280b40cc194028bbaef36e127175c4c](https://gitlab.haskell.org/ghc/ghc/-/commit/cdddeb0f1280b40cc194028bbaef36e127175c4c) + the GHC project did not support `autoconf >= 2.72`. + + MSYS2 can be + [configured](https://www.msys2.org/docs/autotools/#autoconf-wrapper) to + use an earlier version of `autoconf` than the latest version. + +### Global packages + +The GHC compiler you build from sources may depend on unreleased versions of +some global packages (e.g. Cabal). It may be an issue if a package you try to +build with this compiler depends on such global packages because Stack may not +be able to find versions of those packages (on Hackage, etc.) that are +compatible with the compiler. + +The easiest way to deal with this issue is to use the +[`drop-packages`](../configure/yaml/project.md#drop-packages) +project-specific configuration option to drop the offending packages as follows. +Instead of using the packages specified in the snapshot, the global packages +bundled with GHC will be used. + +~~~yaml +drop-packages: +- Cabal +- ... +~~~ + +Another way to deal with this issue is to add the relevant packages as +[`extra-deps`](../configure/yaml/project.md#extra-deps) built from source. To +avoid mismatching versions, you can use exactly the same commit id you used to +build GHC as follows: + +~~~ +extra-deps: +- git: https://gitlab.haskell.org/ghc/ghc.git + commit: '5be7ad7861c8d39f60b7101fd8d8e816ff50353a' + subdirs: + - libraries/Cabal/Cabal + - libraries/... +~~~ diff --git a/doc/topics/Stack_and_VS_Code.md b/doc/topics/Stack_and_VS_Code.md new file mode 100644 index 0000000000..259138b0e6 --- /dev/null +++ b/doc/topics/Stack_and_VS_Code.md @@ -0,0 +1,180 @@ +
+ +# Stack and Visual Studio Code + +[Visual Studio Code](https://code.visualstudio.com/) (VS Code) is a popular +source code editor, and +['Haskell'](https://marketplace.visualstudio.com/items?itemName=haskell.haskell) +is an extension for VS Code that is popular with Haskell coders. + +The 'Haskell' extension can be used with Stack but there are some things to be +aware of, set out below. + +## GHCup + +The separate [GHCup](https://www.haskell.org/ghcup/) project provides a tool +that can be used to install various tools useful for developing Haskell +projects. Those tools include: + +* GHC, +* HLS (see further below), +* MSYS2 (on Windows; see the developing on Windows + [documentation](developing_on_windows.md)), +* Stack, and +* Cabal (the tool). + +Stack itself can be used to install GHC and MSYS2. Stack can also be used to +upgrade, or downgrade, Stack. + +GHCup can configure Stack so that if Stack needs a version of GHC, GHCup takes +over obtaining and installing that version. By default, the script to install +GHCup (which can be run more than once) configures Stack in that way. For +further information about how GHCup configures Stack, see the GHC installation +customisation +[documentation](../configure/customisation_scripts.md#ghc-installation-customisation). + +On Windows, GHCup has the capability of using the Stack-supplied MSYS2 rather +than installing a duplicate copy. + +## HLS + +The VS Code extension makes use of +[HLS](https://github.com/haskell/haskell-language-server) (the Haskell Language +Server). To work, HLS has to be built with the same version of GHC that it will +support. That is, a version of HLS is required for each version of GHC in use. +It is possible that the most recent versions of GHC are not supported by HLS. 
+ +VS Code with the 'Haskell' extension can be configured in a number of ways: + +=== "GHCup manages HLS" + + The VS Code extension's settings (under 'Haskell: Manage HLS') allow a + user to specify that the extension should use GHCup, to download and install + the versions of HLS that it needs. + + If GHCup manages versions of HLS, versions of GHC can be managed in a number + of ways: + + === "Stack manages GHC using GHCup" + + As identified above, GHCup can configure Stack to use GHCup to manage + versions of GHC. + + === "Stack manages GHC directly" + + It is possible to install GHCup so that it is 'empty' except for the + current version of HLS, allow the VS Code extension to use GHCup to + manage HLS requirements only, and to disable messages from the extension + on start-up that installation of GHC, Cabal (the tool) + and/or Stack are also necessary (they are not, if only Stack is being + used). + + To install a version of GHCup that is 'empty' is a little more + complicated than a default installation of GHCup. + + === "Unix-like" + + The following environment variable must be set before GHCup's + installation `sh` script is run: `BOOTSTRAP_HASKELL_MINIMAL`. + + === "Windows" + + The second argument to the PowerShell script must be set to + `$false`, namely: + + Set-ExecutionPolicy Bypass -Scope Process -Force;[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072;Invoke-Command -ScriptBlock ([ScriptBlock]::Create((Invoke-WebRequest https://www.haskell.org/ghcup/sh/bootstrap-haskell.ps1 -UseBasicParsing))) -ArgumentList $true,$false + + There is the possibility of false messages from the extension on + start-up that need to be ignored. 
Those messages can be disabled by + [setting the following](https://github.com/haskell/vscode-haskell#setting-a-specific-toolchain) + for the VS Code extension: + + ~~~yaml + "haskell.toolchain": { + "ghc": null, + "cabal": null, + "stack": null + } + ~~~ + + There can be no differences between the version of GHC that the + GHCup-supplied HLS was built with and the version that Stack has + installed. + + For the most part, the versions of HLS provided by GHCup are built with + the same versions of GHC that Stack downloads from its default + `setup-info` dictionary (see the + [`setup-info`](../configure/yaml/non-project.md#setup-info) non-project + specific configuration option documentation). Stack's default is to + mirror the 'official' binary distributions published by GHC. + + However, in some cases, it is possible that a GHCup-supplied and + GHCup-selected HLS has been built with a different binary distribution + of GHC than the one which Stack has installed. + + ??? question "When have the GHCup- and Stack-supplied GHCs differed?" + + An example occurred with the release of GHC 9.0.2. For some Linux + users (Debian 9 and Fedora 27), the version of GHC 9.0.2 linked on + GHC’s download + [web page](https://www.haskell.org/ghc/download_ghc_9_0_2.html) was + broken. The GHC developers made alternative ‘9.0.2a’ versions + available. For a while, Stack referred to the versions published by + GHC on its download web page while the GHCup-supplied versions of + HLS were built using alternative versions. This incompatibility led + to problems. + + It was resolved by Stack's default also being changed to refer to + the '9.0.2a' versions. Where Stack has already installed GHC 9.0.2, + it is necessary to delete GHC 9.0.2 from the `stack path --programs` + directory. This will cause Stack to reinstall the alternative + version, when it first needs GHC 9.0.2. 
Stack should distinguish + what it builds with the alternative from what it has built, and + cached, with the original GHC 9.0.2. + + === "Stack uses a GHCup-supplied GHC" + + GHCup is used to manage versions of GHC and Stack is configured to use + the version of GHC on the PATH. + + That is, GHCup is used to install a version of GHC on the PATH. Stack is + configured to make use of that version, by making use of Stack's + `install-ghc` option (which needs to be disabled) and Stack's + `system-ghc` option (which needs to be enabled). + + For further information about these options, see the + [`install-ghc`](../configure/yaml/non-project.md#install-ghc) + documentation and the + [`system-ghc`](../configure/yaml/non-project.md#system-ghc) + documentation. + + Each time that a snapshot is used that references a different version of + GHC, then GHCup must be used to install it (if GHCup has not already + installed that version). For example, to use `snapshot: lts-24.37` + (GHC 9.10.3), the command `ghcup install ghc 9.10.3` must have been used + to install GHC 9.10.3. That may be a minor inconvenience for some people, + as one the primary benefits of Stack over other tools for building + Haskell code has been that Stack automatically ensures that the + necessary version of GHC is available. + +=== "User manages HLS" + + By default, the VS Code extension uses tools that are in the PATH. + + If the VS Code extension is set not to use GHCup, its user needs to ensure + that each version of HLS that the extension needs is on the PATH. + +### Cradle + +HLS may need a 'cradle' - an +[`hie.yaml` file](https://hackage.haskell.org/package/hie-bios#stack) - in the +project's root directory in order to work well. + +The [`gen-hie` tool](https://hackage.haskell.org/package/implicit-hie) can help +generate such a cradle. 
+ +### Tips + +It has been suggested that a project must have been successfully built before +the VS Code extension (and HLS) is first activated on the project, for HLS to +work reliably. diff --git a/doc/topics/azure_ci.md b/doc/topics/azure_ci.md new file mode 100644 index 0000000000..412df8df03 --- /dev/null +++ b/doc/topics/azure_ci.md @@ -0,0 +1,174 @@ +
+ +# Azure CI + +This page documents how to use Stack on [Azure CI](http://dev.azure.com/). + +## Quick Start + +Note that you have to create [azure pipelines](#creating-azure-pipelines) for +your project and then you need to put the relevant configuration files: + +* For simple Azure configuration, copy-paste the + [azure-simple](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-simple.yml) + file into `azure-pipelines.yml`. +* For complex Azure configuration, you need to take the below linked four files + and put all of them into the `.azure` directory. + +For a more detailed explanation, you can read further. + +## Simple and Complex configuration + +We provide two fully baked configuration ready to be used on your projects: + +* [The simple Azure configuration](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-simple.yml) + is intended for applications that do not require multiple GHC support or + cross-platform support. It builds and tests your project with just the + settings present in your `stack.yaml` file. +* The complex Azure configuration is intended for projects that need to support + multiple GHC versions and multiple operating systems, such as open source + libraries to be released to Hackage. It tests against Stack for different + snapshots on Linux, macOS and Windows. These are the files for the complex + configuration: + - [azure-pipelines.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-pipelines.yml) + : This is the starter file used by the Azure CI. 
+ - [azure-linux-template.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-linux-template.yml) + : Template for Azure Linux build + - [azure-osx-template.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-osx-template.yml) + : Template for Azure macOS build + - [azure-windows-template.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-windows-template.yml) + : Template for Azure Windows build + + !!! note + + It is likely going to be necessary to modify this configuration to match + the needs of your project, such as tweaking the build matrix to alter + which GHC versions you test against, or to specify GHC-version-specific + `stack.yaml` files if necessary. Do not be surprised if it does not work + the first time around. See the multiple GHC section below for more + information. + +## Creating Azure Pipelines + +Each of these configurations is ready to be used immediately. But before we go +into where to put them, we have to create pipeline for your project in Azure +CI platform: + +* Go to [dev.azure.com](https://dev.azure.com). You have to initially sign-in to + your microsoft account there. +* Once you have logged in to your Microsoft account, you have to sign in to + [Azure devops](https://user-images.githubusercontent.com/737477/52465678-70963080-2ba5-11e9-83d8-84112b140236.png) + from there. +* You will be greeted with a + [dashboard](https://user-images.githubusercontent.com/737477/52465677-70963080-2ba5-11e9-904a-c15c7c0524ef.png) + where you can create your projects. +* Click the "Create Project" button and fill the relevant information in the + [dialog](https://user-images.githubusercontent.com/737477/52465676-70963080-2ba5-11e9-82a4-093ee58f11c9.png) and then click the "Create" button. 
+* This will lead you to the project + [dashboard](https://user-images.githubusercontent.com/737477/52465675-6ffd9a00-2ba5-11e9-917e-3dec251fcc87.png) + page where you can create pipelines. +* Click on "Pipelines" in the left menu. This will load the + [pipelines page](https://user-images.githubusercontent.com/737477/52465673-6ffd9a00-2ba5-11e9-97a4-04e703ae1fbc.png) + on the right. +* Click on the button "New Pipeline" and you have to follow through the wizard + there. You need to choose your github repository (or Azure repos) and follow + the wizard. Note that in the + [Configure step](https://user-images.githubusercontent.com/737477/52465670-6ffd9a00-2ba5-11e9-83a3-9fffdacbf249.png) + you have to select the "Starter Pipeline". This will open up an + [editor window](https://user-images.githubusercontent.com/737477/52465669-6f650380-2ba5-11e9-9662-e9c6fc2682b5.png). + You can leave the existing YAML configuration there as it is and click the + "Save and run" button. That will popup a + [dialog](https://user-images.githubusercontent.com/737477/52465668-6f650380-2ba5-11e9-9203-6347a609e3c4.png). + Select the relevant option and click "Save and run" button. (Note that this + step would have created `azure-pipelines.yml` in your repository. You have to + replace that with the appropriate configuration file.) + +The rest of this document explains the details of common Azure configurations +for those of you who want to tweak the above configuration files or write your +own. + +*Note:* both Azure and Stack infrastructures are actively developed. We try to +document best practices at the moment. + +## Infrastructure + +Note that you need at least one agent to build your code. You can specify which +virtual image you want to choose using this configuration: + +~~~yaml +pool: + vmImage: ubuntu-latest +~~~ + +The other popular options are `macOS-latest`, `windows-latest` for macOS and +Windows respectively. 
You can find the +[complete list](https://docs.microsoft.com/en-us/azure/devops/pipelines/agents/hosted?view=vsts&tabs=yaml) +here. You also have the option to select a specific supported ubuntu version +like `ubuntu-18.04`. + +## Installing Stack + +Currently there is only one reasonable way to install Stack: fetch a precompiled +binary from GitHub. + +~~~yaml +- script: | + mkdir -p ~/.local/bin + curl -L https://get.haskellstack.org/stable/linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack' + displayName: Install Stack +~~~ + +## Installing GHC + +There are two ways to install GHC: + +- Let Stack download GHC +- Install GHC using the apt package manager. This method is only applicable for + Debian based images. + +But we only use the first method of using Stack to download GHC. + +### Multiple GHC - parametrised builds + +For different GHC versions, you probably want to use different project-level +configuration files (`stack.yaml`, by default). If you do not want to put a +specific `stack.yaml` for a particular snapshot and still want to test it, you +have to specify your snapshot argument in the `ARGS` environment variable (you +will see an example below). + +~~~yaml +strategy: + matrix: + GHC 8.0: + ARGS: "--snapshot lts-9" + GHC 8.2: + ARGS: "--snapshot lts-11" + GHC 8.4: + ARGS: "--snapshot lts-12" + GHC 8.6: + ARGS: "--snapshot lts-14" + GHC 8.8: + ARGS: "--snapshot lts-15" + nightly: + ARGS: "--snapshot nightly" +~~~ + +## Running tests + +After the environment setup, actual test running is simple. Command: + +~~~text +stack $ARGS test --bench --no-run-benchmarks --haddock --no-haddock-deps +~~~ + +## Other details + +Some Stack commands will run for a long time. To avoid timeouts, use the +[timeoutInMinutes](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/phases?tabs=yaml&view=azdevops#timeouts) +for jobs. 
+ +## Examples + +- [commercialhaskell/stack](https://github.com/commercialhaskell/stack/blob/master/azure-pipelines.yml) +- [psibi/tldr-hs](http://github.com/psibi/tldr-hs) +- [psibi/wai-slack-middleware](https://github.com/psibi/wai-slack-middleware) diff --git a/doc/topics/custom_snapshot.md b/doc/topics/custom_snapshot.md new file mode 100644 index 0000000000..225b7a5446 --- /dev/null +++ b/doc/topics/custom_snapshot.md @@ -0,0 +1,151 @@ +
+ +## Snapshot specification + +[:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) + +A snapshot specifies: (a) a version of GHC and, implicitly, its boot packages; +(b) usually, directly, specific versions of a list of packages; (c) sometimes, +Cabal flags for certain packages; and (d) sometimes, GHC options. + +!!! info + + Stackage snapshots are not expected to include directly any boot packages + but some such snapshots may include directly some boot packages. In + particular, some snapshots include directly `Win32` (which is a boot package + on Windows) while most do not. + +Snapshots may extend any other snapshot that can be specified in a +[`snapshot`](../configure/yaml/project.md#snapshot) key. The packages specified +follow the same syntax for dependencies in Stack's project-level configuration +files. Unlike the `extra-deps` key, however, no support for local directories is +available in snapshots to ensure reproducibility. + +!!! info + + Stack uses the [Pantry](https://hackage.haskell.org/package/pantry) library + for snapshot specification. 
+ +~~~yaml +# Inherits a specific GHC version and, implicitly, its boot packages and +# specific versions of a set of other packages: +snapshot: lts-24.37 +# Overwrites the version of GHC (and, implicitly, its boot packages) specified +# in the snapshot (optional): +compiler: ghc-9.10.2 + +# Additional packages, follows extra-deps syntax +packages: +- unordered-containers-0.2.7.1 +- hashable-1.2.4.0 +- text-1.2.2.1 + +# Packages from the parent snapshot to ignore +drop-packages: +- wai-extra + +# Packages which should be hidden +hidden: + wai: true + warp: false + +# Set GHC options for specific packages +ghc-options: + warp: + - -O2 + +# Override flags, can also override flags in the parent snapshot +flags: + unordered-containers: + debug: true +~~~ + +If you put this in a `snapshot.yaml` file in the same directory as your project, +you can now use the snapshot like this: + +~~~yaml +snapshot: snapshot.yaml +~~~ + +This is an example of a custom snapshot stored in the filesystem. They are +assumed to be mutable, so you are free to modify it. We detect that the snapshot +has changed by hashing the contents of the involved files, and using it to +identify the snapshot internally. It is often reasonably efficient to modify a +custom snapshot, due to Stack sharing snapshot packages whenever possible. + +### Overriding the compiler + +The following snapshot specification will be identical to `lts-24.37`, but +instead use `ghc-9.10.2` and its boot packages instead of `ghc-9.10.3` and its +boot packages: + +~~~yaml +snapshot: lts-24.37 # GHC 9.10.3 +compiler: ghc-9.10.2 +~~~ + +### Dropping packages + +The following snapshot specification will be identical to `lts-24.37`, but +without the `text` package in our snapshot. Removing this package will cause all +the packages that depend on `text` to be unbuildable, but they will still be +present in the snapshot. 
+ +~~~yaml +snapshot: lts-24.37 +drop-packages: +- text +~~~ + +### Hiding packages + +The following snapshot specification will be identical to `lts-24.37`, but the +`text` package will be hidden when registering. This will affect, for example, +the import parser in the script command. + +~~~yaml +snapshot: lts-24.37 +hidden: +- text +~~~ + +### Specifying GHC options + +In order to specify GHC options for a package, you use the same syntax as the +[ghc-options](../configure/yaml/non-project.md#ghc-options) key for build +configuration. + +The following snapshot specification will be identical to `lts-24.37`, but +provides `-O1` as a ghc-option for `text`: + +~~~yaml +snapshot: lts-24.37 +packages: +- text-2.1.2 +ghc-options: + text: -O1 +~~~ + +This works somewhat differently than the stack.yaml `ghc-options` key, in that +options can only be specified for packages that are mentioned in the custom +snapshot's `packages` list. It sets the ghc-options, rather than extending those +specified in the snapshot being extended. + +Another difference is that the `*` entry for `ghc-options` applies to all +packages in the `packages` list, rather than all packages in the snapshot. + +### Specifying Cabal flags + +In order to specify Cabal flags for a package, you use the same syntax as the +[flags](../configure/yaml/project.md#flags) key for build configuration. The +following snapshot specification will be identical to `lts-24.37`, but +it enables the `developer` Cabal flag: + +~~~yaml +snapshot: lts-24.37 +packages: +- text-2.1.2 +flags: + text: + developer: true +~~~ diff --git a/doc/topics/debugging.md b/doc/topics/debugging.md new file mode 100644 index 0000000000..de75b76116 --- /dev/null +++ b/doc/topics/debugging.md @@ -0,0 +1,96 @@ +
+ +# Debugging + +To profile a component of the current project, pass the +[`--profile` flag](../commands/build_command.md#-profile-flag) to `stack build`. + +The flag: + +* for project packages, turns on the Cabal flag + [`--enable-profiling`](https://cabal.readthedocs.io/en/stable/setup-commands.html#cmdoption-runhaskell-Setup.hs-configure-enable-profiling); +* turns on the Cabal flag + [`--enable-library-profiling`](https://cabal.readthedocs.io/en/stable/setup-commands.html#cmdoption-runhaskell-Setup.hs-configure-enable-library-profiling); and +* passes GHC's + [`+RTS -p` runtime options](https://downloads.haskell.org/ghc/latest/docs/users_guide/profiling.html#rts-flag--p) + to any test suites and benchmarks. + +For example the following command will build the `my-tests` testsuite with +profiling options and create a `my-tests.prof` file in the current directory +as a result of the test run. + +~~~text +stack test --profile my-tests +~~~ + +The `my-tests.prof` file now contains time and allocation info for the test run. + +To create a profiling report for an executable, e.g. `my-exe`, you can command: + +~~~text +stack exec --profile -- my-exe +RTS -p +~~~ + +For more fine-grained control of compilation options there are the +[`--library-profiling` flag](../commands/build_command.md#-no-library-profiling-flag) +and +[`--executable-profiling` flag](../commands/build_command.md#-no-executable-profiling-flag). + +The `--library-profiling` flag: + +* turns on the Cabal flag + [`--enable-library-profiling`](https://cabal.readthedocs.io/en/stable/setup-commands.html#cmdoption-runhaskell-Setup.hs-configure-enable-library-profiling); and +* passes GHC's + [`+RTS -p` runtime options](https://downloads.haskell.org/ghc/latest/docs/users_guide/profiling.html#rts-flag--p) + to any test suites and benchmarks. 
+ +The `--executable-profiling` flag: + +* for project packages, turns on the Cabal flag + [`--enable-profiling`](https://cabal.readthedocs.io/en/stable/setup-commands.html#cmdoption-runhaskell-Setup.hs-configure-enable-profiling); +* turns on the Cabal flag + [`--enable-library-profiling`](https://cabal.readthedocs.io/en/stable/setup-commands.html#cmdoption-runhaskell-Setup.hs-configure-enable-library-profiling); and +* passes GHC's + [`+RTS -p` runtime options](https://downloads.haskell.org/ghc/latest/docs/users_guide/profiling.html#rts-flag--p) + to any test suites and benchmarks. + +To enable compilation with profiling options by default you can add the +following to a project-level or global configuration file: + +~~~yaml +build: + library-profiling: true + executable-profiling: true +~~~ + +## Further reading + +For more commands and uses, see the +[official GHC chapter on profiling](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/profiling.html), +the [Haskell wiki](https://wiki.haskell.org/How_to_profile_a_Haskell_program), +and the +[chapter on profiling in Real World Haskell](http://book.realworldhaskell.org/read/profiling-and-optimization.html). + +## Tracing + +To generate a backtrace in case of exceptions during a test or benchmarks run, +use the `--trace` flag. Like `--profile` this compiles with profiling options, +but adds the `+RTS -xc` runtime option. + +## Debugging symbols + +Building with debugging symbols in the +[DWARF information](https://ghc.haskell.org/trac/ghc/wiki/DWARF) is supported by +Stack. This can be done by passing the flag `--ghc-options="-g"` and also to +override the default behaviour of stripping executables of debugging symbols by +passing either one of the following flags: `--no-strip`, +`--no-library-stripping` or `--no-executable-stripping`. + +In Windows, GDB can be installed to debug an executable with +`stack exec -- pacman -S gdb`. 
Windows' Visual Studio compiler's debugging +format PDB is not supported at the moment. This might be possible by +[separating](https://stackoverflow.com/questions/866721/how-to-generate-gcc-debug-symbol-outside-the-build-target) +debugging symbols and +[converting](https://github.com/rainers/cv2pdb) their format. Or as an option +when +[using the LLVM backend](http://blog.llvm.org/2017/08/llvm-on-windows-now-supports-pdb-debug.html). diff --git a/doc/topics/developing_on_windows.md b/doc/topics/developing_on_windows.md new file mode 100644 index 0000000000..d91716a78a --- /dev/null +++ b/doc/topics/developing_on_windows.md @@ -0,0 +1,109 @@ +
+ +# Developing on Windows # + +On Windows, Stack comes with an installation of [MSYS2](https://www.msys2.org/). +An environment of MSYS2 (by default, `MINGW64` on 64-bit Windows or `MINGW32` on +32-bit Windows) will be used by Stack to provide a Unix-like shell and +environment for Stack. This may be necessary for installing some Haskell +packages, such as those which use `configure` scripts, or if your project needs +some additional tools during the build phase. + +No matter which terminal software you choose (Windows Terminal, Console Windows +Host, Command Prompt, PowerShell, Git bash or any other) you can use this +environment too by executing all programs through +`stack exec -- `. + +Executables and libraries can be installed with the MSYS2 package manager +`pacman`. All tools can be found in the [index](https://packages.msys2.org) to +MSYS2 packages. A [guide](https://www.msys2.org/docs/package-management/) to +package management with `pacman` is also available. `pacman` — like all +other tools in the Stack environment — should be started with +`stack exec -- pacman`. Help about `pacman` commands (operations) can be +obtained by `stack exec -- pacman --help`. Help about a specific `pacman` +operation can be obtained by using `--help` (or `-h`) with an operation. For +example, help about the operation `--sync` (or `-S`) can be obtained with +`stack exec -- pacman --sync --help` or, equivalently, +`stack exec -- pacman -Sh`. + +Command `stack path --bin-path` to see the PATH in the Stack environment. If the +relevant MSYS2 environment is `MINGW64`, on Windows, it includes the +`\mingw64\bin`, `\usr\bin` and `\usr\local\bin` directories of the +Stack-supplied MSYS2. (It includes the corresponding directory if the relevant +MSYS2 environment is other than `MINGW64`.) 
If your executable depends on files +(for example, dynamic-link libraries) in those directories and you want to run +it outside of the Stack environment, you will need to ensure copies of those +files are on the PATH. + +Command `stack path --extra-include-dirs` and `stack path --extra-library-dirs` +to see the extra directories searched for C header files or system libraries +files in the Stack environment. If the relevant MSYS2 environment is `MINGW64`, +on Windows, it includes the `\mingw64\include` (include) and the `\mingw64\lib` +and `\mingw64\bin` directories (library) of the Stack-supplied MSYS2. (It +includes the corresponding directories if the relevant MSYS2 environment is +other than `MINGW64`.) + +For further information about configuring the relevant MSYS2 environment, see +Stack's [`msys-environment`](../configure/yaml/non-project.md#msys-environment) +non-project specific configuration option documentation. + +## Updating the Stack-supplied MSYS2 ## + +The Stack-supplied MSYS2 can itself be updated with the Stack-supplied `pacman`. +See the MSYS2 guide [Updating MSYS2](https://www.msys2.org/docs/updating/). If +the Stack-supplied `pacman` has a version that is 5.0.1.6403 or greater (see +`stack exec -- pacman --version`) then the command to update is simply: + + stack exec -- pacman -Suy + +This command may need to be run more than once, until everything is reported by +`pacman` as 'up to date' and 'nothing to do'. + +## Setup.hs ## + +`Setup.hs` is automatically run inside the Stack environment. So when you need +to launch another tool you do not need to prefix the command with +`stack exec --` within the custom `Setup.hs` file. + +## Pacman packages to install for common Haskell packages ## + +The following lists MSYS2 packages known to allow the installation of some +common Haskell packages on Windows. Feel free to submit additional entries via a +pull request. 
+ +* For [text-icu](https://hackage.haskell.org/package/text-icu) install + `mingw64/mingw-w64-x86_64-icu`. + +* For [zlib >= 0.7](https://hackage.haskell.org/package/zlib) the default + Cabal flag `pkg-config` is `true` and requires executable `pkg-config` on + the PATH. MSYS2 [defaults](https://www.msys2.org/docs/pkgconfig/) to + [`pkgconf`](https://packages.msys2.org/package/pkgconf?repo=msys&variant=x86_64) + as its `pkg-config` implementation. Installation: + + stack exec -- pacman -S pkgconf + + Alternatively, build with `--flag zlib:-pkg-config`. + +## CMake ## + +CMake has trouble finding other tools even if they are available on the PATH. +Likely this is not a CMake problem but one of the environment not fully +integrating. For example GHC comes with a copy of GCC which is not installed by +MSYS2 itself. If you want to use this GCC you can provide a full path to it, or +find it first with `System.Directory.findExecutable` if you want to launch GCC +from a Haskell file such as `Setup.hs`. + +Experience tells that the `mingw-w64` versions of Make and CMake are most +likely to work. Though there are other versions available through `pacman`, so +have a look to see what works for you. Both tools can be installed with the +commands: + + stack exec -- pacman -S mingw-w64-x86_64-make + stack exec -- pacman -S mingw-w64-x86_64-cmake + +Even though Make and CMake are then both installed into the same environment, +CMake still seems to have trouble to find Make. 
To help CMake find GCC and Make +supply the following flags: + + -DCMAKE_C_COMPILER=path + -DCMAKE_MAKE_PROGRAM=path diff --git a/doc/docker_integration.md b/doc/topics/docker_integration.md similarity index 92% rename from doc/docker_integration.md rename to doc/topics/docker_integration.md index ba89eebdb3..73e062c233 100644 --- a/doc/docker_integration.md +++ b/doc/topics/docker_integration.md @@ -39,9 +39,9 @@ supported way to use Docker integration on macOS (the older Docker Machine with host volume mounting that make Stack nearly unusable for anything but the most trivial projects). -Other Un*xen are not officially supported but there are ways to get them working. -See [#194](https://github.com/commercialhaskell/stack/issues/194) for details -and workarounds. +Other Un*xen are not officially supported but there are ways to get them +working. See [#194](https://github.com/commercialhaskell/stack/issues/194) for +details and workarounds. Note: you may want to use set the `mount-mode` option to `delegated`, since this can dramatically improve performance on macOS (see @@ -93,7 +93,7 @@ in the Docker container by default. To use a compiler installed by stack, add system-ghc: false -(see [`system-ghc`](yaml_configuration.md#system-ghc)). +(see [`system-ghc`](../configure/yaml/non-project.md#system-ghc)). ### Use stack as normal @@ -105,7 +105,7 @@ container). The first time you run a command with a new image, you will be prompted to run `stack docker pull` to pull the image first. This will pull a Docker -image with a tag that matches your resolver. Only LTS resolvers are supported +image with a tag that matches your snapshot. Only LTS snapshots are supported (we do not generate images for nightly snapshots). Not every LTS version is guaranteed to have an image existing, and new LTS images tend to lag behind the LTS snapshot being published on stackage.org. Be warned: these images are @@ -199,7 +199,7 @@ otherwise noted. # What to name the Docker container. 
Only useful with `detach` or # `persist` true. (default none) container-name: "example-name" - + # Sets the network used by docker. Gets directly passed to dockers `net` # argument (default: host) network: host @@ -262,9 +262,9 @@ Image Repositories FP Complete provides the following public image repositories on Docker Hub: -- [fpco/stack-build](https://registry.hub.docker.com/u/fpco/stack-build/) (the - default) - GHC (patched), tools (stack, cabal-install, happy, alex, etc.), and - system developer libraries required to build all Stackage packages. +- [fpco/stack-build](https://registry.hub.docker.com/r/fpco/stack-build/) (the + default) - GHC (patched), tools (Stack, Cabal (the tool), happy, alex, etc.), + and system developer libraries required to build all Stackage packages. FP Complete also builds custom variants of these images for their clients. @@ -307,10 +307,11 @@ Additional notes ### Volume-mounts and ephemeral containers Since filesystem changes outside of the volume-mounted project directory are not -persisted across runs, this means that if you `stack exec sudo apt-get install some-ubuntu-package`, -that package will be installed but then the container it's -installed in will disappear, thus causing it to have no effect. If you wish to -make this kind of change permanent, see later instructions for how to create a +persisted across runs, this means that if you +`stack exec sudo apt-get install some-ubuntu-package`, that package will be +installed but then the container it is installed in will disappear, thus causing +it to have no effect. If you wish to make this kind of change permanent, see +later instructions for how to create a [derivative Docker image](#derivative-image). Inside the container, your home directory is a special location that volume- @@ -339,8 +340,8 @@ If you do want to do all your work, including editing, in the container, it might be better to use a persistent container in which you can install Ubuntu packages. 
You could get that by running something like `stack --docker-container-name=NAME --docker-persist exec bash`. This -means when the container exits, it won't be deleted. You can then restart it -using `docker start -a -i NAME`. It's also possible to detach from a container +means when the container exits, it will not be deleted. You can then restart it +using `docker start -a -i NAME`. It is also possible to detach from a container while it continues running in the background using by pressing Ctrl-P Ctrl-Q, and then reattach to it using `docker attach NAME`. @@ -354,7 +355,7 @@ information about managing Docker containers. Creating your own custom derivative image can be useful if you need to install additional Ubuntu packages or make other changes to the operating system. Here is an example (replace `stack-build:custom` if you prefer a different name for -your derived container, but it's best if the repo name matches what you're +your derived container, but it is best if the repo name matches what you are deriving from, only with a different tag, to avoid recompilation): ;;; On host @@ -369,9 +370,9 @@ deriving from, only with a different tag, to avoid recompilation): $ docker commit temp stack-build:custom $ docker rm temp -Now you have a new Docker image named `stack-build:custom`. To use the new image, run -a command such as the following or update the corresponding values in your -`stack.yaml`: +Now you have a new Docker image named `stack-build:custom`. To use the new +image, run a command such as the following or update the corresponding values in +your `stack.yaml`: stack --docker-image=stack-build:custom @@ -384,7 +385,7 @@ on creating Docker images. The easiest way to create your own custom image us by extending FP Complete's images, but if you prefer to start from scratch, most images that include the -basics for building code with GHC will work. The image doesn't even, strictly +basics for building code with GHC will work. 
The image does not even, strictly speaking, need to include GHC, but it does need to have libraries and tools that GHC requires (e.g., libgmp, gcc, etc.). @@ -392,7 +393,7 @@ There are also a few ways to set up images that tightens the integration: * Create a user and group named `stack`, and create a `~/.stack` directory for it. Any build plans and caches from it will be copied from the image by Stack, - meaning they don't need to be downloaded separately. + meaning they do not need to be downloaded separately. * Any packages in GHC's global package database will be available. This can be used to add private libraries to the image, or the make available a set of packages from an LTS release. @@ -403,14 +404,14 @@ Troubleshooting ### "No Space Left on Device", but 'df' shows plenty of disk space This is likely due to the storage driver Docker is using, in combination with -the large size and number of files in these images. Use `docker info|grep 'Storage Driver'` -to determine the current storage driver. +the large size and number of files in these images. Use +`docker info|grep 'Storage Driver'` to determine the current storage driver. We recommend using either the `overlay` or `aufs` storage driver for stack, as they are least likely to give you trouble. On Ubuntu, `aufs` is the default for new installations, but older installations sometimes used `devicemapper`. -The `devicemapper` storage driver's doesn't work well with large filesystems, +The `devicemapper` storage driver's does not work well with large filesystems, and we have experienced other instabilities with it as well. We recommend against its use. @@ -430,7 +431,7 @@ for this condition. Unfortunately, the number of inodes is set when creating the filesystem, so fixing this requires reformatting and passing the `-N` argument to mkfs.ext4. 
-### Name resolution doesn't work from within container +### Name resolution does not work from within container On Ubuntu 12.04, by default `NetworkManager` runs `dnsmasq` service, which sets `127.0.0.1` as your DNS server. Since Docker containers cannot access this diff --git a/doc/topics/editor_integration.md b/doc/topics/editor_integration.md new file mode 100644 index 0000000000..952b2a58b5 --- /dev/null +++ b/doc/topics/editor_integration.md @@ -0,0 +1,20 @@ +
+ +# Editor integration + +## Visual Studio Code + +For further information, see the [Stack and Visual Code](Stack_and_VS_Code.md) +documentation. + +## Shell auto-completion + +Love tab-completion of commands? You are not alone. If you are on bash, just run +the following command (or add it to `.bashrc`): + +~~~text +eval "$(stack --bash-completion-script stack)" +~~~ + +For more information and other shells, see the +[shell auto-completion](shell_autocompletion.md) documentation. diff --git a/doc/topics/haskell_and_c_code.md b/doc/topics/haskell_and_c_code.md new file mode 100644 index 0000000000..217391ad41 --- /dev/null +++ b/doc/topics/haskell_and_c_code.md @@ -0,0 +1,229 @@ +
+ +# Haskell and C code + +## Haskell packages with C code + +A Haskell package can include C source code. For example, consider a simple +one-package Stack project named `c-example`, created by `stack new c-example` +but with these changes: + +A C header file `my-library.h` added in new directory `include`: +~~~c +#ifndef MY_LIBRARY_HEADER +#define MY_LIBRARY_HEADER +int max(int, int); +#endif +~~~ + +A C source code file `my-library.c` added in new directory `c-source`: +~~~c +#include "my-library.h" + +/* Function returning the larger of two integers */ +int max(int x1, int x2) { + if (x1 > x2) + return x1; + else + return x2; +} +~~~ + +A different Haskell module in source file `src/Lib.hs`, including a Haskell +foreign import declaration making use of the C `max` function: +~~~haskell +module Lib ( c_max ) where + +foreign import ccall "max" c_max :: Int -> Int -> Int +~~~ + +A different Haskell module in source file `app/Main.hs`, making use of the +Haskell function `c_max` exported from module `Lib`: +~~~haskell +module Main ( main ) where + +import Lib ( c_max ) + +main :: IO () +main = print $ c_max 10 100 +~~~ + +The package's `package.yaml` file (simplied), used to create the package's +Cabal file, might look like this: +~~~yaml +spec-version: 0.36.0 + +name: c-example +version: 0.1.0.0 + +extra-source-files: +- include/my-library.h + +dependencies: +- base >= 4.7 && < 5 + +library: + source-dirs: src + include-dirs: # Where to look for C header files? + - include + c-sources: # What C source code files to be compiled and linked? 
+ - c-source/my-library.c + +executables: + c-example-exe: + main: Main.hs + source-dirs: app + ghc-options: + - -threaded + - -rtsopts + - -with-rtsopts=-N + dependencies: + - c-example +~~~ + +The project's `stack.yaml` file only needs to identify a snapshot: +~~~yaml +snapshot: lts-24.37 # GHC 9.10.3 +~~~ + +This example project can be built with Stack in the normal way (`stack build`), +and the built executable can then be executed in the Stack environment in the +normal way (`stack exec c-example-exe`). + +## Haskell packages with C `main` function + +A Haskell package can include an executable which has a `main` function written +in C. For example, consider a simple one-package Stack project named +`c-example`, with: + +A `package.yaml` describing a library and two executables, named `haskell-exe` +and `c-exe`: + +~~~yaml +spec-version: 0.36.0 + +name: c-example +version: 0.1.0.0 + +dependencies: base + +library: + source-dirs: src + # The Lib_stub.h header must be put by GHC somewhere where Cabal can find it. + # This tells GHC to put it in the autogen-stubs directory of the project + # directory. + ghc-options: + - -stubdir autogen-stubs + +executables: + haskell-exe: + main: Main.hs + source-dirs: app + ghc-options: + - -threaded + - -rtsopts + - -with-rtsopts=-N + dependencies: c-example + c-exe: + main: main.c + source-dirs: c-app + ghc-options: -no-hs-main + # This specifies that directory autogen-stubs should be searched for header + # files. + include-dirs: autogen-stubs + dependencies: c-example +~~~ + +!!! warning + + `Cabal-3.12.0.0`, a boot package of GHC 9.10.1, ignores `source-dirs` when + the `main` file is not a Haskell source code file. This was a regression and + fixed in subsequent versions of Cabal (the library). 
+ +A Haskell module souce file named `Lib.hs` in directory `src`: +~~~haskell +module Lib + ( myMax -- Exported only for the use of the 'Haskell' executable + ) where + +myMax :: Int -> Int -> Int +myMax x1 x2 = if x1 > x2 then x1 else x2 + +foreign export ccall myMax :: Int -> Int -> Int +~~~ + +A Haskell module source file named `Main.hs` in directory `app`: +~~~haskell +module Main ( main ) where + +import Lib ( myMax ) + +main :: IO () +main = print $ myMax 10 100 +~~~ + +A C source file named `main.c` in directory `c-app`: +~~~c +// Based in part on +// https://downloads.haskell.org/ghc/latest/docs/users_guide/exts/ffi.html#using-your-own-main + +#include // Provides printf() + +#include // Provides hs_init() and hs_exit(). See the Haskell 2010 + // Report, 8.7. + +// Parts specific to GHC +#ifdef __GLASGOW_HASKELL__ +#include "Lib_stub.h" // Automatically generated by GHC, given use of + // foreign export ... in module Lib.hs ... +#endif + +int main(int argc, char *argv[]) { + // Initialises the Haskell system and provides it with the available command + // line arguments + hs_init(&argc, &argv); + + // Use our foreign export from module Lib.hs ... + printf("%lld\n", myMax(10,100)); + + // De-initialise the Haskell system + hs_exit(); + return 0; +} +~~~ + +The `foreign export` declaration in Haskell module `Lib` will cause GHC to +generate a 'stub' C header file named `Lib_stub.h`. The GHC option `-stubdir` +will cause GHC to put that file in the specified directory (`autogen-stubs`, in +this example). + +!!! info + + If GHC's `-stubdir` option is omitted, GHC will put the generated C header + file together with the other build artefacts for the module. However, that + location cannot be specified reliably using the `include-dirs` key. 
+ +That generated C header file will have content like: +~~~c +#include +#if defined(__cplusplus) +extern "C" { +#endif +extern HsInt myMax(HsInt a1, HsInt a2); +#if defined(__cplusplus) +} +#endif +~~~ + +The `include-dirs` key will cause the specified directory (again, +`autogen-stubs` in this example) to be searched for C header files. + +The project's `stack.yaml` file only needs to identify a snapshot: +~~~yaml +snapshot: lts-24.37 # GHC 9.10.3 +~~~ + +This example project can be built with Stack in the normal way (`stack build`), +and the built executables can then be executed in the Stack environment in the +normal way (`stack exec haskell-exe` for the 'Haskell' executable and +`stack exec c-exe` for the 'C' executable). diff --git a/doc/topics/index.md b/doc/topics/index.md new file mode 100644 index 0000000000..52a6cceaaa --- /dev/null +++ b/doc/topics/index.md @@ -0,0 +1,96 @@ +--- +title: Topics +--- +
+ +# Topics (advanced) + +This part of the guide provides pages dedicated to specific topics. + +[Stack root](stack_root.md) + +: The Stack root is a directory where Stack stores important information + +[Stack work directories](stack_work.md) + +: Stack work directories are directories within a local project or package + directory in which Stack stores files created during the build process. + +[Snapshot location](snapshot_location.md) + +: How to specify the location of snapshots. + +[Package location](package_location.md) + +: How to specify the location of packages. + +[Snapshot specification](custom_snapshot.md) + +: How to specify the contents of a snapshot. + +[`stack.yaml` vs a Cabal file](stack_yaml_vs_cabal_package_file.md) + +: The difference between Stack's project-level configuration file and a + Cabal file describing a Haskell package. + +[Script interpreter](scripts.md) + +: How to use Stack's script interpreter. + +[Docker integration](docker_integration.md) + +: Stack has support for automatically performing builds inside a Docker + container. + +[Nix integration](nix_integration.md) + +: Stack can be configured to integrate with Nix, a purely functional package + manager. + +[Non-standard project initialization](nonstandard_project_init.md) + +: You may need to configure Stack to work with an existing project that has + one or more Cabal files but no Stack project-level configuration file. + +[Debugging](debugging.md) + +: Advice on debugging using Stack. + +[Editor integration](editor_integration.md) + +: Advice on intergrating Stack with code editors. + +[Stack and Visual Studio Code](Stack_and_VS_Code.md) + +: Advice on using Stack with Visual Studio Code and its Haskell extension. + +[Developing on Windows](developing_on_windows.md) + +: Advice on using Stack on Windows. 
+ +[Shell auto-completion](shell_autocompletion.md) + +: Adding support for the tab completion of standard Stack arguments to the + shell programs Bash, Zsh (the Z shell) and fish. + +[CI](CI.md) + +: Advice on using Stack with CI. + +[Travis CI](travis_ci.md) + +: Advice on using Stack on Travis CI. + +[Azure CI](azure_ci.md) + +: Advice on using Stack on Azure CI. + +[Lock files](lock_files.md) + +: The contents of Stack's lock files, how they are used, and how they are + created and updated. + +[Haskell and C code](haskell_and_c_code.md) + +: Advice on using Stack with Haskell packages that include C source code, + including those with a C `main` function. diff --git a/doc/topics/lock_files.md b/doc/topics/lock_files.md new file mode 100644 index 0000000000..2bfb41d974 --- /dev/null +++ b/doc/topics/lock_files.md @@ -0,0 +1,195 @@ +
+ +# Lock Files + +Stack attempts to provide reproducible build plans. This involves reproducibly +getting the exact same contents of source packages and configuration options +(like Cabal flags and GHC options) for a given set of input files. There are a +few problems with making this work: + +* Entering all of the information to fully provide reproducibility is tedious. + This would include things like Hackage revisions, hashes of remote tarballs, + etc. Users do not want to enter this information. + +* Many operations in Stack rely upon a "snapshot hash," which transitively + includes the completed information for all of these dependencies. If any of + that information is missing when parsing the `stack.yaml` file or snapshot + files, it could be expensive for Stack to calculate it. + +To address this, we follow the (fairly standard) approach of having a +_lock file_. The goal of the lock file is to cache completed locations of +project, snapshot packages and snapshots themselves so that: + +* These files can be stored in source control +* Users on other machines can reuse these lock files and get identical build + plans given that the used project packages and local snapshots are the same on + those machines +* Rerunning `stack build` in the future is deterministic in the build plan, not + depending on mutable state in the world like Hackage revisions + + !!! note + + If, for example, a tarball available remotely is deleted or the hash + changes, it will not be possible for Stack to perform the build. + However, by deterministic, we mean it either performs the same build or + fails, never accidentally doing something different. + +This document explains the contents of a lock file, how they are used, and how +they are created and updated. 
+ +## stack.yaml and snapshot files + +Relevant to this discussion, Stack's project-level configuration file +(`stack.yaml`, by default) specifies: + +* the parent snapshot (the [`snapshot`](../configure/yaml/project.md#snapshot) + key) +* extra-deps + +Some of this information can be incomplete. Consider this `stack.yaml` file: + +~~~yaml +snapshot: lts-19.22 +packages: +- . +extra-deps: +- acme-missiles-0.3 +~~~ + +This information is _incomplete_. For example, the extra-deps may change in the +future. Instead, you could specify enough information in the `stack.yaml` file +to fully resolve that package. That looks like: + +~~~yaml +extra-deps: +- hackage: acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1,613 + pantry-tree: + size: 226 + sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 +~~~ + +The `lts-19.22` information is also incomplete. While we assume in general that +Haskell LTS snapshots never change, there is nothing that prohibits that from +happening. Instead, the complete version of that key is: + +~~~yaml +snapshot: +- url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/19/22.yaml + size: 619399 + sha256: 5098594e71bdefe0c13e9e6236f12e3414ef91a2b89b029fd30e8fc8087f3a07 +~~~ + +Users do not particularly feel like writing all of that. Therefore, it is common +to see _incomplete_ information in a `stack.yaml` file. + +## Recursive snapshot layers + +Snapshot files can be _recursive_, where `stack.yaml` refers to `foo.yaml`, +which refers to `bar.yaml`, which refers to `baz.yaml`. A local snapshot file +can refer to a remote snapshot file (available via an HTTP(S) URL). + +We need to encode information from _all_ of these snapshot layers and the +`stack.yaml` file in the lock file, to ensure that we can detect if anything +changes. 
+ +## Performance + +In addition to acting as a pure correctness mechanism, the design of a lock file +given here also works as a performance improvement. Instead of requiring that +all snapshot files be fully parsed on each Stack invocation, we can store +information in the lock file and bypass parsing of the additional files in the +common case of no changes. + +## Lock file contents + +The lock file contains the following information: + +* Completed package locations for extra-deps and packages in snapshot files + + !!! note + + This only applies to _immutable_ packages. Mutable packages are not + included in the lock file. + +* Completed information for the snapshot locations + +It looks like the following: + +~~~yaml +# Lock file, some message about the file being auto-generated +snapshots: + # Starts with the snapshot specified in stack.yaml, + # then continues with the snapshot specified in each + # subsequent snapshot file + - original: + foo.yaml # raw content specified in a snapshot file + completed: + file: foo.yaml + sha256: XXXX + size: XXXX + - original: + lts-13.9 + completed: + size: 496662 + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/13/9.yaml + sha256: 83de9017d911cf7795f19353dba4d04bd24cd40622b7567ff61fc3f7223aa3ea + +packages: +- original: https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz + completed: + size: 1442 + url: https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz + name: acme-missiles + version: '0.3' + sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b + pantry-tree: + size: 226 + sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 +~~~ + +## Creation procedure + +Whenever a project-level configuration file (`stack.yaml`, by default) is +loaded, Stack checks for a lock file in the same file path, with a `.lock` +extension added. 
For example, if you command: + +~~~text +stack --stack-yaml my-stack.yaml build +~~~ + +or + +~~~text +stack --stack-yaml my-stack.yaml build --dry-run +~~~ + +then Stack will use a lock file in the location `my-stack.yaml.lock`. For the +rest of this document, we will assume that the files are simply `stack.yaml` and +`stack.yaml.lock`. + +If the lock file does not exist, subject to Stack's +[`--lock-file`](../configure/global_flags.md#-lock-file-option) option, it will +be created by: + +* Loading the `stack.yaml` +* Loading all snapshot files +* Completing all missing information +* Writing out the new `stack.yaml.lock` file to the disk + +## Update procedure + +Whenever a project-level configuration file (`stack.yaml`, by default) is +loaded, all completed package or snapshot locations (even those completed using +information from a lock file) get collected to form a new lock file in memory. +Subject to Stack's +[`--lock-file`](../configure/global_flags.md#-lock-file-option) option, that new +lock file is compared against the one on disk and, if there are any differences, +written out to the disk. + +## `stack config build-files` command + +The +[`stack config build-files`](../commands/config_command.md#the-stack-config-build-files-command) +loads a project-level configuration file (see above) without taking any other +build steps (other than generating, when applicable, a Cabal file from a package +description in the Hpack format). diff --git a/doc/topics/nix_integration.md b/doc/topics/nix_integration.md new file mode 100644 index 0000000000..7720033893 --- /dev/null +++ b/doc/topics/nix_integration.md @@ -0,0 +1,471 @@ +
+ +# Nix integration + +[:octicons-tag-24: 0.1.10.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.10.0) + +[Nix](https://nixos.org/) is a purely functional package manager. Stack can be +configured to integrate with Nix. Integration provides these benefits: + +* more reproducible builds. This is because fixed versions of any system + libraries and commands required to build the project are automatically built + using Nix and managed locally for each project. These system packages never + conflict with any existing versions of these libraries on your system. That + they are managed locally to the project means that you do not need to alter + your system in any way to build any odd project pulled from the Internet; and + +* implicit sharing of system packages between projects. This means you do not + have more copies on-disk than you need. + +The Nix package manager is a pre-requisite for integration. On Linux (including +Windows Subsystem for Linux) and macOS, it can be downloaded and installed from +the [Nix download page](https://nixos.org/download.html). + +When integrated with Nix, Stack handles Haskell dependencies as it usually does +and the Nix package manager handles the _non-Haskell_ dependencies needed by the +Haskell packages. + +Stack downloads Haskell packages from [Stackage](https://www.stackage.org/lts) +and builds them locally. Stack uses Nix to download +[Nix packages][nix-search-packages]. These provide the GHC compiler and external +C libraries that you would normally install manually. + +Nix's `nix-shell` starts an interactive shell based on a Nix expression. Stack +can automatically create a Nix build environment in the background using +`nix-shell`. There are two alternative options to create such a build +environment: + +1. provide a list of [Nix packages][nix-search-packages]. 
To these, Stack will + add Nix packages for the GHC compiler, `git` (the distributed version control + system), `gcc` (the GNU compiler collection), `gmp` (the GNU multiple + precision arithmetic library) and `cacert` (a bundle of X.509 certificates of + public Certificate Authorities); and +2. provide a `shell.nix` file that gives you more control over the libraries and + tools available inside the shell. + +A `shell.nix` file requires writing code in Nix's +[custom language][nix-language]. Use this option only if you know Nix and have +special requirements, such as using custom Nix packages that override the +standard ones or using system libraries with special requirements. + +## Checking the Nix installation + +Once Nix is installed, the Nix commands (`nix-shell` etc) should be available. +If they are not, it could be because the file +`$HOME/.nix-profile/etc/profile.d/nix.sh` is not sourced by your shell. + +You should either: + +1. run `source ~/.nix-profile/etc/profile.d/nix.sh` each time you open a + terminal and need Nix; or +2. add the command `source ~/.nix-profile/etc/profile.d/nix.sh` to your + `~/.bashrc` or `~/.bash_profile` file. + +A Nix path can be specified between angle brackets, e.g. `<nixpkgs>`, and the +directories listed in the `NIX_PATH` environment variable will be searched for +the given file or directory name. Stack makes use of path `<nixpkgs>`. From +Nix 2.4, `NIX_PATH` is not set by `nix.sh`. If `NIX_PATH` is not set, Nix will +fall back to (first) `$HOME/.nix-defexpr/channels` in impure and unrestricted +evaluation mode. However, Stack may use a pure Nix mode (see further +[below](#pure-and-impure-nix-shells)). That directory can be appended to +`NIX_PATH` with +`export NIX_PATH=${NIX_PATH:+$NIX_PATH:}$HOME/.nix-defexpr/channels`. For +information about how Stack itself can configure `NIX_PATH`, see further +[below](#nix-package-sources). 
+ +## Enable Nix integration + +On NixOS, Nix integration is enabled by default; on other operating systems it +is disabled. To enable Nix integration, add the following section to your Stack +configuration file (`stack.yaml` or `config.yaml`): + +~~~yaml +nix: + enable: true # false by default, except on NixOS +~~~ + +The equivalent command line flag (which will prevail) is `--[no-]nix`. Passing +any `--nix-*` option on the command line will imply the `--nix` option. + +If Nix integration is not enabled, Stack will notify the user if a `nix` +executable is on the PATH. If that notification is unwanted, it can be muted by +setting Stack's configuration option +[`notify-if-nix-on-path`](../configure/yaml/non-project.md#notify-if-nix-on-path) +to `false`. + +If Nix integration is enabled: + +* `stack build` and `stack exec` automatically launch themselves in a local + build environment (using `nix-shell`). It is not necessary to run + `stack setup`, unless you want to cache a GHC installation before running a + build; and + +* [`system-ghc: true`](../configure/yaml/non-project.md#system-ghc) is implied. + ([`install-ghc: false`](../configure/yaml/non-project.md#install-ghc) is not + implied.) + +**Known limitation on macOS:** currently, `stack --nix ghci` fails on macOS, due +to a bug in GHCi when working with external shared libraries. + +## Supporting both Nix and non-Nix developers + +With Nix integration enabled in Stack's configuration file, every developer of +your project needs to have Nix installed, but the developer also gets all +external libraries automatically. + +Julien Debon of Tweag has published a [blog post][tweag-blog-post] on +*Smooth, non-invasive Haskell Stack and Nix shell integration* (2 June 2022). +The post explains how to set things up so that both Nix and non-Nix developers +can work together on the same project. 
The `tweag/haskell-stack-nix-example` +[GitHub repository][tweag-example] provides an example of working Stack and Nix +shell integration to accompany the post. + +Nix 2.4 (released 1 November 2021) introduced a new and experimental format to +package Nix-based projects, known as 'flakes'. + +The example below adapts and extends the example accompanying the blog post +above to use Nix flakes. The `flake.nix` file is: + +~~~nix +{ + description = "my project description"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + inputs.flake-utils.url = "github:numtide/flake-utils"; + + outputs = { self, nixpkgs, flake-utils }: + flake-utils.lib.eachDefaultSystem (system: + let + pkgs = nixpkgs.legacyPackages.${system}; + + hPkgs = + pkgs.haskell.packages."ghc8107"; # need to match Stackage LTS version + # from stack.yaml snapshot + + myDevTools = [ + hPkgs.ghc # GHC compiler in the desired version (will be available on PATH) + hPkgs.ghcid # Continuous terminal Haskell compile checker + hPkgs.ormolu # Haskell formatter + hPkgs.hlint # Haskell codestyle checker + hPkgs.hoogle # Lookup Haskell documentation + hPkgs.haskell-language-server # LSP server for editor + hPkgs.implicit-hie # auto generate LSP hie.yaml file from cabal + hPkgs.retrie # Haskell refactoring tool + # hPkgs.cabal-install + stack-wrapped + pkgs.zlib # External C library needed by some Haskell packages + ]; + + # Wrap Stack to work with our Nix integration. We do not want to modify + # stack.yaml so non-Nix users do not notice anything. + # - no-nix: We do not want Stack's way of integrating Nix. 
+ # --system-ghc # Use the existing GHC on PATH (will come from this Nix file) + # --no-install-ghc # Do not try to install GHC if no matching GHC found on PATH + stack-wrapped = pkgs.symlinkJoin { + name = "stack"; # will be available as the usual `stack` in terminal + paths = [ pkgs.stack ]; + buildInputs = [ pkgs.makeWrapper ]; + postBuild = '' + wrapProgram $out/bin/stack \ + --add-flags "\ + --no-nix \ + --system-ghc \ + --no-install-ghc \ + " + ''; + }; + in { + devShells.default = pkgs.mkShell { + buildInputs = myDevTools; + + # Make external Nix c libraries like zlib known to GHC, like + # pkgs.haskell.lib.buildStackProject does + # https://github.com/NixOS/nixpkgs/blob/d64780ea0e22b5f61cd6012a456869c702a72f20/pkgs/development/haskell-modules/generic-stack-builder.nix#L38 + LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath myDevTools; + }; + }); +} +~~~ + +Check in this `flake.nix` to your project's repository. Run the `nix develop` +command (it searches for `flake.nix` by default) and you will find a new +`flake.lock` file. That file pins the precise nixpkgs package set. Check in +that `flake.lock` file as well, and every Nix developer of your project will use +precisely the same package set. + +## GHC through Nix packages + +Nix integration will instruct Stack to build inside a local build environment. +That environment will also download and use a +[GHC Nix package](https://search.nixos.org/packages?query=haskell.compiler.ghc) +matching the required version of the configured Stack +[snapshot](../configure/yaml/project.md#snapshot). + +Enabling Nix integration means that packages will always be built using the +local GHC from Nix inside your shell, rather than your globally installed system +GHC (if any). + +Stack can use only GHC versions that are in the Nix package repository. 
The +[Nixpkgs master branch](https://github.com/NixOS/nixpkgs/tree/master/pkgs/development/haskell-modules) +usually picks up new versions quickly, but it takes two or three days before +those updates arrive in the `unstable` channel. Release channels, like +`nixos-22.05`, receive those updates only occasionally -- say, every two or +three months --, so you should not expect them to have the latest compiler +available. Fresh NixOS installs use a release version by default. + +To identify whether a given compiler is available, you can use the following Nix +command: + +~~~sh +nix-env -f "<nixpkgs>" -qaP -A haskell.compiler.ghc924 +haskell.compiler.ghc924 ghc-9.2.4 +~~~ + +If Nix does not know that version of GHC, you will see the following error +message: + +~~~sh +nix-env -f "<nixpkgs>" -qaP -A haskell.compiler.ghc999 +error: attribute ‘ghc999’ in selection path ‘haskell.compiler.ghc999’ not found +~~~ + +You can list all known Haskell compilers in Nix with the following: + +~~~sh +nix-instantiate --eval -E "with import <nixpkgs> {}; lib.attrNames haskell.compiler" +~~~ + +Alternatively, use `nix repl`, a convenient tool to explore nixpkgs: + +~~~sh +nix repl +~~~ + +In the REPL, load nixpkgs and get the same information through autocomplete: + +~~~sh +nix-repl> :l <nixpkgs> +nix-repl> haskell.compiler.ghc<TAB> +~~~ + +You can type and evaluate any Nix expression in the Nix REPL, such as the one we +gave to `nix-instantiate` earlier. + +## External C libraries through Nix packages + +To let Nix manage external C libraries, add (for example) the following section +to your Stack configuration file: + +~~~yaml +nix: + enable: true + packages: [zlib, glpk, pcre] +~~~ + +The equivalent command line option is `--nix-packages "zlib glpk pcre"`. + +The `packages` key and the `shell-file` key (see further below) are +alternatives. Specifying both results in an error. 
+ +The example above will instruct Stack to build inside a local build environment +that will have the Nix packages +[zlib](https://search.nixos.org/packages?query=zlib), +[glpk](https://search.nixos.org/packages?query=glpk) and +[pcre](https://search.nixos.org/packages?query=pcre) +installed, which provide the C libraries of the same names. + +**Note:** currently, Stack only discovers dynamic and static libraries in the +`lib/` folder of any Nix package, and likewise header files in the `include/` +folder. If you are dealing with a package that does not follow this standard +layout, you will have to deal with that using a custom `shell.nix` file (see +further below). + +## External C libraries through a `shell.nix` file + +In Nix, a 'derivation' is a description of a build action and its result is a +Nix store object. Nix's [custom language][nix-language] can provide a fully +customized derivation as an environment to use. To specify such a `shell.nix` +file, add the following section to your Stack configuration file: + +~~~yaml +nix: + enable: true + shell-file: shell.nix +~~~ + +The equivalent command line option (which will prevail) is +`--nix-shell-file shell.nix`. + +The `packages` and `shell-file` keys are alternatives. Specifying both results +in an error. + +Defining a `shell.nix` file allows you to override some Nix derivations, for +instance to change some build options of the libraries you use, or to set +additional environment variables. For further information, see the +[Nix manual][nix-manual-exprs]. + +The `shell.nix` file that is the equivalent of the +`packages: [zlib, glpk, pcre]` example above is: + +~~~nix +{ghc}: +with (import <nixpkgs> {}); + +haskell.lib.buildStackProject { + inherit ghc; + name = "myEnv"; + buildInputs = [ zlib glpk pcre ]; +} +~~~ + +The `buildStackProject` utility function is documented in the +[Nixpkgs manual][nixpkgs-manual-haskell]. 
+ +Stack expects the `shell.nix` file to define a function with one argument +called `ghc` (arguments are not positional), which you should give to +function `buildStackProject`. This argument is a GHC Nix package in the +version as defined in the snapshot you set in Stack's project-level +configuration file (`stack.yaml`, by default). + +## Pure and impure Nix shells + +By default, Stack will run the build in a *pure* Nix build environment (or +*shell*). Building in a pure Nix shell means: + +1. with limited exceptions, **no environment variable will be forwarded** from + your user session to the Nix shell (variables like `HTTP_PROXY`, `PATH`, + `STACK_XDG` and `STACK_ROOT` notably will not be available); and + +2. the build should fail if you have not specified all the dependencies in the + `packages:` section of the Stack configuration file, even if these + dependencies are installed elsewhere on your system. This behaviour enforces + a complete description of the build environment to facilitate + reproducibility. + +To override this behaviour, add the following section to your Stack YAML +configuration file: + +~~~yaml +nix: + enable: true + pure: false +~~~ + +The equivalent command line flag (which will prevail) is `--[no-]nix-pure`. + +To run the build in a *pure* Nix shell but preserve specific environment +variables use Nix's `nix-shell` command's `--keep` option. For example, to +preserve the [`STACK_XDG`](../configure/environment_variables.md#stack_xdg) +environment variable, add the following to your Stack YAML configuration file: + +~~~yaml +nix: + nix-shell-options: + - --keep + - STACK_XDG +~~~ + +The equivalent command line option is: + +~~~text +--nix-shell-options "--keep STACK_XDG" +~~~ + +**Note:** On macOS, shells are non-pure by default currently. This is due to +locale issues that are soon to be resolved. So on macOS you will need to be a +bit more careful to check that you really have listed all dependencies. 
+ +## Nix package sources + +Nix organizes its packages in snapshots of packages (each snapshot being a +"package set") similar to how Stackage organizes Haskell packages. By default, +`nix-shell` will look for the "nixpkgs" package set located by your `NIX_PATH` +environment variable. This package set can be different depending on when you +installed Nix and which nixpkgs channel you are using (similar to the LTS channel +for stable packages and the nightly channel for bleeding edge packages in +[Stackage](https://www.stackage.org/)). This is bad for reproducibility, so +nixpkgs should be pinned, i.e., set to the same package set for every developer +of your project. + +To set or override the Nix package set, add the following section to your Stack +configuration file: + +~~~yaml +nix: + path: [nixpkgs=<path_to_my_own_nixpkgs_clone>] +~~~ + +The equivalent command line option is +`--nix-path <path_to_my_own_nixpkgs_clone>`. + +By this means, you can ask Nix to use your own local checkout of the nixpkgs +repository. You could in this way use a bleeding edge nixpkgs, cloned from the +`NixOS/nixpkgs` [repository](https://github.com/NixOS/nixpkgs) `master` +branch, or edit the Nix descriptions of some packages. + +The Tweag example [repository][tweag-example] shows how you can pin a package +set. + +## Non-project specific configuration + +Below is a summary of the non-project specific configuration options and their +default values. The options can be set in Stack's project-level configuration +file (`stack.yaml`, by default) or its global configuration file +(`config.yaml`). + +~~~yaml +nix: + + # false by default, except on NixOS. Is Nix integration enabled? + enable: true + + # true by default. Should Nix run in a pure shell? + pure: true + + # Empty by default. The list of packages you want to be available in the + # nix-shell at build time (with `stack build`) and run time (with + # `stack exec`). + packages: [] + + # Unset by default. You cannot set this option if `packages:` + # is already present and not empty. 
+ shell-file: shell.nix + + # A list of strings, empty by default. Additional options that will be passed + # verbatim to the `nix-shell` command. + nix-shell-options: [] + + # A list of strings, empty by default, such as + # `[nixpkgs=/my/local/nixpkgs/clone]` that will be used to override + # NIX_PATH. + path: [] + + # false by default. Whether to add your Nix dependencies as Nix garbage + # collection roots. This way, calling nix-collect-garbage will not remove + # those packages from the Nix store, saving you some time when running + # stack build again with Nix support activated. + # + # This creates a `nix-gc-symlinks` directory in the project `.stack-work`. + # To revert that, just delete this `nix-gc-symlinks` directory. + add-gc-roots: false +~~~ + +`stack --nix-help` will list the equivalent command line flags and options. + +## Stack and developer tools on NixOS + +NixOS is a Linux distribution based on Nix, that is composed using modules and +packages defined in the Nixpkgs project. + +When using Stack on NixOS, you must use Stack's Nix integration to install GHC. +That is because external C libraries in NixOS are not installed in the usual +distribution directories. GHC installed through Stack (without Nix) cannot find +those libraries and, therefore, cannot build most projects. However, GHC +provided through Nix can be modified to find the external C libraries provided +through Nix. 
+ +[nix-language]: https://wiki.nixos.org/wiki/Overview_of_the_Nix_Language +[nix-manual-exprs]: http://nixos.org/manual/nix/stable/expressions/writing-nix-expressions.html +[nix-search-packages]: https://search.nixos.org/packages +[nixpkgs-manual-haskell]: https://haskell4nix.readthedocs.io/nixpkgs-users-guide.html?highlight=buildStackProject#how-to-build-a-haskell-project-using-stack +[tweag-blog-post]: https://www.tweag.io/blog/2022-06-02-haskell-stack-nix-shell/ +[tweag-example]: https://github.com/tweag/haskell-stack-nix-example/ diff --git a/doc/topics/nonstandard_project_init.md b/doc/topics/nonstandard_project_init.md new file mode 100644 index 0000000000..3798dc95f8 --- /dev/null +++ b/doc/topics/nonstandard_project_init.md @@ -0,0 +1,36 @@ +
+ +# Non-standard project initialization + +You may need to configure Stack to work with an existing project that has one or +more Cabal files but no Stack project-level configuration file (`stack.yaml`, by +default). + +## The `stack init` command + +The `stack init` command: + +* finds all of the Cabal files in your current directory and subdirectories + (unless you use `--ignore-subdirs`) and determines the packages and versions + they require +* finds the best combination of snapshot and package flags that allows + everything to compile with minimum external dependencies +* tries to look for the best matching snapshot from latest Haskell LTS, latest + Stackage Nightly, and other Haskell LTS, in that order + +If `stack init` finds a match, it will generate a `stack.yaml` file. + +You can specify the directory, or directories, to include in the search for +Cabal files. + +### The `stack init --force` flag + +Set the flag to force the over-writing of any existing `stack.yaml` file. + +### The `stack init --ignore-subdirs` flag + +Set the flag to not search for Cabal files in subdirectories. + +### The `stack init --omit-packages` flag + +Set the flag to exclude any conflicting or incompatible user packages. diff --git a/doc/topics/package_location.md b/doc/topics/package_location.md new file mode 100644 index 0000000000..ec1383d675 --- /dev/null +++ b/doc/topics/package_location.md @@ -0,0 +1,225 @@ +
+ +# Package location + +[:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) + +This document describes the specification of a package location (in the +`extra-deps` key and in a snapshot). + +!!! info + + Stack uses the [Pantry](https://hackage.haskell.org/package/pantry) library + to specify the location of packages. Pantry is geared towards reproducible + build plans with cryptographically secure specification of packages. + +There are three types of package locations: + +1. Hackage packages +2. Git and Mercurial repositories +3. Local or remote archives (such as GitHub archives) + +All three types support optional tree metadata to be added, which can be used +for reproducibility and faster downloads. This information can automatically be +generated in a [lock file](lock_files.md). + +## Hackage packages + +A package can be identified by its name, version and Cabal file revision +number, with revision `0` being the original Cabal file. For example: + +~~~yaml +extra-deps: +- acme-missiles-0.3@rev:0 +~~~ + +A package name and version only can be stated. Using this syntax, the most +recent Cabal file revision available in the package index will be used. For +example: + +~~~yaml +extra-deps: +- acme-missiles-0.3 +~~~ + +This syntax is often used in practice, but may result in one build differing +from another, if a new or further Cabal file revision is added to the package +index between the builds. + +As an alternative to specifying the Cabal file revision number, you can specify +the package name and version with the SHA256 hash of the contents of its Cabal +file. Doing so is slightly more resilient than using the Cabal file revision +number, as it does not rely on the correct ordering in the package index. +For example: + +~~~yaml +extra-deps: +- acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1 +~~~ + +Optionally, you can specify also the size of the Cabal file in bytes. 
For +example (where the file size is `613` bytes): + +~~~yaml +extra-deps: +- acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1,613 +~~~ + +Optionally, you can specify also the Pantry tree information. For example: + +~~~yaml +- hackage: acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1,613 + pantry-tree: + size: 226 + sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 +~~~ + +A Pantry tree is a list of CAS (content-addressable storage) +'SHA256 hash'-'size in bytes' keys for each of the files in a package. + +The SHA256 hash of the contents of the Cabal file and its size in bytes is +provided in Stack's lock file. For further information, see the +[lock files](lock_files.md) documentation. The SHA256 hash and file size +alternative is also what Stack uses when it makes suggestions about missing +packages. + +## Git and Mercurial repositories + +You can specify a Git or Mercurial repository at a specific commit, and Stack +will clone that repository and, if it has submodules (Git), update the +repository's submodules. For example: + +~~~yaml +extra-deps: +- git: git@github.com:commercialhaskell/stack.git + commit: '6a86ee32e5b869a877151f74064572225e1a0398' +- git: git@github.com:snoyberg/http-client.git + commit: 'a5f4f3' +- hg: https://example.com/hg/repo + commit: 'da39a3ee5e6b4b0d3255bfef95601890afd80709' +~~~ + +!!! note + + It is highly recommended that you only use SHA1 values for a Git or + Mercurial commit. Other values may work, but they are not officially + supported, and may result in unexpected behavior (namely, Stack will not + automatically pull to update to new versions). Another problem with this is + that your build will not be deterministic, because when someone else tries + to build the project they can get a different checkout of the package. + +!!! note + + The `commit:` key expects a YAML string. 
A commit hash, or partial hash, + comprised only of digits represents a YAML number, unless it is enclosed in + quotation marks. + +!!! warning + + For the contents of a Git repository, Stack cannot handle filepaths or + symbolic link names that are longer than those supported by the `ustar` + (Unix Standard TAR) archive format defined by + [POSIX.1-1988](https://nvlpubs.nist.gov/nistpubs/Legacy/FIPS/fipspub151-1.pdf). + + Stack uses `git archive` to convert the content of a Git repository to a + TAR archive, which it then seeks to consume. Git produces `pax` format + archives which use 'extended' headers for matters that the `ustar` format + cannot handle. Unfortunately, Stack cannot consume an extended header and + will silently discard the item. + +A common practice in the Haskell world is to use "megarepos", or repositories +with multiple packages in various subdirectories. Some common examples include +[wai](https://github.com/yesodweb/wai/) and +[digestive-functors](https://github.com/jaspervdj/digestive-functors). To +support this, you may also specify `subdirs` for repositories. For example: + +~~~yaml +extra-deps: +- git: git@github.com:yesodweb/wai + commit: '2f8a8e1b771829f4a8a77c0111352ce45a14c30f' + subdirs: + - auto-update + - wai +~~~ + +If unspecified, `subdirs` defaults to `['.']` meaning looking for a package in +the root of the repository. If you specify a value of `subdirs`, then `'.'` is +_not_ included by default and needs to be explicitly specified if a required +package is found in the top-level directory of the repository. + +### git-annex + +[git-annex](https://git-annex.branchable.com) is not supported. This is because +`git archive` does not handle symbolic links outside the work tree. It is still +possible to use repositories which use git-annex but do not require the annex +files for the package to be built. 
+ +To do so, ensure that any files or directories stored by git-annex are marked +[export-ignore](https://git-scm.com/docs/git-archive#Documentation/git-archive.txt-export-ignore) +in the `.gitattributes` file in the repository. For further information, see +issue [#4579](https://github.com/commercialhaskell/stack/issues/4579). + +For example, if the directory `fonts/` is controlled by git-annex, use the +following line: + +~~~gitattributes +fonts export-ignore +~~~ + +## Local or remote archives (such as GitHub archives) + +### Filepaths or URLs to archive files + +You can use filepaths referring to local archive files or HTTP or HTTPS URLs +referring to remote archive files, either tarballs or ZIP files. + +!!! note + + An example of a remote archive file is a Hackage package candidate, usually + located at (for example) + https://hackage.haskell.org/package/my-package-1.0.0/candidate/my-package-1.0.0.tar.gz. + +!!! warning + + Stack assumes that these archive files never change after downloading to + avoid needing to make an HTTP request on each build. + +For safer, more reproducible builds, you can optionally specify a cryptographic +hash of the archive file. + +For example: + +~~~yaml +extra-deps: +- https://example.com/foo/bar/baz-0.0.2.tar.gz +- archive: http://github.com/yesodweb/wai/archive/2f8a8e1b771829f4a8a77c0111352ce45a14c30f.zip + subdirs: + - wai + - warp +- archive: ../acme-missiles-0.3.tar.gz + sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b +~~~ + +### GitHub archive files + +[:octicons-tag-24: 1.7.1](https://github.com/commercialhaskell/stack/releases/tag/v1.7.1) + +You can specify a GitHub repository at a specific commit and Stack will obtain +from GitHub an archive file of the files in the repository at that point in its +history. For example: + +~~~yaml +extra-deps: +- github: snoyberg/http-client + commit: 'a5f4f30f01366738f913968163d856366d7e0342' +~~~ + +!!! 
note + + An archive file of the files in a GitHub repository at a point in its + history is not the same as a clone of the repository (including its history) + and the updating of any submodules. If you need the latter, use the syntax + for a [Git repository](#git-and-mercurial-repositories). + + If the package fails to build due to missing files, it may be that updated + submodules are required. diff --git a/doc/topics/scripts.md b/doc/topics/scripts.md new file mode 100644 index 0000000000..7c88e61dd2 --- /dev/null +++ b/doc/topics/scripts.md @@ -0,0 +1,310 @@ +
+ +# Stack's script interpreter + +Stack offers a very useful feature for running files: a script interpreter. For +too long have Haskellers felt shackled to bash or Python because it is just too +hard to create reusable source-only Haskell scripts. Stack attempts to solve +that. + +You can use `stack <file name>` to execute a Haskell source file. Usually, the +Stack command to be applied is specified using a special Haskell comment (the +Stack interpreter options comment) at the start of the source file. That command +is most often [`stack script`](../commands/script_command.md) but it can be, for +example, [`stack runghc`](../commands/runghc_command.md). If there is no Stack +interpreter options comment, Stack will warn that one was expected. + +An example will be easiest to understand. Consider the Haskell source file +`turtle-example.hs` with contents: + +~~~haskell +#!/usr/bin/env stack +-- stack script --snapshot lts-24.37 --package turtle +{-# LANGUAGE OverloadedStrings #-} +import Turtle (echo) +main = echo "Hello World!" +~~~ + +=== "Unix-like" + + The first line beginning with the 'shebang' (`#!`) tells Unix to use Stack + as a script interpreter, if the file's permissions mark it as executable. A + shebang line is limited to a single argument, here `stack`. + + The file's permissions can be set with command `chmod` and then it can be + run: + + ~~~text + chmod +x turtle-example.hs + ./turtle-example.hs + ~~~ + + !!! note + + On macOS: + + - Avoid `{-# LANGUAGE CPP #-}` in Stack scripts; it breaks the shebang + line ([GHC #6132](https://gitlab.haskell.org/ghc/ghc/issues/6132)) + + - Use a compiled executable, not another script, in the shebang line. + Eg `#!/usr/bin/env runhaskell` will work but + `#!/usr/local/bin/runhaskell` would not. 
+ + Alternatively, the script can be run with command: + + ~~~text + stack turtle-example.hs + ~~~ + +=== "Windows" + + The first line beginning with the 'shebang' (`#!`) has a meaning on + Unix-like operating systems but will be ignored by PowerShell. It can be + omitted on Windows. The script can be run with command: + + ~~~text + stack turtle-example.hs + ~~~ + +In both cases, the command yields: + +~~~text +Hello World! +~~~ + +the first time after a little delay (as GHC is downloaded, if necessary, and +dependencies are built) and subsequent times more promptly (as the runs are +able to reuse everything already built). + +The second line of the source code is the Stack interpreter options comment. In +this example, it specifies the [`stack script`](../commands/script_command.md) +command with the options of a LTS Haskell 24.37 snapshot +(`--snapshot lts-24.37`) and ensuring the +[`turtle`](https://hackage.haskell.org/package/turtle) package is available +(`--package turtle`). The version of the package will be that in the specified +snapshot (`lts-24.37` provides `turtle-1.6.2`). + +## Arguments and interpreter options and arguments + +Arguments for the script can be specified on the command line after the file +name: `stack <file name> <arg1> <arg2> ...`. + +The Stack interpreter options comment must specify what would be a single valid +Stack command at the command line if the file name were included as an argument, +starting with `stack`. It can include `--` followed by arguments. In particular, +the Stack command `stack MyScript.hs <arg1> <arg2>` with +Stack interpreter options comment: + +~~~haskell +-- stack <Stack command and options> -- +~~~ + +is equivalent to the following command at the command line: + +~~~text +stack <Stack command and options> -- MyScript.hs <arg1> <arg2> +~~~ + +The Stack interpreter options comment must be the first line of the file, unless +a shebang line is the first line, when the comment must be the second line. The +comment must start in the first column of the line. 
+ +When many options are needed, a block style comment that splits the command over +more than one line may be more convenient and easier to read. + +For example, the command `stack MyScript.hs arg1 arg2` with `MyScript.hs`: + +~~~haskell +#!/usr/bin/env stack +{- stack script + --snapshot lts-24.37 + -- + +RTS -s -RTS +-} +import Data.List (intercalate) +import System.Environment (getArgs) +import Turtle (echo, fromString) + +main = do + args <- getArgs + echo $ fromString $ intercalate ", " args +~~~ + +is equivalent to the following command at the command line: + +~~~text +stack script --snapshot lts-24.37 -- MyScript.hs arg1 arg2 +RTS -s -RTS +~~~ + +where `+RTS -s -RTS` are some of GHC's +[runtime system (RTS) options](https://downloads.haskell.org/~ghc/latest/docs/users_guide/runtime_control.html). + +Arguments that include spaces can be quoted using double quotation marks. + +## Just-in-time compilation + +As with using [`stack script`](../commands/script_command.md) at the command +line, you can pass the `--compile` flag to make Stack compile the script, and +then run the compiled executable. Compilation is done quickly, without +optimization. To compile with optimization, pass the `--optimize` flag instead. +Compilation is done only if needed; if the executable already exists, and is +newer than the script, Stack just runs the executable directly. + +This feature can be good for speed (your script runs faster) and also for +durability (the executable remains runnable even if the script is disturbed, eg +due to changes in your installed GHC/snapshots, changes to source files during +git bisect, etc.) + +## Using multiple packages + +As with using [`stack script`](../commands/script_command.md) at the command +line, you can also specify multiple packages, either with multiple `--package` +options, or by providing a comma or space separated list. 
For example: + +~~~haskell +#!/usr/bin/env stack +{- stack script + --snapshot lts-24.37 + --package turtle + --package "stm async" + --package http-client,http-conduit +-} +~~~ + +## Using extra-deps + +As with using [`stack script`](../commands/script_command.md) at the command +line, you can also specify one or more extra-deps using a valid YAML value for +each. For example: + +~~~haskell +#!/usr/bin/env stack +{- stack script + --snapshot lts-24.37 + --extra-dep acme-missiles-0.3@rev:0 + --extra-dep "{git: git@github.com:yesodweb/wai, commit: '2f8a8e1b771829f4a8a77c0111352ce45a14c30f', subdirs: [auto-update, wai]}" +-} +~~~ + +Relative paths to local archive files are assumed to be relative to the +directory in which the script file is located. + +## Stack configuration for scripts + +When using the [`stack script`](../commands/script_command.md) command, as when +using it at the command line, any project-level configuration file +(`stack.yaml`, by default) (including in the `global-project` directory in the +Stack root), including any specified by the options to the `stack script` +command itself, is ignored. + +!!! info + + Non-project level configuration options in global configuration files + (`config.yaml`), are not ignored by the + [`stack script`](../commands/script_command.md) command. Such options may be + useful if [`allow-newer`](../configure/yaml/non-project.md#allow-newer) + and/or + [`allow-newer-deps`](../configure/yaml/non-project.md#allow-newer-deps) + are required. + +When using the [`stack runghc`](../commands/runghc_command.md) command, as when +using it at the command line, if the current working directory is inside a +project, then that project's project-level configuration file is effective when +running the script. Otherwise the script uses the project-level configuration +file in the `global-project` directory in the Stack root. 
+ +## Testing scripts + +You can use the flag `--script-no-run-compile` on the command line to enable (it +is disabled by default) the use of the `--no-run` option with +[`stack script`](../commands/script_command.md) (and forcing the `--compile` +option). The flag may help test that scripts compile in CI (continuous +integration). + +For example, consider the following simple script, in a file named `Script.hs`, +which makes use of the joke package +[`acme-missiles`](https://hackage.haskell.org/package/acme-missiles): + +~~~haskell +{- stack script + --snapshot lts-24.37 + --package acme-missiles +-} +import Acme.Missiles (launchMissiles) + +main :: IO () +main = launchMissiles +~~~ + +The command `stack --script-no-run-compile Script.hs` then behaves as if the +command +`stack script --snapshot lts-24.37 --package acme-missiles --no-run --compile -- Script.hs` +had been given. `Script.hs` is compiled (without optimisation) and the resulting +executable is not run: no missiles are launched in the process! + +## Writing independent and reliable scripts + +The [`stack script`](../commands/script_command.md) command will automatically: + +* Install GHC and libraries, if missing. + [`stack script`](../commands/script_command.md) behaves as if the + `--install-ghc` flag had been passed at the command line. +* Require that all packages used be explicitly stated on the command line. + +This ensures that your scripts are _independent_ of any prior deployment +specific configuration, and are _reliable_ by using exactly the same version of +all packages every time it runs so that the script does not break by +accidentally using incompatible package versions. + +In earlier versions of Stack, the +[`stack runghc`](../commands/runghc_command.md) command was used for scripts and +can still be used in that way. In order to achieve the same effect with the +[`stack runghc`](../commands/runghc_command.md) command, you can do the +following: + +1. 
Use the `--install-ghc` option to install the compiler automatically +2. Explicitly specify all packages required by the script using the `--package` + option. Use `-hide-all-packages` GHC option to force explicit specification + of all packages. +3. Use the `--snapshot` Stack option to ensure a specific GHC version and + package set is used. + +It is possible for a project-level configuration file to affect +[`stack runghc`](../commands/runghc_command.md). For that reason, +[`stack script`](../commands/script_command.md) is strongly recommended. For +those curious, here is an example with +[`stack runghc`](../commands/runghc_command.md): + +~~~haskell +#!/usr/bin/env stack +{- stack + runghc + --install-ghc + --snapshot lts-24.37 + --package base + --package turtle + -- + -hide-all-packages + -} +~~~ + +The [`stack runghc`](../commands/runghc_command.md) command is still useful, +especially when you are working on a project and want to access the package +databases and configurations used by that project. See the next section for more +information on configuration files. + +## Loading scripts in GHCi + +Sometimes you want to load your script in GHCi to play around with your program. +In those cases, you can use `exec ghci` option in the script to achieve +it. Here is an example: + +~~~haskell +#!/usr/bin/env stack +{- stack + exec ghci + --install-ghc + --snapshot lts-24.37 + --package turtle +-} +~~~ diff --git a/doc/topics/shell_autocompletion.md b/doc/topics/shell_autocompletion.md new file mode 100644 index 0000000000..36aa72fd3e --- /dev/null +++ b/doc/topics/shell_autocompletion.md @@ -0,0 +1,70 @@ +
+ +# Shell auto-completion + +The following adds support for the tab completion of standard Stack arguments to +the following shell programs: Bash, Zsh (the Z shell) and fish. Completion of +file names and executables within Stack is still lacking. For further +information, see issue +[#823](https://github.com/commercialhaskell/stack/issues/823). + +!!! info + + Stack's completion library provides + [hidden options](https://github.com/pcapriotti/optparse-applicative#bash-zsh-and-fish-completions) + for Bash, Zsh, and fish which output commands used for shell + auto-completion. For example: + + ~~~bash + $ stack --bash-completion-script stack + _stack() + { + local CMDLINE + local IFS=$'\n' + CMDLINE=(--bash-completion-index $COMP_CWORD) + + for arg in ${COMP_WORDS[@]}; do + CMDLINE=(${CMDLINE[@]} --bash-completion-word $arg) + done + + COMPREPLY=( $(stack "${CMDLINE[@]}") ) + } + + complete -o filenames -F _stack stack + ~~~ + +=== "Bash" + + Add the output of the following command to your preferred completions file + (e.g. `~/.config/bash_completions.d/stack`). + + ~~~bash + stack --bash-completion-script $(which stack) + ~~~ + + You may need to `source` this. + +=== "Zsh" + + Add the output of the following command to your preferred completions file + (e.g. `~/.config/zsh/completions/_stack`). + + ~~~zsh + stack --zsh-completion-script $(which stack) + ~~~ + + You will not need to `source` this, but do update your `fpath`: + + ~~~zsh + fpath=($HOME/.config/zsh/completions $fpath) + autoload -U compinit && compinit + ~~~ + +=== "fish" + + Add the output of the following command to your preferred completions file + (e.g. `~/.config/fish/completions/stack.fish`). + + ~~~fish + stack --fish-completion-script $(which stack) + ~~~ diff --git a/doc/topics/snapshot_location.md b/doc/topics/snapshot_location.md new file mode 100644 index 0000000000..4022cfb4e0 --- /dev/null +++ b/doc/topics/snapshot_location.md @@ -0,0 +1,145 @@ +
+ +# Snapshot location + +[:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) + +This document describes the specification of a snapshot location: + +* **in YAML configuration files**, in the + [`snapshot`](../configure/yaml/project.md#snapshot) or + [`resolver`](../configure/yaml/project.md#resolver) (deprecated) key; or +* **at the command line**, with the + [`--snapshot`](../configure/global_flags.md#-snapshot-option) or + [`--resolver`](../configure/global_flags.md#-resolver-option) (deprecated) + option. + +## In YAML configuration files + +!!! info + + Stack uses the [Pantry](https://hackage.haskell.org/package/pantry) to + specify the location of snapshots. Pantry is geared towards reproducible + build plans with cryptographically secure specification of snapshots. + +There are four ways to specify a snapshot location: + +1. Via a _convenience synonym_, which provides a short form for some common + URLs (see further below). + + These are: + + * **Stackage LTS Haskell snapshots**, for example: + + ~~~yaml + snapshot: lts-24.37 + ~~~ + + ??? info "Expansion of `lts-X.Y`" + + `lts-X.Y` is treated (by default) as: + + ~~~text + github:commercialhaskell/stackage-snapshots:lts/X/Y.yaml + ~~~ + + and, consequently, expands to: + + ~~~text + https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/X/Y.yaml + ~~~ + + * **Stackage Nightly snapshots**, for example: + + ~~~yaml + snapshot: nightly-2026-04-18 + ~~~ + + ??? info "Expansion of `nightly-YYYY-MM-DD`" + + `nightly-YYYY-MM-DD` is treated (by default) as: + + ~~~text + github:commercialhaskell/stackage-snapshots:nightly/YYYY/M/D.yaml + ~~~ + + and, consequently (see below), expands to: + + ~~~text + https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/nightly/YYYY/M/D.yaml + ~~~ + + * **GitHub**: `github:user/repo:path` is treated as: + + ~~~text + https://raw.githubusercontent.com/user/repo/master/path + ~~~ + + ??? 
info "Overriding the default snapshot location base" + + By default, LTS Haskell and Stackage Nightly snapshot configurations are + retrieved from the `stackage-snapshots` GitHub repository of user + `commercialhaskell`. The + [snapshot-location-base](../configure/yaml/non-project.md#snapshot-location-base) + option allows a custom location to be set. + +2. Via a **compiler version**, for example: + + ~~~yaml + snapshot: ghc-9.10.3 + ~~~ + + This snapshot specifies only a compiler version and, indirectly, its boot + packages. + +3. Via a **URL** pointing to a snapshot configuration file, for example: + + ~~~yaml + snapshot: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/nightly/2025/8/17.yaml + ~~~ + + For safer, more reproducible builds, you can optionally specify a URL + together with a cryptographic hash of its content. For example: + + ~~~yaml + snapshot: + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/12/0.yaml + size: 499143 + sha256: 781ea577595dff08b9c8794761ba1321020e3e1ec3297fb833fe951cce1bee11 + ~~~ + + `size` is the number of bytes in the file and `sha256` is the file's SHA256 + hash. If not provided, the information will automatically be generated and + stored in a [lock file](lock_files.md). + +4. Via a relative or absolute **local file path** pointing to a snapshot + configuration file, for example: + + ~~~yaml + snapshot: my-local-snapshot.yaml + ~~~ + + This can also be specified as: + + ~~~yaml + snapshot: + filepath: my-local-snapshot.yaml + ~~~ + + This allows local file paths and convenience synonyms to be disambiguated. + +## At the command line + +As in YAML configuration files, a snapshot location can be specified via a +convenience synonym, a compiler version, a URL, or a local file path. 
In +addition, at the command line only: + +* `--snapshot lts-<major version>` specifies the latest Stackage LTS Haskell + snapshot with the specified major version; +* `--snapshot lts` specifies, from those with the greatest major version, the + latest Stackage LTS Haskell snapshot; +* `--snapshot nightly` specifies the most recent Stackage Nightly snapshot; + and +* `--snapshot global` specifies the snapshot specified by the project-level + configuration file in the `global-project` directory in the + [Stack root](../topics/stack_root.md#global-project-directory). diff --git a/doc/topics/stack_root.md b/doc/topics/stack_root.md new file mode 100644 index 0000000000..973328b0ce --- /dev/null +++ b/doc/topics/stack_root.md @@ -0,0 +1,310 @@ +
+ +# Stack root + +The Stack root is a directory where Stack stores important files. + +On Unix-like operating systems and Windows, Stack can be configured to follow +the XDG Base Directory Specification if the environment variable `STACK_XDG` is +set to any non-empty value. However, Stack will ignore that configuration if the +Stack root location has been set on the command line or the `STACK_ROOT` +environment variable exists. + +## Location + +The location of the Stack root depends on the operating system, whether Stack is +configured to use the XDG Base Directory Specification, and/or whether an +alternative location to Stack's default 'programs' directory has been specified. + +The location of the Stack root can be configured by setting the +[`STACK_ROOT`](../configure/environment_variables.md#stack_root) environment +variable or using Stack's +[`--stack-root`](../configure/global_flags.md#-stack-root-option) option on the +command line. + +=== "Unix-like" + + The Stack root contains snapshot packages; (by default) tools such as GHC, + in a `programs` directory; Stack's global + [configuration](../configure/yaml/index.md) file (`config.yaml`); and + Stack's [`global-project`](../configure/yaml/index.md) directory. + + The default Stack root is `~/.stack`. + +=== "Windows" + + The default Stack root is `$Env:APPDATA\stack`. + + If the `LOCALAPPDATA` environment variable exists, then the default location + of tools is `$Env:LOCALAPPDATA\Programs\stack`. Otherwise, it is the + `programs` directory in the Stack root. + + !!! warning + + If there is a space character in the `$Env:LOCALAPPDATA` path (which may + be the case if the relevant user account name and its corresponding user + profile path have a space) this may cause problems with building + packages that make use of the GNU project's `autoconf` package and + `configure` shell script files. 
That may be the case particularly if + there is no corresponding short name ('8 dot 3' name) for the directory + in the path with the space (which may be the case if '8 dot 3' names + have been stripped or their creation not enabled by default). If there + are problems building, it will be necessary to override the default + location of Stack's 'programs' directory to specify an alternative path + that does not contain space characters. Examples of packages on + Hackage that make use of `configure` are `network` and `process`. + + On Windows, the length of filepaths may be limited (to + [MAX_PATH](https://docs.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=cmd)), + and things can break when this limit is exceeded. Setting a Stack root with + a short path to its location (for example, `C:\sr`) can help. + +=== "Windows (Command Prompt)" + + The default Stack root is `%APPDATA%\stack`. + + If the `LOCALAPPDATA` environment variable exists, then the default location + of tools is `%LOCALAPPDATA%\Programs\stack`. Otherwise, it is the `programs` + directory in the Stack root. + + !!! warning + + If there is a space character in the `%LOCALAPPDATA%` path (which may be + the case if the relevant user account name and its corresponding user + profile path have a space) this may cause problems with building + packages that make use of the GNU project's `autoconf` package and + `configure` shell script files. That may be the case particularly if + there is no corresponding short name ('8 dot 3' name) for the directory + in the path with the space (which may be the case if '8 dot 3' names + have been stripped or their creation not enabled by default). If there + are problems building, it will be necessary to override the default + location of Stack's 'programs' directory to specify an alternative path + that does not contain space characters. Examples of packages on + Hackage that make use of `configure` are `network` and `process`. 
+ + On Windows, the length of filepaths may be limited (to + [MAX_PATH](https://docs.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=cmd)), + and things can break when this limit is exceeded. Setting a Stack root with + a short path to its location (for example, `C:\sr`) can help. + +=== "XDG Base Directory Specification" + + The Stack root is `<XDG_DATA_HOME>/stack`. If the `XDG_DATA_HOME` + environment variable does not exist, the default is `~/.local/share/stack` + on Unix-like operating systems and `%APPDATA%\stack` on Windows. + + The location of `config.yaml` is `<XDG_CONFIG_HOME>/stack`. If the + `XDG_CONFIG_HOME` environment variable does not exist, the default is + `~/.config/stack` on Unix-like operating systems and `%APPDATA%\stack` on + Windows. + + This approach treats: + + * the project-level configuration file that is common to all projects + without another such file in their project directory or its ancestor + directories as _data_ rather than as part of Stack's own + _configuration_; + + * the snapshots database as essential data rather than as non-essential + data that would be part of a _cache_, notwithstanding that Stack will + rebuild that database as its contents are needed; and + + * the Pantry store as essential data rather than as non-essential data + that would be part of a _cache_, notwithstanding that Stack will + download the package index and rebuild the store if it is absent. + +An alternative to the default location of tools such as GHC can be specified +with the +[`local-programs-path`](../configure/yaml/non-project.md#local-programs-path) +configuration option. 
+ +The location of the Stack root is reported by command: + +~~~text +stack path --stack-root +~~~ + +The full path of Stack's global configuration file is reported by command: + +~~~text +stack path --global-config +~~~ + +The location of tools such as GHC for the current platform is reported by +command: + +~~~text +stack path --programs +~~~ + +## Contents + +The contents of the Stack root depend on the operating system, whether Stack is +configured to use the XDG Base Directory Specification, and/or whether an +alternative location to Stack's default 'programs' directory has been specified. + +=== "Unix-like" + + The Stack root contains snapshot packages; (by default) tools such as GHC, + in a `programs` directory; Stack's global + [configuration](../configure/yaml/index.md) file (`config.yaml`); and + Stack's [`global-project`](../configure/yaml/index.md) directory. + +=== "Windows" + + The Stack root contains snapshot packages; Stack's global + [configuration](../configure/yaml/index.md) file (`config.yaml`); and + Stack's [`global-project`](../configure/yaml/index.md) directory. The + default location of tools such as GHC and MSYS2 is outside of the Stack + root. + +=== "XDG Base Directory Specification" + + If Stack is following the XDG Base Directory Specification, the Stack root + contains what it would otherwise contain for the operating system, but + Stack's global configuration file (`config.yaml`) may be located elsewhere. + +### `config.yaml` + +This is Stack's global configuration file. For further information, see the +documentation for non-project specific +[configuration](../configure/yaml/non-project.md#non-project-specific-configuration). + +If the file is deleted, and Stack needs to consult it, then Stack will create a +file with default contents. + +### `stack.sqlite3` + +This is a 'user' database that Stack uses to cache certain information. The +associated lock file is `stack.sqlite3.pantry-write-lock`. 
+ +### `.stack-work` directory (optional) + +Stack can build when there is no project-level configuration file (including one +in the `global-project` directory of the Stack root); for example, as a result +of a [`stack script`](../commands/script_command.md) command (at the command +line or in a [Stack interpreter options comment](scripts.md) in a Haskell script +file). When it does so, the directory corresponding to a project directory is +the Stack root. Stack will create its work directory, named `.stack-work` by +default, in the Stack root. + +If the work directory is deleted, and Stack needs that work directory, then +Stack will recreate it. + +### `global-project` directory + +This contains: + +* an explanation of the directory (`README.txt`); +* the project-level configuration file (`stack.yaml`) for the global project + and its associated lock file (`stack.yaml.lock`); and +* if created, Stack's working directory (`.stack-work`) for the global project. + +If the project-level configuration file is deleted, and Stack needs to consult +it, then Stack will recreate the contents of the directory. + +### `pantry/hackage` directory + +This contains a local cache of the package index. If the contents of the +directory are deleted, and Stack needs to consult the package index, then Stack +will seek to download the latest package index. + +!!! info + + Stack depends on package `pantry` which, in turn, depends on package + `hackage-security`. The latter handles the local cache of the package index. + The type `CacheLayout` represents the location of the files that are cached. + `pantry` uses `cabalCacheLayout :: CacheLayout`, the layout that Cabal (the + tool) uses. That is what specifies the names of the files used to cache the + package index, including `00-index.tar` and `00-index.tar.gz`. + +### `pantry` directory + +This contains: + +* the Pantry database used by Stack (`pantry.sqlite3`) and its associated lock + file (`pantry.sqlite3.pantry-write-lock`). 
If the database is deleted, and + Stack needs to consult it, then Stack will seek to create and initialise it. + The database is initialised with information from the package index; and +* a database of package versions that come with each version of GHC + (`global-hints-cache.yaml`). + +### `programs` directory + +This contains a directory for the platform. That directory contains for each +installed Stack-supplied tool: + +* the archive file for the tool. This can be deleted; +* a file indicating the tool is installed (`.installed`); and +* a directory for the tool. + +To remove a Stack-supplied tool, delete all of the above. If Stack needs a +Stack-supplied tool and it is unavailable, then Stack will seek to obtain it. + +### `scripts` directory (optional) + +If the `--compile` or `--optimize` and `--use-root` flags are used with the +[`stack script`](../commands/script_command.md) command, then this contains: + +* script-specific locations, each containing all the compilation outputs + (including the executable) generated by the command. + +If the `scripts` directory, or a script-specific location within it, is deleted, +and Stack needs that directory, then Stack will recreate it. + +### `setup-exe-cache` directory + +This contains a directory for the platform. That directory contains, for each +version of GHC (an associated version of Cabal (the library)) that Stack has +used, an executable that Stack uses to access Cabal (the library). + +If the contents of the directory are deleted, and Stack needs the executable, +then Stack will seek to rebuild it. + +### `setup-exe-src` directory + +See the documentation for the +[`setup-exe-cache` directory](#setup-exe-cache-directory). This contains the two +source files (`setup-<hash>.hs` and `setup-shim-<hash>.hs`) that Stack uses to +build the executable. + +If the contents of the directory are deleted, and Stack needs the executable, +then Stack will recreate them. 
+ +The hash in the names of the source files is a hash of arguments passed to GHC +when building the executable and the contents of the two source files. + +The content of the `setup-<hash>.hs` file is the familiar: + +~~~haskell +import Distribution.Simple +main = defaultMain +~~~ + +The content of the `setup-shim-<hash>.hs` file uses `main` except when the +executable is called with arguments `repl` and `stack-initial-build-steps`. Then +Stack uses Cabal (the library) to create the autogenerated files for every +configured component. Stack's `stack ghci` or `stack repl` commands call the +executable with those arguments. + +### `snapshots` directory + +This contains a directory for each snapshot that Stack creates when building +immutable dependencies of projects. + +If the contents of the directory are deleted, and the snapshot is not available +to Stack when it builds, then Stack will recreate the snapshot. + +### `templates` directory + +This contains a `.hsfiles` file for each project template that Stack has used. For +further information, see the +[`stack templates`](../commands/templates_command.md) command documentation. + +If the contents of the directory are deleted, and Stack needs a project template, +then Stack will seek to download the template. + +### `upload` directory + +This may contain saved credentials for uploading packages to Hackage +(`credentials.json`). diff --git a/doc/topics/stack_work.md b/doc/topics/stack_work.md new file mode 100644 index 0000000000..7f40180dbd --- /dev/null +++ b/doc/topics/stack_work.md @@ -0,0 +1,153 @@ +
+ +# Stack work directories + +Stack work directories are directories within a local project or package +directory in which Stack stores files created during the build process. Stack +can be used without an understanding of the content of those directories. In +particular, the [`stack exec`](../commands/exec_command.md) command sets up an +environment where relevant subdirectories of the project Stack work directory +are on the PATH. + +## Naming + +By default, Stack work directories are named `.stack-work`. The name can be +overridden by: + +* the use of the + [`STACK_WORK` environment variable](../configure/environment_variables.md#stack_work); +* the [`work-dir`](../configure/yaml/non-project.md#work-dir) non-project + specific configuration option; or +* the [`--work-dir`](../configure/global_flags.md#-work-dir-option) command line + option. + +Given the location of Stack work directories, the name of the work directories +must be a relative path to a directory. + +## Location + +If the work directory does not already exist, it will be created by the +[`stack build`](../commands/build_command.md) command as a subdirectory of each +project package directory and, if different, the project directory. + +## Project package Stack work directory + +The Stack work directory for a project package will contain a `dist` directory. +This directory will contain a path to a directory containing: + +* a `build` directory; +* a `package.conf.inplace` directory; +* a `stack-build-caches` directory; +* a `build-lock` file; +* a `setup-config` file; +* a `stack-cabal-mod` file. This file is used by Stack only for its modification + time; +* a `stack-project-root` file. This file contains an absolute path to the + project root directory; and +* a `stack-setup-config-mod` file. This file is used by Stack only for its + modification time. 
+ +The directory, relative to the project package directory or the project +directory, is the one reported by +[`stack path --dist-dir`](../commands/path_command.md). + +=== "Unix-like" + + On Unix-like operating systems, the path to the directory is a directory + named after the platform (including Stack's classification of variants of + Linux distributions) followed by a directory named after the GHC version. + +=== "Windows" + + On Windows, the path to the directory is an eight-character hash of the + path that applies on Unix-like operating systems. + +## Project Stack work directory + +The Stack work directory for a project will contain: + +* a `stack.sqlite3` file (being a SQLite v3 database); and +* a `stack.sqlite3.pantry-write-lock` file (used to ensure that only one + process is trying to write to the database at any time). + +The Stack work directory for a project will contain a `install` directory. +This directory will contain a path to a directory containing: + +* a `bin` directory, containing built executable files; +* a `doc` directory, containing a directory for each project package. This is + the directory reported by + [`stack path --local-doc-root`](../commands/path_command.md); +* if the [`stack hpc`](../commands/hpc_command.md) command is used, a `hpc` + directory. This is the directory reported by + [`stack path --local-hpc-root`](../commands/path_command.md); +* a `lib` directory, containing a directory named after the platform and the + GHC version and, within that, a directory for each project package; and +* a `pkgdb` directory. This is the directory reported by + [`stack path --local-pkg-db`](../commands/path_command.md). + +The directory is the one reported by +[`stack path --local-install-root`](../commands/path_command.md). 
+ +=== "Unix-like" + + On Unix-like operating systems, the path to the directory is a directory + named after the platform (including Stack's classification of variants of + Linux distributions) followed by a directory named after a SHA 256 hash + (see further below) followed by a directory named after the version number + of GHC. + + The SHA 256 hash is a hash of the following information: + + * the path to the specified compiler; + * the information about the compiler provided by `ghc --info`; + * the options that Stack passes to GHC for package that is not a project + package; and + * information about the immutable dependencies: their location, whether or + not Haddock documentation is to be built, their flags, their GHC options, + and their Cabal configuration options. + + The options that Stack passes to GHC for a package that is not a project + package depend on: + + * the specification of + [profiling](../commands/build_command.md#flags-affecting-ghcs-behaviour); + * the specification of + [stripping](../commands//build_command.md#flags-affecting-ghcs-behaviour); + and + * if + [`apply-ghc-options: everything`](../configure/yaml/non-project.md#apply-ghc-options) + is specified, any GHC command line options specified on the command line. + + !!! note + + As a consequence, the path reported by the following commands will + differ (and similarly for the paths established by the + [`stack exec`](../commands/exec_command.md) command): + + ~~~text + stack path --local-install-root + stack --profile path --local-install-root + stack --no-strip path --local-install-root + stack --profile --no-strip path --local-install-root + ~~~ + +=== "Windows" + + On Windows, the path to the directory is an eight-character hash of the + path that applies on Unix-like operating systems. + +Following a `stack ghci` or `stack repl` command, the Stack work directory for +a project will contain a `ghci` directory. 
This directory will contain paths to +`cabal_macos.h` files that are generated automatically by Cabal. + +!!! note + + Haskell Language Server makes use of the `stack ghci` command to obtain + information. + +If the [`stack hoogle`](../commands/hoogle_command.md) command is used, the +Stack work directory for a project will contain a `hoogle` directory. This +directory will contain a directory being the one reported by +[`stack path --local-hoogle-root`](../commands/path_command.md). The naming of +the path to the directory is same as for the path to the directory in the +`install` directory. diff --git a/doc/topics/stack_yaml_vs_cabal_package_file.md b/doc/topics/stack_yaml_vs_cabal_package_file.md new file mode 100644 index 0000000000..070cae979a --- /dev/null +++ b/doc/topics/stack_yaml_vs_cabal_package_file.md @@ -0,0 +1,162 @@ +
+ +# stack.yaml versus package.yaml versus a Cabal file + +What is the difference between a `stack.yaml` file, a `package.yaml` file and a +Cabal file (named `.cabal`)? This page aims to make that clear. + +In short: + +* `stack.yaml` contains project-level configuration for Stack, and may contain + project-specific options and non-project-specific options. + +* `package.yaml` contains a description of a package in the + [Hpack](https://github.com/sol/hpack) format. Hpack, including Stack's + built-in version, uses the file to create a Cabal file. + +* a Cabal file also contains a description of a package, but in the format used + by Cabal. + +## package.yaml versus a Cabal file + +Why two different formats to describe packages? Hpack is considered to have some +advantages over the underlying Cabal format, which are explained its project +repository. They include that the Hpack format supports YAML syntax and the +automatic generation of the lists of `exposed-modules` used in the Cabal format. + +The remainder of this page will focus on the difference between a `stack.yaml` +file and a package description file. + +## Package versus project + +Stack is a tool for building Haskell code and it uses Cabal, a build system. +Cabal defines the concept of a _package_. A package has: + +* A name and version +* optionally, one library +* optionally, one or more executables +* A Cabal file (or, as mentioned above, an [Hpack](https://github.com/sol/hpack) + `package.yaml` file that generates a Cabal file) +* And a bunch more + +There is a one-to-one correspondence between a package and a Cabal file. + +Stack defines a new concept called a _project_. A project has: + +* A snapshot (more on this later) +* Extra dependencies on top of the snapshot +* Optionally, one or more local Cabal packages +* Flag and GHC options configurations +* And a bunch more Stack configuration + +Often you will have a project that defines only one local Cabal package that you +are working on. 
If you need to specify a dependency, a source of confusion can +be why you need to specify it both in the `stack.yaml` file _and_ +in the Cabal file. To explain, let us take a quick detour to talk about +snapshots and how Stack resolves dependencies. + +## Snapshots + +Stack follows a rule that says, for any project, there is precisely one version +of each package available. Obviously, for many packages there are _many_ +versions available in the world. But when resolving a `stack.yaml` file, Stack +requires that you have chosen a specific version for each package available. + +The most common means by which this set of packages is defined is via a +snapshot provided by Stackage. For example, if you go to the page +, you will see a list of 3,427 packages at +specific version numbers. When you then specify `snapshot: lts-24.37` you are +telling Stack to use those package versions in resolving dependencies down to +specific versions of packages. + +Sometimes a snapshot does not have all of the packages that you want. Or you +want a different version of a package. Or you want to work on a local +modification of a package. In all of those cases, you can add more configuration +data to your `stack.yaml` file to override the values it received from your +[`snapshot`](../configure/yaml/project.md#snapshot) setting. At the end of the +day, each of your projects will end up with some way of resolving a package name +into a specific version of that package. + +## Why specify dependencies twice? + +The package `acme-missiles` is not included in any Stackage snapshots. When you +add something like this to your `stack.yaml` file: + +~~~yaml +extra-deps: +- acme-missiles-0.3 +~~~ + +what you are saying to Stack is: "if at any point you find that you need to +build the `acme-missiles` package, please use version `0.3`". You are _not_ +saying "please build `acme-missiles` now." You are also not saying "my package +depends on `acme-missiles`." 
You are simply making it available should the need +arise. + +When you add to your `package.yaml` file: + +~~~yaml +dependencies: +- acme-missiles +~~~ + +or, alternatively, you add directly to your Cabal file: + +~~~yaml +build-depends: acme-missiles +~~~ + +you are saying "this package requires that `acme-missiles` be available." Since +`acme-missiles` does not appear in your snapshot, without also modifying your +`stack.yaml` to mention it via `extra-deps`, Stack will complain about the +dependency being unavailable. + +You may challenge: but why go through all of that annoyance? Stack knows what +package I want, why not just go grab it? The answer is that, if Stack just +grabbed `acme-missiles` for you without it being specified in the `stack.yaml` +somehow, you'd lose reproducibility. How would Stack know which version to use? +It may elect to use the newest version, but if a new version is available in +the future, will it automatically switch to that? + +Stack's core philosophy is that build plans are always reproducible. The +purpose of the `stack.yaml` file is to define an immutable set of packages. No +matter when in time you use it, and no matter how many new release happen in +the interim, the build plan generated should be the same. + +(There is, however, at least one hole in this theory today, which is Hackage +revisions. When you specify `extra-deps: [acme-missiles-0.3]`, it does not +specify which revision of the Cabal file to use, and Stack will just choose the +latest. Stack has the ability to specify exact revisions of Cabal files, but +this is not enforced as a requirement, because it is so different from the way +most people work with packages.) + +And now, how about the other side: why does Stack not automatically add +`acme-missiles` to `build-depends` in your Cabal file if you add it as an +extra-dep? 
There are a surprising number of reasons for this:
We provide two fully baked example files ready to be used on your projects: -* [The simple Travis configuration](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/travis-simple.yml) +* [The simple Travis configuration](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/resources/travis-simple.yml) is intended for applications that do not require multiple GHC support or cross-platform support. It builds and tests your project with just the settings present in your `stack.yaml` file. -* [The complex Travis configuration](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/travis-complex.yml) +* [The complex Travis configuration](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/resources/travis-complex.yml) is intended for projects that need to support multiple GHC versions and multiple OSes, such as open source libraries to be released to Hackage. It tests against cabal-install, as well as Stack on Linux and macOS. The configuration is significantly more involved to allow for all of this branching behavior. - __NOTE__: It is likely going to be necessary to modify this configuration to - match the needs of your project, such as tweaking the build matrix to alter - which GHC versions you test against, or to specify GHC-version-specific - `stack.yaml` files if necessary. Don't be surprised if it doesn't work the - first time around. See the multiple GHC section below for more information. + !!! note + It is likely going to be necessary to modify this configuration to match + the needs of your project, such as tweaking the build matrix to alter + which GHC versions you test against, or to specify GHC-version-specific + `stack.yaml` files if necessary. Do not be surprised if it does not work + the first time around. See the multiple GHC section below for more + information. 
Each of these configurations is ready to be used immediately, just copy-paste the content into the `.travis.yml` file in the root or your -repo, enable Travis on the repo, and you're good to go. +repo, enable Travis on the repo, and you are good to go. You may also be interested in using AppVeyor, which supports Windows -builds, for more cross-platform testing. There's a +builds, for more cross-platform testing. There is a [short blog post available on how to do this](http://www.snoyman.com/blog/2016/08/appveyor-haskell-windows-ci), or just copy in -[the appveyor.yml file](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/appveyor.yml) +[the appveyor.yml file](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/resources/appveyor.yml) The rest of this document explains the details of common Travis configurations for those of you who want to tweak the above @@ -49,14 +51,14 @@ currently available only for [container-based Travis infrastructure](http://docs.travis-ci.com/user/workers/container-based-infrastructure/). Shortly we have to add -```yaml +~~~yaml sudo: false # Caching so the next build will be fast too. cache: directories: - $HOME/.stack -``` +~~~ To the `.travis.yml`. This however restricts how we can install GHC and Stack on the Travis machines. @@ -64,15 +66,15 @@ the Travis machines. ## Installing Stack Currently there is only one reasonable way to install Stack: fetch precompiled -binary from the Github. +binary from the GitHub. -```yaml +~~~yaml before_install: # Download and unpack the stack executable - mkdir -p ~/.local/bin - export PATH=$HOME/.local/bin:$PATH - travis_retry curl -L https://get.haskellstack.org/stable/linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack' -``` +~~~ ## Installing GHC @@ -85,7 +87,7 @@ See the above scripts for an example of the first option (letting Stack download GHC). Here, we will explain the second option. 
With single GHC the situation is simple: -```yaml +~~~yaml before_install: # Install stack as above # ... @@ -99,18 +101,18 @@ addons: - hvr-ghc packages: - ghc-7.10.2 -``` +~~~ ### Multiple GHC - parametrised builds -Travis apt plugin doesn't yet support installing apt packages dynamically +Travis apt plugin does not yet support installing apt packages dynamically (https://github.com/travis-ci/travis-ci/issues/4291). That for we need to write a bit repetitive `.travis.yml`. Also for different GHC versions, you probably want to use different `stack.yaml` files. -```yaml +~~~yaml # N.B. No top-level env: declaration! matrix: @@ -142,7 +144,7 @@ matrix: before_install: # ghc - export PATH=/opt/ghc/$GHCVER/bin:$PATH -``` +~~~ Especially to use ghc `HEAD` you need to pass `--skip-ghc-check` option to Stack. @@ -150,12 +152,12 @@ Especially to use ghc `HEAD` you need to pass `--skip-ghc-check` option to Stack After the environment setup, actual test running is simple: -```yaml +~~~yaml script: - stack --no-terminal --skip-ghc-check test -``` +~~~ -In case you're wondering: we need `--no-terminal` because stack does some fancy +In case you are wondering: we need `--no-terminal` because stack does some fancy sticky display on smart terminals to give nicer status and progress messages, and the terminal detection is broken on Travis. @@ -165,11 +167,11 @@ Some Stack commands will run for long time (when cache is cold) without producing any output. To avoid timeouts, use the built in [travis_wait](https://docs.travis-ci.com/user/common-build-problems/#Build-times-out-because-no-output-was-received). 
-```yaml +~~~yaml install: - travis_wait stack --no-terminal --skip-ghc-check setup - travis_wait stack --no-terminal --skip-ghc-check test --only-snapshot -``` +~~~ ## Examples diff --git a/doc/travis-complex.yml b/doc/travis-complex.yml deleted file mode 100644 index 02f5e5f2d1..0000000000 --- a/doc/travis-complex.yml +++ /dev/null @@ -1,264 +0,0 @@ -# This is the complex Travis configuration, which is intended for use -# on open source libraries which need compatibility across multiple GHC -# versions, must work with cabal-install, and should be -# cross-platform. For more information and other options, see: -# -# https://docs.haskellstack.org/en/stable/travis_ci/ -# -# Copy these contents into the root directory of your Github project in a file -# named .travis.yml - -# Run jobs on Linux unless "os" is specified explicitly. -os: linux - -# Do not choose a language; we provide our own build tools. -language: generic - -# Caching so the next build will be fast too. -cache: - directories: - - $HOME/.ghc - - $HOME/.cabal - - $HOME/.stack - - $TRAVIS_BUILD_DIR/.stack-work - -# The different configurations we want to test. We have BUILD=cabal which uses -# cabal-install, and BUILD=stack which uses Stack. More documentation on each -# of those below. -# -# We set the compiler values here to tell Travis to use a different -# cache file per set of arguments. -# -# If you need to have different apt packages for each combination in the -# job matrix, you can use a line such as: -# addons: {apt: {packages: [libfcgi-dev,libgmp-dev]}} -jobs: - include: - # We grab the appropriate GHC and cabal-install versions from hvr's PPA. 
See: - # https://github.com/hvr/multi-ghc-travis - #- env: BUILD=cabal GHCVER=7.0.4 CABALVER=1.16 HAPPYVER=1.19.5 ALEXVER=3.1.7 - # compiler: ": #GHC 7.0.4" - # addons: {apt: {packages: [cabal-install-1.16,ghc-7.0.4,happy-1.19.5,alex-3.1.7], sources: [hvr-ghc]}} - #- env: BUILD=cabal GHCVER=7.2.2 CABALVER=1.16 HAPPYVER=1.19.5 ALEXVER=3.1.7 - # compiler: ": #GHC 7.2.2" - # addons: {apt: {packages: [cabal-install-1.16,ghc-7.2.2,happy-1.19.5,alex-3.1.7], sources: [hvr-ghc]}} - #- env: BUILD=cabal GHCVER=7.4.2 CABALVER=1.16 HAPPYVER=1.19.5 ALEXVER=3.1.7 - # compiler: ": #GHC 7.4.2" - # addons: {apt: {packages: [cabal-install-1.16,ghc-7.4.2,happy-1.19.5,alex-3.1.7], sources: [hvr-ghc]}} - #- env: BUILD=cabal GHCVER=7.6.3 CABALVER=1.16 HAPPYVER=1.19.5 ALEXVER=3.1.7 - # compiler: ": #GHC 7.6.3" - # addons: {apt: {packages: [cabal-install-1.16,ghc-7.6.3,happy-1.19.5,alex-3.1.7], sources: [hvr-ghc]}} - #- env: BUILD=cabal GHCVER=7.8.4 CABALVER=1.18 HAPPYVER=1.19.5 ALEXVER=3.1.7 - # compiler: ": #GHC 7.8.4" - # addons: {apt: {packages: [cabal-install-1.18,ghc-7.8.4,happy-1.19.5,alex-3.1.7], sources: [hvr-ghc]}} - #- env: BUILD=cabal GHCVER=7.10.3 CABALVER=1.22 HAPPYVER=1.19.5 ALEXVER=3.1.7 - # compiler: ": #GHC 7.10.3" - # addons: {apt: {packages: [cabal-install-1.22,ghc-7.10.3,happy-1.19.5,alex-3.1.7], sources: [hvr-ghc]}} - - env: BUILD=cabal GHCVER=8.0.2 CABALVER=1.24 HAPPYVER=1.19.5 ALEXVER=3.1.7 - compiler: ": #GHC 8.0.2" - addons: {apt: {packages: [cabal-install-1.24,ghc-8.0.2,happy-1.19.5,alex-3.1.7], sources: [hvr-ghc]}} - - env: BUILD=cabal GHCVER=8.2.2 CABALVER=2.0 HAPPYVER=1.19.5 ALEXVER=3.1.7 - compiler: ": #GHC 8.2.2" - addons: {apt: {packages: [cabal-install-2.0,ghc-8.2.2,happy-1.19.5,alex-3.1.7], sources: [hvr-ghc]}} - - env: BUILD=cabal GHCVER=8.4.4 CABALVER=2.2 HAPPYVER=1.19.5 ALEXVER=3.1.7 - compiler: ": #GHC 8.4.4" - addons: {apt: {packages: [cabal-install-2.2,ghc-8.4.4,happy-1.19.5,alex-3.1.7], sources: [hvr-ghc]}} - - env: BUILD=cabal GHCVER=8.6.5 
CABALVER=2.4 HAPPYVER=1.19.5 ALEXVER=3.1.7 - compiler: ": #GHC 8.6.5" - addons: {apt: {packages: [cabal-install-2.4,ghc-8.6.5,happy-1.19.5,alex-3.1.7], sources: [hvr-ghc]}} - - # Build with the newest GHC and cabal-install. This is an accepted failure, - # see below. - - env: BUILD=cabal GHCVER=head CABALVER=head HAPPYVER=1.19.5 ALEXVER=3.1.7 - compiler: ": #GHC HEAD" - addons: {apt: {packages: [cabal-install-head,ghc-head,happy-1.19.5,alex-3.1.7], sources: [hvr-ghc]}} - - # The Stack builds. We can pass in arbitrary Stack arguments via the ARGS - # variable, such as using --stack-yaml to point to a different file. - - env: BUILD=stack ARGS="" - compiler: ": #stack default" - addons: {apt: {packages: [libgmp-dev]}} - - #- env: BUILD=stack ARGS="--resolver lts-2" - # compiler: ": #stack 7.8.4" - # addons: {apt: {packages: [libgmp-dev]}} - - #- env: BUILD=stack ARGS="--resolver lts-3" - # compiler: ": #stack 7.10.2" - # addons: {apt: {packages: [libgmp-dev]}} - - #- env: BUILD=stack ARGS="--resolver lts-6" - # compiler: ": #stack 7.10.3" - # addons: {apt: {packages: [libgmp-dev]}} - - #- env: BUILD=stack ARGS="--resolver lts-7" - # compiler: ": #stack 8.0.1" - # addons: {apt: {packages: [libgmp-dev]}} - - - env: BUILD=stack ARGS="--resolver lts-9" - compiler: ": #stack 8.0.2" - addons: {apt: {packages: [libgmp-dev]}} - - - env: BUILD=stack ARGS="--resolver lts-11" - compiler: ": #stack 8.2.2" - addons: {apt: {packages: [libgmp-dev]}} - - - env: BUILD=stack ARGS="--resolver lts-12" - compiler: ": #stack 8.4.4" - addons: {apt: {packages: [libgmp-dev]}} - - - env: BUILD=stack ARGS="--resolver lts-14" - compiler: ": #stack 8.6.5" - addons: {apt: {packages: [libgmp-dev]}} - - - env: BUILD=stack ARGS="--resolver lts-15" - compiler: ": #stack 8.8.3" - addons: {apt: {packages: [libgmp-dev]}} - - # Nightly builds are allowed to fail - - env: BUILD=stack ARGS="--resolver nightly" - compiler: ": #stack nightly" - addons: {apt: {packages: [libgmp-dev]}} - - # Build on macOS in 
addition to Linux - - env: BUILD=stack ARGS="" - compiler: ": #stack default osx" - os: osx - - # Travis includes an macOS which is incompatible with GHC 7.8.4 - #- env: BUILD=stack ARGS="--resolver lts-2" - # compiler: ": #stack 7.8.4 osx" - # os: osx - - #- env: BUILD=stack ARGS="--resolver lts-3" - # compiler: ": #stack 7.10.2 osx" - # os: osx - - #- env: BUILD=stack ARGS="--resolver lts-6" - # compiler: ": #stack 7.10.3 osx" - # os: osx - - #- env: BUILD=stack ARGS="--resolver lts-7" - # compiler: ": #stack 8.0.1 osx" - # os: osx - - - env: BUILD=stack ARGS="--resolver lts-9" - compiler: ": #stack 8.0.2 osx" - os: osx - - - env: BUILD=stack ARGS="--resolver lts-11" - compiler: ": #stack 8.2.2 osx" - os: osx - - - env: BUILD=stack ARGS="--resolver lts-12" - compiler: ": #stack 8.4.4 osx" - os: osx - - - env: BUILD=stack ARGS="--resolver lts-14" - compiler: ": #stack 8.6.5 osx" - os: osx - - - env: BUILD=stack ARGS="--resolver lts-15" - compiler: ": #stack 8.8.3 osx" - os: osx - - - env: BUILD=stack ARGS="--resolver nightly" - compiler: ": #stack nightly osx" - os: osx - - allow_failures: - - env: BUILD=cabal GHCVER=head CABALVER=head HAPPYVER=1.19.5 ALEXVER=3.1.7 - - env: BUILD=stack ARGS="--resolver nightly" - -before_install: -# Using compiler above sets CC to an invalid value, so unset it -- unset CC - -# We want to always allow newer versions of packages when building on GHC HEAD -- CABALARGS="" -- if [ "x$GHCVER" = "xhead" ]; then CABALARGS=--allow-newer; fi - -# Download and unpack the stack executable -- export PATH=/opt/ghc/$GHCVER/bin:/opt/cabal/$CABALVER/bin:$HOME/.local/bin:/opt/alex/$ALEXVER/bin:/opt/happy/$HAPPYVER/bin:$HOME/.cabal/bin:$PATH -- mkdir -p ~/.local/bin -- | - if [ `uname` = "Darwin" ] - then - travis_retry curl --insecure -L https://get.haskellstack.org/stable/osx-x86_64.tar.gz | tar xz --strip-components=1 --include '*/stack' -C ~/.local/bin - else - travis_retry curl -L https://get.haskellstack.org/stable/linux-x86_64.tar.gz | tar xz 
--wildcards --strip-components=1 -C ~/.local/bin '*/stack' - fi - - # Use the more reliable S3 mirror of Hackage - mkdir -p $HOME/.cabal - echo 'remote-repo: hackage.haskell.org:http://hackage.fpcomplete.com/' > $HOME/.cabal/config - echo 'remote-repo-cache: $HOME/.cabal/packages' >> $HOME/.cabal/config - - -install: -- echo "$(ghc --version) [$(ghc --print-project-git-commit-id 2> /dev/null || echo '?')]" -- if [ -f configure.ac ]; then autoreconf -i; fi -- | - set -ex - case "$BUILD" in - stack) - # Add in extra-deps for older snapshots, as necessary - # - # This is disabled by default, as relying on the solver like this can - # make builds unreliable. Instead, if you have this situation, it's - # recommended that you maintain multiple stack-lts-X.yaml files. - - #stack --no-terminal --install-ghc $ARGS test --bench --dry-run || ( \ - # stack --no-terminal $ARGS build cabal-install && \ - # stack --no-terminal $ARGS solver --update-config) - - # Build the dependencies - stack --no-terminal --install-ghc $ARGS test --bench --only-dependencies - ;; - cabal) - cabal --version - travis_retry cabal update - - # Get the list of packages from the stack.yaml file. Note that - # this will also implicitly run hpack as necessary to generate - # the .cabal files needed by cabal-install. 
- PACKAGES=$(stack --install-ghc query locals | grep '^ *path' | sed 's@^ *path:@@') - - cabal install --only-dependencies --enable-tests --enable-benchmarks --force-reinstalls --ghc-options=-O0 --reorder-goals --max-backjumps=-1 $CABALARGS $PACKAGES - ;; - esac - set +ex - -script: -- | - set -ex - case "$BUILD" in - stack) - stack --no-terminal $ARGS test --bench --no-run-benchmarks --haddock --no-haddock-deps - ;; - cabal) - cabal install --enable-tests --enable-benchmarks --force-reinstalls --ghc-options=-O0 --reorder-goals --max-backjumps=-1 $CABALARGS $PACKAGES - - ORIGDIR=$(pwd) - for dir in $PACKAGES - do - cd $dir - cabal check || [ "$CABALVER" == "1.16" ] - cabal sdist - PKGVER=$(cabal info . | awk '{print $2;exit}') - SRC_TGZ=$PKGVER.tar.gz - cd dist - tar zxfv "$SRC_TGZ" - cd "$PKGVER" - cabal configure --enable-tests --ghc-options -O0 - cabal build - if [ "$CABALVER" = "1.16" ] || [ "$CABALVER" = "1.18" ]; then - cabal test - else - cabal test --show-details=streaming --log=/dev/stdout - fi - cd $ORIGDIR - done - ;; - esac - set +ex diff --git a/doc/tutorial/building_existing_projects.md b/doc/tutorial/building_existing_projects.md new file mode 100644 index 0000000000..6ffce44af2 --- /dev/null +++ b/doc/tutorial/building_existing_projects.md @@ -0,0 +1,151 @@ +
+ +# 5. Building existing projects + +So far, we have been building simple projects. Let us now take an open source +package and try to build it. We will be ambitious and use +[yackage](https://hackage.haskell.org/package/yackage), a local package server +using [Yesod](http://www.yesodweb.com/). To get the code, we will use the +`stack unpack` command run from the root directory for all our Haskell projects: + +~~~text +stack unpack yackage +Unpacked yackage-0.8.1 to /yackage-0.8.1/ +~~~ + +You can also unpack to a directory of your liking instead of the current one +by commanding: + +~~~text +stack unpack yackage --to +~~~ + +This will create a `yackage-0.8.1` directory inside ``. + +We will change to that directory, with the command: + +~~~text +cd yackage-0.8.1 +~~~ + +## The `stack init` command + +This new directory does not have a `stack.yaml` file, so we need to make one +first. We could do it by hand, but let us be lazy instead with the `stack init` +command: + +~~~text +stack init +# init output ... +~~~ + +`stack init` does quite a few things for you behind the scenes: + +* Finds all of the Cabal files in your current directory and subdirectories + (unless you use `--ignore-subdirs`) and determines the packages and versions + they require +* Finds the best combination of snapshot and package flags that allows + everything to compile with minimum external dependencies +* It tries to look for the best matching snapshot from latest LTS, latest + nightly, other LTS versions in that order + +Assuming it finds a match, it will write your `stack.yaml` file, and everything +will work. + +!!! note + + The `yackage` package does not currently support Hpack, but you can also use + `hpack-convert` should you need to generate a `package.yaml` file. + +### Excluded Packages + +Sometimes multiple packages in your project may have conflicting requirements. +In that case `stack init` will fail, so what do you do? 
+ +You could manually create `stack.yaml` by omitting some packages to resolve the +conflict. Alternatively you can ask `stack init` to do that for you by +specifying `--omit-packages` flag on the command line. Let us see how that +works. + +To simulate a conflict we will use `acme-missiles-0.3` in `yackage` and we will +also copy `yackage.cabal` to another directory and change the name of the file +and package to `yackage-test`. In this new package we will use +`acme-missiles-0.2` instead. Let us see what happens when we command +`stack init` again: + +~~~text +stack init --force --omit-packages +# init failure output ... +~~~ + +Looking at `stack.yaml`, you will see that the excluded packages have been +commented out under the `packages` key. In case wrong packages are excluded +you can uncomment the right one and comment the other one. + +Packages may get excluded due to conflicting requirements among user packages or +due to conflicting requirements between a user package and the snapshot +compiler. If all of the packages have a conflict with the compiler then all of +them may get commented out. + +When packages are commented out you will see a warning every time you run a +command which needs the configuration file. The warning can be disabled by +editing the configuration file and removing it. + +### Using a specific snapshot + +Sometimes you may want to use a specific snapshot for your project instead of +`stack init` picking one for you. You can do that by using +`stack init --snapshot `. + +You can also init with a compiler snapshot if you do not want to use a +Stackage snapshot. That will result in all of your project's dependencies being +put under the `extra-deps` section. + +### Installing the compiler + +Stack will automatically install the compiler when you run `stack build` but you +can manually specify the compiler by running `stack setup `. 
+ +### Miscellaneous and diagnostics + +_Add selected packages_: If you want to use only selected packages from your +project directory you can do so by explicitly specifying the package directories +on the command line. + +_Duplicate package names_: If multiple packages under the directory tree have +same name, `stack init` will report those and automatically ignore one of them. + +_Ignore subdirectories_: By default `stack init` searches all the subdirectories +for Cabal files. If you do not want that then you can use `--ignore-subdirs` +command line switch. + +_Cabal warnings_: `stack init` will show warnings if there were issues in +reading a Cabal file. You may want to pay attention to the warnings as sometimes +they may result in incomprehensible errors later on during dependency solving. + +_Package naming_: If the `name` field defined in a Cabal file does not match +with the Cabal file name then `stack init` will refuse to continue. + +_User warnings_: When packages are excluded or external dependencies added Stack +will show warnings every time the configuration file is loaded. You can suppress +the warnings by editing the configuration file and removing the warnings from +it. If you command: + +~~~text +stack build +~~~ + +you may see something like this: + +~~~text +Warning: Warnings (added by new or init): Some packages were found to be + incompatible with the snapshot and have been left commented out in the + packages section. + + Warning (added by new or init): Specified snapshot could not satisfy + all dependencies. Some external packages have been added as + dependencies. + + You can omit this message by removing it from the project-level + configuration file. +~~~ diff --git a/doc/tutorial/building_your_project.md b/doc/tutorial/building_your_project.md new file mode 100644 index 0000000000..a09942bb10 --- /dev/null +++ b/doc/tutorial/building_your_project.md @@ -0,0 +1,346 @@ +
+ +# 4. Building your project + +The [`stack build`](../commands/build_command.md) command is the heart of Stack. +It is the engine that powers building your code, testing it, getting +dependencies, and more. Much of the remainder of this getting started guide will +cover its features. + +!!! note + + Using the `build` command twice with the same options and arguments should + generally do nothing (besides things like rerunning test suites), and + should, in general, produce a reproducible result between different runs. + +## Adding dependencies + +A Haskell package often depends on code exposed by other Haskell packages. + +Let us say we decide to modify our existing `helloworld` package source code to +use a new library, the one provided by the +[`text`](https://hackage.haskell.org/package/text) package. + +We can modify `src/Lib.hs` so that its contents are as follows (click +:material-plus-circle: to learn more): + +~~~haskell +{-# LANGUAGE OverloadedStrings #-} -- (1)! + +module Lib + ( someFunc + ) where + +import qualified Data.Text.IO as T -- (2)! + +someFunc :: IO () +someFunc = T.putStrLn "someFunc" --(3)! +~~~ + +1. Enables overloaded string literals. String literals now have type + `(IsString a) => a`. + +2. The module is exposed by the library of the `text` package. + +3. `Data.Text.IO.putStrLn :: Text -> IO ()`. + +If we command: + +~~~text +stack build +~~~ + +Stack will report Stack error [S-7282] during the build, with output like the +following: + +~~~text +... +Building library for helloworld-0.1.0.0.. +[1 of 2] Compiling Lib [Source file changed] + +src\Lib.hs:7:1: error: + Could not load module ‘Data.Text.IO’ + It is a member of the hidden package ‘text-2.0.2’. + Perhaps you need to add ‘text’ to the build-depends in your .cabal file. + Use -v (or `:set -v` in ghci) to see a list of the files searched for. 
+ | +7 | import qualified Data.Text.IO as T + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Error: [S-7282] + Stack failed to execute the build plan. + + While executing the build plan, Stack encountered the error: + + [S-7011] + While building package helloworld-0.1.0.0 (scroll up to its section to + see the error) using: + ... + Process exited with code: ExitFailure 1 +~~~ + +The error `Could not load module ...` means that the package exposing the module +in question is not available. + +To tell Stack that the `text` package is a dependency of the `helloworld` +package, you need to update the package description file (`package.yaml`). +Specifically, you need to add `text` under the `dependencies` key, like this: + +~~~yaml +dependencies: +- base >= 4.7 && < 5 +- text # added +~~~ + +Now, if we command: + +~~~text +stack build +~~~ + +we should get a successful result. + +The output means that the `text` package was downloaded, configured, built, and +locally installed. Once that was done, we moved on to building our project +package (`helloworld`). At no point did we need to ask Stack to build +dependencies — it does so automatically. + +## Listing dependencies + +Let us have Stack add a few more dependencies to our project. First, we will +include two new packages in the `dependencies` section for our library in our +`package.yaml`: + +~~~yaml +dependencies: +- base >= 4.7 && < 5 +- text +- filepath # added +- containers # added +~~~ + +After adding these two dependencies, we can again command: + +~~~text +stack build +~~~ + +to have them downloaded, configured, built, and locally installed. + +To find out which versions of these packages Stack installed, we can command: + +~~~text +stack ls dependencies +~~~ + +## Packages not in the snapshot + +The packages `text`, `filepath` and `containers` have something in common: they +are all provided with GHC (referred to as GHC boot packages). 
+ +Let us try a dependency on a more off-the-beaten-track package: the joke +[acme-missiles](http://www.stackage.org/package/acme-missiles) package. + +We can further modify `src/Lib.hs` so that its contents are as follows: + +~~~haskell +module Lib + ( someFunc + ) where + +import Acme.Missiles ( launchMissiles ) + +someFunc :: IO () +someFunc = launchMissiles +~~~ + +As before, to tell Stack that the `acme-missiles` package is a dependency of the +`helloworld` package, we must update the package description file +(`package.yaml`). The relevant part of that file now looks like this: + +~~~yaml +dependencies: +- base >= 4.7 && < 5 +- text +- filepath +- containers +- acme-missiles # added +~~~ + +However, if we command: + +~~~text +stack build +~~~ + +Stack will report Stack error [S-4804] during the build, with output like the +following: + +~~~text +Error: [S-4804] + Stack failed to construct a build plan. + + While constructing the build plan, Stack encountered the following + errors. The 'Stack configuration' refers to the set of package versions + specified by the snapshot (after any dropped packages, or pruned GHC boot + packages; if a boot package is replaced, Stack prunes all other such + packages that depend on it) and any extra-deps: + + In the dependencies for helloworld-0.1.0.0: + * acme-missiles needed, but no version is in the Stack configuration + (latest matching version is 0.3). + The above is/are needed since helloworld is a build target. 
+ + Some different approaches to resolving some or all of this: + + * Recommended action: try adding the following to your extra-deps in + ...\helloworld\stack.yaml (project-level configuration): + + - acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a6 +87d45965778deb8694c5d1,613 +~~~ + +The error message explains that Stack was unable to construct a build plan and +why: the package `acme-missiles` was needed but no version of that package is +in the set of package versions specified by the snapshot. Stack makes a +suggestion to fix that. + +This brings us to the next major topic in using Stack. + +## Extending snapshots + +A snapshot specifies a version of GHC and a set of package versions chosen to +work well together. However, sometimes you will want to use package versions +that are not specified by the snapshot. That may be because the package is not +in the snapshot or because a different version of the package is in the +snapshot. + +Remember above when `stack new` selected some +[LTS snapshot](https://github.com/commercialhaskell/lts-haskell#readme) for us? +That defined our build plan and available packages. When we tried using the +`text` package, it just worked, because it was part of the LTS *package set*. + +We have updated the description of the `helloworld` package (in `package.yaml`) +to specify that it depends on the `acme-missiles` package, but `acme-missiles` +is not a member of the set of package versions specified by the snapshot. So +building failed. + +To add a version of `acme-missiles` to the available package versions, we will +use the `extra-deps` key in Stack's project-level configuration file +(`stack.yaml`). That key defines extra package versions, not present in the +snapshot, that will be needed as dependencies. You can add this like so: + +~~~yaml +extra-deps: +- acme-missiles-0.3 # not in the LTS snapshot +~~~ + +Now, if we command: + +~~~text +stack build +~~~ + +we should get a successful result. 
+ +## Stackage snapshots + +With that out of the way, let us dig a little bit more into these snapshots. We +mentioned the LTS snapshots, and you can get information about it at +[https://www.stackage.org/lts](https://www.stackage.org/lts), including: + +* The appropriate value (`lts-24.37`, as is currently the latest LTS) +* The GHC version used +* A full list of all packages versions available in this snapshot +* The ability to perform a Hoogle search on the packages in this snapshot +* A [list of all modules](https://www.stackage.org/lts/docs) in a snapshot, + which can be useful when trying to determine which package to add to your + `package.yaml` file. + +You can also see a +[list of all available snapshots](https://www.stackage.org/snapshots). You will +notice two flavors: LTS (for "Long Term Support") and Nightly. You can read more +about them on the +[LTS Haskell GitHub page](https://github.com/commercialhaskell/lts-haskell#readme). +If you are not sure which to use, start with LTS Haskell (which Stack will lean +towards by default as well). + +## Snapshots and GHC versions + +As mentioned, a snapshot specifies a version of GHC as well as a set of package +versions. + +??? question "I want to use a particular version of GHC. What snapshot should I use?" + + For each supported version of GHC, the Stackage + [homepage](https://www.stackage.org/) lists the most recent Stackage + snapshot. In most cases, that is the snapshot you should use. + +??? question "Can I use a snapshot like `ghc-9.10.3`?" + + Snapshot `ghc-9.10.3` specifies GHC 9.10.3 and, consequently, the GHC boot + packages that come with that compiler. However, the snapshot does not + include the many other package versions that will work with that compiler. + For a set of those package versions, see the snapshots published by the + [Stackage](https://www.stackage.org/) project. + +Let us try using an older Stackage LTS Haskell snapshot. 
We will use the LTS
+22.43 snapshot with the command:
+
+~~~text
+stack --snapshot lts-22.43 build
+~~~
+
+Stackage LTS Haskell 22.43 specifies GHC 9.6.6. If that version of GHC is not
+already available, Stack will try to fetch it and install it before starting the
+rest of the build.
+
+## Specifying a snapshot
+
+A snapshot must be specified in Stack's project-level configuration file
+(`stack.yaml`, by default). For further information, see the
+[`snapshot`](../configure/yaml/project.md#snapshot) project-specific
+configuration option documentation.
+
+As we have seen, a snapshot can also be specified on the command line. That can
+be useful in a Continuous Integration (CI) setting.
+
+When passed on the command line, you also get some additional "short-cut"
+versions of snapshots: `--snapshot nightly` will use the newest Nightly snapshot
+available, `--snapshot lts` will use the newest LTS, and `--snapshot lts-22`
+will use the newest LTS in the 22.x series. The reason these are only available
+on the command line and not in your `stack.yaml` file is that using them:
+
+1. Will slow down your build (since Stack then needs to download information on
+   the latest available LTS each time it builds)
+2. Produces unreliable results (since a build run today may proceed differently
+   tomorrow because of changes outside of your control)
+
+## Cleaning up your project
+
+Stack creates files during the build process and stores those files in
+directories within a local project or package directory known as
+[Stack work directories](../topics/stack_work.md). Stack can be used without an
+understanding of the content of those directories.
+
+If you wish, you can clean up files created during the build process for your
+project using the `stack clean` and `stack purge` commands.
+
+### The `stack clean` command
+
+`stack clean` deletes the local working directories containing compiler output. 
+
+By default, that means the contents of directories in `.stack-work/dist`, for
+all the `.stack-work` directories within a project.
+
+Use `stack clean <specific-package>` to delete the output for the package
+_specific-package_ only.
+
+### The `stack purge` command
+
+`stack purge` deletes the local stack working directories, including extra-deps,
+git dependencies and the compiler output (including logs). It does not delete
+any snapshot packages, compilers or programs installed using `stack install`.
+This essentially reverts the project to a completely fresh state, as if it had
+never been built.
+
+`stack purge` is a shortcut for `stack clean --full`. diff --git a/doc/tutorial/cabal_flags_and_ghc_options.md b/doc/tutorial/cabal_flags_and_ghc_options.md new file mode 100644 index 0000000000..2878761f2e --- /dev/null +++ b/doc/tutorial/cabal_flags_and_ghc_options.md @@ -0,0 +1,165 @@ +
+ +# 10. Cabal flags and GHC options + +There are two common ways to affect how a package will be built: with Cabal +flags and with GHC options. + +## Cabal flags + +A package description may specify one or more Cabal flags: + +=== "Cabal file" + + ~~~text + flag my-flag-name + description: My (optional) description of my flag. + default: false -- Optional: the default value is true + manual: true -- Optional: the default value is false + ~~~ + +=== "`package.yaml` (Hpack)" + + ~~~yaml + flags: + my-flag-name: + description: My (optional) description of my flag. + default: true # Required + manual: false # Required + ~~~ + +??? question "How does `manual: false` affect Stack's builds?" + + `manual: false` has different implications for Cabal and Stack. Cabal tries + to 'solve' dependencies using the flag’s default value and, if it can't, + tries again with the negated default value. Stack emphasises reproducible + builds. It only tries to build with the flag's default value and, if it + can't, reports that it can't. + +Cabal flags can be set or unset at the command line or as a project-specific +Stack option. + +To set or unset a Cabal flag at the command line, we can use the `--flag` +option. The `yackage` package has an `upload` flag that is enabled by default. +We can command: + +~~~text +stack build --flag yackage:-upload +~~~ + +This means: when compiling the `yackage` package, turn off the `upload` Cabal +flag (thus the `-` in `-upload`). Unlike other tools, Stack is explicit about +which package's flag you want to change. It does this for two reasons: + +1. There is no global meaning for Cabal flags, and therefore two packages can + use the same flag name for completely different things. + +2. By following this approach, we can avoid unnecessarily recompiling snapshot + packages that happen to use a Cabal flag that we are using. + +You can also change Cabal flag values on the command line for extra-dep and +snapshot packages. 
If you do this, that package will automatically be promoted
+to an extra-dep, since the build plan is different than what the plan snapshot
+definition would entail.
+
+If you have Cabal flags that you will be setting regularly when building your
+packages, you can add them to your Stack project-level configuration file
+(`stack.yaml`). For more information, see the
+[flags](../configure/yaml/project.md#flags) project-specific configuration
+option documentation.
+
+## GHC options
+
+GHC options can be specified at the command line or as a non-project specific
+Stack option.
+
+At the command line, consider the command:
+
+~~~text
+stack build --ghc-options="-Wall -Werror"
+~~~
+
+or, equivalently:
+
+~~~text
+stack build --ghc-options=--pedantic
+~~~
+
+By default, this will set GHC's `-Wall` and `-Werror` options for all *project
+packages*. This will not, however, affect other packages at all. This design
+provides us with reproducible and fast builds.
+
+??? question "Can GHC options for other packages be specified at the command line?"
+
+    Yes, GHC options can be specified at the command line for all packages or
+    only project packages that are targets. For further information, see the
+    documentation for the
+    [apply-ghc-options](../configure/yaml/non-project.md#apply-ghc-options)
+    non-project specific configuration option.
+
+??? question "What if GHC options specified at the command line apply only to targets?"
+
+    By changing the default using the
+    [apply-ghc-options](../configure/yaml/non-project.md#apply-ghc-options)
+    configuration option, it is possible to specify that GHC options at the
+    command line apply only to project packages that are *targets*. If this is
+    done and you change your targets, the options will no longer apply to other
+    project packages.
+
+    Let us consider an example from the `wai` repository, which includes the
+    `wai` and `warp` packages, the latter depending on the former. 
If we
+    command:
+
+    ~~~text
+    stack build --ghc-options=-O0 wai
+    ~~~
+
+    Stack will build all of the dependencies of `wai` (including `warp`) and then
+    build `wai` with all GHC optimizations disabled.
+
+    Now let us add `warp` as a target. If we command:
+
+    ~~~text
+    stack build --ghc-options=-O0 wai warp
+    ~~~
+
+    this builds the additional dependencies for `warp`, and then builds `warp`
+    with GHC optimizations disabled. Importantly, Stack does not rebuild `wai`,
+    since `wai`'s configuration has not been altered.
+
+    Now the surprising case. If we command:
+
+    ~~~text
+    stack build --ghc-options=-O0 warp
+    ~~~
+
+    you may expect this to do nothing, as neither `wai` nor `warp` has changed.
+    However, Stack will rebuild `wai` with GHC optimizations enabled again, and
+    then rebuild `warp` (with optimizations disabled) against this newly-built
+    `wai`. The reason is reproducible builds. If we had never built `wai` or
+    `warp` before, trying to build `warp` would require building all of its
+    dependencies, and it would do so with default GHC options (that is, GHC
+    optimizations enabled). These dependencies would include `wai`. So when we
+    command:
+
+    ~~~text
+    stack build --ghc-options=-O0 warp
+    ~~~
+
+    we want Stack's behavior to be unaffected by any previous build steps we
+    took.
+
+If you have GHC options that you will be applying regularly when building your
+packages, you can add them to your Stack project-level configuration file
+(`stack.yaml`) or (if applicable) to a
+[global Stack configuration file](../configure/yaml/index.md#project-level-and-global-configuration-files).
+For more information, see the
+[ghc-options](../configure/yaml/non-project.md#ghc-options) non-project specific
+configuration option documentation.
+
+??? question "Can Stack be configured to specify GHC options for specific packages?"
+
+    Yes, Stack can be configured to specify GHC options for specific packages,
+    either globally or at the project level. 
For more information, see the + [ghc-options](../configure/yaml/non-project.md#ghc-options) non-project + specific configuration option documentation. diff --git a/doc/tutorial/executing_commands.md b/doc/tutorial/executing_commands.md new file mode 100644 index 0000000000..e363e13227 --- /dev/null +++ b/doc/tutorial/executing_commands.md @@ -0,0 +1,62 @@ +
+ +# 12. Executing commands + +We have already used `stack exec` multiple times in this guide. As you have +likely already guessed, it allows you to run executables, but with a slightly +modified environment. + +## The `stack exec` command + +In particular: `stack exec` looks for executables on Stack's bin +paths, and sets a few additional environment variables (like adding those paths +to the PATH, and setting `GHC_PACKAGE_PATH`, which tells GHC which package +databases to use). + +If you want to see exactly what the modified environment looks like, try +command: + +~~~text +stack exec env +~~~ + +The only issue is how to distinguish flags to be passed to Stack versus those +for the underlying program. Thanks to the `optparse-applicative` library, Stack +follows the Unix convention of `--` to separate these. For example, command: + +~~~text +stack exec --package stm -- echo I installed the stm package via --package stm +~~~ + +yields output like: + +~~~text +Writing the configuration file for the implicit global project to: +.../global-project/stack.yaml. Note: You can change the snapshot via the +snapshot key there. +Using the latest snapshot lts-24.37. +I installed the stm package via --package stm +~~~ + +Flags worth mentioning: + +* `--package foo` can be used to force a package to be installed before running + the given command. +* `--no-ghc-package-path` can be used to stop the `GHC_PACKAGE_PATH` environment + variable from being set. Some tools — notably Cabal (the tool) — do not behave + well with that variable set. + +You may also find it convenient to use `stack exec` to launch a subshell +(substitute `bash` with your preferred shell) where your compiled executable is +available at the front of your PATH. Command: + +~~~text +stack exec bash +~~~ + +## The `stack ghc` and `stack runghc` commands + +You will sometimes want to just compile (or run) a single Haskell source file, +instead of creating an entire Cabal package for it. 
You can use `stack exec ghc` +or `stack exec runghc` for that. As simple helpers, we also provide the +`stack ghc` and `stack runghc` commands, for these common cases. diff --git a/doc/tutorial/hello_world_example.md b/doc/tutorial/hello_world_example.md new file mode 100644 index 0000000000..32bc06be33 --- /dev/null +++ b/doc/tutorial/hello_world_example.md @@ -0,0 +1,403 @@ +
+ +# 1. A Hello World example + +With Stack installed, we will create a new Stack project and walk through the +most common Stack commands. First, let us look at other foundations for getting +started with Haskell. + +## Foundations + +Although this guide starts with creating Stack projects using Stack, if you are +relatively inexperienced in coding with Haskell, there are things you +may wish to experience first. Examples are: + +* using GHC interactively to evaluate Haskell expressions and explore their + types. The [`stack ghci`](../commands/ghci_command.md) command can help do + that; and + +* developing simple Haskell programs where all your source code is in a single + file. The [`stack runghc`](../commands/runghc_command.md), + [`stack ghc`](../commands/ghc_command.md), and + [`stack script`](../commands/script_command.md) commands can all help with + that. + +By default, the `stack new` command introduced below creates a simple project. +However, Stack projects can be even simpler than that. + +For example, imagine you already had the following simple Haskell program in a +single source file named `MyMainModule.hs` in a directory named `my-project`. + +~~~haskell +module Main ( main ) where + +main :: IO () +main = putStrLn "Hello, World!" +~~~ + +You could create a Haskell package from that source file by also creating +manually a package description file named `package.yaml` in `my-project`, with +the contents: + +~~~yaml +name: my-package-name +version: 1 +dependencies: base +executable: + main: MyMainModule.hs +~~~ + +This description reflects the following: + +* all Haskell packages have a name and a version; and + +* the GHC boot package [`base`](https://hackage.haskell.org/package/base) is a + dependency of almost all other packages. 
+ +You could then create a Stack project based on that single package by also +creating manually a Stack project-level configuration file named `stack.yaml` in +`my-project`, with the contents: + +~~~yaml +snapshot: lts-24.37 +~~~ + +By adding those two files (one to describe a package, the other to configure the +project) manually, you would now have a valid single-package Stack project in +the `my-project` directory. However, let us step through what Stack's commands +provide, when it comes to creating new projects. + +In this guide, unless stated otherwise, the working directory is the project's +root directory. + +## The `stack new` command + +We will start off with the [`stack new`](../commands/new_command.md) command to +create a new Stack *project* from a project template. + +The project template `new-template` is used by default. However, in our example, +we will use it by name. + +That template creates a project with a single package of the same name. So, we +need to pick a name for the project that is also a valid package name. We will +call our project `helloworld`. + +??? question "How do project packages relate to projects?" + + A project can have one or more packages. Each project package has its own + root directory. In the case of a single-package project, the project + directory and the package directory can be the same directory. + +??? question "What is a valid package name?" + + A valid package name consists of one or more alphanumeric words separated by + hyphens. Each word must contain at least one letter. That is, the word must + not be interpreted as a number. + + The names of packages are intended to be unique. + +??? question "What is a valid package version number?" + + A package version number is one or more natural numbers (including zero) + separated by dots (`.`). However, Haskell's Package Versioning Policy (PVP) + says that a version should have the form *A.B.C*, and may optionally have + any number of additional components. + +??? 
question "Are other project templates available?"
+
+    Yes. For further information about project templates, command:
+    ~~~text
+    stack templates
+    ~~~
+
+From the root directory for all our Haskell projects, we command:
+
+~~~text
+stack new helloworld new-template
+~~~
+
+For this first Stack command, Stack will do some setting up. For example, it
+will create the [Stack root](../topics/stack_root.md) directory.
+
+Other than any setting up, Stack will:
+
+* create the project directory;
+* download the project template;
+* attempt to populate the project template based on parameters; and
+* create and initialise Stack's project-level configuration file.
+
+Unless the parameters have been configured, Stack will note that parameters were
+needed by the template but not provided. That can be ignored for now.
+
+??? question "How can I configure project template parameters?"
+
+    For further information, see the
+    [`templates`](../configure/yaml/non-project.md#templates) non-project
+    specific configuration option.
+
+    As noted in Stack's output, parameters to populate project templates can
+    also be set at the command line by using the options of the `stack new`
+    command.
+
+??? question "Can I create a new project in the current working directory?"
+
+    Yes. Pass the `--bare` flag to cause Stack to create the project in the
+    current working directory rather than in a new project directory.
+
+We now have a project in the `helloworld` directory! We will change to that
+directory, with command:
+
+~~~text
+cd helloworld
+~~~
+
+## The `stack build` command
+
+Next, we will run the most important Stack command,
+[`stack build`](../commands/build_command.md). We command:
+
+~~~text
+stack build
+~~~
+
+Stack needs a version of GHC and, on Windows, a version of MSYS2, in order to
+build your project. Stack will discover that you are missing it and will install
+it for you.
+
+You will get intermediate download percentage statistics while the download is
+occurring. 
This command may take some time, depending on download speeds. + +??? question "Where is the Stack-supplied GHC located?" + + You can use the [`stack path`](../commands/path_command.md) command for path + information. To identify where GHC is installed, command: + + === "Unix-like" + + ~~~text + stack exec -- which ghc + /home//.stack/programs/x86_64-linux/ghc-9.6.5/bin/ghc + ~~~ + + === "Windows" + + ~~~text + stack exec -- where.exe ghc + C:\Users\\AppData\Local\Programs\stack\x86_64-windows\ghc-9.6.5\bin\ghc.exe + ~~~ + + As you can see from that path, the installation is placed to not interfere + with any other GHC installation, whether system-wide or different GHC + versions installed by Stack. + +??? question "Can I use that version of GHC by commanding `ghc`?" + + No. GHC will be installed to the Stack programs directory, which is likely + not on the PATH, so commanding `ghc` will not work. However, that version of + GHC can be used in the Stack environment. For more information, see the + [`stack exec`](../commands/exec_command.md) command, + [`stack ghc`](../commands/ghc_command.md) command, and + [`stack runghc`](../commands/ghc_command.md) command documentation. + +Once a version of GHC and, on Windows, a version of MSYS2, is installed, Stack +will then build your project. The end of the output should look similar to this: + +=== "Unix-like" + + ~~~text + ... + helloworld> configure (lib + exe) + Configuring helloworld-0.1.0.0... + helloworld> build (lib + exe) with ghc-9.10.3 + Preprocessing library for helloworld-0.1.0.0.. + Building library for helloworld-0.1.0.0.. + [1 of 2] Compiling Lib + [2 of 2] Compiling Paths_helloworld + Preprocessing executable 'helloworld-exe' for helloworld-0.1.0.0.. + Building executable 'helloworld-exe' for helloworld-0.1.0.0.. 
+ [1 of 2] Compiling Main + [2 of 2] Compiling Paths_helloworld + [3 of 3] Linking .stack-work/dist/x86_64-linux-tinfo6/ghc-9.10.3/build/helloworld-exe/helloworld-exe + helloworld> copy/register + Installing library in .../helloworld/.stack-work/install/x86_64-linux-tinfo6/a2caceceda039eb4f791856f85a68f9582d4daf3d0527344693ff3d1fcd92ba4/9.6.6/lib/x86_64-linux-ghc-9.10.3/helloworld-0.1.0.0-KFyX8zLxDvzLZURq3JaCVX + Installing executable helloworld-exe in .../helloworld/.stack-work/install/x86_64-linux-tinfo6/a2caceceda039eb4f791856f85a68f9582d4daf3d0527344693ff3d1fcd92ba4/9.6.6/bin + Registering library for helloworld-0.1.0.0.. + ~~~ + +=== "Windows" + + ~~~text + ... + helloworld> configure (lib + exe) + Configuring helloworld-0.1.0.0... + helloworld> build (lib + exe) with ghc-9.10.3 + Preprocessing library for helloworld-0.1.0.0.. + Building library for helloworld-0.1.0.0.. + [1 of 2] Compiling Lib + [2 of 2] Compiling Paths_helloworld + Preprocessing executable 'helloworld-exe' for helloworld-0.1.0.0.. + Building executable 'helloworld-exe' for helloworld-0.1.0.0.. + [1 of 2] Compiling Main + [2 of 2] Compiling Paths_helloworld + [3 of 3] Linking .stack-work\dist\effaccc7\build\helloworld-exe\helloworld-exe.exe + helloworld> copy/register + Installing library in ...\helloworld\.stack-work\install\c8c71a24\lib\x86_64-windows-ghc-9.10.3\helloworld-0.1.0.0-KFyX8zLxDvzLZURq3JaCVX + Installing executable helloworld-exe in ...\helloworld\.stack-work\install\c8c71a24\bin + Registering library for helloworld-0.1.0.0.. + ~~~ + + On Windows, Stack uses hashes of certain information to keep paths short. + +Stack aims not to rebuild unnecessarily. If we command `stack build` a second +time, nothing happens. + +## The `stack exec` command + +The output of the previous command has three main steps. 
You can see, from the
+first two steps, that a library (lib) and an executable (exe) are being built
+and that the final step involved the installation of an executable named
+`helloworld-exe` (on Windows, the file is `helloworld-exe.exe`) (extract):
+
+~~~text
+helloworld> configure (lib + exe)
+...
+helloworld> build (lib + exe) with ghc-9.10.3
+...
+helloworld> copy/register
+...
+Installing executable helloworld-exe in .../helloworld/.stack-work/.../bin
+~~~
+
+The executable is installed in a location in the project's `.stack-work`
+directory.
+
+Now, let us use the [`stack exec`](../commands/exec_command.md) command to run
+our executable. We command:
+
+~~~text
+stack exec helloworld-exe
+~~~
+
+and the output is just:
+
+~~~text
+someFunc
+~~~
+
+??? question "Why is the output just `someFunc`?"
+
+    The code in the `new-template` project template is very simple. The package
+    has a Haskell module `Lib`:
+    ~~~haskell
+    module Lib
+      ( someFunc
+      ) where
+
+    someFunc :: IO ()
+    someFunc = putStrLn "someFunc"
+    ~~~
+
+    and a Haskell module `Main`:
+    ~~~haskell
+    module Main (main) where
+
+    import Lib
+
+    main :: IO ()
+    main = someFunc
+    ~~~
+
+    `putStrLn "someFunc"` is an action that, when executed, outputs the string
+    `someFunc` to the standard output channel.
+
+`stack exec` works by providing the same reproducible environment that was used
+to build your project to the command that you are running. Thus, it knew where
+to find `helloworld-exe` even though it is not on the PATH outside of that
+environment.
+
+??? question "How can I find the PATH used in the Stack environment?"
+
+    Command `stack path --bin-path` to see the PATH in the Stack environment.
+
+!!! info
+
+    On Windows, the Stack environment includes the `\mingw64\bin`, `\usr\bin`
+    and `\usr\local\bin` directories of the Stack-supplied MSYS2. 
If your
+    executable depends on files (for example, dynamic-link libraries) in those
+    directories and you want to run it outside of the Stack environment, you
+    will need to ensure copies of those files are on the PATH.
+
+## The `stack test` command
+
+Finally, like all good software, `helloworld` actually has a test suite.
+
+Let us run it with the [`stack test`](../commands/test_command.md) command. We
+command:
+
+~~~text
+stack test
+~~~
+
+The start of the output should look similar to this:
+
+=== "Unix-like"
+
+    ~~~text
+    helloworld-0.1.0.0: unregistering (components added: test:helloworld-test)
+    helloworld> configure (lib + exe + test)
+    Configuring helloworld-0.1.0.0...
+    helloworld> build (lib + exe + test) with ghc-9.10.3
+    Preprocessing library for helloworld-0.1.0.0..
+    Building library for helloworld-0.1.0.0..
+    Preprocessing test suite 'helloworld-test' for helloworld-0.1.0.0..
+    Building test suite 'helloworld-test' for helloworld-0.1.0.0..
+    [1 of 2] Compiling Main
+    [2 of 2] Compiling Paths_helloworld
+    [3 of 3] Linking .stack-work/dist/x86_64-linux-tinfo6/ghc-9.10.3/build/helloworld-test/helloworld-test
+    Preprocessing executable 'helloworld-exe' for helloworld-0.1.0.0..
+    Building executable 'helloworld-exe' for helloworld-0.1.0.0..
+    helloworld> copy/register
+    Installing library in .../helloworld/.stack-work/install/x86_64-linux-tinfo6/a2caceceda039eb4f791856f85a68f9582d4daf3d0527344693ff3d1fcd92ba4/9.6.6/lib/x86_64-linux-ghc-9.10.3/helloworld-0.1.0.0-KFyX8zLxDvzLZURq3JaCVX
+    Installing executable helloworld-exe in .../helloworld/.stack-work/install/x86_64-linux-tinfo6/a2caceceda039eb4f791856f85a68f9582d4daf3d0527344693ff3d1fcd92ba4/9.6.6/bin
+    Registering library for helloworld-0.1.0.0..
+    ~~~
+
+=== "Windows"
+
+    ~~~text
+    helloworld-0.1.0.0: unregistering (components added: test:helloworld-test)
+    helloworld> configure (lib + exe + test)
+    Configuring helloworld-0.1.0.0... 
+ helloworld> build (lib + exe + test) with ghc-9.10.3 + Preprocessing library for helloworld-0.1.0.0.. + Building library for helloworld-0.1.0.0.. + Preprocessing test suite 'helloworld-test' for helloworld-0.1.0.0.. + Building test suite 'helloworld-test' for helloworld-0.1.0.0.. + [1 of 2] Compiling Main + [2 of 2] Compiling Paths_helloworld + [3 of 3] Linking .stack-work\dist\effaccc7\build\helloworld-test\helloworld-test.exe + Preprocessing executable 'helloworld-exe' for helloworld-0.1.0.0.. + Building executable 'helloworld-exe' for helloworld-0.1.0.0.. + helloworld> copy/register + Installing library in ...\helloworld\.stack-work\install\0aa166fa\lib\x86_64-windows-ghc-9.10.3\helloworld-0.1.0.0-KFyX8zLxDvzLZURq3JaCVX + Installing executable helloworld-exe in ...\helloworld\.stack-work\install\0aa166fa\bin + Registering library for helloworld-0.1.0.0.. + ~~~ + +Again, Stack does not rebuild unnecessarily. Only the test suite component is +compiled and linked. + +The output should then conclude: + +~~~text +helloworld> test (suite: helloworld-test) + +Test suite not yet implemented + + + +helloworld> Test suite helloworld-test passed +Completed 2 action(s). +~~~ + +Having build the test suite executable, Stack then automatically runs it. diff --git a/doc/tutorial/index.md b/doc/tutorial/index.md new file mode 100644 index 0000000000..a46fb1c563 --- /dev/null +++ b/doc/tutorial/index.md @@ -0,0 +1,221 @@ +--- +title: Getting started +--- + +
+ +# Getting started + +Stack is a program for developing [Haskell](https://www.haskell.org/) projects. + +You may be getting started with Haskell or you may only be getting started with +Stack. If you are getting started with Haskell, you may already know other +programming languages or you may be new to coding. + +This guide is aimed at people getting started with Stack. It takes a new Stack +user through the ways that Stack is typically used. It requires no prior +experience of Stack or other Haskell tools. + +This guide will not teach Haskell - other resources are better placed to do +that - or involve much code. However, it does aim to be accessible to people who +who are relatively inexperienced in coding with Haskell. + +Terms used in the guide will be explained as they are introduced and are also +defined in the [glossary](../glossary.md). + +Some of Stack's features will not be needed regularly or by all users. Other +parts of Stack's documentation include its [commands](../commands/index.md) and +its [configuration](../configure/index.md). + +## Other Haskell tools + +First, simplifying greatly, let us briefly place Stack and other Haskell tools +in the Haskell landscape. + +Haskell was specified in the +[Haskell 98 Language and Library Reports](https://www.haskell.org/onlinereport/), +first published in February 1999, and further specified in the +[Haskell 2010 Language Report](https://www.haskell.org/onlinereport/haskell2010/), +published in April 2010. Extensions to the language have been developed (see +below). + +The Haskell compiler is **GHC** (the +[Glasgow Haskell Compiler](https://www.haskell.org/ghc/)). It can compile +Haskell code into executable and other binary files. GHC can also be used +interactively (**GHCi**) and Stack supports such use. 
+ +~~~mermaid +flowchart LR + code@{ shape: docs, label: "Haskell code" } + ghc[GHC] + binaries@{ shape: docs, label: "Executable and other binary files" } + code --> ghc --> binaries +~~~ + +A lot of interesting things can be done using only what comes with GHC +(including GHCi) but as Haskell code becomes more complex and there is a desire +to reuse efficiently code developed by others it becomes necessary to organise +code in a structured way. + +In that regard, Haskell code can be organised into *packages*. A Haskell package +also includes a file that describes the package's contents. The most established +description format is known as a +[Cabal file](https://cabal.readthedocs.io/en/stable/file-format-changelog.html). +The [**Hpack** project](https://github.com/sol/hpack) provides a modern +alternative format (in a file named `package.yaml`) and a library and +application that translates from that format to the legacy format. Stack has +built-in support for Hpack. Other build tools (see below) do not. + +~~~mermaid +flowchart LR + packageYaml@{ shape: doc, label: "package.yaml" } + hpack[Hpack] + cabalFile@{ shape: doc, label: "Cabal file" } + packageYaml --> hpack --> cabalFile +~~~ + +The code in a Haskell package is organised into *components*, including +components known as *libraries*. Historically, a package had no more than one +library component. The Cabal specification has developed to allow a package to +have named *sub-library* components as well as a main library. + +The code in a Haskell package can depend on the libraries in the same package +or in another package. These are known as its *dependencies*. + +GHC comes with the libraries of certain Haskell packages (known as +*boot packages*) already installed in its global database of installed +libraries. 
These include the library of the +[package `base`](https://hackage.haskell.org/package/base), which +is a dependency of almost all other packages, and the library of +[package `Cabal`](https://hackage.haskell.org/package/Cabal), which provides +code to build packages and components of packages using GHC. + +Two important public databases are **Hackage** +[(the Haskell Package Repository)](https://hackage.haskell.org/) and +[**Stackage**](https://www.stackage.org/). + +~~~mermaid +flowchart LR + hackage@{ shape: lin-cyl, label: "Hackage + (packages)" } + stackage@{ shape: lin-cyl, label: "Stackage + (snapshots)" } + hackage --> stackage +~~~ + +Hackage is a database of Haskell packages, each identified by a name and version +number. There were over 17,000 package names on Hackage as at July 2025. + +Stackage is a database of collections of Haskell package versions on +Hackage that are known, by testing, to work well together and with a specific +version of GHC and its boot packages. Those collections are known as +*snapshots*. As at July 2025, a snapshot for a recent version of GHC includes +well over 3,000 packages. + +Stack can unpack packages from, and upload packages to, Hackage and +builds making use of snapshots from Stackage. + +GHC comes with an application +[**Haddock**](https://haskell-haddock.readthedocs.io/latest/) that automatically +generates web page and other documentation from annotated Haskell code. The +Hackage and Stackage websites display that documentation. Stack supports the use +of Haddock. + +~~~mermaid +flowchart LR + code@{ shape: docs, label: "Haskell code" } + haddock[Haddock] + docs@{ shape: docs, label: "Documentation" } + code --> haddock --> docs +~~~ + +**Hoogle** is a [website](https://hoogle.haskell.org/) and an +[application](https://hackage.haskell.org/package/hoogle) that allows its users +to search for the library components of Haskell packages, the modules they +expose, and functions and types exported by modules. 
Stack supports the use of +Hoogle on the command line. + +GHC is described in its +[User Guide](https://downloads.haskell.org/ghc/latest/docs/users_guide/). It can +be used directly but it is a complex application with many flags and options. +These include flags to specify extensions to the Haskell language. Haskell +*build tools* are applications that make it easier to use GHC, including by +applying sensible defaults. Stack is such a build tool. Stack itself uses +the Cabal library to build. + +~~~mermaid +flowchart LR + stack[Stack] + cabal["Cabal (the library)"] + code@{ shape: docs, label: "Haskell code" } + ghc[GHC] + binaries@{ shape: docs, label: "Executable and other files" } + subgraph buildtool ["Build tool"] + direction TB + stack --> cabal --> ghc + end + code --> buildtool --> binaries +~~~ + +When Haskell code changes, GHC and build tools aim to minimise what needs to be +re-compiled. + +A Stack project may comprise only a single package, but Stack can also handle +multi-package projects. + +Another build tool is **Cabal** (the tool) (named after the library). It is +provided by the +[`cabal-install`](https://hackage.haskell.org/package/cabal-install) +Haskell package. Stack can be used independently of Cabal (the tool) but users +can also use both, if they wish. + +Some popular code editors (including +[Visual Studio Code](https://code.visualstudio.com/)) have extensions that +support Haskell coding by providing an integrated development environment (IDE). +Those extensions use **HLS** (the +[Haskell Language Server](https://haskell-language-server.readthedocs.io/en/stable/)), +an application that implements the Language Server Protocol for Haskell. + +Stack can manage versions of GHC and upgrade/downgrade Stack itself and is, in +that sense, an *installer* of those applications as well as a build tool. +However, Cabal (the tool) is not an installer and versions of HLS applicable to +versions of GHC also need to be installed. 
+[**GHCup**](https://www.haskell.org/ghcup/) is an installer of versions of GHC, +HLS, Stack and Cabal (the tool) built for various operating systems and machine +architectures. Stack can be configured to manage versions of GHC by using GHCup. + +~~~mermaid +flowchart TD + ghcUp["GHCup + installer"] + ghc["GHC + compiler"] + hls["HLS + IDE tool"] + stack["Stack + build tool, installer"] + cabal["Cabal (the tool) + build tool"] + ghcUp -.-> ghc + ghcUp -.-> hls + ghcUp -.-> stack + stack -- "(optional) to fetch GHC" --> ghcUp + ghcUp -.-> cabal +~~~ + +## Setting up + +The goal of setting up is a `stack` executable on the PATH. As we will see, when +Stack is used, it sets other things up as needed. + +*[PATH]: An environment variable that specifies a list of directories searched for executable files. + +For further information about setting up, see the +[documentation](../install_and_upgrade.md) on that topic. Return here when you +know that Stack is on the PATH. + +This guide assumes that the directory where Stack install executables (the +location reported by `stack path --local-bin`) has been added to the PATH. + +This guide assumes that your computer's operating system is one of Linux, macOS +or Windows. Stack's commands are the same on all operating systems. diff --git a/doc/tutorial/installed_package_databases.md b/doc/tutorial/installed_package_databases.md new file mode 100644 index 0000000000..c2775a114b --- /dev/null +++ b/doc/tutorial/installed_package_databases.md @@ -0,0 +1,110 @@ +
+ +# 6. Installed package databases + +Time to take a short break from hands-on examples and discuss a little +architecture. Stack has the concept of multiple *databases*. + +A database consists of a GHC package database (which contains the compiled +version of a library), executables, and a few other things as well. To give you +an idea, the contents of the parent directory of the `stack path --local-pkg-db` +directory are the directories: + +~~~text +bin +doc +lib +pkgdb +~~~ + +Databases in Stack are *layered*. For example, the database listing we just gave +is called a *local* database (also known as a *mutable* database). That is +layered on top of a *snapshot* database (also known as a *write-only* database). +The snapshot database contains the libraries and executables that are considered +to be *immutable*. Finally, GHC itself ships with a number of libraries and +executables, also considered to be immutable, which forms the *global* database. + +To get a quick idea of this, we can look at the output of the +`stack exec -- ghc-pkg list` command in our `helloworld` project: + +~~~text + + Cabal-3.6.3.0 + Win32-2.12.0.1 + array-0.5.4.0 + base-4.16.2.0 + binary-0.8.9.0 + bytestring-0.11.3.1 + containers-0.6.5.1 + deepseq-1.4.6.1 + directory-1.3.6.2 + exceptions-0.10.4 + filepath-1.4.2.2 + (ghc-9.2.3) + ghc-bignum-1.2 + ghc-boot-9.2.3 + ghc-boot-th-9.2.3 + ghc-compact-0.1.0.0 + ghc-heap-9.2.3 + ghc-prim-0.8.0 + ghci-9.2.3 + haskeline-0.8.2 + hpc-0.6.1.0 + integer-gmp-1.1 + libiserv-9.2.3 + mtl-2.2.2 + parsec-3.1.15.0 + pretty-1.1.3.6 + process-1.6.13.2 + rts-1.0.2 + stm-2.5.0.2 + template-haskell-2.18.0.0 + text-1.2.5.0 + time-1.11.1.1 + transformers-0.5.6.2 + xhtml-3000.2.2.1 + + + acme-missiles-0.3 + + + helloworld-0.1.0.0 +~~~ + +where `` refers to the directory output by +the command `stack path --global-pkg-db`, and so on. + +Notice that `acme-missiles` ends up in the *snapshot* database. 
Any package +which comes from Hackage, an archive, or a repository is considered to be an +*immutable* package. + +Anything which is considered *mutable*, or depends on something mutable, ends up +in the *local* database. This includes your own code and any other packages +located on a local file path. + +The reason we have this structure is that: + +* it lets multiple projects reuse the same binary builds of immutable packages, +* but does not allow different projects to "contaminate" each other by putting + non-standard content into the shared snapshot database. + +As you probably guessed, there can be multiple snapshot databases available. See +the contents of the `snapshots` directory in the +[Stack root](../topics/stack_root.md). + +* On Unix-like operating systems, each snapshot is in the last of a sequence of + three subdirectories named after the platform, a 256-bit hash of the source + map (how the package should be built -- including the compiler, options, and + immutable dependencies), and the GHC version. + +* On Windows, each snapshot is in a subdirectory that is a shorter hash (eight + characters) of the sequence of three directories used on Unix-like operating + systems. This is done to avoid problems created by default limits on file + path lengths on Windows systems. + +These snapshot databases do not get layered on top of each other; they are each +used separately. + +In reality, you will rarely — if ever — interact directly with these databases, +but it is good to have a basic understanding of how they work so you can +understand why rebuilding may occur at different points. diff --git a/doc/tutorial/locations_used_by_stack.md b/doc/tutorial/locations_used_by_stack.md new file mode 100644 index 0000000000..7260ae8145 --- /dev/null +++ b/doc/tutorial/locations_used_by_stack.md @@ -0,0 +1,86 @@ +
+ +# 11. Locations used by Stack + +Generally, you do not need to worry about where Stack stores various files. But +some people like to know this stuff. That's when the `stack path` command is +useful. + +## The `stack path` command + +`stack path --help` explains the available options and, consequently, +the output of the command: + +~~~text +--stack-root Global Stack root directory +--global-config Global Stack configuration file +--project-root Project root (derived from stack.yaml file) +--config-location Configuration location (where the stack.yaml file is) +--bin-path PATH environment variable +--programs Install location for GHC and other core tools (see + 'stack ls tools' command) +--compiler-exe Compiler binary (e.g. ghc) +--compiler-bin Directory containing the compiler binary (e.g. ghc) +--compiler-tools-bin Directory containing binaries specific to a + particular compiler +--local-bin Directory where Stack installs executables (e.g. + ~/.local/bin (Unix-like OSs) or %APPDATA%\local\bin + (Windows)) +--extra-include-dirs Extra include directories +--extra-library-dirs Extra library directories +--snapshot-pkg-db Snapshot package database +--local-pkg-db Local project package database +--global-pkg-db Global package database +--ghc-package-path GHC_PACKAGE_PATH environment variable +--snapshot-install-root Snapshot installation root +--local-install-root Local project installation root +--snapshot-doc-root Snapshot documentation root +--local-doc-root Local project documentation root +--local-hoogle-root Local project documentation root +--dist-dir Dist work directory, relative to package directory +--local-hpc-root Where HPC reports and tix files are stored +~~~ + +In addition, `stack path` accepts the flags above on the command line to state +which keys you are interested in. This can be convenient for scripting. 
As a +simple example, let us find out the sandboxed versions of GHC that Stack +installed: + +=== "Unix-like" + + Command: + + ~~~text + ls $(stack path --programs)/*.installed + /home//.stack/programs/x86_64-linux/ghc-9.0.2.installed + ~~~ + +=== "Windows" + + In PowerShell, command: + + ~~~text + dir "$(stack path --programs)/*.installed" + + Directory: C:\Users\mikep\AppData\Local\Programs\stack\x86_64-windows + + Mode LastWriteTime Length Name + ---- ------------- ------ ---- + -a--- 27/07/2022 5:40 PM 9 ghc-9.0.2.installed + -a--- 25/02/2022 11:39 PM 9 msys2-20210604.installed + ~~~ + +While we are talking about paths, to wipe our Stack install completely, here is +what typically needs to be removed: + +1. the Stack root folder (see `stack path --stack-root`, before you uninstall); +2. if different, the folder containing Stack's global configuration file (see + `stack path --global-config`, before you uninstall); +3. on Windows, the folder containing Stack's tools (see `stack path --programs`, + before you uninstall), which is located outside of the Stack root folder; and +4. the `stack` executable file (see `which stack`, on Unix-like operating + systems, or `where.exe stack`, on Windows). + +You may also want to delete `.stack-work` folders in any Haskell projects that +you have built using Stack. The `stack uninstall` command provides information +about how to uninstall Stack. diff --git a/doc/tutorial/multi-package_projects.md b/doc/tutorial/multi-package_projects.md new file mode 100644 index 0000000000..dc0ddd9735 --- /dev/null +++ b/doc/tutorial/multi-package_projects.md @@ -0,0 +1,152 @@ +
+ +# 9. Multi-package projects + +Everything we have done with Stack so far has used a single-package project, +where the project directory is also the package's directory. However, a Stack +project can have more than one project package. + +Let us demonstrate this with a project that has two project packages named +`packageA` and `packageB`. We will create a project directory named `my-project` +and, for our example, create the two project packages in subdirectories. +Command: + +~~~text +mkdir my-project +cd my-project +stack new packageA --no-init +stack new packageB --no-init +stack init +~~~ + +The `--no-init` flags above stop Stack from creating project-level configuration +files in the `packageA` and `packageB` directories that +[`stack new`](../commands/new_command.md) will create. + +The [`stack init`](../commands/init_command.md) command above creates a +project-level configuration file (`stack.yaml`) in the `my-project` directory. +The command should report something like this: + +~~~text +Looking for Cabal or package.yaml files to use to initialise Stack's +project-level YAML configuration file. + +Using the Cabal packages: +* packageA\ +* packageB\ + +Selecting the best among 14 snapshots... + +Note: Matches https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/24/25.yaml + +Selected the snapshot https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/24/25.yaml. +Initialising Stack's project-level configuration file using snapshot https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/24/25.yaml. +Considered 2 user packages. +Writing configuration to stack.yaml. +Stack's project-level configuration file has been initialised. 
+~~~ + +Ignoring comments in the file, the content of the created `stack.yaml` file +should be something like this: + +~~~yaml +snapshot: + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/24/25.yaml + +packages: +- packageA +- packageB +~~~ + +The value of the [`packages`](../configure/yaml/project.md#packages) key is a +list of paths (relative paths, in this example) to project package directories. + +If we command +[`stack ide targets`](../commands/ide_command.md#the-stack-ide-targets-command), +Stack reports the build targets for these two project packages: + +~~~text +packageA:lib +packageA:exe:packageA-exe +packageA:test:packageA-test +packageB:lib +packageB:exe:packageB-exe +packageB:test:packageB-test +~~~ + +If we command +[`stack build`](../commands/build_command.md#no-targets-specified), Stack will +build all the library and executable components of all the project packages. + +One project package can depend on another. Let us demonstrate this by modifying +the main library of the `packageB` package to depend on that of the `packageA` +package. + +Currently, the source code of the `packageA` and `packageB` packages are the +same. 
Let us first modify the `someFunc` function exported by the `Lib` module +exposed by the `packageA` package, as follows: + +~~~haskell +someFunc :: IO () +someFunc = putStrLn "someFunc of packageA's Lib module" +~~~ + +and the source code of the `Lib` module exposed by the `packageB` package to +become: + +~~~haskell +{-# LANGUAGE PackageImports #-} + +module Lib + ( someFunc + ) where + +import qualified "packageA" Lib as LibA + +someFunc :: IO () +someFunc = do + putStrLn "someFunc of packageB's Lib module" + LibA.someFunc +~~~ + +In this example, as the `packageA` and `packageB` packages both expose a module +named `Lib`, we have to use GHC's language extension +[`PackageImports`](https://downloads.haskell.org/ghc/latest/docs/users_guide/exts/package_qualified_imports.html) +to allow imports from the `Lib` module exposed by the `packageA` package to be +distiguished. + +In the package description file (`package.yaml`) for the `packageB` package, we +need to specify that the dependencies of its main library now include the main +library of the `packageA` package, as follows (extract): + +~~~yaml +library: + source-dirs: src + dependencies: + - packageA # Add the dependency on the main library of the packageA package +~~~ + +Now, if we command `stack build packageB`, Stack will build the library and +executable components of the `packageA` package (the dependency) and then the +library and executable (named `packageB-exe`) of `packageB`. + +To execute the built `packageB-exe` executable, we can command: + +~~~text +stack exec packageB-exe +~~~ + +giving the expected output: + +~~~text +someFunc of packageB's Lib module +someFunc of packageA's Lib module +~~~ + +!!! note + + A project package can depend on another project package, as above. It can + also depend on a local package that is specified as an + [extra-dep](../configure/yaml/project.md#extra-deps). Although both + dependencies are local, the former is part of the project and the latter is + not. 
diff --git a/doc/tutorial/package_description.md b/doc/tutorial/package_description.md new file mode 100644 index 0000000000..5fc75d080e --- /dev/null +++ b/doc/tutorial/package_description.md @@ -0,0 +1,416 @@ +
+ +# 2. Package description + +Let us begin to look at the `helloworld` example in more detail to understand +better how Stack works. + +The contents of the project directory are set out below. Click +:material-plus-circle: to learn more about each file or directory: + +~~~shell +. +├── .stack-work # (1)! +│   └── ... +│ +├── app +│   └── Main.hs # (2)! +├── src +│   └── Lib.hs # (3)! +├── test +│ └── Spec.hs # (4)! +│ +├── .gitignore # (5)! +├── CHANGELOG.md # (6)! +├── LICENSE # (7)! +├── README.md # (8)! +│ +├── package.yaml # (9)! +├── helloworld.cabal # (10)! +├── Setup.hs # (11)! +│ +└── stack.yaml # (12)! +~~~ + +1. The Stack work directory for the project and the project package. + + Stack work directories are ones in which Stack stores files created during + the build process. A product of the build - does not affect the build (other + than to avoid rebuilding things unnecessarily). + +2. The Haskell source code for the executable (application). + + As your project develops you can add further source code files to the `app` + directory. + +3. The executable uses a library. The Haskell source code for the library. + + As your project develops you can add further source code files to the `src` + directory. + +4. The package has a test suite executable. The Haskell source code for the + test suite. + + As your project develops you can add further source code files to the `test` + directory. + +5. A text file used to configure the Git tool to ignore certain files. Does not + affect the build. + +6. A text file in the Markdown format in which changes to the project can be + documented. Does not affect the build. + +7. A text file used to document the copyright applicable to the project's files + and the licence for the use of those files. Does not affect the build. + +8. A text file in the Markdown format which is intended to be read by users of + the project. Does not affect the build. + +9. A file describing the package in the Hpack format. See further below. 
+ +10. A file describing the package in the Cabal format. See further below. + +11. A Haskell source file which is a component of the Cabal build system. See + further below. + +12. A text file in the YAML format, containing Stack's project-level + configuration. See the next part of this guide to getting started. + +The files of most interest here are `package.yaml` and `helloworld.cabal`. We +will also explain the `Setup.hs` file. + +## Package description formats + +Each package contains a file that describes the package. It is located in the +package's root directory. + +??? question "What is covered by a package description?" + + A package description includes information such as the package name and + version, and the package's *components*. A package can have an optional + main library component and optional named sub-library components. It can + also have optional executable components, test suite components and + benchmark components. The description identifies other packages on which + those components depend. + +Stack is aware of two different formats of package description, and both files +may be present in the package's root directory: + +
+ +- :material-package-variant:{ .lg .middle } __Cabal: A Cabal file__ + + Used directly by the Cabal build system. + + Unique but simple syntax. + + Named after the package (eg `helloworld.cabal`). + + If no `package.yaml` file, used directly by Stack. + + --- + + Specified by the Cabal project: + + [:octicons-arrow-right-24: Learn more](https://cabal.readthedocs.io/en/stable/cabal-package-description-file.html) + +- :material-package-variant-plus:{ .lg .middle } __Hpack: a package.yaml file__ + + Used by Stack to create a Cabal file. + + YAML syntax. + + Named `package.yaml`. + + Stack's preferred format. If present, used by Stack. + + --- + + Specified by the Hpack project: + + [:octicons-arrow-right-24: Learn more](https://github.com/sol/hpack?tab=readme-ov-file#documentation) + +
+ +??? question "Why use the Hpack format?" + + A `package.yaml` file can be more concise and less repetitive than the Cabal + file that is generated from it. That is because the Hpack format uses + defaults and top-level keys common to other parts of the format. The YAML + syntax, which may already be familiar for some users, can also avoid + repetition. + + In particular, the format's defaults can infer the names of exposed and + other modules. + + The format allows a user to specify defaults in a file on GitHub or a local + file. + +??? question "I use the Hpack format. Should I also check-in a Cabal file to my repository?" + + Yes. This helps people who do not use Stack or the Hpack tool separately. + +## `package.yaml` + +The `package.yaml` file describes the package in the +[Hpack format](https://github.com/sol/hpack?tab=readme-ov-file#documentation). + +If a `package.yaml` file is present, Stack will use its built-in Hpack +functionality to create a Cabal file. + +??? question "What are the contents of the `package.yaml` file?" 
+ + The contents of the `package.yaml` file for the `helloworld` example are + described below, using additional YAML comments: + + ~~~yaml + # The name of the package: + name: helloworld + # The version of the package: + version: 0.1.0.0 + # The GitHub repository for the package (optional): + github: "githubuser/helloworld" + # The licence for the use of the package's files (optional): + license: BSD-3-Clause + # The author of the package (optional): + author: "Author name here" + # The email address to contact the maintainer of the package (optional): + maintainer: "example@example.com" + # The copyright for the package's files (optional): + copyright: "2025 Author name here" + + # Extra files (if any) to be distributed with the source files of the + # package: + extra-source-files: + - README.md + - CHANGELOG.md + + # Metadata used when publishing your package + # synopsis: Short description of your package + # category: Web + + # To avoid duplicated efforts in documentation and dealing with the + # complications of embedding Haddock markup inside cabal files, it is + # common to point users to the README.md file. 
+ description: Please see the README on GitHub at + + + # Dependencies applicable to all components: + dependencies: + - base >= 4.7 && < 5 + + # GHC options (if any) common to all components: + ghc-options: + # These GHC flags affect which warnings GHC will emit: + - -Wall + - -Wcompat + - -Widentities + - -Wincomplete-record-updates + - -Wincomplete-uni-patterns + - -Wmissing-export-lists + - -Wmissing-home-modules + - -Wpartial-fields + - -Wredundant-constraints + + # The main (unnamed) library component of the package (if it has one): + library: + # Directories containing source files: + source-dirs: src + + # The executable components of the package (if it has any): + executables: + # The executable component named 'helloworld-exe': + helloworld-exe: + # The source file exporting the 'main' function: + main: Main.hs + # Directories containing source files: + source-dirs: app + # GHC options applicable to the component: + ghc-options: + # Link the program with the 'threaded' version of GHC's runtime system: + - -threaded + # Make all of GHC's runtime system (RTS) options available: + - -rtsopts + # Compile so as to use simultaneous threads when running the program, + # based on how many processors are in the machine. + - -with-rtsopts=-N + # Dependencies applicable to the component: + dependencies: + # The main library of the package: + - helloworld + + # The test suite components of the package (if it has any). Test suites have + # keys in common with executables: + tests: + # The test suite component named 'helloworld-test': + helloworld-test: + main: Spec.hs + source-dirs: test + ghc-options: + - -threaded + - -rtsopts + - -with-rtsopts=-N + dependencies: + - helloworld + ~~~ + +??? question "What are the contents of a minimal `package.yaml` file?" 
+ + For a package `my-package-0.1.0.0` with a main library, an executable + named `my-program`, and a dependency only on the `base` package, its + `package.yaml` file could be as simple as the one below: + + ~~~yaml + package: my-package + version: 0.1.0.0 + dependencies: + - base + library: + source-dirs: src + executables: + my-program: + main: Main.hs + source-dirs: app + ghc-options: + - -threaded + - -rtsopts + - -with-rtsopts=-N + dependencies: + - my-package + ~~~ + +## `helloworld.cabal` + +In the case of the `helloworld` example, the `helloworld.cabal` file is updated +automatically as part of the `stack build` process and should not be modified. + +If the `package.yaml` file were deleted, Stack would use the Cabal file +directly. + +??? question "What are the contents of the `helloworld.cabal` file?" + + The contents of the `helloworld.cabal` file are described below, using + additional Cabal file comments: + + ~~~text + -- The version of the Cabal package description format specification: + cabal-version: 2.2 + + -- This file has been generated from package.yaml by hpack version 0.37.0. 
+ -- + -- see: https://github.com/sol/hpack + + -- The name of the package: + name: helloworld + -- The version of the package: + version: 0.1.0.0 + -- The description of the package: + description: Please see the README on GitHub at + + -- A URL for the package: + homepage: https://github.com/githubuser/helloworld#readme + -- A URL for bug reports for the package: + bug-reports: https://github.com/githubuser/helloworld/issues + -- The author of the package: + author: Author name here + -- The email address to contact the maintainer of the package: + maintainer: example@example.com + -- The copyright for the package's files: + copyright: 2025 Author name here + -- The licence for the use of the package's files: + license: BSD-3-Clause + -- The file documenting the terms of the licence: + license-file: LICENSE + -- The Cabal system build type of the package: + build-type: Simple + -- Extra files to be distributed with the source files of the package: + extra-source-files: + README.md + CHANGELOG.md + + -- The respository for the package: + source-repository head + type: git + location: https://github.com/githubuser/helloworld + + -- The main (unnamed) library component of the package: + library + -- The modules that the library exposes: + exposed-modules: + Lib + -- The other modules of the compoment: + other-modules: + Paths_helloworld + -- Automatically generated modules of the component: + autogen-modules: + Paths_helloworld + -- Directories containing source files: + hs-source-dirs: + src + -- GHC options applicable to the component. 
In this case, they are flags + -- that affect which warnings GHC will emit: + ghc-options: -Wall -Wcompat -Widentities -Wincomplete-record-updates + -Wincomplete-uni-patterns -Wmissing-export-lists + -Wmissing-home-modules -Wpartial-fields + -Wredundant-constraints + -- Dependencies applicable to the building of the component: + build-depends: + base >=4.7 && <5 + -- The applicable version of the Haskell language: + default-language: Haskell2010 + + -- The executable 'helloworld-exe' component of the package. Executable + -- components have fields in common with library components: + executable helloworld-exe + -- The source file exporting the 'main' function: + main-is: Main.hs + other-modules: + Paths_helloworld + autogen-modules: + Paths_helloworld + hs-source-dirs: + app + -- GHC options applicable to the component. In this case, they include + -- flags that affect GHC's runtime system (RTS). + ghc-options: -Wall -Wcompat -Widentities -Wincomplete-record-updates + -Wincomplete-uni-patterns -Wmissing-export-lists + -Wmissing-home-modules -Wpartial-fields + -Wredundant-constraints -threaded -rtsopts -with-rtsopts=-N + build-depends: + base >=4.7 && <5 + , helloworld + default-language: Haskell2010 + + -- The test suite 'helloworld-test' component of the package. Test suite + -- components have fields in common with executable components: + test-suite helloworld-test + -- The type of the test suite: + type: exitcode-stdio-1.0 + main-is: Spec.hs + other-modules: + Paths_helloworld + autogen-modules: + Paths_helloworld + hs-source-dirs: + test + ghc-options: -Wall -Wcompat -Widentities -Wincomplete-record-updates + -Wincomplete-uni-patterns -Wmissing-export-lists + -Wmissing-home-modules -Wpartial-fields + -Wredundant-constraints -threaded -rtsopts -with-rtsopts=-N + build-depends: + base >=4.7 && <5 + , helloworld + default-language: Haskell2010 + ~~~ + +## `Setup.hs` + +The `Setup.hs` file is a component of the Cabal build system. 
+ +Technically, it is not needed by Stack, but it is considered good practice to +include it. The file we are using is boilerplate: + +~~~haskell +import Distribution.Simple +main = defaultMain +~~~ diff --git a/doc/tutorial/project_configuration.md b/doc/tutorial/project_configuration.md new file mode 100644 index 0000000000..23a7ba6589 --- /dev/null +++ b/doc/tutorial/project_configuration.md @@ -0,0 +1,57 @@ +
+ +# 3. Project configuration + +Let us continue to look at the `helloworld` example in more detail to understand +better how Stack works. + +As discussed in the previous part of this guide to getting started, some of the +contents of the project directory are set out below. The item of interest here +is the `stack.yaml` file. + +~~~text +. +├── .stack-work +│   └── ... +... +│ +├── package.yaml +├── helloworld.cabal +├── Setup.hs +│ +└── stack.yaml +~~~ + +## `stack.yaml` + +Stack requires a Stack project-level configuration file for every project. +`stack.yaml` is that file. The contents of the file set project-specific and +non-project-specific options that apply to the project. (Non-project +specific options that affect the project may also be set in a +[global Stack configuration file](../configure/yaml/index.md#project-level-and-global-configuration-files).) + +The contents of the `stack.yaml` file include comments beginning `#`. Ignoring +those comments, the contents will look something like this: + +~~~yaml +snapshot: + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/24/9.yaml +packages: +- . +~~~ + +The key [`snapshot`](../configure/yaml/project.md#snapshot) is a +project-specific configuration option. Its value tells Stack *how* to build your +package: which version of GHC (and, implicitly, its boot packages) to use; which +versions of other package dependencies to use, and so on. Our value here says to +use [LTS Haskell 24.37](https://www.stackage.org/lts-24.37), which implies +GHC 9.10.3 (which is why `stack build` installs that version of GHC if it is not +already available to Stack). There are a number of values you can use for +`snapshot`, which we will cover later. + +The key [`packages`](../configure/yaml/project.md#packages) is another +project-specific configuration option. Its value tells Stack which project +packages, located locally, to build. 
In our simple example, we have only a +single project package, located in the same directory, so '`.`' suffices. +However, Stack has powerful support for multi-package projects, which we will +describe as this guide progresses. diff --git a/doc/tutorial/stack_build_synonyms.md b/doc/tutorial/stack_build_synonyms.md new file mode 100644 index 0000000000..ec37b94854 --- /dev/null +++ b/doc/tutorial/stack_build_synonyms.md @@ -0,0 +1,94 @@ +
+ +# 7. `stack build` synonyms + +Let us look at a subset of the `stack --help` output: + +~~~text +build Build the package(s) in this directory/configuration +install Shortcut for 'build --copy-bins' +test Shortcut for 'build --test' +bench Shortcut for 'build --bench' +haddock Shortcut for 'build --haddock' +~~~ + +Four of these commands are just synonyms for the `build` command. They are +provided for convenience for common cases (e.g., `stack test` instead of +`stack build --test`) and so that commonly expected commands just work. + +What's so special about these commands being synonyms? It allows us to make +much more composable command lines. For example, we can have a command that +builds executables, generates Haddock documentation (Haskell API-level docs), +and builds and runs your test suites, with: + +~~~text +stack build --haddock --test +~~~ + +You can even get more inventive as you learn about other flags. For example, +take the following command: + +~~~text +stack build --pedantic --haddock --test --exec "echo Yay, it succeeded" --file-watch +~~~ + +This command will: + +* turn on all warnings and errors (the `--pedantic` flag) +* build your library and executables +* generate Haddocks (the `--haddock` flag) +* build and run your test suite (the `--test` flag) +* run the command `echo Yay, it succeeded` when that completes (the `--exec` + option) +* after building, watch for changes in the files used to build the project, and + kick off a new build when done (the `--file-watch` flag) + +## The `stack install` command and `copy-bins` option + +It is worth calling out the behavior of the `install` command and `--copy-bins` +option, since this has confused a number of users (especially when compared to +behavior of other tools like Cabal (the tool)). The `install` command does +precisely one thing in addition to the build command: it copies any generated +executables to the local binary directory. 
You may recognize the default value +for that path: + +On Unix-like operating systems, command: + +~~~text +stack path --local-bin +/home//.local/bin +~~~ + +On Windows, command: + +~~~text +stack path --local-bin +C:\Users\\AppData\Roaming\local\bin +~~~ + +That's why the download page recommends adding that directory to your PATH. This +feature is convenient, because now you can simply run `executable-name` in your +shell instead of having to run `stack exec executable-name` from inside your +project directory. + +Since it is such a point of confusion, let us list a number of things Stack does +*not* do specially for the `install` command: + +* Stack will always build any necessary dependencies for your code. The install + command is not necessary to trigger this behavior. If you just want to build a + project, run `stack build`. +* Stack will *not* track which files it is copied to your local binary directory + nor provide a way to automatically delete them. There are many great tools out + there for managing installation of binaries, and Stack does not attempt to + replace those. +* Stack will not necessarily be creating a relocatable executable. If your + executables hard-codes paths, copying the executable will not change those + hard-coded paths. + + * At the time of writing, there is no way to change those kinds of paths with + Stack, but see + [issue #848 about --prefix](https://github.com/commercialhaskell/stack/issues/848) + for future plans. + +That's really all there is to the `install` command: for the simplicity of what +it does, it occupies a much larger mental space than is warranted. diff --git a/doc/tutorial/stack_build_targets.md b/doc/tutorial/stack_build_targets.md new file mode 100644 index 0000000000..59ef23f8f8 --- /dev/null +++ b/doc/tutorial/stack_build_targets.md @@ -0,0 +1,107 @@ +
+ +# 8. `stack build` targets + +We have not discussed this too much yet, but, in addition to having a number of +synonyms *and* taking a number of options on the command line, the `build` +command *also* takes many arguments. These are parsed in different ways, and can +be used to achieve a high level of flexibility in telling Stack exactly what you +want to build. + +We are not going to cover the full generality of these arguments here; instead, +there is documentation covering the full +[build command syntax](../commands/build_command.md). Here, we will just point +out a few different types of arguments: + +* You can specify a *package name*, e.g. `stack build vector`. + * This will attempt to build the `vector` package, whether it is a local + package, in your extra-deps, in your snapshot, or just available upstream. + If it is just available upstream but not included in your locals, + extra-deps, or snapshot, the newest version is automatically promoted to + an extra-dep. +* You can also give a *package identifier*, which is a package name plus + version, e.g. `stack build yesod-bin-1.4.14`. + * This is almost identical to specifying a package name, except it will (1) + choose the given version instead of latest, and (2) error out if the given + version conflicts with the version of a project package. +* The most flexibility comes from specifying individual *components*, e.g. + `stack build helloworld:test:helloworld-test` says "build the test suite + component named helloworld-test from the helloworld package." + * In addition to this long form, you can also shorten it by skipping what + type of component it is, e.g. `stack build helloworld:helloworld-test`, or + even skip the package name entirely, e.g. `stack build :helloworld-test`. +* Finally, you can specify individual *directories* to build to trigger building + of any project packages included in those directories or subdirectories. 
+ +When you give no specific arguments on the command line (e.g., `stack build`), +it is the same as specifying the names of all of your project packages. If you +just want to build the package for the directory you are currently in, you can +use `stack build .`. + +## Components, --test, and --bench + +Here is one final important yet subtle point. Consider our `helloworld` package: +it has a library component, an executable `helloworld-exe`, and a test suite +`helloworld-test`. When you run `stack build helloworld`, how does it know which +ones to build? By default, it will build the library (if any) and all of the +executables but ignore the test suites and benchmarks. + +This is where the `--test` and `--bench` flags come into play. If you use them, +those components will also be included. So `stack build --test helloworld` will +end up including the helloworld-test component as well. + +You can bypass this implicit adding of components by being much more explicit, +and stating the components directly. For example, the following will not build +the `helloworld-exe` executable: + +~~~text +stack purge +stack build :helloworld-test +helloworld> configure (lib + test) +Configuring helloworld-0.1.0.0... +helloworld> build (lib + test) with ghc-9.6.5 +Preprocessing library for helloworld-0.1.0.0.. +Building library for helloworld-0.1.0.0.. +[1 of 2] Compiling Lib +[2 of 2] Compiling Paths_helloworld +Preprocessing test suite 'helloworld-test' for helloworld-0.1.0.0.. +Building test suite 'helloworld-test' for helloworld-0.1.0.0.. +[1 of 2] Compiling Main +[2 of 2] Compiling Paths_helloworld +[3 of 3] Linking .stack-work\dist\\build\helloworld-test\helloworld-test.exe +helloworld> copy/register +Installing library in ...\helloworld\.stack-work\install\... +Registering library for helloworld-0.1.0.0.. +helloworld> test (suite: helloworld-test) + +Test suite not yet implemented + + + +helloworld> Test suite helloworld-test passed +Completed 2 action(s). 
+~~~ + +We first purged our project to clear old results so we know exactly what Stack +is trying to do. + +The last line shows that our command also *runs* the test suite it just built. +This may surprise some people who would expect tests to only be run when using +`stack test`, but this design decision is what allows the `stack build` command +to be as composable as it is (as described previously). The same rule applies to +benchmarks. To spell it out completely: + +* The `--test` and `--bench` flags simply state which components of a package + should be built, if no explicit set of components is given +* The default behavior for any test suite or benchmark component which has been + built is to also run it + +You can use the `--no-run-tests` and `--no-run-benchmarks` flags to disable +running of these components. You can also use `--no-rerun-tests` to prevent +running a test suite which has already passed and has not changed. + +!!! note + + Stack does not build or run test suites and benchmarks for non-local + packages. This is done so that a command like `stack test` does not need to + run 200 test suites! diff --git a/doc/tutorial/stack_configuration.md b/doc/tutorial/stack_configuration.md new file mode 100644 index 0000000000..63c4b801fc --- /dev/null +++ b/doc/tutorial/stack_configuration.md @@ -0,0 +1,76 @@ +
+ +# 14. Stack configuration + +Whenever you run something with Stack, it needs a project-level configuration +file. The algorithm Stack uses to find such a file is: + +1. Check for a `--stack-yaml` or `-w` option on the command line +2. Check for a `STACK_YAML` environment variable +3. Check the current directory and all ancestor directories for a `stack.yaml` + file + +The first two provide a convenient method for using an alternate configuration. +For example: `stack build --stack-yaml stack-ghc-9.2.3.yaml` can be used by your +CI system to check your code against GHC 9.2.3. Setting the `STACK_YAML` +environment variable can be convenient if you are going to be running commands +like `stack ghc` in other directories, but you want to use the configuration you +defined in a specific project. + +If Stack does not find a project level configuration file in any of the three +specified locations, the *implicit global* logic kicks in. You have probably +noticed that phrase a few times in the output from commands above. Implicit +global is essentially a hack to allow Stack to be useful in a non-project +setting. When no implicit global configuration file exists, Stack creates one +for you with the latest LTS snapshot. This allows you to do things like: + +* compile individual files easily with `stack ghc` +* build executables without starting a project, e.g. `stack install pandoc` + +Keep in mind that there is nothing magical about this implicit global +configuration. It has no effect on projects at all. Every package you install +with it is put into isolated databases just like everywhere else. The only magic +is that it is the catch-all project whenever you are running Stack somewhere +else. 
+ +## Package description vs project-level configuration + +Now that we have covered a lot of ways to use Stack, this quick summary of +package description files versus Stack's project-level configuration file will +hopefully make sense and be a good reminder for future uses of Stack: + +
+ +- __Package description__ + + Each package has a package description file in the Cabal format (a Cabal + file named after the package. + + A package may have a package description file in the Hpack format + (`package.yaml`). If one exists, Stack will use it to generate the Cabal + file. + + --- + + Specifies which other Haskell packages are dependencies of the package. + + --- + + Specifies the components, modules, and Cabal flags provided by the package. + +- __Project-level configuration__ + + Each project has a project-level configuration file (named `stack.yaml`, by + default). + + A project can have one or more project packages. + + --- + + Specifies which package versions are available to be used. + + --- + + Can override the Cabal flag settings for individual packages. + +
diff --git a/doc/tutorial/tutorial_conclusion.md b/doc/tutorial/tutorial_conclusion.md new file mode 100644 index 0000000000..e943eb6130 --- /dev/null +++ b/doc/tutorial/tutorial_conclusion.md @@ -0,0 +1,45 @@ +
+ +# 15. In conclusion + +Stack is not the only tool available for building Haskell code. If you are +happy building with other tools, you may not need Stack. If you are experiencing +problems with other tools, give Stack a try. + +If you are a new user who has no experience with other tools, we recommend +Stack. It aims to be easy to use and its defaults match modern best practices in +Haskell development. + +Other key features of Stack include: + +
+ +- __Sandboxing__ + + --- + + A 'sandbox' is a development environment that is isolated from other parts + of the system. The concept of sandboxing is built into Stack. + +- __Snapshots__ + + --- + + A snapshot specifies a GHC versions and a set of package versions that work + well together. Stack uses snapshots to define precisely the set of package + versions available for a project. + +- __Reproducibility__ + + --- + + Stack goes to great lengths to ensure that `stack build` today does the + same thing tomorrow. Changing the build plan is always an explicit decision. + +- __Building dependencies__ + + --- + + Stack automatically builds dependencies. + +
diff --git a/doc/tutorial/using_ghc_interactively.md b/doc/tutorial/using_ghc_interactively.md new file mode 100644 index 0000000000..003baf6a80 --- /dev/null +++ b/doc/tutorial/using_ghc_interactively.md @@ -0,0 +1,31 @@ +
+ +# 13. Using GHC interactively + +GHCi is the interactive GHC environment, a.k.a. the REPL. You *could* access it +with command: + +~~~text +stack exec ghci +~~~ + +But that will not load up locally written modules for access. For that, use the +`stack ghci` or `stack repl` commands, which are equivalent. + +## The `stack ghci` or `stack repl` command + +To then load +modules from your project in GHCi, use the `:module` command (`:m` for short) +followed by the module name. + +!!! note + + If you have added packages to your project please make sure to mark them as + extra-deps for faster and reliable usage of `stack ghci`. Otherwise GHCi may + have trouble due to conflicts of compilation flags or having to + unnecessarily interpret too many modules. See Stack's project-level + [configuration](../configure/yaml/project.md#extra-deps) to learn how to + configure a package as an extra-dep. + +For further information, see the [REPL environment](../commands/ghci_command.md) +documentation. diff --git a/doc/yaml_configuration.md b/doc/yaml_configuration.md deleted file mode 100644 index 067b6f48a5..0000000000 --- a/doc/yaml_configuration.md +++ /dev/null @@ -1,1191 +0,0 @@ -
- -# YAML Configuration - -This page is intended to fully document all configuration options available in -the stack.yaml file. Note that this page is likely to be both *incomplete* and -sometimes *inaccurate*. If you see such cases, please update the page, and if -you're not sure how, open an issue labeled "question". - -The stack.yaml configuration options break down into [project-specific](#project-specific-config) options in: - -- `/stack.yaml` - -and [non-project-specific](#non-project-specific-config) options in: - -- `/etc/stack/config.yaml` -- for system global non-project default options -- `~/.stack/config.yaml` -- for user non-project default options -- The project file itself may also contain non-project specific options - -*Note:* When stack is invoked outside a stack project it will source project -specific options from `~/.stack/global-project/stack.yaml`. When stack is -invoked inside a stack project, only options from `/stack.yaml` are -used, and `~/.stack/global-project/stack.yaml` is ignored. - -*Note 2:* A common source of confusion is the distinction between configuration -in a `stack.yaml` file versus a cabal file. If you're trying to understand this -breakdown, see [stack vs cabal config](stack_yaml_vs_cabal_package_file.md). - -## Project-specific config - -Project-specific options are only valid in the `stack.yaml` file local to a -project, not in the user or global config files. - -> Note: We define **project** to mean a directory that contains a `stack.yaml` -> file, which specifies how to build a set of packages. We define **package** to -> be a package with a `.cabal` file or Hpack `package.yaml` file. - -In your project-specific options, you specify both **which local packages** to -build and **which dependencies to use** when building these packages. Unlike the -user's local packages, these dependencies aren't built by default. They only get -built when needed. 
- -Shadowing semantics, described -[here](http://docs.haskellstack.org/en/stable/architecture/#shadowing), are -applied to your configuration. So, if you add a package to your `packages` list, -it will be used even if you're using a snapshot that specifies a particular -version. Similarly, `extra-deps` will shadow the version specified in the -resolver. - -### resolver - -> Note: Starting with **Stack 2.0**, `snapshot` is accepted as a synonym for `resolver`. Only one of these fields is permitted, not both. - -Specifies which snapshot is to be used for this project. A snapshot -defines a GHC version, a number of packages available for -installation, and various settings like build flags. It is called a -resolver since a snapshot states how dependencies are resolved. There -are currently four resolver types: - -* LTS Haskell snapshots, e.g. `resolver: lts-2.14` -* Stackage Nightly snapshot, e.g. `resolver: nightly-2015-06-16` -* No snapshot, just use packages shipped with the compiler - * For GHC this looks like `resolver: ghc-7.10.2` -* Custom snapshot, via a URL or relative file path. (See [pantry docs](pantry.md) for more information.) - -Each of these resolvers will also determine what constraints are placed on the -compiler version. See the [compiler-check](#compiler-check) option for some -additional control over compiler version. - -Since Stack 1.11, the resolver field corresponds to a Pantry snapshot -location. See [the docs on pantry](pantry.md) for more information. - -### packages - -_NOTE_ Beginning with Stack 1.11, Stack has moved over to Pantry for -managing extra-deps, and has removed some legacy syntax for specifying -dependencies in `packages`. See some conversion notes below. - -A list of packages that are part of your local project. These are -specified via paths to local directories. The paths are considered -relative to the directory containing the `stack.yaml` file. 
For -example, if your `stack.yaml` is located at `/foo/bar/stack.yaml`, and -you have: - -```yaml -packages: -- hello -- there/world -``` - -Your configuration means "I have packages in `/foo/bar/hello` and -`/foo/bar/there/world`. - -If these packages should be treated as dependencies instead, specify -them in `extra-deps`, described below. - -The `packages` field is _optional_. If omitted, it is treated as: - -```yaml -packages: -- . -``` - -Each package directory specified must have a valid cabal file or hpack -`package.yaml` file present. Note that the subdirectories of the -directory are not searched for cabal files. Subdirectories will have -to be specified as independent items in the list of packages. - -Meaning that your project has exactly one package, and it is located -in the current directory. - -Project packages are different from snapshot dependencies (via -`resolver`) and extra dependencies (via `extra-deps`) in multiple -ways, e.g.: - -* Project packages will be built by default with a `stack build` - without specific targets. Dependencies will only be built if - they are depended upon. -* Test suites and benchmarks may be run for project packages. They are - never run for extra dependencies. - -__Legacy syntax__ Prior to Stack 1.11, it was possible to specify -dependencies in your `packages` configuration value as well. This -support has been removed to simplify the file format. Instead, these -values should be moved to `extra-deps`. As a concrete example, you -would convert: - -```yaml -packages: -- . -- location: - git: https://github.com/bitemyapp/esqueleto.git - commit: 08c9b4cdf977d5bcd1baba046a007940c1940758 - extra-dep: true -- location: - git: https://github.com/yesodweb/wai.git - commit: 6bf765e000c6fd14e09ebdea6c4c5b1510ff5376 - subdirs: - - wai-extra - extra-dep: true - -extra-deps: - - streaming-commons-0.2.0.0 - - time-1.9.1 - - yesod-colonnade-1.3.0.1 - - yesod-elements-1.1 -``` - -into - -```yaml -packages: -- . 
- -extra-deps: - - streaming-commons-0.2.0.0 - - time-1.9.1 - - yesod-colonnade-1.3.0.1 - - yesod-elements-1.1 - - git: https://github.com/bitemyapp/esqueleto.git - commit: 08c9b4cdf977d5bcd1baba046a007940c1940758 - - git: https://github.com/yesodweb/wai.git - commit: 6bf765e000c6fd14e09ebdea6c4c5b1510ff5376 - subdirs: - - wai-extra -``` - -And, in fact, the `packages` value could be left off entirely since -it's using the default value. - -### extra-deps - -This field allows you to specify extra dependencies on top of what is -defined in your snapshot (specified in the `resolver` field mentioned -above). These dependencies may either come from a local file path or a -Pantry package location. - -For the local file path case, the same relative path rules as apply to -`packages` apply. - -Pantry package locations allow you to include dependencies from three -different kinds of sources: - -* Hackage -* Archives (tarballs or zip files, either local or over HTTP(S)) -* Git or Mercurial repositories - -Here's an example using all of the above: - -```yaml -extra-deps: -- vendor/hashable -- streaming-commons-0.2.0.0 -- time-1.9.1 -- yesod-colonnade-1.3.0.1 -- yesod-elements-1.1 -- git: https://github.com/bitemyapp/esqueleto.git - commit: 08c9b4cdf977d5bcd1baba046a007940c1940758 -- url: https://github.com/yesodweb/wai/archive/6bf765e000c6fd14e09ebdea6c4c5b1510ff5376.tar.gz - subdirs: - - wai-extra -- github: snoyberg/conduit - commit: 2e3e41de93821bcfe8ec6210aeca21be3f2087bf - subdirs: - - network-conduit-tls -``` - -If no `extra-deps` value is provided, it defaults to an empty list, -e.g.: - -```yaml -extra-deps: [] -``` - -For more information on the format for specifying dependencies, please -see [the Pantry docs](pantry.md). - -### flags - -Flags can be set for each package separately, e.g. 
- -```yaml -flags: - package-name: - flag-name: true -``` - -If a specified flag is different than the one specified for a snapshot package, -then the snapshot package will automatically be promoted to be an extra-dep. - -### drop-packages - -Packages which, when present in the snapshot specified in `resolver`, -should not be included in our package. This can be used for a few -different purposes, e.g.: - -* Ensure that packages you don't want used in your project cannot be - used in a `package.yaml` file (e.g., for license reasons) -* Prevent overriding of a global package like `Cabal`. For more - information, see - [stackage#4425](https://github.com/commercialhaskell/stackage/issues/4425) -* When using a custom GHC build, avoid incompatible packages (see - [this - comment](https://github.com/commercialhaskell/stack/pull/4655#issuecomment-477954429)). - -```yaml -drop-packages: -- Cabal -- buggy-package -- package-with-unacceptable-license -``` - -Since Stack 2.0 - -### user-message - -A user-message is inserted by `stack init` when it omits packages or adds -external dependencies. For example: - -```yaml -user-message: ! 'Warning: Some packages were found to be incompatible with the resolver - and have been left commented out in the packages section. - - Warning: Specified resolver could not satisfy all dependencies. Some external packages - have been added as dependencies. - - You can omit this message by removing it from stack.yaml - -' -``` - -This messages is displayed every time the config is loaded by stack and serves -as a reminder for the user to review the configuration and make any changes if -needed. The user can delete this message if the generated configuration is -acceptable. - -## Non-project-specific config - -Non-project config options may go in the global config (`/etc/stack/config.yaml`) or the user config (`~/.stack/config.yaml`). - -### docker - -See [Docker integration](docker_integration.md#configuration). 
- -### nix - -(since 0.1.10.0) - -See [Nix integration](nix_integration.md#configuration). - -### connection-count - -Integer indicating how many simultaneous downloads are allowed to happen - -Default: `8` - -### hide-th-loading - -Strip out the "Loading ..." lines from GHC build output, produced when using Template Haskell - -Default: `true` - -### local-bin-path - -Target directory for `stack install` and `stack build --copy-bins`. - -Default: `~/.local/bin` - -### package-indices - -Since Stack 1.11, this field may only be used to specify a single -package index, which must use the Hackage Security format. For the -motivation for this change, please see [issue #4137](https://github.com/commercialhaskell/stack/issues/4137). -Therefore, this field is most useful for providing an alternate -Hackage mirror either for: - -* Bypassing a firewall -* Faster download speeds - -The following is the default setting for this field: - -```yaml -package-indices: -- download-prefix: https://hackage.haskell.org/ - hackage-security: - keyids: - - 0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d - - 1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42 - - 280b10153a522681163658cb49f632cde3f38d768b736ddbc901d99a1a772833 - - 2a96b1889dc221c17296fcc2bb34b908ca9734376f0f361660200935916ef201 - - 2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3 - - 51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921 - - 772e9f4c7db33d251d5c6e357199c819e569d130857dc225549b40845ff0890d - - aa315286e6ad281ad61182235533c41e806e5a787e0b6d1e7eef3f09d137d2e9 - - fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0 - key-threshold: 3 # number of keys required - - # ignore expiration date, see https://github.com/commercialhaskell/stack/pull/4614 - ignore-expiry: true -``` - -If you provide a replacement index which does not mirror Hackage, it -is likely that you'll end up with significant breakage, such as most -snapshots failing to work. 
- -Note: since Stack v2.1.3, `ignore-expiry` was changed to `true` by -default. For more information on this change, see -[issue #4928](https://github.com/commercialhaskell/stack/issues/4928). - -### system-ghc - -Enables or disables using the GHC available on the PATH. (Make sure PATH is explicit, i.e., don't use ~.) -Useful to enable if you want to save the time, bandwidth or storage space needed to setup an isolated GHC. -Default is `false` unless the [Docker](docker_integration.md) or [Nix](nix_integration.md) integration is enabled. -In a Nix-enabled configuration, stack is incompatible with `system-ghc: false`. - -```yaml -# Turn on system GHC -system-ghc: true -``` - -### install-ghc - -Whether or not to automatically install GHC when necessary. Since -Stack 1.5.0, the default is `true`, which means Stack will not ask you -before downloading and installing GHC. - -### skip-ghc-check - -Should we skip the check to confirm that your system GHC version (on the PATH) -matches what your project expects? Default is `false`. - -### require-stack-version - -Require a version of stack within the specified range -([cabal-style](https://www.haskell.org/cabal/users-guide/developing-packages.html#build-information)) -to be used for this project. Example: `require-stack-version: "== 0.1.*"` - -Default: `"-any"` - -### arch/os - -Set the architecture and operating system for GHC, build directories, etc. Values are those recognized by Cabal, e.g.: - - arch: i386, x86_64 - os: windows, linux - -You likely only ever want to change the arch value. This can also be set via the command line. - -### extra-include-dirs/extra-lib-dirs - -A list of extra paths to be searched for header files and libraries, respectively. 
Paths should be absolute - -```yaml -extra-include-dirs: -- /opt/foo/include -extra-lib-dirs: -- /opt/foo/lib -``` - -Since these are system-dependent absolute paths, it is recommended that you -specify these in your `config.yaml` within the stack root (usually, `~/.stack` -or, on Windows, `%LOCALAPPDATA%\Programs\stack`). If you control the build -environment in your project's ``stack.yaml``, perhaps through docker or other -means, then it may well make sense to include these there as well. - - -### with-gcc - -Specify a path to gcc explicitly, rather than relying on the normal path resolution. - -```yaml -with-gcc: /usr/local/bin/gcc-5 -``` - -### with-hpack - -Use an Hpack executable, rather than using the bundled Hpack. - -```yaml -with-hpack: /usr/local/bin/hpack -``` - -### compiler-check - -(Since 0.1.4) - -Specifies how the compiler version in the resolver is matched against concrete versions. Valid values: - -* `match-minor`: make sure that the first three components match, but allow - patch-level differences. For example, 7.8.4.1 and 7.8.4.2 would both match - 7.8.4. This is useful to allow for custom patch levels of a compiler. This is - the default -* `match-exact`: the entire version number must match precisely -* `newer-minor`: the third component can be increased, e.g. if your resolver is - `ghc-7.10.1`, then 7.10.2 will also be allowed. This was the default up - through stack 0.1.3 - -### compiler - -(Since 0.1.7) - -Overrides the compiler version in the resolver. Note that the `compiler-check` -flag also applies to the version numbers. This uses the same syntax as compiler -resolvers like `ghc-8.6.5`. This can be used to override the -compiler for a Stackage snapshot, like this: - -```yaml -resolver: lts-14.20 -compiler: ghc-8.6.4 -compiler-check: match-exact -``` - -#### Building GHC from source (experimental) - -(Since 2.0) - -Stack supports building the GHC compiler from source. 
The version to build and -to use is defined by a Git commit ID and an Hadrian "flavour" (Hadrian is the -build system of GHC) with the following syntax: - -```yaml -compiler: ghc-git-COMMIT-FLAVOUR -``` - -In the following example the commit ID is "5be7ad..." and the flavour is -"quick": - -```yaml -compiler: ghc-git-5be7ad7861c8d39f60b7101fd8d8e816ff50353a-quick -``` - -By default the code is retrieved from the main GHC repository. If you want to -select another repository, set the "compiler-repository" option: - -```yaml -compiler-repository: git://my/ghc/repository -# default -# compiler-repository: https://gitlab.haskell.org/ghc/ghc.git -``` - -Note that Stack doesn't check the compiler version when it uses a compiler built -from source. Moreover it is assumed that the built compiler is recent enough as -Stack doesn't enable any known workaround to make older compilers work. - -Building the compiler can take a very long time (more than one hour). Hint: for -faster build times, use Hadrian flavours that disable documentation generation. - -#### Global packages - -The GHC compiler you build from sources may depend on unreleased versions of -some global packages (e.g. Cabal). It may be an issue if a package you try to -build with this compiler depends on such global packages because Stack may not -be able to find versions of those packages (on Hackage, etc.) that are -compatible with the compiler. - -The easiest way to deal with this issue is to drop the offending packages as -follows. Instead of using the packages specified in the resolver, the global -packages bundled with GHC will be used. - -```yaml -drop-packages: -- Cabal -- ... -``` - -Another way to deal with this issue is to add the relevant packages as -`extra-deps` built from source. 
To avoid mismatching versions, you can use -exactly the same commit id you used to build GHC as follows: - -``` -extra-deps: -- git: https://gitlab.haskell.org/ghc/ghc.git - commit: 5be7ad7861c8d39f60b7101fd8d8e816ff50353a - subdirs: - - libraries/Cabal/Cabal - - libraries/... -``` - -#### Bootstrapping compiler - -Building GHC from source requires a working GHC (known as the bootstrap -compiler). As we use a Stack based version of Hadrian (`hadrian/build.stack.sh` in -GHC sources), the bootstrap compiler is configured into `hadrian/stack.yaml` and -fully managed by Stack. - - -### ghc-options - -(Since 0.1.4) - -Allows specifying per-package and global GHC options: - -```yaml -ghc-options: - # All packages - "$locals": -Wall - "$targets": -Werror - "$everything": -O2 - some-package: -DSOME_CPP_FLAG -``` - -Since 1.6.0, setting a GHC options for a specific package will -automatically promote it to a local package (much like setting a -custom package flag). However, setting options via `$everything` on all flags -will not do so (see -[Github discussion](https://github.com/commercialhaskell/stack/issues/849#issuecomment-320892095) -for reasoning). This can lead to unpredictable behavior by affecting -your snapshot packages. - -The behavior of the `$locals`, `$targets`, and `$everything` special -keys mirrors the behavior for the -[`apply-ghc-options` setting](#apply-ghc-options), which affects -command line parameters. - -NOTE: Prior to version 1.6.0, the `$locals`, `$targets`, and -`$everything` keys were not supported. Instead, you could use `"*"` for -the behavior represented now by `$everything`. It is highly -recommended to switch to the new, more expressive, keys. - -### apply-ghc-options - -(Since 0.1.6) - -Which packages do ghc-options on the command line get applied to? 
Before 0.1.6, the default value was `targets` - -```yaml -apply-ghc-options: locals # all local packages, the default -# apply-ghc-options: targets # all local packages that are targets -# apply-ghc-options: everything # applied even to snapshot and extra-deps -``` - -Note that `everything` is a slightly dangerous value, as it can break invariants about your snapshot database. - -### rebuild-ghc-options - -(Since 0.1.6) - -Should we rebuild a package when its GHC options change? Before 0.1.6, this was -a non-configurable true. However, in most cases, the flag is used to affect -optimization levels and warning behavior, for which GHC itself doesn't actually -recompile the modules anyway. Therefore, the new behavior is to not recompile -on an options change, but this behavior can be changed back with the following: - -```yaml -rebuild-ghc-options: true -``` - -### configure-options - -Options which are passed to the configure step of the Cabal build process. -These can either be set by package name, or using the `$everything`, -`$targets`, and `$locals` special keys. These special keys have the same -meaning as in `ghc-options`. - -```yaml -configure-options: - $everything: - - --with-gcc - - /some/path - my-package: - - --another-flag -``` - -(Since 2.0) - -### ghc-variant - -(Since 0.1.5) - -Specify a variant binary distribution of GHC to use. Known values: - -* `standard`: This is the default, uses the standard GHC binary distribution -* `integersimple`: Use a GHC bindist that uses - [integer-simple instead of GMP](https://ghc.haskell.org/trac/ghc/wiki/ReplacingGMPNotes) -* any other value: Use a custom GHC bindist. You should specify - [setup-info](#setup-info) or [setup-info-locations](#setup-info-locations) - so `stack setup` knows where to download it, - or pass the `stack setup --ghc-bindist` argument on the command-line - -This option is incompatible with `system-ghc: true`. 
- -### ghc-build - -(Since 1.3.0) - -Specify a specialized architecture bindist to use. Normally this is -determined automatically, but you can override the autodetected value here. -Possible arguments include `standard`, `gmp4`, `tinfo6`, and `nopie`. - -### setup-info-locations - -(Since 2.3) - -Possible usages of this config are: -1. Using `stack` offline or behind a firewall -2. Extending the tools known to `stack` such as cutting-edge versions of `ghc` or builds for custom linux distributions. - -The `setup-info` dictionary specifies locations for installation of Haskell-related tooling - it maps `(Tool, Platform, Version)` to the location where it can be obtained, such as `(GHC, Windows64, 8.6.5)` to the url hosting the `*.tar.xz` for GHC's installation. - -By default, it's obtained from [stack-setup-2.yaml](https://github.com/commercialhaskell/stackage-content/raw/master/stack/stack-setup-2.yaml). - -The `setup-info` dictionary is constructed in the following order: -1. `setup-info` yaml configuration - inline config -2. `--setup-info-yaml` command line arguments - urls or paths, multiple locations may be specified. -3. `setup-info-locations` yaml configuration - urls or paths - -The first location which specifies the location of a tool `(Tool, Platform, Version)` takes precedence, so one can extend the default tools with a fallback to the default `setup-info` location: - -```yaml -setup-info-locations: -- C:/stack-offline/my-stack-setup.yaml -- relative/inside/my/project/setup-info.yaml -- \\smbShare\stack\my-stack-setup.yaml -- http://stack-mirror.com/stack-setup.yaml -- https://github.com/commercialhaskell/stackage-content/raw/master/stack/stack-setup-2.yaml -``` - -The default `setup-info` location is included only if no locations in the `setup-info-locations` config or the `--setup-info-yaml` command line argument were specified. 
- -Thus the following will cause `stack setup` not to consult github for the `setup-info`: -```yaml -setup-info-locations: -- C:/stack-offline/my-stack-setup.yaml -``` - -```yaml -setup-info-locations: [] -``` - -Relative paths are resolved relative to the `stack.yaml` file - either in the local project or the global `stack.yaml` in the stack directory. - -Relative paths may also be used inside paths to tool installs - such as for ghc or 7z, which allows vendoring the tools inside a monorepo. -For example: - -Directory structure: -``` -- src/ -- installs/ - - my-stack-setup.yaml - - 7z.exe - - 7z.dll - - ghc-8.2.2.tar.xz -- stack.yaml -``` - -In the project's `stack.yaml`: -```yaml -setup-info-locations: -- installs/my-stack-setup.yaml -``` - -In `installs/my-stack-setup.yaml`: -```yaml -sevenzexe-info: - url: "installs/7z.exe" - -sevenzdll-info: - url: "installs/7z.dll" - -ghc: - windows64: - 8.2.2: - url: "installs/ghc-8.2.2.tar.xz" -``` - -### setup-info - -(Since 0.1.5) - -Allows augmenting from where tools like GHC and msys2 (on Windows) are -downloaded. Most useful for specifying locations of custom GHC binary -distributions (for use with the [ghc-variant](#ghc-variant) option). - -The format of this field is the same as in the default [stack-setup-2.yaml](https://github.com/commercialhaskell/stackage-content/raw/master/stack/stack-setup-2.yaml): - -```yaml -setup-info: - ghc: - windows32-custom-foo: - 7.10.2: - url: "https://example.com/ghc-7.10.2-i386-unknown-mingw32-foo.tar.xz" -``` - -This configuration **adds** the specified setup info metadata to the default; -Specifying this config **does not** prevent the default `stack-setup-2.yaml` from being consulted as a fallback. - -If you need to **replace** the default setup-info, add the following: - -```yaml -setup-info-locations: [] -``` - -### pvp-bounds - -(Since 0.1.5) - -__NOTE__ As of Stack 1.6.0, this feature does not reliably work, due -to issues with the Cabal library's printer. 
Stack will generate a -warning when a lossy conversion occurs, in which case you may need to -disable this setting. See -[#3550](https://github.com/commercialhaskell/stack/issues/3550) for -more information. - -When using the `sdist` and `upload` commands, this setting determines whether -the cabal file's dependencies should be modified to reflect PVP lower and upper -bounds. Values are `none` (unchanged), `upper` (add upper bounds), `lower` (add -lower bounds), and `both` (add upper and lower bounds). The algorithm it follows -is: - -* If an upper or lower bound already exists on a dependency, it's left alone -* When adding a lower bound, we look at the current version specified by - stack.yaml, and set it as the lower bound (e.g., `foo >= 1.2.3`) -* When adding an upper bound, we require less than the next major version - (e.g., `foo < 1.3`) - -```yaml -pvp-bounds: none -``` - -For more information, see [the announcement blog post](https://www.fpcomplete.com/blog/2015/09/stack-pvp). - -__NOTE__ Since Stack 1.5.0, each of the values listed above supports -adding `-revision` to the end of each value, e.g. `pvp-bounds: -both-revision`. This means that, when uploading to Hackage, Stack will -first upload your tarball with an unmodified `.cabal` file, and then -upload a cabal file revision with the PVP bounds added. This can be -useful—especially combined with the -[Stackage no-revisions feature](http://www.snoyman.com/blog/2017/04/stackages-no-revisions-field)—as -a method to ensure PVP compliance without having to proactively fix -bounds issues for Stackage maintenance. - -### modify-code-page - -(Since 0.1.6) - -Modify the code page for UTF-8 output when running on Windows. Default behavior -is to modify. - -```yaml -modify-code-page: false -``` - -### allow-newer - -(Since 0.1.7) - -Ignore version bounds in .cabal files. Default is false. - -```yaml -allow-newer: true -``` - -Note that this also ignores lower bounds. 
The name "allow-newer" is chosen to -match the commonly used cabal option. - -### allow-different-user - -(Since 1.0.1) - -Allow users other than the owner of the stack root directory (typically `~/.stack`) -to use the stack installation. The default is `false`. POSIX systems only. - -```yaml -allow-different-user: true -``` - -The intention of this option is to prevent file permission problems, for example -as the result of a `stack` command executed under `sudo`. - -The option is automatically enabled when `stack` is re-spawned in a Docker process. - -### build - -(Since 1.1.0) - -Allows setting build options which are usually specified on the CLI. Here are -the settings with their defaults: - -```yaml -build: - library-profiling: false - executable-profiling: false - copy-bins: false - prefetch: false - keep-going: false - keep-tmp-files: false - - # NOTE: global usage of haddock can cause build failures when documentation is - # incorrectly formatted. This could also affect scripts which use stack. - haddock: false - haddock-arguments: - haddock-args: [] # Additional arguments passed to haddock, --haddock-arguments - # haddock-args: - # - "--css=/home/user/my-css" - open-haddocks: false # --open - haddock-deps: false # if unspecified, defaults to true if haddock is set - haddock-internal: false - - # These are inadvisable to use in your global configuration, as they make the - # stack build CLI behave quite differently. - test: false - test-arguments: - rerun-tests: true # Rerun successful tests - additional-args: [] # --test-arguments - # additional-args: - # - "--fail-fast" - coverage: false - no-run-tests: false - bench: false - benchmark-opts: - benchmark-arguments: "" - # benchmark-arguments: "--csv bench.csv" - no-run-benchmarks: false - force-dirty: false - reconfigure: false - cabal-verbose: false - split-objs: false - - # Since 1.8. 
Starting with 2.0, the default is true - interleaved-output: true - - # Since 1.10 - ddump-dir: "" -``` - -The meanings of these settings correspond directly with the CLI flags of the -same name. See the [build command docs](build_command.md) and the -[users guide](GUIDE.md#the-build-command) for more info. - -### dump-logs - -(Since 1.3.0) - -Control which log output from local non-dependency packages to print to the -console. By default, Stack will only do this when building a single target -package or if the log contains warnings, to avoid generating unnecessarily -verbose output. - -```yaml -dump-logs: none # don't dump logs even if they contain warnings -dump-logs: warning # default: dump logs that contain warnings -dump-logs: all # dump all logs for local non-dependency packages -``` - -### templates - -Templates used with `stack new` have a number of parameters that affect the -generated code. These can be set for all new projects you create. The result of -them can be observed in the generated LICENSE and cabal files. The value for all -of these parameters must be strings. - -The parameters are: `author-email`, `author-name`, `category`, `copyright`, `year` and `github-username`. - -* _author-email_ - sets the `maintainer` property in cabal -* _author-name_ - sets the `author` property in cabal and the name used in - LICENSE -* _category_ - sets the `category` property in cabal. This is used in Hackage. - For examples of categories see [Packages by - category](https://hackage.haskell.org/packages/). It makes sense for - `category` to be set on a per project basis because it is uncommon for all - projects a user creates to belong to the same category. The category can be - set per project by passing `-p "category:value"` to the `stack new` command. -* _copyright_ - sets the `copyright` property in cabal. It is typically the - name of the holder of the copyright on the package and the year(s) from which - copyright is claimed. 
For example: `Copyright (c) 2006-2007 Joe Bloggs` -* _year_ - if `copyright` is not specified, `year` and `author-name` are used - to generate the copyright property in cabal. If `year` is not specified, it - defaults to the current year. -* _github-username_ - used to generate `homepage` and `source-repository` in - cabal. For instance `github-username: myusername` and `stack new my-project new-template` - would result: - -```yaml -homepage: http://github.com/myusername/my-project#readme - -source-repository head - type: git - location: https://github.com/myusername/my-project -``` - -These properties can be set in `config.yaml` as follows: -```yaml -templates: - params: - author-name: Your Name - author-email: youremail@example.com - category: Your Projects Category - copyright: 'Copyright (c) 2020 Your Name' - github-username: yourusername -``` - -Additionally, `stack new` can automatically initialize source control repositories -in the directories it creates. Source control tools can be specified with the -`scm-init` option. At the moment, only `git` is supported. - -```yaml -templates: - scm-init: git -``` - -### save-hackage-creds - -Controls whether, when using `stack upload`, the user's Hackage -username and password are stored in a local file. Default: true. - -```yaml -save-hackage-creds: true -``` - -Since 1.5.0 - -### hackage-base-url - -Sets the address of the Hackage server to upload the package to. Default is -`https://hackage.haskell.org/`. - -```yaml -hackage-base-url: https://hackage.example.com/ -``` - -Since 1.9.1 - -### ignore-revision-mismatch - -This flag was introduced in Stack 1.6, and removed in Stack 1.11 with -the move to Pantry. You will receive a warning if this configuration -value is set. - -### urls - -Customize the URLs where `stack` looks for snapshot build plans. 
- -The default configuration is - -```yaml -urls: - latest-snapshot: https://www.stackage.org/download/snapshots.json -``` - -### jobs - -Specifies how many build tasks should be run in parallel. This can be overloaded -on the commandline via `-jN`, for example `-j2`. The default is to use the -number of processors reported by your CPU. One usage for this might be to avoid -running out of memory by setting it to 1, like this: - -```yaml -jobs: 1 -``` - -### work-dir - -Specifies relative path of work directory (default is `.stack-work`). This can -also be specified by env var or cli flag, in particular, the earlier items in -this list take precedence: - -1. `--work-dir DIR` passed on the commandline -2. `work-dir` in stack.yaml -3. `STACK_WORK` environment variable - -Since 0.1.10.0 - -### skip-msys - -Skips checking for and installing msys2 when stack is setting up the -environment. This is only useful on Windows machines, and usually doesn't make -sense in project configurations, just in `config.yaml`. Defaults to `false`, so -if this is used, it only really makes sense to use it like this: - -```yaml -skip-msys: true -``` - -Since 0.1.2.0 - -### concurrent-tests - -This option specifies whether test-suites should be executed concurrently with -each-other. The default for this is true, since this is usually fine and it -often means that tests can complete earlier. However, if some test-suites -require exclusive access to some resource, or require a great deal of CPU or -memory resources, then it makes sense to set this to `false` (the default is -`true`). - -```yaml -concurrent-tests: false -``` - -Since 0.1.2.0 - -### extra-path - -This option specifies additional directories to prepend to the PATH environment -variable. These will be used when resolving the location of executables, and -will also be visible in the `PATH` variable of processes run by stack. 
- -For example, to prepend `/path-to-some-dep/bin` to your PATH: - -```yaml -extra-path: -- /path-to-some-dep/bin -``` - -One thing to note is that other paths added by stack - things like the project's -bin dir and the compiler's bin dir - will take precedence over those specified -here (the automatic paths get prepended). - -Since 0.1.4.0 - -### local-programs-path - -This overrides the location of the programs directory, where tools like ghc and -msys get installed. - -On most systems, this defaults to a folder called `programs` -within the stack root directory. On Windows, if the `LOCALAPPDATA` environment -variable exists, then it defaults to `%LOCALAPPDATA%\Programs\stack`, which -follows Windows' conventions. - -__NOTE__: On Windows, if there is a space character in the `%LOCALAPPDATA%` path -(which may be the case if the relevant user account name and its corresponding -user profile path have a space) this may cause problems with building packages -that make use of the GNU project's `autoconf` package and `configure` shell -script files. That may be the case particularly if there is no corresponding -short name ('8 dot 3' name) for the folder in the path with the space (which may -be the case if '8 dot 3' names have been stripped or their creation not enabled -by default). If there are problems building, it will be necessary to override -the default location of stack's programs directory to specify an alternative -path that does not contain space characters. Examples of packages on Hackage -that make use of `configure` are `network` and `process`. - -Since 1.3.0 - -### default-template - -This option specifies which template to use with `stack new`, when none is -specified. The default is called `new-template`. The other templates are listed -in [the stack-templates repo](https://github.com/commercialhaskell/stack-templates/). - -### color - -This option specifies when to use color in output. 
The option is used as -`color: <WHEN>`, where `<WHEN>` is 'always', 'never', or 'auto'. On Windows -versions before Windows 10, for terminals that do not support color codes, the -default is 'never'; color may work on terminals that support color codes. - -The color use can also be set at the command line using the equivalent -`--color=<WHEN>` global option. Color use set at the command line takes -precedence over that set in a yaml configuration file. - -(The British English spelling (colour) is also accepted. In yaml configuration -files, the American spelling is the alternative that has priority.) - -### stack-colors - -Stack uses styles to format some of its output. The default styles do not work -well with every terminal theme. This option specifies stack's output styles, -allowing new styles to replace the defaults. The option is used as -`stack-colors: <STYLES>`, where `<STYLES>` is a colon-delimited sequence of -key=value, 'key' is a style name and 'value' is a semicolon-delimited list of -'ANSI' SGR (Select Graphic Rendition) control codes (in decimal). Use the -command `stack ls stack-colors --basic` to see the current sequence. - -The 'ANSI' standards refer to (1) standard ECMA-48 'Control Functions for Coded -Character Sets' (5th edition, 1991); (2) extensions in ITU-T Recommendation -(previously CCITT Recommendation) T.416 (03/93) 'Information Technology – Open -Document Architecture (ODA) and Interchange Format: Character Content -Architectures' (also published as ISO/IEC International Standard 8613-6); and -(3) further extensions used by 'XTerm', a terminal emulator for the X Window -System. The 'ANSI' SGR codes are described in a -[Wikipedia article](http://en.wikipedia.org/wiki/ANSI_escape_code) -and those codes supported on current versions of Windows in -[Microsoft's documentation](https://docs.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences). 
- -For example, users of the popular -[Solarized Dark](https://ethanschoonover.com/solarized/) -terminal theme might wish to set the styles as follows: - -```yaml -stack-colors: error=31:good=32:shell=35:dir=34:recommendation=32:target=95:module=35:package-component=95:secondary=92:highlight=32 -``` -The styles can also be set at the command line using the equivalent `--stack-colors=` -global option. Styles set at the command line take precedence over those set in -a yaml configuration file. (In respect of styles used in verbose output, some of -that output occurs before the configuration file is processed.) - -(The British English spelling (colour) is also accepted. In yaml configuration -files, the American spelling is the alternative that has priority.) - -### hide-source-paths - -Stack will use the `-fhide-source-paths` option by default for GHC >= 8.2, unless this -option is set to `false` as in the following example: - -```yaml -hide-source-paths: false -``` - -Build output when enabled: - -``` -... -[1 of 2] Compiling Lib -[2 of 2] Compiling Paths_test_pr -... -``` - -Build output when disabled: - -``` -... -[1 of 2] Compiling Lib ( src/Lib.hs, .stack-work/dist/x86_64-linux-tinfo6/Cabal-2.4.0.1/build/Lib.o ) -... -``` - -### recommend-stack-upgrade - -When Stack notices that a new version of Stack is available, should it notify the user? - -```yaml -recommend-stack-upgrade: true -``` - -Since 2.0 - -### stack-developer-mode - -Turns on a mode where some messages are printed at WARN level instead of DEBUG level, especially useful for developers of Stack itself. For official distributed binaries, this is set to `false` by default. When you build from source, it is set to `true` by default. - -```yaml -stack-developer-mode: false -``` - -Since 2.3.3 - -### snapshot-location-base -Sets the base location of LTS Haskell/Stackage Nightly snapshots. 
Default is https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/ (as set in the `pantry` library). - -For example: -```yaml -snapshot-location-base: https://example.com/snapshots/location/ -``` -has the following effect: -* `lts-X.Y` expands to `https://example.com/snapshots/location/lts/X/Y.yaml` -* `nightly-YYYY-MM-DD` expands to `https://example.com/snapshots/location/nightly/YYYY/M/D.yaml` - -This field is convenient in setups that restrict access to GitHub, for instance closed corporate setups. In this setting, it is common for the development environment to have general access to the internet, but not for testing/building environments. To avoid the firewall, one can run a local snapshots mirror and then use a custom `snapshot-location-base` in the closed environments only. - - -Since 2.5.0 diff --git a/etc/dockerfiles/arm64.Dockerfile b/etc/dockerfiles/arm64.Dockerfile new file mode 100644 index 0000000000..adac83d85a --- /dev/null +++ b/etc/dockerfiles/arm64.Dockerfile @@ -0,0 +1,73 @@ +# This Dockerfile was previously used to build dynamically-linked Stack for +# Linux/Aarch64. It was used with the following step in the GitHub Actions CI: +# +# run: | +# set -ex +# docker build . -f etc/dockerfiles/arm64.Dockerfile -t stack --build-arg USERID=$(id -u) --build-arg GROUPID=$(id -g) +# rm -rf _release +# mkdir -p _release +# docker run --rm -v $(pwd):/src -w /src stack bash -c "/home/stack/release build" +# +# However, after Stack 2.11.1, it was replaced with a step that makes use of +# https://gitlab.com/benz0li/ghc-musl. +# +# ------------------------------------------------------------------------------ +# +# Stack is built with GHC 9.2.8. GHC 9.2.8 for Linux/AArch64 says it was made on +# a Debian 10 system and requires GMP 6.1. Debian 10 is codename 'buster' and +# includes libc6 (2.28-10+deb10u1). +FROM debian:buster + +# pkg-config added to `apt-get install` list because it is required by package +# digest-0.0.1.7. 
+RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y \ + curl build-essential curl libffi-dev libffi6 libgmp-dev libgmp10 \ + libncurses-dev libncurses5 libtinfo5 libnuma-dev xz-utils g++ gcc \ + libc6-dev libffi-dev libgmp-dev make zlib1g-dev git gnupg netbase pkg-config + +# This is added in an attempt to avoid the failure: +# : commitAndReleaseBuffer: invalid argument (invalid character) +ENV LANG="C.UTF-8" + +RUN cd /tmp && \ + curl -L https://github.com/llvm/llvm-project/releases/download/llvmorg-9.0.1/clang+llvm-9.0.1-aarch64-linux-gnu.tar.xz --output /tmp/llvm.tar.xz && \ + unxz /tmp/llvm.tar.xz && \ + tar xfv /tmp/llvm.tar --strip-components 1 -C /usr && \ + rm /tmp/llvm.tar + +# Stack's *.tar archive contains a directory that contains the 'stack' +# executable, hence the use of tar's '--strip-components 1' option. +RUN curl -L https://github.com/commercialhaskell/stack/releases/download/v2.11.1/stack-2.11.1-linux-aarch64.tar.gz --output /tmp/stack.tar.gz && \ + tar xfv /tmp/stack.tar.gz -C /usr/local/bin --strip-components 1 && \ + rm /tmp/stack.tar.gz + +# RUN curl -sSL https://github.com/commercialhaskell/stack/releases/download/v2.7.1/stack-2.7.1-linux-aarch64.bin > /usr/local/bin/stack && \ +RUN chmod +x /usr/local/bin/stack + +ARG USERID +ARG GROUPID + +RUN useradd --uid $USERID stack +RUN mkdir -p /home/stack +RUN chown -R stack /home/stack +RUN usermod -aG $GROUPID stack + +USER stack +WORKDIR /home/stack + +COPY stack.yaml package.yaml /src/ + +USER root + +RUN chown -R stack /src + +USER stack +WORKDIR /src + +RUN stack build --only-snapshot --test +RUN stack build shake + +COPY etc/scripts/release.hs /src + +RUN stack script --resolver lts-20.26 --compile /src/release.hs -- --version +RUN cp /src/release /home/stack diff --git a/etc/scripts/LICENSE b/etc/scripts/LICENSE index 3b89372cbb..7d6acbccad 100644 --- a/etc/scripts/LICENSE +++ b/etc/scripts/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2015-2020, Stack contributors +Copyright 
(c) 2015-2025, Stack contributors All rights reserved. diff --git a/etc/scripts/README.md b/etc/scripts/README.md index 710431ea4e..bfa738a9f2 100644 --- a/etc/scripts/README.md +++ b/etc/scripts/README.md @@ -14,7 +14,8 @@ Prerequisites These must be installed in the PATH to use the release tool: - stack -- git (for Windows, [msysgit](https://msysgit.github.io) is recommended). +- git (for Windows, [Git for Windows](https://gitforwindows.org/) is + recommended). Invocation ---------- @@ -30,7 +31,13 @@ addition, the following options are accepted: * `--allow-dirty`: by default, the `check` rule aborts if the working tree is dirty, but this will allow it to continue. - uploaded to. +* `--arch=ARCHITECTURE`: Architecture to build (e.g. 'i386' or 'x86_64'). +* `--binary-variant=SUFFIX`: Extra suffix to add to binary executable archive + filename. +* `--no-test-haddocks`: Disable testing building Haddock documentation. +* `--alpine`: Build a statically linked binary using an Alpine Docker image. +* `--build-args="ARG1 ARG2 ..."`: Additional arguments to pass to `stack build`. +* `--certificate-name=NAME`: Certificate name for code signing on Windows. 
### Targets diff --git a/etc/scripts/build-stack-installer.hs b/etc/scripts/build-stack-installer.hs index e9b1227a9d..6e601cf01e 100644 --- a/etc/scripts/build-stack-installer.hs +++ b/etc/scripts/build-stack-installer.hs @@ -1,7 +1,6 @@ {- stack script - --resolver lts-14.27 - --install-ghc - --package nsis + --snapshot lts-24.37 + --package nsis -} {-# LANGUAGE OverloadedStrings #-} @@ -16,7 +15,7 @@ import Development.NSIS.Plugins.EnvVarUpdate main :: IO () main = do - [srcPath, execPath, nsiPath] <- getArgs + [srcPath, execPath, nsiPath, stackVersionStr] <- getArgs writeFile (fromString nsiPath) $ nsis $ do _ <- constantStr "Name" "Haskell Stack" @@ -43,6 +42,7 @@ main = do -- Write the uninstall keys for Windows writeRegStr HKCU "Software/Microsoft/Windows/CurrentVersion/Uninstall/$Name" "DisplayName" "$Name" + writeRegStr HKCU "Software/Microsoft/Windows/CurrentVersion/Uninstall/$Name" "DisplayVersion" (str stackVersionStr) writeRegStr HKCU "Software/Microsoft/Windows/CurrentVersion/Uninstall/$Name" "UninstallString" "\"$INSTDIR/uninstall-stack.exe\"" writeRegDWORD HKCU "Software/Microsoft/Windows/CurrentVersion/Uninstall/$Name" "NoModify" 1 writeRegDWORD HKCU "Software/Microsoft/Windows/CurrentVersion/Uninstall/$Name" "NoRepair" 1 @@ -52,7 +52,7 @@ main = do [ Description "Add installation directory to user %PATH% to allow running Stack in the console." ] $ do setEnvVarPrepend HKCU "PATH" "$INSTDIR" - + section "Set %STACK_ROOT% to recommended default" [ Description "Set %STACK_ROOT% to C:\\sr to workaround issues with long paths." ] $ do @@ -79,14 +79,14 @@ main = do [ Description "Remove setting of %STACK_ROOT% to C:\\sr." ] $ do deleteEnvVar HKCU "STACK_ROOT" - - section "un.Compilers installed by stack" + + section "un.Compilers installed by Stack" [ Unselected , Description "Remove %LOCALAPPDATA%/Programs/stack, which contains compilers that have been installed by Stack." 
] $ do rmdir [Recursive] "$LOCALAPPDATA/Programs/stack" - section "un.stack snapshots and configuration" + section "un.Stack snapshots and configuration" [ Unselected , Description "Remove %APPDATA%/stack, which contains the user-defined global stack.yaml and the snapshot/compilation cache." ] $ do diff --git a/etc/scripts/get-stack.sh b/etc/scripts/get-stack.sh index 1923cca2ad..319c74d514 100755 --- a/etc/scripts/get-stack.sh +++ b/etc/scripts/get-stack.sh @@ -69,14 +69,19 @@ post_install_separator() { info "" } -# determines the the CPU's instruction set +# determines the CPU's instruction set architecture (ISA) get_isa() { - if arch | grep -Eq 'armv[78]l?' ; then + if uname -m | grep -Eq 'armv[78]l?' ; then echo arm - elif arch | grep -q aarch64 ; then + elif uname -m | grep -q aarch64 ; then echo aarch64 - else + # uname -m returns arm64 on macOS/AArch64 + elif uname -m | grep -q arm64 ; then + echo aarch64 + elif uname -m | grep -q x86 ; then echo x86 + else + die "$(uname -m) is not a supported instruction set" fi } @@ -85,11 +90,10 @@ get_isa() { # test "$(get_isa)" = arm # } # -# # exits with code 0 if aarch64 ISA is detected as described above -# is_aarch64() { -# test "$(get_isa)" = aarch64 -# } - +# exits with code 0 if aarch64 ISA is detected as described above +is_aarch64() { + test "$(get_isa)" = aarch64 +} # determines 64- or 32-bit architecture # if getconf is available, it will return the arch of the OS, as desired @@ -172,16 +176,16 @@ do_ubuntu_install() { # install_dependencies # print_bindist_notice # install_arm_linux_binary - #elif is_aarch64 ; then - # install_dependencies - # print_bindist_notice - # install_aarch64_linux_binary if is_x86_64 ; then install_dependencies print_bindist_notice install_x86_64_linux_binary + elif is_aarch64 ; then + install_dependencies + print_bindist_notice + install_aarch64_linux_binary else - die "Sorry, currently only 64-bit (x86_64) Linux binary is available." 
+ die "Sorry, currently only 64-bit (x86_64 or aarch64) Linux binary is available." #install_dependencies #print_bindist_notice #install_i386_linux_binary @@ -203,16 +207,16 @@ do_debian_install() { # install_dependencies # print_bindist_notice # install_arm_linux_binary - #elif is_aarch64 ; then - # install_dependencies - # print_bindist_notice - # install_aarch64_linux_binary if is_x86_64 ; then install_dependencies print_bindist_notice install_x86_64_linux_binary + elif is_aarch64 ; then + install_dependencies + print_bindist_notice + install_aarch64_linux_binary else - die "Sorry, currently only 64-bit (x86_64) Linux binary is available." + die "Sorry, currently only 64-bit (x86_64 or aarch64) Linux binary is available." # install_dependencies # print_bindist_notice # install_i386_linux_binary @@ -225,7 +229,7 @@ do_debian_install() { # and install the necessary dependencies explicitly. do_fedora_install() { install_dependencies() { - dnf_install_pkgs perl make automake gcc gmp-devel libffi zlib-devel xz tar git gnupg + dnf_install_pkgs perl make automake gcc gcc-c++ gmp-devel libffi zlib-devel xz tar git gnupg } if is_x86_64 ; then @@ -246,7 +250,7 @@ do_fedora_install() { # and install the necessary dependencies explicitly. do_centos_install() { install_dependencies() { - yum_install_pkgs perl make automake gcc gmp-devel libffi zlib xz tar git gnupg + yum_install_pkgs perl make automake gcc gcc-c++ gmp-devel libffi zlib xz tar git gnupg } if is_x86_64 ; then @@ -288,19 +292,25 @@ do_windows_install() { } # Attempts to install on macOS. -# If 'brew' exists, installs using Homebrew. Otherwise, installs -# the generic bindist. do_osx_install() { - info "Using generic bindist..." - info "" - install_64bit_osx_binary - info "NOTE: You may need to run 'xcode-select --install' and/or" - info " 'open /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg'" - info " to set up the Xcode command-line tools, which Stack uses." 
- info "" + if is_x86_64 ; then + install_x86_64_osx_binary + info "NOTE: You may need to run 'xcode-select --install' and/or" + info " 'open /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg'" + info " to set up the Xcode command-line tools, which Stack uses." + info "" + elif is_aarch64 ; then + install_aarch64_osx_binary + info "NOTE: You may need to run 'xcode-select --install' and/or" + info " 'open /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg'" + info " to set up the Xcode command-line tools, which Stack uses." + info "" + else + die "Sorry, currently only 64-bit (x86_64 or aarch64) macOS binary is available." + fi } -# # Attempts to insall on FreeBSD. Installs dependencies with +# # Attempts to install on FreeBSD. Installs dependencies with # # 'pkg install' and then downloads bindist. # do_freebsd_install() { # install_dependencies() { @@ -337,12 +347,12 @@ do_sloppy_install() { #if is_arm ; then # install_arm_linux_binary - #elif is_aarch64 ; then - # install_aarch64_linux_binary if is_x86_64 ; then install_x86_64_linux_binary + elif is_aarch64 ; then + install_aarch64_linux_binary else - die "Sorry, currently only 64-bit (x86_64) Linux binary is available." + die "Sorry, currently only 64-bit (x86_64 or aarch64) Linux binary is available." 
#install_i386_linux_binary fi info "Since this installer doesn't support your Linux distribution," @@ -576,11 +586,15 @@ install_x86_64_linux_binary() { install_from_bindist "linux-x86_64.tar.gz" } -#install_aarch64_linux_binary() { -# install_from_bindist "linux-aarch64.tar.gz" -#} +install_aarch64_linux_binary() { + install_from_bindist "linux-aarch64.tar.gz" +} + +install_aarch64_osx_binary() { + install_from_bindist "osx-aarch64.tar.gz" +} -install_64bit_osx_binary() { +install_x86_64_osx_binary() { install_from_bindist "osx-x86_64.tar.gz" } @@ -731,14 +745,91 @@ on_path() { echo ":$PATH:" | grep -q :"$1": } +# Check whether the script may be running in well-known CI environments +has_ci_environment() { + if [ -n "$CI" ]; then + # GitHub Actions, GitLab CI, Travis, CircleCI and Bitbucket Pipelines all + # set CI. + return 0 + elif [ -n "$TF_BUILD" ]; then + # Azure Pipelines sets TF_BUILD. + return 0 + elif [ -n "$JENKINS_HOME" ]; then + # Jenkins sets JENKINS_HOME. + return 0 + else + return 1 + fi +} + # Check whether ~/.local/bin is on the PATH, and print a warning if not. check_home_local_bin_on_path() { if ! on_path "$HOME_LOCAL_BIN" ; then - #TODO: offer to add it for the user (pull requests welcome!) info "WARNING: '$HOME_LOCAL_BIN' is not on your PATH." info " Stack will place the binaries it builds in '$HOME_LOCAL_BIN' so" info " for best results, please add it to the beginning of PATH in your profile." 
info "" + # detect which profile file to use, then print a message about updating it + if [ -n "$BASH_VERSION" ]; then + if [ -f "$HOME/.bash_profile" ]; then + profile_file="$HOME/.bash_profile" + else + profile_file="$HOME/.bashrc" + fi + elif [ -n "$ZSH_VERSION" ]; then + profile_file="$HOME/.zshrc" + elif [ -n "$FISH_VERSION" ]; then + profile_file="$HOME/.config/fish/config.fish" + elif [ -n "$PROFILE" ]; then + profile_file="$PROFILE" + elif [ -f "$HOME/.bash_profile" ]; then + profile_file="$HOME/.bash_profile" + elif [ -f "$HOME/.bashrc" ]; then + profile_file="$HOME/.bashrc" + elif [ -f "$HOME/.zshrc" ]; then + profile_file="$HOME/.zshrc" + elif [ -f "$HOME/.config/fish/config.fish" ]; then + profile_file="$HOME/.config/fish/config.fish" + elif [ -f "$HOME/.profile" ]; then + profile_file="$HOME/.profile" + else + info " (profile not found; please add it to your PATH manually)" + fi + + # print a message about updating profile (if found) + if [ -n "$profile_file" ]; then + info " You can do this by running the following command:" + info " echo 'export PATH=\"$HOME_LOCAL_BIN:\$PATH\"' >> \"$profile_file\"" + info " (You may need to restart your shell for this to take effect.)" + info "" + + # prompt to update it on their behalf, unless QUIET is set. + if [ -z "$QUIET" ] && ! has_ci_environment ; then + while true; do + info "Would you like this installer to add it to your PATH in '$profile_file'?" + info " (This will be done by adding export PATH=\"$HOME_LOCAL_BIN:\$PATH\" to it." + info " You may need to restart your shell for this to take effect.)" + info " [y] Yes, prepend [n] No (default)" + read -p "" yesno + # default to no. 
+ yesno=${yesno:-n} + case $yesno in + [Yy]* ) + echo "export PATH=\"$HOME_LOCAL_BIN:\$PATH\"" >> "$profile_file" + info "PATH updated in '$profile_file'" + break + ;; + [Nn]* ) + info "Not updating PATH in '$profile_file'" + break + ;; + * ) + info "Please answer 'y' or 'n'" + ;; + esac + done + fi + fi fi } diff --git a/etc/scripts/install-many-stack-releases.hs b/etc/scripts/install-many-stack-releases.hs index ab20ed9a5d..3b32ed463d 100755 --- a/etc/scripts/install-many-stack-releases.hs +++ b/etc/scripts/install-many-stack-releases.hs @@ -1,6 +1,6 @@ #!/usr/bin/env stack {- stack script - --resolver lts-14.27 + --snapshot lts-14.27 --package base --package directory --package filepath @@ -11,7 +11,7 @@ -- # Usage summary -- --- This is a hacky script to install many stack releases to a target +-- This is a hacky script to install many Stack releases to a target -- directory. By default it installs all releases `>= 1.0` (this can be -- changed by adjusting `minVersion` in the code). To use this on -- standard 64 bit linux systems, do the following: @@ -19,7 +19,7 @@ -- ./install-many-stack-releases.hs ~/.local/bin -- -- It will then populate this folder with binaries like `stack-1.6.3`, --- by downloading and unpacking stack releases to a temporary directory. +-- by downloading and unpacking Stack releases to a temporary directory. -- It will only download releases that do not already have binaries in -- the target directory. -- @@ -65,8 +65,8 @@ main = do (dir:_) -> do exists <- doesDirectoryExist dir unless exists $ fail $ unwords [show dir, "is not a directory or does not exist."] - return dir - _ -> fail "Expected the first CLI argument to be the target directory to place stack binaries." + pure dir + _ -> fail "Expected the first CLI argument to be the target directory to place Stack binaries." -- Parse platform from CLI args, with default. 
platform <- case tail args of [] -> do @@ -75,8 +75,8 @@ main = do , show defaultPlatform , "\n" ] - return defaultPlatform - [x] -> return x + pure defaultPlatform + [x] -> pure x _ -> fail "Expected at most two CLI argument, specifying target directory and target platform." -- Constants + common computation of urls / paths let minVersion = makeVersion [1, 0, 0] @@ -96,10 +96,10 @@ main = do -- Don't download super old versions let (newerVersions, olderVersions) = partition (>= minVersion) (mapMaybe readVersion releasesWithoutPrefix) - putStrLn "The following releases look like stack releases that are older than minVersion:" + putStrLn "The following releases look like Stack releases that are older than minVersion:" print (map showVersion olderVersions) putStrLn "" - putStrLn "The following releases look like recent enough stack releases:" + putStrLn "The following releases look like recent enough Stack releases:" print (map showVersion newerVersions) putStrLn "" -- Check which releases already exist. @@ -147,14 +147,14 @@ readProcessIsSuccess :: FilePath -> [String] -> IO Bool readProcessIsSuccess name args = do (ec, out, err) <- readProcessWithExitCode name args "" case ec of - ExitSuccess -> return True + ExitSuccess -> pure True ExitFailure code -> do putStrLn $ unwords $ ["Running", name, "with args", show args, "failed with code", show code] putStrLn "stdout:" putStrLn out putStrLn "stderr:" putStrLn err - return False + pure False -- Damn, base has some ugly stuff... 'parseVersion' yields multiple -- parses treating numeric portions as version tags.. WTF. 
Seems like diff --git a/etc/scripts/ls-deps-as-cabal-constraints.sh b/etc/scripts/ls-deps-as-cabal-constraints.sh new file mode 100755 index 0000000000..5f985dee21 --- /dev/null +++ b/etc/scripts/ls-deps-as-cabal-constraints.sh @@ -0,0 +1,8 @@ +#!/bin/sh +# +# Lists the dependencies as exact (==) Cabal constraints for use with a Cabal +# `cabal.project` file, and pipes to the specified location (typically a file): +# +set -eu + +stack ls dependencies cabal > "$1" diff --git a/etc/scripts/mirror-ghc-bindists-to-github.sh b/etc/scripts/mirror-ghc-bindists-to-github.sh index ed27cc437d..5a5b41361a 100755 --- a/etc/scripts/mirror-ghc-bindists-to-github.sh +++ b/etc/scripts/mirror-ghc-bindists-to-github.sh @@ -1,10 +1,10 @@ #!/usr/bin/env bash # # This script will download official GHC bindists from download.haskell.org and upload -# them to the Github Release that Stack uses. +# them to the GitHub Release that Stack uses. # # Prerequisites: -# - Create a Github release with tag `ghc-X.Y.Z-release` +# - Create a GitHub release with tag `ghc-X.Y.Z-release` # - Set GITHUB_AUTH_TOKEN to a token that has permission to upload assets to a Release # # To use: @@ -20,7 +20,7 @@ # https://downloads.haskell.org/~ghc/X.Y.Z/. 
# -GHCVER=8.10.2 +GHCVER=9.0.2 if [[ -z "$GITHUB_AUTH_TOKEN" ]]; then echo "$0: GITHUB_AUTH_TOKEN environment variable is required" >&2 @@ -31,7 +31,7 @@ UPLOAD_URL="$(curl --fail -sSLH "Authorization: token $GITHUB_AUTH_TOKEN" https: if [[ -z "$UPLOAD_URL" ]]; then set +x echo - echo "$0: Could not get upload URL from Github" >&2 + echo "$0: Could not get upload URL from GitHub" >&2 exit 1 fi echo 'ghc:' >stack-setup-$GHCVER.yaml @@ -59,15 +59,20 @@ mirror_ () { echo "$0: Unsupported conversion: ${srcext} to ${destext}" >&2 exit 1 fi - curl --fail -X POST --data-binary "@$destfn" -H "Content-type: application/octet-stream" -H "Authorization: token $GITHUB_AUTH_TOKEN" "$UPLOAD_URL?name=$destfn" + curl --fail -X POST --data-binary "@$destfn" -H "Content-type: application/octet-stream" -H "Authorization: token $GITHUB_AUTH_TOKEN" "$UPLOAD_URL?name=$destfn" |cat date >"$destfn.uploaded" fi while [[ $# -gt 0 ]]; do alias="$1" echo " $alias:" >>stack-setup-$GHCVER.yaml echo " $GHCVER:" >>stack-setup-$GHCVER.yaml - echo " # Mirrored from $srcurl" >>stack-setup-$GHCVER.yaml - echo " url: \"https://github.com/commercialhaskell/ghc/releases/download/ghc-$GHCVER-release/$destfn\"" >>stack-setup-$GHCVER.yaml + if [[ "$srcfn" == "$destfn" ]]; then + echo " url: \"$srcurl\"" >>stack-setup-$GHCVER.yaml + echo " #mirror-url: \"https://github.com/commercialhaskell/ghc/releases/download/ghc-$GHCVER-release/$destfn\"" >>stack-setup-$GHCVER.yaml + else + echo " # Converted to $destext from $srcurl" >>stack-setup-$GHCVER.yaml + echo " url: \"https://github.com/commercialhaskell/ghc/releases/download/ghc-$GHCVER-release/$destfn\"" >>stack-setup-$GHCVER.yaml + fi echo " content-length: $(stat --printf="%s" "$destfn" 2>/dev/null || stat -f%z "$destfn")" >>stack-setup-$GHCVER.yaml echo " sha1: $(shasum -a 1 $destfn |cut -d' ' -f1)" >>stack-setup-$GHCVER.yaml echo " sha256: $(shasum -a 256 $destfn |cut -d' ' -f1)" >>stack-setup-$GHCVER.yaml @@ -85,23 +90,20 @@ mirror () { mirror 
i386-deb9-linux "" xz xz linux32 mirror x86_64-deb9-linux "" xz xz linux64 -#mirror x86_64-centos67-linux "" xz xz linux64-gmp4 mirror x86_64-fedora27-linux "" xz xz linux64-tinfo6 -mirror x86_64-apple-darwin "" xz bz2 macosx -#mirror i386-unknown-mingw32 "" xz xz windows32 +mirror x86_64-apple-darwin "" bz2 bz2 macosx +mirror aarch64-apple-darwin "" bz2 bz2 macosx-aarch64 mirror x86_64-unknown-mingw32 "" xz xz windows64 -mirror x86_64-unknown-freebsd "" xz xz freebsd64-11 +# mirror armv7-deb10-linux "" xz xz linux-armv7 mirror aarch64-deb10-linux "" xz xz linux-aarch64 -mirror armv7-deb10-linux "" xz xz linux-armv7 -#mirror x86_64-alpine3.10-linux-integer-simple "" xz xz mirror_ https://github.com/redneb/ghc-alt-libc/releases/download/ghc-$GHCVER-musl i386-unknown-linux-musl "" xz xz linux32-musl mirror_ https://github.com/redneb/ghc-alt-libc/releases/download/ghc-$GHCVER-musl x86_64-unknown-linux-musl "" xz xz linux64-musl -mirror_ http://distcache.FreeBSD.org/local-distfiles/arrowd/stack-bindists/11 i386-portbld-freebsd "" xz xz freebsd32 -mirror_ http://distcache.FreeBSD.org/local-distfiles/arrowd/stack-bindists i386-portbld-freebsd "ino64" xz xz freebsd32-ino64 -mirror_ http://distcache.FreeBSD.org/local-distfiles/arrowd/stack-bindists/11 x86_64-portbld-freebsd "" xz xz freebsd64 -mirror_ http://distcache.FreeBSD.org/local-distfiles/arrowd/stack-bindists x86_64-portbld-freebsd "ino64" xz xz freebsd64-ino64 +# mirror_ http://distcache.FreeBSD.org/local-distfiles/arrowd/stack-bindists/11 i386-portbld-freebsd "" xz xz freebsd32 +# mirror_ http://distcache.FreeBSD.org/local-distfiles/arrowd/stack-bindists i386-portbld-freebsd "ino64" xz xz freebsd32-ino64 +# mirror_ http://distcache.FreeBSD.org/local-distfiles/arrowd/stack-bindists/11 x86_64-portbld-freebsd "" xz xz freebsd64 +# mirror_ http://distcache.FreeBSD.org/local-distfiles/arrowd/stack-bindists x86_64-portbld-freebsd "ino64" xz xz freebsd64-ino64 set +x echo diff --git a/etc/scripts/package.yaml 
b/etc/scripts/package.yaml new file mode 100644 index 0000000000..4043cb35bb --- /dev/null +++ b/etc/scripts/package.yaml @@ -0,0 +1,18 @@ +# # Added as part of the work around for: +# https://github.com/commercialhaskell/stack/issues/6711 +# +# On Windows only, for some unidentified reason, stack script can fail when +# using a pre-compiled package. This can affect the script +# build-stack-installer.hs. The work around is to build the package required for +# that script using the same Stack configuration as used by the script. + +# This dummy package description is required because the first place that Stack +# looks for a target is project packages and without a Cabal file, Stack will +# report error [S-636]. + +spec-version: 0.36.0 + +name: dummy-for-build-stack-installer-work-around +version: 0.0.0.0 + +library: {} diff --git a/etc/scripts/release-linux-aarch64.hs b/etc/scripts/release-linux-aarch64.hs new file mode 100644 index 0000000000..750d2dad1f --- /dev/null +++ b/etc/scripts/release-linux-aarch64.hs @@ -0,0 +1,288 @@ +{- stack script + --snapshot lts-24.37 + --ghc-options -Wall +-} + +-- As no packages are specified in the `stack script` command in the Stack +-- interpreter options comment, Stack deduces the required packages from the +-- module imports, being: Cabal, base, bytestring, directory, extra, process, +-- shake, tar, zip-archive and zlib. These are either GHC boot packages or in +-- the snapshot. Stackage LTS Haskell 24.37 does not include boot packages +-- directly. As GHC 9.10.3 boot packages Cabal and Cabal-syntax expose modules +-- with the same names, the language extension PackageImports is required. 
+ +-- EXPERIMENTAL + +-- This corresponds to release.hs but is intended to be run only on +-- macOS/AArch64 in order to create a statically-linked Linux/AArch64 version of +-- Stack: +-- +-- Install pre-requisites: +-- +-- > brew install docker +-- > brew install colima +-- +-- Start colima and run script: +-- +-- > colima start +-- > release-linux-aarch64.hs build --alpine --build-args --docker-stack-exe=image +-- +-- Could be incorporated into release.hs, in due course. + +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE PackageImports #-} +{-# LANGUAGE PatternSynonyms #-} + +import qualified Codec.Archive.Tar as Tar +import qualified Codec.Archive.Tar.Entry as TarEntry +import qualified Codec.Compression.GZip as GZip +import Control.Exception ( tryJust ) +import Control.Monad ( guard ) +import qualified Data.ByteString.Lazy.Char8 as L8 +import "extra" Data.List.Extra ( stripPrefix ) +import Development.Shake + ( Action, Change (..), pattern Chatty, Rules + , ShakeOptions (..), (%>), actionOnException, alwaysRerun + , cmd, copyFileChanged, getDirectoryFiles, liftIO, need + , phony, shakeArgsWith, shakeOptions, want + ) +import Development.Shake.FilePath + ( (<.>), (), exe, takeDirectory, toStandard ) +import "Cabal" Distribution.PackageDescription + ( PackageDescription (..), packageDescription, pkgVersion + ) +import Distribution.Simple.PackageDescription + ( readGenericPackageDescription ) +import "Cabal" Distribution.System + ( Arch, OS (..), Platform (..), buildPlatform ) +import "Cabal" Distribution.Text ( display ) +import Distribution.Verbosity ( silent ) +import System.Console.GetOpt ( ArgDescr (..), OptDescr (..) ) +import System.Directory ( removeFile ) +import System.IO.Error ( isDoesNotExistError ) +import System.Process ( readProcess ) + +-- | Entrypoint. 
+main :: IO () +main = shakeArgsWith + shakeOptions { shakeFiles = releaseDir + , shakeVerbosity = Chatty + , shakeChange = ChangeModtimeAndDigestInput + } + options $ + \flags args -> do + -- build the default value of type Global, with predefined constants + + -- 'stack build --dry-run' just ensures that 'stack.cabal' is generated from + -- 'package.yaml'. + _ <- readProcess "stack" ["build", "--dry-run"] "" + gStackPackageDescription <- + packageDescription <$> readGenericPackageDescription silent "stack.cabal" + + let Platform arch _ = buildPlatform + gArch = arch + gBuildArgs = ["--flag", "stack:-developer-mode"] + global = foldl + (flip id) + Global + { gStackPackageDescription + , gArch + , gBuildArgs + } + flags + + pure $ Just $ rules global args + +-- | Additional command-line options. +options :: [OptDescr (Either String (Global -> Global))] +options = + [ Option "" [alpineOptName] + ( NoArg $ Right $ \g -> + g { gBuildArgs = + gBuildArgs g + ++ [ "--flag=stack:static" + , "--docker" + , "--system-ghc" + , "--no-install-ghc" + ] + } + ) + "Build a statically linked binary using an Alpine Docker image." + , Option "" [buildArgsOptName] + ( ReqArg + (\v -> Right $ \g -> g{gBuildArgs = gBuildArgs g ++ words v}) + "\"ARG1 ARG2 ...\"" + ) + "Additional arguments to pass to 'stack build'." + ] + +-- | Shake rules. +rules :: Global -> [String] -> Rules () +rules global args = do + case args of + [] -> error "No wanted target(s) specified." 
+ _ -> want args + + phony buildPhony $ + mapM_ (\f -> need [releaseDir f]) binaryPkgFileNames + + releaseDir binaryPkgTarGzFileName %> \out -> do + stageFiles <- getBinaryPkgStageFiles + writeTarGz id out releaseStageDir stageFiles + + releaseStageDir binaryName stackExeFileName %> \out -> do + copyFileChanged (releaseDir binaryExeFileName) out + + releaseStageDir (binaryName ++ "//*") %> \out -> do + copyFileChanged + (dropDirectoryPrefix (releaseStageDir binaryName) out) + out + + releaseDir binaryExeFileName %> \out -> do + need [releaseBinDir binaryName stackExeFileName] + case platformOS of + OSX -> + cmd "strip -o" + [out, releaseBinDir binaryName stackExeFileName] + _ -> undefined + + releaseBinDir binaryName stackExeFileName %> \out -> do + alwaysRerun + actionOnException + ( cmd stackProgName + (stackArgs global) + ["--local-bin-path=" ++ takeDirectory out] + "install" + global.gBuildArgs + integrationTestFlagArgs + "--pedantic" + "stack" + ) + (tryJust (guard . isDoesNotExistError) (removeFile out)) + + where + integrationTestFlagArgs = + -- Explicitly enabling 'hide-dependency-versions' and 'supported-build' to + -- work around https://github.com/commercialhaskell/stack/issues/4960 + [ "--flag=stack:hide-dependency-versions" + , "--flag=stack:supported-build" + ] + + getBinaryPkgStageFiles = do + docFiles <- getDocFiles + let stageFiles = concat + [ [releaseStageDir binaryName stackExeFileName] + , map ((releaseStageDir binaryName) ) docFiles + ] + need stageFiles + pure stageFiles + + getDocFiles = getDirectoryFiles "." 
["LICENSE", "*.md", "doc//*.md"] + + buildPhony = "build" + + releaseStageDir = releaseDir "stage" + releaseBinDir = releaseDir "bin" + + binaryPkgFileNames = + case platformOS of + OSX -> [binaryExeFileName, binaryPkgTarGzFileName] + _ -> undefined + binaryPkgTarGzFileName = binaryName <.> tarGzExt + binaryExeFileName = binaryName ++ "-bin" <.> exe + binaryName = concat + [ stackProgName + , "-" + , stackVersionStr global + , "-" + , display targetPlatformOS + , "-" + , display global.gArch + ] + stackExeFileName = stackProgName <.> exe + + tarGzExt = tarExt <.> gzExt + gzExt = ".gz" + tarExt = ".tar" + +-- | Create a .tar.gz files from files. The paths should be absolute, and will +-- be made relative to the base directory in the tarball. +writeTarGz :: + (FilePath -> FilePath) + -> FilePath + -> FilePath + -> [FilePath] + -> Action () +writeTarGz fixPath out baseDir inputFiles = liftIO $ do + content <- Tar.pack baseDir $ map (dropDirectoryPrefix baseDir) inputFiles + L8.writeFile out $ GZip.compress $ Tar.write $ map fixPath' content + where + fixPath' :: Tar.Entry -> Tar.Entry + fixPath' entry = + case TarEntry.toTarPath isDir $ fixPath $ TarEntry.entryPath entry of + Left e -> error $ show (Tar.entryPath entry, e) + Right tarPath -> entry { TarEntry.entryTarPath = tarPath } + where + isDir = + case TarEntry.entryContent entry of + TarEntry.Directory -> True + _ -> False + +-- | Drops a directory prefix from a path. The prefix automatically has a path +-- separator character appended. Fails if the path does not begin with the +-- prefix. +dropDirectoryPrefix :: FilePath -> FilePath -> FilePath +dropDirectoryPrefix prefix path = + case stripPrefix (toStandard prefix ++ "/") (toStandard path) of + Nothing -> error + ( "dropDirectoryPrefix: cannot drop " + ++ show prefix + ++ " from " + ++ show path + ) + Just stripped -> stripped + +-- | String representation of Stack package version. +stackVersionStr :: Global -> String +stackVersionStr = + display . 
pkgVersion . package . gStackPackageDescription + +-- | Current operating system. +platformOS :: OS +platformOS = + let Platform _ os = buildPlatform + in os + +-- | Target operating system +targetPlatformOS :: OS +targetPlatformOS = Linux + +-- | Directory in which to store build and intermediate files. +releaseDir :: FilePath +releaseDir = "_release" + +-- | @--build-args@ command-line option name. +buildArgsOptName :: String +buildArgsOptName = "build-args" + +-- | @--alpine@ command-line option name. +alpineOptName :: String +alpineOptName = "alpine" + +-- | Arguments to pass to all 'stack' invocations. +stackArgs :: Global -> [String] +stackArgs global = [ "--arch=" ++ display global.gArch + , "--interleaved-output" + ] + +-- | Name of the 'stack' program. +stackProgName :: FilePath +stackProgName = "stack" + +-- | Global values and options. +data Global = Global + { gStackPackageDescription :: !PackageDescription + , gArch :: !Arch + , gBuildArgs :: [String] + } + deriving Show diff --git a/etc/scripts/release.hs b/etc/scripts/release.hs index 9716f3847d..a0ace5f5a9 100644 --- a/etc/scripts/release.hs +++ b/etc/scripts/release.hs @@ -1,343 +1,487 @@ {- stack script - --resolver lts-14.27 - --install-ghc - --ghc-options -Wall - --package Cabal - --package aeson - --package bytestring - --package case-insensitive - --package conduit - --package conduit-combinators - --package cryptohash - --package directory - --package extra - --package http-conduit - --package http-types - --package mime-types - --package process - --package resourcet - --package shake - --package tar - --package text - --package zip-archive - --package zlib + --snapshot lts-24.37 + --ghc-options -Wall -} -{-# LANGUAGE CPP #-} -{-# LANGUAGE RecordWildCards #-} -import Control.Applicative -import Control.Exception -import Control.Monad -import qualified Data.ByteString.Lazy.Char8 as L8 -import Data.List -import Data.Maybe -import Distribution.PackageDescription.Parsec -import Distribution.Text 
-import Distribution.System -import Distribution.Package -import Distribution.PackageDescription hiding (options) -import Distribution.Verbosity -import System.Console.GetOpt -import System.Directory -import System.IO.Error -import System.Process +-- As no packages are specified in the `stack script` command in the Stack +-- interpreter options comment, Stack deduces the required packages from the +-- module imports, being: Cabal, base, bytestring, directory, extra, process, +-- shake, tar, zip-archive and zlib. These are either GHC boot packages or in +-- the snapshot. Stackage LTS Haskell 24.37 does not include boot packages +-- directly. As GHC 9.10.3 boot packages Cabal and Cabal-syntax expose modules +-- with the same names, the language extension PackageImports is required. + +-- EXPERIMENTAL + +-- release.hs can be run on macOS/AArch64, using a Docker image for +-- Alpine Linux/AArch64, in order to create a statically-linked Linux/AArch64 +-- version of Stack: +-- +-- Install pre-requisites: +-- +-- > brew install docker +-- > brew install colima +-- +-- Start colima (with sufficient memory for Stack's integration tests) and run +-- script: +-- +-- > colima start --memory 4 # The default 2 GB is likely insufficient +-- > stack etc/scripts/release.hs check --alpine --stack-args=--docker-stack-exe=image +-- > stack etc/scripts/release.hs build --alpine --stack-args=--docker-stack-exe=image +-- > colima stop + +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE PackageImports #-} +{-# LANGUAGE PatternSynonyms #-} import qualified Codec.Archive.Tar as Tar +import qualified Codec.Archive.Tar.Entry as TarEntry import qualified Codec.Archive.Zip as Zip import qualified Codec.Compression.GZip as GZip -import Data.List.Extra -import Development.Shake -import Development.Shake.FilePath -import Prelude -- Silence AMP warning +import Control.Exception ( tryJust ) +import Control.Monad ( forM, guard, when ) +import qualified Data.ByteString.Lazy.Char8 as L8 +import 
"extra" Data.List.Extra ( isInfixOf, lower, stripPrefix, trim ) +import Data.Maybe ( fromMaybe ) +import Development.Shake + ( Action, Change (..), pattern Chatty, CmdOption (..), Rules + , ShakeOptions (..), Stdout (..), (%>), actionOnException + , alwaysRerun, cmd, command_, copyFileChanged + , getDirectoryFiles, liftIO, need, phony, putInfo + , removeFilesAfter, shakeArgsWith, shakeOptions, want + ) +import Development.Shake.FilePath + ( (<.>), (), dropFileName, exe, takeDirectory, toStandard + ) +import "Cabal" Distribution.PackageDescription + ( PackageDescription (..), packageDescription, pkgVersion + ) +import Distribution.Simple.PackageDescription + ( readGenericPackageDescription ) +import "Cabal" Distribution.System + ( Arch, OS (..), Platform (..), buildPlatform ) +import "Cabal" Distribution.Text ( display, simpleParse ) +import "Cabal" Distribution.Utils.ShortText ( fromShortText ) +import Distribution.Verbosity ( silent ) +import System.Console.GetOpt ( ArgDescr (..), OptDescr (..) ) +import System.Directory ( copyFile, getHomeDirectory, removeFile ) +import System.IO.Error ( isDoesNotExistError ) +import System.Process ( readProcess ) -- | Entrypoint. main :: IO () -main = - shakeArgsWith - shakeOptions { shakeFiles = releaseDir - , shakeVerbosity = Chatty - , shakeChange = ChangeModtimeAndDigestInput } - options $ - \flags args -> do - -- build the default value of type Global, with predefined constants - - -- 'stack build --dry-run' just ensures that 'stack.cabal' is generated from hpack - _ <- readProcess "stack" ["build", "--dry-run"] "" - gStackPackageDescription <- - packageDescription <$> readGenericPackageDescription silent "stack.cabal" - gGitRevCount <- length . 
lines <$> readProcess "git" ["rev-list", "HEAD"] "" - gGitSha <- trim <$> readProcess "git" ["rev-parse", "HEAD"] "" - gHomeDir <- getHomeDirectory - - let gAllowDirty = False - Platform arch _ = buildPlatform - gArch = arch - gBinarySuffix = "" - gTestHaddocks = True - gProjectRoot = "" -- Set to real value velow. - gBuildArgs = ["--flag", "stack:-developer-mode"] - gCertificateName = Nothing - global0 = foldl (flip id) Global{..} flags - - -- Need to get paths after options since the '--arch' argument can effect them. - projectRoot' <- getStackPath global0 "project-root" - let global = global0 - { gProjectRoot = projectRoot' } - return $ Just $ rules global args - where - getStackPath global path = do - out <- readProcess stackProgName (stackArgs global ++ ["path", "--" ++ path]) "" - return $ trim $ fromMaybe out $ stripPrefix (path ++ ":") out +main = shakeArgsWith + shakeOptions { shakeFiles = releaseDir + , shakeVerbosity = Chatty + , shakeChange = ChangeModtimeAndDigestInput + } + options $ + \flags args -> do + -- build the default value of type Global, with predefined constants + + -- 'stack build --dry-run' just ensures that 'stack.cabal' is generated from + -- 'package.yaml'. + _ <- readProcess "stack" ["build", "--dry-run"] "" + gStackPackageDescription <- + packageDescription <$> readGenericPackageDescription silent "stack.cabal" + gGitRevCount <- length . lines <$> readProcess "git" ["rev-list", "HEAD"] "" + gGitSha <- trim <$> readProcess "git" ["rev-parse", "HEAD"] "" + gHomeDir <- getHomeDirectory + + let gAllowDirty = False + Platform arch _ = buildPlatform + gArch = arch + gTargetOS = platformOS + gBinarySuffix = "" + gTestHaddocks = True + gProjectRoot = "" -- Set to real value below. 
+ gBuildArgs = ["--flag", "stack:-developer-mode"] + gStackArgs = [] + gCheckStackArgs = [] + gCertificateName = Nothing + global0 = foldl + (flip id) + Global + { gStackPackageDescription + , gAllowDirty + , gGitRevCount + , gGitSha + , gProjectRoot + , gHomeDir + , gArch + , gTargetOS + , gBinarySuffix + , gTestHaddocks + , gBuildArgs + , gStackArgs + , gCheckStackArgs + , gCertificateName + } + flags + + -- Need to get paths after options since the '--arch' argument can effect + -- them. + projectRoot' <- getStackPath global0 "project-root" + let global = global0 { gProjectRoot = projectRoot' } + pure $ Just $ rules global args + where + getStackPath global path = do + out <- + readProcess stackProgName (stackArgs global ++ ["path", "--" ++ path]) "" + pure $ trim $ fromMaybe out $ stripPrefix (path ++ ":") out -- | Additional command-line options. options :: [OptDescr (Either String (Global -> Global))] options = - [ Option "" [allowDirtyOptName] (NoArg $ Right $ \g -> g{gAllowDirty = True}) - "Allow a dirty working tree for release." - , Option "" [archOptName] - (ReqArg - (\v -> case simpleParse v of - Nothing -> Left $ "Unknown architecture in --arch option: " ++ v - Just arch -> Right $ \g -> g{gArch = arch}) - "ARCHITECTURE") - "Architecture to build (e.g. 'i386' or 'x86_64')." - , Option "" [binaryVariantOptName] - (ReqArg (\v -> Right $ \g -> g{gBinarySuffix = v}) "SUFFIX") - "Extra suffix to add to binary executable archive filename." - , Option "" [noTestHaddocksOptName] (NoArg $ Right $ \g -> g{gTestHaddocks = False}) - "Disable testing building haddocks." - , Option "" [alpineOptName] - (NoArg $ Right $ \g -> - g{gBuildArgs = - gBuildArgs g ++ - ["--flag=stack:static", "--docker", "--system-ghc", "--no-install-ghc"]}) - "Build a static binary using Alpine Docker image." - , Option "" [buildArgsOptName] - (ReqArg - (\v -> Right $ \g -> g{gBuildArgs = gBuildArgs g ++ words v}) - "\"ARG1 ARG2 ...\"") - "Additional arguments to pass to 'stack build'." 
- , Option "" [certificateNameOptName] - (ReqArg (\v -> Right $ \g -> g{gCertificateName = Just v}) "NAME") - "Certificate name for code signing on Windows" - ] + [ Option "" [allowDirtyOptName] + (NoArg $ Right $ \g -> g{gAllowDirty = True}) + "Allow a dirty working tree for release." + , Option "" [archOptName] + ( ReqArg + ( \v -> case simpleParse v of + Nothing -> Left $ "Unknown architecture in --arch option: " ++ v + Just arch -> Right $ \g -> g{gArch = arch} + ) + "ARCHITECTURE" + ) + "Architecture to build (e.g. 'i386' or 'x86_64')." + , Option "" [binaryVariantOptName] + (ReqArg (\v -> Right $ \g -> g{gBinarySuffix = v}) "SUFFIX") + "Extra suffix to add to binary executable archive filename." + , Option "" [noTestHaddocksOptName] + (NoArg $ Right $ \g -> g{gTestHaddocks = False}) + "Disable testing building haddocks." + , Option "" [alpineOptName] + ( NoArg $ Right $ \g -> + g { gBuildArgs = + gBuildArgs g + ++ [ "--flag=stack:static" + ] + , gStackArgs = + gStackArgs g + ++ [ "--docker" + , "--system-ghc" + , "--no-install-ghc" + ] + , gCheckStackArgs = + gCheckStackArgs g + ++ [ "--system-ghc" + , "--no-install-ghc" + ] + , gTargetOS = Linux + } + ) + "Build a statically-linked binary using an Alpine Linux Docker image." + , Option "" [stackArgsOptName] + ( ReqArg + (\v -> Right $ \g -> g{gStackArgs = gStackArgs g ++ words v}) + "\"ARG1 ARG2 ...\"" + ) + "Additional arguments to pass to 'stack'." + , Option "" [buildArgsOptName] + ( ReqArg + (\v -> Right $ \g -> g{gBuildArgs = gBuildArgs g ++ words v}) + "\"ARG1 ARG2 ...\"" + ) + "Additional arguments to pass to 'stack build'." + , Option "" [certificateNameOptName] + (ReqArg (\v -> Right $ \g -> g{gCertificateName = Just v}) "NAME") + "Certificate name for code signing on Windows" + ] -- | Shake rules. rules :: Global -> [String] -> Rules () -rules global@Global{..} args = do - case args of - [] -> error "No wanted target(s) specified." 
- _ -> want args - - phony releasePhony $ do - need [checkPhony] - need [buildPhony] - - phony cleanPhony $ - removeFilesAfter releaseDir ["//*"] - - phony checkPhony $ - need [releaseCheckDir binaryExeFileName] - - phony buildPhony $ - mapM_ (\f -> need [releaseDir f]) binaryPkgFileNames - - releaseCheckDir binaryExeFileName %> \out -> do - need [releaseBinDir binaryName stackExeFileName] - Stdout dirty <- cmd "git status --porcelain" - when (not gAllowDirty && not (null (trim dirty))) $ - error $ concat - [ "Working tree is dirty. Use --" - , allowDirtyOptName - , " option to continue anyway. Output:\n" - , show dirty - ] - () <- cmd - [gProjectRoot releaseBinDir binaryName stackExeFileName] - (stackArgs global) - ["build"] - gBuildArgs - integrationTestFlagArgs - ["--pedantic", "--no-haddock-deps", "--test"] - ["--haddock" | gTestHaddocks] - ["stack"] - () <- cmd - [gProjectRoot releaseBinDir binaryName stackExeFileName] - ["exec"] - [gProjectRoot releaseBinDir binaryName "stack-integration-test"] - copyFileChanged (releaseBinDir binaryName stackExeFileName) out - - releaseDir binaryPkgZipFileName %> \out -> do - stageFiles <- getBinaryPkgStageFiles - putNormal $ "zip " ++ out - liftIO $ do - entries <- forM stageFiles $ \stageFile -> do - Zip.readEntry - [Zip.OptLocation -#if MIN_VERSION_zip_archive(0,3,0) - (dropFileName (dropDirectoryPrefix (releaseStageDir binaryName) stageFile)) -#else - (dropDirectoryPrefix (releaseStageDir binaryName) stageFile) -#endif - False] - stageFile - let archive = foldr Zip.addEntryToArchive Zip.emptyArchive entries - L8.writeFile out (Zip.fromArchive archive) - - releaseDir binaryPkgTarGzFileName %> \out -> do - stageFiles <- getBinaryPkgStageFiles - writeTarGz out releaseStageDir stageFiles - - releaseStageDir binaryName stackExeFileName %> \out -> do - copyFileChanged (releaseDir binaryExeFileName) out - - releaseStageDir (binaryName ++ "//*") %> \out -> do - copyFileChanged - (dropDirectoryPrefix (releaseStageDir binaryName) 
out) - out - - releaseDir binaryExeFileName %> \out -> do - need [releaseBinDir binaryName stackExeFileName] - (Stdout versionOut) <- cmd (releaseBinDir binaryName stackExeFileName) "--version" - when (not gAllowDirty && "dirty" `isInfixOf` lower versionOut) $ - error ("Refusing continue because 'stack --version' reports dirty. Use --" ++ - allowDirtyOptName ++ " option to continue anyway.") - case platformOS of - Windows -> do - -- Windows doesn't have or need a 'strip' command, so skip it. - -- Instead, we sign the executable - liftIO $ copyFile (releaseBinDir binaryName stackExeFileName) out - case gCertificateName of - Nothing -> return () - Just certName -> - actionOnException - (command_ [] "c:\\Program Files\\Microsoft SDKs\\Windows\\v7.1\\Bin\\signtool.exe" - ["sign" - ,"/v" - ,"/d", synopsis gStackPackageDescription - ,"/du", homepage gStackPackageDescription - ,"/n", certName - ,"/t", "http://timestamp.verisign.com/scripts/timestamp.dll" - ,out]) - (removeFile out) - Linux -> - -- Using Ubuntu's strip to strip an Alpine exe doesn't work, so just copy - liftIO $ copyFile (releaseBinDir binaryName stackExeFileName) out - _ -> - cmd "strip -o" - [out, releaseBinDir binaryName stackExeFileName] - - releaseDir binaryInstallerFileName %> \_ -> do - need [releaseDir binaryExeFileName] - need [releaseDir binaryInstallerNSIFileName] - - command_ [Cwd releaseDir] "c:\\Program Files (x86)\\NSIS\\Unicode\\makensis.exe" - [ "-V3" - , binaryInstallerNSIFileName] - - releaseDir binaryInstallerNSIFileName %> \out -> do - need ["etc" "scripts" "build-stack-installer" <.> "hs"] - cmd "stack etc/scripts/build-stack-installer.hs" - [ binaryExeFileName - , binaryInstallerFileName - , out - ] :: Action () - - releaseBinDir binaryName stackExeFileName %> \out -> do - alwaysRerun - actionOnException - (cmd stackProgName - (stackArgs global) - ["--local-bin-path=" ++ takeDirectory out] - "install" - gBuildArgs - integrationTestFlagArgs - "--pedantic" - "stack") - (tryJust (guard 
. isDoesNotExistError) (removeFile out)) - - where - - integrationTestFlagArgs = - -- Explicitly enabling 'hide-dependency-versions' and 'supported-build' to work around - -- https://github.com/commercialhaskell/stack/issues/4960 - [ "--flag=stack:integration-tests" - , "--flag=stack:hide-dependency-versions" - , "--flag=stack:supported-build" +rules global args = do + case args of + [] -> error "No wanted target(s) specified." + _ -> want args + + phony releasePhony $ do + need [checkPhony] + need [buildPhony] + + phony cleanPhony $ + removeFilesAfter releaseDir ["//*"] + + phony checkPhony $ + need [releaseCheckDir binaryExeFileName] + + phony buildPhony $ + mapM_ (\f -> need [releaseDir f]) binaryPkgFileNames + + releaseCheckDir binaryExeFileName %> \out -> do + need [releaseBinDir binaryName stackExeFileName] + Stdout dirty <- cmd "git status --porcelain" + when (not global.gAllowDirty && not (null (trim dirty))) $ + error $ concat + [ "Working tree is dirty. Use --" + , allowDirtyOptName + , " option to continue anyway. Output:\n" + , show dirty ] + () <- cmd + stackProgName -- Use the platform's Stack + global.gStackArgs -- Possibly to set up a Docker container + ["exec"] -- To execute the target Stack + [ global.gProjectRoot releaseBinDir binaryName + stackExeFileName + ] + ["--"] + (stackArgs global) + global.gCheckStackArgs -- Possible use the Docker image's GHC + ["build"] -- To build the target Stack (Stack builds Stack) + global.gBuildArgs + integrationTestFlagArgs + ["--pedantic", "--no-haddock-deps", "--test"] + ["--haddock" | global.gTestHaddocks] + ["stack"] + -- We use the target Stack to execute the target stack-integration-test + -- outside of any Alpine Linux Docker container as the default linker on + -- Alpine Linux is ld.bfd and it is remarkably slow. stack-integration-test + -- will seek to use lld as the linker on Linux. 
+ () <- cmd + (global.gProjectRoot releaseBinDir binaryName + stackExeFileName) -- Use the target Stack + ["exec"] -- To execute the target stack-integration-test + [ global.gProjectRoot releaseBinDir binaryName + "stack-integration-test" + ] + copyFileChanged (releaseBinDir binaryName stackExeFileName) out + + releaseDir binaryPkgZipFileName %> \out -> do + stageFiles <- getBinaryPkgStageFiles + putInfo $ "zip " ++ out + liftIO $ do + entries <- forM stageFiles $ \stageFile -> do + Zip.readEntry + [ Zip.OptLocation + ( dropFileName + ( dropDirectoryPrefix + (releaseStageDir binaryName) + stageFile + ) + ) + False + ] + stageFile + let archive = foldr Zip.addEntryToArchive Zip.emptyArchive entries + L8.writeFile out (Zip.fromArchive archive) + + releaseDir binaryPkgTarGzFileName %> \out -> do + stageFiles <- getBinaryPkgStageFiles + writeTarGz id out releaseStageDir stageFiles + + releaseStageDir binaryName stackExeFileName %> \out -> do + copyFileChanged (releaseDir binaryExeFileName) out + + releaseStageDir (binaryName ++ "//*") %> \out -> do + copyFileChanged + (dropDirectoryPrefix (releaseStageDir binaryName) out) + out + + releaseDir binaryExeFileName %> \out -> do + need [releaseBinDir binaryName stackExeFileName] + (Stdout versionOut) <- + cmd + stackProgName -- Use the platform's Stack + global.gStackArgs -- Possibly to set up a Docker container + ["exec"] -- To execute the target Stack and get its version info + (releaseBinDir binaryName stackExeFileName) + ["--"] + ["--version"] + when (not global.gAllowDirty && "dirty" `isInfixOf` lower versionOut) $ + error + ( "Refusing continue because 'stack --version' reports dirty. Use --" + ++ allowDirtyOptName + ++ " option to continue anyway." + ) + case platformOS of + Windows -> do + -- Windows doesn't have or need a 'strip' command, so skip it. 
+ -- Instead, we sign the executable + liftIO $ copyFile (releaseBinDir binaryName stackExeFileName) out + case global.gCertificateName of + Nothing -> pure () + Just certName -> + actionOnException + ( command_ + [] + "c:\\Program Files\\Microsoft SDKs\\Windows\\v7.1\\Bin\\signtool.exe" + [ "sign" + , "/v" + , "/d" + , fromShortText $ synopsis global.gStackPackageDescription + , "/du" + , fromShortText $ homepage global.gStackPackageDescription + , "/n" + , certName + , "/t" + , "http://timestamp.verisign.com/scripts/timestamp.dll" + , out + ] + ) + (removeFile out) + Linux -> + -- Using Ubuntu's strip to strip an Alpine exe doesn't work, so just copy + liftIO $ copyFile (releaseBinDir binaryName stackExeFileName) out + _ -> + cmd "strip -o" + [out, releaseBinDir binaryName stackExeFileName] + + releaseDir binaryInstallerFileName %> \_ -> do + need [releaseDir binaryExeFileName] + need [releaseDir binaryInstallerNSIFileName] + + command_ [Cwd releaseDir] "makensis.exe" + [ "-V3" + , binaryInstallerNSIFileName + ] + + releaseDir binaryInstallerNSIFileName %> \out -> do + need ["etc" "scripts" "build-stack-installer" <.> "hs"] + -- Added as part of the work around for: + -- https://github.com/commercialhaskell/stack/issues/6711 + -- + -- On Windows only, for some unidentified reason, stack script can fail when + -- using a pre-compiled package. This can affect the script + -- build-stack-installer.hs. The work around is to build the package + -- required for that script using the same Stack configuration as used by + -- the script. 
+ () <- cmd "stack --stack-yaml etc/scripts/stack.yaml build nsis" + cmd "stack etc/scripts/build-stack-installer.hs" + [ binaryExeFileName + , binaryInstallerFileName + , out + , stackVersionStr global + ] :: Action () + + releaseBinDir binaryName stackExeFileName %> \out -> do + alwaysRerun + actionOnException + ( cmd + stackProgName -- Use the platform's Stack + (stackArgs global) + ["--local-bin-path=" ++ takeDirectory out] + global.gStackArgs -- Possibly to set up a Docker container + "install" -- To build and install Stack to that local bin path + global.gBuildArgs + integrationTestFlagArgs + "--pedantic" + "stack" + ) + (tryJust (guard . isDoesNotExistError) (removeFile out)) + + where + integrationTestFlagArgs = + -- Explicitly enabling 'hide-dependency-versions' and 'supported-build' to + -- work around https://github.com/commercialhaskell/stack/issues/4960 + [ "--flag=stack:integration-tests" + , "--flag=stack:hide-dependency-versions" + , "--flag=stack:supported-build" + ] - getBinaryPkgStageFiles = do - docFiles <- getDocFiles - let stageFiles = concat - [[releaseStageDir binaryName stackExeFileName] - ,map ((releaseStageDir binaryName) ) docFiles] - need stageFiles - return stageFiles - - getDocFiles = getDirectoryFiles "." 
["LICENSE", "*.md", "doc//*.md"] - - releasePhony = "release" - checkPhony = "check" - cleanPhony = "clean" - buildPhony = "build" - - releaseCheckDir = releaseDir "check" - releaseStageDir = releaseDir "stage" - releaseBinDir = releaseDir "bin" - - binaryPkgFileNames = - case platformOS of - Windows -> [binaryExeFileName, binaryPkgZipFileName, binaryPkgTarGzFileName, binaryInstallerFileName] - _ -> [binaryExeFileName, binaryPkgTarGzFileName] - binaryPkgZipFileName = binaryName <.> zipExt - binaryPkgTarGzFileName = binaryName <.> tarGzExt - -- Adding '-bin' to name to work around https://github.com/commercialhaskell/stack/issues/4961 - binaryExeFileName = binaryName ++ "-bin" <.> exe - binaryInstallerNSIFileName = binaryName ++ "-installer" <.> nsiExt - binaryInstallerFileName = binaryName ++ "-installer" <.> exe - binaryName = - concat - [ stackProgName - , "-" - , stackVersionStr global - , "-" - , display platformOS - , "-" - , display gArch - , if null gBinarySuffix then "" else "-" ++ gBinarySuffix ] - stackExeFileName = stackProgName <.> exe - - zipExt = ".zip" - tarGzExt = tarExt <.> gzExt - gzExt = ".gz" - tarExt = ".tar" - nsiExt = ".nsi" + getBinaryPkgStageFiles = do + docFiles <- getDocFiles + let stageFiles = concat + [ [releaseStageDir binaryName stackExeFileName] + , map ((releaseStageDir binaryName) ) docFiles + ] + need stageFiles + pure stageFiles + + getDocFiles = getDirectoryFiles "." 
["LICENSE", "*.md", "doc//*.md"] + + releasePhony = "release" + checkPhony = "check" + cleanPhony = "clean" + buildPhony = "build" + + releaseCheckDir = releaseDir "check" + releaseStageDir = releaseDir "stage" + releaseBinDir = releaseDir "bin" + + binaryPkgFileNames = + case global.gTargetOS of + Windows -> + [ binaryExeFileName + , binaryPkgZipFileName + , binaryPkgTarGzFileName + , binaryInstallerFileName + ] + Linux -> [binaryExeFileName, binaryPkgTarGzFileName] + _ -> [binaryExeFileName, binaryPkgTarGzFileName] + binaryPkgZipFileName = binaryName <.> zipExt + binaryPkgTarGzFileName = binaryName <.> tarGzExt + binaryExeFileName = binaryName ++ "-bin" <.> exe + -- Prefix with 'installer-' so it doesn't get included in release artifacts + -- (due to NSIS limitation, needs to be in same directory as executable) + binaryInstallerNSIFileName = "installer-" ++ binaryName <.> nsiExt + binaryInstallerFileName = binaryName ++ "-installer" <.> exe + binaryName = concat + [ stackProgName + , "-" + , stackVersionStr global + , "-" + , display global.gTargetOS + , "-" + , display global.gArch + , if null global.gBinarySuffix then "" else "-" ++ global.gBinarySuffix + ] + stackExeFileName = stackProgName <.> exe + + zipExt = ".zip" + tarGzExt = tarExt <.> gzExt + gzExt = ".gz" + tarExt = ".tar" + nsiExt = ".nsi" -- | Create a .tar.gz files from files. The paths should be absolute, and will -- be made relative to the base directory in the tarball. -writeTarGz :: FilePath -> FilePath -> [FilePath] -> Action () -writeTarGz out baseDir inputFiles = liftIO $ do - content <- Tar.pack baseDir $ map (dropDirectoryPrefix baseDir) inputFiles - L8.writeFile out $ GZip.compress $ Tar.write content - --- | Drops a directory prefix from a path. The prefix automatically has a path --- separator character appended. Fails if the path does not begin with the prefix. 
+writeTarGz :: + (FilePath -> FilePath) + -> FilePath + -> FilePath + -> [FilePath] + -> Action () +writeTarGz fixPath out baseDir inputFiles = liftIO $ do + content <- Tar.pack baseDir $ map (dropDirectoryPrefix baseDir) inputFiles + L8.writeFile out $ GZip.compress $ Tar.write $ map fixPath' content + where + fixPath' :: Tar.Entry -> Tar.Entry + fixPath' entry = + case TarEntry.toTarPath isDir $ fixPath $ TarEntry.entryPath entry of + Left e -> error $ show (Tar.entryPath entry, e) + Right tarPath -> entry { TarEntry.entryTarPath = tarPath } + where + isDir = + case TarEntry.entryContent entry of + TarEntry.Directory -> True + _ -> False + +-- | Drops a directory prefix from a path. The prefix automatically has a path +-- separator character appended. Fails if the path does not begin with the +-- prefix. dropDirectoryPrefix :: FilePath -> FilePath -> FilePath dropDirectoryPrefix prefix path = - case stripPrefix (toStandard prefix ++ "/") (toStandard path) of - Nothing -> error ("dropDirectoryPrefix: cannot drop " ++ show prefix ++ " from " ++ show path) - Just stripped -> stripped - --- | String representation of stack package version. + case stripPrefix (toStandard prefix ++ "/") (toStandard path) of + Nothing -> error + ( "dropDirectoryPrefix: cannot drop " + ++ show prefix + ++ " from " + ++ show path + ) + Just stripped -> stripped + +-- | String representation of Stack package version. stackVersionStr :: Global -> String stackVersionStr = - display . pkgVersion . package . gStackPackageDescription + display . pkgVersion . package . gStackPackageDescription -- | Current operating system. platformOS :: OS platformOS = - let Platform _ os = buildPlatform - in os + let Platform _ os = buildPlatform + in os -- | Directory in which to store build and intermediate files. 
releaseDir :: FilePath @@ -359,6 +503,10 @@ binaryVariantOptName = "binary-variant" noTestHaddocksOptName :: String noTestHaddocksOptName = "no-test-haddocks" +-- | @--stack-args@ command-line option name. +stackArgsOptName :: String +stackArgsOptName = "stack-args" + -- | @--build-args@ command-line option name. buildArgsOptName :: String buildArgsOptName = "build-args" @@ -373,30 +521,29 @@ certificateNameOptName = "certificate-name" -- | Arguments to pass to all 'stack' invocations. stackArgs :: Global -> [String] -stackArgs Global{..} = ["--arch=" ++ display gArch, "--interleaved-output"] +stackArgs global = [ "--arch=" ++ display global.gArch + , "--interleaved-output" + ] -- | Name of the 'stack' program. stackProgName :: FilePath stackProgName = "stack" --- | Linux distribution/version combination. -data DistroVersion = DistroVersion - { dvDistro :: !String - , dvVersion :: !String - , dvCodeName :: !String } - -- | Global values and options. data Global = Global - { gStackPackageDescription :: !PackageDescription - , gAllowDirty :: !Bool - , gGitRevCount :: !Int - , gGitSha :: !String - , gProjectRoot :: !FilePath - , gHomeDir :: !FilePath - , gArch :: !Arch - , gBinarySuffix :: !String - , gTestHaddocks :: !Bool - , gBuildArgs :: [String] - , gCertificateName :: !(Maybe String) - } - deriving (Show) + { gStackPackageDescription :: !PackageDescription + , gAllowDirty :: !Bool + , gGitRevCount :: !Int + , gGitSha :: !String + , gProjectRoot :: !FilePath + , gHomeDir :: !FilePath + , gArch :: !Arch + , gTargetOS :: !OS + , gBinarySuffix :: !String + , gTestHaddocks :: !Bool + , gBuildArgs :: [String] + , gStackArgs :: [String] + , gCheckStackArgs :: [String] + , gCertificateName :: !(Maybe String) + } + deriving Show diff --git a/etc/scripts/stack.yaml b/etc/scripts/stack.yaml new file mode 100644 index 0000000000..590b0311bc --- /dev/null +++ b/etc/scripts/stack.yaml @@ -0,0 +1,9 @@ +# Added as part of the work around for: +# 
https://github.com/commercialhaskell/stack/issues/6711 +# +# On Windows only, for some unidentified reason, stack script can fail when +# using a pre-compiled package. This can affect the script +# build-stack-installer.hs. The work around is to build the package required for +# that script using the same Stack configuration as used by the script. + +snapshot: lts-24.37 diff --git a/mkdocs.yml b/mkdocs.yml index 60d930f1fe..d1d726a342 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,52 +1,188 @@ -site_name: The Haskell Tool Stack -site_description: The Haskell Tool Stack +site_name: Stack +site_description: | + A program for developing Haskell projects. site_author: Stack contributors +site_url: !ENV READTHEDOCS_CANONICAL_URL repo_url: https://github.com/commercialhaskell/stack/ -edit_uri: edit/stable/doc/ -copyright: Copyright (c) 2015-2020, Stack contributors +edit_uri: tree/stable/doc/ +copyright: Copyright (c) 2015-2025, Stack contributors docs_dir: doc site_dir: _site -theme: readthedocs +theme: + name: material + palette: + primary: 'deep purple' + accent: 'deep purple' + logo: img/stack-logo-white.svg + favicon: img/stack-favicon.svg + features: + - content.code.annotate + - content.code.copy + - content.code.select + - content.tabs.link + - navigation.indexes + - navigation.tabs + - navigation.top extra_css: - css/extra.css extra_javascript: - js/searchhack.js +# Read the Docs requires JQuery for its JavaScript code to inject the flyout +# menu. Material for MkDocs does not come with JQuery. 
+- 'https://code.jquery.com/jquery-3.6.1.min.js' +- https://cdn.jsdelivr.net/npm/mermaid@11.7.0/dist/mermaid.min.js +exclude_docs: | + /maintainers/archive/ + /maintainers/stack_errors.md -pages: -- Home: README.md -- Changelog: ChangeLog.md -- Tool documentation: - - Install/upgrade: install_and_upgrade.md - - User guide: GUIDE.md - - FAQ: faq.md - - Configuration (project and global): yaml_configuration.md - - stack.yaml vs cabal package files: stack_yaml_vs_cabal_package_file.md - - Build command: build_command.md - - Developing on Windows: developing_on_windows.md - - Dependency visualization: dependency_visualization.md - - Docker integration: docker_integration.md - - Nix integration: nix_integration.md - - Non-standard project initialization: nonstandard_project_init.md - - Shell auto-completion: shell_autocompletion.md - - Travis CI: travis_ci.md - - Azure CI: azure_ci.md - - Custom snapshots: custom_snapshot.md - - Code coverage: coverage.md - - GHCi: ghci.md - - Pantry: pantry.md - - Lock files: lock_files.md -- Advanced documentation: - - Build overview: build_overview.md -- Project documentation: - - Contributors guide: CONTRIBUTING.md - - Maintainer guide: +nav: +- Welcome!: README.md +- Setting up: install_and_upgrade.md +- Getting started: + - tutorial/index.md + - 1. A Hello World example: tutorial/hello_world_example.md + - 2. Package description: tutorial/package_description.md + - 3. Project configuration: tutorial/project_configuration.md + - 4. Building your project: tutorial/building_your_project.md + - 5. Building existing projects: tutorial/building_existing_projects.md + - 6. Installed package databases: tutorial/installed_package_databases.md + - 7. stack build synonyms: tutorial/stack_build_synonyms.md + - 8. stack build targets: tutorial/stack_build_targets.md + - 9. Multi-package projects: tutorial/multi-package_projects.md + - 10. Cabal flags and GHC options: tutorial/cabal_flags_and_ghc_options.md + - 11. 
Locations used by Stack: tutorial/locations_used_by_stack.md + - 12. Executing commands: tutorial/executing_commands.md + - 13. Using GHC interactively: tutorial/using_ghc_interactively.md + - 14. Stack configuration: tutorial/stack_configuration.md + - 15. In conclusion: tutorial/tutorial_conclusion.md +- Commands: + - commands/index.md + - bench: commands/bench_command.md + - build: commands/build_command.md + - clean: commands/clean_command.md + - config: commands/config_command.md + - dot: commands/dot_command.md + - docker: commands/docker_command.md + - eval: commands/eval_command.md + - exec: commands/exec_command.md + - ghc: commands/ghc_command.md + - ghci: commands/ghci_command.md + - haddock: commands/haddock_command.md + - hoogle: commands/hoogle_command.md + - hpc: commands/hpc_command.md + - ide: commands/ide_command.md + - init: commands/init_command.md + - install: commands/install_command.md + - list: commands/list_command.md + - ls: commands/ls_command.md + - new: commands/new_command.md + - path: commands/path_command.md + - purge: commands/purge_command.md + - query: commands/query_command.md + - repl: commands/repl_command.md + - run: commands/run_command.md + - runghc: commands/runghc_command.md + - runhaskell: commands/runhaskell_command.md + - script: commands/script_command.md + - sdist: commands/sdist_command.md + - setup: commands/setup_command.md + - templates: commands/templates_command.md + - test: commands/test_command.md + - uninstall: commands/uninstall_command.md + - unpack: commands/unpack_command.md + - update: commands/update_command.md + - upgrade: commands/upgrade_command.md + - upload: commands/upload_command.md +- Configure: + - configure/index.md + - Environment variables: configure/environment_variables.md + - Configuration files: + - configure/yaml/index.md + - Project-specific configuration: configure/yaml/project.md + - Non-project specific configuration: configure/yaml/non-project.md + - The !include directive: 
configure/yaml/include.md + - Global flags and options: configure/global_flags.md + - Customisation scripts: configure/customisation_scripts.md +- Topics: + - topics/index.md + - Stack root: topics/stack_root.md + - Stack work directories: topics/stack_work.md + - Snapshot location: topics/snapshot_location.md + - Package location: topics/package_location.md + - Snapshot specification: topics/custom_snapshot.md + - stack.yaml vs a Cabal file: topics/stack_yaml_vs_cabal_package_file.md + - Script interpreter: topics/scripts.md + - Docker integration: topics/docker_integration.md + - Nix integration: topics/nix_integration.md + - Non-standard project initialization: topics/nonstandard_project_init.md + - Building GHC from source: topics/GHC_from_source.md + - Debugging: topics/debugging.md + - Editor integration: topics/editor_integration.md + - Stack and Visual Studio Code: topics/Stack_and_VS_Code.md + - Developing on Windows: topics/developing_on_windows.md + - Shell auto-completion: topics/shell_autocompletion.md + - CI: topics/CI.md + - Travis CI: topics/travis_ci.md + - Azure CI: topics/azure_ci.md + - Lock files: topics/lock_files.md + - Haskell and C code: topics/haskell_and_c_code.md +- Get involved: + - community/index.md + - Contributors: + - Contributor's guide: CONTRIBUTING.md + - Dev Containers: dev_containers.md + - Maintainers: + - Version scheme: maintainers/version_scheme.md - Releases: maintainers/releases.md - Maintainer team process: maintainers/team_process.md - Add GHC version: maintainers/ghc.md - - Docker images: maintainers/docker.md - - Upgrading msys: maintainers/msys.md - - Signing key: SIGNING_KEY.md + - Upgrading MSYS2: maintainers/msys.md + - Upgrading 7-Zip: maintainers/7zip.md + - HaskellStack.org: maintainers/haskellstack.org.md + - Self-hosted runners: maintainers/self-hosted_runners.md + - Signing key: SIGNING_KEY.md + - How Stack works: + - Build overview: build_overview.md +- FAQ: faq.md +- Glossary: glossary.md +- More: + - 
Other resources: other_resources.md + - Version history: ChangeLog.md markdown_extensions: +- abbr +- admonition +- attr_list +- def_list +- footnotes +- md_in_html +- pymdownx.details +- pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg +- pymdownx.highlight: + anchor_linenums: true +- pymdownx.inlinehilite +- pymdownx.snippets +- pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format +- pymdownx.tabbed: + alternate_style: true +- pymdownx.tilde - toc: permalink: true + +plugins: +- search: {} +- redirects: + redirect_maps: + 'GUIDE.md': 'tutorial/index.md' + 'yaml_configuration.md': 'configure/yaml/index.md' + 'build_command.md': 'commands/build_command.md' + 'custom_snapshot.md': 'topics/custom_snapshot.md' + 'lock_files.md': 'topics/lock_files.md' + 'scripts.md': 'topics/scripts.md' +- social diff --git a/package.yaml b/package.yaml index e889cbd2af..42f8aa4354 100644 --- a/package.yaml +++ b/package.yaml @@ -1,29 +1,39 @@ +# Hpack >= 0.35.0 is provided by Stack >= 2.9.1. +spec-version: 0.35.0 + name: stack -version: '2.6.0' -synopsis: The Haskell Tool Stack +version: '3.10.0' +synopsis: A program for developing Haskell projects description: | - Please see the documentation at - for usage information. - . + Stack (the Haskell Tool Stack) is a program for developing Haskell projects. + It is aimed at new and experienced users of Haskell and seeks to support them + fully on Linux, macOS and Windows. + + For information about how to use Stack, see . + If building a 'stack' executable for distribution, please download the source code from - and build it using Stack itself in order to ensure identical behaviour - to official binaries. This package on Hackage is provided for convenience - and bootstrapping purposes. - . 
- Note that the API for the library is not currently stable, and may - change significantly, even between minor releases. It is - currently only intended for use by the executable. + and build it with Stack in order to ensure identical behaviour to official + binaries. + + This package is provided on Hackage for convenience and bootstrapping + purposes. + + Currently, the library exposed by the package is intended for use only by the + executable. The library's API may change significantly, even between minor + releases. category: Development author: Commercial Haskell SIG -maintainer: manny@fpcomplete.com -license: BSD3 +maintainer: +- Mike Pilgrem +- Emanuel Borsboom +license: BSD-3-Clause github: commercialhaskell/stack homepage: http://haskellstack.org custom-setup: dependencies: - - base >=4.10 && < 5 - - Cabal + - base >= 4.14.3.0 && < 5 + - Cabal >= 3.14 && < 3.18 - filepath extra-source-files: # note: leaving out 'package.yaml' because it causes confusion with hackage metadata revisions @@ -31,38 +41,45 @@ extra-source-files: - ChangeLog.md - README.md - stack.yaml -- doc/*.md +- doc/**/*.md - src/setup-shim/StackSetupShim.hs -- test/package-dump/ghc-7.10.txt -- test/package-dump/ghc-7.8.4-osx.txt -- test/package-dump/ghc-7.8.txt -- test/package-dump/ghc-head.txt -- src/test/Stack/Untar/test1.tar.gz -- src/test/Stack/Untar/test2.tar.gz +- tests/unit/package-dump/ghc-7.10.txt +- tests/unit/package-dump/ghc-7.8.4-osx.txt +- tests/unit/package-dump/ghc-7.8.txt +- tests/unit/package-dump/ghc-head.txt +- tests/unit/Stack/Untar/test1.tar.gz +- tests/unit/Stack/Untar/test2.tar.gz +# Support for people who prefer, or need, to build Stack with Cabal (the tool): +- cabal.project +- cabal.config +# GHC's GHC2024 language extension is supported by GHC >= 9.10.1 +language: GHC2024 ghc-options: +- -fwrite-ide-info +- -hiedir=.hie - -Wall -- -fwarn-tabs -- -fwarn-incomplete-uni-patterns -- -fwarn-incomplete-record-updates -- -optP-Wno-nonportable-include-path # workaround 
[Filename case on macOS · Issue #4739 · haskell/cabal](https://github.com/haskell/cabal/issues/4739) +- -Wmissing-export-lists +# Workaround for https://github.com/haskell/cabal/issues/4739. +# -Wnon-noportable-include-path is a Clang diagnostic flag. See +# https://clang.llvm.org/docs/DiagnosticsReference.html#wnonportable-include-path +- -optP-Wno-nonportable-include-path dependencies: -- Cabal -- aeson -- annotated-wl-pprint -- ansi-terminal +- base >= 4.16.0.0 && < 5 +- Cabal >= 3.14 && < 3.17 +- aeson >= 2.0.3.0 +- aeson-warning-parser >= 0.1.1 +- ansi-terminal >= 1.0.2 - array - async - attoparsec -- base >=4.10 && < 5 - base64-bytestring - bytestring -- colour +- casa-client >= 0.0.2 +- companion - conduit - conduit-extra - containers -- cryptonite -- cryptonite-conduit -- deepseq +- crypton - directory - echo - exceptions @@ -70,65 +87,51 @@ dependencies: - file-embed - filelock - filepath -- fsnotify +- fsnotify >= 0.4.1 - generic-deriving -- hackage-security +- ghc-boot - hashable -- hi-file-parser -- hpack +- hi-file-parser >= 0.1.8.0 +- hpack >= 0.36.0 - hpc - http-client -- http-client-tls +- http-client-tls >= 0.3.6.2 - http-conduit -- http-download +- http-download >= 0.2.1.0 - http-types - memory - microlens -- mintty -- mono-traversable - mtl - mustache - neat-interpolation -- network-uri - open-browser -- optparse-applicative >= 0.14.3.0 -- pantry >= 0.5.1.3 -- casa-client -- casa-types -- path +- optparse-applicative >= 0.18.1.0 +- pantry >= 0.11.0 +- path >= 0.9.5 - path-io -- persistent +# In order for Cabal (the tool) to build Stack, it needs to be told of the +# upper bound on persistent. 
See +# https://github.com/commercialhaskell/stack/pull/5677#issuecomment-1193318542 +- persistent >= 2.14.0.0 && < 2.19 - persistent-sqlite -- persistent-template - pretty -- primitive -- process +- process >= 1.6.13.2 - project-template -- regex-applicative-text -- retry -- rio >= 0.1.18.0 -- rio-prettyprint >= 0.1.1.0 -- semigroups +- random +- rio >= 0.1.22.0 && ( < 0.1.23.0 || > 0.1.23.0 ) +- rio-prettyprint >= 0.1.8.0 +- semaphore-compat - split - stm -- streaming-commons -- tar +- tar >= 0.6.2.0 - template-haskell -- temporary - text -- text-metrics -- th-reify-many - time -- tls - transformers -- typed-process -- unicode-transforms - unix-compat -- unliftio - unordered-containers - vector - yaml -- zip-archive - zlib when: - condition: os(windows) @@ -142,16 +145,25 @@ when: hsc2hs:hsc2hs dependencies: - unix +# See https://github.com/haskell/network/pull/552. +- condition: impl(ghc >= 9.4.5) && os(windows) + dependencies: network >= 3.1.2.9 - condition: flag(developer-mode) then: cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=True else: cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=False +- condition: flag(disable-stack-upload) + then: + cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=True + else: + cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=False library: - source-dirs: src/ + source-dirs: src ghc-options: - - -fwarn-identities + - -Widentities generated-exposed-modules: + - Build_stack - Paths_stack exposed-modules: - Control.Concurrent.Execute @@ -159,6 +171,7 @@ library: - Data.Attoparsec.Combinators - Data.Attoparsec.Interpreter - Data.Monoid.Map + - GHC.Utils.GhcPkg.Main.Compat - Network.HTTP.StackClient - Options.Applicative.Args - Options.Applicative.Builder.Extra @@ -166,27 +179,42 @@ library: - Path.CheckInstall - Path.Extra - Path.Find + - Stack - Stack.Build - Stack.Build.Cache - Stack.Build.ConstructPlan - Stack.Build.Execute + - Stack.Build.ExecuteEnv + - Stack.Build.ExecutePackage - Stack.Build.Haddock - Stack.Build.Installed - Stack.Build.Source - 
Stack.Build.Target + - Stack.BuildInfo + - Stack.BuildOpts - Stack.BuildPlan + - Stack.CLI - Stack.Clean + - Stack.Component + - Stack.ComponentFile - Stack.Config - Stack.Config.Build + - Stack.Config.ConfigureScript - Stack.Config.Docker - Stack.Config.Nix + - Stack.ConfigureOpts - Stack.ConfigCmd - Stack.Constants - Stack.Constants.Config + - Stack.Constants.StackProgName - Stack.Coverage - Stack.DefaultColorWhen + - Stack.DependencyGraph - Stack.Docker + - Stack.DockerCmd - Stack.Dot + - Stack.Eval + - Stack.Exec - Stack.FileWatch - Stack.GhcPkg - Stack.Ghci @@ -204,29 +232,45 @@ library: - Stack.Options.BuildParser - Stack.Options.CleanParser - Stack.Options.ConfigParser + - Stack.Options.ConfigEnvParser + - Stack.Options.ConfigSetParser - Stack.Options.Completion - Stack.Options.DockerParser - Stack.Options.DotParser + - Stack.Options.EvalParser - Stack.Options.ExecParser + - Stack.Options.FlagsParser - Stack.Options.GhcBuildParser - Stack.Options.GhciParser - Stack.Options.GhcVariantParser - Stack.Options.GlobalParser - Stack.Options.HaddockParser - Stack.Options.HpcReportParser + - Stack.Options.IdeParser + - Stack.Options.InitParser - Stack.Options.LogLevelParser + - Stack.Options.LsParser - Stack.Options.NewParser - Stack.Options.NixParser - Stack.Options.PackageParser - - Stack.Options.ResolverParser - - Stack.Options.ScriptParser + - Stack.Options.PackagesParser + - Stack.Options.PathParser + - Stack.Options.PvpBoundsParser - Stack.Options.SDistParser + - Stack.Options.ScriptParser + - Stack.Options.SetupParser + - Stack.Options.SnapshotParser - Stack.Options.TestParser + - Stack.Options.UnpackParser + - Stack.Options.UpgradeParser + - Stack.Options.UploadParser - Stack.Options.Utils - Stack.Package - Stack.PackageDump + - Stack.PackageFile - Stack.Path - Stack.Prelude + - Stack.Query - Stack.Runners - Stack.Script - Stack.SDist @@ -237,23 +281,95 @@ library: - Stack.Storage.Project - Stack.Storage.User - Stack.Storage.Util + - Stack.Templates + - 
Stack.Types.AddCommand + - Stack.Types.AllowNewerDeps + - Stack.Types.ApplyGhcOptions + - Stack.Types.ApplyProgOptions - Stack.Types.Build + - Stack.Types.Build.ConstructPlan + - Stack.Types.Build.Exception + - Stack.Types.BuildConfig + - Stack.Types.BuildOpts + - Stack.Types.BuildOptsCLI + - Stack.Types.BuildOptsMonoid + - Stack.Types.CabalConfigKey + - Stack.Types.Cache + - Stack.Types.Casa + - Stack.Types.ColorWhen + - Stack.Types.CompCollection - Stack.Types.CompilerBuild + - Stack.Types.CompilerPaths - Stack.Types.Compiler + - Stack.Types.Component + - Stack.Types.ComponentUtils - Stack.Types.Config - - Stack.Types.Config.Build + - Stack.Types.Config.Exception + - Stack.Types.ConfigMonoid + - Stack.Types.ConfigSetOpts + - Stack.Types.ConfigureOpts + - Stack.Types.Curator + - Stack.Types.Dependency + - Stack.Types.DependencyTree - Stack.Types.Docker + - Stack.Types.DockerEntrypoint + - Stack.Types.DotConfig + - Stack.Types.DotOpts + - Stack.Types.DownloadInfo + - Stack.Types.DumpLogs + - Stack.Types.DumpPackage + - Stack.Types.EnvConfig + - Stack.Types.EnvSettings + - Stack.Types.ExtraDirs + - Stack.Types.FileDigestCache + - Stack.Types.GHCDownloadInfo + - Stack.Types.GHCVariant + - Stack.Types.GhcOptionKey + - Stack.Types.GhcOptions + - Stack.Types.GhcPkgExe - Stack.Types.GhcPkgId + - Stack.Types.GhciOpts + - Stack.Types.GlobalOpts + - Stack.Types.GlobalOptsMonoid + - Stack.Types.HpcReportOpts + - Stack.Types.IdeOpts + - Stack.Types.Installed + - Stack.Types.InterfaceOpt + - Stack.Types.IsMutable + - Stack.Types.LockFileBehavior + - Stack.Types.LsOpts + - Stack.Types.MsysEnvironment - Stack.Types.NamedComponent - Stack.Types.Nix - Stack.Types.Package + - Stack.Types.PackageFile - Stack.Types.PackageName - - Stack.Types.Resolver + - Stack.Types.ParentMap + - Stack.Types.Plan + - Stack.Types.Platform + - Stack.Types.Project + - Stack.Types.ProjectAndConfigMonoid + - Stack.Types.ProjectConfig + - Stack.Types.PvpBounds + - Stack.Types.Runner + - Stack.Types.SCM + 
- Stack.Types.SDistOpts + - Stack.Types.SetupInfo + - Stack.Types.SetupOpts + - Stack.Types.Snapshot - Stack.Types.SourceMap + - Stack.Types.StackYamlLoc + - Stack.Types.Storage - Stack.Types.TemplateName + - Stack.Types.UnusedFlags + - Stack.Types.UpgradeOpts + - Stack.Types.UploadOpts - Stack.Types.Version - - Stack.Types.VersionIntervals + - Stack.Types.VersionedDownloadInfo + - Stack.Types.WantedCompilerSetter + - Stack.Uninstall - Stack.Unpack + - Stack.Update - Stack.Upgrade - Stack.Upload - System.Info.ShortPathName @@ -267,13 +383,19 @@ library: else: source-dirs: src/unix/ c-sources: src/unix/cbits/uname.c + - condition: ! '!(flag(disable-git-info))' + cpp-options: -DUSE_GIT_INFO + dependencies: + - githash + - optparse-simple + - condition: flag(hide-dependency-versions) + cpp-options: -DHIDE_DEP_VERSIONS + - condition: flag(supported-build) + cpp-options: -DSUPPORTED_BUILD executables: stack: main: Main.hs - source-dirs: src/main - generated-other-modules: - - Build_stack - - Paths_stack + source-dirs: app ghc-options: - -threaded - -rtsopts @@ -284,20 +406,11 @@ executables: ld-options: - -static - -pthread - - condition: ! 
'!(flag(disable-git-info))' - cpp-options: -DUSE_GIT_INFO - dependencies: - - githash - - optparse-simple - - condition: flag(hide-dependency-versions) - cpp-options: -DHIDE_DEP_VERSIONS - - condition: flag(supported-build) - cpp-options: -DSUPPORTED_BUILD stack-integration-test: main: IntegrationSpec.hs source-dirs: - - test/integration - - test/integration/lib + - tests/integration + - tests/integration/lib ghc-options: - -threaded - -rtsopts @@ -314,9 +427,15 @@ executables: - -static - -pthread tests: - stack-test: + stack-unit-test: main: Spec.hs - source-dirs: src/test + source-dirs: tests/unit + when: + - condition: 'os(windows)' + then: + source-dirs: tests/unit/windows/ + else: + source-dirs: tests/unit/unix/ ghc-options: - -threaded dependencies: @@ -324,38 +443,53 @@ tests: - hspec - raw-strings-qq - stack - - smallcheck + verbatim: | + build-tool-depends: + hspec-discover:hspec-discover flags: static: - description: Pass -static/-pthread to ghc when linking the stack binary. + description: >- + When building the Stack executable, or the stack-integration-test + executable, pass the -static and -pthread flags to the linker used by GHC. manual: true default: false disable-git-info: - description: Disable compile-time inclusion of current git info in stack + description: >- + Disable inclusion of current Git information in the Stack executable when + it is built. manual: true default: false hide-dependency-versions: - description: "Hides dependency versions from 'stack --version', used only by building - Stack and the default 'stack.yaml'. Note to packagers/distributors: - DO NOT OVERRIDE THIS FLAG IF YOU ARE BUILDING 'stack' ANY OTHER WAY - (e.g. using cabal or from Hackage), as it makes debugging support - requests more difficult." + description: >- + Hides dependency versions from 'stack --version'. Used only when building + a Stack executable for official release. 
Note to packagers/distributors: + DO NOT OVERRIDE THIS FLAG IF YOU ARE BUILDING STACK ANY OTHER WAY (e.g. + using Cabal or from Hackage), as it makes debugging support requests more + difficult. manual: true default: false integration-tests: - description: Run the integration test suite + description: Run the integration test suite. manual: true default: false supported-build: - description: "If false, causes 'stack --version' to issue a warning about - the build being unsupported. Should be True only if building with - Stack and the default 'stack.yaml'. Note to packagers/distributors: - DO NOT OVERRIDE THIS FLAG IF YOU ARE BUILDING 'stack' ANY OTHER WAY - (e.g. using cabal or from Hackage), as it makes debugging support - requests more difficult." + description: >- + If false, causes 'stack --version' to issue a warning about the build + being unsupported. Used only when building a Stack executable for official + release. Note to packagers/distributors: DO NOT OVERRIDE THIS FLAG IF YOU + ARE BUILDING STACK ANY OTHER WAY (e.g. using Cabal or from Hackage), as it + makes debugging support requests more difficult. manual: true default: false developer-mode: - description: "By default, should extra developer information be output?" + description: >- + By default, output extra developer information. + manual: true + default: false + disable-stack-upload: + description: >- + For use only during development and debugging. Disable 'stack upload' so + that it does not make HTTP requests. Stack will output information about + the HTTP request(s) that it would have made if the command was enabled. 
manual: true default: false diff --git a/src/Control/Concurrent/Execute.hs b/src/Control/Concurrent/Execute.hs index 82bc5c55e9..db8fd9a42c 100644 --- a/src/Control/Concurrent/Execute.hs +++ b/src/Control/Concurrent/Execute.hs @@ -1,154 +1,199 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE RecordWildCards #-} --- Concurrent execution with dependencies. Types currently hard-coded for needs --- of stack, but could be generalized easily. +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Control.Concurrent.Execute +Description : Concurrent execution with dependencies. +License : BSD-3-Clause + +Concurrent execution with dependencies. Types currently hard-coded for needs of +stack, but could be generalized easily. +-} + module Control.Concurrent.Execute ( ActionType (..) , ActionId (..) , ActionContext (..) , Action (..) - , Concurrency(..) + , Concurrency (..) , runActions ) where -import Control.Concurrent.STM (retry) +import Control.Concurrent.STM ( check ) import Stack.Prelude -import Data.List (sortBy) -import qualified Data.Set as Set +import Data.List ( sortBy ) +import qualified Data.Set as Set + +-- | Type representing exceptions thrown by functions exported by the +-- "Control.Concurrent.Execute" module. +data ExecuteException + = InconsistentDependenciesBug + deriving Show + +instance Exception ExecuteException where + displayException InconsistentDependenciesBug = bugReport "[S-2816]" + "Inconsistent dependencies were discovered while executing your build \ + \plan." +-- | Type representing types of Stack build actions. data ActionType - = ATBuild - -- ^ Action for building a package's library and executables. If - -- 'taskAllInOne' is 'True', then this will also build benchmarks - -- and tests. It is 'False' when then library's benchmarks or - -- test-suites have cyclic dependencies. 
- | ATBuildFinal - -- ^ Task for building the package's benchmarks and test-suites. - -- Requires that the library was already built. - | ATRunTests - -- ^ Task for running the package's test-suites. - | ATRunBenchmarks - -- ^ Task for running the package's benchmarks. - deriving (Show, Eq, Ord) -data ActionId = ActionId !PackageIdentifier !ActionType - deriving (Show, Eq, Ord) + = ATBuild + -- ^ Action for building a package's library and executables. If + -- 'Stack.Types.Build.Task.allInOne' is 'True', then this will also build + -- benchmarks and tests. It is 'False' when the library's benchmarks or + -- test-suites have cyclic dependencies. + | ATBuildFinal + -- ^ Task for building the package's benchmarks and test-suites. Requires + -- that the library was already built. + | ATRunTests + -- ^ Task for running the package's test-suites. + | ATRunBenchmarks + -- ^ Task for running the package's benchmarks. + deriving (Show, Eq, Ord) + +-- | Types representing the unique ids of Stack build actions. +data ActionId + = ActionId !PackageIdentifier !ActionType + deriving (Eq, Ord, Show) + +-- | Type representing Stack build actions. data Action = Action - { actionId :: !ActionId - , actionDeps :: !(Set ActionId) - , actionDo :: !(ActionContext -> IO ()) - , actionConcurrency :: !Concurrency - } + { actionId :: !ActionId + -- ^ The action's unique id. + , actionDeps :: !(Set ActionId) + -- ^ Actions on which this action depends. + , action :: !(ActionContext -> IO ()) + -- ^ The action's 'IO' action, given a context. + , concurrency :: !Concurrency + -- ^ Whether this action may be run concurrently with others. + } -data Concurrency = ConcurrencyAllowed | ConcurrencyDisallowed - deriving (Eq) +-- | Type representing permissions for actions to be run concurrently with +-- others. 
+data Concurrency + = ConcurrencyAllowed + | ConcurrencyDisallowed + deriving Eq data ActionContext = ActionContext - { acRemaining :: !(Set ActionId) - -- ^ Does not include the current action - , acDownstream :: [Action] - -- ^ Actions which depend on the current action - , acConcurrency :: !Concurrency - -- ^ Whether this action may be run concurrently with others - } + { remaining :: !(Set ActionId) + -- ^ Does not include the current action. + , downstream :: [Action] + -- ^ Actions which depend on the current action. + , concurrency :: !Concurrency + -- ^ Whether this action may be run concurrently with others. + } data ExecuteState = ExecuteState - { esActions :: TVar [Action] - , esExceptions :: TVar [SomeException] - , esInAction :: TVar (Set ActionId) - , esCompleted :: TVar Int - , esKeepGoing :: Bool - } + { actions :: TVar [Action] + , exceptions :: TVar [SomeException] + , inAction :: TVar (Set ActionId) + , completed :: TVar Int + , keepGoing :: Bool + } -data ExecuteException - = InconsistentDependencies - deriving Typeable -instance Exception ExecuteException - -instance Show ExecuteException where - show InconsistentDependencies = - "Inconsistent dependencies were discovered while executing your build plan. This should never happen, please report it as a bug to the stack team." 
- -runActions :: Int -- ^ threads - -> Bool -- ^ keep going after one task has failed - -> [Action] - -> (TVar Int -> TVar (Set ActionId) -> IO ()) -- ^ progress updated - -> IO [SomeException] -runActions threads keepGoing actions0 withProgress = do - es <- ExecuteState - <$> newTVarIO (sortActions actions0) - <*> newTVarIO [] - <*> newTVarIO Set.empty - <*> newTVarIO 0 - <*> pure keepGoing - _ <- async $ withProgress (esCompleted es) (esInAction es) - if threads <= 1 - then runActions' es - else replicateConcurrently_ threads $ runActions' es - readTVarIO $ esExceptions es +runActions :: + Int -- ^ threads + -> Bool -- ^ keep going after one task has failed + -> [Action] + -> (TVar Int -> TVar (Set ActionId) -> IO ()) -- ^ progress updated + -> IO [SomeException] +runActions threads keepGoing actions withProgress = do + es <- ExecuteState + <$> newTVarIO (sortActions actions) -- esActions + <*> newTVarIO [] -- esExceptions + <*> newTVarIO Set.empty -- esInAction + <*> newTVarIO 0 -- esCompleted + <*> pure keepGoing -- esKeepGoing + _ <- async $ withProgress es.completed es.inAction + if threads <= 1 + then runActions' es + else replicateConcurrently_ threads $ runActions' es + readTVarIO es.exceptions -- | Sort actions such that those that can't be run concurrently are at -- the end. sortActions :: [Action] -> [Action] -sortActions = sortBy (compareConcurrency `on` actionConcurrency) - where - -- NOTE: Could derive Ord. However, I like to make this explicit so - -- that changes to the datatype must consider how it's affecting - -- this. - compareConcurrency ConcurrencyAllowed ConcurrencyDisallowed = LT - compareConcurrency ConcurrencyDisallowed ConcurrencyAllowed = GT - compareConcurrency _ _ = EQ +sortActions = sortBy (compareConcurrency `on` (.concurrency)) + where + -- NOTE: Could derive Ord. However, I like to make this explicit so + -- that changes to the datatype must consider how it's affecting + -- this. 
+ compareConcurrency ConcurrencyAllowed ConcurrencyDisallowed = LT + compareConcurrency ConcurrencyDisallowed ConcurrencyAllowed = GT + compareConcurrency _ _ = EQ runActions' :: ExecuteState -> IO () -runActions' ExecuteState {..} = - loop - where - breakOnErrs inner = do - errs <- readTVar esExceptions - if null errs || esKeepGoing - then inner - else return $ return () - withActions inner = do - as <- readTVar esActions - if null as - then return $ return () - else inner as - loop = join $ atomically $ breakOnErrs $ withActions $ \as -> - case break (Set.null . actionDeps) as of - (_, []) -> do - inAction <- readTVar esInAction - if Set.null inAction - then do - unless esKeepGoing $ - modifyTVar esExceptions (toException InconsistentDependencies:) - return $ return () - else retry - (xs, action:ys) -> do - inAction <- readTVar esInAction - case actionConcurrency action of - ConcurrencyAllowed -> return () - ConcurrencyDisallowed -> unless (Set.null inAction) retry - let as' = xs ++ ys - remaining = Set.union - (Set.fromList $ map actionId as') - inAction - writeTVar esActions as' - modifyTVar esInAction (Set.insert $ actionId action) - return $ mask $ \restore -> do - eres <- try $ restore $ actionDo action ActionContext - { acRemaining = remaining - , acDownstream = downstreamActions (actionId action) as' - , acConcurrency = actionConcurrency action - } - atomically $ do - modifyTVar esInAction (Set.delete $ actionId action) - modifyTVar esCompleted (+1) - case eres of - Left err -> modifyTVar esExceptions (err:) - Right () -> - let dropDep a = a { actionDeps = Set.delete (actionId action) $ actionDeps a } - in modifyTVar esActions $ map dropDep - restore loop - -downstreamActions :: ActionId -> [Action] -> [Action] -downstreamActions aid = filter (\a -> aid `Set.member` actionDeps a) +runActions' es = loop + where + loop :: IO () + loop = join $ atomically $ breakOnErrs $ withActions processActions + + breakOnErrs :: STM (IO ()) -> STM (IO ()) + breakOnErrs 
inner = do + errs <- readTVar es.exceptions + if null errs || es.keepGoing + then inner + else doNothing + + withActions :: ([Action] -> STM (IO ())) -> STM (IO ()) + withActions inner = do + actions <- readTVar es.actions + if null actions + then doNothing + else inner actions + + processActions :: [Action] -> STM (IO ()) + processActions actions = do + inAction <- readTVar es.inAction + case break (Set.null . (.actionDeps)) actions of + (_, []) -> do + check (Set.null inAction) + unless es.keepGoing $ + modifyTVar es.exceptions (toException InconsistentDependenciesBug:) + doNothing + (xs, action:ys) -> processAction inAction (xs ++ ys) action + + processAction :: Set ActionId -> [Action] -> Action -> STM (IO ()) + processAction inAction otherActions action = do + let concurrency = action.concurrency + unless (concurrency == ConcurrencyAllowed) $ + check (Set.null inAction) + let action' = action.actionId + otherActions' = Set.fromList $ map (.actionId) otherActions + remaining = Set.union otherActions' inAction + downstream = downstreamActions action' otherActions + actionContext = ActionContext + { remaining + , downstream + , concurrency + } + writeTVar es.actions otherActions + modifyTVar es.inAction (Set.insert action') + pure $ do + mask $ \restore -> do + eres <- try $ restore $ action.action actionContext + atomically $ do + modifyTVar es.inAction (Set.delete action') + modifyTVar es.completed (+1) + case eres of + Left err -> modifyTVar es.exceptions (err:) + Right () -> modifyTVar es.actions $ map (dropDep action') + loop + + -- | Filter a list of actions to include only those that depend on the given + -- action. + downstreamActions :: ActionId -> [Action] -> [Action] + downstreamActions aid = filter (\a -> aid `Set.member` a.actionDeps) + + -- | Given two actions (the first specified by its id) yield an action + -- equivalent to the second but excluding any dependency on the first action. 
+ dropDep :: ActionId -> Action -> Action + dropDep action' action = + action { actionDeps = Set.delete action' action.actionDeps } + + -- | @IO ()@ lifted into 'STM'. + doNothing :: STM (IO ()) + doNothing = pure $ pure () diff --git a/src/Data/Attoparsec/Args.hs b/src/Data/Attoparsec/Args.hs index 9fcdad5e66..b5a72fb458 100644 --- a/src/Data/Attoparsec/Args.hs +++ b/src/Data/Attoparsec/Args.hs @@ -1,24 +1,31 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} --- | Parsing of stack command line arguments + +{-| +Module : Data.Attoparsec.Args +Description : Parsing of Stack command line arguments. +License : BSD-3-Clause + +Parsing of Stack command line arguments. +-} module Data.Attoparsec.Args - ( EscapingMode(..) - , argsParser - , parseArgs - , parseArgsFromString - ) where + ( EscapingMode (..) + , argsParser + , parseArgs + , parseArgsFromString + ) where -import Data.Attoparsec.Text (()) +import Data.Attoparsec.Text ( () ) import qualified Data.Attoparsec.Text as P import qualified Data.Text as T import Stack.Prelude -- | Mode for parsing escape characters. data EscapingMode - = Escaping - | NoEscaping - deriving (Show,Eq,Enum) + = Escaping + | NoEscaping + deriving (Enum, Eq, Show) -- | Parse arguments using 'argsParser'. parseArgs :: EscapingMode -> Text -> Either String [String] @@ -28,17 +35,24 @@ parseArgs mode = P.parseOnly (argsParser mode) parseArgsFromString :: EscapingMode -> String -> Either String [String] parseArgsFromString mode = P.parseOnly (argsParser mode) . T.pack --- | A basic argument parser. It supports space-separated text, and --- string quotation with identity escaping: \x -> x. +-- | A basic argument parser. It supports space-separated text, and string +-- quotation with identity escaping: \x -> x. 
argsParser :: EscapingMode -> P.Parser [String] -argsParser mode = many (P.skipSpace *> (quoted <|> unquoted)) <* - P.skipSpace <* (P.endOfInput "unterminated string") - where - unquoted = P.many1 naked - quoted = P.char '"' *> string <* P.char '"' - string = many (case mode of - Escaping -> escaped <|> nonquote - NoEscaping -> nonquote) - escaped = P.char '\\' *> P.anyChar - nonquote = P.satisfy (/= '"') - naked = P.satisfy (not . flip elem ("\" " :: String)) +argsParser mode = + many + ( P.skipSpace + *> (quoted <|> unquoted) + ) + <* P.skipSpace + <* (P.endOfInput "unterminated string") + where + quoted = P.char '"' *> str <* P.char '"' + unquoted = P.many1 naked + str = many + ( case mode of + Escaping -> escaped <|> nonquote + NoEscaping -> nonquote + ) + escaped = P.char '\\' *> P.anyChar + nonquote = P.satisfy (/= '"') + naked = P.satisfy (not . flip elem ("\" " :: String)) diff --git a/src/Data/Attoparsec/Combinators.hs b/src/Data/Attoparsec/Combinators.hs index 2c69761351..a3ab042221 100644 --- a/src/Data/Attoparsec/Combinators.hs +++ b/src/Data/Attoparsec/Combinators.hs @@ -1,24 +1,38 @@ {-# LANGUAGE NoImplicitPrelude #-} --- | More readable combinators for writing parsers. -module Data.Attoparsec.Combinators where +{-| +Module : Data.Attoparsec.Combinators +Description : More readable combinators for writing parsers. +License : BSD-3-Clause -import Stack.Prelude +More readable combinators for writing parsers. +-} + +module Data.Attoparsec.Combinators + ( alternating + , appending + , concating + , pured + ) where + +import Stack.Prelude -- | Concatenate two parsers. -appending :: (Applicative f,Semigroup a) - => f a -> f a -> f a +appending :: + (Applicative f, Semigroup a) + => f a + -> f a + -> f a appending a b = (<>) <$> a <*> b -- | Alternative parsers. -alternating :: Alternative f - => f a -> f a -> f a +alternating :: Alternative f => f a -> f a -> f a alternating a b = a <|> b -- | Pure something. 
-pured :: (Applicative g,Applicative f) => g a -> g (f a) +pured :: (Applicative g, Applicative f) => g a -> g (f a) pured = fmap pure --- | Concatting the result of an action. -concating :: (Monoid m,Applicative f) => f [m] -> f m +-- | Concating the result of an action. +concating :: (Monoid m, Applicative f) => f [m] -> f m concating = fmap mconcat diff --git a/src/Data/Attoparsec/Interpreter.hs b/src/Data/Attoparsec/Interpreter.hs index b679ff91d5..93087ae0c8 100644 --- a/src/Data/Attoparsec/Interpreter.hs +++ b/src/Data/Attoparsec/Interpreter.hs @@ -1,152 +1,162 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -{- | This module implements parsing of additional arguments embedded in a - comment when stack is invoked as a script interpreter - ===Specifying arguments in script interpreter mode - @/stack/@ can execute a Haskell source file using @/runghc/@ and if required - it can also install and setup the compiler and any package dependencies - automatically. +{-| +Module : Data.Attoparsec.Interpreter +License : BSD-3-Clause - For using a Haskell source file as an executable script on a Unix like OS, - the first line of the file must specify @stack@ as the interpreter using a - shebang directive e.g. +This module implements parsing of additional arguments embedded in a comment +when Stack is invoked as a script interpreter - > #!/usr/bin/env stack +=== Specifying arguments in script interpreter mode - Additional arguments can be specified in a haskell comment following the - @#!@ line. The contents inside the comment must be a single valid stack - command line, starting with @stack@ as the command and followed by the - options to use for executing this file. +@/stack/@ can execute a Haskell source file using @/runghc/@ and if required it +can also install and setup the compiler and any package dependencies +automatically. - The comment must be on the line immediately following the @#!@ line. 
The - comment must start in the first column of the line. When using a block style - comment the command can be split on multiple lines. +For using a Haskell source file as an executable script on a Unix like OS, the +first line of the file must specify @stack@ as the interpreter using a shebang +directive e.g. - Here is an example of a single line comment: +> #!/usr/bin/env stack - > #!/usr/bin/env stack - > -- stack --resolver lts-3.14 --install-ghc runghc --package random +Additional arguments can be specified in a haskell comment following the @#!@ +line. The contents inside the comment must be a single valid stack command line, +starting with @stack@ as the command and followed by the options to use for +executing this file. - Here is an example of a multi line block comment: +The comment must be on the line immediately following the @#!@ line. The +comment must start in the first column of the line. When using a block style +comment the command can be split on multiple lines. + +Here is an example of a single line comment: + +> #!/usr/bin/env stack +> -- stack --snapshot lts-3.14 --install-ghc runghc --package random + +Here is an example of a multi line block comment: @ #!\/usr\/bin\/env stack {\- stack - --resolver lts-3.14 + --snapshot lts-3.14 --install-ghc runghc --package random -\} @ - When the @#!@ line is not present, the file can still be executed - using @stack \@ command if the file starts with a valid stack - interpreter comment. This can be used to execute the file on Windows for - example. +When the @#!@ line is not present, the file can still be executed using +@stack \@ command if the file starts with a valid stack interpreter +comment. This can be used to execute the file on Windows for example. - Nested block comments are not supported. +Nested block comments are not supported. 
-} module Data.Attoparsec.Interpreter - ( interpreterArgsParser -- for unit tests - , getInterpreterArgs - ) where + ( interpreterArgsParser -- for unit tests + , getInterpreterArgs + ) where -import Data.Attoparsec.Args -import Data.Attoparsec.Text (()) +import Data.Attoparsec.Args ( EscapingMode (..), argsParser ) +import Data.Attoparsec.Text ( () ) import qualified Data.Attoparsec.Text as P -import Data.Char (isSpace) -import Conduit -import Data.Conduit.Attoparsec -import Data.List (intercalate) -import Data.Text (pack) +import Data.Char ( isSpace ) +import Conduit ( decodeUtf8C, withSourceFile ) +import Data.Conduit.Attoparsec ( ParseError (..), Position (..), sinkParserEither ) +import Data.List ( intercalate ) +import Data.List.NonEmpty ( singleton ) +import Data.Text ( pack ) +import RIO.NonEmpty ( nonEmpty ) +import Stack.Constants ( stackProgName ) import Stack.Prelude -import System.FilePath (takeExtension) -import System.IO (hPutStrLn) +import System.FilePath ( takeExtension ) +import System.IO ( hPutStrLn ) --- | Parser to extract the stack command line embedded inside a comment +-- | Parser to extract the Stack command line embedded inside a comment -- after validating the placement and formatting rules for a valid -- interpreter specification. interpreterArgsParser :: Bool -> String -> P.Parser String interpreterArgsParser isLiterate progName = P.option "" sheBangLine *> interpreterComment - where - sheBangLine = P.string "#!" 
- *> P.manyTill P.anyChar P.endOfLine - - commentStart psr = (psr (progName ++ " options comment")) - *> P.skipSpace - *> (P.string (pack progName) show progName) - - -- Treat newlines as spaces inside the block comment - anyCharNormalizeSpace = let normalizeSpace c = if isSpace c then ' ' else c - in P.satisfyWith normalizeSpace $ const True - - comment start end = commentStart start - *> ((end >> return "") - <|> (P.space *> (P.manyTill anyCharNormalizeSpace end "-}"))) - - horizontalSpace = P.satisfy P.isHorizontalSpace - - lineComment = comment "--" (P.endOfLine <|> P.endOfInput) - literateLineComment = comment - (">" *> horizontalSpace *> "--") - (P.endOfLine <|> P.endOfInput) - blockComment = comment "{-" (P.string "-}") - - literateBlockComment = - (">" *> horizontalSpace *> "{-") - *> P.skipMany (("" <$ horizontalSpace) <|> (P.endOfLine *> ">")) - *> (P.string (pack progName) progName) - *> P.manyTill' (P.satisfy (not . P.isEndOfLine) - <|> (' ' <$ (P.endOfLine *> ">" ">"))) "-}" - - interpreterComment = if isLiterate - then literateLineComment <|> literateBlockComment - else lineComment <|> blockComment - --- | Extract stack arguments from a correctly placed and correctly formatted + where + sheBangLine = P.string "#!" 
+ *> P.manyTill P.anyChar P.endOfLine + + commentStart :: P.Parser Text -> P.Parser Text + commentStart psr = (psr (progName ++ " options comment")) + *> P.skipSpace + *> (P.string (pack progName) show progName) + + -- Treat newlines as spaces inside the block comment + anyCharNormalizeSpace = let normalizeSpace c = if isSpace c then ' ' else c + in P.satisfyWith normalizeSpace $ const True + + comment :: P.Parser Text -> P.Parser a -> P.Parser String + comment start end = commentStart start + *> ((end >> pure "") + <|> (P.space *> (P.manyTill anyCharNormalizeSpace end "-}"))) + + horizontalSpace = P.satisfy P.isHorizontalSpace + + lineComment = comment "--" (P.endOfLine <|> P.endOfInput) + literateLineComment = comment + (">" *> horizontalSpace *> "--") + (P.endOfLine <|> P.endOfInput) + blockComment = comment "{-" (P.string "-}") + + literateBlockComment = + (">" *> horizontalSpace *> "{-") + *> P.skipMany (("" <$ horizontalSpace) <|> (P.endOfLine *> ">")) + *> (P.string (pack progName) progName) + *> P.manyTill' (P.satisfy (not . P.isEndOfLine) + <|> (' ' <$ (P.endOfLine *> ">" ">"))) "-}" + + interpreterComment = if isLiterate + then literateLineComment <|> literateBlockComment + else lineComment <|> blockComment + +-- | Extract Stack arguments from a correctly placed and correctly formatted -- comment when it is being used as an interpreter -getInterpreterArgs :: String -> IO [String] -getInterpreterArgs file = do - eArgStr <- withSourceFile file parseFile - case eArgStr of - Left err -> handleFailure $ decodeError err - Right str -> parseArgStr str - where - parseFile src = - runConduit - $ src - .| decodeUtf8C - .| sinkParserEither (interpreterArgsParser isLiterate stackProgName) - - isLiterate = takeExtension file == ".lhs" - - -- FIXME We should print anything only when explicit verbose mode is - -- specified by the user on command line. But currently the - -- implementation does not accept or parse any command line flags in - -- interpreter mode. 
We can only invoke the interpreter as - -- "stack " strictly without any options. - stackWarn s = hPutStrLn stderr $ stackProgName ++ ": WARNING! " ++ s - - handleFailure err = do - mapM_ stackWarn (lines err) - stackWarn "Missing or unusable stack options specification" - stackWarn "Using runghc without any additional stack options" - return ["runghc"] - - parseArgStr str = - case P.parseOnly (argsParser Escaping) (pack str) of - Left err -> handleFailure ("Error parsing command specified in the " - ++ "stack options comment: " ++ err) - Right [] -> handleFailure "Empty argument list in stack options comment" - Right args -> return args - - decodeError e = - case e of - ParseError ctxs _ (Position line col _) -> - if null ctxs - then "Parse error" - else ("Expecting " ++ intercalate " or " ctxs) - ++ " at line " ++ show line ++ ", column " ++ show col - DivergentParser -> "Divergent parser" +getInterpreterArgs :: String -> IO (NonEmpty String) +getInterpreterArgs file = withSourceFile file parseFile >>= \case + Left err -> handleFailure $ decodeError err + Right str -> parseArgStr str + where + parseFile src = + runConduit + $ src + .| decodeUtf8C + .| sinkParserEither (interpreterArgsParser isLiterate stackProgName) + + isLiterate = takeExtension file == ".lhs" + + -- FIXME We should print anything only when explicit verbose mode is + -- specified by the user on command line. But currently the + -- implementation does not accept or parse any command line flags in + -- interpreter mode. We can only invoke the interpreter as + -- "stack " strictly without any options. + stackWarn s = hPutStrLn stderr $ stackProgName ++ ": WARNING! 
" ++ s + + handleFailure err = do + mapM_ stackWarn (lines err) + stackWarn "Missing or unusable Stack options specification" + stackWarn "Using runghc without any additional Stack options" + pure $ singleton "runghc" + + parseArgStr str = + case P.parseOnly (argsParser Escaping) (pack str) of + Left err -> handleFailure ("Error parsing command specified in the " + ++ "Stack options comment: " ++ err) + Right args -> maybe + (handleFailure "Empty argument list in Stack options comment") + pure + (nonEmpty args) + + decodeError e = + case e of + ParseError ctxs _ (Position l col _) -> + if null ctxs + then "Parse error" + else ("Expecting " ++ intercalate " or " ctxs) + ++ " at line " ++ show l ++ ", column " ++ show col + DivergentParser -> "Divergent parser" diff --git a/src/Data/Monoid/Map.hs b/src/Data/Monoid/Map.hs index 15bb9acb2b..e7dd7cb31d 100644 --- a/src/Data/Monoid/Map.hs +++ b/src/Data/Monoid/Map.hs @@ -1,18 +1,24 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -module Data.Monoid.Map where +{-| +Module : Data.Monoid.Map +License : BSD-3-Clause +-} + +module Data.Monoid.Map + ( MonoidMap (..) + ) where import qualified Data.Map as M import Stack.Prelude --- | Utility newtype wrapper to make make Map's Monoid also use the +-- | Utility newtype wrapper to make Map's Monoid also use the -- element's Monoid. 
-newtype MonoidMap k a = MonoidMap (Map k a) - deriving (Eq, Ord, Read, Show, Generic, Functor) +newtype MonoidMap k a + = MonoidMap (Map k a) + deriving (Eq, Functor, Generic, Ord, Read, Show) instance (Ord k, Semigroup a) => Semigroup (MonoidMap k a) where - MonoidMap mp1 <> MonoidMap mp2 = MonoidMap (M.unionWith (<>) mp1 mp2) + MonoidMap mp1 <> MonoidMap mp2 = MonoidMap (M.unionWith (<>) mp1 mp2) instance (Ord k, Semigroup a) => Monoid (MonoidMap k a) where - mappend = (<>) - mempty = MonoidMap mempty + mappend = (<>) + mempty = MonoidMap mempty diff --git a/src/GHC/Utils/GhcPkg/Main/Compat.hs b/src/GHC/Utils/GhcPkg/Main/Compat.hs new file mode 100644 index 0000000000..26f2da47b4 --- /dev/null +++ b/src/GHC/Utils/GhcPkg/Main/Compat.hs @@ -0,0 +1,536 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : GHC.Utils.GhcPkg.Main.Compat +License : BSD-3-Clause + +This module is based on GHC's utils\ghc-pkg\Main.hs at +commit f66fc15f2e6849125074bcfeb44334a663323ca6 (see GHC merge request !11142), +with: + +* changeDBDir' does not perform an effective @ghc-pkg recache@, +* the cache is not used, +* consistency checks are not performed, +* use Stack program name, +* use "Stack.Prelude" rather than "Prelude", +* use t'RIO' @env@ monad, +* use well-typed representations of paths from the @path@ package, +* add pretty messages and exceptions, +* redundant code deleted, +* Hlint applied, and +* explicit import lists. + +The version of the ghc-pkg executable supplied with GHCs published before +28 August 2023 does not efficiently bulk unregister. This module exports a +function that does efficiently bulk unregister. +-} + +module GHC.Utils.GhcPkg.Main.Compat + ( ghcPkgUnregisterForce + ) where + +----------------------------------------------------------------------------- +-- +-- (c) The University of Glasgow 2004-2009. 
+-- +-- Package management tool +-- +----------------------------------------------------------------------------- + +import qualified Data.Foldable as F +import qualified Data.Traversable as F +import Distribution.InstalledPackageInfo as Cabal +import Distribution.Package ( UnitId, mungedId ) +import qualified Distribution.Parsec as Cabal +import Distribution.Text ( display ) +import Distribution.Version ( nullVersion ) +import GHC.IO.Exception (IOErrorType(InappropriateType)) +import qualified GHC.Unit.Database as GhcPkg +import Path + ( SomeBase (..), fileExtension, mapSomeBase, parseRelFile + , parseSomeDir, prjSomeBase + ) +import qualified Path as P +import Path.IO + ( createDirIfMissing, doesDirExist, listDir, removeFile ) +import qualified RIO.ByteString as BS +import RIO.List ( isPrefixOf, stripSuffix ) +import RIO.NonEmpty ( nonEmpty ) +import qualified RIO.NonEmpty as NE +import Stack.Constants ( relFilePackageCache ) +import Stack.Prelude hiding ( display ) +import Stack.Types.GhcPkgExe + ( GhcPkgPrettyException (..), GlobPackageIdentifier (..) + , PackageArg (..) + ) +import System.Environment ( getEnv ) +import System.FilePath as FilePath +import System.IO ( readFile ) +import System.IO.Error + ( ioeGetErrorType, ioError, isDoesNotExistError ) + +-- | Function equivalent to: +-- +-- > ghc-pkg --no-user-package-db --package-db= unregister [--ipid]

+-- +ghcPkgUnregisterForce :: + HasTerm env + => Path Abs Dir -- ^ Path to the global package database + -> Path Abs Dir -- ^ Path to the package database + -> Bool -- ^ Apply ghc-pkg's --ipid, --unit-id flag? + -> [String] -- ^ Packages to unregister + -> RIO env () +ghcPkgUnregisterForce globalDb pkgDb hasIpid pkgarg_strs = do + pkgargs <- forM pkgarg_strs $ readPackageArg as_arg + prettyDebugL + $ flow "Unregistering from" + : (pretty pkgDb <> ":") + : mkNarrativeList (Just Current) False + (map (fromString . show) pkgargs :: [StyleDoc]) + unregisterPackages globalDb pkgargs pkgDb + where + as_arg = if hasIpid then AsUnitId else AsDefault + +-- ----------------------------------------------------------------------------- +-- Do the business + +-- | Enum flag representing argument type +data AsPackageArg + = AsUnitId + | AsDefault + +parseCheck :: Cabal.Parsec a => String -> String -> RIO env a +parseCheck str what = + case Cabal.eitherParsec str of + Left e -> prettyThrowIO $ CannotParse str what e + Right x -> pure x + +readGlobPkgId :: String -> RIO env GlobPackageIdentifier +readGlobPkgId str = case stripSuffix "-*" str of + Nothing -> + ExactPackageIdentifier <$> parseCheck str "package identifier (exact)" + Just str' -> + GlobPackageIdentifier <$> parseCheck str' "package identifier (glob)" + +readPackageArg :: AsPackageArg -> String -> RIO env PackageArg +readPackageArg AsUnitId str = IUId <$> parseCheck str "installed package id" +readPackageArg AsDefault str = Id <$> readGlobPkgId str + +-- ----------------------------------------------------------------------------- +-- Package databases + +data PackageDB (mode :: GhcPkg.DbMode) = PackageDB + { location :: !(SomeBase Dir) + -- We only need possibly-relative package db location. The relative + -- location is used as an identifier for the db, so it is important we do + -- not modify it. 
+ , packageDbLock :: !(GhcPkg.DbOpenMode mode GhcPkg.PackageDbLock) + -- If package db is open in read write mode, we keep its lock around for + -- transactional updates. + , packages :: [InstalledPackageInfo] + } + +-- | A stack of package databases. Convention: head is the topmost in the stack. +type PackageDBStack = [PackageDB 'GhcPkg.DbReadOnly] + +-- | Selector for picking the right package DB to modify as \'modify\' changes +-- the first database that contains a specific package. +newtype DbModifySelector = ContainsPkg PackageArg + +getPkgDatabases :: + forall env. HasTerm env + => Path Abs Dir + -- ^ Path to the global package database. + -> PackageArg + -> Path Abs Dir + -- ^ Path to the package database. + -> RIO + env + ( PackageDBStack + -- the real package DB stack: [global,user] ++ DBs specified on the + -- command line with -f. + , GhcPkg.DbOpenMode GhcPkg.DbReadWrite (PackageDB GhcPkg.DbReadWrite) + -- which one to modify, if any + , PackageDBStack + -- the package DBs specified on the command line, or [global,user] + -- otherwise. This is used as the list of package DBs for commands + -- that just read the DB, such as 'list'. + ) +getPkgDatabases globalDb pkgarg pkgDb = do + -- Second we determine the location of the global package config. On Windows, + -- this is found relative to the ghc-pkg.exe binary, whereas on Unix the + -- location is passed to the binary using the --global-package-db flag by the + -- wrapper script. 
+ let sys_databases = [Abs globalDb] + e_pkg_path <- tryIO (liftIO $ System.Environment.getEnv "GHC_PACKAGE_PATH") + let env_stack = + case nonEmpty <$> e_pkg_path of + Left _ -> sys_databases + Right Nothing -> [] + Right (Just path) + | isSearchPathSeparator (NE.last path) + -> mapMaybe parseSomeDir (splitSearchPath (NE.init path)) <> sys_databases + | otherwise + -> mapMaybe parseSomeDir (splitSearchPath $ NE.toList path) + + -- -f flags on the command line add to the database stack, unless any of them + -- are present in the stack already. + let final_stack = [Abs pkgDb | Abs pkgDb `notElem` env_stack] <> env_stack + + (db_stack, db_to_operate_on) <- getDatabases pkgDb final_stack + + let flag_db_stack = [ db | db <- db_stack, db.location == Abs pkgDb ] + + prettyDebugL + $ flow "Db stack:" + : map (pretty . (.location)) db_stack + F.forM_ db_to_operate_on $ \db -> + prettyDebugL + [ "Modifying:" + , pretty db.location + ] + prettyDebugL + $ flow "Flag db stack:" + : map (pretty . (.location)) flag_db_stack + + pure (db_stack, db_to_operate_on, flag_db_stack) + where + getDatabases flag_db_name final_stack = do + -- The package db we open in read write mode is the first one included in + -- flag_db_names that contains specified package. Therefore we need to + -- open each one in read/write mode first and decide whether it's for + -- modification based on its contents. + (db_stack, mto_modify) <- stateSequence Nothing + [ \case + to_modify@(Just _) -> (, to_modify) <$> readDatabase db_path + Nothing -> if db_path /= Abs flag_db_name + then (, Nothing) <$> readDatabase db_path + else do + let hasPkg :: PackageDB mode -> Bool + hasPkg = not . null . findPackage pkgarg . 
(.packages) + + openRo (e::IOException) = do + db <- readDatabase db_path + if hasPkg db + then + prettyThrowIO $ CannotOpenDBForModification db_path e + else pure (db, Nothing) + + -- If we fail to open the database in read/write mode, we need + -- to check if it's for modification first before throwing an + -- error, so we attempt to open it in read only mode. + handle openRo $ do + db <- readParseDatabase + (GhcPkg.DbOpenReadWrite $ ContainsPkg pkgarg) db_path + let ro_db = db { packageDbLock = GhcPkg.DbOpenReadOnly } + if hasPkg db + then pure (ro_db, Just db) + else do + -- If the database is not for modification after all, + -- drop the write lock as we are already finished with + -- the database. + case db.packageDbLock of + GhcPkg.DbOpenReadWrite lock -> + liftIO $ GhcPkg.unlockPackageDb lock + pure (ro_db, Nothing) + | db_path <- final_stack ] + + to_modify <- case mto_modify of + Just db -> pure db + Nothing -> cannotFindPackage pkgarg Nothing + + pure (db_stack, GhcPkg.DbOpenReadWrite to_modify) + where + -- Parse package db in read-only mode. + readDatabase :: SomeBase Dir -> RIO env (PackageDB 'GhcPkg.DbReadOnly) + readDatabase = readParseDatabase GhcPkg.DbOpenReadOnly + + stateSequence :: Monad m => s -> [s -> m (a, s)] -> m ([a], s) + stateSequence s [] = pure ([], s) + stateSequence s (m:ms) = do + (a, s') <- m s + (as, s'') <- stateSequence s' ms + pure (a : as, s'') + +readParseDatabase :: + forall mode t env. 
HasTerm env + => GhcPkg.DbOpenMode mode t + -> SomeBase Dir + -> RIO env (PackageDB mode) +readParseDatabase mode path = tryIO (prjSomeBase listDir path) >>= \case + Left err + | ioeGetErrorType err == InappropriateType -> + -- We provide a limited degree of backwards compatibility for + -- old single-file style db: + tryReadParseOldFileStyleDatabase mode path >>= \case + Just db -> pure db + Nothing -> prettyThrowIO $ SingleFileDBUnsupported path + | otherwise -> liftIO $ ioError err + Right (_, fs) -> ignore_cache + where + confs = filter isConf fs + + isConf :: Path Abs File -> Bool + isConf f = case fileExtension f of + Nothing -> False + Just ext -> ext == ".conf" + + ignore_cache :: RIO env (PackageDB mode) + ignore_cache = do + -- If we're opening for modification, we need to acquire a lock even if + -- we don't open the cache now, because we are going to modify it later. + lock <- liftIO $ + F.mapM (const $ GhcPkg.lockPackageDb (prjSomeBase toFilePath cache)) mode + pkgs <- mapM parseSingletonPackageConf confs + mkPackageDB pkgs lock + where + cache = mapSomeBase (P. relFilePackageCache) path + + mkPackageDB :: + [InstalledPackageInfo] + -> GhcPkg.DbOpenMode mode GhcPkg.PackageDbLock + -> RIO env (PackageDB mode) + mkPackageDB pkgs lock = + pure PackageDB + { location = path + , packageDbLock = lock + , packages = pkgs + } + +parseSingletonPackageConf :: + HasTerm env + => Path Abs File + -> RIO env InstalledPackageInfo +parseSingletonPackageConf file = do + prettyDebugL + [ flow "Reading package config:" + , pretty file + ] + BS.readFile (toFilePath file) >>= fmap fst . parsePackageInfo + +-- ----------------------------------------------------------------------------- +-- Workaround for old single-file style package dbs + +-- Single-file style package dbs have been deprecated for some time, but +-- it turns out that Cabal was using them in one place. So this code is for a +-- workaround to allow older Cabal versions to use this newer ghc. 
+ +-- We check if the file db contains just "[]" and if so, we look for a new +-- dir-style db in path.d/, ie in a dir next to the given file. +-- We cannot just replace the file with a new dir style since Cabal still +-- assumes it's a file and tries to overwrite with 'writeFile'. + +-- ghc itself also cooperates in this workaround + +tryReadParseOldFileStyleDatabase :: + HasTerm env + => GhcPkg.DbOpenMode mode t + -> SomeBase Dir + -> RIO env (Maybe (PackageDB mode)) +tryReadParseOldFileStyleDatabase mode path = do + -- assumes we've already established that path exists and is not a dir + content <- liftIO $ readFile (prjSomeBase toFilePath path) `catchIO` \_ -> pure "" + if take 2 content == "[]" + then do + path_dir <- adjustOldDatabasePath path + prettyWarnL + [ flow "Ignoring old file-style db and trying" + , pretty path_dir + ] + direxists <- prjSomeBase doesDirExist path_dir + if direxists + then do + db <- readParseDatabase mode path_dir + -- but pretend it was at the original location + pure $ Just db { location = path } + else do + lock <- F.forM mode $ \_ -> do + prjSomeBase (createDirIfMissing True) path_dir + liftIO $ GhcPkg.lockPackageDb $ + prjSomeBase (toFilePath . (P. 
relFilePackageCache)) path_dir + pure $ Just PackageDB + { location = path + , packageDbLock = lock + , packages = [] + } + + -- if the path is not a file, or is not an empty db then we fail + else pure Nothing + +adjustOldFileStylePackageDB :: PackageDB mode -> RIO env (PackageDB mode) +adjustOldFileStylePackageDB db = do + -- assumes we have not yet established if it's an old style or not + mcontent <- liftIO $ + fmap Just (readFile (prjSomeBase toFilePath db.location)) `catchIO` \_ -> pure Nothing + case fmap (take 2) mcontent of + -- it is an old style and empty db, so look for a dir kind in location.d/ + Just "[]" -> do + adjustedDatabasePath <- adjustOldDatabasePath db.location + pure db { location = adjustedDatabasePath } + -- it is old style but not empty, we have to bail + Just _ -> prettyThrowIO $ SingleFileDBUnsupported db.location + -- probably not old style, carry on as normal + Nothing -> pure db + +adjustOldDatabasePath :: SomeBase Dir -> RIO env (SomeBase Dir) +adjustOldDatabasePath = prjSomeBase addDToDirName + where + addDToDirName dir = do + let dirNameWithD = toFilePath dir <> ".d" + maybe + (prettyThrowIO $ CannotParseDirectoryWithDBug dirNameWithD) + pure + (parseSomeDir dirNameWithD) + +parsePackageInfo :: BS.ByteString -> RIO env (InstalledPackageInfo, [String]) +parsePackageInfo str = + case parseInstalledPackageInfo str of + Right (warnings, ok) -> pure (mungePackageInfo ok, ws) + where + ws = [ msg | msg <- warnings + , not ("Unrecognized field pkgroot" `isPrefixOf` msg) ] + Left err -> prettyThrowIO $ ParsePackageInfoExceptions (unlines (F.toList err)) + +mungePackageInfo :: InstalledPackageInfo -> InstalledPackageInfo +mungePackageInfo ipi = ipi + +-- ----------------------------------------------------------------------------- +-- Making changes to a package database + +newtype DBOp = RemovePackage InstalledPackageInfo + +changeNewDB :: + HasTerm env + => [DBOp] + -> PackageDB 'GhcPkg.DbReadWrite + -> RIO env () +changeNewDB cmds 
new_db = do + new_db' <- adjustOldFileStylePackageDB new_db + prjSomeBase (createDirIfMissing True) new_db'.location + changeDBDir' cmds new_db' + +changeDBDir' :: + HasTerm env + => [DBOp] + -> PackageDB 'GhcPkg.DbReadWrite + -> RIO env () +changeDBDir' cmds db = do + mapM_ do_cmd cmds + case db.packageDbLock of + GhcPkg.DbOpenReadWrite lock -> liftIO $ GhcPkg.unlockPackageDb lock + where + do_cmd (RemovePackage p) = do + let relFileConfName = display (installedUnitId p) <> ".conf" + relFileConf <- maybe + (prettyThrowIO $ CannotParseRelFileBug relFileConfName) + pure + (parseRelFile relFileConfName) + let file = mapSomeBase (P. relFileConf) db.location + prettyDebugL + [ "Removing" + , pretty file + ] + removeFileSafe file + +unregisterPackages :: + forall env. HasTerm env + => Path Abs Dir + -- ^ Path to the global package database. + -> [PackageArg] + -> Path Abs Dir + -- ^ Path to the package database. + -> RIO env () +unregisterPackages globalDb pkgargs pkgDb = do + pkgsByPkgDBs <- F.foldlM (getPkgsByPkgDBs []) [] pkgargs + forM_ pkgsByPkgDBs unregisterPackages' + where + -- Update a list of 'packages by package database' for a package. Assumes that + -- a package to be unregistered is in no more than one database. + getPkgsByPkgDBs :: + [(PackageDB GhcPkg.DbReadWrite, [UnitId])] + -- ^ List of considered 'packages by package database' + -> [(PackageDB GhcPkg.DbReadWrite, [UnitId])] + -- ^ List of to be considered 'packages by package database' + -> PackageArg + -- Package to update + -> RIO env [(PackageDB GhcPkg.DbReadWrite, [UnitId])] + -- No more 'packages by package database' to consider? We need to try to get + -- another package database. + getPkgsByPkgDBs pkgsByPkgDBs [] pkgarg = + getPkgDatabases globalDb pkgarg pkgDb >>= \case + (_, GhcPkg.DbOpenReadWrite (db :: PackageDB GhcPkg.DbReadWrite), _) -> do + pks <- do + let pkgs = db.packages + ps = findPackage pkgarg pkgs + -- This shouldn't happen if getPkgsByPkgDBs picks the DB correctly. 
+ when (null ps) $ cannotFindPackage pkgarg $ Just db + pure (map installedUnitId ps) + let pkgsByPkgDB = (db, pks) + pure (pkgsByPkgDB : pkgsByPkgDBs) + -- Consider the next 'packages by package database' in the list of ones to + -- consider. + getPkgsByPkgDBs pkgsByPkgDBs ( pkgsByPkgDB : pkgsByPkgDBs') pkgarg = do + let (db, pks') = pkgsByPkgDB + pkgs = db.packages + ps = findPackage pkgarg pkgs + pks = map installedUnitId ps + pkgByPkgDB' = (db, pks <> pks') + if null ps + then + -- Not found in the package database? Add the package database to those + -- considered and try with the remaining package databases to consider. + getPkgsByPkgDBs ( pkgsByPkgDB : pkgsByPkgDBs ) pkgsByPkgDBs' pkgarg + else + -- Found in the package database? Add to the list of packages to be + -- unregistered from that package database. TO DO: Perhaps check not + -- already in that list for better error messages when there are + -- duplicated requests to unregister. + pure (pkgsByPkgDBs <> (pkgByPkgDB' : pkgsByPkgDBs')) + + unregisterPackages' :: (PackageDB GhcPkg.DbReadWrite, [UnitId]) -> RIO env () + unregisterPackages' (db, pks) = do + let pkgs = db.packages + cmds = [ RemovePackage pkg + | pkg <- pkgs, installedUnitId pkg `elem` pks + ] + new_db = db{ packages = pkgs' } + where + deleteFirstsBy' :: (a -> b -> Bool) -> [a] -> [b] -> [a] + deleteFirstsBy' eq = foldl' (deleteBy' eq) + + deleteBy' :: (a -> b -> Bool) -> [a] -> b -> [a] + deleteBy' _ [] _ = [] + deleteBy' eq (y:ys) x = if y `eq` x then ys else y : deleteBy' eq ys x + + pkgs' = deleteFirstsBy' (\p1 p2 -> installedUnitId p1 == p2) pkgs pks + -- Use changeNewDB, rather than changeDB, to avoid duplicating + -- updateInternalDB db cmds + changeNewDB cmds new_db + +findPackage :: PackageArg -> [InstalledPackageInfo] -> [InstalledPackageInfo] +findPackage pkgarg = filter (pkgarg `matchesPkg`) + +cannotFindPackage :: PackageArg -> Maybe (PackageDB mode) -> RIO env a +cannotFindPackage pkgarg mdb = + prettyThrowIO $ 
CannotFindPackage pkgarg ((.location) <$> mdb) + +matches :: GlobPackageIdentifier -> MungedPackageId -> Bool +GlobPackageIdentifier pn `matches` pid' = pn == mungedName pid' +ExactPackageIdentifier pid `matches` pid' = + mungedName pid == mungedName pid' + && ( mungedVersion pid == mungedVersion pid' + || mungedVersion pid == nullVersion + ) + +matchesPkg :: PackageArg -> InstalledPackageInfo -> Bool +(Id pid) `matchesPkg` pkg = pid `matches` mungedId pkg +(IUId ipid) `matchesPkg` pkg = ipid == installedUnitId pkg +(Substring _ m) `matchesPkg` pkg = m (display (mungedId pkg)) + +-- removeFileSave doesn't throw an exceptions, if the file is already deleted +removeFileSafe :: SomeBase File -> RIO env () +removeFileSafe fn = do + prjSomeBase removeFile fn `catchIO` \ e -> + unless (isDoesNotExistError e) $ liftIO $ ioError e diff --git a/src/Network/HTTP/StackClient.hs b/src/Network/HTTP/StackClient.hs index e43ae625f4..2a6bc48b90 100644 --- a/src/Network/HTTP/StackClient.hs +++ b/src/Network/HTTP/StackClient.hs @@ -1,9 +1,13 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE LambdaCase #-} --- | --- Wrapper functions of 'Network.HTTP.Simple' and 'Network.HTTP.Client' to --- add the 'User-Agent' HTTP request header to each request. + +{-| +Module : Network.HTTP.StackClient +License : BSD-3-Clause + +Wrapper functions of 'Network.HTTP.Simple' and 'Network.HTTP.Client' to add the +'User-Agent' HTTP request header to each request. +-} module Network.HTTP.StackClient ( httpJSON @@ -14,6 +18,7 @@ module Network.HTTP.StackClient , setRequestCheckStatus , setRequestMethod , setRequestHeader + , setRequestHeaders , addRequestHeader , setRequestBody , getResponseHeaders @@ -29,20 +34,25 @@ module Network.HTTP.StackClient , applyDigestAuth , displayDigestAuthException , Request - , RequestBody(RequestBodyBS, RequestBodyLBS) - , Response - , HttpException + , RequestBody (RequestBodyBS, RequestBodyLBS) + , Response (..) + , HttpException (..) 
+ , HttpExceptionContent (..) + , notFound404 , hAccept , hContentLength , hContentMD5 + , method + , methodPost , methodPut , formDataBody , partFileRequestBody , partBS , partLBS - , setGithubHeaders + , setGitHubHeaders , download , redownload + , requestBody , verifiedDownload , verifiedDownloadWithProgress , CheckHexDigest (..) @@ -57,63 +67,92 @@ module Network.HTTP.StackClient , setForceDownload ) where -import Control.Monad.State (get, put, modify) -import Data.Aeson (FromJSON) +import Control.Monad.State ( get, put, modify ) +import Data.Aeson ( FromJSON ) import qualified Data.ByteString as Strict -import Data.Conduit (ConduitM, ConduitT, awaitForever, (.|), yield, await) -import Data.Conduit.Lift (evalStateC) +import Data.Conduit + ( ConduitM, ConduitT, awaitForever, (.|), yield, await ) +import Data.Conduit.Lift ( evalStateC ) import qualified Data.Conduit.List as CL -import Data.Monoid (Sum (..)) +import Data.List.Extra ( (!?) ) +import Data.Monoid ( Sum (..) ) import qualified Data.Text as T -import Data.Time.Clock (NominalDiffTime, diffUTCTime, getCurrentTime) -import Network.HTTP.Client (Request, RequestBody(..), Response, parseRequest, getUri, path, checkResponse, parseUrlThrow) -import Network.HTTP.Simple (setRequestCheckStatus, setRequestMethod, setRequestBody, setRequestHeader, addRequestHeader, HttpException(..), getResponseBody, getResponseStatusCode, getResponseHeaders) -import Network.HTTP.Types (hAccept, hContentLength, hContentMD5, methodPut) -import Network.HTTP.Conduit (requestHeaders) -import Network.HTTP.Client.TLS (getGlobalManager, applyDigestAuth, displayDigestAuthException) -import Network.HTTP.Download hiding (download, redownload, verifiedDownload) +import Data.Time.Clock + ( NominalDiffTime, diffUTCTime, getCurrentTime ) +import Network.HTTP.Client + ( HttpException (..), HttpExceptionContent (..), Manager + , Request, RequestBody (..), Response (..), checkResponse + , getUri, method, parseRequest, parseUrlThrow, path + , 
requestBody + ) +import Network.HTTP.Client.MultipartFormData + ( formDataBody, partBS, partFileRequestBody, partLBS ) +import Network.HTTP.Client.TLS + ( displayDigestAuthException, getGlobalManager ) +import qualified Network.HTTP.Client.TLS ( applyDigestAuth ) +import Network.HTTP.Conduit ( requestHeaders ) +import Network.HTTP.Download + ( CheckHexDigest (..), DownloadRequest, HashCheck (..) + , VerifiedDownloadException (..), drRetryPolicyDefault + , mkDownloadRequest, modifyRequest, setForceDownload + , setHashChecks, setLengthCheck, setRetryPolicy + ) import qualified Network.HTTP.Download as Download +import Network.HTTP.Simple + ( addRequestHeader, getResponseBody, getResponseHeaders + , getResponseStatusCode, setRequestBody + , setRequestCheckStatus, setRequestHeader, setRequestHeaders + , setRequestMethod + ) import qualified Network.HTTP.Simple -import Network.HTTP.Client.MultipartFormData (formDataBody, partFileRequestBody, partBS, partLBS) -import Path -import Prelude (until, (!!)) + ( httpJSON, httpLbs, httpNoBody, httpSink, withResponse ) +import Network.HTTP.Types + ( hAccept, hContentLength, hContentMD5, methodPost, methodPut + , notFound404 + ) +import Path ( Abs, File, Path ) +import Prelude ( until ) import RIO -import RIO.PrettyPrint -import Text.Printf (printf) - +import RIO.PrettyPrint ( HasTerm ) +import Text.Printf ( printf ) +-- | Set the User-Agent request header to @The Haskell Stack@. setUserAgent :: Request -> Request setUserAgent = setRequestHeader "User-Agent" ["The Haskell Stack"] - +-- | Like 'Network.HTTP.Simple.httpJSON' but sets the User-Agent request header. httpJSON :: (MonadIO m, FromJSON a) => Request -> m (Response a) httpJSON = Network.HTTP.Simple.httpJSON . setUserAgent - +-- | Like 'Network.HTTP.Simple.httpLbs' but sets the User-Agent request header. httpLbs :: MonadIO m => Request -> m (Response LByteString) httpLbs = Network.HTTP.Simple.httpLbs . 
setUserAgent - +-- | Like 'Network.HTTP.Simple.httpNoBody' but sets the User-Agent request +-- header. httpNoBody :: MonadIO m => Request -> m (Response ()) httpNoBody = Network.HTTP.Simple.httpNoBody . setUserAgent - -httpSink - :: MonadUnliftIO m +-- | Like 'Network.HTTP.Simple.httpSink' but sets the User-Agent request header. +httpSink :: + MonadUnliftIO m => Request -> (Response () -> ConduitM Strict.ByteString Void m a) -> m a httpSink = Network.HTTP.Simple.httpSink . setUserAgent - -withResponse - :: (MonadUnliftIO m, MonadIO n) - => Request -> (Response (ConduitM i Strict.ByteString n ()) -> m a) -> m a +-- | Like 'Network.HTTP.Simple.withResponse' but sets the User-Agent request +-- header. +withResponse :: + (MonadUnliftIO m, MonadIO n) + => Request + -> (Response (ConduitM i Strict.ByteString n ()) -> m a) + -> m a withResponse = Network.HTTP.Simple.withResponse . setUserAgent --- | Set the user-agent request header -setGithubHeaders :: Request -> Request -setGithubHeaders = setRequestHeader "Accept" ["application/vnd.github.v3+json"] +-- | Set the Accept request header to specify GitHub API v3. +setGitHubHeaders :: Request -> Request +setGitHubHeaders = setRequestHeader "Accept" ["application/vnd.github.v3+json"] -- | Download the given URL to the given location. If the file already exists, -- no download is performed. Otherwise, creates the parent directory, downloads @@ -121,20 +160,25 @@ setGithubHeaders = setRequestHeader "Accept" ["application/vnd.github.v3+json"] -- appropriate destination. -- -- Throws an exception if things go wrong -download :: HasTerm env - => Request - -> Path Abs File -- ^ destination - -> RIO env Bool -- ^ Was a downloaded performed (True) or did the file already exist (False)? -download req dest = Download.download (setUserAgent req) dest +download :: + HasTerm env + => Request + -> Path Abs File + -- ^ destination + -> RIO env Bool + -- ^ Was a download performed (True) or did the file already exist + -- (False)? 
+download req = Download.download (setUserAgent req) -- | Same as 'download', but will download a file a second time if it is already present. -- -- Returns 'True' if the file was downloaded, 'False' otherwise -redownload :: HasTerm env - => Request - -> Path Abs File -- ^ destination - -> RIO env Bool -redownload req dest = Download.redownload (setUserAgent req) dest +redownload :: + HasTerm env + => Request + -> Path Abs File -- ^ destination + -> RIO env Bool +redownload req = Download.redownload (setUserAgent req) -- | Copied and extended version of Network.HTTP.Download.download. -- @@ -149,19 +193,19 @@ redownload req dest = Download.redownload (setUserAgent req) dest -- Throws VerifiedDownloadException. -- Throws IOExceptions related to file system operations. -- Throws HttpException. -verifiedDownload - :: HasTerm env - => DownloadRequest - -> Path Abs File -- ^ destination - -> (Maybe Integer -> ConduitM ByteString Void (RIO env) ()) -- ^ custom hook to observe progress - -> RIO env Bool -- ^ Whether a download was performed -verifiedDownload dr destpath progressSink = - Download.verifiedDownload dr' destpath progressSink - where - dr' = modifyRequest setUserAgent dr - -verifiedDownloadWithProgress - :: HasTerm env +verifiedDownload :: + HasTerm env + => DownloadRequest + -> Path Abs File -- ^ destination + -> (Maybe Integer -> ConduitM ByteString Void (RIO env) ()) + -- ^ custom hook to observe progress + -> RIO env Bool -- ^ Whether a download was performed +verifiedDownload dr = Download.verifiedDownload dr' + where + dr' = modifyRequest setUserAgent dr + +verifiedDownloadWithProgress :: + HasTerm env => DownloadRequest -> Path Abs File -> Text @@ -170,8 +214,8 @@ verifiedDownloadWithProgress verifiedDownloadWithProgress req destpath lbl msize = verifiedDownload req destpath (chattyDownloadProgress lbl msize) -chattyDownloadProgress - :: ( HasLogFunc env +chattyDownloadProgress :: + ( HasLogFunc env , MonadIO m , MonadReader env m ) @@ -180,35 +224,38 
@@ chattyDownloadProgress -> f -> ConduitT ByteString c m () chattyDownloadProgress label mtotalSize _ = do - _ <- logSticky $ RIO.display label <> ": download has begun" - CL.map (Sum . Strict.length) - .| chunksOverTime 1 - .| go - where - go = evalStateC 0 $ awaitForever $ \(Sum size) -> do - modify (+ size) - totalSoFar <- get - logSticky $ fromString $ - case mtotalSize of - Nothing -> chattyProgressNoTotal totalSoFar - Just 0 -> chattyProgressNoTotal totalSoFar - Just totalSize -> chattyProgressWithTotal totalSoFar totalSize - - -- Example: ghc: 42.13 KiB downloaded... - chattyProgressNoTotal totalSoFar = - printf ("%s: " <> bytesfmt "%7.2f" totalSoFar <> " downloaded...") - (T.unpack label) + _ <- logSticky $ RIO.display label <> ": download has begun" + CL.map (Sum . Strict.length) + .| chunksOverTime 1 + .| go + where + go = evalStateC 0 $ awaitForever $ \(Sum size) -> do + modify (+ size) + totalSoFar <- get + logSticky $ fromString $ + case mtotalSize of + Nothing -> chattyProgressNoTotal totalSoFar + Just 0 -> chattyProgressNoTotal totalSoFar + Just totalSize -> chattyProgressWithTotal totalSoFar totalSize + + -- Example: ghc: 42.13 KiB downloaded... + chattyProgressNoTotal totalSoFar = + printf ("%s: " <> bytesfmt "%7.2f" totalSoFar <> " downloaded...") + (T.unpack label) -- Example: ghc: 50.00 MiB / 100.00 MiB (50.00%) downloaded... - chattyProgressWithTotal totalSoFar total = - printf ("%s: " <> - bytesfmt "%7.2f" totalSoFar <> " / " <> - bytesfmt "%.2f" total <> - " (%6.2f%%) downloaded...") - (T.unpack label) - percentage - where percentage :: Double - percentage = fromIntegral totalSoFar / fromIntegral total * 100 + chattyProgressWithTotal totalSoFar total = + printf ( "%s: " + <> bytesfmt "%7.2f" totalSoFar + <> " / " + <> bytesfmt "%.2f" total + <> " (%6.2f%%) downloaded..." 
+ ) + (T.unpack label) + percentage + where + percentage :: Double + percentage = fromIntegral totalSoFar / fromIntegral total * 100 -- | Given a printf format string for the decimal part and a number of -- bytes, formats the bytes using an appropriate unit and returns the @@ -219,36 +266,55 @@ chattyDownloadProgress label mtotalSize _ = do bytesfmt :: Integral a => String -> a -> String bytesfmt formatter bs = printf (formatter <> " %s") (fromIntegral (signum bs) * dec :: Double) - (bytesSuffixes !! i) - where - (dec,i) = getSuffix (abs bs) - getSuffix n = until p (\(x,y) -> (x / 1024, y+1)) (fromIntegral n,0) - where p (n',numDivs) = n' < 1024 || numDivs == (length bytesSuffixes - 1) - bytesSuffixes :: [String] - bytesSuffixes = ["B","KiB","MiB","GiB","TiB","PiB","EiB","ZiB","YiB"] + bytesSuffix + where + (dec, i) = getSuffix (abs bs) + getSuffix n = until p (\(x, y) -> (x / 1024, y + 1)) (fromIntegral n, 0) + where + p (n', numDivs) = n' < 1024 || numDivs == length bytesSuffixes - 1 + bytesSuffixes :: [String] + bytesSuffixes = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"] + bytesSuffix = fromMaybe + (error "bytesfmt: the impossible happened! Index out of range.") + (bytesSuffixes !? i) -- Await eagerly (collect with monoidal append), -- but space out yields by at least the given amount of time. -- The final yield may come sooner, and may be a superfluous mempty. 
-- Note that Integer and Float literals can be turned into NominalDiffTime -- (these literals are interpreted as "seconds") -chunksOverTime :: (Monoid a, Semigroup a, MonadIO m) => NominalDiffTime -> ConduitM a a m () +chunksOverTime :: + (Monoid a, Semigroup a, MonadIO m) + => NominalDiffTime + -> ConduitM a a m () chunksOverTime diff = do - currentTime <- liftIO getCurrentTime - evalStateC (currentTime, mempty) go - where - -- State is a tuple of: - -- * the last time a yield happened (or the beginning of the sink) - -- * the accumulated awaits since the last yield - go = await >>= \case - Nothing -> do - (_, acc) <- get - yield acc - Just a -> do - (lastTime, acc) <- get - let acc' = acc <> a - currentTime <- liftIO getCurrentTime - if diff < diffUTCTime currentTime lastTime - then put (currentTime, mempty) >> yield acc' - else put (lastTime, acc') - go + currentTime <- liftIO getCurrentTime + evalStateC (currentTime, mempty) go + where + -- State is a tuple of: + -- * the last time a yield happened (or the beginning of the sink) + -- * the accumulated awaits since the last yield + go = await >>= \case + Nothing -> do + (_, acc) <- get + yield acc + Just a -> do + (lastTime, acc) <- get + let acc' = acc <> a + currentTime <- liftIO getCurrentTime + if diff < diffUTCTime currentTime lastTime + then put (currentTime, mempty) >> yield acc' + else put (lastTime, acc') + go + +-- | Like 'Network.HTTP.Client.TLS.applyDigestAuth' but sets the User-Agent +-- request header. +applyDigestAuth :: + (MonadIO m, MonadThrow n) + => Strict.ByteString + -> Strict.ByteString + -> Request + -> Manager + -> m (n Request) +applyDigestAuth user pass = + Network.HTTP.Client.TLS.applyDigestAuth user pass . 
setUserAgent diff --git a/src/Options/Applicative/Args.hs b/src/Options/Applicative/Args.hs index 6d076afef7..a77b567efc 100644 --- a/src/Options/Applicative/Args.hs +++ b/src/Options/Applicative/Args.hs @@ -1,38 +1,51 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} --- | Accepting arguments to be passed through to a sub-process. +{-| +Module : Options.Applicative.Args +Description : Accepting arguments to be passed through to a sub-process. +License : BSD-3-Clause + +Accepting arguments to be passed through to a sub-process. +-} module Options.Applicative.Args - (argsArgument - ,argsOption - ,cmdOption) - where + ( argsArgument + , argsOption + , cmdOption + ) where -import Data.Attoparsec.Args +import Data.Attoparsec.Args ( EscapingMode (..), parseArgsFromString ) import qualified Options.Applicative as O import Stack.Prelude --- | An argument which accepts a list of arguments e.g. @--ghc-options="-X P.hs \"this\""@. +-- | An argument which accepts a list of arguments +-- e.g. @--ghc-options="-X P.hs \"this\""@. argsArgument :: O.Mod O.ArgumentFields [String] -> O.Parser [String] argsArgument = - O.argument - (do string <- O.str - either O.readerError return (parseArgsFromString Escaping string)) + O.argument + ( do s <- O.str + either O.readerError pure (parseArgsFromString Escaping s) + ) --- | An option which accepts a list of arguments e.g. @--ghc-options="-X P.hs \"this\""@. +-- | An option which accepts a list of arguments +-- e.g. @--ghc-options="-X P.hs \"this\""@. argsOption :: O.Mod O.OptionFields [String] -> O.Parser [String] argsOption = - O.option - (do string <- O.str - either O.readerError return (parseArgsFromString Escaping string)) + O.option + ( do s <- O.str + either O.readerError pure (parseArgsFromString Escaping s) + ) --- | An option which accepts a command and a list of arguments e.g. 
@--exec "echo hello world"@ -cmdOption :: O.Mod O.OptionFields (String, [String]) -> O.Parser (String, [String]) +-- | An option which accepts a command and a list of arguments +-- e.g. @--exec "echo hello world"@ +cmdOption :: + O.Mod O.OptionFields (String, [String]) + -> O.Parser (String, [String]) cmdOption = - O.option - (do string <- O.str - xs <- either O.readerError return (parseArgsFromString Escaping string) - case xs of - [] -> O.readerError "Must provide a command" - x:xs' -> return (x, xs')) + O.option + ( do s <- O.str + either O.readerError pure (parseArgsFromString Escaping s) >>= \case + [] -> O.readerError "Must provide a command" + x:xs' -> pure (x, xs') + ) diff --git a/src/Options/Applicative/Builder/Extra.hs b/src/Options/Applicative/Builder/Extra.hs index 87d22b90f6..18eb7c1d07 100644 --- a/src/Options/Applicative/Builder/Extra.hs +++ b/src/Options/Applicative/Builder/Extra.hs @@ -1,58 +1,87 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} --- | Extra functions for optparse-applicative. +{-| +Module : Options.Applicative.Builder.Extra +Description : Extra functions for optparse-applicative. +License : BSD-3-Clause + +Extra functions for optparse-applicative. +-} module Options.Applicative.Builder.Extra - (boolFlags - ,boolFlagsNoDefault - ,firstBoolFlagsNoDefault - ,firstBoolFlagsTrue - ,firstBoolFlagsFalse - ,enableDisableFlags - ,enableDisableFlagsNoDefault - ,extraHelpOption - ,execExtraHelp - ,textOption - ,textArgument - ,optionalFirst - ,optionalFirstTrue - ,optionalFirstFalse - ,absFileOption - ,relFileOption - ,absDirOption - ,relDirOption - ,eitherReader' - ,fileCompleter - ,fileExtCompleter - ,dirCompleter - ,PathCompleterOpts(..) 
- ,defaultPathCompleterOpts - ,pathCompleterWith - ,unescapeBashArg - ,showHelpText + ( boolFlags + , boolFlagsNoDefault + , firstBoolFlagsNoDefault + , firstBoolFlagsTrue + , firstBoolFlagsFalse + , enableDisableFlags + , enableDisableFlagsNoDefault + , extraHelpOption + , execExtraHelp + , textOption + , textArgument + , optionalFirst + , optionalFirstTrue + , optionalFirstFalse + , absFileOption + , relFileOption + , absDirOption + , relDirOption + , eitherReader' + , fileCompleter + , fileExtCompleter + , dirCompleter + , PathCompleterOpts (..) + , defaultPathCompleterOpts + , pathCompleterWith + , unescapeBashArg + , showHelpText ) where -import Data.List (isPrefixOf) -import Data.Maybe -import Data.Monoid hiding ((<>)) +import Data.List ( isPrefixOf ) import qualified Data.Text as T -import Options.Applicative -import Options.Applicative.Types (readerAsk) -import Path hiding (()) -import Stack.Prelude -import System.Directory (getCurrentDirectory, getDirectoryContents, doesDirectoryExist) -import System.Environment (withArgs) -import System.FilePath (takeBaseName, (), splitFileName, isRelative, takeExtension) +import Options.Applicative + ( ArgumentFields, Completer, FlagFields, Mod, OptionFields + , ParseError (..), Parser, ReadM, abortOption, argument + , completer, eitherReader, execParser, flag', fullDesc, help + , hidden, idm, info, infoOption, internal, long, metavar + , mkCompleter, option, progDesc, strArgument + ) +import Options.Applicative.Types ( readerAsk ) +import Path ( parseAbsDir, parseAbsFile, parseRelDir, parseRelFile ) +import Stack.Prelude +import System.Directory + ( doesDirectoryExist, getCurrentDirectory + , getDirectoryContents + ) +import System.Environment ( withArgs ) +import System.FilePath + ( (), isRelative, splitFileName, takeBaseName + , takeExtension + ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Options.Applicative.Builder.Extra" module. 
+data OptionsApplicativeExtraException + = FlagNotFoundBug + deriving Show + +instance Exception OptionsApplicativeExtraException where + displayException FlagNotFoundBug = + "Error: [S-2797]\n" + ++ "The impossible happened! No valid flags found in \ + \enableDisableFlagsNoDefault. Please report this bug at Stack's \ + \repository." -- | Enable/disable flags for a 'Bool'. -boolFlags :: Bool -- ^ Default value - -> String -- ^ Flag name - -> String -- ^ Help suffix - -> Mod FlagFields Bool - -> Parser Bool +boolFlags :: + Bool -- ^ Default value + -> String -- ^ Flag name + -> String -- ^ Help suffix + -> Mod FlagFields Bool + -> Parser Bool boolFlags defaultValue name helpSuffix = enableDisableFlags defaultValue True False name $ concat [ helpSuffix @@ -61,112 +90,158 @@ boolFlags defaultValue name helpSuffix = , ")" ] --- | Enable/disable flags for a 'Bool', without a default case (to allow chaining with '<|>'). -boolFlagsNoDefault :: String -- ^ Flag name - -> String -- ^ Help suffix - -> Mod FlagFields Bool - -> Parser Bool +-- | Enable/disable flags for a 'Bool', without a default case (to allow +-- chaining with '<|>'). 
+boolFlagsNoDefault :: + String -- ^ Flag name + -> String -- ^ Help suffix + -> Mod FlagFields Bool + -> Parser Bool boolFlagsNoDefault = enableDisableFlagsNoDefault True False -- | Flag with no default of True or False -firstBoolFlagsNoDefault :: String -> String -> Mod FlagFields (Maybe Bool) -> Parser (First Bool) +firstBoolFlagsNoDefault :: + String + -> String + -> Mod FlagFields (Maybe Bool) + -> Parser (First Bool) firstBoolFlagsNoDefault name helpSuffix mod' = First <$> enableDisableFlags Nothing (Just True) (Just False) name helpSuffix mod' -- | Flag with a Semigroup instance and a default of True -firstBoolFlagsTrue :: String -> String -> Mod FlagFields FirstTrue -> Parser FirstTrue +firstBoolFlagsTrue :: + String + -> String + -> Mod FlagFields FirstTrue + -> Parser FirstTrue firstBoolFlagsTrue name helpSuffix = enableDisableFlags mempty (FirstTrue (Just True)) (FirstTrue (Just False)) name $ helpSuffix ++ " (default: enabled)" -- | Flag with a Semigroup instance and a default of False -firstBoolFlagsFalse :: String -> String -> Mod FlagFields FirstFalse -> Parser FirstFalse +firstBoolFlagsFalse :: + String + -> String + -> Mod FlagFields FirstFalse + -> Parser FirstFalse firstBoolFlagsFalse name helpSuffix = enableDisableFlags mempty (FirstFalse (Just True)) (FirstFalse (Just False)) name $ helpSuffix ++ " (default: disabled)" -- | Enable/disable flags for any type. 
-enableDisableFlags :: a -- ^ Default value - -> a -- ^ Enabled value - -> a -- ^ Disabled value - -> String -- ^ Name - -> String -- ^ Help suffix - -> Mod FlagFields a - -> Parser a -enableDisableFlags defaultValue enabledValue disabledValue name helpSuffix mods = - enableDisableFlagsNoDefault enabledValue disabledValue name helpSuffix mods <|> - pure defaultValue +enableDisableFlags :: + a -- ^ Default value + -> a -- ^ Enabled value + -> a -- ^ Disabled value + -> String -- ^ Name + -> String -- ^ Help suffix + -> Mod FlagFields a + -> Parser a +enableDisableFlags defaultValue enabledValue disabledValue name helpSuffix + mods = + enableDisableFlagsNoDefault + enabledValue + disabledValue + name + helpSuffix + mods <|> pure defaultValue -- | Enable/disable flags for any type, without a default (to allow chaining with '<|>') -enableDisableFlagsNoDefault :: a -- ^ Enabled value - -> a -- ^ Disabled value - -> String -- ^ Name - -> String -- ^ Help suffix - -> Mod FlagFields a - -> Parser a +enableDisableFlagsNoDefault :: + a -- ^ Enabled value + -> a -- ^ Disabled value + -> String -- ^ Name + -> String -- ^ Help suffix + -> Mod FlagFields a + -> Parser a enableDisableFlagsNoDefault enabledValue disabledValue name helpSuffix mods = last <$> some - ((flag' - enabledValue - (hidden <> - internal <> - long name <> - help helpSuffix <> - mods) <|> - flag' - disabledValue - (hidden <> - internal <> - long ("no-" ++ name) <> - help helpSuffix <> - mods)) <|> - flag' - disabledValue - (long ("[no-]" ++ name) <> - help ("Enable/disable " ++ helpSuffix) <> - mods)) - where - last xs = - case reverse xs of - [] -> impureThrow $ stringException "enableDisableFlagsNoDefault.last" - x:_ -> x - --- | Show an extra help option (e.g. @--docker-help@ shows help for all @--docker*@ args). 
+ ( flag' + enabledValue + ( hidden + <> internal + <> long name + <> help helpSuffix + <> mods + ) + <|> flag' + disabledValue + ( hidden + <> internal + <> long ("no-" ++ name) + <> help helpSuffix + <> mods + ) + <|> flag' + disabledValue + ( long ("[no-]" ++ name) + <> help ("Enable/disable " ++ helpSuffix) + <> mods + ) + ) + where + last xs = + case reverse xs of + [] -> impureThrow FlagNotFoundBug + x:_ -> x + +-- | Show an extra help option (e.g. @--docker-help@ shows help for all +-- @--docker*@ args). -- --- To actually have that help appear, use 'execExtraHelp' before executing the main parser. -extraHelpOption :: Bool -- ^ Hide from the brief description? - -> String -- ^ Program name, e.g. @"stack"@ - -> String -- ^ Option glob expression, e.g. @"docker*"@ - -> String -- ^ Help option name, e.g. @"docker-help"@ - -> Parser (a -> a) +-- To actually have that help appear, use 'execExtraHelp' before executing the +-- main parser. +extraHelpOption :: + Bool -- ^ Hide from the brief description? + -> String -- ^ Program name, e.g. @"stack"@ + -> String -- ^ Option glob expression, e.g. @"docker*"@ + -> String -- ^ Help option name, e.g. @"docker-help"@ + -> Parser (a -> a) extraHelpOption hide progName fakeName helpName = - infoOption (optDesc' ++ ".") (long helpName <> hidden <> internal) <*> - infoOption (optDesc' ++ ".") (long fakeName <> - help optDesc' <> - (if hide then hidden <> internal else idm)) - where optDesc' = concat ["Run '", takeBaseName progName, " --", helpName, "' for details"] + infoOption + (optDesc' ++ ".") + (long helpName <> hidden <> internal) + <*> infoOption + (optDesc' ++ ".") + ( long fakeName + <> help optDesc' + <> (if hide then hidden <> internal else idm) + ) + where + optDesc' = concat + [ "Run '" + , takeBaseName progName + , " --" + , helpName + , "' for details." + ] -- | Display extra help if extra help option passed in arguments. 
-- --- Since optparse-applicative doesn't allow an arbitrary IO action for an 'abortOption', this --- was the best way I found that doesn't require manually formatting the help. -execExtraHelp :: [String] -- ^ Command line arguments - -> String -- ^ Extra help option name, e.g. @"docker-help"@ - -> Parser a -- ^ Option parser for the relevant command - -> String -- ^ Option description - -> IO () +-- Since optparse-applicative doesn't allow an arbitrary IO action for an +-- 'abortOption', this was the best way I found that doesn't require manually +-- formatting the help. +execExtraHelp :: + [String] -- ^ Command line arguments + -> String -- ^ Extra help option name, e.g. @"docker-help"@ + -> Parser a -- ^ Option parser for the relevant command + -> String -- ^ Option description + -> IO () execExtraHelp args helpOpt parser pd = - when (args == ["--" ++ helpOpt]) $ - withArgs ["--help"] $ do - _ <- execParser (info (hiddenHelper <*> - ((,) <$> - parser <*> - some (strArgument (metavar "OTHER ARGUMENTS") :: Parser String))) - (fullDesc <> progDesc pd)) - return () - where hiddenHelper = abortOption showHelpText (long "help" <> hidden <> internal) + when (args == ["--" ++ helpOpt]) $ + withArgs ["--help"] $ + void $ execParser (info + ( hiddenHelper + <*> ( (,) + <$> parser + <*> some (strArgument + (metavar "OTHER ARGUMENTS") :: Parser String) + ) + ) + (fullDesc <> progDesc pd)) + where + hiddenHelper = abortOption showHelpText (long "help" <> hidden <> internal) -- | 'option', specialized to 'Text'. textOption :: Mod OptionFields Text -> Parser Text @@ -176,119 +251,139 @@ textOption = option (T.pack <$> readerAsk) textArgument :: Mod ArgumentFields Text -> Parser Text textArgument = argument (T.pack <$> readerAsk) --- | Like 'optional', but returning a 'First'. +-- | Like 'optional', but returning a t'First'. optionalFirst :: Alternative f => f a -> f (First a) optionalFirst = fmap First . optional --- | Like 'optional', but returning a 'FirstTrue'. 
+-- | Like 'optional', but returning a t'FirstTrue'. optionalFirstTrue :: Alternative f => f Bool -> f FirstTrue optionalFirstTrue = fmap FirstTrue . optional --- | Like 'optional', but returning a 'FirstFalse'. +-- | Like 'optional', but returning a t'FirstFalse'. optionalFirstFalse :: Alternative f => f Bool -> f FirstFalse optionalFirstFalse = fmap FirstFalse . optional absFileOption :: Mod OptionFields (Path Abs File) -> Parser (Path Abs File) absFileOption mods = option (eitherReader' parseAbsFile) $ - completer (pathCompleterWith defaultPathCompleterOpts { pcoRelative = False }) <> mods + completer + (pathCompleterWith defaultPathCompleterOpts { relative = False }) + <> mods relFileOption :: Mod OptionFields (Path Rel File) -> Parser (Path Rel File) relFileOption mods = option (eitherReader' parseRelFile) $ - completer (pathCompleterWith defaultPathCompleterOpts { pcoAbsolute = False }) <> mods + completer + (pathCompleterWith defaultPathCompleterOpts { absolute = False }) + <> mods absDirOption :: Mod OptionFields (Path Abs Dir) -> Parser (Path Abs Dir) absDirOption mods = option (eitherReader' parseAbsDir) $ - completer (pathCompleterWith defaultPathCompleterOpts { pcoRelative = False, pcoFileFilter = const False }) <> mods + completer + ( pathCompleterWith + defaultPathCompleterOpts + { relative = False + , fileFilter = const False + } + ) + <> mods relDirOption :: Mod OptionFields (Path Rel Dir) -> Parser (Path Rel Dir) relDirOption mods = option (eitherReader' parseRelDir) $ - completer (pathCompleterWith defaultPathCompleterOpts { pcoAbsolute = False, pcoFileFilter = const False }) <> mods + completer + ( pathCompleterWith + defaultPathCompleterOpts + { absolute = False + , fileFilter = const False + } + ) + <> mods -- | Like 'eitherReader', but accepting any @'Show' e@ on the 'Left'. eitherReader' :: Show e => (String -> Either e a) -> ReadM a eitherReader' f = eitherReader (mapLeft show . 
f) data PathCompleterOpts = PathCompleterOpts - { pcoAbsolute :: Bool - , pcoRelative :: Bool - , pcoRootDir :: Maybe FilePath - , pcoFileFilter :: FilePath -> Bool - , pcoDirFilter :: FilePath -> Bool - } + { absolute :: Bool + , relative :: Bool + , rootDir :: Maybe FilePath + , fileFilter :: FilePath -> Bool + , dirFilter :: FilePath -> Bool + } defaultPathCompleterOpts :: PathCompleterOpts defaultPathCompleterOpts = PathCompleterOpts - { pcoAbsolute = True - , pcoRelative = True - , pcoRootDir = Nothing - , pcoFileFilter = const True - , pcoDirFilter = const True - } + { absolute = True + , relative = True + , rootDir = Nothing + , fileFilter = const True + , dirFilter = const True + } fileCompleter :: Completer fileCompleter = pathCompleterWith defaultPathCompleterOpts fileExtCompleter :: [String] -> Completer -fileExtCompleter exts = pathCompleterWith defaultPathCompleterOpts { pcoFileFilter = (`elem` exts) . takeExtension } +fileExtCompleter exts = + pathCompleterWith + defaultPathCompleterOpts { fileFilter = (`elem` exts) . takeExtension } dirCompleter :: Completer -dirCompleter = pathCompleterWith defaultPathCompleterOpts { pcoFileFilter = const False } +dirCompleter = + pathCompleterWith defaultPathCompleterOpts { fileFilter = const False } pathCompleterWith :: PathCompleterOpts -> Completer -pathCompleterWith PathCompleterOpts {..} = mkCompleter $ \inputRaw -> do - -- Unescape input, to handle single and double quotes. Note that the - -- results do not need to be re-escaped, due to some fiddly bash - -- magic. 
- let input = unescapeBashArg inputRaw - let (inputSearchDir0, searchPrefix) = splitFileName input - inputSearchDir = if inputSearchDir0 == "./" then "" else inputSearchDir0 - msearchDir <- - case (isRelative inputSearchDir, pcoAbsolute, pcoRelative) of - (True, _, True) -> do - rootDir <- maybe getCurrentDirectory return pcoRootDir - return $ Just (rootDir inputSearchDir) - (False, True, _) -> return $ Just inputSearchDir - _ -> return Nothing - case msearchDir of - Nothing - | input == "" && pcoAbsolute -> return ["/"] - | otherwise -> return [] - Just searchDir -> do - entries <- getDirectoryContents searchDir `catch` \(_ :: IOException) -> return [] - fmap catMaybes $ forM entries $ \entry -> - -- Skip . and .. unless user is typing . or .. - if entry `elem` ["..", "."] && searchPrefix `notElem` ["..", "."] then return Nothing else - if searchPrefix `isPrefixOf` entry - then do - let path = searchDir entry - case (pcoFileFilter path, pcoDirFilter path) of - (True, True) -> return $ Just (inputSearchDir entry) - (fileAllowed, dirAllowed) -> do - isDir <- doesDirectoryExist path - if (if isDir then dirAllowed else fileAllowed) - then return $ Just (inputSearchDir entry) - else return Nothing - else return Nothing +pathCompleterWith pco = mkCompleter $ \inputRaw -> do + -- Unescape input, to handle single and double quotes. Note that the + -- results do not need to be re-escaped, due to some fiddly bash + -- magic. 
+ let input = unescapeBashArg inputRaw + let (inputSearchDir0, searchPrefix) = splitFileName input + inputSearchDir = if inputSearchDir0 == "./" then "" else inputSearchDir0 + msearchDir <- + case (isRelative inputSearchDir, pco.absolute, pco.relative) of + (True, _, True) -> do + rootDir <- maybe getCurrentDirectory pure pco.rootDir + pure $ Just (rootDir inputSearchDir) + (False, True, _) -> pure $ Just inputSearchDir + _ -> pure Nothing + case msearchDir of + Nothing + | input == "" && pco.absolute -> pure ["/"] + | otherwise -> pure [] + Just searchDir -> do + entries <- + getDirectoryContents searchDir `catch` \(_ :: IOException) -> pure [] + fmap catMaybes $ forM entries $ \entry -> + -- Skip . and .. unless user is typing . or .. + if entry `elem` ["..", "."] && searchPrefix `notElem` ["..", "."] + then pure Nothing + else + if searchPrefix `isPrefixOf` entry + then do + let path = searchDir entry + case (pco.fileFilter path, pco.dirFilter path) of + (True, True) -> pure $ Just (inputSearchDir entry) + (fileAllowed, dirAllowed) -> do + isDir <- doesDirectoryExist path + if (if isDir then dirAllowed else fileAllowed) + then pure $ Just (inputSearchDir entry) + else pure Nothing + else pure Nothing unescapeBashArg :: String -> String unescapeBashArg ('\'' : rest) = rest unescapeBashArg ('\"' : rest) = go rest - where - pattern = "$`\"\\\n" :: String - go [] = [] - go ('\\' : x : xs) - | x `elem` pattern = x : xs - | otherwise = '\\' : x : go xs - go (x : xs) = x : go xs + where + special = "$`\"\\\n" :: String + go [] = [] + go ('\\' : x : xs) + | x `elem` special = x : xs + | otherwise = '\\' : x : go xs + go (x : xs) = x : go xs unescapeBashArg input = go input - where - go [] = [] - go ('\\' : x : xs) = x : go xs - go (x : xs) = x : go xs + where + go [] = [] + go ('\\' : x : xs) = x : go xs + go (x : xs) = x : go xs showHelpText :: ParseError -#if MIN_VERSION_optparse_applicative(0,16,0) showHelpText = ShowHelpText Nothing -#else -showHelpText = 
ShowHelpText -#endif diff --git a/src/Options/Applicative/Complicated.hs b/src/Options/Applicative/Complicated.hs index 0a7a80804c..6cbb1cf8d5 100644 --- a/src/Options/Applicative/Complicated.hs +++ b/src/Options/Applicative/Complicated.hs @@ -1,10 +1,17 @@ {-# LANGUAGE NoImplicitPrelude #-} --- | Simple interface to complicated program arguments. --- --- This is a "fork" of the @optparse-simple@ package that has some workarounds for --- optparse-applicative issues that become problematic with programs that have many options and --- subcommands. Because it makes the interface more complex, these workarounds are not suitable for --- pushing upstream to optparse-applicative. + +{-| +Module : Options.Applicative.Complicated +Description : Simple interface to complicated program arguments. +License : BSD-3-Clause + +Simple interface to complicated program arguments. + +This is a "fork" of the @optparse-simple@ package that has some workarounds for +optparse-applicative issues that become problematic with programs that have many +options and subcommands. Because it makes the interface more complex, these +workarounds are not suitable for pushing upstream to optparse-applicative. +-} module Options.Applicative.Complicated ( addCommand @@ -13,146 +20,176 @@ module Options.Applicative.Complicated , complicatedParser ) where -import Control.Monad.Trans.Except -import Control.Monad.Trans.Writer +import Control.Monad.Trans.Except ( runExceptT ) +import Control.Monad.Trans.Writer ( runWriter, tell ) import Options.Applicative -import Options.Applicative.Types -import Options.Applicative.Builder.Extra -import Options.Applicative.Builder.Internal + ( Parser, ParserFailure, ParserHelp, ParserResult (..) 
+ , abortOption, command, execParserPure, footer, fullDesc + , handleParseResult, header, help, hsubparser, info + , infoOption, long, metavar, noBacktrack, prefs, progDesc + , short, showHelpOnEmpty + ) +import Options.Applicative.Builder.Extra ( showHelpText ) import Stack.Prelude -import System.Environment +import Stack.Types.AddCommand ( AddCommand ) +import Stack.Types.GlobalOptsMonoid ( GlobalOptsMonoid ) +import Stack.Types.Runner ( Runner ) +import System.Environment ( getArgs ) -- | Generate and execute a complicated options parser. -complicatedOptions - :: Monoid a - => Version - -- ^ numeric version +complicatedOptions :: + Version + -- ^ numeric version -> Maybe String - -- ^ version string + -- ^ version string -> String - -- ^ hpack numeric version, as string + -- ^ Hpack numeric version, as string -> String - -- ^ header + -- ^ header -> String - -- ^ program description (displayed between usage and options listing in the help output) + -- ^ program description (displayed between usage and options listing in + -- the help output) -> String - -- ^ footer - -> Parser a - -- ^ common settings - -> Maybe (ParserFailure ParserHelp -> [String] -> IO (a,(b,a))) - -- ^ optional handler for parser failure; 'handleParseResult' is called by - -- default - -> ExceptT b (Writer (Mod CommandFields (b,a))) () - -- ^ commands (use 'addCommand') - -> IO (a,b) -complicatedOptions numericVersion stringVersion numericHpackVersion h pd footerStr commonParser mOnFailure commandParser = - do args <- getArgs - (a,(b,c)) <- case execParserPure (prefs noBacktrack) parser args of - Failure _ | null args -> withArgs ["--help"] (execParser parser) + -- ^ footer + -> Parser GlobalOptsMonoid + -- ^ common settings + -> Maybe ( ParserFailure ParserHelp + -> [String] + -> IO (GlobalOptsMonoid, (RIO Runner (), GlobalOptsMonoid)) + ) + -- ^ optional handler for parser failure; 'handleParseResult' is called by + -- default + -> AddCommand + -- ^ commands (use 'addCommand') + -> IO 
(GlobalOptsMonoid, RIO Runner ()) +complicatedOptions + numericVersion + stringVersion + numericHpackVersion + h + pd + footerStr + commonParser + mOnFailure + commandParser + = do + args <- getArgs + (a, (b, c)) <- let parserPrefs = prefs $ noBacktrack <> showHelpOnEmpty + in case execParserPure parserPrefs parser args of -- call onFailure handler if it's present and parsing options failed - Failure f | Just onFailure <- mOnFailure -> onFailure f args - parseResult -> handleParseResult parseResult - return (mappend c a,b) - where parser = info (helpOption <*> versionOptions <*> complicatedParser "COMMAND|FILE" commonParser commandParser) desc - desc = fullDesc <> header h <> progDesc pd <> footer footerStr - versionOptions = - case stringVersion of - Nothing -> versionOption (versionString numericVersion) - Just s -> versionOption s <*> numericVersionOption <*> numericHpackVersionOption - versionOption s = - infoOption - s - (long "version" <> - help "Show version") - numericVersionOption = - infoOption - (versionString numericVersion) - (long "numeric-version" <> - help "Show only version number") - numericHpackVersionOption = - infoOption - numericHpackVersion - (long "hpack-numeric-version" <> - help "Show only hpack's version number") + Failure f | Just onFailure <- mOnFailure -> onFailure f args + parseResult -> handleParseResult parseResult + pure (mappend c a, b) + where + parser = info + ( helpOption + <*> versionOptions + <*> complicatedParser "COMMAND|FILE" commonParser commandParser + ) + desc + desc = fullDesc <> header h <> progDesc pd <> footer footerStr + versionOptions = + case stringVersion of + Nothing -> versionOption (versionString numericVersion) + Just s -> + versionOption s + <*> numericVersionOption + <*> numericHpackVersionOption + versionOption s = + infoOption + s + ( long "version" + <> help "Show version." 
+ ) + numericVersionOption = + infoOption + (versionString numericVersion) + ( long "numeric-version" + <> help "Show only version number." + ) + numericHpackVersionOption = + infoOption + numericHpackVersion + ( long "hpack-numeric-version" + <> help "Show only Hpack's version number." + ) -- | Add a command to the options dispatcher. -addCommand :: String -- ^ command string - -> String -- ^ title of command - -> String -- ^ footer of command help - -> (a -> b) -- ^ constructor to wrap up command in common data type - -> (a -> c -> c) -- ^ extend common settings from local settings - -> Parser c -- ^ common parser - -> Parser a -- ^ command parser - -> ExceptT b (Writer (Mod CommandFields (b,c))) () +addCommand :: + String -- ^ command string + -> String -- ^ title of command + -> String -- ^ footer of command help + -> (opts -> RIO Runner ()) + -- ^ constructor to wrap up command in common data type + -> (opts -> GlobalOptsMonoid -> GlobalOptsMonoid) + -- ^ extend common settings from local settings + -> Parser GlobalOptsMonoid -- ^ common parser + -> Parser opts -- ^ command parser + -> AddCommand addCommand cmd title footerStr constr extendCommon = - addCommand' cmd title footerStr (\a c -> (constr a,extendCommon a c)) + addCommand' cmd title footerStr (\a c -> (constr a, extendCommon a c)) -- | Add a command that takes sub-commands to the options dispatcher. 
-addSubCommands - :: Monoid c - => String - -- ^ command string +addSubCommands :: + String + -- ^ command string -> String - -- ^ title of command + -- ^ title of command -> String - -- ^ footer of command help - -> Parser c - -- ^ common parser - -> ExceptT b (Writer (Mod CommandFields (b,c))) () - -- ^ sub-commands (use 'addCommand') - -> ExceptT b (Writer (Mod CommandFields (b,c))) () + -- ^ footer of command help + -> Parser GlobalOptsMonoid + -- ^ common parser + -> AddCommand + -- ^ sub-commands (use 'addCommand') + -> AddCommand addSubCommands cmd title footerStr commonParser commandParser = - addCommand' cmd - title - footerStr - (\(c1,(a,c2)) c3 -> (a,mconcat [c3, c2, c1])) - commonParser - (complicatedParser "COMMAND" commonParser commandParser) + addCommand' + cmd + title + footerStr + (\(c1, (a, c2)) c3 -> (a, mconcat [c3, c2, c1])) + commonParser + (complicatedParser "COMMAND" commonParser commandParser) -- | Add a command to the options dispatcher. -addCommand' :: String -- ^ command string - -> String -- ^ title of command - -> String -- ^ footer of command help - -> (a -> c -> (b,c)) -- ^ constructor to wrap up command in common data type - -> Parser c -- ^ common parser - -> Parser a -- ^ command parser - -> ExceptT b (Writer (Mod CommandFields (b,c))) () +addCommand' :: + String -- ^ command string + -> String -- ^ title of command + -> String -- ^ footer of command help + -> (opts -> GlobalOptsMonoid -> (RIO Runner (),GlobalOptsMonoid)) + -- ^ constructor to wrap up command in common data type + -> Parser GlobalOptsMonoid -- ^ common parser + -> Parser opts -- ^ command parser + -> AddCommand addCommand' cmd title footerStr constr commonParser inner = - lift (tell (command cmd - (info (constr <$> inner <*> commonParser) - (progDesc title <> footer footerStr)))) + lift $ tell $ + command + cmd + ( info + (constr <$> inner <*> commonParser) + (progDesc title <> footer footerStr) + ) -- | Generate a complicated options parser. 
-complicatedParser - :: Monoid a - => String - -- ^ metavar for the sub-command - -> Parser a - -- ^ common settings - -> ExceptT b (Writer (Mod CommandFields (b,a))) () - -- ^ commands (use 'addCommand') - -> Parser (a,(b,a)) +complicatedParser :: + String + -- ^ metavar for the sub-command + -> Parser GlobalOptsMonoid + -- ^ common settings + -> AddCommand + -- ^ commands (use 'addCommand') + -> Parser (GlobalOptsMonoid, (RIO Runner (), GlobalOptsMonoid)) complicatedParser commandMetavar commonParser commandParser = - (,) <$> - commonParser <*> - case runWriter (runExceptT commandParser) of - (Right (),d) -> hsubparser' commandMetavar d - (Left b,_) -> pure (b,mempty) - --- | Subparser with @--help@ argument. Borrowed with slight modification --- from Options.Applicative.Extra. -hsubparser' :: String -> Mod CommandFields a -> Parser a -hsubparser' commandMetavar m = mkParser d g rdr - where - Mod _ d g = metavar commandMetavar `mappend` m - (groupName, cmds, subs) = mkCommand m - rdr = CmdReader groupName cmds (fmap add_helper . subs) - add_helper pinfo = pinfo - { infoParser = infoParser pinfo <**> helpOption } + (,) + <$> commonParser + <*> case runWriter (runExceptT commandParser) of + (Right (), m) -> hsubparser (m <> metavar commandMetavar) + (Left b, _) -> pure (b, mempty) -- | Non-hidden help option. helpOption :: Parser (a -> a) helpOption = - abortOption showHelpText $ - long "help" <> - help "Show this help text" + abortOption showHelpText $ + long "help" + <> short 'h' + <> help "Show this help text." 
diff --git a/src/Path/CheckInstall.hs b/src/Path/CheckInstall.hs index ce52c7f52c..6b324fcd7e 100644 --- a/src/Path/CheckInstall.hs +++ b/src/Path/CheckInstall.hs @@ -1,53 +1,66 @@ {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE OverloadedStrings #-} -module Path.CheckInstall where +{-| +Module : Path.CheckInstall +License : BSD-3-Clause +-} -import Control.Monad.Extra (anyM, (&&^)) -import qualified Data.Text as T +module Path.CheckInstall + ( warnInstallSearchPathIssues + ) where + +import Control.Monad.Extra ( (&&^), anyM ) import Stack.Prelude -import RIO.PrettyPrint -import Stack.Types.Config +import Stack.Types.Config ( HasConfig ) import qualified System.Directory as D import qualified System.FilePath as FP --- | Checks if the installed executable will be available on the user's --- PATH. This doesn't use @envSearchPath menv@ because it includes paths --- only visible when running in the stack environment. -warnInstallSearchPathIssues :: HasConfig env => FilePath -> [Text] -> RIO env () +-- | Checks if the installed executable will be available on the user's PATH. +-- This doesn't use @envSearchPath menv@ because it includes paths only visible +-- when running in the Stack environment. +warnInstallSearchPathIssues :: + HasConfig env + => FilePath + -> [String] + -> RIO env () warnInstallSearchPathIssues destDir installed = do - searchPath <- liftIO FP.getSearchPath - destDirIsInPATH <- liftIO $ - anyM (\dir -> D.doesDirectoryExist dir &&^ fmap (FP.equalFilePath destDir) (D.canonicalizePath dir)) searchPath - if destDirIsInPATH - then forM_ installed $ \exe -> do - mexePath <- (liftIO . D.findExecutable . T.unpack) exe - case mexePath of - Just exePath -> do - exeDir <- (liftIO . fmap FP.takeDirectory . D.canonicalizePath) exePath - unless (exeDir `FP.equalFilePath` destDir) $ do - prettyWarnL - [ flow "The" - , style File . fromString . 
T.unpack $ exe - , flow "executable found on the PATH environment variable is" - , style File . fromString $ exePath - , flow "and not the version that was just installed." - , flow "This means that" - , style File . fromString . T.unpack $ exe - , "calls on the command line will not use this version." - ] - Nothing -> do - prettyWarnL - [ flow "Installation path" - , style Dir . fromString $ destDir - , flow "is on the PATH but the" - , style File . fromString . T.unpack $ exe - , flow "executable that was just installed could not be found on the PATH." - ] - else do + searchPath <- liftIO FP.getSearchPath + destDirIsInPATH <- liftIO $ + anyM + ( \dir -> D.doesDirectoryExist dir + &&^ fmap (FP.equalFilePath destDir) (D.canonicalizePath dir) + ) + searchPath + if destDirIsInPATH + then forM_ installed $ \exe -> do + (liftIO . D.findExecutable) exe >>= \case + Just exePath -> do + exeDir <- + (liftIO . fmap FP.takeDirectory . D.canonicalizePath) exePath + unless (exeDir `FP.equalFilePath` destDir) $ prettyWarnL - [ flow "Installation path " - , style Dir . fromString $ destDir - , "not found on the PATH environment variable." + [ flow "The" + , style File . fromString $ exe + , flow "executable found on the PATH environment variable is" + , style File . fromString $ exePath + , flow "and not the version that was just installed." + , flow "This means that" + , style File . fromString $ exe + , "calls on the command line will not use this version." ] + Nothing -> + prettyWarnL + [ flow "Installation path" + , style Dir . fromString $ destDir + , flow "is on the PATH but the" + , style File . fromString $ exe + , flow "executable that was just installed could not be found on \ + \the PATH." + ] + else + prettyWarnL + [ flow "Installation path " + , style Dir . fromString $ destDir + , "not found on the PATH environment variable." 
+ ] diff --git a/src/Path/Extended.hs b/src/Path/Extended.hs deleted file mode 100644 index 70dfaa25a9..0000000000 --- a/src/Path/Extended.hs +++ /dev/null @@ -1,40 +0,0 @@ -{-# LANGUAGE CPP #-} - -module Path.Extended - ( fileExtension - , addExtension - , replaceExtension - ) where - -import Control.Monad.Catch -import qualified Path -import Path (Path, File) - -fileExtension :: MonadThrow m => Path b File -> m String -fileExtension = -#if MIN_VERSION_path(0,7,0) - Path.fileExtension -#else - pure . Path.fileExtension -#endif - -addExtension :: MonadThrow m - => String - -> Path b File - -> m (Path b File) -addExtension = -#if MIN_VERSION_path(0,7,0) - Path.addExtension -#else - Path.addFileExtension -#endif - -replaceExtension :: MonadThrow m - => String - -> Path b File - -> m (Path b File) -#if MIN_VERSION_path(0,7,0) -replaceExtension ext = Path.replaceExtension ('.' : ext) -#else -replaceExtension = flip (Path.-<.>) -#endif diff --git a/src/Path/Extra.hs b/src/Path/Extra.hs index 53417eac44..8a149b18bc 100644 --- a/src/Path/Extra.hs +++ b/src/Path/Extra.hs @@ -1,32 +1,44 @@ {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE ViewPatterns #-} --- | Extra Path utilities. +{-| +Module : Path.Extra +Description : Extra Path utilities. 
+License : BSD-3-Clause +-} module Path.Extra - (toFilePathNoTrailingSep - ,dropRoot - ,parseCollapsedAbsDir - ,parseCollapsedAbsFile - ,concatAndColapseAbsDir - ,rejectMissingFile - ,rejectMissingDir - ,pathToByteString - ,pathToLazyByteString - ,pathToText - ,tryGetModificationTime + ( toFilePathNoTrailingSep + , parseCollapsedAbsDir + , parseCollapsedAbsFile + , concatAndCollapseAbsDir + , rejectMissingFile + , rejectMissingDir + , pathToByteString + , pathToLazyByteString + , pathToText + , tryGetModificationTime + , forgivingResolveDir + , forgivingResolveFile + , forgivingResolveFile' ) where -import Data.Time (UTCTime) +import Data.Time ( UTCTime ) import Path + ( Abs, Dir, File, Path, PathException (..), parseAbsDir + , parseAbsFile, toFilePath + ) import Path.IO -import Path.Internal (Path(..)) + ( doesDirExist, doesFileExist, getCurrentDir + , getModificationTime + ) import RIO -import System.IO.Error (isDoesNotExistError) +import System.IO.Error ( isDoesNotExistError ) import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy.Char8 as BSL import qualified Data.Text as T import qualified Data.Text.Encoding as T +import qualified System.Directory as D import qualified System.FilePath as FP -- | Convert to FilePath but don't add a trailing slash. @@ -48,8 +60,13 @@ parseCollapsedAbsFile = parseAbsFile . collapseFilePath -- | Add a relative FilePath to the end of a Path -- We can't parse the FilePath first because we need to account for ".." -- in the FilePath (#2895) -concatAndColapseAbsDir :: MonadThrow m => Path Abs Dir -> FilePath -> m (Path Abs Dir) -concatAndColapseAbsDir base rel = parseCollapsedAbsDir (toFilePath base FP. rel) +concatAndCollapseAbsDir :: + MonadThrow m + => Path Abs Dir + -> FilePath + -> m (Path Abs Dir) +concatAndCollapseAbsDir base rel = + parseCollapsedAbsDir (toFilePath base FP. rel) -- | Collapse intermediate "." and ".." directories from a path. 
-- @@ -64,49 +81,46 @@ concatAndColapseAbsDir base rel = parseCollapsedAbsDir (toFilePath base FP. r -- (adapted from @Text.Pandoc.Shared@) collapseFilePath :: FilePath -> FilePath collapseFilePath = FP.joinPath . reverse . foldl' go [] . FP.splitDirectories - where - go rs "." = rs - go r@(p:rs) ".." = case p of - ".." -> "..":r - (checkPathSeparator -> True) -> "..":r - _ -> rs - go _ (checkPathSeparator -> True) = [[FP.pathSeparator]] - go rs x = x:rs - checkPathSeparator [x] = FP.isPathSeparator x - checkPathSeparator _ = False - --- | Drop the root (either @\/@ on POSIX or @C:\\@, @D:\\@, etc. on --- Windows). -dropRoot :: Path Abs t -> Path Rel t -dropRoot (Path l) = Path (FP.dropDrive l) + where + go rs "." = rs + go r@(p:rs) ".." = case p of + ".." -> "..":r + (checkPathSeparator -> True) -> "..":r + _ -> rs + go _ (checkPathSeparator -> True) = [[FP.pathSeparator]] + go rs x = x:rs + checkPathSeparator [x] = FP.isPathSeparator x + checkPathSeparator _ = False -- | If given file in 'Maybe' does not exist, ensure we have 'Nothing'. This --- is to be used in conjunction with 'forgivingAbsence' and --- 'resolveFile'. +-- is to be used in conjunction with 'Path.IO.forgivingAbsence' and +-- 'Path.IO.resolveFile'. -- --- Previously the idiom @forgivingAbsence (relsoveFile …)@ alone was used, --- which relied on 'canonicalizePath' throwing 'isDoesNotExistError' when --- path does not exist. As it turns out, this behavior is actually not --- intentional and unreliable, see --- . This was “fixed” in --- version @1.2.3.0@ of @directory@ package (now it never throws). To make --- it work with all versions, we need to use the following idiom: +-- Previously the idiom @forgivingAbsence (resolveFile …)@ alone was used, which +-- relied on 'Path.IO.canonicalizePath' throwing 'isDoesNotExistError' when path +-- does not exist. As it turns out, this behavior is actually not intentional +-- and unreliable, see . 
This +-- was “fixed” in version @1.2.3.0@ of @directory@ package (now it never +-- throws). To make it work with all versions, we need to use the following +-- idiom: -- -- > forgivingAbsence (resolveFile …) >>= rejectMissingFile -rejectMissingFile :: MonadIO m +rejectMissingFile :: + MonadIO m => Maybe (Path Abs File) -> m (Maybe (Path Abs File)) -rejectMissingFile Nothing = return Nothing -rejectMissingFile (Just p) = bool Nothing (Just p) `liftM` doesFileExist p +rejectMissingFile Nothing = pure Nothing +rejectMissingFile (Just p) = bool Nothing (Just p) <$> doesFileExist p -- | See 'rejectMissingFile'. -rejectMissingDir :: MonadIO m +rejectMissingDir :: + MonadIO m => Maybe (Path Abs Dir) -> m (Maybe (Path Abs Dir)) -rejectMissingDir Nothing = return Nothing -rejectMissingDir (Just p) = bool Nothing (Just p) `liftM` doesDirExist p +rejectMissingDir Nothing = pure Nothing +rejectMissingDir (Just p) = bool Nothing (Just p) <$> doesDirExist p -- | Convert to a lazy ByteString using toFilePath and UTF8. pathToLazyByteString :: Path b t -> BSL.ByteString @@ -116,8 +130,67 @@ pathToLazyByteString = BSL.fromStrict . pathToByteString pathToByteString :: Path b t -> BS.ByteString pathToByteString = T.encodeUtf8 . pathToText +-- | Convert to a 'T.Text' type. pathToText :: Path b t -> T.Text pathToText = T.pack . toFilePath +-- | Attempt to get the time at which the given file was last modified. Yields +-- `Left ()` if the file does not exist. +-- +-- The operation may fail with 'System.IO.Error.isPermissionError' if the user +-- is not permitted to read the modification time. +-- +-- Caveat for POSIX systems: This function returns a timestamp with sub-second +-- resolution only if this package is compiled against `unix-2.6.0.0` or later +-- and the underlying filesystem supports them. tryGetModificationTime :: MonadIO m => Path Abs File -> m (Either () UTCTime) -tryGetModificationTime = liftIO . tryJust (guard . isDoesNotExistError) . 
getModificationTime +tryGetModificationTime = + liftIO . tryJust (guard . isDoesNotExistError) . getModificationTime + +-- | 'Path.IO.resolveDir' (@path-io@ package) throws 'InvalidAbsDir' (@path@ +-- package) in certain circumstances; this function yields 'Nothing' in those +-- circumstances. +forgivingResolveDir :: + MonadIO m + => Path Abs Dir + -- ^ Base directory + -> FilePath + -- ^ Path to resolve + -> m (Maybe (Path Abs Dir)) +forgivingResolveDir b p = liftIO $ + D.canonicalizePath (toFilePath b FP. p) >>= \cp -> + catch + (Just <$> parseAbsDir cp) + ( \e -> case e of + InvalidAbsDir _ -> pure Nothing + _ -> throwIO e + ) + +-- | 'Path.IO.resolveFile' (@path-io@ package) throws 'InvalidAbsFile' (@path@ +-- package) in certain circumstances; this function yields 'Nothing' in those +-- circumstances. +forgivingResolveFile :: + MonadIO m + => Path Abs Dir + -- ^ Base directory + -> FilePath + -- ^ Path to resolve + -> m (Maybe (Path Abs File)) +forgivingResolveFile b p = liftIO $ + D.canonicalizePath (toFilePath b FP. p) >>= \cp -> + catch + (Just <$> parseAbsFile cp) + ( \e -> case e of + InvalidAbsFile _ -> pure Nothing + _ -> throwIO e + ) + +-- | 'Path.IO.resolveFile'' (@path-io@ package) throws 'InvalidAbsFile' (@path@ +-- package) in certain circumstances; this function yields 'Nothing' in those +-- circumstances. +forgivingResolveFile' :: + MonadIO m + => FilePath + -- ^ Path to resolve + -> m (Maybe (Path Abs File)) +forgivingResolveFile' p = getCurrentDir >>= flip forgivingResolveFile p diff --git a/src/Path/Find.hs b/src/Path/Find.hs index 581b639b58..6828629047 100644 --- a/src/Path/Find.hs +++ b/src/Path/Find.hs @@ -1,53 +1,66 @@ {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DataKinds #-} --- | Finding files. +{-| +Module : Path.Find +Description : Finding files. +License : BSD-3-Clause + +Finding files. 
+-} module Path.Find - (findFileUp - ,findDirUp - ,findFiles - ,findInParents) - where + ( findFileUp + , findDirUp + , findFiles + , findInParents + ) where -import RIO -import System.IO.Error (isPermissionError) -import Data.List -import Path -import Path.IO hiding (findFiles) -import System.PosixCompat.Files (getSymbolicLinkStatus, isSymbolicLink) +import qualified Data.List as L +import Path ( Abs, Dir, File, Path, parent, toFilePath ) +import Path.IO ( listDir ) +import RIO +import System.IO.Error ( isPermissionError ) +import System.PosixCompat.Files + ( getSymbolicLinkStatus, isSymbolicLink ) -- | Find the location of a file matching the given predicate. -findFileUp :: (MonadIO m,MonadThrow m) - => Path Abs Dir -- ^ Start here. - -> (Path Abs File -> Bool) -- ^ Predicate to match the file. - -> Maybe (Path Abs Dir) -- ^ Do not ascend above this directory. - -> m (Maybe (Path Abs File)) -- ^ Absolute file path. +findFileUp :: + (MonadIO m, MonadThrow m) + => Path Abs Dir -- ^ Start here. + -> (Path Abs File -> Bool) -- ^ Predicate to match the file. + -> Maybe (Path Abs Dir) -- ^ Do not ascend above this directory. + -> m (Maybe (Path Abs File)) -- ^ Absolute file path. findFileUp = findPathUp snd -- | Find the location of a directory matching the given predicate. -findDirUp :: (MonadIO m,MonadThrow m) - => Path Abs Dir -- ^ Start here. - -> (Path Abs Dir -> Bool) -- ^ Predicate to match the directory. - -> Maybe (Path Abs Dir) -- ^ Do not ascend above this directory. - -> m (Maybe (Path Abs Dir)) -- ^ Absolute directory path. +findDirUp :: + (MonadIO m,MonadThrow m) + => Path Abs Dir -- ^ Start here. + -> (Path Abs Dir -> Bool) -- ^ Predicate to match the directory. + -> Maybe (Path Abs Dir) -- ^ Do not ascend above this directory. + -> m (Maybe (Path Abs Dir)) -- ^ Absolute directory path. findDirUp = findPathUp fst -- | Find the location of a path matching the given predicate. 
-findPathUp :: (MonadIO m,MonadThrow m) - => (([Path Abs Dir],[Path Abs File]) -> [Path Abs t]) - -- ^ Choose path type from pair. - -> Path Abs Dir -- ^ Start here. - -> (Path Abs t -> Bool) -- ^ Predicate to match the path. - -> Maybe (Path Abs Dir) -- ^ Do not ascend above this directory. - -> m (Maybe (Path Abs t)) -- ^ Absolute path. -findPathUp pathType dir p upperBound = - do entries <- listDir dir - case find p (pathType entries) of - Just path -> return (Just path) - Nothing | Just dir == upperBound -> return Nothing - | parent dir == dir -> return Nothing - | otherwise -> findPathUp pathType (parent dir) p upperBound +findPathUp :: + (MonadIO m,MonadThrow m) + => (([Path Abs Dir],[Path Abs File]) -> [Path Abs t]) + -- ^ Choose path type from pair. + -> Path Abs Dir + -- ^ Start here. + -> (Path Abs t -> Bool) + -- ^ Predicate to match the path. + -> Maybe (Path Abs Dir) + -- ^ Do not ascend above this directory. + -> m (Maybe (Path Abs t)) + -- ^ Absolute path. +findPathUp pathType dir p upperBound = do + entries <- listDir dir + case L.find p (pathType entries) of + Just path -> pure (Just path) + Nothing | Just dir == upperBound -> pure Nothing + | parent dir == dir -> pure Nothing + | otherwise -> findPathUp pathType (parent dir) p upperBound -- | Find files matching predicate below a root directory. -- @@ -56,38 +69,44 @@ findPathUp pathType dir p upperBound = -- -- TODO: write one of these that traverses symbolic links but -- efficiently ignores loops. -findFiles :: Path Abs Dir -- ^ Root directory to begin with. - -> (Path Abs File -> Bool) -- ^ Predicate to match files. - -> (Path Abs Dir -> Bool) -- ^ Predicate for which directories to traverse. - -> IO [Path Abs File] -- ^ List of matching files. 
-findFiles dir p traversep = - do (dirs,files) <- catchJust (\ e -> if isPermissionError e - then Just () - else Nothing) - (listDir dir) - (\ _ -> return ([], [])) - filteredFiles <- evaluate $ force (filter p files) - filteredDirs <- filterM (fmap not . isSymLink) dirs - subResults <- - forM filteredDirs - (\entry -> - if traversep entry - then findFiles entry p traversep - else return []) - return (concat (filteredFiles : subResults)) +findFiles :: + Path Abs Dir + -- ^ Root directory to begin with. + -> (Path Abs File -> Bool) + -- ^ Predicate to match files. + -> (Path Abs Dir -> Bool) + -- ^ Predicate for which directories to traverse. + -> IO [Path Abs File] + -- ^ List of matching files. +findFiles dir p traversep = do + (dirs,files) <- catchJust (\ e -> if isPermissionError e + then Just () + else Nothing) + (listDir dir) + (\ _ -> pure ([], [])) + filteredFiles <- evaluate $ force (filter p files) + filteredDirs <- filterM (fmap not . isSymLink) dirs + subResults <- + forM filteredDirs + (\entry -> + if traversep entry + then findFiles entry p traversep + else pure []) + pure (concat (filteredFiles : subResults)) isSymLink :: Path Abs t -> IO Bool isSymLink = fmap isSymbolicLink . getSymbolicLinkStatus . toFilePath -- | @findInParents f path@ applies @f@ to @path@ and its 'parent's until -- it finds a 'Just' or reaches the root directory. 
-findInParents :: MonadIO m => (Path Abs Dir -> m (Maybe a)) -> Path Abs Dir -> m (Maybe a) -findInParents f path = do - mres <- f path - case mres of - Just res -> return (Just res) - Nothing -> do - let next = parent path - if next == path - then return Nothing - else findInParents f next +findInParents :: + MonadIO m + => (Path Abs Dir -> m (Maybe a)) + -> Path Abs Dir -> m (Maybe a) +findInParents f path = f path >>= \case + Just res -> pure (Just res) + Nothing -> do + let next = parent path + if next == path + then pure Nothing + else findInParents f next diff --git a/src/Stack.hs b/src/Stack.hs new file mode 100644 index 0000000000..44061d1c1f --- /dev/null +++ b/src/Stack.hs @@ -0,0 +1,143 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack +Description : Main Stack tool entry point. +License : BSD-3-Clause + +Main Stack tool entry point. +-} + +module Stack + ( main + ) where + +import Control.Monad.Extra ( whenJust ) +import GHC.IO.Encoding ( mkTextEncoding, textEncodingName ) +import Options.Applicative.Builder.Extra ( execExtraHelp ) +import Path ( parseAbsFile ) +import Stack.BuildInfo ( versionString' ) +import Stack.CLI ( commandLineHandler ) +import Stack.Constants ( stackProgName ) +import Stack.Docker ( dockerCmdName, dockerHelpOptName ) +import Stack.Nix ( nixCmdName, nixHelpOptName ) +import Stack.Options.DockerParser ( dockerOptsParser ) +import Stack.Options.GlobalParser ( globalOptsFromMonoid ) +import Stack.Options.NixParser ( nixOptsParser ) +import Stack.Prelude +import Stack.Runners + ( ShouldReexec (..), withConfig, withRunnerGlobal ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) 
) +import Stack.Types.Runner ( Runner ) +import Stack.Types.Version + ( VersionCheck (..), checkVersion, showStackVersion + , stackVersion + ) +import System.Directory ( getCurrentDirectory ) +import System.Environment ( executablePath, getArgs, getProgName ) +import System.IO ( hGetEncoding, hPutStrLn, hSetEncoding ) +import System.Terminal ( hIsTerminalDeviceOrMinTTY ) + +-- | Type representing exceptions thrown by functions in the "Stack" module. +data StackException + = InvalidReExecVersion String String + deriving Show + +instance Exception StackException where + displayException (InvalidReExecVersion expected actual) = concat + [ "Error: [S-2186]\n" + , "When re-executing '" + , stackProgName + , "' in a container, the incorrect version was found\nExpected: " + , expected + , "; found: " + , actual + ] + +-- | Main Stack tool entry point. +main :: IO () +main = do + -- Line buffer the output by default, particularly for non-terminal runs. + -- See https://github.com/commercialhaskell/stack/pull/360 + hSetBuffering stdout LineBuffering + hSetBuffering stdin LineBuffering + hSetBuffering stderr LineBuffering + hSetTranslit stdout + hSetTranslit stderr + args <- getArgs + progName <- getProgName + mExecutableFilePath <- fromMaybe (pure Nothing) executablePath + let mExecutablePath = mExecutableFilePath >>= parseAbsFile + isTerminal <- hIsTerminalDeviceOrMinTTY stdout + execExtraHelp + args + dockerHelpOptName + (dockerOptsParser False) + ("Only showing --" ++ dockerCmdName ++ "* options.") + execExtraHelp + args + nixHelpOptName + (nixOptsParser False) + ("Only showing --" ++ nixCmdName ++ "* options.") + currentDir <- getCurrentDirectory + try (commandLineHandler currentDir progName mExecutablePath False) >>= \case + Left (exitCode :: ExitCode) -> + throwIO exitCode + Right (globalMonoid, run) -> do + global <- + globalOptsFromMonoid progName mExecutablePath isTerminal globalMonoid + when (global.logLevel == LevelDebug) $ + hPutStrLn stderr versionString' + 
whenJust global.reExecVersion $ \expectVersion -> do + expectVersion' <- parseVersionThrowing expectVersion + unless (checkVersion MatchMinor expectVersion' stackVersion) $ + throwIO $ InvalidReExecVersion expectVersion showStackVersion + withRunnerGlobal global $ run `catches` + [ Handler handleExitCode + , Handler handlePrettyException + , Handler handlePantryException + , Handler handleSomeException + ] + +-- | Change the character encoding of the given Handle to transliterate on +-- unsupported characters instead of throwing an exception +hSetTranslit :: Handle -> IO () +hSetTranslit h = do + menc <- hGetEncoding h + whenJust (fmap textEncodingName menc) $ \name -> + unless ('/' `elem` name) $ do + enc' <- mkTextEncoding $ name ++ "//TRANSLIT" + hSetEncoding h enc' + +-- | Handle ExitCode exceptions. +handleExitCode :: ExitCode -> RIO Runner a +handleExitCode = exitWith + +-- | Handle PrettyException exceptions. +handlePrettyException :: PrettyException -> RIO Runner a +handlePrettyException = handleAnyPrettyException + +-- | Handle (pretty) PantryException exceptions. +handlePantryException :: PantryException -> RIO Runner a +handlePantryException = handleAnyPrettyException + +-- | Handle any pretty exception. +handleAnyPrettyException :: (Exception e, Pretty e) => e -> RIO Runner a +handleAnyPrettyException e = do + -- The code below loads the entire Stack configuration, when all that is + -- needed are the Stack colours. A tailored approach may be better. + tryAny (withConfig NoReexec $ prettyError $ pretty e) >>= \case + -- Falls back to the command line's Stack colours if there is any error in + -- loading the entire Stack configuration. + Left _ -> prettyError $ pretty e + Right _ -> pure () + exitFailure + +-- | Handle SomeException exceptions. This special handler stops "stack: " from +-- being printed before the exception. 
+handleSomeException :: SomeException -> RIO Runner a +handleSomeException (SomeException e) = do + logError $ fromString $ displayException e + exitFailure diff --git a/src/Stack/Build.hs b/src/Stack/Build.hs index 23b9a97199..fb70273576 100644 --- a/src/Stack/Build.hs +++ b/src/Stack/Build.hs @@ -1,54 +1,150 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} --- | Build the project. +{-| +Module : Stack.Build +Description : Build the project. +License : BSD-3-Clause + +Build the project. +-} module Stack.Build - (build - ,buildLocalTargets - ,loadPackage - ,mkBaseConfigOpts - ,queryBuildInfo - ,splitObjsWarning - ,CabalVersionException(..)) - where + ( buildCmd + , build + , buildLocalTargets + , loadPackage + , mkBaseConfigOpts + , splitObjsWarning + ) where -import Stack.Prelude hiding (loadPackage) -import Data.Aeson (Value (Object, Array), (.=), object) -import qualified Data.HashMap.Strict as HM -import Data.List ((\\), isPrefixOf) -import Data.List.Extra (groupSort) -import qualified Data.List.NonEmpty as NE +import Data.Attoparsec.Args ( EscapingMode (Escaping), parseArgs ) +import qualified Data.Either.Extra as EE +import Data.List ( (\\) ) +import Data.List.Extra ( groupSort ) import qualified Data.Map as Map import qualified Data.Set as Set -import qualified Data.Text as T -import Data.Text.Encoding (decodeUtf8) -import qualified Data.Text.IO as TIO -import Data.Text.Read (decimal) -import qualified Data.Vector as V -import qualified Data.Yaml as Yaml -import qualified Distribution.PackageDescription as C -import Distribution.Types.Dependency (depLibraries) -import Distribution.Version (mkVersion) -import Path (parent) -import 
Stack.Build.ConstructPlan -import Stack.Build.Execute -import Stack.Build.Installed -import Stack.Build.Source -import Stack.Package -import Stack.Setup (withNewLocalBuildTargets) -import Stack.Types.Build -import Stack.Types.Config -import Stack.Types.NamedComponent +-- import qualified Distribution.PackageDescription as C +-- import Distribution.Types.Dependency ( Dependency (..), depLibraries ) +import Distribution.Version ( mkVersion ) +import RIO.NonEmpty ( nonEmpty ) +import qualified RIO.NonEmpty as NE +import Stack.Build.ConstructPlan ( constructPlan ) +import Stack.Build.Execute ( executePlan, preFetch, printPlan ) +import Stack.Build.Installed ( getInstalled, toInstallMap ) +import Stack.Build.Source ( localDependencies, projectLocalPackages ) +import Stack.Build.Target ( NeedTargets (..) ) +import Stack.FileWatch ( fileWatch, fileWatchPoll ) +import Stack.Package ( buildableExes, resolvePackage ) +import Stack.Prelude hiding ( loadPackage ) +import Stack.Runners ( ShouldReexec (..), withConfig, withEnvConfig ) +import Stack.Setup ( withNewLocalBuildTargets ) +import Stack.Types.Build.ConstructPlan ( PackageLoader ) +import Stack.Types.Build.Exception + ( BuildException (..), BuildPrettyException (..) ) +import Stack.Types.BuildConfig ( HasBuildConfig, configFileL ) +import Stack.Types.BuildOpts ( BuildOpts (..) ) +import Stack.Types.BuildOptsCLI + ( BuildCommand (..), BuildOptsCLI (..), FileWatchOpts (..) ) +import Stack.Types.BuildOptsMonoid + ( buildOptsMonoidBenchmarksL, buildOptsMonoidHaddockL + , buildOptsMonoidInstallExesL, buildOptsMonoidTestsL + ) +import Stack.Types.CompilerPaths ( HasCompiler, cabalVersionL ) +import Stack.Types.ComponentUtils + ( StackUnqualCompName, unqualCompToString ) +import Stack.Types.Config ( Config (..), HasConfig (..), buildOptsL ) +import Stack.Types.ConfigureOpts ( BaseConfigOpts (..) 
) +import Stack.Types.EnvConfig + ( EnvConfig (..), HasEnvConfig (..), HasSourceMap + , actualCompilerVersionL, installationRootDeps + , installationRootLocal, packageDatabaseDeps + , packageDatabaseExtra, packageDatabaseLocal + ) +import Stack.Types.GlobalOpts ( globalOptsBuildOptsMonoidL ) +import Stack.Types.NamedComponent ( exeComponents ) import Stack.Types.Package + ( InstallLocation (..), LocalPackage (..), Package (..) + , PackageConfig (..), lpFiles, lpFilesForComponents + ) +import Stack.Types.Plan + ( Plan (..), Task (..), TaskType (..), taskLocation + , taskProvides + ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.Runner ( Runner, globalOptsL ) import Stack.Types.SourceMap + ( SMTargets (..), SourceMap (..), Target (..) ) + +newtype CabalVersionPrettyException + = CabalVersionNotSupported Version + deriving Show + +instance Pretty CabalVersionPrettyException where + pretty (CabalVersionNotSupported cabalVer) = + "[S-5973]" + <> line + <> fillSep + [ flow "Stack builds with the version of the Cabal package that comes \ + \with the specified version of GHC. However, Stack no longer \ + \supports such Cabal versions before 2.2. Version" + , fromString $ versionString cabalVer + , flow "was found. To fix this, either use Stack" + , downgradeRecommendation + , flow "or earlier or use a snapshot that specifies a version of GHC \ + \that is 8.4 or later. Stackage LTS Haskell 12.0" + , parens (style Shell "lts-12.0") + , flow "or later or Nightly 2018-03-13" + , parens (style Shell "nightly-2018-03-13") + , flow "or later specify such GHC versions." + ] + where + -- Due to a bug, Stack 2.15.1 does not support Cabal < 2. 
+ downgradeRecommendation = if cabalVer < mkVersion [2] + then "2.15.3 or 2.13.1" + else "2.15.3" -import Stack.Types.Compiler (compilerVersionText, getGhcVersion) -import System.Terminal (fixCodePage) +instance Exception CabalVersionPrettyException + +-- | Helper for build and install commands +buildCmd :: BuildOptsCLI -> RIO Runner () +buildCmd opts = do + when (any (("-prof" `elem`) . fromRight [] . parseArgs Escaping) opts.ghcOptions) $ + prettyThrowIO GHCProfOptionInvalid + local (over globalOptsL modifyGO) $ + case opts.fileWatch of + FileWatchPoll -> withFileWatchHook fileWatchPoll + FileWatch -> withFileWatchHook fileWatch + NoFileWatch -> inner Nothing + where + withFileWatchHook fileWatchAction = + -- This loads the full configuration in order to obtain the file-watch-hook + -- setting. That is likely not the most efficient approach. + withConfig YesReexec $ withEnvConfig NeedTargets opts $ + fileWatchAction (inner . Just) + inner :: + Maybe (Set (Path Abs File) -> IO ()) + -> RIO Runner () + inner setLocalFiles = withConfig YesReexec $ withEnvConfig NeedTargets opts $ + Stack.Build.build setLocalFiles + -- Read the build command from the CLI and enable it to run + modifyGO = + case opts.command of + Test -> set + (globalOptsBuildOptsMonoidL . buildOptsMonoidTestsL) + (Just True) + Haddock -> set + (globalOptsBuildOptsMonoidL . buildOptsMonoidHaddockL) + (Just True) + Bench -> set + (globalOptsBuildOptsMonoidL . buildOptsMonoidBenchmarksL) + (Just True) + Install -> set + (globalOptsBuildOptsMonoidL . buildOptsMonoidInstallExesL) + (Just True) + Build -> id -- Default case is just Build -- | Build. 
-- @@ -59,322 +155,282 @@ build :: HasEnvConfig env => Maybe (Set (Path Abs File) -> IO ()) -- ^ callback after discovering all local files -> RIO env () build msetLocalFiles = do - mcp <- view $ configL.to configModifyCodePage - ghcVersion <- view $ actualCompilerVersionL.to getGhcVersion - fixCodePage mcp ghcVersion $ do - bopts <- view buildOptsL - sourceMap <- view $ envConfigL.to envConfigSourceMap - locals <- projectLocalPackages - depsLocals <- localDependencies - let allLocals = locals <> depsLocals - - checkSubLibraryDependencies (Map.elems $ smProject sourceMap) - - boptsCli <- view $ envConfigL.to envConfigBuildOptsCLI - -- Set local files, necessary for file watching - stackYaml <- view stackYamlL - for_ msetLocalFiles $ \setLocalFiles -> do - files <- - if boptsCLIWatchAll boptsCli - then sequence [lpFiles lp | lp <- allLocals] - else forM allLocals $ \lp -> do - let pn = packageName (lpPackage lp) - case Map.lookup pn (smtTargets $ smTargets sourceMap) of - Nothing -> - pure Set.empty - Just (TargetAll _) -> - lpFiles lp - Just (TargetComps components) -> - lpFilesForComponents components lp - liftIO $ setLocalFiles $ Set.insert stackYaml $ Set.unions files + bopts <- view buildOptsL + sourceMap <- view $ envConfigL . to (.sourceMap) + locals <- projectLocalPackages + depsLocals <- localDependencies + boptsCli <- view $ envConfigL . to (.buildOptsCLI) + -- Set local files, necessary for file watching + configFile <- view configFileL + let allLocals = locals <> depsLocals + -- We are indifferent as to whether the configuration file is a + -- user-specific global or a project-level one. 
+ eitherConfigFile = EE.fromEither configFile + for_ msetLocalFiles $ \setLocalFiles -> do + files <- + if boptsCli.watchAll + then sequence [lpFiles lp | lp <- allLocals] + else forM allLocals $ \lp -> do + let pn = lp.package.name + case Map.lookup pn sourceMap.targets.targets of + Nothing -> + pure Set.empty + Just (TargetAll _) -> + lpFiles lp + Just (TargetComps components) -> + lpFilesForComponents components lp + liftIO $ setLocalFiles $ Set.insert eitherConfigFile $ Set.unions files - checkComponentsBuildable allLocals + checkComponentsBuildable allLocals - installMap <- toInstallMap sourceMap - (installedMap, globalDumpPkgs, snapshotDumpPkgs, localDumpPkgs) <- - getInstalled installMap + installMap <- toInstallMap sourceMap + (installedMap, globalDumpPkgs, snapshotDumpPkgs, localDumpPkgs) <- + getInstalled installMap - baseConfigOpts <- mkBaseConfigOpts boptsCli - plan <- constructPlan baseConfigOpts localDumpPkgs loadPackage sourceMap installedMap (boptsCLIInitialBuildSteps boptsCli) + baseConfigOpts <- mkBaseConfigOpts boptsCli + plan <- constructPlan + baseConfigOpts + localDumpPkgs + loadPackage + sourceMap + installedMap + boptsCli.initialBuildSteps - allowLocals <- view $ configL.to configAllowLocals - unless allowLocals $ case justLocals plan of - [] -> return () - localsIdents -> throwM $ LocalPackagesPresent localsIdents + allowLocals <- view $ configL . 
to (.allowLocals) + unless allowLocals $ case justLocals plan of + [] -> pure () + localsIdents -> throwM $ LocalPackagesPresent localsIdents - checkCabalVersion - warnAboutSplitObjs bopts - warnIfExecutablesWithSameNameCouldBeOverwritten locals plan + checkCabalVersion + haddockCompsSupported <- warnAboutHaddockComps bopts + let disableHaddockComps = + local $ over buildOptsL $ \bo -> + bo + { haddockExecutables = False + , haddockTests = False + , haddockBenchmarks = False + } + withHaddockCompsGuarded = if haddockCompsSupported + then id + else disableHaddockComps + warnAboutSplitObjs bopts + warnIfExecutablesWithSameNameCouldBeOverwritten locals plan - when (boptsPreFetch bopts) $ - preFetch plan + when bopts.preFetch $ + preFetch plan - if boptsCLIDryrun boptsCli - then printPlan plan - else executePlan boptsCli baseConfigOpts locals - globalDumpPkgs - snapshotDumpPkgs - localDumpPkgs - installedMap - (smtTargets $ smTargets sourceMap) - plan + if boptsCli.dryrun + then + printPlan plan + else + withHaddockCompsGuarded $ executePlan + boptsCli + baseConfigOpts + locals + globalDumpPkgs + snapshotDumpPkgs + localDumpPkgs + installedMap + sourceMap.targets.targets + plan -buildLocalTargets :: HasEnvConfig env => NonEmpty Text -> RIO env (Either SomeException ()) +-- | Build one or more local targets. +buildLocalTargets :: + HasEnvConfig env + => NonEmpty Text + -- ^ Local targets to build. + -> RIO env (Either SomeException ()) buildLocalTargets targets = tryAny $ withNewLocalBuildTargets (NE.toList targets) $ build Nothing justLocals :: Plan -> [PackageIdentifier] justLocals = - map taskProvides . - filter ((== Local) . taskLocation) . - Map.elems . - planTasks + map taskProvides . + filter ((== Local) . taskLocation) . + Map.elems . 
+ (.tasks) checkCabalVersion :: HasEnvConfig env => RIO env () checkCabalVersion = do - allowNewer <- view $ configL.to configAllowNewer - cabalVer <- view cabalVersionL - -- https://github.com/haskell/cabal/issues/2023 - when (allowNewer && cabalVer < mkVersion [1, 22]) $ throwM $ - CabalVersionException $ - "Error: --allow-newer requires at least Cabal version 1.22, but version " ++ - versionString cabalVer ++ - " was found." - -- Since --exact-configuration is always passed, some old cabal - -- versions can no longer be used. See the following link for why - -- it's 1.19.2: - -- https://github.com/haskell/cabal/blob/580fe6b6bf4e1648b2f66c1cb9da9f1f1378492c/cabal-install/Distribution/Client/Setup.hs#L592 - when (cabalVer < mkVersion [1, 19, 2]) $ throwM $ - CabalVersionException $ - "Stack no longer supports Cabal versions older than 1.19.2, but version " ++ - versionString cabalVer ++ - " was found. To fix this, consider updating the resolver to lts-3.0 or later / nightly-2015-05-05 or later." - -newtype CabalVersionException = CabalVersionException { unCabalVersionException :: String } - deriving (Typeable) - -instance Show CabalVersionException where show = unCabalVersionException -instance Exception CabalVersionException + cabalVer <- view cabalVersionL + when (cabalVer < mkVersion [2, 2]) $ + prettyThrowM $ CabalVersionNotSupported cabalVer -- | See https://github.com/commercialhaskell/stack/issues/1198. 
-warnIfExecutablesWithSameNameCouldBeOverwritten - :: HasLogFunc env => [LocalPackage] -> Plan -> RIO env () +warnIfExecutablesWithSameNameCouldBeOverwritten :: + HasTerm env + => [LocalPackage] + -> Plan + -> RIO env () warnIfExecutablesWithSameNameCouldBeOverwritten locals plan = do - logDebug "Checking if we are going to build multiple executables with the same name" - forM_ (Map.toList warnings) $ \(exe,(toBuild,otherLocals)) -> do - let exe_s - | length toBuild > 1 = "several executables with the same name:" - | otherwise = "executable" - exesText pkgs = - T.intercalate - ", " - ["'" <> T.pack (packageNameString p) <> ":" <> exe <> "'" | p <- pkgs] - (logWarn . display . T.unlines . concat) - [ [ "Building " <> exe_s <> " " <> exesText toBuild <> "." ] - , [ "Only one of them will be available via 'stack exec' or locally installed." - | length toBuild > 1 - ] - , [ "Other executables with the same name might be overwritten: " <> - exesText otherLocals <> "." - | not (null otherLocals) - ] - ] - where - -- Cases of several local packages having executables with the same name. - -- The Map entries have the following form: - -- - -- executable name: ( package names for executables that are being built - -- , package names for other local packages that have an - -- executable with the same name - -- ) - warnings :: Map Text ([PackageName],[PackageName]) - warnings = - Map.mapMaybe - (\(pkgsToBuild,localPkgs) -> - case (pkgsToBuild,NE.toList localPkgs \\ NE.toList pkgsToBuild) of - (_ :| [],[]) -> - -- We want to build the executable of single local package - -- and there are no other local packages with an executable of - -- the same name. Nothing to warn about, ignore. - Nothing - (_,otherLocals) -> - -- We could be here for two reasons (or their combination): - -- 1) We are building two or more executables with the same - -- name that will end up overwriting each other. 
- -- 2) In addition to the executable(s) that we want to build - -- there are other local packages with an executable of the - -- same name that might get overwritten. - -- Both cases warrant a warning. - Just (NE.toList pkgsToBuild,otherLocals)) - (Map.intersectionWith (,) exesToBuild localExes) - exesToBuild :: Map Text (NonEmpty PackageName) - exesToBuild = - collect - [ (exe,pkgName') - | (pkgName',task) <- Map.toList (planTasks plan) - , TTLocalMutable lp <- [taskType task] - , exe <- (Set.toList . exeComponents . lpComponents) lp + logDebug "Checking if we are going to build multiple executables with the same name" + forM_ (Map.toList warnings) $ \(exe, (toBuild, otherLocals)) -> do + let exe_s + | length toBuild > 1 = flow "several executables with the same name:" + | otherwise = "executable" + exesText pkgs = + fillSep $ punctuate + "," + [ style + PkgComponent + (fromString $ packageNameString p <> ":" <> unqualCompToString exe) + | p <- pkgs ] - localExes :: Map Text (NonEmpty PackageName) - localExes = - collect - [ (exe,packageName pkg) - | pkg <- map lpPackage locals - , exe <- Set.toList (packageExes pkg) - ] - collect :: Ord k => [(k,v)] -> Map k (NonEmpty v) - collect = Map.map NE.fromList . Map.fromDistinctAscList . groupSort + prettyWarnL $ + [ "Building" + , exe_s + , exesText toBuild <> "." + ] + <> [ fillSep + [ flow "Only one of them will be available via" + , style Shell "stack exec" + , flow "or locally installed." + ] + | length toBuild > 1 + ] + <> [ fillSep + [ flow "Other executables with the same name might be overwritten:" + , exesText otherLocals <> "." + ] + | not (null otherLocals) + ] + where + -- Cases of several project packages having executables with the same name. 
+ -- The Map entries have the following form: + -- + -- executable name: ( package names for executables that are being built + -- , package names for other project packages that have an + -- executable with the same name + -- ) + warnings :: Map StackUnqualCompName ([PackageName],[PackageName]) + warnings = + Map.mapMaybe + (\(pkgsToBuild, localPkgs) -> + case (pkgsToBuild, NE.toList localPkgs \\ NE.toList pkgsToBuild) of + (_ :| [], []) -> + -- We want to build the executable of single project package + -- and there are no other project packages with an executable of + -- the same name. Nothing to warn about, ignore. + Nothing + (_, otherLocals) -> + -- We could be here for two reasons (or their combination): + -- 1) We are building two or more executables with the same + -- name that will end up overwriting each other. + -- 2) In addition to the executable(s) that we want to build + -- there are other project packages with an executable of the + -- same name that might get overwritten. + -- Both cases warrant a warning. + Just (NE.toList pkgsToBuild, otherLocals)) + (Map.intersectionWith (,) exesToBuild localExes) + exesToBuild :: Map StackUnqualCompName (NonEmpty PackageName) + exesToBuild = + collect + [ (exe, pkgName') + | (pkgName', task) <- Map.toList plan.tasks + , TTLocalMutable lp <- [task.taskType] + , exe <- (Set.toList . exeComponents . (.components)) lp + ] + localExes :: Map StackUnqualCompName (NonEmpty PackageName) + localExes = + collect + [ (exe, pkg.name) + | pkg <- map (.package) locals + , exe <- Set.toList (buildableExes pkg) + ] + collect :: Ord k => [(k, v)] -> Map k (NonEmpty v) + collect = Map.mapMaybe nonEmpty . Map.fromDistinctAscList . groupSort -warnAboutSplitObjs :: HasLogFunc env => BuildOpts -> RIO env () -warnAboutSplitObjs bopts | boptsSplitObjs bopts = do - logWarn $ "Building with --split-objs is enabled. 
" <> fromString splitObjsWarning -warnAboutSplitObjs _ = return () +warnAboutHaddockComps :: + (HasCompiler env, HasTerm env) + => BuildOpts + -> RIO env Bool +warnAboutHaddockComps bopts = do + let haddockCompsWanted = + bopts.haddockExecutables + || bopts.haddockTests + || bopts.haddockBenchmarks + cabalVer <- view cabalVersionL + if haddockCompsWanted && cabalVer < mkVersion [3, 8, 1] + then do + prettyWarnL + [ flow "Stack builds Haddock documentation with the version of the \ + \Cabal package that comes with the specified version of GHC. \ + \Version" + , fromString $ versionString cabalVer + , flow "was found, which does not support the building of \ + \documentation for executables, test suites or benchmarks. \ + \Options to build such documentation will be ignored. To use \ + \the options, use a snapshot that specifies a version of GHC \ + \that is 9.4 or later. Stackage LTS Haskell 21.0" + , parens (style Shell "lts-21.0") + , flow "or later or Nightly 2022-11-19" + , parens (style Shell "nightly-2022-11-19") + , flow "or later specify such GHC versions." + ] + pure False + else pure haddockCompsWanted + +warnAboutSplitObjs :: HasTerm env => BuildOpts -> RIO env () +warnAboutSplitObjs bopts | bopts.splitObjs = + prettyWarnL + [ flow "Building with" + , style Shell "--split-objs" + , flow "is enabled." + , flow splitObjsWarning + ] +warnAboutSplitObjs _ = pure () +-- | Text warning about the experimental nature of Stack's @--split-objs@ flag. splitObjsWarning :: String -splitObjsWarning = unwords - [ "Note that this feature is EXPERIMENTAL, and its behavior may be changed and improved." - , "You will need to clean your workdirs before use. If you want to compile all dependencies" - , "with split-objs, you will need to delete the snapshot (and all snapshots that could" - , "reference that snapshot)." - ] +splitObjsWarning = + "Note that this feature is EXPERIMENTAL, and its behavior may be changed and \ + \improved. 
You will need to clean your workdirs before use. If you want to \ + \compile all dependencies with split-objs, you will need to delete the \ + \snapshot (and all snapshots that could reference that snapshot)." -- | Get the @BaseConfigOpts@ necessary for constructing configure options mkBaseConfigOpts :: (HasEnvConfig env) => BuildOptsCLI -> RIO env BaseConfigOpts -mkBaseConfigOpts boptsCli = do - bopts <- view buildOptsL - snapDBPath <- packageDatabaseDeps - localDBPath <- packageDatabaseLocal - snapInstallRoot <- installationRootDeps - localInstallRoot <- installationRootLocal - packageExtraDBs <- packageDatabaseExtra - return BaseConfigOpts - { bcoSnapDB = snapDBPath - , bcoLocalDB = localDBPath - , bcoSnapInstallRoot = snapInstallRoot - , bcoLocalInstallRoot = localInstallRoot - , bcoBuildOpts = bopts - , bcoBuildOptsCLI = boptsCli - , bcoExtraDBs = packageExtraDBs - } +mkBaseConfigOpts buildOptsCLI = do + buildOpts <- view buildOptsL + snapDB <- packageDatabaseDeps + localDB <- packageDatabaseLocal + snapInstallRoot <- installationRootDeps + localInstallRoot <- installationRootLocal + extraDBs <- packageDatabaseExtra + pure BaseConfigOpts + { snapDB + , localDB + , snapInstallRoot + , localInstallRoot + , buildOpts + , buildOptsCLI + , extraDBs + } -- | Provide a function for loading package information from the package index -loadPackage - :: (HasBuildConfig env, HasSourceMap env) - => PackageLocationImmutable - -> Map FlagName Bool - -> [Text] -- ^ GHC options - -> [Text] -- ^ Cabal configure options - -> RIO env Package +loadPackage :: (HasBuildConfig env, HasSourceMap env) => PackageLoader (RIO env) loadPackage loc flags ghcOptions cabalConfigOpts = do - compiler <- view actualCompilerVersionL + compilerVersion <- view actualCompilerVersionL platform <- view platformL let pkgConfig = PackageConfig - { packageConfigEnableTests = False - , packageConfigEnableBenchmarks = False - , packageConfigFlags = flags - , packageConfigGhcOptions = ghcOptions - , 
packageConfigCabalConfigOpts = cabalConfigOpts - , packageConfigCompilerVersion = compiler - , packageConfigPlatform = platform + { enableTests = False + , enableBenchmarks = False + , flags + , ghcOptions + , cabalConfigOpts + , compilerVersion + , platform } resolvePackage pkgConfig <$> loadCabalFileImmutable loc --- | Query information about the build and print the result to stdout in YAML format. -queryBuildInfo :: HasEnvConfig env - => [Text] -- ^ selectors - -> RIO env () -queryBuildInfo selectors0 = - rawBuildInfo - >>= select id selectors0 - >>= liftIO . TIO.putStrLn . addGlobalHintsComment . decodeUtf8 . Yaml.encode - where - select _ [] value = return value - select front (sel:sels) value = - case value of - Object o -> - case HM.lookup sel o of - Nothing -> err "Selector not found" - Just value' -> cont value' - Array v -> - case decimal sel of - Right (i, "") - | i >= 0 && i < V.length v -> cont $ v V.! i - | otherwise -> err "Index out of range" - _ -> err "Encountered array and needed numeric selector" - _ -> err $ "Cannot apply selector to " ++ show value - where - cont = select (front . (sel:)) sels - err msg = throwString $ msg ++ ": " ++ show (front [sel]) - -- Include comments to indicate that this portion of the "stack - -- query" API is not necessarily stable. - addGlobalHintsComment - | null selectors0 = T.replace globalHintsLine ("\n" <> globalHintsComment <> globalHintsLine) - -- Append comment instead of pre-pending. The reasoning here is - -- that something *could* expect that the result of 'stack query - -- global-hints ghc-boot' is just a string literal. Seems easier - -- for to expect the first line of the output to be the literal. 
- | ["global-hints"] `isPrefixOf` selectors0 = (<> ("\n" <> globalHintsComment)) - | otherwise = id - globalHintsLine = "\nglobal-hints:\n" - globalHintsComment = T.concat - [ "# Note: global-hints is experimental and may be renamed / removed in the future.\n" - , "# See https://github.com/commercialhaskell/stack/issues/3796" - ] --- | Get the raw build information object -rawBuildInfo :: HasEnvConfig env => RIO env Value -rawBuildInfo = do - locals <- projectLocalPackages - wantedCompiler <- view $ wantedCompilerVersionL.to (utf8BuilderToText . display) - actualCompiler <- view $ actualCompilerVersionL.to compilerVersionText - return $ object - [ "locals" .= Object (HM.fromList $ map localToPair locals) - , "compiler" .= object - [ "wanted" .= wantedCompiler - , "actual" .= actualCompiler - ] - ] - where - localToPair lp = - (T.pack $ packageNameString $ packageName p, value) - where - p = lpPackage lp - value = object - [ "version" .= CabalString (packageVersion p) - , "path" .= toFilePath (parent $ lpCabalFile lp) - ] - checkComponentsBuildable :: MonadThrow m => [LocalPackage] -> m () checkComponentsBuildable lps = - unless (null unbuildable) $ throwM $ SomeTargetsNotBuildable unbuildable - where - unbuildable = - [ (packageName (lpPackage lp), c) - | lp <- lps - , c <- Set.toList (lpUnbuildable lp) - ] - --- | Find if sublibrary dependency exist in each project -checkSubLibraryDependencies :: HasLogFunc env => [ProjectPackage] -> RIO env () -checkSubLibraryDependencies proj = do - forM_ proj $ \p -> do - C.GenericPackageDescription _ _ lib subLibs foreignLibs exes tests benches <- liftIO $ cpGPD . ppCommon $ p - - let dependencies = concatMap getDeps subLibs <> - concatMap getDeps foreignLibs <> - concatMap getDeps exes <> - concatMap getDeps tests <> - concatMap getDeps benches <> - maybe [] C.condTreeConstraints lib - libraries = concatMap (toList . 
depLibraries) dependencies - - when (subLibDepExist libraries) - (logWarn "SubLibrary dependency is not supported, this will almost certainly fail") - where - getDeps (_, C.CondNode _ dep _) = dep - subLibDepExist lib = - any (\x -> - case x of - C.LSubLibName _ -> True - C.LMainLibName -> False - ) lib + unless (null unbuildable) $ + prettyThrowM $ SomeTargetsNotBuildable unbuildable + where + unbuildable = + [ (lp.package.name, c) + | lp <- lps + , c <- Set.toList lp.unbuildable + ] diff --git a/src/Stack/Build/Cache.hs b/src/Stack/Build/Cache.hs index 9107473c3d..2f5aca0d3e 100644 --- a/src/Stack/Build/Cache.hs +++ b/src/Stack/Build/Cache.hs @@ -1,149 +1,204 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE FlexibleContexts #-} - --- | Cache information about previous builds -module Stack.Build.Cache - ( tryGetBuildCache - , tryGetConfigCache - , tryGetCabalMod - , tryGetSetupConfigMod - , getInstalledExes - , tryGetFlagCache - , deleteCaches - , markExeInstalled - , markExeNotInstalled - , writeFlagCache - , writeBuildCache - , writeConfigCache - , writeCabalMod - , TestStatus (..) - , setTestStatus - , getTestStatus - , writePrecompiledCache - , readPrecompiledCache - -- Exported for testing - , BuildCache(..) - ) where +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} -import Stack.Prelude -import Crypto.Hash (hashWith, SHA256(..)) -import qualified Data.ByteArray as Mem (convert) +{-| +Module : Stack.Build.Cache +Description : Cache information about previous builds. +License : BSD-3-Clause + +Cache information about previous builds. 
+-} + +module Stack.Build.Cache + ( tryGetBuildCache + , tryGetConfigCache + , tryGetCabalMod + , tryGetSetupConfigMod + , tryGetPackageProjectRoot + , getInstalledExes + , tryGetFlagCache + , deleteCaches + , markExeInstalled + , markExeNotInstalled + , writeFlagCache + , writeBuildCache + , writeConfigCache + , writeCabalMod + , writeSetupConfigMod + , writePackageProjectRoot + , TestStatus (..) + , setTestStatus + , getTestStatus + , writePrecompiledCache + , readPrecompiledCache + -- Exported for testing + , BuildFileCache (..) + ) where + +import Crypto.Hash ( hashWith, SHA256 (..) ) +import qualified Data.ByteArray as Mem ( convert ) +import Data.ByteString.Builder ( byteString ) import qualified Data.Map as M import qualified Data.Set as Set -import qualified Data.Text as T import qualified Data.Yaml as Yaml -import Foreign.C.Types (CTime) -import Path +import Foreign.C.Types ( CTime ) +import Path ( (</>), filename, parent, parseRelFile ) import Path.IO -import Stack.Constants + ( ensureDir, ignoringAbsence, listDir, makeRelative + , removeFile + ) +import Stack.Constants ( bindirSuffix, relDirInstalledPackages ) import Stack.Constants.Config + ( buildCachesDir, configCabalMod, configPackageProjectRoot + , configSetupConfigMod, testSuccessFile + ) +import Stack.Prelude import Stack.Storage.Project + ( ConfigCacheKey, configCacheKey, deactiveConfigCache + , loadConfigCache, saveConfigCache + ) import Stack.Storage.User -import Stack.Types.Build + ( PrecompiledCacheKey, loadPrecompiledCache + , precompiledCacheKey, savePrecompiledCache + ) import Stack.Types.Cache + ( BuildFileCache (..), ConfigCache, ConfigCacheType (..) + , FileCache, PrecompiledCache (..) 
+ ) +import Stack.Types.CompilerPaths ( cabalVersionL ) +import Stack.Types.ComponentUtils + ( StackUnqualCompName, unqualCompToString ) +import Stack.Types.Config ( stackRootL ) +import Stack.Types.ConfigureOpts + ( BaseConfigOpts (..), ConfigureOpts (..) ) +import Stack.Types.EnvConfig + ( EnvConfig (..), HasEnvConfig (..), actualCompilerVersionL + , installationRootDeps, installationRootLocal + , platformGhcRelDir + ) +import Stack.Types.GhcPkgId ( ghcPkgIdString ) +import Stack.Types.Installed + ( InstallLocation (..), Installed (..) + , InstalledLibraryInfo (..), foldOnGhcPkgId' + ) import Stack.Types.NamedComponent -import Stack.Types.SourceMap (smRelDir) -import System.PosixCompat.Files (modificationTime, getFileStatus, setFileTimes) - --- | Directory containing files to mark an executable as installed -exeInstalledDir :: (HasEnvConfig env) - => InstallLocation -> RIO env (Path Abs Dir) -exeInstalledDir Snap = (</> relDirInstalledPackages) `liftM` installationRootDeps -exeInstalledDir Local = (</> relDirInstalledPackages) `liftM` installationRootLocal - --- | Get all of the installed executables -getInstalledExes :: (HasEnvConfig env) - => InstallLocation -> RIO env [PackageIdentifier] + ( NamedComponent (..), componentCachePath ) +import Stack.Types.SourceMap ( smRelDir ) +import System.PosixCompat.Files + ( getFileStatus, modificationTime, setFileTimes ) + +-- | Directory containing files to mark an executable as installed. +exeInstalledDir :: + (HasEnvConfig env) + => InstallLocation + -> RIO env (Path Abs Dir) +exeInstalledDir Snap = (</> relDirInstalledPackages) <$> installationRootDeps +exeInstalledDir Local = (</> relDirInstalledPackages) <$> installationRootLocal + +-- | Get all of the installed executables. 
+getInstalledExes :: + (HasEnvConfig env) + => InstallLocation + -> RIO env [PackageIdentifier] getInstalledExes loc = do - dir <- exeInstalledDir loc - (_, files) <- liftIO $ handleIO (const $ return ([], [])) $ listDir dir - return $ - concat $ - M.elems $ - -- If there are multiple install records (from a stack version - -- before https://github.com/commercialhaskell/stack/issues/2373 - -- was fixed), then we don't know which is correct - ignore them. - M.fromListWith (\_ _ -> []) $ - map (\x -> (pkgName x, [x])) $ - mapMaybe (parsePackageIdentifier . toFilePath . filename) files - --- | Mark the given executable as installed -markExeInstalled :: (HasEnvConfig env) - => InstallLocation -> PackageIdentifier -> RIO env () + dir <- exeInstalledDir loc + (_, files) <- liftIO $ handleIO (const $ pure ([], [])) $ listDir dir + pure $ + concat $ + M.elems $ + -- If there are multiple install records (from a Stack version before + -- https://github.com/commercialhaskell/stack/issues/2373 was fixed), then + -- we don't know which is correct - ignore them. + M.fromListWith (\_ _ -> []) $ + map (\x -> (pkgName x, [x])) $ + mapMaybe (parsePackageIdentifier . toFilePath . filename) files + +-- | Mark the given executable as installed. +markExeInstalled :: + (HasEnvConfig env) + => InstallLocation + -> PackageIdentifier + -> RIO env () markExeInstalled loc ident = do - dir <- exeInstalledDir loc - ensureDir dir - ident' <- parseRelFile $ packageIdentifierString ident - let fp = dir </> ident' - -- Remove old install records for this package. - -- TODO: This is a bit in-efficient. Put all this metadata into one file? 
- installed <- getInstalledExes loc - forM_ (filter (\x -> pkgName ident == pkgName x) installed) - (markExeNotInstalled loc) - -- TODO consideration for the future: list all of the executables - -- installed, and invalidate this file in getInstalledExes if they no - -- longer exist - writeBinaryFileAtomic fp "Installed" - --- | Mark the given executable as not installed -markExeNotInstalled :: (HasEnvConfig env) - => InstallLocation -> PackageIdentifier -> RIO env () + dir <- exeInstalledDir loc + ensureDir dir + ident' <- parseRelFile $ packageIdentifierString ident + let fp = dir ident' + -- Remove old install records for this package. + -- TODO: This is a bit in-efficient. Put all this metadata into one file? + installed <- getInstalledExes loc + forM_ (filter (\x -> pkgName ident == pkgName x) installed) + (markExeNotInstalled loc) + -- TODO consideration for the future: list all of the executables installed, + -- and invalidate this file in getInstalledExes if they no longer exist + writeBinaryFileAtomic fp "Installed" + +-- | Mark the given executable as not installed. +markExeNotInstalled :: + (HasEnvConfig env) + => InstallLocation + -> PackageIdentifier + -> RIO env () markExeNotInstalled loc ident = do - dir <- exeInstalledDir loc - ident' <- parseRelFile $ packageIdentifierString ident - liftIO $ ignoringAbsence (removeFile $ dir ident') - -buildCacheFile :: (HasEnvConfig env, MonadReader env m, MonadThrow m) - => Path Abs Dir - -> NamedComponent - -> m (Path Abs File) + dir <- exeInstalledDir loc + ident' <- parseRelFile $ packageIdentifierString ident + liftIO $ ignoringAbsence (removeFile $ dir ident') + +buildCacheFile :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => Path Abs Dir + -- ^ Package directory. + -> NamedComponent + -- ^ Package component. 
+ -> m (Path Abs File) buildCacheFile dir component = do - cachesDir <- buildCachesDir dir - smh <- view $ envConfigL.to envConfigSourceMapHash - smDirName <- smRelDir smh - let nonLibComponent prefix name = prefix <> "-" <> T.unpack name - cacheFileName <- parseRelFile $ case component of - CLib -> "lib" - CInternalLib name -> nonLibComponent "internal-lib" name - CExe name -> nonLibComponent "exe" name - CTest name -> nonLibComponent "test" name - CBench name -> nonLibComponent "bench" name - return $ cachesDir smDirName cacheFileName + cachesDir <- buildCachesDir dir + smh <- view $ envConfigL . to (.sourceMapHash) + smDirName <- smRelDir smh + cacheFileName <- parseRelFile $ componentCachePath component + pure $ cachesDir smDirName cacheFileName -- | Try to read the dirtiness cache for the given package directory. -tryGetBuildCache :: HasEnvConfig env - => Path Abs Dir - -> NamedComponent - -> RIO env (Maybe (Map FilePath FileCacheInfo)) +tryGetBuildCache :: + HasEnvConfig env + => Path Abs Dir + -- ^ Package directory. + -> NamedComponent + -- ^ Package component. + -> RIO env (Maybe FileCache) tryGetBuildCache dir component = do fp <- buildCacheFile dir component ensureDir $ parent fp - either (const Nothing) (Just . buildCacheTimes) <$> - liftIO (tryAny (Yaml.decodeFileThrow (toFilePath fp))) - --- | Try to read the dirtiness cache for the given package directory. -tryGetConfigCache :: HasEnvConfig env - => Path Abs Dir -> RIO env (Maybe ConfigCache) + let decode :: MonadIO m => m BuildFileCache + decode = Yaml.decodeFileThrow (toFilePath fp) + either (const Nothing) (Just . (.fileCache)) <$> liftIO (tryAny decode) + +-- | Try to read the Cabal configuration cache for the given package directory. +tryGetConfigCache :: + HasEnvConfig env + => Path Abs Dir + -- ^ Package directory. 
+ -> RIO env (Maybe ConfigCache) tryGetConfigCache dir = - loadConfigCache $ configCacheKey dir ConfigCacheTypeConfig - --- | Try to read the mod time of the cabal file from the last build -tryGetCabalMod :: HasEnvConfig env - => Path Abs Dir -> RIO env (Maybe CTime) + loadConfigCache $ configCacheKey dir ConfigCacheTypeConfig + +-- | Try to read the modification time of the Cabal file from the last build. +tryGetCabalMod :: + HasEnvConfig env + => Path Abs Dir + -- ^ Package directory. + -> RIO env (Maybe CTime) tryGetCabalMod dir = do fp <- toFilePath <$> configCabalMod dir tryGetFileMod fp --- | Try to read the mod time of setup-config file from the last build -tryGetSetupConfigMod :: HasEnvConfig env - => Path Abs Dir -> RIO env (Maybe CTime) +-- | Try to read the modification time of setup-config file from the last build. +tryGetSetupConfigMod :: + HasEnvConfig env + => Path Abs Dir + -- ^ Package directory. + -> RIO env (Maybe CTime) tryGetSetupConfigMod dir = do fp <- toFilePath <$> configSetupConfigMod dir tryGetFileMod fp @@ -151,111 +206,172 @@ tryGetSetupConfigMod dir = do tryGetFileMod :: MonadIO m => FilePath -> m (Maybe CTime) tryGetFileMod fp = liftIO $ either (const Nothing) (Just . modificationTime) <$> - tryIO (getFileStatus fp) + tryIO (getFileStatus fp) + +-- | Try to read the project root from the last build of a package. +tryGetPackageProjectRoot :: + HasEnvConfig env + => Path Abs Dir + -> RIO env (Maybe ByteString) +tryGetPackageProjectRoot dir = do + fp <- toFilePath <$> configPackageProjectRoot dir + tryReadFileBinary fp + +tryReadFileBinary :: MonadIO m => FilePath -> m (Maybe ByteString) +tryReadFileBinary fp = + liftIO $ either (const Nothing) Just <$> + tryIO (readFileBinary fp) -- | Write the dirtiness cache for this package's files. 
-writeBuildCache :: HasEnvConfig env - => Path Abs Dir - -> NamedComponent - -> Map FilePath FileCacheInfo -> RIO env () -writeBuildCache dir component times = do - fp <- toFilePath <$> buildCacheFile dir component - liftIO $ Yaml.encodeFile fp BuildCache - { buildCacheTimes = times - } - --- | Write the dirtiness cache for this package's configuration. -writeConfigCache :: HasEnvConfig env - => Path Abs Dir - -> ConfigCache - -> RIO env () +writeBuildCache :: + HasEnvConfig env + => Path Abs Dir + -- ^ Package directory. + -> NamedComponent + -- ^ Package component. + -> FileCache + -- ^ File cache. + -> RIO env () +writeBuildCache dir component fileCache = do + fp <- toFilePath <$> buildCacheFile dir component + liftIO $ Yaml.encodeFile fp BuildFileCache { fileCache } + +-- | Write the given Cabal configuration cache for the given package directory. +writeConfigCache :: + HasEnvConfig env + => Path Abs Dir + -- ^ Package directory. + -> ConfigCache + -- ^ Cabal configuration cache. + -> RIO env () writeConfigCache dir = - saveConfigCache (configCacheKey dir ConfigCacheTypeConfig) + saveConfigCache (configCacheKey dir ConfigCacheTypeConfig) -- | See 'tryGetCabalMod' -writeCabalMod :: HasEnvConfig env - => Path Abs Dir - -> CTime - -> RIO env () +writeCabalMod :: + HasEnvConfig env + => Path Abs Dir + -- ^ Package directory. + -> CTime + -> RIO env () writeCabalMod dir x = do - fp <- configCabalMod dir - writeBinaryFileAtomic fp "Just used for its modification time" - liftIO $ setFileTimes (toFilePath fp) x x - --- | Delete the caches for the project. 
-deleteCaches :: HasEnvConfig env => Path Abs Dir -> RIO env () -deleteCaches dir - {- FIXME confirm that this is acceptable to remove - bfp <- buildCacheFile dir - removeFileIfExists bfp - -} - = deactiveConfigCache $ configCacheKey dir ConfigCacheTypeConfig - + fp <- configCabalMod dir + writeBinaryFileAtomic fp "Just used for its modification time" + liftIO $ setFileTimes (toFilePath fp) x x + +-- | See 'tryGetSetupConfigMod' +writeSetupConfigMod :: + HasEnvConfig env + => Path Abs Dir + -> Maybe CTime + -> RIO env () +writeSetupConfigMod dir Nothing = do + fp <- configSetupConfigMod dir + ignoringAbsence $ removeFile fp +writeSetupConfigMod dir (Just x) = do + fp <- configSetupConfigMod dir + writeBinaryFileAtomic fp "Just used for its modification time" + liftIO $ setFileTimes (toFilePath fp) x x + +-- | See 'tryGetPackageProjectRoot'. +writePackageProjectRoot :: + HasEnvConfig env + => Path Abs Dir + -> ByteString + -> RIO env () +writePackageProjectRoot dir projectRoot = do + fp <- configPackageProjectRoot dir + writeBinaryFileAtomic fp (byteString projectRoot) + +-- | Delete the Cabal configuration cache for the given package directory. +deleteCaches :: + HasEnvConfig env + => Path Abs Dir + -- ^ Package directory. + -> RIO env () +deleteCaches dir = + {- FIXME confirm that this is acceptable to remove + bfp <- buildCacheFile dir + removeFileIfExists bfp + -} + deactiveConfigCache $ configCacheKey dir ConfigCacheTypeConfig + +-- | For the given installed item, yields the key used to retrieve a record from +-- the library Cabal flag cache or executable Cabal flag cache. 
flagCacheKey :: (HasEnvConfig env) => Installed -> RIO env ConfigCacheKey flagCacheKey installed = do - installationRoot <- installationRootLocal - case installed of - Library _ gid _ -> - return $ - configCacheKey installationRoot (ConfigCacheTypeFlagLibrary gid) - Executable ident -> - return $ - configCacheKey - installationRoot - (ConfigCacheTypeFlagExecutable ident) - --- | Loads the flag cache for the given installed extra-deps -tryGetFlagCache :: HasEnvConfig env - => Installed - -> RIO env (Maybe ConfigCache) + installationRoot <- installationRootLocal + case installed of + Library _ installedInfo -> do + let gid = installedInfo.ghcPkgId + pure $ configCacheKey installationRoot (ConfigCacheTypeFlagLibrary gid) + Executable ident -> pure $ + configCacheKey installationRoot (ConfigCacheTypeFlagExecutable ident) + +-- | Loads the Cabal flag cache for the given installed extra-deps. +tryGetFlagCache :: + HasEnvConfig env + => Installed + -> RIO env (Maybe ConfigCache) tryGetFlagCache gid = do - key <- flagCacheKey gid - loadConfigCache key - -writeFlagCache :: HasEnvConfig env - => Installed - -> ConfigCache - -> RIO env () + key <- flagCacheKey gid + loadConfigCache key + +-- | Write the Cabal flag cache for the given installed extra-deps. +writeFlagCache :: + HasEnvConfig env + => Installed + -> ConfigCache + -> RIO env () writeFlagCache gid cache = do - key <- flagCacheKey gid - saveConfigCache key cache + key <- flagCacheKey gid + saveConfigCache key cache successBS, failureBS, unknownBS :: IsString s => s successBS = "success" failureBS = "failure" unknownBS = "unknown" --- | Status of a test suite -data TestStatus = TSSuccess | TSFailure | TSUnknown - --- | Mark test suite status -setTestStatus :: HasEnvConfig env - => Path Abs Dir - -> TestStatus - -> RIO env () +-- | Status of test suite(s). +data TestStatus + = TSSuccess + -- ^ The test suite(s) succeeded. + | TSFailure + -- ^ One or more test suites failed. 
+ | TSUnknown + -- ^ The outcome of the test suite(s) is unknown. + +-- | Mark test suite status. +setTestStatus :: + HasEnvConfig env + => Path Abs Dir + -- ^ Package directory. + -> TestStatus + -- ^ The status of the test suite(s). + -> RIO env () setTestStatus dir status = do - fp <- testSuccessFile dir - writeBinaryFileAtomic fp $ - case status of - TSSuccess -> successBS - TSFailure -> failureBS - TSUnknown -> unknownBS - --- | Check if the test suite already passed -getTestStatus :: HasEnvConfig env - => Path Abs Dir - -> RIO env TestStatus + fp <- testSuccessFile dir + writeBinaryFileAtomic fp $ + case status of + TSSuccess -> successBS + TSFailure -> failureBS + TSUnknown -> unknownBS + +-- | Check if the test suite(s) already passed. +getTestStatus :: + HasEnvConfig env + => Path Abs Dir + -- ^ Package directory. + -> RIO env TestStatus getTestStatus dir = do fp <- testSuccessFile dir - -- we could ensure the file is the right size first, - -- but we're not expected an attack from the user's filesystem - eres <- tryIO (readFileBinary $ toFilePath fp) - pure $ - case eres of - Right bs - | bs == successBS -> TSSuccess - | bs == failureBS -> TSFailure - _ -> TSUnknown + -- we could ensure the file is the right size first, but we're not expected an + -- attack from the user's filesystem + tryIO (readFileBinary $ toFilePath fp) <&> \case + Right bs + | bs == successBS -> TSSuccess + | bs == failureBS -> TSFailure + _ -> TSUnknown -------------------------------------- -- Precompiled Cache @@ -271,97 +387,107 @@ getTestStatus dir = do -- -- We only pay attention to non-directory options. We don't want to avoid a -- cache hit just because it was installed in a different directory. 
-getPrecompiledCacheKey :: HasEnvConfig env - => PackageLocationImmutable - -> ConfigureOpts - -> Bool -- ^ build haddocks - -> Set GhcPkgId -- ^ dependencies - -> RIO env PrecompiledCacheKey -getPrecompiledCacheKey loc copts buildHaddocks installedPackageIDs = do +getPrecompiledCacheKey :: + HasEnvConfig env + => PackageLocationImmutable + -> ConfigureOpts + -> Bool -- ^ build haddocks + -> RIO env PrecompiledCacheKey +getPrecompiledCacheKey loc configureOpts buildHaddocks = do compiler <- view actualCompilerVersionL cabalVersion <- view cabalVersionL - -- The goal here is to come up with a string representing the - -- package location which is unique. Luckily @TreeKey@s are exactly - -- that! + -- The goal here is to come up with a string representing the package location + -- which is unique. Luckily @TreeKey@s are exactly that! treeKey <- getPackageLocationTreeKey loc let packageKey = utf8BuilderToText $ display treeKey platformGhcDir <- platformGhcRelDir -- In Cabal versions 1.22 and later, the configure options contain the - -- installed package IDs, which is what we need for a unique hash. - -- Unfortunately, earlier Cabals don't have the information, so we must - -- supplement it with the installed package IDs directly. - -- See issue: https://github.com/commercialhaskell/stack/issues/1103 - let input = (coNoDirs copts, installedPackageIDs) - optionsHash = Mem.convert $ hashWith SHA256 $ encodeUtf8 $ tshow input + -- installed package IDs, which is what we need for a unique hash. 
See also + -- issue: https://github.com/commercialhaskell/stack/issues/1103 + let optionsToHash = configureOpts.nonPathRelated + optionsHash = + Mem.convert $ hashWith SHA256 $ encodeUtf8 $ tshow optionsToHash - return $ precompiledCacheKey platformGhcDir compiler cabalVersion packageKey optionsHash buildHaddocks + pure $ precompiledCacheKey + platformGhcDir compiler cabalVersion packageKey optionsHash buildHaddocks -- | Write out information about a newly built package -writePrecompiledCache :: HasEnvConfig env - => BaseConfigOpts - -> PackageLocationImmutable - -> ConfigureOpts - -> Bool -- ^ build haddocks - -> Set GhcPkgId -- ^ dependencies - -> Installed -- ^ library - -> [GhcPkgId] -- ^ sublibraries, in the GhcPkgId format - -> Set Text -- ^ executables - -> RIO env () -writePrecompiledCache baseConfigOpts loc copts buildHaddocks depIDs mghcPkgId sublibs exes = do - key <- getPrecompiledCacheKey loc copts buildHaddocks depIDs - ec <- view envConfigL - let stackRootRelative = makeRelative (view stackRootL ec) - mlibpath <- case mghcPkgId of - Executable _ -> return Nothing - Library _ ipid _ -> Just <$> pathFromPkgId stackRootRelative ipid - sublibpaths <- mapM (pathFromPkgId stackRootRelative) sublibs - exes' <- forM (Set.toList exes) $ \exe -> do - name <- parseRelFile $ T.unpack exe - stackRootRelative $ bcoSnapInstallRoot baseConfigOpts bindirSuffix name - let precompiled = PrecompiledCache - { pcLibrary = mlibpath - , pcSubLibs = sublibpaths - , pcExes = exes' - } - savePrecompiledCache key precompiled - -- reuse precompiled cache with haddocks also in case when haddocks are not required - when buildHaddocks $ do - key' <- getPrecompiledCacheKey loc copts False depIDs - savePrecompiledCache key' precompiled - where - pathFromPkgId stackRootRelative ipid = do - ipid' <- parseRelFile $ ghcPkgIdString ipid ++ ".conf" - stackRootRelative $ bcoSnapDB baseConfigOpts ipid' - --- | Check the cache for a precompiled package matching the given --- configuration. 
-readPrecompiledCache :: forall env. HasEnvConfig env - => PackageLocationImmutable -- ^ target package - -> ConfigureOpts - -> Bool -- ^ build haddocks - -> Set GhcPkgId -- ^ dependencies - -> RIO env (Maybe (PrecompiledCache Abs)) -readPrecompiledCache loc copts buildHaddocks depIDs = do - key <- getPrecompiledCacheKey loc copts buildHaddocks depIDs - mcache <- loadPrecompiledCache key - maybe (pure Nothing) (fmap Just . mkAbs) mcache - where - -- Since commit ed9ccc08f327bad68dd2d09a1851ce0d055c0422, - -- pcLibrary paths are stored as relative to the stack - -- root. Therefore, we need to prepend the stack root when - -- checking that the file exists. For the older cached paths, the - -- file will contain an absolute path, which will make `stackRoot - -- ` a no-op. - mkAbs :: PrecompiledCache Rel -> RIO env (PrecompiledCache Abs) - mkAbs pc0 = do - stackRoot <- view stackRootL - let mkAbs' = (stackRoot ) - return PrecompiledCache - { pcLibrary = mkAbs' <$> pcLibrary pc0 - , pcSubLibs = mkAbs' <$> pcSubLibs pc0 - , pcExes = mkAbs' <$> pcExes pc0 - } - +writePrecompiledCache :: + HasEnvConfig env + => BaseConfigOpts + -> PackageLocationImmutable + -> ConfigureOpts + -> Bool -- ^ build haddocks + -> Installed -- ^ library + -> Set StackUnqualCompName -- ^ executables + -> RIO env () +writePrecompiledCache + baseConfigOpts + loc + copts + buildHaddocks + mghcPkgId + exes + = do + key <- getPrecompiledCacheKey loc copts buildHaddocks + ec <- view envConfigL + let stackRootRelative = makeRelative (view stackRootL ec) + exes' <- forM (Set.toList exes) $ \exe -> do + name <- parseRelFile $ unqualCompToString exe + stackRootRelative $ + baseConfigOpts.snapInstallRoot bindirSuffix name + let installedLibToPath libName ghcPkgId pcAction = do + libPath <- pathFromPkgId stackRootRelative ghcPkgId + pc <- pcAction + pure $ case libName of + Nothing -> pc { library = Just libPath } + _ -> pc { subLibs = libPath : pc.subLibs } + precompiled <- foldOnGhcPkgId' + 
installedLibToPath + mghcPkgId + ( pure PrecompiledCache + { library = Nothing + , subLibs = [] + , exes = exes' + } + ) + savePrecompiledCache key precompiled + -- reuse precompiled cache with haddocks also in case when haddocks are + -- not required + when buildHaddocks $ do + key' <- getPrecompiledCacheKey loc copts False + savePrecompiledCache key' precompiled + where + pathFromPkgId stackRootRelative ipid = do + ipid' <- parseRelFile $ ghcPkgIdString ipid ++ ".conf" + stackRootRelative $ baseConfigOpts.snapDB ipid' + +-- | Check the cache for a precompiled package matching the given configuration. +readPrecompiledCache :: + forall env. HasEnvConfig env + => PackageLocationImmutable -- ^ target package + -> ConfigureOpts + -> Bool -- ^ build haddocks + -> RIO env (Maybe (PrecompiledCache Abs)) +readPrecompiledCache loc copts buildHaddocks = do + key <- getPrecompiledCacheKey loc copts buildHaddocks + mcache <- loadPrecompiledCache key + maybe (pure Nothing) (fmap Just . mkAbs) mcache + where + -- Since commit ed9ccc08f327bad68dd2d09a1851ce0d055c0422, pcLibrary paths are + -- stored as relative to the Stack root. Therefore, we need to prepend the + -- Stack root when checking that the file exists. For the older cached paths, + -- the file will contain an absolute path, which will make `stackRoot ` + -- a no-op. 
+ mkAbs :: PrecompiledCache Rel -> RIO env (PrecompiledCache Abs) + mkAbs pc0 = do + stackRoot <- view stackRootL + let mkAbs' = (stackRoot ) + pure PrecompiledCache + { library = mkAbs' <$> pc0.library + , subLibs = mkAbs' <$> pc0.subLibs + , exes = mkAbs' <$> pc0.exes + } diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index afa51c4d4a..37959fe739 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -1,916 +1,1238 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE ViewPatterns #-} --- | Construct a @Plan@ for how to build +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} + +{-| +Module : Stack.Build.ConstructPlan +Description : Construct a @Plan@ for how to build. +License : BSD-3-Clause + +Construct a @Plan@ for how to build. +-} + module Stack.Build.ConstructPlan - ( constructPlan - ) where + ( constructPlan + ) where -import Stack.Prelude hiding (Display (..), loadPackage) -import Control.Monad.RWS.Strict hiding ((<>)) -import Control.Monad.State.Strict (execState) -import Data.List -import qualified Data.Map.Strict as M +import Control.Monad.Trans.Maybe ( MaybeT (..) ) +import qualified Data.Map.Merge.Strict as Map import qualified Data.Map.Strict as Map -import Data.Monoid.Map (MonoidMap(..)) +import Data.Monoid.Map ( MonoidMap(..) 
) import qualified Data.Set as Set import qualified Data.Text as T -import qualified Distribution.Text as Cabal -import qualified Distribution.Version as Cabal -import Distribution.Types.BuildType (BuildType (Configure)) -import Distribution.Types.PackageName (mkPackageName) -import Distribution.Version (mkVersion) -import Generics.Deriving.Monoid (memptydefault, mappenddefault) -import Path (parent) -import qualified RIO -import Stack.Build.Cache -import Stack.Build.Haddock -import Stack.Build.Installed -import Stack.Build.Source -import Stack.Constants +import Distribution.Types.BuildType ( BuildType (Configure) ) +import Distribution.Types.PackageName ( mkPackageName ) +import Distribution.Version ( mkVersion ) +import Path ( parent ) +import qualified RIO.NonEmpty as NE +import RIO.Process ( findExecutable ) +import RIO.State + ( State, StateT (..), execState, get, modify, modify', put ) +import RIO.Writer ( WriterT (..), pass, tell ) +import Stack.Build.Cache ( tryGetFlagCache ) +import Stack.Build.Haddock ( shouldHaddockDeps ) +import Stack.Build.Source ( loadLocalPackage ) +import Stack.ConfigureOpts + ( configureOptsFromBase, packageConfigureOptsFromPackage + , renderConfigureOpts + ) +import Stack.Constants ( compilerOptionsCabalFlag ) import Stack.Package -import Stack.PackageDump -import Stack.SourceMap -import Stack.Types.Build -import Stack.Types.Compiler -import Stack.Types.Config -import Stack.Types.GhcPkgId -import Stack.Types.NamedComponent + ( applyForceCustomBuild, buildableExes, packageUnknownTools + , processPackageDepsEither + ) +import Stack.Prelude hiding ( loadPackage ) +import Stack.SourceMap ( getPLIVersion, mkProjectPackage ) +import Stack.Types.Build.ConstructPlan + ( AddDepRes (..), CombinedMap, Ctx (..), LibraryMap, M + , MissingPresentDeps (..), PackageInfo (..), PackageLoader + , ToolWarning(..), UnregisterState (..), W (..) 
+ , adrHasLibrary, adrVersion, isAdrToInstall, toTask + ) +import Stack.Types.Build.Exception + ( BadDependency (..), BuildException (..) + , BuildPrettyException (..), ConstructPlanException (..) + ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..), configFileL ) +import Stack.Types.BuildOpts ( BuildOpts (..) ) +import Stack.Types.BuildOptsCLI + ( BuildOptsCLI (..), BuildSubset (..) ) +import Stack.Types.Cache ( CachePkgSrc (..), ConfigCache (..) ) +import Stack.Types.CompCollection ( collectionMember ) +import Stack.Types.Compiler ( WhichCompiler (..), getGhcVersion ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), HasCompiler (..) ) +import Stack.Types.ComponentUtils ( unqualCompFromText ) +import Stack.Types.Config ( Config (..), HasConfig (..), stackRootL ) +import Stack.Types.ConfigureOpts ( BaseConfigOpts (..) ) +import Stack.Types.Curator ( Curator (..) ) +import Stack.Types.Dependency ( DepValue (..), isDepTypeLibrary ) +import Stack.Types.DumpPackage ( DumpPackage (..), sublibParentPkgId ) +import Stack.Types.EnvConfig ( EnvConfig (..), HasEnvConfig (..) ) +import Stack.Types.EnvSettings + ( EnvSettings (..), minimalEnvSettings ) +import Stack.Types.GhcPkgId ( GhcPkgId ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.Installed + ( InstallLocation (..), Installed (..), InstalledMap + , installedVersion + ) +import Stack.Types.IsMutable ( IsMutable (..) ) +import Stack.Types.NamedComponent ( exeComponents, renderComponent ) import Stack.Types.Package + ( ExeName (..), LocalPackage (..), Package (..) + , PackageSource (..), installedMapGhcPkgId + , packageIdentifier, psVersion, runMemoizedWith + ) +import Stack.Types.Plan + ( Plan (..), Task (..), TaskConfigOpts (..), TaskType (..) 
+ , installLocationIsMutable, taskIsTarget, taskLocation + , taskProvides, taskTargetIsMutable + ) +import Stack.Types.ProjectConfig ( isPCGlobalProject ) +import Stack.Types.Runner ( HasRunner (..), globalOptsL ) import Stack.Types.SourceMap + ( CommonPackage (..), DepPackage (..), FromSnapshot (..) + , GlobalPackage (..), SMTargets (..), SourceMap (..) + ) import Stack.Types.Version -import System.Environment (lookupEnv) -import System.IO (putStrLn) -import RIO.PrettyPrint -import RIO.Process (findExecutable, HasProcessContext (..)) - -data PackageInfo - = - -- | This indicates that the package is already installed, and - -- that we shouldn't build it from source. This is only the case - -- for global packages. - PIOnlyInstalled InstallLocation Installed - -- | This indicates that the package isn't installed, and we know - -- where to find its source. - | PIOnlySource PackageSource - -- | This indicates that the package is installed and we know - -- where to find its source. We may want to reinstall from source. 
- | PIBoth PackageSource Installed - deriving (Show) - -combineSourceInstalled :: PackageSource - -> (InstallLocation, Installed) - -> PackageInfo -combineSourceInstalled ps (location, installed) = - assert (psVersion ps == installedVersion installed) $ - case location of - -- Always trust something in the snapshot - Snap -> PIOnlyInstalled location installed - Local -> PIBoth ps installed - -type CombinedMap = Map PackageName PackageInfo - -combineMap :: Map PackageName PackageSource -> InstalledMap -> CombinedMap -combineMap = Map.mergeWithKey - (\_ s i -> Just $ combineSourceInstalled s i) - (fmap PIOnlySource) - (fmap (uncurry PIOnlyInstalled)) - -data AddDepRes - = ADRToInstall Task - | ADRFound InstallLocation Installed - deriving Show - -type ParentMap = MonoidMap PackageName (First Version, [(PackageIdentifier, VersionRange)]) - -data W = W - { wFinals :: !(Map PackageName (Either ConstructPlanException Task)) - , wInstall :: !(Map Text InstallLocation) - -- ^ executable to be installed, and location where the binary is placed - , wDirty :: !(Map PackageName Text) - -- ^ why a local package is considered dirty - , wWarnings :: !([Text] -> [Text]) - -- ^ Warnings - , wParents :: !ParentMap - -- ^ Which packages a given package depends on, along with the package's version - } deriving Generic -instance Semigroup W where - (<>) = mappenddefault -instance Monoid W where - mempty = memptydefault - mappend = (<>) - -type M = RWST -- TODO replace with more efficient WS stack on top of StackT - Ctx - W - (Map PackageName (Either ConstructPlanException AddDepRes)) - IO - -data Ctx = Ctx - { baseConfigOpts :: !BaseConfigOpts - , loadPackage :: !(PackageLocationImmutable -> Map FlagName Bool -> [Text] -> [Text] -> M Package) - , combinedMap :: !CombinedMap - , ctxEnvConfig :: !EnvConfig - , callStack :: ![PackageName] - , wanted :: !(Set PackageName) - , localNames :: !(Set PackageName) - , mcurator :: !(Maybe Curator) - , pathEnvVar :: !Text - } + ( VersionRange, 
latestApplicableVersion, versionRangeText + , withinRange + ) +import System.Environment ( lookupEnv ) -instance HasPlatform Ctx -instance HasGHCVariant Ctx -instance HasLogFunc Ctx where - logFuncL = configL.logFuncL -instance HasRunner Ctx where - runnerL = configL.runnerL -instance HasStylesUpdate Ctx where - stylesUpdateL = runnerL.stylesUpdateL -instance HasTerm Ctx where - useColorL = runnerL.useColorL - termWidthL = runnerL.termWidthL -instance HasConfig Ctx -instance HasPantryConfig Ctx where - pantryConfigL = configL.pantryConfigL -instance HasProcessContext Ctx where - processContextL = configL.processContextL -instance HasBuildConfig Ctx -instance HasSourceMap Ctx where - sourceMapL = envConfigL.sourceMapL -instance HasCompiler Ctx where - compilerPathsL = envConfigL.compilerPathsL -instance HasEnvConfig Ctx where - envConfigL = lens ctxEnvConfig (\x y -> x { ctxEnvConfig = y }) - --- | Computes a build plan. This means figuring out which build 'Task's --- to take, and the interdependencies among the build 'Task's. In --- particular: +-- | Computes a build plan. This means figuring out which build t'Task's to +-- take, and the interdependencies among the build t'Task's. In particular: -- --- 1) It determines which packages need to be built, based on the --- transitive deps of the current targets. For local packages, this is --- indicated by the 'lpWanted' boolean. For extra packages to build, --- this comes from the @extraToBuild0@ argument of type @Set --- PackageName@. These are usually packages that have been specified on --- the commandline. +-- 1) It determines which packages need to be built, based on the transitive +-- deps of the current targets. For project packages, this is indicated by the +-- 'Stack.Types.Package.wanted' boolean. For extra packages to build, this comes +-- from the @extraToBuild0@ argument of type @Set PackageName@. These are +-- usually packages that have been specified on the command line. 
-- --- 2) It will only rebuild an upstream package if it isn't present in --- the 'InstalledMap', or if some of its dependencies have changed. +-- 2) It will only rebuild an upstream package if it isn't present in the +-- 'InstalledMap', or if some of its dependencies have changed. -- --- 3) It will only rebuild a local package if its files are dirty or --- some of its dependencies have changed. -constructPlan :: forall env. HasEnvConfig env - => BaseConfigOpts - -> [DumpPackage] -- ^ locally registered - -> (PackageLocationImmutable -> Map FlagName Bool -> [Text] -> [Text] -> RIO EnvConfig Package) -- ^ load upstream package - -> SourceMap - -> InstalledMap - -> Bool - -> RIO env Plan -constructPlan baseConfigOpts0 localDumpPkgs loadPackage0 sourceMap installedMap initialBuildSteps = do +-- 3) It will only rebuild a local package if its files are dirty or some of its +-- dependencies have changed. +constructPlan :: + forall env. HasEnvConfig env + => BaseConfigOpts + -> [DumpPackage] + -- ^ Locally registered. + -> PackageLoader (RIO EnvConfig) + -- ^ Function to load a 'Package' given the location of a package assumed + -- to be immutable. + -> SourceMap + -> InstalledMap + -> Bool + -- ^ Only include initial build steps required for GHCi? + -> RIO env Plan +constructPlan + baseConfigOpts0 + localDumpPkgs + loadPackage0 + sourceMap + installedMap + initialBuildSteps + = do logDebug "Constructing the build plan" - when hasBaseInDeps $ - prettyWarn $ flow "You are trying to upgrade/downgrade base, which is almost certainly not what you really want. Please, consider using another GHC version if you need a certain version of base, or removing base from extra-deps. See more at https://github.com/commercialhaskell/stack/issues/3940." 
<> line + config <- view configL + let ghcVersion = getGhcVersion sourceMap.compiler + isBaseWiredIn = ghcVersion < mkVersion [9,12] + when (hasBaseInDeps && (isBaseWiredIn || config.notifyIfBaseNotBoot)) $ do + let intro = fillSep + [ flow "Before GHC 9.12.1, the base package is a GHC wired-in \ + \one. For other GHC versions it is not. You are using" + , style Current $ "GHC " <> fromString (versionString ghcVersion) + , flow "and trying to replace its" + , style Current "base" + , flow "boot package." + ] + adviceInit = if isBaseWiredIn + then + [ flow "Almost certainly, that is not what you really want to \ + \do. Consider removing" + ] + else + [ flow "That may be not what you want to do. If not, consider \ + \removing" + ] + adviceRest = + [ style Current "base" + , flow "as an" + , style Shell "extra-deps" <> "," + , flow "or, if you need a particular version of" + , style Current "base" <> "," + , flow "consider using a different GHC version." + ] + adviceMute = fillSep + [ flow "To mute this message in future, set" + , style Shell (flow "notify-if-base-not-boot: false") + , flow "in Stack's configuration." + ] + prettyWarn $ + intro + <> blankLine + <> fillSep (adviceInit <> adviceRest) + <> (if isBaseWiredIn then mempty else blankLine <> adviceMute) + <> line econfig <- view envConfigL - globalCabalVersion <- view $ compilerPathsL.to cpCabalVersion + globalCabalVersion <- view $ compilerPathsL . to (.cabalVersion) sources <- getSources globalCabalVersion - mcur <- view $ buildConfigL.to bcCurator - - let onTarget = void . addDep - let inner = mapM_ onTarget $ Map.keys (smtTargets $ smTargets sourceMap) - pathEnvVar' <- liftIO $ maybe mempty T.pack <$> lookupEnv "PATH" - let ctx = mkCtx econfig globalCabalVersion sources mcur pathEnvVar' - ((), m, W efinals installExes dirtyReason warnings parents) <- - liftIO $ runRWST inner ctx M.empty - mapM_ (logWarn . 
RIO.display) (warnings []) - let toEither (_, Left e) = Left e - toEither (k, Right v) = Right (k, v) - (errlibs, adrs) = partitionEithers $ map toEither $ M.toList m - (errfinals, finals) = partitionEithers $ map toEither $ M.toList efinals + curator <- view $ buildConfigL . to (.curator) + pathEnvVar <- liftIO $ maybe mempty T.pack <$> lookupEnv "PATH" + let ctx = mkCtx econfig globalCabalVersion sources curator pathEnvVar + targetPackageNames = Map.keys sourceMap.targets.targets + -- Ignore the result of 'getCachedDepOrAddDep'. + onTarget pkgName = do + logDebugPlanS "constructPlan" $ + "Constructing for target " + <> fromPackageName pkgName + void $ getCachedDepOrAddDep pkgName + inner :: M () + inner = mapM_ onTarget targetPackageNames + action :: RIO Ctx (((), W), LibraryMap) + action = runStateT (runWriterT inner) Map.empty + (((), output), libraryMap) <- liftIO $ runRIO ctx action + let W efinals installExes dirtyReason warnings parents = output + -- Report any warnings + mapM_ prettyWarn (warnings []) + -- Separate out errors + let (errlibs, adrs) = + partitionEithers $ map toEither $ Map.toList libraryMap + (errfinals, finals) = + partitionEithers $ map toEither $ Map.toList efinals errs = errlibs ++ errfinals if null errs - then do - let toTask (_, ADRFound _ _) = Nothing - toTask (name, ADRToInstall task) = Just (name, task) - tasks = M.fromList $ mapMaybe toTask adrs - takeSubset = - case boptsCLIBuildSubset $ bcoBuildOptsCLI baseConfigOpts0 of - BSAll -> pure - BSOnlySnapshot -> pure . stripLocals - BSOnlyDependencies -> pure . 
stripNonDeps (M.keysSet $ smDeps sourceMap) - BSOnlyLocals -> errorOnSnapshot - takeSubset Plan - { planTasks = tasks - , planFinals = M.fromList finals - , planUnregisterLocal = mkUnregisterLocal tasks dirtyReason localDumpPkgs initialBuildSteps - , planInstallExes = - if boptsInstallExes (bcoBuildOpts baseConfigOpts0) || - boptsInstallCompilerTool (bcoBuildOpts baseConfigOpts0) - then installExes - else Map.empty - } - else do - planDebug $ show errs - stackYaml <- view stackYamlL - stackRoot <- view stackRootL - prettyErrorNoIndent $ - pprintExceptions errs stackYaml stackRoot parents (wanted ctx) prunedGlobalDeps - throwM $ ConstructPlanFailed "Plan construction failed." - where - hasBaseInDeps = Map.member (mkPackageName "base") (smDeps sourceMap) - - mkCtx econfig globalCabalVersion sources mcur pathEnvVar' = Ctx - { baseConfigOpts = baseConfigOpts0 - , loadPackage = \w x y z -> runRIO econfig $ - applyForceCustomBuild globalCabalVersion <$> loadPackage0 w x y z - , combinedMap = combineMap sources installedMap - , ctxEnvConfig = econfig - , callStack = [] - , wanted = Map.keysSet (smtTargets $ smTargets sourceMap) - , localNames = Map.keysSet (smProject sourceMap) - , mcurator = mcur - , pathEnvVar = pathEnvVar' - } - - prunedGlobalDeps = flip Map.mapMaybe (smGlobal sourceMap) $ \gp -> - case gp of - ReplacedGlobalPackage deps -> - let pruned = filter (not . 
inSourceMap) deps - in if null pruned then Nothing else Just pruned - GlobalPackage _ -> Nothing - - inSourceMap pname = pname `Map.member` smDeps sourceMap || - pname `Map.member` smProject sourceMap - - getSources globalCabalVersion = do - let loadLocalPackage' pp = do - lp <- loadLocalPackage pp - pure lp { lpPackage = applyForceCustomBuild globalCabalVersion $ lpPackage lp } - pPackages <- for (smProject sourceMap) $ \pp -> do - lp <- loadLocalPackage' pp - return $ PSFilePath lp - bopts <- view $ configL.to configBuild - deps <- for (smDeps sourceMap) $ \dp -> - case dpLocation dp of - PLImmutable loc -> - return $ PSRemote loc (getPLIVersion loc) (dpFromSnapshot dp) (dpCommon dp) - PLMutable dir -> do - pp <- mkProjectPackage YesPrintWarnings dir (shouldHaddockDeps bopts) - lp <- loadLocalPackage' pp - return $ PSFilePath lp - return $ pPackages <> deps - --- | Throw an exception if there are any snapshot packages in the plan. -errorOnSnapshot :: Plan -> RIO env Plan -errorOnSnapshot plan@(Plan tasks _finals _unregister installExes) = do - let snapTasks = Map.keys $ Map.filter (\t -> taskLocation t == Snap) tasks - let snapExes = Map.keys $ Map.filter (== Snap) installExes - unless (null snapTasks && null snapExes) $ throwIO $ - NotOnlyLocal snapTasks snapExes - pure plan - -data NotOnlyLocal = NotOnlyLocal [PackageName] [Text] - -instance Show NotOnlyLocal where - show (NotOnlyLocal packages exes) = concat - [ "Specified only-locals, but I need to build snapshot contents:\n" - , if null packages then "" else concat - [ "Packages: " - , intercalate ", " (map packageNameString packages) - , "\n" - ] - , if null exes then "" else concat - [ "Executables: " - , intercalate ", " (map T.unpack exes) - , "\n" - ] - ] -instance Exception NotOnlyLocal + then do + let tasks = Map.fromList $ mapMaybe (toMaybe . 
second toTask) adrs + takeSubset Plan + { tasks = tasks + , finals = Map.fromList finals + , unregisterLocal = + mkUnregisterLocal tasks dirtyReason localDumpPkgs initialBuildSteps + , installExes = + if baseConfigOpts0.buildOpts.installExes + || baseConfigOpts0.buildOpts.installCompilerTool + then installExes + else Map.empty + } + else do + configFile <- view configFileL + stackRoot <- view stackRootL + isImplicitGlobal <- + view $ configL . to (isPCGlobalProject . (.project)) + prettyThrowM $ ConstructPlanFailed + errs + configFile + stackRoot + isImplicitGlobal + parents + ctx.wanted + prunedGlobalDeps + where + sourceProject = sourceMap.project + sourceDeps = sourceMap.deps + + hasBaseInDeps = Map.member (mkPackageName "base") sourceDeps + + mkCtx :: + EnvConfig + -> Version + -> Map PackageName PackageSource + -> Maybe Curator + -> Text + -> Ctx + mkCtx ctxEnvConfig globalCabalVersion sources curator pathEnvVar = + let loadPackage loc flags ghcOptions cabalConfigOpts = do + let action = do + package <- loadPackage0 loc flags ghcOptions cabalConfigOpts + pure $ applyForceCustomBuild globalCabalVersion package + runRIO ctxEnvConfig action + in Ctx + { baseConfigOpts = baseConfigOpts0 + , loadPackage + , combinedMap = combineMap sources installedMap + , ctxEnvConfig + , callStack = [] + , wanted = Map.keysSet sourceMap.targets.targets + , localNames = Map.keysSet sourceProject + , curator + , pathEnvVar + } + + toEither :: (k, Either e v) -> Either e (k, v) + toEither (_, Left e) = Left e + toEither (k, Right v) = Right (k, v) + + toMaybe :: (k, Maybe v) -> Maybe (k, v) + toMaybe (_, Nothing) = Nothing + toMaybe (k, Just v) = Just (k, v) + + takeSubset :: Plan -> RIO env Plan + takeSubset = case baseConfigOpts0.buildOptsCLI.buildSubset of + BSAll -> pure + BSOnlySnapshot -> stripLocals + BSOnlyDependencies -> stripNonDeps + BSOnlyLocals -> errorOnSnapshot + + -- | Strip out anything from the 'Plan' intended for the local database. 
+ stripLocals :: Plan -> RIO env Plan + stripLocals plan = pure plan + { tasks = Map.filter checkTask plan.tasks + , finals = Map.empty + , unregisterLocal = Map.empty + , installExes = Map.filter (/= Local) plan.installExes + } + where + checkTask task = taskLocation task == Snap --- | State to be maintained during the calculation of local packages --- to unregister. -data UnregisterState = UnregisterState - { usToUnregister :: !(Map GhcPkgId (PackageIdentifier, Text)) - , usKeep :: ![DumpPackage] - , usAnyAdded :: !Bool + stripNonDeps :: Plan -> RIO env Plan + stripNonDeps plan = pure plan + { tasks = Map.filter checkTask plan.tasks + , finals = Map.empty + , installExes = Map.empty -- TODO maybe don't disable this? } + where + deps = Map.keysSet sourceDeps + checkTask task = taskProvides task `Set.member` missingForDeps + providesDep task = pkgName (taskProvides task) `Set.member` deps + tasks = Map.elems plan.tasks + missing = + Map.fromList $ map (taskProvides &&& (.configOpts.missing)) tasks + missingForDeps = flip execState mempty $ + for_ tasks $ \task -> + when (providesDep task) $ + collectMissing mempty (taskProvides task) + collectMissing dependents pid = do + when (pid `elem` dependents) $ + impureThrow $ TaskCycleBug pid + modify' (<> Set.singleton pid) + mapM_ + (collectMissing (pid:dependents)) + (fromMaybe mempty $ Map.lookup pid missing) + + -- | Throw an exception if there are any snapshot packages in the plan. + errorOnSnapshot :: Plan -> RIO env Plan + errorOnSnapshot plan@(Plan tasks _finals _unregister installExes) = do + let snapTasks = Map.keys $ Map.filter (\t -> taskLocation t == Snap) tasks + snapExes = Map.keys $ Map.filter (== Snap) installExes + unless (null snapTasks && null snapExes) $ + prettyThrowIO $ NotOnlyLocal snapTasks snapExes + pure plan + + prunedGlobalDeps :: Map PackageName [PackageName] + prunedGlobalDeps = flip Map.mapMaybe sourceMap.globalPkgs $ + \case + ReplacedGlobalPackage deps -> + let pruned = filter (not . 
inSourceMap) deps + in if null pruned then Nothing else Just pruned + GlobalPackage _ -> Nothing + where + inSourceMap pname = + pname `Map.member` sourceDeps || pname `Map.member` sourceProject + + getSources :: Version -> RIO env (Map PackageName PackageSource) + getSources globalCabalVersion = do + let loadLocalPackage' pp = do + lp <- loadLocalPackage pp + let lpPackage' = + applyForceCustomBuild globalCabalVersion lp.package + pure lp { package = lpPackage' } + pPackages <- for sourceProject $ \pp -> do + lp <- loadLocalPackage' pp + pure $ PSFilePath lp + bopts <- view $ configL . to (.build) + deps <- for sourceDeps $ \dp -> + case dp.location of + PLImmutable loc -> + pure $ + PSRemote loc (getPLIVersion loc) dp.fromSnapshot dp.depCommon + PLMutable dir -> do + pp <- mkProjectPackage YesPrintWarnings dir (shouldHaddockDeps bopts) + lp <- loadLocalPackage' pp + pure $ PSFilePath lp + pure $ pPackages <> deps -- | Determine which packages to unregister based on the given tasks and --- already registered local packages -mkUnregisterLocal :: Map PackageName Task - -- ^ Tasks - -> Map PackageName Text - -- ^ Reasons why packages are dirty and must be rebuilt - -> [DumpPackage] - -- ^ Local package database dump - -> Bool - -- ^ If true, we're doing a special initialBuildSteps - -- build - don't unregister target packages. - -> Map GhcPkgId (PackageIdentifier, Text) +-- already registered project packages and local extra-deps. +mkUnregisterLocal :: + Map PackageName Task + -- ^ Tasks + -> Map PackageName Text + -- ^ Reasons why packages are dirty and must be rebuilt + -> [DumpPackage] + -- ^ Local package database dump + -> Bool + -- ^ If true, we're doing a special initialBuildSteps build - don't + -- unregister target packages. + -> Map GhcPkgId (PackageIdentifier, Text) mkUnregisterLocal tasks dirtyReason localDumpPkgs initialBuildSteps = - -- We'll take multiple passes through the local packages. 
This - -- will allow us to detect that a package should be unregistered, - -- as well as all packages directly or transitively depending on - -- it. - loop Map.empty localDumpPkgs - where - loop toUnregister keep - -- If any new packages were added to the unregister Map, we - -- need to loop through the remaining packages again to detect - -- if a transitive dependency is being unregistered. - | usAnyAdded us = loop (usToUnregister us) (usKeep us) - -- Nothing added, so we've already caught them all. Return the - -- Map we've already calculated. - | otherwise = usToUnregister us - where - -- Run the unregister checking function on all packages we - -- currently think we'll be keeping. - us = execState (mapM_ go keep) UnregisterState - { usToUnregister = toUnregister - , usKeep = [] - , usAnyAdded = False - } - - go dp = do - us <- get - case go' (usToUnregister us) ident deps of - -- Not unregistering, add it to the keep list - Nothing -> put us { usKeep = dp : usKeep us } - -- Unregistering, add it to the unregister Map and - -- indicate that a package was in fact added to the - -- unregister Map so we loop again. - Just reason -> put us - { usToUnregister = Map.insert gid (ident, reason) (usToUnregister us) - , usAnyAdded = True - } - where - gid = dpGhcPkgId dp - ident = dpPackageIdent dp - deps = dpDepends dp - - go' toUnregister ident deps - -- If we're planning on running a task on it, then it must be - -- unregistered, unless it's a target and an initial-build-steps - -- build is being done. - | Just task <- Map.lookup name tasks - = if initialBuildSteps && taskIsTarget task && taskProvides task == ident - then Nothing - else Just $ fromMaybe "" $ Map.lookup name dirtyReason - -- Check if a dependency is going to be unregistered - | (dep, _):_ <- mapMaybe (`Map.lookup` toUnregister) deps - = Just $ "Dependency being unregistered: " <> T.pack (packageIdentifierString dep) - -- None of the above, keep it! 
- | otherwise = Nothing - where - name :: PackageName - name = pkgName ident - --- | Given a 'LocalPackage' and its 'lpTestBench', adds a 'Task' for --- running its tests and benchmarks. + -- We'll take multiple passes through the local packages. This will allow us + -- to detect that a package should be unregistered, as well as all packages + -- directly or transitively depending on it. + loop Map.empty localDumpPkgs + where + loop :: + Map GhcPkgId (PackageIdentifier, Text) + -- ^ Current local packages to unregister. + -> [DumpPackage] + -- ^ Current local packages to keep. + -> Map GhcPkgId (PackageIdentifier, Text) + -- ^ Revised local packages to unregister. + loop toUnregister keep + -- If any new packages were added to the unregister Map, we need to loop + -- through the remaining packages again to detect if a transitive dependency + -- is being unregistered. + | us.anyAdded = loop us.toUnregister us.toKeep + -- Nothing added, so we've already caught them all. Return the Map we've + -- already calculated. + | otherwise = us.toUnregister + where + -- Run the unregister checking function on all packages we currently think + -- we'll be keeping. + us = execState (mapM_ go keep) initialUnregisterState + initialUnregisterState = UnregisterState + { toUnregister + , toKeep = [] + , anyAdded = False + } + + go :: DumpPackage -> State UnregisterState () + go dp = do + us <- get + case maybeUnregisterReason us.toUnregister ident mParentLibId deps of + -- Not unregistering, add it to the keep list. + Nothing -> put us { toKeep = dp : us.toKeep } + -- Unregistering, add it to the unregister Map; and indicate that a + -- package was in fact added to the unregister Map, so we loop again. 
+ Just reason -> put us + { toUnregister = Map.insert gid (ident, reason) us.toUnregister + , anyAdded = True + } + where + gid = dp.ghcPkgId + ident = dp.packageIdent + mParentLibId = sublibParentPkgId dp + deps = dp.depends + + maybeUnregisterReason :: + Map GhcPkgId (PackageIdentifier, Text) + -- ^ Current local packages to unregister. + -> PackageIdentifier + -- ^ Package identifier. + -> Maybe PackageIdentifier + -- ^ If package for sub library, package identifier of the parent. + -> [GhcPkgId] + -- ^ Dependencies of the package. + -> Maybe Text + -- ^ If to be unregistered, the reason for doing so. + maybeUnregisterReason toUnregister ident mParentLibId deps + -- If the package is not for a sub library, then it is directly relevant. If + -- it is, then the relevant package is the parent. If we are planning on + -- running a task on the relevant package, then the package must be + -- unregistered, unless it is a target and an initial-build-steps build is + -- being done. + | Just task <- Map.lookup relevantPkgName tasks = + if initialBuildSteps + && taskIsTarget task + && taskProvides task == relevantPkgId + then Nothing + else Just $ fromMaybe "" $ Map.lookup relevantPkgName dirtyReason + -- Check if a dependency is going to be unregistered + | (dep, _):_ <- mapMaybe (`Map.lookup` toUnregister) deps = + Just $ "Dependency being unregistered: " + <> T.pack (packageIdentifierString dep) + -- None of the above, keep it! + | otherwise = Nothing + where + -- If the package is not for a sub library, then the relevant package + -- identifier is that of the package. If it is, then the relevant package + -- identifier is that of the parent. + relevantPkgId :: PackageIdentifier + relevantPkgId = fromMaybe ident mParentLibId + -- If the package is not for a sub library, then the relevant package name + -- is that of the package. If it is, then the relevant package name is + -- that of the parent. 
+ relevantPkgName :: PackageName + relevantPkgName = maybe (pkgName ident) pkgName mParentLibId + +-- | Given a t'LocalPackage' and its 'testBench', adds a t'Task' for running +-- its tests and benchmarks. -- --- If @isAllInOne@ is 'True', then this means that the build step will --- also build the tests. Otherwise, this indicates that there's a cyclic --- dependency and an additional build step needs to be done. +-- If @isAllInOne@ is 'True', then this means that the build step will also +-- build the tests. Otherwise, this indicates that there's a cyclic dependency +-- and an additional build step needs to be done. -- --- This will also add all the deps needed to build the tests / --- benchmarks. If @isAllInOne@ is 'True' (the common case), then all of --- these should have already been taken care of as part of the build --- step. -addFinal :: LocalPackage -> Package -> Bool -> Bool -> M () -addFinal lp package isAllInOne buildHaddocks = do - depsRes <- addPackageDeps package - res <- case depsRes of - Left e -> return $ Left e - Right (missing, present, _minLoc) -> do - ctx <- ask - return $ Right Task - { taskProvides = PackageIdentifier - (packageName package) - (packageVersion package) - , taskConfigOpts = TaskConfigOpts missing $ \missing' -> - let allDeps = Map.union present missing' - in configureOpts - (view envConfigL ctx) - (baseConfigOpts ctx) - allDeps - True -- local - Mutable - package - , taskBuildHaddock = buildHaddocks - , taskPresent = present - , taskType = TTLocalMutable lp - , taskAllInOne = isAllInOne - , taskCachePkgSrc = CacheSrcLocal (toFilePath (parent (lpCabalFile lp))) - , taskAnyMissing = not $ Set.null missing - , taskBuildTypeConfig = packageBuildTypeConfig package - } - tell mempty { wFinals = Map.singleton (packageName package) res } - --- | Given a 'PackageName', adds all of the build tasks to build the --- package, if needed. +-- This will also add all the deps needed to build the tests / benchmarks. 
If +-- @isAllInOne@ is 'True' (the common case), then all of these should have +-- already been taken care of as part of the build step. +addFinal :: + LocalPackage + -> Package + -> Bool + -- ^ Will the build step also build the tests? + -> Bool + -- ^ Should Haddock documentation be built? + -> M () +addFinal lp package allInOne buildHaddocks = do + let name = package.name + logDebugPlanS "addFinal" "Clearing the call stack." + res <- local (\ctx' -> ctx' { callStack = [] }) $ + addPackageDeps package >>= \case + Left e -> pure $ Left e + Right (MissingPresentDeps missing present _minLoc) -> do + let pkgConfigOpts = packageConfigureOptsFromPackage package + ctx <- ask + let configOpts = TaskConfigOpts + { missing + , envConfig = ctx.ctxEnvConfig + , baseConfigOpts = ctx.baseConfigOpts + , isLocalNonExtraDep = True + , isMutable = Mutable + , pkgConfigOpts + } + pure $ Right Task + { configOpts + , buildHaddocks + , present + , taskType = TTLocalMutable lp + , allInOne + , cachePkgSrc = CacheSrcLocal (toFilePath (parent lp.cabalFP)) + , buildTypeConfig = packageBuildTypeConfig package + } + ctx <- ask + logDebugPlanS "addFinal" $ + "Restoring the call stack: " + <> fromString (show $ map packageNameString ctx.callStack) + logDebugPlanS "addFinal" $ + "Adding to construction output " + <> fromPackageName name + <> summariseResult res + tell mempty { wFinals = Map.singleton name res } + +-- | Given a 'PackageName', adds all of the build tasks to build the package, if +-- needed. First checks if the package name is in the library map. -- --- 'constructPlan' invokes this on all the target packages, setting --- @treatAsDep'@ to False, because those packages are direct build --- targets. 'addPackageDeps' invokes this while recursing into the --- dependencies of a package. As such, it sets @treatAsDep'@ to True, --- forcing this package to be marked as a dependency, even if it is --- directly wanted. 
This makes sense - if we left out packages that are --- deps, it would break the --only-dependencies build plan. -addDep :: PackageName - -> M (Either ConstructPlanException AddDepRes) -addDep name = do - ctx <- ask - m <- get - case Map.lookup name m of - Just res -> do - planDebug $ "addDep: Using cached result for " ++ show name ++ ": " ++ show res - return res - Nothing -> do - res <- if name `elem` callStack ctx - then do - planDebug $ "addDep: Detected cycle " ++ show name ++ ": " ++ show (callStack ctx) - return $ Left $ DependencyCycleDetected $ name : callStack ctx - else local (\ctx' -> ctx' { callStack = name : callStack ctx' }) $ do - let mpackageInfo = Map.lookup name $ combinedMap ctx - planDebug $ "addDep: Package info for " ++ show name ++ ": " ++ show mpackageInfo - case mpackageInfo of - -- TODO look up in the package index and see if there's a - -- recommendation available - Nothing -> return $ Left $ UnknownPackage name - Just (PIOnlyInstalled loc installed) -> do - -- FIXME Slightly hacky, no flags since - -- they likely won't affect executable - -- names. This code does not feel right. - let version = installedVersion installed - askPkgLoc = liftRIO $ do - mrev <- getLatestHackageRevision YesRequireHackageIndex name version - case mrev of - Nothing -> do - -- this could happen for GHC boot libraries missing from Hackage - logWarn $ "No latest package revision found for: " <> - fromString (packageNameString name) <> ", dependency callstack: " <> - displayShow (map packageNameString $ callStack ctx) - return Nothing - Just (_rev, cfKey, treeKey) -> - return . 
Just $ - PLIHackage (PackageIdentifier name version) cfKey treeKey - tellExecutablesUpstream name askPkgLoc loc Map.empty - return $ Right $ ADRFound loc installed - Just (PIOnlySource ps) -> do - tellExecutables name ps - installPackage name ps Nothing - Just (PIBoth ps installed) -> do - tellExecutables name ps - installPackage name ps (Just installed) - updateLibMap name res - return res - --- FIXME what's the purpose of this? Add a Haddock! +-- 'constructPlan' invokes this on all the target packages. +-- +-- 'addPackageDeps' invokes this while recursing into the dependencies of a +-- package, even if it is directly wanted. This makes sense - if we left out +-- packages that are deps, it would break the --only-dependencies build plan. +getCachedDepOrAddDep :: + PackageName + -> M (Either ConstructPlanException AddDepRes) +getCachedDepOrAddDep name = do + libraryMap <- get + case Map.lookup name libraryMap of + Just res -> do + logDebugPlanS "getCachedDepOrAddDep" $ + "Using cached result for " + <> fromPackageName name + <> ": " + <> fromString (show res) + pure res + Nothing -> checkCallStackAndAddDep name + +-- | Given a 'PackageName', known not to be in the library map, adds all of the +-- build tasks to build the package. First checks that the package name is not +-- already in the call stack. 
+checkCallStackAndAddDep :: + PackageName + -> M (Either ConstructPlanException AddDepRes) +checkCallStackAndAddDep name = do + ctx <- ask + let compiler = ctx.ctxEnvConfig.sourceMap.compiler + res <- if name `elem` ctx.callStack + then do + logDebugPlanS "checkCallStackAndAddDep" $ + "Detected cycle " + <> fromPackageName name + <> ": " + <> fromString (show $ map packageNameString ctx.callStack) + pure $ Left $ DependencyCycleDetected $ name : ctx.callStack + else case Map.lookup name ctx.combinedMap of + -- TODO look up in the package index and see if there's a + -- recommendation available + Nothing -> do + logDebugPlanS "checkCallStackAndAddDep" $ + "No package info for " + <> fromPackageName name + <> "." + pure $ Left $ UnknownPackage compiler name + Just packageInfo -> do + logDebugPlanS "checkCallStackAndAddDep" $ + "Pushing " + <> fromPackageName name + <> " on to the call stack." + -- Add the current package name to the head of the call stack. + res <- local (\ctx' -> ctx' { callStack = name : ctx'.callStack }) $ + addDep name packageInfo + logDebugPlanS "checkCallStackAndAddDep" $ + "Popped " + <> fromPackageName name + <> " from the call stack." + pure res + updateLibMap name res + pure res + +-- | Given a 'PackageName' and its 'PackageInfo' from the combined map, adds all +-- of the build tasks to build the package. Assumes that the head of the call +-- stack is the current package name. +addDep :: + PackageName + -> PackageInfo + -> M (Either ConstructPlanException AddDepRes) +addDep name packageInfo = do + logDebugPlanS "addDep" $ + "Package info for " + <> fromPackageName name + <> ": " + <> fromString (show packageInfo) + case packageInfo of + PIOnlyInstalled loc installed -> do + -- FIXME Slightly hacky, no flags since they likely won't affect + -- executable names. This code does not feel right. 
+ let version = installedVersion installed + askPkgLoc = liftRIO $ + getLatestHackageRevision YesRequireHackageIndex name version >>= \case + Nothing -> do + -- This could happen for GHC boot libraries missing from + -- Hackage. + cs <- asks (NE.nonEmpty . (.callStack)) + cs' <- maybe + (throwIO CallStackEmptyBug) + (pure . NE.tail) + cs + prettyWarnL + $ flow "No latest package revision found for" + : style Current (fromPackageName name) <> "," + : flow "dependency callstack:" + : mkNarrativeList Nothing False + (map fromPackageName cs' :: [StyleDoc]) + pure Nothing + Just (_rev, cfKey, treeKey) -> + pure $ Just $ + PLIHackage (PackageIdentifier name version) cfKey treeKey + tellExecutablesUpstream name askPkgLoc loc Map.empty + pure $ Right $ ADRFound loc installed + PIOnlySource ps -> do + tellExecutables name ps + installPackage name ps Nothing + PIBoth ps installed -> do + tellExecutables name ps + installPackage name ps (Just installed) + +-- | For given 'PackageName' and 'PackageSource' values, adds relevant +-- executables to the collected output. tellExecutables :: PackageName -> PackageSource -> M () tellExecutables _name (PSFilePath lp) - | lpWanted lp = tellExecutablesPackage Local $ lpPackage lp - | otherwise = return () --- Ignores ghcOptions because they don't matter for enumerating --- executables. -tellExecutables name (PSRemote pkgloc _version _fromSnaphot cp) = - tellExecutablesUpstream name (pure $ Just pkgloc) Snap (cpFlags cp) - + | lp.wanted = tellExecutablesPackage Local lp.package + | otherwise = pure () +-- Ignores ghcOptions because they don't matter for enumerating executables. +tellExecutables name (PSRemote pkgloc _version _fromSnapshot cp) = + tellExecutablesUpstream name (pure $ Just pkgloc) Snap cp.flags + +-- | For a given 'PackageName' value, known to be immutable, adds relevant +-- executables to the collected output. 
tellExecutablesUpstream :: - PackageName - -> M (Maybe PackageLocationImmutable) - -> InstallLocation - -> Map FlagName Bool - -> M () + PackageName + -> M (Maybe PackageLocationImmutable) + -> InstallLocation + -> Map FlagName Bool + -> M () tellExecutablesUpstream name retrievePkgLoc loc flags = do - ctx <- ask - when (name `Set.member` wanted ctx) $ do - mPkgLoc <- retrievePkgLoc - forM_ mPkgLoc $ \pkgLoc -> do - p <- loadPackage ctx pkgLoc flags [] [] - tellExecutablesPackage loc p - + ctx <- ask + when (name `Set.member` ctx.wanted) $ do + mPkgLoc <- retrievePkgLoc + forM_ mPkgLoc $ \pkgLoc -> do + p <- ctx.loadPackage pkgLoc flags [] [] + tellExecutablesPackage loc p + +-- | For given 'InstallLocation' and t'Package' values, adds relevant +-- executables to the collected output. In most cases, the relevant executables +-- are all the executables of the package. If the package is a wanted local one, +-- the executables are those executables that are wanted executables. tellExecutablesPackage :: InstallLocation -> Package -> M () tellExecutablesPackage loc p = do - cm <- asks combinedMap - -- Determine which components are enabled so we know which ones to copy - let myComps = - case Map.lookup (packageName p) cm of - Nothing -> assert False Set.empty - Just (PIOnlyInstalled _ _) -> Set.empty - Just (PIOnlySource ps) -> goSource ps - Just (PIBoth ps _) -> goSource ps - - goSource (PSFilePath lp) - | lpWanted lp = exeComponents (lpComponents lp) - | otherwise = Set.empty - goSource PSRemote{} = Set.empty - - tell mempty { wInstall = Map.fromList $ map (, loc) $ Set.toList $ filterComps myComps $ packageExes p } - where - filterComps myComps x - | Set.null myComps = x - | otherwise = Set.intersection x myComps + cm <- asks (.combinedMap) + -- Determine which components are enabled so we know which ones to copy + let myComps = + case Map.lookup p.name cm of + Nothing -> assert False Set.empty + Just (PIOnlyInstalled _ _) -> Set.empty + Just (PIOnlySource ps) -> 
goSource ps + Just (PIBoth ps _) -> goSource ps + + goSource (PSFilePath lp) + | lp.wanted = exeComponents lp.components + | otherwise = Set.empty + goSource PSRemote{} = Set.empty + + tell mempty + { wInstall = Map.fromList $ + map (, loc) $ Set.toList $ filterComps myComps $ buildableExes p + } + where + filterComps myComps x + | Set.null myComps = x + | otherwise = Set.intersection x myComps -- | Given a 'PackageSource' and perhaps an 'Installed' value, adds --- build 'Task's for the package and its dependencies. -installPackage :: PackageName - -> PackageSource - -> Maybe Installed - -> M (Either ConstructPlanException AddDepRes) +-- build t'Task's for the package and its dependencies. +installPackage :: + PackageName + -> PackageSource + -> Maybe Installed + -> M (Either ConstructPlanException AddDepRes) installPackage name ps minstalled = do - ctx <- ask - case ps of - PSRemote pkgLoc _version _fromSnaphot cp -> do - planDebug $ "installPackage: Doing all-in-one build for upstream package " ++ show name - package <- loadPackage ctx pkgLoc (cpFlags cp) (cpGhcOptions cp) (cpCabalConfigOpts cp) - resolveDepsAndInstall True (cpHaddocks cp) ps package minstalled - PSFilePath lp -> do - case lpTestBench lp of - Nothing -> do - planDebug $ "installPackage: No test / bench component for " ++ show name ++ " so doing an all-in-one build." - resolveDepsAndInstall True (lpBuildHaddocks lp) ps (lpPackage lp) minstalled - Just tb -> do - -- Attempt to find a plan which performs an all-in-one - -- build. Ignore the writer action + reset the state if - -- it fails. 
- s <- get - res <- pass $ do - res <- addPackageDeps tb - let writerFunc w = case res of - Left _ -> mempty - _ -> w - return (res, writerFunc) - case res of - Right deps -> do - planDebug $ "installPackage: For " ++ show name ++ ", successfully added package deps" - -- in curator builds we can't do all-in-one build as test/benchmark failure - -- could prevent library from being available to its dependencies - -- but when it's already available it's OK to do that - splitRequired <- expectedTestOrBenchFailures <$> asks mcurator - let isAllInOne = not splitRequired - adr <- installPackageGivenDeps isAllInOne (lpBuildHaddocks lp) ps tb minstalled deps - let finalAllInOne = case adr of - ADRToInstall _ | splitRequired -> False - _ -> True - -- FIXME: this redundantly adds the deps (but - -- they'll all just get looked up in the map) - addFinal lp tb finalAllInOne False - return $ Right adr - Left _ -> do - -- Reset the state to how it was before - -- attempting to find an all-in-one build - -- plan. - planDebug $ "installPackage: Before trying cyclic plan, resetting lib result map to " ++ show s - put s - -- Otherwise, fall back on building the - -- tests / benchmarks in a separate step. - res' <- resolveDepsAndInstall False (lpBuildHaddocks lp) ps (lpPackage lp) minstalled - when (isRight res') $ do - -- Insert it into the map so that it's - -- available for addFinal. - updateLibMap name res' - addFinal lp tb False False - return res' + ctx <- ask + case ps of + PSRemote pkgLoc _version _fromSnapshot cp -> do + logDebugPlanS "installPackage" $ + "Doing all-in-one build for upstream package " + <> fromPackageName name + <> "." + package <- ctx.loadPackage + pkgLoc cp.flags cp.ghcOptions cp.cabalConfigOpts + resolveDepsAndInstall True cp.buildHaddocks ps package minstalled + PSFilePath lp -> do + case lp.testBench of + Nothing -> do + logDebugPlanS "installPackage" $ + "No test or bench component for " + <> fromPackageName name + <> " so doing an all-in-one build." 
+ resolveDepsAndInstall + True lp.buildHaddocks ps lp.package minstalled + Just tb -> do + -- Preserve the current library map. + libMap <- get + -- Attempt to find a plan which performs an all-in-one build. Ignore + -- the writer action + reset the state if it fails. + res <- pass $ do + res <- addPackageDeps tb + let modifyOutput = case res of + Left _ -> const mempty + _ -> id + pure (res, modifyOutput) + case res of + Right deps -> do + logDebugPlanS "installPackage" $ + "For " + <> fromPackageName name + <> ", successfully added package deps." + -- in curator builds we can't do all-in-one build as + -- test/benchmark failure could prevent library from being + -- available to its dependencies but when it's already available + -- it's OK to do that + splitRequired <- expectedTestOrBenchFailures <$> asks (.curator) + let isAllInOne = not splitRequired + adr <- installPackageGivenDeps + isAllInOne lp.buildHaddocks ps tb minstalled deps + let finalAllInOne = not (isAdrToInstall adr && splitRequired) + -- FIXME: this redundantly adds the deps (but they'll all just + -- get looked up in the map) + addFinal lp tb finalAllInOne False + pure $ Right adr + Left _ -> do + -- Reset the state to how it was before attempting to find an + -- all-in-one build plan. + logDebugPlanS "installPackage" $ + "Before trying cyclic plan, resetting lib result map to: " + <> fromString (show libMap) + put libMap + -- Otherwise, fall back on building the tests / benchmarks in a + -- separate step. + res' <- resolveDepsAndInstall + False lp.buildHaddocks ps lp.package minstalled + when (isRight res') $ do + -- Insert it into the map so that it's available for addFinal. 
+ updateLibMap name res' + addFinal lp tb False False + pure res' where - expectedTestOrBenchFailures maybeCurator = fromMaybe False $ do - curator <- maybeCurator - pure $ Set.member name (curatorExpectTestFailure curator) || - Set.member name (curatorExpectBenchmarkFailure curator) - -resolveDepsAndInstall :: Bool - -> Bool - -> PackageSource - -> Package - -> Maybe Installed - -> M (Either ConstructPlanException AddDepRes) -resolveDepsAndInstall isAllInOne buildHaddocks ps package minstalled = do - res <- addPackageDeps package - case res of - Left err -> return $ Left err - Right deps -> liftM Right $ installPackageGivenDeps isAllInOne buildHaddocks ps package minstalled deps - --- | Checks if we need to install the given 'Package', given the results --- of 'addPackageDeps'. If dependencies are missing, the package is --- dirty, or it's not installed, then it needs to be installed. -installPackageGivenDeps :: Bool - -> Bool - -> PackageSource - -> Package - -> Maybe Installed - -> ( Set PackageIdentifier - , Map PackageIdentifier GhcPkgId - , IsMutable ) - -> M AddDepRes -installPackageGivenDeps isAllInOne buildHaddocks ps package minstalled (missing, present, minMutable) = do - let name = packageName package + expectedTestOrBenchFailures maybeCurator = fromMaybe False $ do + curator <- maybeCurator + pure $ Set.member name curator.expectTestFailure + || Set.member name curator.expectBenchmarkFailure + +resolveDepsAndInstall :: + Bool + -- ^ will the build step also build any tests? + -> Bool + -- ^ Should Haddock documentation be built? 
+ -> PackageSource + -> Package + -> Maybe Installed + -> M (Either ConstructPlanException AddDepRes) +resolveDepsAndInstall isAllInOne buildHaddocks ps package minstalled = + addPackageDeps package >>= \case + Left err -> pure $ Left err + Right deps -> + Right <$> + installPackageGivenDeps + isAllInOne buildHaddocks ps package minstalled deps + +-- | Checks if we need to install the given t'Package', given the results of +-- 'addPackageDeps'. If dependencies are missing, the package is dirty, or it is +-- not installed, then it needs to be installed. +installPackageGivenDeps :: + Bool + -- ^ will the build step also build any tests? + -> Bool + -- ^ Should Haddock documentation be built? + -> PackageSource + -> Package + -> Maybe Installed + -> MissingPresentDeps + -> M AddDepRes +installPackageGivenDeps + allInOne + buildHaddocks + ps + package + minstalled + (MissingPresentDeps missing present minMutable) + = do + let name = package.name + mRightVersionInstalled <- case minstalled of + Just installed -> if Set.null missing + then do + shouldInstall <- + checkDirtiness ps installed package present buildHaddocks + pure $ if shouldInstall then Nothing else Just installed + else do + let packageNameText = T.pack . packageNameString . pkgName + t = T.intercalate ", " $ map packageNameText (Set.toList missing) + tell mempty + { wDirty = + Map.singleton name $ "missing dependencies: " <> addEllipsis t + } + pure Nothing + Nothing -> pure Nothing ctx <- ask - mRightVersionInstalled <- case (minstalled, Set.null missing) of - (Just installed, True) -> do - shouldInstall <- checkDirtiness ps installed package present buildHaddocks - return $ if shouldInstall then Nothing else Just installed - (Just _, False) -> do - let t = T.intercalate ", " $ map (T.pack . packageNameString . 
pkgName) (Set.toList missing) - tell mempty { wDirty = Map.singleton name $ "missing dependencies: " <> addEllipsis t } - return Nothing - (Nothing, _) -> return Nothing let loc = psLocation ps - mutable = installLocationIsMutable loc <> minMutable - return $ case mRightVersionInstalled of - Just installed -> ADRFound loc installed - Nothing -> ADRToInstall Task - { taskProvides = PackageIdentifier - (packageName package) - (packageVersion package) - , taskConfigOpts = TaskConfigOpts missing $ \missing' -> - let allDeps = Map.union present missing' - in configureOpts - (view envConfigL ctx) - (baseConfigOpts ctx) - allDeps - (psLocal ps) - mutable - package - , taskBuildHaddock = buildHaddocks - , taskPresent = present - , taskType = - case ps of - PSFilePath lp -> - TTLocalMutable lp - PSRemote pkgLoc _version _fromSnaphot _cp -> - TTRemotePackage mutable package pkgLoc - , taskAllInOne = isAllInOne - , taskCachePkgSrc = toCachePkgSrc ps - , taskAnyMissing = not $ Set.null missing - , taskBuildTypeConfig = packageBuildTypeConfig package + isMutable = installLocationIsMutable loc <> minMutable + pkgConfigOpts = packageConfigureOptsFromPackage package + configOpts = TaskConfigOpts + { missing + , envConfig = ctx.ctxEnvConfig + , baseConfigOpts = ctx.baseConfigOpts + , isLocalNonExtraDep = psLocal ps + , isMutable + , pkgConfigOpts } + pure $ case mRightVersionInstalled of + Just installed -> ADRFound loc installed + Nothing -> ADRToInstall Task + { configOpts + , buildHaddocks + , present + , taskType = + case ps of + PSFilePath lp -> + TTLocalMutable lp + PSRemote pkgLoc _version _fromSnapshot _cp -> + TTRemotePackage isMutable package pkgLoc + , allInOne + , cachePkgSrc = toCachePkgSrc ps + , buildTypeConfig = packageBuildTypeConfig package + } -- | Is the build type of the package Configure packageBuildTypeConfig :: Package -> Bool -packageBuildTypeConfig pkg = packageBuildType pkg == Configure +packageBuildTypeConfig pkg = pkg.buildType == Configure --- Update 
response in the lib map. If it is an error, and there's --- already an error about cyclic dependencies, prefer the cyclic error. +-- Update response in the library map. If it is an error, and there's already an +-- error about cyclic dependencies, prefer the cyclic error. updateLibMap :: PackageName -> Either ConstructPlanException AddDepRes -> M () -updateLibMap name val = modify $ \mp -> - case (M.lookup name mp, val) of - (Just (Left DependencyCycleDetected{}), Left _) -> mp - _ -> M.insert name val mp +updateLibMap name res = do + logDebugPlanS "updateLibMap" $ + "Updating for: " + <> fromPackageName name + <> summariseResult res + modify $ \mp -> + case (Map.lookup name mp, res) of + (Just (Left DependencyCycleDetected{}), Left _) -> mp + _ -> Map.insert name res mp addEllipsis :: Text -> Text addEllipsis t - | T.length t < 100 = t - | otherwise = T.take 97 t <> "..." - --- | Given a package, recurses into all of its dependencies. The results --- indicate which packages are missing, meaning that their 'GhcPkgId's --- will be figured out during the build, after they've been built. The --- 2nd part of the tuple result indicates the packages that are already --- installed which will be used. --- --- The 3rd part of the tuple is an 'InstallLocation'. If it is 'Local', --- then the parent package must be installed locally. Otherwise, if it --- is 'Snap', then it can either be installed locally or in the --- snapshot. -addPackageDeps :: Package -> M (Either ConstructPlanException (Set PackageIdentifier, Map PackageIdentifier GhcPkgId, IsMutable)) + | T.length t < 100 = t + | otherwise = T.take 97 t <> "..." + +-- | Given a package, recurses into all of its dependencies. The resulting +-- triple indicates: (1) which packages are missing. 
This means that their +-- 'GhcPkgId's will be figured out during the build, after they've been built; +-- (2) the packages that are already installed and which will be used; and +-- (3) whether the package itself is mutable or immutable. +addPackageDeps :: + Package + -> M (Either ConstructPlanException MissingPresentDeps) addPackageDeps package = do - ctx <- ask - deps' <- packageDepsWithTools package - deps <- forM (Map.toList deps') $ \(depname, DepValue range depType) -> do - eres <- addDep depname - let getLatestApplicableVersionAndRev :: M (Maybe (Version, BlobKey)) - getLatestApplicableVersionAndRev = do - vsAndRevs <- runRIO ctx $ getHackagePackageVersions YesRequireHackageIndex UsePreferredVersions depname - pure $ do - lappVer <- latestApplicableVersion range $ Map.keysSet vsAndRevs - revs <- Map.lookup lappVer vsAndRevs - (cabalHash, _) <- Map.maxView revs - Just (lappVer, cabalHash) - case eres of - Left e -> do - addParent depname range Nothing - let bd = - case e of - UnknownPackage name -> assert (name == depname) NotInBuildPlan - DependencyCycleDetected names -> BDDependencyCycleDetected names - -- ultimately we won't show any - -- information on this to the user, we'll - -- allow the dependency failures alone to - -- display to avoid spamming the user too - -- much - DependencyPlanFailures _ _ -> Couldn'tResolveItsDependencies (packageVersion package) - mlatestApplicable <- getLatestApplicableVersionAndRev - return $ Left (depname, (range, mlatestApplicable, bd)) - Right adr | depType == AsLibrary && not (adrHasLibrary adr) -> - return $ Left (depname, (range, Nothing, HasNoLibrary)) - Right adr -> do - addParent depname range Nothing - inRange <- if adrVersion adr `withinRange` range - then return True - else do - let warn_ reason = - tell mempty { wWarnings = (msg:) } - where - msg = T.concat - [ "WARNING: Ignoring " - , T.pack $ packageNameString $ packageName package - , "'s bounds on " - , T.pack $ packageNameString depname - , " (" - , 
versionRangeText range - , "); using " - , T.pack $ packageIdentifierString $ PackageIdentifier depname (adrVersion adr) - , ".\nReason: " - , reason - , "." - ] - allowNewer <- view $ configL.to configAllowNewer - if allowNewer - then do - warn_ "allow-newer enabled" - return True - else do - -- We ignore dependency information for packages in a snapshot - x <- inSnapshot (packageName package) (packageVersion package) - y <- inSnapshot depname (adrVersion adr) - if x && y - then do - warn_ "trusting snapshot over cabal file dependency information" - return True - else return False - if inRange - then case adr of - ADRToInstall task -> return $ Right - (Set.singleton $ taskProvides task, Map.empty, taskTargetIsMutable task) - ADRFound loc (Executable _) -> return $ Right - (Set.empty, Map.empty, installLocationIsMutable loc) - ADRFound loc (Library ident gid _) -> return $ Right - (Set.empty, Map.singleton ident gid, installLocationIsMutable loc) - else do - mlatestApplicable <- getLatestApplicableVersionAndRev - return $ Left (depname, (range, mlatestApplicable, DependencyMismatch $ adrVersion adr)) - case partitionEithers deps of - -- Note that the Monoid for 'InstallLocation' means that if any - -- is 'Local', the result is 'Local', indicating that the parent - -- package must be installed locally. Otherwise the result is - -- 'Snap', indicating that the parent can either be installed - -- locally or in the snapshot. - ([], pairs) -> return $ Right $ mconcat pairs - (errs, _) -> return $ Left $ DependencyPlanFailures - package - (Map.fromList errs) - where - adrVersion (ADRToInstall task) = pkgVersion $ taskProvides task - adrVersion (ADRFound _ installed) = installedVersion installed - -- Update the parents map, for later use in plan construction errors - -- - see 'getShortestDepsPath'. 
- addParent depname range mversion = tell mempty { wParents = MonoidMap $ M.singleton depname val } - where - val = (First mversion, [(packageIdentifier package, range)]) - - adrHasLibrary :: AddDepRes -> Bool - adrHasLibrary (ADRToInstall task) = taskHasLibrary task - adrHasLibrary (ADRFound _ Library{}) = True - adrHasLibrary (ADRFound _ Executable{}) = False - - taskHasLibrary :: Task -> Bool - taskHasLibrary task = - case taskType task of - TTLocalMutable lp -> packageHasLibrary $ lpPackage lp - TTRemotePackage _ p _ -> packageHasLibrary p - - -- make sure we consider internal libraries as libraries too - packageHasLibrary :: Package -> Bool - packageHasLibrary p = - not (Set.null (packageInternalLibraries p)) || - case packageLibraries p of - HasLibraries _ -> True - NoLibraries -> False - -checkDirtiness :: PackageSource - -> Installed - -> Package - -> Map PackageIdentifier GhcPkgId - -> Bool - -> M Bool + checkAndWarnForUnknownTools package + let pkgId = packageIdentifier package + processPackageDepsEither package (processDep pkgId) <&> \case + -- Note that the Monoid for 'IsMutable' means that if any is 'Mutable', + -- the result is 'Mutable'. Otherwise the result is 'Immutable'. + Right v -> Right v + Left errs -> + Left $ DependencyPlanFailures package errs + +-- | Given a dependency, yields either information for an error message or a +-- triple indicating: (1) if the dependency is to be installed, its package +-- identifier; (2) if the dependency is installed and a library, its package +-- identifier and 'GhcPkgId'; and (3) if the dependency is, or will be when +-- installed, mutable or immutable. +processDep :: + PackageIdentifier + -- ^ The package which has the dependency being processed. + -> PackageName + -- ^ The name of the dependency. + -> DepValue + -- ^ The version range and dependency type of the dependency. 
+ -> M ( Either + ( Map + PackageName + (VersionRange, Maybe (Version, BlobKey), BadDependency) + ) + MissingPresentDeps + ) +processDep pkgId name value = do + let failure mLatestApp err = + Left $ Map.singleton name (range, mLatestApp, err) + getCachedDepOrAddDep name >>= \case + Left e -> do + addParent + let bd = case e of + UnknownPackage _ name' -> assert (name' == name) NotInBuildPlan + DependencyCycleDetected names -> BDDependencyCycleDetected names + -- Ultimately we won't show any information on this to the user; + -- we'll allow the dependency failures alone to display to avoid + -- spamming the user too much. + DependencyPlanFailures _ _ -> + Couldn'tResolveItsDependencies version + mLatestApplicable <- getLatestApplicableVersionAndRev name range + pure $ failure mLatestApplicable bd + Right adr + | isDepTypeLibrary value.depType && not (adrHasLibrary adr) -> + pure $ failure Nothing HasNoLibrary + Right adr -> do + addParent + inRange <- adrInRange pkgId name range adr + if inRange + then pure $ Right $ processAdr adr + else do + mLatestApplicable <- getLatestApplicableVersionAndRev name range + pure $ failure mLatestApplicable (DependencyMismatch $ adrVersion adr) + where + range = value.versionRange + version = pkgVersion pkgId + -- Update the parents map, for later use in plan construction errors + -- - see 'getShortestDepsPath'. 
+ addParent = + let parentMap = Map.singleton name [(pkgId, range)] + in tell mempty { wParents = MonoidMap parentMap } + +getLatestApplicableVersionAndRev :: + PackageName + -> VersionRange + -> M (Maybe (Version, BlobKey)) +getLatestApplicableVersionAndRev name range = do + ctx <- ask + vsAndRevs <- runRIO ctx $ + getHackagePackageVersions YesRequireHackageIndex UsePreferredVersions name + pure $ do + lappVer <- latestApplicableVersion range $ Map.keysSet vsAndRevs + revs <- Map.lookup lappVer vsAndRevs + (cabalHash, _) <- Map.maxView revs + Just (lappVer, cabalHash) + +-- | Function to determine whether the result of 'addDep' is within range, given +-- the version range of the dependency and taking into account Stack's +-- @allow-newer@ configuration. +adrInRange :: + PackageIdentifier + -- ^ The package which has the dependency. + -> PackageName + -- ^ The name of the dependency. + -> VersionRange + -- ^ The version range of the dependency. + -> AddDepRes + -- ^ The result of 'addDep'. + -> M Bool +adrInRange pkgId name range adr = if adrVersion adr `withinRange` range + then pure True + else do + config <- view configL + allowNewerCLI <- view $ envConfigL . to (.buildOptsCLI) . 
to (.allowNewer) + let allowNewerConfig = config.allowNewer + allowNewer = fromFirst False $ allowNewerCLI <> allowNewerConfig + allowNewerDeps = config.allowNewerDeps + if allowNewer + then case allowNewerDeps of + Nothing -> do + warn_ True $ + fillSep + [ style Shell "allow-newer" + , "enabled" + ] + pure True + Just boundsIgnoredDeps -> do + let pkgName' = fromPackageName pkgName + isBoundsIgnoreDep = pkgName `elem` boundsIgnoredDeps + reason = if isBoundsIgnoreDep + then fillSep + [ style Current pkgName' + , flow "is an" + , style Shell "allow-newer-dep" + , flow "and" + , style Shell "allow-newer" + , "enabled" + ] + else fillSep + [ style Current pkgName' + , flow "is not an" + , style Shell "allow-newer-dep" + , flow "although" + , style Shell "allow-newer" + , "enabled" + ] + warn_ isBoundsIgnoreDep reason + pure isBoundsIgnoreDep + else do + when (isJust allowNewerDeps) $ + warn_ False $ + fillSep + [ "although" + , style Shell "allow-newer-deps" + , flow "are specified," + , style Shell "allow-newer" + , "is" + , style Shell "false" + ] + -- We ignore dependency information for packages in a snapshot + pkgInSnapshot <- inSnapshot pkgName version + adrInSnapshot <- inSnapshot name (adrVersion adr) + if pkgInSnapshot && adrInSnapshot + then do + warn_ True + ( flow "trusting snapshot over Cabal file dependency \ + \information" + ) + pure True + else pure False + where + PackageIdentifier pkgName version = pkgId + warn_ isIgnoring reason = tell mempty { wWarnings = (msg:) } + where + msg = fillSep + [ if isIgnoring + then "Ignoring" + else flow "Not ignoring" + , style Current (fromPackageName pkgName) <> "'s" + , flow "bounds on" + , style Current (fromPackageName name) + , parens (fromString . T.unpack $ versionRangeText range) + , flow "and using" + , style + Current + (fromPackageId $ PackageIdentifier name (adrVersion adr)) <> "." + ] + <> line + <> fillSep + [ "Reason:" + , reason <> "." 
+ ] + +-- | Given a result of 'addDep', yields a triple indicating: (1) if the +-- dependency is to be installed, its package identifier; (2) if the dependency +-- is installed and a library, its package identifier and 'GhcPkgId'; and (3) if +-- the dependency is, or will be when installed, mutable or immutable. +processAdr :: + AddDepRes + -> MissingPresentDeps +processAdr adr = case adr of + ADRToInstall task -> + MissingPresentDeps + { missingPackages = Set.singleton $ taskProvides task + , presentPackages = mempty + , isMutable = taskTargetIsMutable task + } + ADRFound loc installed -> + MissingPresentDeps + { missingPackages = mempty + , presentPackages = presentPackagesV + , isMutable = installLocationIsMutable loc + } + where + presentPackagesV = case installed of + Library ident installedInfo -> installedMapGhcPkgId ident installedInfo + _ -> Map.empty + +checkDirtiness :: + PackageSource + -> Installed + -> Package + -> Map PackageIdentifier GhcPkgId + -> Bool + -- ^ Is Haddock documentation being built? + -> M Bool checkDirtiness ps installed package present buildHaddocks = do - ctx <- ask - moldOpts <- runRIO ctx $ tryGetFlagCache installed - let configOpts = configureOpts - (view envConfigL ctx) - (baseConfigOpts ctx) - present - (psLocal ps) - (installLocationIsMutable $ psLocation ps) -- should be Local i.e. mutable always - package - wantConfigCache = ConfigCache - { configCacheOpts = configOpts - , configCacheDeps = Set.fromList $ Map.elems present - , configCacheComponents = - case ps of - PSFilePath lp -> Set.map (encodeUtf8 . 
renderComponent) $ lpComponents lp - PSRemote{} -> Set.empty - , configCacheHaddock = buildHaddocks - , configCachePkgSrc = toCachePkgSrc ps - , configCachePathEnvVar = pathEnvVar ctx - } - config = view configL ctx - mreason <- - case moldOpts of - Nothing -> pure $ Just "old configure information not found" - Just oldOpts - | Just reason <- describeConfigDiff config oldOpts wantConfigCache -> pure $ Just reason - | True <- psForceDirty ps -> pure $ Just "--force-dirty specified" - | otherwise -> do - dirty <- psDirty ps - pure $ - case dirty of - Just files -> Just $ "local file changes: " <> addEllipsis (T.pack $ unwords $ Set.toList files) - Nothing -> Nothing - case mreason of - Nothing -> return False - Just reason -> do - tell mempty { wDirty = Map.singleton (packageName package) reason } - return True - -describeConfigDiff :: Config -> ConfigCache -> ConfigCache -> Maybe Text + ctx <- ask + moldOpts <- runRIO ctx $ tryGetFlagCache installed + let packageConfigureOpt = packageConfigureOptsFromPackage package + configureOpts = configureOptsFromBase + (view envConfigL ctx) + ctx.baseConfigOpts + present + (psLocal ps) + (installLocationIsMutable $ psLocation ps) -- should be Local i.e. mutable always + packageConfigureOpt + components = case ps of + PSFilePath lp -> + Set.map (encodeUtf8 . 
renderComponent) lp.components + PSRemote{} -> Set.empty + wantConfigCache = ConfigCache + { configureOpts + , deps = Set.fromList $ Map.elems present + , components + , buildHaddocks + , pkgSrc = toCachePkgSrc ps + , pathEnvVar = ctx.pathEnvVar + } + config = view configL ctx + mreason <- + case moldOpts of + Nothing -> pure $ Just "old configure information not found" + Just oldOpts + | Just reason <- describeConfigDiff config oldOpts wantConfigCache -> + pure $ Just reason + | True <- psForceDirty ps -> pure $ Just "--force-dirty specified" + | otherwise -> + psDirty ps <&> \case + Just files -> Just $ + "local file changes: " + <> addEllipsis (T.pack $ unwords $ Set.toList files) + Nothing -> Nothing + case mreason of + Nothing -> pure False + Just reason -> do + tell mempty { wDirty = Map.singleton package.name reason } + pure True + +-- | If the new Cabal configuration cache is the same as the old, yields +-- 'Nothing'. Otherwise yields 'Just' a textual explanation of how they differ. +describeConfigDiff :: + Config + -> ConfigCache + -- ^ The old Cabal configuration cache. + -> ConfigCache + -- ^ The new Cabal configuration cache. 
+ -> Maybe Text describeConfigDiff config old new - | configCachePkgSrc old /= configCachePkgSrc new = Just $ - "switching from " <> - pkgSrcName (configCachePkgSrc old) <> " to " <> - pkgSrcName (configCachePkgSrc new) - | not (configCacheDeps new `Set.isSubsetOf` configCacheDeps old) = Just "dependencies changed" - | not $ Set.null newComponents = - Just $ "components added: " `T.append` T.intercalate ", " - (map (decodeUtf8With lenientDecode) (Set.toList newComponents)) - | not (configCacheHaddock old) && configCacheHaddock new = Just "rebuilding with haddocks" - | oldOpts /= newOpts = Just $ T.pack $ concat - [ "flags changed from " - , show oldOpts - , " to " - , show newOpts - ] - | otherwise = Nothing - where - stripGhcOptions = - go - where - go [] = [] - go ("--ghc-option":x:xs) = go' Ghc x xs - go ("--ghc-options":x:xs) = go' Ghc x xs - go ((T.stripPrefix "--ghc-option=" -> Just x):xs) = go' Ghc x xs - go ((T.stripPrefix "--ghc-options=" -> Just x):xs) = go' Ghc x xs - go (x:xs) = x : go xs - - go' wc x xs = checkKeepers wc x $ go xs - - checkKeepers wc x xs = - case filter isKeeper $ T.words x of - [] -> xs - keepers -> T.pack (compilerOptionsCabalFlag wc) : T.unwords keepers : xs - - -- GHC options which affect build results and therefore should always - -- force a rebuild - -- - -- For the most part, we only care about options generated by Stack - -- itself - isKeeper = (== "-fhpc") -- more to be added later - - userOpts = filter (not . isStackOpt) - . (if configRebuildGhcOptions config - then id - else stripGhcOptions) - . map T.pack - . (\(ConfigureOpts x y) -> x ++ y) - . 
configCacheOpts - - (oldOpts, newOpts) = removeMatching (userOpts old) (userOpts new) - - removeMatching (x:xs) (y:ys) - | x == y = removeMatching xs ys - removeMatching xs ys = (xs, ys) - - newComponents = configCacheComponents new `Set.difference` configCacheComponents old - - pkgSrcName (CacheSrcLocal fp) = T.pack fp - pkgSrcName CacheSrcUpstream = "upstream source" + | old.pkgSrc /= new.pkgSrc = Just $ + "switching from " <> + pkgSrcName old.pkgSrc <> " to " <> + pkgSrcName new.pkgSrc + | not (new.deps `Set.isSubsetOf` old.deps) = + Just "dependencies changed" + | not $ Set.null newComponents = + Just $ "components added: " `T.append` T.intercalate ", " + (map (decodeUtf8With lenientDecode) (Set.toList newComponents)) + | not old.buildHaddocks && new.buildHaddocks = + Just "rebuilding with haddocks" + | oldOpts /= newOpts = Just $ T.pack $ concat + [ "flags changed from " + , show oldOpts + , " to " + , show newOpts + ] + | otherwise = Nothing + where + stripGhcOptions = go + where + go [] = [] + go ("--ghc-option":x:xs) = go' Ghc x xs + go ("--ghc-options":x:xs) = go' Ghc x xs + go ((T.stripPrefix "--ghc-option=" -> Just x):xs) = go' Ghc x xs + go ((T.stripPrefix "--ghc-options=" -> Just x):xs) = go' Ghc x xs + go (x:xs) = x : go xs + + go' wc x xs = checkKeepers wc x $ go xs + + checkKeepers wc x xs = + case filter isKeeper $ T.words x of + [] -> xs + keepers -> T.pack (compilerOptionsCabalFlag wc) : T.unwords keepers : xs + + -- GHC options which affect build results and therefore should always force + -- a rebuild + -- + -- For the most part, we only care about options generated by Stack itself + isKeeper = (== "-fhpc") -- more to be added later + + userOpts = filter (not . isStackOpt) + . (if config.rebuildGhcOptions + then id + else stripGhcOptions) + . map T.pack + . renderConfigureOpts + . 
(.configureOpts) + where + -- options set by Stack + isStackOpt :: Text -> Bool + isStackOpt t = any (`T.isPrefixOf` t) + [ "--dependency=" + , "--constraint=" + , "--package-db=" + , "--libdir=" + , "--bindir=" + , "--datadir=" + , "--libexecdir=" + , "--sysconfdir" + , "--docdir=" + , "--htmldir=" + , "--haddockdir=" + , "--enable-tests" + , "--enable-benchmarks" + , "--exact-configuration" + -- Treat these as causing dirtiness, to resolve + -- https://github.com/commercialhaskell/stack/issues/2984 + -- + -- , "--enable-library-profiling" + -- , "--enable-executable-profiling" + -- , "--enable-profiling" + ] || t == "--user" + + (oldOpts, newOpts) = removeMatching (userOpts old) (userOpts new) + + removeMatching (x:xs) (y:ys) + | x == y = removeMatching xs ys + removeMatching xs ys = (xs, ys) + + newComponents = + new.components `Set.difference` old.components + + pkgSrcName (CacheSrcLocal fp) = T.pack fp + pkgSrcName CacheSrcUpstream = "upstream source" psForceDirty :: PackageSource -> Bool -psForceDirty (PSFilePath lp) = lpForceDirty lp +psForceDirty (PSFilePath lp) = lp.forceDirty psForceDirty PSRemote{} = False -psDirty - :: (MonadIO m, HasEnvConfig env, MonadReader env m) +psDirty :: + (MonadIO m, HasEnvConfig env, MonadReader env m) => PackageSource -> m (Maybe (Set FilePath)) -psDirty (PSFilePath lp) = runMemoizedWith $ lpDirtyFiles lp +psDirty (PSFilePath lp) = runMemoizedWith lp.dirtyFiles psDirty PSRemote {} = pure Nothing -- files never change in a remote package psLocal :: PackageSource -> Bool @@ -921,334 +1243,103 @@ psLocation :: PackageSource -> InstallLocation psLocation (PSFilePath _) = Local psLocation PSRemote{} = Snap --- | Get all of the dependencies for a given package, including build --- tool dependencies. -packageDepsWithTools :: Package -> M (Map PackageName DepValue) -packageDepsWithTools p = do - -- Check whether the tool is on the PATH before warning about it. 
- warnings <- fmap catMaybes $ forM (Set.toList $ packageUnknownTools p) $ - \name@(ExeName toolName) -> do - let settings = minimalEnvSettings { esIncludeLocals = True } - config <- view configL - menv <- liftIO $ configProcessContextSettings config settings - mfound <- runRIO menv $ findExecutable $ T.unpack toolName - case mfound of - Left _ -> return $ Just $ ToolWarning name (packageName p) - Right _ -> return Nothing - tell mempty { wWarnings = (map toolWarningText warnings ++) } - return $ packageDeps p - --- | Warn about tools in the snapshot definition. States the tool name --- expected and the package name using it. -data ToolWarning = ToolWarning ExeName PackageName - deriving Show - -toolWarningText :: ToolWarning -> Text -toolWarningText (ToolWarning (ExeName toolName) pkgName') = - "No packages found in snapshot which provide a " <> - T.pack (show toolName) <> - " executable, which is a build-tool dependency of " <> - T.pack (packageNameString pkgName') - --- | Strip out anything from the @Plan@ intended for the local database -stripLocals :: Plan -> Plan -stripLocals plan = plan - { planTasks = Map.filter checkTask $ planTasks plan - , planFinals = Map.empty - , planUnregisterLocal = Map.empty - , planInstallExes = Map.filter (/= Local) $ planInstallExes plan - } - where - checkTask task = taskLocation task == Snap - -stripNonDeps :: Set PackageName -> Plan -> Plan -stripNonDeps deps plan = plan - { planTasks = Map.filter checkTask $ planTasks plan - , planFinals = Map.empty - , planInstallExes = Map.empty -- TODO maybe don't disable this? - } - where - checkTask task = taskProvides task `Set.member` missingForDeps - providesDep task = pkgName (taskProvides task) `Set.member` deps - missing = Map.fromList $ map (taskProvides &&& tcoMissing . 
taskConfigOpts) $ - Map.elems (planTasks plan) - missingForDeps = flip execState mempty $ do - for_ (Map.elems $ planTasks plan) $ \task -> - when (providesDep task) $ collectMissing mempty (taskProvides task) - - collectMissing dependents pid = do - when (pid `elem` dependents) $ error $ - "Unexpected: task cycle for " <> packageNameString (pkgName pid) - modify'(<> Set.singleton pid) - mapM_ (collectMissing (pid:dependents)) (fromMaybe mempty $ M.lookup pid missing) - --- | Is the given package/version combo defined in the snapshot or in the global database? +-- | For the given package, warn about any unknown tools that are not on the +-- PATH and not one of the executables of the package. +checkAndWarnForUnknownTools :: Package -> M () +checkAndWarnForUnknownTools p = do + let unknownTools = Set.toList $ packageUnknownTools p + -- Check whether the tool is on the PATH or a package executable before + -- warning about it. + warnings <- + fmap catMaybes $ forM unknownTools $ \toolName -> + runMaybeT $ notOnPath toolName *> notPackageExe toolName *> warn toolName + tell mempty { wWarnings = (map toolWarningText warnings ++) } + where + -- From Cabal 2.0, build-tools can specify a pre-built executable that should + -- already be on the PATH. + notOnPath toolName = MaybeT $ do + let settings = minimalEnvSettings { includeLocals = True } + config <- view configL + menv <- liftIO $ config.processContextSettings settings + eFound <- runRIO menv $ findExecutable $ T.unpack toolName + skipIf $ isRight eFound + -- From Cabal 1.12, build-tools can specify another executable in the same + -- package. + notPackageExe toolName = + MaybeT $ skipIf $ + collectionMember (unqualCompFromText toolName) p.executables + warn name = MaybeT . pure . 
Just $ ToolWarning (ExeName name) p.name + skipIf p' = pure $ if p' then Nothing else Just () + +toolWarningText :: ToolWarning -> StyleDoc +toolWarningText (ToolWarning (ExeName toolName) pkgName') = fillSep + [ flow "No packages found in snapshot which provide a" + , style PkgComponent (fromString $ show toolName) + , flow "executable, which is a build-tool dependency of" + , style Current (fromPackageName pkgName') + ] + +-- | Is the given package/version combo defined in the snapshot or in the global +-- database? inSnapshot :: PackageName -> Version -> M Bool inSnapshot name version = do - ctx <- ask - return $ fromMaybe False $ do - ps <- Map.lookup name (combinedMap ctx) - case ps of - PIOnlySource (PSRemote _ srcVersion FromSnapshot _) -> - return $ srcVersion == version - PIBoth (PSRemote _ srcVersion FromSnapshot _) _ -> - return $ srcVersion == version - -- OnlyInstalled occurs for global database - PIOnlyInstalled loc (Library pid _gid _lic) -> - assert (loc == Snap) $ - assert (pkgVersion pid == version) $ - Just True - _ -> return False - -data ConstructPlanException - = DependencyCycleDetected [PackageName] - | DependencyPlanFailures Package (Map PackageName (VersionRange, LatestApplicableVersion, BadDependency)) - | UnknownPackage PackageName -- TODO perhaps this constructor will be removed, and BadDependency will handle it all - -- ^ Recommend adding to extra-deps, give a helpful version number? - deriving (Typeable, Eq, Show) - --- | The latest applicable version and it's latest cabal file revision. 
--- For display purposes only, Nothing if package not found -type LatestApplicableVersion = Maybe (Version, BlobKey) - --- | Reason why a dependency was not used -data BadDependency - = NotInBuildPlan - | Couldn'tResolveItsDependencies Version - | DependencyMismatch Version - | HasNoLibrary - -- ^ See description of 'DepType' - | BDDependencyCycleDetected ![PackageName] - deriving (Typeable, Eq, Ord, Show) + ctx <- ask + pure $ fromMaybe False $ + Map.lookup name ctx.combinedMap >>= \case + PIOnlySource (PSRemote _ srcVersion FromSnapshot _) -> + pure $ srcVersion == version + PIBoth (PSRemote _ srcVersion FromSnapshot _) _ -> + pure $ srcVersion == version + -- OnlyInstalled occurs for global database + PIOnlyInstalled loc (Library pid _) -> + assert (loc == Snap) $ + assert (pkgVersion pid == version) $ + Just True + _ -> pure False -- TODO: Consider intersecting version ranges for multiple deps on a -- package. This is why VersionRange is in the parent map. -pprintExceptions - :: [ConstructPlanException] - -> Path Abs File - -> Path Abs Dir - -> ParentMap - -> Set PackageName - -> Map PackageName [PackageName] - -> StyleDoc -pprintExceptions exceptions stackYaml stackRoot parentMap wanted' prunedGlobalDeps = - mconcat $ - [ flow "While constructing the build plan, the following exceptions were encountered:" - , line <> line - , mconcat (intersperse (line <> line) (mapMaybe pprintException exceptions')) - , line <> line - , flow "Some different approaches to resolving this:" - , line <> line - ] ++ - (if not onlyHasDependencyMismatches then [] else - [ " *" <+> align (flow "Set 'allow-newer: true' in " <+> pretty (defaultUserConfigPath stackRoot) <+> "to ignore all version constraints and build anyway.") - , line <> line - ] - ) ++ addExtraDepsRecommendations - - where - exceptions' = {- should we dedupe these somehow? 
nubOrd -} exceptions - - addExtraDepsRecommendations - | Map.null extras = [] - | (Just _) <- Map.lookup (mkPackageName "base") extras = - [ " *" <+> align (flow "Build requires unattainable version of base. Since base is a part of GHC, you most likely need to use a different GHC version with the matching base.") - , line - ] - | otherwise = - [ " *" <+> align - (style Recommendation (flow "Recommended action:") <+> - flow "try adding the following to your extra-deps in" <+> - pretty stackYaml <> ":") - , line <> line - , vsep (map pprintExtra (Map.toList extras)) - , line - ] - - extras = Map.unions $ map getExtras exceptions' - getExtras DependencyCycleDetected{} = Map.empty - getExtras UnknownPackage{} = Map.empty - getExtras (DependencyPlanFailures _ m) = - Map.unions $ map go $ Map.toList m - where - -- TODO: Likely a good idea to distinguish these to the user. In particular, for DependencyMismatch - go (name, (_range, Just (version,cabalHash), NotInBuildPlan)) = - Map.singleton name (version,cabalHash) - go (name, (_range, Just (version,cabalHash), DependencyMismatch{})) = - Map.singleton name (version, cabalHash) - go _ = Map.empty - pprintExtra (name, (version, BlobKey cabalHash cabalSize)) = - let cfInfo = CFIHash cabalHash (Just cabalSize) - packageIdRev = PackageIdentifierRevision name version cfInfo - in fromString ("- " ++ T.unpack (utf8BuilderToText (RIO.display packageIdRev))) - - allNotInBuildPlan = Set.fromList $ concatMap toNotInBuildPlan exceptions' - toNotInBuildPlan (DependencyPlanFailures _ pDeps) = - map fst $ filter (\(_, (_, _, badDep)) -> badDep == NotInBuildPlan) $ Map.toList pDeps - toNotInBuildPlan _ = [] - - -- This checks if 'allow-newer: true' could resolve all issues. 
- onlyHasDependencyMismatches = all go exceptions' - where - go DependencyCycleDetected{} = False - go UnknownPackage{} = False - go (DependencyPlanFailures _ m) = - all (\(_, _, depErr) -> isMismatch depErr) (M.elems m) - isMismatch DependencyMismatch{} = True - isMismatch Couldn'tResolveItsDependencies{} = True - isMismatch _ = False - - pprintException (DependencyCycleDetected pNames) = Just $ - flow "Dependency cycle detected in packages:" <> line <> - indent 4 (encloseSep "[" "]" "," (map (style Error . fromString . packageNameString) pNames)) - pprintException (DependencyPlanFailures pkg pDeps) = - case mapMaybe pprintDep (Map.toList pDeps) of - [] -> Nothing - depErrors -> Just $ - flow "In the dependencies for" <+> pkgIdent <> - pprintFlags (packageFlags pkg) <> ":" <> line <> - indent 4 (vsep depErrors) <> - case getShortestDepsPath parentMap wanted' (packageName pkg) of - Nothing -> line <> flow "needed for unknown reason - stack invariant violated." - Just [] -> line <> flow "needed since" <+> pkgName' <+> flow "is a build target." - Just (target:path) -> line <> flow "needed due to" <+> encloseSep "" "" " -> " pathElems - where - pathElems = - [style Target . fromString . packageIdentifierString $ target] ++ - map (fromString . packageIdentifierString) path ++ - [pkgIdent] - where - pkgName' = style Current . fromString . packageNameString $ packageName pkg - pkgIdent = style Current . fromString . packageIdentifierString $ packageIdentifier pkg - -- Skip these when they are redundant with 'NotInBuildPlan' info. - pprintException (UnknownPackage name) - | name `Set.member` allNotInBuildPlan = Nothing - | name `Set.member` wiredInPackages = - Just $ flow "Can't build a package with same name as a wired-in-package:" <+> (style Current . fromString . packageNameString $ name) - | Just pruned <- Map.lookup name prunedGlobalDeps = - let prunedDeps = map (style Current . fromString . 
packageNameString) pruned - in Just $ flow "Can't use GHC boot package" <+> - (style Current . fromString . packageNameString $ name) <+> - flow "when it has an overridden dependency (issue #4510);" <+> - flow "you need to add the following as explicit dependencies to the project:" <+> - line <+> encloseSep "" "" ", " prunedDeps - | otherwise = Just $ flow "Unknown package:" <+> (style Current . fromString . packageNameString $ name) - - pprintFlags flags - | Map.null flags = "" - | otherwise = parens $ sep $ map pprintFlag $ Map.toList flags - pprintFlag (name, True) = "+" <> fromString (flagNameString name) - pprintFlag (name, False) = "-" <> fromString (flagNameString name) - - pprintDep (name, (range, mlatestApplicable, badDep)) = case badDep of - NotInBuildPlan - | name `elem` fold prunedGlobalDeps -> Just $ - style Error (fromString $ packageNameString name) <+> - align ((if range == Cabal.anyVersion - then flow "needed" - else flow "must match" <+> goodRange) <> "," <> softline <> - flow "but this GHC boot package has been pruned (issue #4510);" <+> - flow "you need to add the package explicitly to extra-deps" <+> - latestApplicable Nothing) - | otherwise -> Just $ - style Error (fromString $ packageNameString name) <+> - align ((if range == Cabal.anyVersion - then flow "needed" - else flow "must match" <+> goodRange) <> "," <> softline <> - flow "but the stack configuration has no specified version" <+> - latestApplicable Nothing) - -- TODO: For local packages, suggest editing constraints - DependencyMismatch version -> Just $ - (style Error . fromString . packageIdentifierString) (PackageIdentifier name version) <+> - align (flow "from stack configuration does not match" <+> goodRange <+> - latestApplicable (Just version)) - -- I think the main useful info is these explain why missing - -- packages are needed. Instead lets give the user the shortest - -- path from a target to the package. 
- Couldn'tResolveItsDependencies _version -> Nothing - HasNoLibrary -> Just $ - style Error (fromString $ packageNameString name) <+> - align (flow "is a library dependency, but the package provides no library") - BDDependencyCycleDetected names -> Just $ - style Error (fromString $ packageNameString name) <+> - align (flow $ "dependency cycle detected: " ++ intercalate ", " (map packageNameString names)) - where - goodRange = style Good (fromString (Cabal.display range)) - latestApplicable mversion = - case mlatestApplicable of - Nothing - | isNothing mversion -> - flow "(no package with that name found, perhaps there is a typo in a package's build-depends or an omission from the stack.yaml packages list?)" - | otherwise -> "" - Just (laVer, _) - | Just laVer == mversion -> softline <> - flow "(latest matching version is specified)" - | otherwise -> softline <> - flow "(latest matching version is" <+> style Good (fromString $ versionString laVer) <> ")" - --- | Get the shortest reason for the package to be in the build plan. In --- other words, trace the parent dependencies back to a 'wanted' --- package. -getShortestDepsPath - :: ParentMap - -> Set PackageName - -> PackageName - -> Maybe [PackageIdentifier] -getShortestDepsPath (MonoidMap parentsMap) wanted' name = - if Set.member name wanted' - then Just [] - else case M.lookup name parentsMap of - Nothing -> Nothing - Just (_, parents) -> Just $ findShortest 256 paths0 - where - paths0 = M.fromList $ map (\(ident, _) -> (pkgName ident, startDepsPath ident)) parents - where - -- The 'paths' map is a map from PackageName to the shortest path - -- found to get there. It is the frontier of our breadth-first - -- search of dependencies. 
- findShortest :: Int -> Map PackageName DepsPath -> [PackageIdentifier] - findShortest fuel _ | fuel <= 0 = - [PackageIdentifier (mkPackageName "stack-ran-out-of-jet-fuel") (mkVersion [0])] - findShortest _ paths | M.null paths = [] - findShortest fuel paths = - case targets of - [] -> findShortest (fuel - 1) $ M.fromListWith chooseBest $ concatMap extendPath recurses - _ -> let (DepsPath _ _ path) = minimum (map snd targets) in path - where - (targets, recurses) = partition (\(n, _) -> n `Set.member` wanted') (M.toList paths) - chooseBest :: DepsPath -> DepsPath -> DepsPath - chooseBest x y = if x > y then x else y - -- Extend a path to all its parents. - extendPath :: (PackageName, DepsPath) -> [(PackageName, DepsPath)] - extendPath (n, dp) = - case M.lookup n parentsMap of - Nothing -> [] - Just (_, parents) -> map (\(pkgId, _) -> (pkgName pkgId, extendDepsPath pkgId dp)) parents - -data DepsPath = DepsPath - { dpLength :: Int -- ^ Length of dpPath - , dpNameLength :: Int -- ^ Length of package names combined - , dpPath :: [PackageIdentifier] -- ^ A path where the packages later - -- in the list depend on those that - -- come earlier - } - deriving (Eq, Ord, Show) - -startDepsPath :: PackageIdentifier -> DepsPath -startDepsPath ident = DepsPath - { dpLength = 1 - , dpNameLength = length (packageNameString (pkgName ident)) - , dpPath = [ident] - } - -extendDepsPath :: PackageIdentifier -> DepsPath -> DepsPath -extendDepsPath ident dp = DepsPath - { dpLength = dpLength dp + 1 - , dpNameLength = dpNameLength dp + length (packageNameString (pkgName ident)) - , dpPath = [ident] - } +logDebugPlanS :: + (HasCallStack, HasRunner env, MonadIO m, MonadReader env m) + => LogSource + -> Utf8Builder + -> m () +logDebugPlanS s msg = do + debugPlan <- view $ globalOptsL . to (.planInLog) + when debugPlan $ logDebugS s msg + +-- | A function to summarise a result. Assumes that 'Left' is an error and +-- 'Right' is not. 
Intended to be used to annotate, so includes an initial space +-- character. +summariseResult :: Either a b -> Utf8Builder +summariseResult res = " (" <> either (const "error") (const "ok") res <> ")" + +-- | A function to yield a 'PackageInfo' value from: (1) a 'PackageSource' +-- value; and (2) a pair of an 'InstallLocation' value and an 'Installed' value. +-- Checks that the version of the 'PackageSource' value and the version of the +-- `Installed` value are the same. +combineSourceInstalled :: + PackageSource + -> (InstallLocation, Installed) + -> PackageInfo +combineSourceInstalled ps (location, installed) = + assert (psVersion ps == installedVersion installed) $ + case location of + -- Always trust something in the snapshot + Snap -> PIOnlyInstalled location installed + Local -> PIBoth ps installed --- Switch this to 'True' to enable some debugging putStrLn in this module -planDebug :: MonadIO m => String -> m () -planDebug = if False then liftIO . putStrLn else \_ -> return () +-- | A function to yield a 'CombinedMap' value from: (1) a dictionary of package +-- names, and where the source code of the named package is located; and (2) an +-- 'InstalledMap' value. 
+combineMap :: Map PackageName PackageSource -> InstalledMap -> CombinedMap +combineMap = Map.merge + (Map.mapMissing (\_ s -> PIOnlySource s)) + (Map.mapMissing (\_ i -> uncurry PIOnlyInstalled i)) + (Map.zipWithMatched (\_ s i -> combineSourceInstalled s i)) + +toCachePkgSrc :: PackageSource -> CachePkgSrc +toCachePkgSrc (PSFilePath lp) = + CacheSrcLocal (toFilePath (parent lp.cabalFP)) +toCachePkgSrc PSRemote{} = CacheSrcUpstream diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index a409313f09..a64035fbdf 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -1,2300 +1,651 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE TupleSections #-} --- | Perform a build +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +{-| +Module : Stack.Build.Execute +Description : Perform a build. +License : BSD-3-Clause + +Perform a build. +-} + module Stack.Build.Execute - ( printPlan - , preFetch - , executePlan - -- * Running Setup.hs - , ExecuteEnv - , withExecuteEnv - , withSingleContext - , ExcludeTHLoading(..) - , KeepOutputOpen(..) - ) where + ( printPlan + , preFetch + , executePlan + -- * Running Setup.hs + , ExcludeTHLoading (..) + , KeepOutputOpen (..) 
+ ) where import Control.Concurrent.Execute -import Control.Concurrent.STM (check) -import Stack.Prelude hiding (Display (..)) -import Crypto.Hash -import Data.Attoparsec.Text hiding (try) -import qualified Data.ByteArray as Mem (convert) -import qualified Data.ByteString as S -import qualified Data.ByteString.Builder -import qualified Data.ByteString.Lazy as BL -import qualified Data.ByteString.Base64.URL as B64URL -import Data.Char (isSpace) -import Conduit -import qualified Data.Conduit.Binary as CB -import qualified Data.Conduit.Filesystem as CF -import qualified Data.Conduit.List as CL -import Data.Conduit.Process.Typed (createSource) -import qualified Data.Conduit.Text as CT -import Data.List hiding (any) -import Data.List.NonEmpty (nonEmpty) -import qualified Data.List.NonEmpty as NonEmpty (toList) -import Data.List.Split (chunksOf) -import qualified Data.Map.Strict as M + ( Action (..), ActionId (..), ActionType (..) + , Concurrency (..), runActions + ) +import Control.Concurrent.STM ( check ) +import Control.Monad.Extra ( whenJust ) +import qualified Data.List as L +import Data.List.Split ( chunksOf ) +import qualified Data.Map.Merge.Strict as Map import qualified Data.Map.Strict as Map import qualified Data.Set as Set import qualified Data.Text as T -import Data.Text.Encoding (decodeUtf8) -import Data.Tuple -import Data.Time (ZonedTime, getZonedTime, formatTime, defaultTimeLocale) -import qualified Data.ByteString.Char8 as S8 -import qualified Distribution.PackageDescription as C -import qualified Distribution.Simple.Build.Macros as C -import Distribution.System (OS (Windows), - Platform (Platform)) -import qualified Distribution.Text as C -import Distribution.Types.PackageName (mkPackageName) -import Distribution.Types.UnqualComponentName (mkUnqualComponentName) -import Distribution.Version (mkVersion) -import Path -import Path.CheckInstall -import Path.Extra (toFilePathNoTrailingSep, rejectMissingFile) -import Path.IO hiding (findExecutable, 
makeAbsolute, withSystemTempDir) -import qualified RIO -import Stack.Build.Cache +import Data.Tuple ( swap ) +import Distribution.System ( OS (..), Platform (..) ) +import Path ( (), parent ) +import Path.CheckInstall ( warnInstallSearchPathIssues ) +import Path.Extra ( forgivingResolveFile, rejectMissingFile ) +import Path.IO ( ensureDir ) +import RIO.NonEmpty ( nonEmpty ) +import qualified RIO.NonEmpty as NE +import RIO.Process ( HasProcessContext (..), proc, runProcess_ ) +import Stack.Build.ExecuteEnv ( ExecuteEnv (..), withExecuteEnv ) +import Stack.Build.ExecutePackage + ( singleBench, singleBuild, singleTest ) import Stack.Build.Haddock -import Stack.Build.Installed -import Stack.Build.Source -import Stack.Build.Target -import Stack.Config -import Stack.Constants -import Stack.Constants.Config + ( generateDepsHaddockIndex + , generateLocalHaddockForHackageArchives + , generateLocalHaddockIndex, generateSnapHaddockIndex + , openHaddocksInBrowser + ) +import Stack.Constants ( bindirSuffix ) import Stack.Coverage -import Stack.DefaultColorWhen (defaultColorWhen) -import Stack.GhcPkg -import Stack.Package -import Stack.PackageDump + ( deleteHpcReports, generateHpcMarkupIndex + , generateHpcUnifiedReport + ) +import Stack.GhcPkg ( unregisterGhcPkgIds ) +import Stack.Prelude import Stack.Types.Build -import Stack.Types.Compiler -import Stack.Types.Config -import Stack.Types.GhcPkgId + ( ExcludeTHLoading (..), KeepOutputOpen (..) ) +import Stack.Types.Build.Exception ( BuildPrettyException (..) ) +import Stack.Types.BuildOpts + ( BenchmarkOpts (..), BuildOpts (..), TestOpts (..) ) +import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..) ) +import Stack.Types.BuildOptsMonoid ( ProgressBarFormat (..) 
) +import Stack.Types.CompilerPaths ( HasCompiler (..), getGhcPkgExe ) +import Stack.Types.ComponentUtils + ( StackUnqualCompName, unqualCompToString ) +import Stack.Types.Config ( Config (..), HasConfig (..), buildOptsL ) +import Stack.Types.ConfigureOpts ( BaseConfigOpts (..) ) +import Stack.Types.DumpPackage ( DumpPackage (..) ) +import Stack.Types.EnvConfig + ( HasEnvConfig (..), bindirCompilerTools + , installationRootDeps, installationRootLocal + , packageDatabaseLocal + ) +import Stack.Types.EnvSettings ( EnvSettings (..) ) +import Stack.Types.GhcPkgId ( GhcPkgId ) +import Stack.Types.Installed + ( InstallLocation (..), InstalledMap + , installedPackageIdentifier + ) import Stack.Types.NamedComponent + ( NamedComponent, benchComponents, testComponents ) import Stack.Types.Package -import Stack.Types.Version + ( LocalPackage (..), Package (..), packageIdentifier ) +import Stack.Types.Plan + ( Plan (..), Task (..), TaskConfigOpts (..), TaskType (..) + , taskLocation, taskProvides + ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.Runner ( terminalL, viewExecutablePath ) +import Stack.Types.SourceMap ( Target ) import qualified System.Directory as D -import System.Environment (getExecutablePath, lookupEnv) -import System.FileLock (withTryFileLock, SharedExclusive (Exclusive), withFileLock) import qualified System.FilePath as FP -import System.IO.Error (isDoesNotExistError) -import System.PosixCompat.Files (createLink, modificationTime, getFileStatus) -import RIO.PrettyPrint -import RIO.Process -import Pantry.Internal.Companion --- | Has an executable been built or not? -data ExecutableBuildStatus - = ExecutableBuilt - | ExecutableNotBuilt - deriving (Show, Eq, Ord) - --- | Fetch the packages necessary for a build, for example in combination with a dry run. +-- | Fetch the packages necessary for a build, for example in combination with +-- a dry run. 
preFetch :: HasEnvConfig env => Plan -> RIO env () preFetch plan - | Set.null pkgLocs = logDebug "Nothing to fetch" - | otherwise = do - logDebug $ - "Prefetching: " <> - mconcat (intersperse ", " (RIO.display <$> Set.toList pkgLocs)) - fetchPackages pkgLocs - where - pkgLocs = Set.unions $ map toPkgLoc $ Map.elems $ planTasks plan - - toPkgLoc task = - case taskType task of - TTLocalMutable{} -> Set.empty - TTRemotePackage _ _ pkgloc -> Set.singleton pkgloc + | Set.null pkgLocs = logDebug "Nothing to fetch" + | otherwise = do + logDebug $ + "Prefetching: " + <> mconcat (L.intersperse ", " (display <$> Set.toList pkgLocs)) + fetchPackages pkgLocs + where + pkgLocs = Set.unions $ map toPkgLoc $ Map.elems plan.tasks + + toPkgLoc task = + case task.taskType of + TTLocalMutable{} -> Set.empty + TTRemotePackage _ _ pkgloc -> Set.singleton pkgloc -- | Print a description of build plan for human consumption. -printPlan :: HasRunner env => Plan -> RIO env () +printPlan :: HasEnvConfig env => Plan -> RIO env () printPlan plan = do - case Map.elems $ planUnregisterLocal plan of - [] -> logInfo "No packages would be unregistered." - xs -> do - logInfo "Would unregister locally:" - forM_ xs $ \(ident, reason) -> logInfo $ - fromString (packageIdentifierString ident) <> - if T.null reason - then "" - else " (" <> RIO.display reason <> ")" - - logInfo "" - - case Map.elems $ planTasks plan of - [] -> logInfo "Nothing to build." - xs -> do - logInfo "Would build:" - mapM_ (logInfo . displayTask) xs - - let hasTests = not . Set.null . testComponents . taskComponents - hasBenches = not . Set.null . benchComponents . taskComponents - tests = Map.elems $ Map.filter hasTests $ planFinals plan - benches = Map.elems $ Map.filter hasBenches $ planFinals plan - - unless (null tests) $ do - logInfo "" - logInfo "Would test:" - mapM_ (logInfo . displayTask) tests - unless (null benches) $ do - logInfo "" - logInfo "Would benchmark:" - mapM_ (logInfo . 
displayTask) benches - - logInfo "" - - case Map.toList $ planInstallExes plan of - [] -> logInfo "No executables to be installed." - xs -> do - logInfo "Would install executables:" - forM_ xs $ \(name, loc) -> logInfo $ - RIO.display name <> - " from " <> - (case loc of - Snap -> "snapshot" - Local -> "local") <> - " database" + case Map.elems plan.unregisterLocal of + [] -> prettyInfo $ + flow "No packages would be unregistered." + <> line + xs -> do + let unregisterMsg (ident, reason) = fillSep $ + fromString (packageIdentifierString ident) + : [ parens $ flow (T.unpack reason) | not $ T.null reason ] + prettyInfo $ + flow "Would unregister locally:" + <> line + <> bulletedList (map unregisterMsg xs) + <> line + + case Map.elems plan.tasks of + [] -> prettyInfo $ + flow "Nothing to build." + <> line + xs -> do + prettyInfo $ + flow "Would build:" + <> line + <> bulletedList (map displayTask xs) + <> line + + buildOpts <- view buildOptsL + let hasTests = not . Set.null . testComponents . taskComponents + hasBenches = not . Set.null . benchComponents . taskComponents + tests = Map.elems $ Map.filter hasTests plan.finals + benches = Map.elems $ Map.filter hasBenches plan.finals + runTests = buildOpts.testOpts.runTests + runBenchmarks = buildOpts.benchmarkOpts.runBenchmarks + + unless (null tests) $ + if runTests + then + prettyInfo $ + flow "Would test:" + <> line + <> bulletedList (map displayTask tests) + <> line + else + prettyInfo $ + fillSep + [ flow "Would not test, as running disabled by" + , style Shell "--no-run-tests" + , "flag." + ] + <> line + + unless (null benches) $ + if runBenchmarks + then + prettyInfo $ + flow "Would benchmark:" + <> line + <> bulletedList (map displayTask benches) + <> line + else + prettyInfo $ + fillSep + [ flow "Would not benchmark, as running disabled by" + , style Shell "--no-run-benchmarks" + , "flag." + ] + <> line + + case Map.toList plan.installExes of + [] -> prettyInfo $ + flow "No executables to be installed." 
+ <> line + xs -> do + let executableMsg (name, loc) = fillSep $ + fromString (unqualCompToString name) + : "from" + : ( case loc of + Snap -> "snapshot" :: StyleDoc + Local -> "local" :: StyleDoc + ) + : ["database."] + prettyInfo $ + flow "Would install executables:" + <> line + <> bulletedList (map executableMsg xs) + <> line -- | For a dry run -displayTask :: Task -> Utf8Builder -displayTask task = - fromString (packageIdentifierString (taskProvides task)) <> - ": database=" <> - (case taskLocation task of - Snap -> "snapshot" - Local -> "local") <> - ", source=" <> - (case taskType task of - TTLocalMutable lp -> fromString $ toFilePath $ parent $ lpCabalFile lp - TTRemotePackage _ _ pl -> RIO.display pl) <> - (if Set.null missing - then "" - else ", after: " <> - mconcat (intersperse "," (fromString . packageIdentifierString <$> Set.toList missing))) - where - missing = tcoMissing $ taskConfigOpts task - -data ExecuteEnv = ExecuteEnv - { eeConfigureLock :: !(MVar ()) - , eeInstallLock :: !(MVar ()) - , eeBuildOpts :: !BuildOpts - , eeBuildOptsCLI :: !BuildOptsCLI - , eeBaseConfigOpts :: !BaseConfigOpts - , eeGhcPkgIds :: !(TVar (Map PackageIdentifier Installed)) - , eeTempDir :: !(Path Abs Dir) - , eeSetupHs :: !(Path Abs File) - -- ^ Temporary Setup.hs for simple builds - , eeSetupShimHs :: !(Path Abs File) - -- ^ Temporary SetupShim.hs, to provide access to initial-build-steps - , eeSetupExe :: !(Maybe (Path Abs File)) - -- ^ Compiled version of eeSetupHs - , eeCabalPkgVer :: !Version - , eeTotalWanted :: !Int - , eeLocals :: ![LocalPackage] - , eeGlobalDB :: !(Path Abs Dir) - , eeGlobalDumpPkgs :: !(Map GhcPkgId DumpPackage) - , eeSnapshotDumpPkgs :: !(TVar (Map GhcPkgId DumpPackage)) - , eeLocalDumpPkgs :: !(TVar (Map GhcPkgId DumpPackage)) - , eeLogFiles :: !(TChan (Path Abs Dir, Path Abs File)) - , eeCustomBuilt :: !(IORef (Set PackageName)) - -- ^ Stores which packages with custom-setup have already had their - -- Setup.hs built. 
- , eeLargestPackageName :: !(Maybe Int) - -- ^ For nicer interleaved output: track the largest package name size - , eePathEnvVar :: !Text - -- ^ Value of the PATH environment variable - } - -buildSetupArgs :: [String] -buildSetupArgs = - [ "-rtsopts" - , "-threaded" - , "-clear-package-db" - , "-global-package-db" - , "-hide-all-packages" - , "-package" - , "base" - , "-main-is" - , "StackSetupShim.mainOverride" +displayTask :: Task -> StyleDoc +displayTask task = fillSep $ + [ fromString (packageIdentifierString (taskProvides task)) <> ":" + , "database=" + <> ( case taskLocation task of + Snap -> "snapshot" :: StyleDoc + Local -> "local" :: StyleDoc + ) + <> "," + , "source=" + <> ( case task.taskType of + TTLocalMutable lp -> pretty $ parent lp.cabalFP + TTRemotePackage _ _ pl -> fromString $ T.unpack $ textDisplay pl + ) + <> if Set.null missing + then mempty + else "," ] - -simpleSetupCode :: Builder -simpleSetupCode = "import Distribution.Simple\nmain = defaultMain" - -simpleSetupHash :: String -simpleSetupHash = - T.unpack $ decodeUtf8 $ S.take 8 $ B64URL.encode $ Mem.convert $ hashWith SHA256 $ - toStrictBytes $ - Data.ByteString.Builder.toLazyByteString $ - encodeUtf8Builder (T.pack (unwords buildSetupArgs)) <> setupGhciShimCode <> simpleSetupCode - --- | Get a compiled Setup exe -getSetupExe :: HasEnvConfig env - => Path Abs File -- ^ Setup.hs input file - -> Path Abs File -- ^ SetupShim.hs input file - -> Path Abs Dir -- ^ temporary directory - -> RIO env (Maybe (Path Abs File)) -getSetupExe setupHs setupShimHs tmpdir = do - wc <- view $ actualCompilerVersionL.whichCompilerL - platformDir <- platformGhcRelDir - config <- view configL - cabalVersionString <- view $ cabalVersionL.to versionString - actualCompilerVersionString <- view $ actualCompilerVersionL.to compilerVersionString - platform <- view platformL - let baseNameS = concat - [ "Cabal-simple_" - , simpleSetupHash - , "_" - , cabalVersionString - , "_" - , actualCompilerVersionString - ] - 
exeNameS = baseNameS ++ - case platform of - Platform _ Windows -> ".exe" - _ -> "" - outputNameS = - case wc of - Ghc -> exeNameS - setupDir = - view stackRootL config - relDirSetupExeCache - platformDir - - exePath <- (setupDir ) <$> parseRelFile exeNameS - - exists <- liftIO $ D.doesFileExist $ toFilePath exePath - - if exists - then return $ Just exePath - else do - tmpExePath <- fmap (setupDir ) $ parseRelFile $ "tmp-" ++ exeNameS - tmpOutputPath <- fmap (setupDir ) $ parseRelFile $ "tmp-" ++ outputNameS - ensureDir setupDir - let args = buildSetupArgs ++ - [ "-package" - , "Cabal-" ++ cabalVersionString - , toFilePath setupHs - , toFilePath setupShimHs - , "-o" - , toFilePath tmpOutputPath - ] - compilerPath <- getCompilerPath - withWorkingDir (toFilePath tmpdir) (proc (toFilePath compilerPath) args $ \pc0 -> do - let pc = setStdout (useHandleOpen stderr) pc0 - runProcess_ pc) - `catch` \ece -> - throwM $ SetupHsBuildFailure (eceExitCode ece) Nothing compilerPath args Nothing [] - renameFile tmpExePath exePath - return $ Just exePath - --- | Execute a function that takes an 'ExecuteEnv'. -withExecuteEnv :: forall env a. 
HasEnvConfig env - => BuildOpts - -> BuildOptsCLI - -> BaseConfigOpts - -> [LocalPackage] - -> [DumpPackage] -- ^ global packages - -> [DumpPackage] -- ^ snapshot packages - -> [DumpPackage] -- ^ local packages - -> Maybe Int -- ^ largest package name, for nicer interleaved output - -> (ExecuteEnv -> RIO env a) - -> RIO env a -withExecuteEnv bopts boptsCli baseConfigOpts locals globalPackages snapshotPackages localPackages mlargestPackageName inner = - createTempDirFunction stackProgName $ \tmpdir -> do - configLock <- liftIO $ newMVar () - installLock <- liftIO $ newMVar () - idMap <- liftIO $ newTVarIO Map.empty - config <- view configL - - customBuiltRef <- newIORef Set.empty - - -- Create files for simple setup and setup shim, if necessary - let setupSrcDir = - view stackRootL config - relDirSetupExeSrc - ensureDir setupSrcDir - setupFileName <- parseRelFile ("setup-" ++ simpleSetupHash ++ ".hs") - let setupHs = setupSrcDir setupFileName - setupHsExists <- doesFileExist setupHs - unless setupHsExists $ writeBinaryFileAtomic setupHs simpleSetupCode - setupShimFileName <- parseRelFile ("setup-shim-" ++ simpleSetupHash ++ ".hs") - let setupShimHs = setupSrcDir setupShimFileName - setupShimHsExists <- doesFileExist setupShimHs - unless setupShimHsExists $ writeBinaryFileAtomic setupShimHs setupGhciShimCode - setupExe <- getSetupExe setupHs setupShimHs tmpdir - - cabalPkgVer <- view cabalVersionL - globalDB <- view $ compilerPathsL.to cpGlobalDB - snapshotPackagesTVar <- liftIO $ newTVarIO (toDumpPackagesByGhcPkgId snapshotPackages) - localPackagesTVar <- liftIO $ newTVarIO (toDumpPackagesByGhcPkgId localPackages) - logFilesTChan <- liftIO $ atomically newTChan - let totalWanted = length $ filter lpWanted locals - pathEnvVar <- liftIO $ maybe mempty T.pack <$> lookupEnv "PATH" - inner ExecuteEnv - { eeBuildOpts = bopts - , eeBuildOptsCLI = boptsCli - -- Uncertain as to why we cannot run configures in parallel. This appears - -- to be a Cabal library bug. 
Original issue: - -- https://github.com/fpco/stack/issues/84. Ideally we'd be able to remove - -- this. - , eeConfigureLock = configLock - , eeInstallLock = installLock - , eeBaseConfigOpts = baseConfigOpts - , eeGhcPkgIds = idMap - , eeTempDir = tmpdir - , eeSetupHs = setupHs - , eeSetupShimHs = setupShimHs - , eeSetupExe = setupExe - , eeCabalPkgVer = cabalPkgVer - , eeTotalWanted = totalWanted - , eeLocals = locals - , eeGlobalDB = globalDB - , eeGlobalDumpPkgs = toDumpPackagesByGhcPkgId globalPackages - , eeSnapshotDumpPkgs = snapshotPackagesTVar - , eeLocalDumpPkgs = localPackagesTVar - , eeLogFiles = logFilesTChan - , eeCustomBuilt = customBuiltRef - , eeLargestPackageName = mlargestPackageName - , eePathEnvVar = pathEnvVar - } `finally` dumpLogs logFilesTChan totalWanted - where - toDumpPackagesByGhcPkgId = Map.fromList . map (\dp -> (dpGhcPkgId dp, dp)) - - createTempDirFunction - | boptsKeepTmpFiles bopts = withKeepSystemTempDir - | otherwise = withSystemTempDir - - dumpLogs :: TChan (Path Abs Dir, Path Abs File) -> Int -> RIO env () - dumpLogs chan totalWanted = do - allLogs <- fmap reverse $ liftIO $ atomically drainChan - case allLogs of - -- No log files generated, nothing to dump - [] -> return () - firstLog:_ -> do - toDump <- view $ configL.to configDumpLogs - case toDump of - DumpAllLogs -> mapM_ (dumpLog "") allLogs - DumpWarningLogs -> mapM_ dumpLogIfWarning allLogs - DumpNoLogs - | totalWanted > 1 -> - logInfo $ - "Build output has been captured to log files, use " <> - "--dump-logs to see it on the console" - | otherwise -> return () - logInfo $ "Log files have been written to: " <> - fromString (toFilePath (parent (snd firstLog))) - - -- We only strip the colors /after/ we've dumped logs, so that - -- we get pretty colors in our dump output on the terminal. - colors <- shouldForceGhcColorFlag - when colors $ liftIO $ mapM_ (stripColors . 
snd) allLogs - where - drainChan :: STM [(Path Abs Dir, Path Abs File)] - drainChan = do - mx <- tryReadTChan chan - case mx of - Nothing -> return [] - Just x -> do - xs <- drainChan - return $ x:xs - - dumpLogIfWarning :: (Path Abs Dir, Path Abs File) -> RIO env () - dumpLogIfWarning (pkgDir, filepath) = do - firstWarning <- withSourceFile (toFilePath filepath) $ \src -> - runConduit - $ src - .| CT.decodeUtf8Lenient - .| CT.lines - .| CL.map stripCR - .| CL.filter isWarning - .| CL.take 1 - unless (null firstWarning) $ dumpLog " due to warnings" (pkgDir, filepath) - - isWarning :: Text -> Bool - isWarning t = ": Warning:" `T.isSuffixOf` t -- prior to GHC 8 - || ": warning:" `T.isInfixOf` t -- GHC 8 is slightly different - || "mwarning:" `T.isInfixOf` t -- colorized output - - dumpLog :: String -> (Path Abs Dir, Path Abs File) -> RIO env () - dumpLog msgSuffix (pkgDir, filepath) = do - logInfo $ - "\n-- Dumping log file" <> - fromString msgSuffix <> - ": " <> - fromString (toFilePath filepath) <> - "\n" - compilerVer <- view actualCompilerVersionL - withSourceFile (toFilePath filepath) $ \src -> - runConduit - $ src - .| CT.decodeUtf8Lenient - .| mungeBuildOutput ExcludeTHLoading ConvertPathsToAbsolute pkgDir compilerVer - .| CL.mapM_ (logInfo . 
RIO.display) - logInfo $ "\n-- End of log file: " <> fromString (toFilePath filepath) <> "\n" - - stripColors :: Path Abs File -> IO () - stripColors fp = do - let colorfp = toFilePath fp ++ "-color" - withSourceFile (toFilePath fp) $ \src -> - withSinkFile colorfp $ \sink -> - runConduit $ src .| sink - withSourceFile colorfp $ \src -> - withSinkFile (toFilePath fp) $ \sink -> - runConduit $ src .| noColors .| sink - - where - noColors = do - CB.takeWhile (/= 27) -- ESC - mnext <- CB.head - case mnext of - Nothing -> return () - Just x -> assert (x == 27) $ do - -- Color sequences always end with an m - CB.dropWhile (/= 109) -- m - CB.drop 1 -- drop the m itself - noColors + <> [ fillSep $ + "after:" + : mkNarrativeList Nothing False + (map fromPackageId (Set.toList missing) :: [StyleDoc]) + | not $ Set.null missing + ] + where + missing = task.configOpts.missing -- | Perform the actual plan -executePlan :: HasEnvConfig env - => BuildOptsCLI - -> BaseConfigOpts - -> [LocalPackage] - -> [DumpPackage] -- ^ global packages - -> [DumpPackage] -- ^ snapshot packages - -> [DumpPackage] -- ^ local packages - -> InstalledMap - -> Map PackageName Target - -> Plan - -> RIO env () -executePlan boptsCli baseConfigOpts locals globalPackages snapshotPackages localPackages installedMap targets plan = do +executePlan :: + HasEnvConfig env + => BuildOptsCLI + -> BaseConfigOpts + -> [LocalPackage] + -> [DumpPackage] + -- ^ global packages + -> [DumpPackage] + -- ^ snapshot packages + -> [DumpPackage] + -- ^ project packages and local extra-deps + -> InstalledMap + -> Map PackageName Target + -> Plan + -> RIO env () +executePlan + boptsCli + baseConfigOpts + locals + globalPackages + snapshotPackages + localPackages + installedMap + targets + plan + = do logDebug "Executing the build plan" bopts <- view buildOptsL - withExecuteEnv bopts boptsCli baseConfigOpts locals globalPackages snapshotPackages localPackages mlargestPackageName + withExecuteEnv + bopts + boptsCli + 
baseConfigOpts + locals + globalPackages + snapshotPackages + localPackages + mlargestPackageName (executePlan' installedMap targets plan) - copyExecutables (planInstallExes plan) + copyExecutables plan.installExes config <- view configL - menv' <- liftIO $ configProcessContextSettings config EnvSettings - { esIncludeLocals = True - , esIncludeGhcPackagePath = True - , esStackExe = True - , esLocaleUtf8 = False - , esKeepGhcRts = False - } + menv' <- liftIO $ config.processContextSettings EnvSettings + { includeLocals = True + , includeGhcPackagePath = True + , stackExe = True + , localeUtf8 = False + , keepGhcRts = False + } withProcessContext menv' $ - forM_ (boptsCLIExec boptsCli) $ \(cmd, args) -> + forM_ boptsCli.exec $ \(cmd, args) -> proc cmd args runProcess_ - where - mlargestPackageName = - Set.lookupMax $ - Set.map (length . packageNameString) $ - Map.keysSet (planTasks plan) <> Map.keysSet (planFinals plan) - -copyExecutables - :: HasEnvConfig env - => Map Text InstallLocation - -> RIO env () -copyExecutables exes | Map.null exes = return () + where + mlargestPackageName = + Set.lookupMax $ + Set.map (length . packageNameString) $ + Map.keysSet plan.tasks <> Map.keysSet plan.finals + +copyExecutables :: + HasEnvConfig env + => Map StackUnqualCompName InstallLocation + -> RIO env () +copyExecutables exes | Map.null exes = pure () copyExecutables exes = do - snapBin <- ( bindirSuffix) `liftM` installationRootDeps - localBin <- ( bindirSuffix) `liftM` installationRootLocal - compilerSpecific <- boptsInstallCompilerTool <$> view buildOptsL - destDir <- if compilerSpecific - then bindirCompilerTools - else view $ configL.to configLocalBin - ensureDir destDir - - destDir' <- liftIO . D.canonicalizePath . 
toFilePath $ destDir - - platform <- view platformL - let ext = - case platform of - Platform _ Windows -> ".exe" - _ -> "" - - currExe <- liftIO getExecutablePath -- needed for windows, see below - - installed <- forMaybeM (Map.toList exes) $ \(name, loc) -> do - let bindir = - case loc of - Snap -> snapBin - Local -> localBin - mfp <- liftIO $ forgivingAbsence (resolveFile bindir $ T.unpack name ++ ext) - >>= rejectMissingFile - case mfp of - Nothing -> do - logWarn $ - "Couldn't find executable " <> - RIO.display name <> - " in directory " <> - fromString (toFilePath bindir) - return Nothing - Just file -> do - let destFile = destDir' FP. T.unpack name ++ ext - logInfo $ - "Copying from " <> - fromString (toFilePath file) <> - " to " <> - fromString destFile - - liftIO $ case platform of - Platform _ Windows | FP.equalFilePath destFile currExe -> - windowsRenameCopy (toFilePath file) destFile - _ -> D.copyFile (toFilePath file) destFile - return $ Just (name <> T.pack ext) - - unless (null installed) $ do - logInfo "" - logInfo $ - "Copied executables to " <> - fromString destDir' <> - ":" - forM_ installed $ \exe -> logInfo ("- " <> RIO.display exe) - unless compilerSpecific $ warnInstallSearchPathIssues destDir' installed - + snapBin <- ( bindirSuffix) <$> installationRootDeps + localBin <- ( bindirSuffix) <$> installationRootLocal + compilerSpecific <- (.installCompilerTool) <$> view buildOptsL + destDir <- if compilerSpecific + then bindirCompilerTools + else view $ configL . to (.localBin) + ensureDir destDir + + destDir' <- liftIO . D.canonicalizePath . 
toFilePath $ destDir + + platform <- view platformL + let ext = + case platform of + Platform _ Windows -> ".exe" + _ -> "" + + -- needed for windows, see below + currExe <- toFilePath <$> viewExecutablePath + + installed <- forMaybeM (Map.toList exes) $ \(name, loc) -> do + let strName = unqualCompToString name + bindir = + case loc of + Snap -> snapBin + Local -> localBin + mfp <- forgivingResolveFile bindir (strName ++ ext) + >>= rejectMissingFile + case mfp of + Nothing -> do + prettyWarnL + [ flow "Couldn't find executable" + , style Current (fromString strName) + , flow "in directory" + , pretty bindir <> "." + ] + pure Nothing + Just file -> do + let destFile = destDir' FP. strName ++ ext + prettyInfoL + [ flow "Copying from" + , pretty file + , "to" + , style File (fromString destFile) <> "." + ] + + liftIO $ case platform of + Platform _ Windows | FP.equalFilePath destFile currExe -> + windowsRenameCopy (toFilePath file) destFile + _ -> D.copyFile (toFilePath file) destFile + pure $ Just (strName ++ ext) + + unless (null installed) $ do + prettyInfo $ + fillSep + [ flow "Copied executables to" + , pretty destDir <> ":" + ] + <> line + <> bulletedList + (map fromString installed :: [StyleDoc]) + unless compilerSpecific $ warnInstallSearchPathIssues destDir' installed -- | Windows can't write over the current executable. Instead, we rename the -- current executable to something else and then do the copy. 
windowsRenameCopy :: FilePath -> FilePath -> IO () windowsRenameCopy src dest = do - D.copyFile src new - D.renameFile dest old - D.renameFile new dest - where - new = dest ++ ".new" - old = dest ++ ".old" + D.copyFile src new + D.renameFile dest old + D.renameFile new dest + where + new = dest ++ ".new" + old = dest ++ ".old" -- | Perform the actual plan (internal) -executePlan' :: HasEnvConfig env - => InstalledMap - -> Map PackageName Target - -> Plan - -> ExecuteEnv - -> RIO env () -executePlan' installedMap0 targets plan ee@ExecuteEnv {..} = do - when (toCoverage $ boptsTestOpts eeBuildOpts) deleteHpcReports - cv <- view actualCompilerVersionL - case nonEmpty . Map.toList $ planUnregisterLocal plan of - Nothing -> return () - Just ids -> do - localDB <- packageDatabaseLocal - unregisterPackages cv localDB ids - - liftIO $ atomically $ modifyTVar' eeLocalDumpPkgs $ \initMap -> - foldl' (flip Map.delete) initMap $ Map.keys (planUnregisterLocal plan) - - run <- askRunInIO - - -- If running tests concurrently with eachother, then create an MVar - -- which is empty while each test is being run. 
- concurrentTests <- view $ configL.to configConcurrentTests - mtestLock <- if concurrentTests then return Nothing else Just <$> liftIO (newMVar ()) - - let actions = concatMap (toActions installedMap' mtestLock run ee) $ Map.elems $ Map.mergeWithKey - (\_ b f -> Just (Just b, Just f)) - (fmap (\b -> (Just b, Nothing))) - (fmap (\f -> (Nothing, Just f))) - (planTasks plan) - (planFinals plan) - threads <- view $ configL.to configJobs - let keepGoing = - fromMaybe (not (M.null (planFinals plan))) (boptsKeepGoing eeBuildOpts) - terminal <- view terminalL - errs <- liftIO $ runActions threads keepGoing actions $ \doneVar actionsVar -> do - let total = length actions - loop prev - | prev == total = - run $ logStickyDone ("Completed " <> RIO.display total <> " action(s).") - | otherwise = do - inProgress <- readTVarIO actionsVar - let packageNames = map (\(ActionId pkgID _) -> pkgName pkgID) (toList inProgress) - nowBuilding :: [PackageName] -> Utf8Builder - nowBuilding [] = "" - nowBuilding names = mconcat $ ": " : intersperse ", " (map (fromString . 
packageNameString) names) - when terminal $ run $ - logSticky $ - "Progress " <> RIO.display prev <> "/" <> RIO.display total <> - nowBuilding packageNames - done <- atomically $ do - done <- readTVar doneVar - check $ done /= prev - return done - loop done - when (total > 1) $ loop 0 - when (toCoverage $ boptsTestOpts eeBuildOpts) $ do - generateHpcUnifiedReport - generateHpcMarkupIndex - unless (null errs) $ throwM $ ExecutionFailure errs - when (boptsHaddock eeBuildOpts) $ do - snapshotDumpPkgs <- liftIO (readTVarIO eeSnapshotDumpPkgs) - localDumpPkgs <- liftIO (readTVarIO eeLocalDumpPkgs) - generateLocalHaddockIndex eeBaseConfigOpts localDumpPkgs eeLocals - generateDepsHaddockIndex eeBaseConfigOpts eeGlobalDumpPkgs snapshotDumpPkgs localDumpPkgs eeLocals - generateSnapHaddockIndex eeBaseConfigOpts eeGlobalDumpPkgs snapshotDumpPkgs - when (boptsOpenHaddocks eeBuildOpts) $ do - let planPkgs, localPkgs, installedPkgs, availablePkgs - :: Map PackageName (PackageIdentifier, InstallLocation) - planPkgs = Map.map (taskProvides &&& taskLocation) (planTasks plan) - localPkgs = - Map.fromList - [(packageName p, (packageIdentifier p, Local)) | p <- map lpPackage eeLocals] - installedPkgs = Map.map (swap . 
second installedPackageIdentifier) installedMap' - availablePkgs = Map.unions [planPkgs, localPkgs, installedPkgs] - openHaddocksInBrowser eeBaseConfigOpts availablePkgs (Map.keysSet targets) - where - installedMap' = Map.difference installedMap0 - $ Map.fromList - $ map (\(ident, _) -> (pkgName ident, ())) - $ Map.elems - $ planUnregisterLocal plan +executePlan' :: + HasEnvConfig env + => InstalledMap + -> Map PackageName Target + -> Plan + -> ExecuteEnv + -> RIO env () +executePlan' installedMap0 targets plan ee = do + config <- view configL + let !buildOpts = ee.buildOpts + !testOpts = buildOpts.testOpts + !benchmarkOpts = buildOpts.benchmarkOpts + runTests = testOpts.runTests + runBenchmarks = benchmarkOpts.runBenchmarks + noNotifyIfNoRunTests = not config.notifyIfNoRunTests + noNotifyIfNoRunBenchmarks = not config.notifyIfNoRunBenchmarks + hasTests = not . Set.null . testComponents . taskComponents + hasBenches = not . Set.null . benchComponents . taskComponents + tests = Map.elems $ Map.filter hasTests plan.finals + benches = Map.elems $ Map.filter hasBenches plan.finals + when testOpts.coverage deleteHpcReports + whenJust (nonEmpty $ Map.toList plan.unregisterLocal) $ \ids -> do + localDB <- packageDatabaseLocal + unregisterPackages localDB ids + + liftIO $ atomically $ modifyTVar' ee.localDumpPkgs $ \initMap -> + foldl' (flip Map.delete) initMap $ Map.keys plan.unregisterLocal + + run <- askRunInIO + + -- If running tests concurrently with each other, then create an MVar + -- which is empty while each test is being run. + concurrentTests <- view $ configL . 
to (.concurrentTests) + mtestLock <- if concurrentTests + then pure Nothing + else Just <$> liftIO (newMVar ()) + + let actions = concatMap (toActions installedMap' mtestLock run ee) $ + Map.elems $ Map.merge + (Map.mapMissing (\_ b -> (Just b, Nothing))) + (Map.mapMissing (\_ f -> (Nothing, Just f))) + (Map.zipWithMatched (\_ b f -> (Just b, Just f))) + plan.tasks + plan.finals + threads <- view $ configL . to (.jobs) + let keepGoing = fromMaybe + (not (Map.null plan.finals)) + buildOpts.keepGoing + terminal <- view terminalL + terminalWidth <- view termWidthL + unless (noNotifyIfNoRunTests || runTests || null tests) $ + prettyInfo $ + fillSep + [ flow "All test running disabled by" + , style Shell "--no-run-tests" + , flow "flag. To mute this message in future, set" + , style Shell (flow "notify-if-no-run-tests: false") + , flow "in Stack's configuration." + ] + unless (noNotifyIfNoRunBenchmarks || runBenchmarks || null benches) $ + prettyInfo $ + fillSep + [ flow "All benchmark running disabled by" + , style Shell "--no-run-benchmarks" + , flow "flag. To mute this message in future, set" + , style Shell (flow "notify-if-no-run-benchmarks: false") + , flow "in Stack's configuration." 
+ ] + errs <- liftIO $ runActions threads keepGoing actions $ + \doneVar actionsVar -> do + let total = length actions + loop prev + | prev == total = + run $ logStickyDone + ( "Completed " <> display total <> " action(s).") + | otherwise = do + inProgress <- readTVarIO actionsVar + let packageNames = map + (\(ActionId pkgID _) -> pkgName pkgID) + (toList inProgress) + nowBuilding :: [PackageName] -> Utf8Builder + nowBuilding [] = "" + nowBuilding names = mconcat $ + ": " + : L.intersperse ", " (map fromPackageName names) + progressFormat = buildOpts.progressBar + progressLine prev' total' = + "Progress " + <> display prev' <> "/" <> display total' + <> if progressFormat == CountOnlyBar + then mempty + else nowBuilding packageNames + ellipsize n text = + if T.length text <= n || progressFormat /= CappedBar + then text + else T.take (n - 1) text <> "…" + when (terminal && progressFormat /= NoBar) $ + run $ logSticky $ display $ ellipsize terminalWidth $ + utf8BuilderToText $ progressLine prev total + done <- atomically $ do + done <- readTVar doneVar + check $ done /= prev + pure done + loop done + when (total > 1) $ loop 0 + when testOpts.coverage $ do + generateHpcUnifiedReport + generateHpcMarkupIndex + unless (null errs) $ + prettyThrowM $ ExecutionFailure errs + when buildOpts.buildHaddocks $ do + if buildOpts.haddockForHackage + then + generateLocalHaddockForHackageArchives ee.locals + else do + snapshotDumpPkgs <- liftIO (readTVarIO ee.snapshotDumpPkgs) + localDumpPkgs <- liftIO (readTVarIO ee.localDumpPkgs) + generateLocalHaddockIndex ee.baseConfigOpts localDumpPkgs ee.locals + generateDepsHaddockIndex + ee.baseConfigOpts + ee.globalDumpPkgs + snapshotDumpPkgs + localDumpPkgs + ee.locals + generateSnapHaddockIndex + ee.baseConfigOpts + ee.globalDumpPkgs + snapshotDumpPkgs + when buildOpts.openHaddocks $ do + let planPkgs, localPkgs, installedPkgs, availablePkgs + :: Map PackageName (PackageIdentifier, InstallLocation) + planPkgs = + Map.map (taskProvides &&& 
taskLocation) plan.tasks + localPkgs = + Map.fromList + [ (p.name, (packageIdentifier p, Local)) + | p <- map (.package) ee.locals + ] + installedPkgs = + Map.map (swap . second installedPackageIdentifier) installedMap' + availablePkgs = Map.unions [planPkgs, localPkgs, installedPkgs] + openHaddocksInBrowser + ee.baseConfigOpts + availablePkgs + (Map.keysSet targets) + where + installedMap' = Map.difference installedMap0 + $ Map.fromList + $ map (\(ident, _) -> (pkgName ident, ())) + $ Map.elems plan.unregisterLocal unregisterPackages :: - (HasProcessContext env, HasLogFunc env, HasPlatform env, HasCompiler env) - => ActualCompiler - -> Path Abs Dir - -> NonEmpty (GhcPkgId, (PackageIdentifier, Text)) - -> RIO env () -unregisterPackages cv localDB ids = do - let logReason ident reason = - logInfo $ - fromString (packageIdentifierString ident) <> ": unregistering" <> - if T.null reason - then "" - else " (" <> RIO.display reason <> ")" - let unregisterSinglePkg select (gid, (ident, reason)) = do - logReason ident reason - pkg <- getGhcPkgExe - unregisterGhcPkgIds pkg localDB $ select ident gid :| [] - - case cv of - -- GHC versions >= 8.2.1 support batch unregistering of packages. See - -- https://gitlab.haskell.org/ghc/ghc/issues/12637 - ACGhc v | v >= mkVersion [8, 2, 1] -> do - platform <- view platformL - -- According to https://support.microsoft.com/en-us/help/830473/command-prompt-cmd-exe-command-line-string-limitation - -- the maximum command line length on Windows since XP is 8191 characters. - -- We use conservative batch size of 100 ids on this OS thus argument name '-ipid', package name, - -- its version and a hash should fit well into this limit. - -- On Unix-like systems we're limited by ARG_MAX which is normally hundreds - -- of kilobytes so batch size of 500 should work fine. - let batchSize = case platform of - Platform _ Windows -> 100 - _ -> 500 - let chunksOfNE size = mapMaybe nonEmpty . chunksOf size . 
NonEmpty.toList - for_ (chunksOfNE batchSize ids) $ \batch -> do - for_ batch $ \(_, (ident, reason)) -> logReason ident reason - pkg <- getGhcPkgExe - unregisterGhcPkgIds pkg localDB $ fmap (Right . fst) batch - - -- GHC versions >= 7.9 support unregistering of packages via their - -- GhcPkgId. - ACGhc v | v >= mkVersion [7, 9] -> for_ ids . unregisterSinglePkg $ \_ident gid -> Right gid - - _ -> for_ ids . unregisterSinglePkg $ \ident _gid -> Left ident - -toActions :: HasEnvConfig env - => InstalledMap - -> Maybe (MVar ()) - -> (RIO env () -> IO ()) - -> ExecuteEnv - -> (Maybe Task, Maybe Task) -- build and final - -> [Action] + (HasCompiler env, HasPlatform env, HasProcessContext env, HasTerm env) + => Path Abs Dir + -> NonEmpty (GhcPkgId, (PackageIdentifier, Text)) + -> RIO env () +unregisterPackages localDB ids = do + let logReason ident reason = + prettyInfoL + ( [ fromString (packageIdentifierString ident) <> ":" + , "unregistering" + ] + <> [ parens (flow $ T.unpack reason) | not $ T.null reason ] + ) + -- GHC versions >= 8.2.1 support batch unregistering of packages. See + -- https://gitlab.haskell.org/ghc/ghc/issues/12637 + platform <- view platformL + -- According to + -- https://support.microsoft.com/en-us/help/830473/command-prompt-cmd-exe-command-line-string-limitation + -- the maximum command line length on Windows since XP is 8191 characters. We + -- use conservative batch size of 100 ids on this OS thus argument name + -- '-ipid', package name, its version and a hash should fit well into this + -- limit. On Unix-like systems we're limited by ARG_MAX which is normally + -- hundreds of kilobytes so batch size of 500 should work fine. + let batchSize = case platform of + Platform _ Windows -> 100 + _ -> 500 + let chunksOfNE size = mapMaybe nonEmpty . chunksOf size . 
NE.toList + for_ (chunksOfNE batchSize ids) $ \batch -> do + for_ batch $ \(_, (ident, reason)) -> logReason ident reason + pkg <- getGhcPkgExe + unregisterGhcPkgIds True pkg localDB $ fmap (Right . fst) batch + +toActions :: + HasEnvConfig env + => InstalledMap + -> Maybe (MVar ()) + -> (RIO env () -> IO ()) + -> ExecuteEnv + -> (Maybe Task, Maybe Task) -- build and final + -> [Action] toActions installedMap mtestLock runInBase ee (mbuild, mfinal) = - abuild ++ afinal - where - abuild = - case mbuild of - Nothing -> [] - Just task@Task {..} -> - [ Action - { actionId = ActionId taskProvides ATBuild - , actionDeps = - Set.map (\ident -> ActionId ident ATBuild) (tcoMissing taskConfigOpts) - , actionDo = \ac -> runInBase $ singleBuild ac ee task installedMap False - , actionConcurrency = ConcurrencyAllowed - } - ] - afinal = - case mfinal of - Nothing -> [] - Just task@Task {..} -> - (if taskAllInOne then id else (:) - Action - { actionId = ActionId taskProvides ATBuildFinal - , actionDeps = addBuild - (Set.map (\ident -> ActionId ident ATBuild) (tcoMissing taskConfigOpts)) - , actionDo = \ac -> runInBase $ singleBuild ac ee task installedMap True - , actionConcurrency = ConcurrencyAllowed - }) $ - -- These are the "final" actions - running tests and benchmarks. - (if Set.null tests then id else (:) - Action - { actionId = ActionId taskProvides ATRunTests - , actionDeps = finalDeps - , actionDo = \ac -> withLock mtestLock $ runInBase $ do - singleTest topts (Set.toList tests) ac ee task installedMap - -- Always allow tests tasks to run concurrently with - -- other tasks, particularly build tasks. Note that - -- 'mtestLock' can optionally make it so that only - -- one test is run at a time. 
- , actionConcurrency = ConcurrencyAllowed - }) $ - (if Set.null benches then id else (:) - Action - { actionId = ActionId taskProvides ATRunBenchmarks - , actionDeps = finalDeps - , actionDo = \ac -> runInBase $ do - singleBench beopts (Set.toList benches) ac ee task installedMap - -- Never run benchmarks concurrently with any other task, see #3663 - , actionConcurrency = ConcurrencyDisallowed - }) - [] - where - comps = taskComponents task - tests = testComponents comps - benches = benchComponents comps - finalDeps = - if taskAllInOne - then addBuild mempty - else Set.singleton (ActionId taskProvides ATBuildFinal) - addBuild = - case mbuild of - Nothing -> id - Just _ -> Set.insert $ ActionId taskProvides ATBuild - withLock Nothing f = f - withLock (Just lock) f = withMVar lock $ \() -> f - bopts = eeBuildOpts ee - topts = boptsTestOpts bopts - beopts = boptsBenchmarkOpts bopts - --- | Generate the ConfigCache -getConfigCache :: HasEnvConfig env - => ExecuteEnv -> Task -> InstalledMap -> Bool -> Bool - -> RIO env (Map PackageIdentifier GhcPkgId, ConfigCache) -getConfigCache ExecuteEnv {..} task@Task {..} installedMap enableTest enableBench = do - let extra = - -- We enable tests if the test suite dependencies are already - -- installed, so that we avoid unnecessary recompilation based on - -- cabal_macros.h changes when switching between 'stack build' and - -- 'stack test'. See: - -- https://github.com/commercialhaskell/stack/issues/805 - case taskType of - TTLocalMutable _ -> - -- FIXME: make this work with exact-configuration. - -- Not sure how to plumb the info atm. See - -- https://github.com/commercialhaskell/stack/issues/2049 - [ "--enable-tests" | enableTest] ++ - [ "--enable-benchmarks" | enableBench] - TTRemotePackage{} -> [] - idMap <- liftIO $ readTVarIO eeGhcPkgIds - let getMissing ident = - case Map.lookup ident idMap of - Nothing - -- Expect to instead find it in installedMap if it's - -- an initialBuildSteps target. 
- | boptsCLIInitialBuildSteps eeBuildOptsCLI && taskIsTarget task, - Just (_, installed) <- Map.lookup (pkgName ident) installedMap - -> installedToGhcPkgId ident installed - Just installed -> installedToGhcPkgId ident installed - _ -> error $ "singleBuild: invariant violated, missing package ID missing: " ++ show ident - installedToGhcPkgId ident (Library ident' x _) = assert (ident == ident') $ Just (ident, x) - installedToGhcPkgId _ (Executable _) = Nothing - missing' = Map.fromList $ mapMaybe getMissing $ Set.toList missing - TaskConfigOpts missing mkOpts = taskConfigOpts - opts = mkOpts missing' - allDeps = Set.fromList $ Map.elems missing' ++ Map.elems taskPresent - cache = ConfigCache - { configCacheOpts = opts - { coNoDirs = coNoDirs opts ++ map T.unpack extra - } - , configCacheDeps = allDeps - , configCacheComponents = - case taskType of - TTLocalMutable lp -> Set.map (encodeUtf8 . renderComponent) $ lpComponents lp - TTRemotePackage{} -> Set.empty - , configCacheHaddock = taskBuildHaddock - , configCachePkgSrc = taskCachePkgSrc - , configCachePathEnvVar = eePathEnvVar + abuild ++ afinal + where + abuild = case mbuild of + Nothing -> [] + Just task -> + [ Action + { actionId = ActionId (taskProvides task) ATBuild + , actionDeps = + Set.map (`ActionId` ATBuild) task.configOpts.missing + , action = + \ac -> runInBase $ singleBuild ac ee task installedMap False + , concurrency = ConcurrencyAllowed + } + ] + afinal = case mfinal of + Nothing -> [] + Just task -> + ( if task.allInOne + then id + else (:) Action + { actionId = ActionId pkgId ATBuildFinal + , actionDeps = addBuild + (Set.map (`ActionId` ATBuild) task.configOpts.missing) + , action = + \ac -> runInBase $ singleBuild ac ee task installedMap True + , concurrency = ConcurrencyAllowed } - allDepsMap = Map.union missing' taskPresent - return (allDepsMap, cache) - --- | Ensure that the configuration for the package matches what is given -ensureConfig :: HasEnvConfig env - => ConfigCache -- ^ 
newConfigCache - -> Path Abs Dir -- ^ package directory - -> ExecuteEnv - -> RIO env () -- ^ announce - -> (ExcludeTHLoading -> [String] -> RIO env ()) -- ^ cabal - -> Path Abs File -- ^ .cabal file - -> Task - -> RIO env Bool -ensureConfig newConfigCache pkgDir ExecuteEnv {..} announce cabal cabalfp task = do - newCabalMod <- liftIO $ modificationTime <$> getFileStatus (toFilePath cabalfp) - setupConfigfp <- setupConfigFromDir pkgDir - newSetupConfigMod <- liftIO $ either (const Nothing) (Just . modificationTime) <$> - tryJust (guard . isDoesNotExistError) (getFileStatus (toFilePath setupConfigfp)) - -- See https://github.com/commercialhaskell/stack/issues/3554 - taskAnyMissingHack <- view $ actualCompilerVersionL.to getGhcVersion.to (< mkVersion [8, 4]) - needConfig <- - if boptsReconfigure eeBuildOpts || (taskAnyMissing task && taskAnyMissingHack) - then return True - else do - -- We can ignore the components portion of the config - -- cache, because it's just used to inform 'construct - -- plan that we need to plan to build additional - -- components. These components don't affect the actual - -- package configuration. - let ignoreComponents cc = cc { configCacheComponents = Set.empty } - -- Determine the old and new configuration in the local directory, to - -- determine if we need to reconfigure. 
- mOldConfigCache <- tryGetConfigCache pkgDir - - mOldCabalMod <- tryGetCabalMod pkgDir - - -- Cabal's setup-config is created per OS/Cabal version, multiple - -- projects using the same package could get a conflict because of this - mOldSetupConfigMod <- tryGetSetupConfigMod pkgDir - - return $ fmap ignoreComponents mOldConfigCache /= Just (ignoreComponents newConfigCache) - || mOldCabalMod /= Just newCabalMod - || mOldSetupConfigMod /= newSetupConfigMod - let ConfigureOpts dirs nodirs = configCacheOpts newConfigCache - - when (taskBuildTypeConfig task) ensureConfigureScript - - when needConfig $ withMVar eeConfigureLock $ \_ -> do - deleteCaches pkgDir - announce - cp <- view compilerPathsL - let (GhcPkgExe pkgPath) = cpPkg cp - let programNames = - case cpWhich cp of - Ghc -> - [ "--with-ghc=" ++ toFilePath (cpCompiler cp) - , "--with-ghc-pkg=" ++ toFilePath pkgPath - ] - exes <- forM programNames $ \name -> do - mpath <- findExecutable name - return $ case mpath of - Left _ -> [] - Right x -> return $ concat ["--with-", name, "=", x] - -- Configure cabal with arguments determined by - -- Stack.Types.Build.configureOpts - cabal KeepTHLoading $ "configure" : concat - [ concat exes - , dirs - , nodirs - ] - -- Only write the cache for local packages. Remote packages are built - -- in a temporary directory so the cache would never be used anyway. - case taskType task of - TTLocalMutable{} -> writeConfigCache pkgDir newConfigCache - TTRemotePackage{} -> return () - writeCabalMod pkgDir newCabalMod - - return needConfig - where - -- When build-type is Configure, we need to have a configure - -- script in the local directory. If it doesn't exist, build it - -- with autoreconf -i. 
See: - -- https://github.com/commercialhaskell/stack/issues/3534 - ensureConfigureScript = do - let fp = pkgDir relFileConfigure - exists <- doesFileExist fp - unless exists $ do - logInfo $ "Trying to generate configure with autoreconf in " <> fromString (toFilePath pkgDir) - let autoreconf = if osIsWindows - then readProcessNull "sh" ["autoreconf", "-i"] - else readProcessNull "autoreconf" ["-i"] - -- On Windows 10, an upstream issue with the `sh autoreconf -i` - -- command means that command clears, but does not then restore, the - -- ENABLE_VIRTUAL_TERMINAL_PROCESSING flag for native terminals. The - -- following hack re-enables the lost ANSI-capability. - fixupOnWindows = when osIsWindows (void $ liftIO defaultColorWhen) - withWorkingDir (toFilePath pkgDir) $ autoreconf `catchAny` \ex -> do - fixupOnWindows - logWarn $ "Unable to run autoreconf: " <> displayShow ex - when osIsWindows $ do - logInfo $ "Check that executable perl is on the path in stack's " <> - "MSYS2 \\usr\\bin folder, and working, and that script file " <> - "autoreconf is on the path in that location. To check that " <> - "perl or autoreconf are on the path in the required location, " <> - "run commands:" - logInfo "" - logInfo " stack exec where -- perl" - logInfo " stack exec where -- autoreconf" - logInfo "" - logInfo $ "If perl or autoreconf is not on the path in the " <> - "required location, add them with command (note that the " <> - "relevant package name is 'autoconf' not 'autoreconf'):" - logInfo "" - logInfo " stack exec pacman -- --sync --refresh autoconf" - logInfo "" - logInfo $ "Some versions of perl from MYSY2 are broken. See " <> - "https://github.com/msys2/MSYS2-packages/issues/1611 and " <> - "https://github.com/commercialhaskell/stack/pull/4781. 
To " <> - "test if perl in the required location is working, try command:" - logInfo "" - logInfo " stack exec perl -- --version" - logInfo "" - fixupOnWindows - --- | Make a padded prefix for log messages -packageNamePrefix :: ExecuteEnv -> PackageName -> Utf8Builder -packageNamePrefix ee name' = - let name = packageNameString name' - paddedName = - case eeLargestPackageName ee of - Nothing -> name - Just len -> assert (len >= length name) $ RIO.take len $ name ++ repeat ' ' - in fromString paddedName <> "> " - -announceTask :: HasLogFunc env => ExecuteEnv -> Task -> Utf8Builder -> RIO env () -announceTask ee task action = logInfo $ - packageNamePrefix ee (pkgName (taskProvides task)) <> - action - --- | Ensure we're the only action using the directory. See --- -withLockedDistDir - :: HasEnvConfig env - => (Utf8Builder -> RIO env ()) -- ^ announce - -> Path Abs Dir -- ^ root directory for package - -> RIO env a - -> RIO env a -withLockedDistDir announce root inner = do - distDir <- distRelativeDir - let lockFP = root distDir relFileBuildLock - ensureDir $ parent lockFP - - mres <- - withRunInIO $ \run -> - withTryFileLock (toFilePath lockFP) Exclusive $ \_lock -> - run inner - - case mres of - Just res -> pure res - Nothing -> do - let complainer delay = do - delay 5000000 -- 5 seconds - announce $ "blocking for directory lock on " <> fromString (toFilePath lockFP) - forever $ do - delay 30000000 -- 30 seconds - announce $ "still blocking for directory lock on " <> - fromString (toFilePath lockFP) <> - "; maybe another Stack process is running?" - withCompanion complainer $ - \stopComplaining -> - withRunInIO $ \run -> - withFileLock (toFilePath lockFP) Exclusive $ \_ -> - run $ stopComplaining *> inner - --- | How we deal with output from GHC, either dumping to a log file or the --- console (with some prefix). 
-data OutputType - = OTLogFile !(Path Abs File) !Handle - | OTConsole !(Maybe Utf8Builder) - --- | This sets up a context for executing build steps which need to run --- Cabal (via a compiled Setup.hs). In particular it does the following: --- --- * Ensures the package exists in the file system, downloading if necessary. --- --- * Opens a log file if the built output shouldn't go to stderr. --- --- * Ensures that either a simple Setup.hs is built, or the package's --- custom setup is built. --- --- * Provides the user a function with which run the Cabal process. -withSingleContext :: forall env a. HasEnvConfig env - => ActionContext - -> ExecuteEnv - -> Task - -> Map PackageIdentifier GhcPkgId - -- ^ All dependencies' package ids to provide to Setup.hs. - -> Maybe String - -> ( Package -- Package info - -> Path Abs File -- Cabal file path - -> Path Abs Dir -- Package root directory file path - -- Note that the `Path Abs Dir` argument is redundant with the `Path Abs File` - -- argument, but we provide both to avoid recalculating `parent` of the `File`. - -> (KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ()) - -- Function to run Cabal with args - -> (Utf8Builder -> RIO env ()) -- An 'announce' function, for different build phases - -> OutputType - -> RIO env a) - -> RIO env a -withSingleContext ActionContext {..} ee@ExecuteEnv {..} task@Task {..} allDeps msuffix inner0 = - withPackage $ \package cabalfp pkgDir -> - withOutputType pkgDir package $ \outputType -> - withCabal package pkgDir outputType $ \cabal -> - inner0 package cabalfp pkgDir cabal announce outputType - where - announce = announceTask ee task - - wanted = - case taskType of - TTLocalMutable lp -> lpWanted lp - TTRemotePackage{} -> False - - -- Output to the console if this is the last task, and the user - -- asked to build it specifically. 
When the action is a - -- 'ConcurrencyDisallowed' action (benchmarks), then we can also be - -- sure to have excluse access to the console, so output is also - -- sent to the console in this case. - -- - -- See the discussion on #426 for thoughts on sending output to the - -- console from concurrent tasks. - console = - (wanted && - all (\(ActionId ident _) -> ident == taskProvides) (Set.toList acRemaining) && - eeTotalWanted == 1 - ) || (acConcurrency == ConcurrencyDisallowed) - - withPackage inner = - case taskType of - TTLocalMutable lp -> do - let root = parent $ lpCabalFile lp - withLockedDistDir announce root $ - inner (lpPackage lp) (lpCabalFile lp) root - TTRemotePackage _ package pkgloc -> do - suffix <- parseRelDir $ packageIdentifierString $ packageIdent package - let dir = eeTempDir suffix - unpackPackageLocation dir pkgloc - - -- See: https://github.com/fpco/stack/issues/157 - distDir <- distRelativeDir - let oldDist = dir relDirDist - newDist = dir distDir - exists <- doesDirExist oldDist - when exists $ do - -- Previously used takeDirectory, but that got confused - -- by trailing slashes, see: - -- https://github.com/commercialhaskell/stack/issues/216 - -- - -- Instead, use Path which is a bit more resilient - ensureDir $ parent newDist - renameDir oldDist newDist - - let name = pkgName taskProvides - cabalfpRel <- parseRelFile $ packageNameString name ++ ".cabal" - let cabalfp = dir cabalfpRel - inner package cabalfp dir - - withOutputType pkgDir package inner - -- Not in interleaved mode. When building a single wanted package, dump - -- to the console with no prefix. - | console = inner $ OTConsole Nothing - - -- If the user requested interleaved output, dump to the console with a - -- prefix. - | boptsInterleavedOutput eeBuildOpts = - inner $ OTConsole $ Just $ packageNamePrefix ee $ packageName package - - -- Neither condition applies, dump to a file. 
- | otherwise = do - logPath <- buildLogPath package msuffix - ensureDir (parent logPath) - let fp = toFilePath logPath - - -- We only want to dump logs for local non-dependency packages - case taskType of - TTLocalMutable lp | lpWanted lp -> - liftIO $ atomically $ writeTChan eeLogFiles (pkgDir, logPath) - _ -> return () - - withBinaryFile fp WriteMode $ \h -> inner $ OTLogFile logPath h - - withCabal - :: Package - -> Path Abs Dir - -> OutputType - -> ((KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ()) -> RIO env a) - -> RIO env a - withCabal package pkgDir outputType inner = do - config <- view configL - unless (configAllowDifferentUser config) $ - checkOwnership (pkgDir configWorkDir config) - let envSettings = EnvSettings - { esIncludeLocals = taskLocation task == Local - , esIncludeGhcPackagePath = False - , esStackExe = False - , esLocaleUtf8 = True - , esKeepGhcRts = False - } - menv <- liftIO $ configProcessContextSettings config envSettings - distRelativeDir' <- distRelativeDir - esetupexehs <- - -- Avoid broken Setup.hs files causing problems for simple build - -- types, see: - -- https://github.com/commercialhaskell/stack/issues/370 - case (packageBuildType package, eeSetupExe) of - (C.Simple, Just setupExe) -> return $ Left setupExe - _ -> liftIO $ Right <$> getSetupHs pkgDir - inner $ \keepOutputOpen stripTHLoading args -> do - let cabalPackageArg - -- Omit cabal package dependency when building - -- Cabal. See - -- https://github.com/commercialhaskell/stack/issues/1356 - | packageName package == mkPackageName "Cabal" = [] - | otherwise = - ["-package=" ++ packageIdentifierString - (PackageIdentifier cabalPackageName - eeCabalPkgVer)] - packageDBArgs = - ( "-clear-package-db" - : "-global-package-db" - : map (("-package-db=" ++) . 
toFilePathNoTrailingSep) (bcoExtraDBs eeBaseConfigOpts) - ) ++ - ( ("-package-db=" ++ toFilePathNoTrailingSep (bcoSnapDB eeBaseConfigOpts)) - : ("-package-db=" ++ toFilePathNoTrailingSep (bcoLocalDB eeBaseConfigOpts)) - : ["-hide-all-packages"] - ) - - warnCustomNoDeps :: RIO env () - warnCustomNoDeps = - case (taskType, packageBuildType package) of - (TTLocalMutable lp, C.Custom) | lpWanted lp -> do - prettyWarnL - [ flow "Package" - , fromString $ packageNameString $ packageName package - , flow "uses a custom Cabal build, but does not use a custom-setup stanza" - ] - _ -> return () - - getPackageArgs :: Path Abs Dir -> RIO env [String] - getPackageArgs setupDir = - case packageSetupDeps package of - -- The package is using the Cabal custom-setup - -- configuration introduced in Cabal 1.24. In - -- this case, the package is providing an - -- explicit list of dependencies, and we - -- should simply use all of them. - Just customSetupDeps -> do - unless (Map.member (mkPackageName "Cabal") customSetupDeps) $ - prettyWarnL - [ fromString $ packageNameString $ packageName package - , "has a setup-depends field, but it does not mention a Cabal dependency. This is likely to cause build errors." - ] - matchedDeps <- forM (Map.toList customSetupDeps) $ \(name, range) -> do - let matches (PackageIdentifier name' version) = - name == name' && - version `withinRange` range - case filter (matches . 
fst) (Map.toList allDeps) of - x:xs -> do - unless (null xs) - (logWarn ("Found multiple installed packages for custom-setup dep: " <> fromString (packageNameString name))) - return ("-package-id=" ++ ghcPkgIdString (snd x), Just (fst x)) - [] -> do - logWarn ("Could not find custom-setup dep: " <> fromString (packageNameString name)) - return ("-package=" ++ packageNameString name, Nothing) - let depsArgs = map fst matchedDeps - -- Generate setup_macros.h and provide it to ghc - let macroDeps = mapMaybe snd matchedDeps - cppMacrosFile = setupDir relFileSetupMacrosH - cppArgs = ["-optP-include", "-optP" ++ toFilePath cppMacrosFile] - writeBinaryFileAtomic cppMacrosFile (encodeUtf8Builder (T.pack (C.generatePackageVersionMacros macroDeps))) - return (packageDBArgs ++ depsArgs ++ cppArgs) - - -- This branch is usually taken for builds, and - -- is always taken for `stack sdist`. - -- - -- This approach is debatable. It adds access to the - -- snapshot package database for Cabal. There are two - -- possible objections: - -- - -- 1. This doesn't isolate the build enough; arbitrary - -- other packages available could cause the build to - -- succeed or fail. - -- - -- 2. This doesn't provide enough packages: we should also - -- include the local database when building local packages. - -- - -- Currently, this branch is only taken via `stack - -- sdist` or when explicitly requested in the - -- stack.yaml file. - Nothing -> do - warnCustomNoDeps - return $ cabalPackageArg ++ - -- NOTE: This is different from - -- packageDBArgs above in that it does not - -- include the local database and does not - -- pass in the -hide-all-packages argument - ("-clear-package-db" - : "-global-package-db" - : map (("-package-db=" ++) . 
toFilePathNoTrailingSep) (bcoExtraDBs eeBaseConfigOpts) - ++ ["-package-db=" ++ toFilePathNoTrailingSep (bcoSnapDB eeBaseConfigOpts)]) - - setupArgs = ("--builddir=" ++ toFilePathNoTrailingSep distRelativeDir') : args - - runExe :: Path Abs File -> [String] -> RIO env () - runExe exeName fullArgs = do - compilerVer <- view actualCompilerVersionL - runAndOutput compilerVer `catch` \ece -> do - (mlogFile, bss) <- - case outputType of - OTConsole _ -> return (Nothing, []) - OTLogFile logFile h -> - if keepOutputOpen == KeepOpen - then return (Nothing, []) -- expected failure build continues further - else do - liftIO $ hClose h - fmap (Just logFile,) $ withSourceFile (toFilePath logFile) $ \src -> - runConduit - $ src - .| CT.decodeUtf8Lenient - .| mungeBuildOutput stripTHLoading makeAbsolute pkgDir compilerVer - .| CL.consume - throwM $ CabalExitedUnsuccessfully - (eceExitCode ece) - taskProvides - exeName - fullArgs - mlogFile - bss - where - runAndOutput :: ActualCompiler -> RIO env () - runAndOutput compilerVer = withWorkingDir (toFilePath pkgDir) $ withProcessContext menv $ case outputType of - OTLogFile _ h -> do - let prefixWithTimestamps = - if configPrefixTimestamps config - then PrefixWithTimestamps - else WithoutTimestamps - void $ sinkProcessStderrStdout (toFilePath exeName) fullArgs - (sinkWithTimestamps prefixWithTimestamps h) - (sinkWithTimestamps prefixWithTimestamps h) - OTConsole mprefix -> - let prefix = fold mprefix in - void $ sinkProcessStderrStdout (toFilePath exeName) fullArgs - (outputSink KeepTHLoading LevelWarn compilerVer prefix) - (outputSink stripTHLoading LevelInfo compilerVer prefix) - outputSink - :: HasCallStack - => ExcludeTHLoading - -> LogLevel - -> ActualCompiler - -> Utf8Builder - -> ConduitM S.ByteString Void (RIO env) () - outputSink excludeTH level compilerVer prefix = - CT.decodeUtf8Lenient - .| mungeBuildOutput excludeTH makeAbsolute pkgDir compilerVer - .| CL.mapM_ (logGeneric "" level . (prefix <>) . 
RIO.display) - -- If users want control, we should add a config option for this - makeAbsolute :: ConvertPathsToAbsolute - makeAbsolute = case stripTHLoading of - ExcludeTHLoading -> ConvertPathsToAbsolute - KeepTHLoading -> KeepPathsAsIs - - exeName <- case esetupexehs of - Left setupExe -> return setupExe - Right setuphs -> do - distDir <- distDirFromDir pkgDir - let setupDir = distDir relDirSetup - outputFile = setupDir relFileSetupLower - customBuilt <- liftIO $ readIORef eeCustomBuilt - if Set.member (packageName package) customBuilt - then return outputFile - else do - ensureDir setupDir - compilerPath <- view $ compilerPathsL.to cpCompiler - packageArgs <- getPackageArgs setupDir - runExe compilerPath $ - [ "--make" - , "-odir", toFilePathNoTrailingSep setupDir - , "-hidir", toFilePathNoTrailingSep setupDir - , "-i", "-i." - ] ++ packageArgs ++ - [ toFilePath setuphs - , toFilePath eeSetupShimHs - , "-main-is" - , "StackSetupShim.mainOverride" - , "-o", toFilePath outputFile - , "-threaded" - ] ++ - - -- Apply GHC options - -- https://github.com/commercialhaskell/stack/issues/4526 - map T.unpack ( - Map.findWithDefault [] AGOEverything (configGhcOptionsByCat config) ++ - case configApplyGhcOptions config of - AGOEverything -> boptsCLIGhcOptions eeBuildOptsCLI - AGOTargets -> [] - AGOLocals -> []) - - liftIO $ atomicModifyIORef' eeCustomBuilt $ - \oldCustomBuilt -> (Set.insert (packageName package) oldCustomBuilt, ()) - return outputFile - runExe exeName $ (if boptsCabalVerbose eeBuildOpts then ("--verbose":) else id) setupArgs - --- Implements running a package's build, used to implement 'ATBuild' and --- 'ATBuildFinal' tasks. In particular this does the following: --- --- * Checks if the package exists in the precompiled cache, and if so, --- add it to the database instead of performing the build. 
--- --- * Runs the configure step if needed ('ensureConfig') --- --- * Runs the build step --- --- * Generates haddocks --- --- * Registers the library and copies the built executables into the --- local install directory. Note that this is literally invoking Cabal --- with @copy@, and not the copying done by @stack install@ - that is --- handled by 'copyExecutables'. -singleBuild :: forall env. (HasEnvConfig env, HasRunner env) - => ActionContext - -> ExecuteEnv - -> Task - -> InstalledMap - -> Bool -- ^ Is this a final build? - -> RIO env () -singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap isFinalBuild = do - (allDepsMap, cache) <- getConfigCache ee task installedMap enableTests enableBenchmarks - mprecompiled <- getPrecompiled cache - minstalled <- - case mprecompiled of - Just precompiled -> copyPreCompiled precompiled - Nothing -> do - mcurator <- view $ buildConfigL.to bcCurator - realConfigAndBuild cache mcurator allDepsMap - case minstalled of - Nothing -> return () - Just installed -> do - writeFlagCache installed cache - liftIO $ atomically $ modifyTVar eeGhcPkgIds $ Map.insert taskProvides installed - where - pname = pkgName taskProvides - doHaddock mcurator package - = taskBuildHaddock && - not isFinalBuild && - -- Works around haddock failing on bytestring-builder since it has no modules - -- when bytestring is new enough. - packageHasExposedModules package && - -- Special help for the curator tool to avoid haddocks that are known to fail - maybe True (Set.notMember pname . curatorSkipHaddock) mcurator - expectHaddockFailure mcurator = - maybe False (Set.member pname . 
curatorExpectHaddockFailure) mcurator - fulfillHaddockExpectations mcurator action | expectHaddockFailure mcurator = do - eres <- tryAny $ action KeepOpen - case eres of - Right () -> logWarn $ fromString (packageNameString pname) <> ": unexpected Haddock success" - Left _ -> return () - fulfillHaddockExpectations _ action = do - action CloseOnException - - buildingFinals = isFinalBuild || taskAllInOne - enableTests = buildingFinals && any isCTest (taskComponents task) - enableBenchmarks = buildingFinals && any isCBench (taskComponents task) - - annSuffix executableBuildStatuses = if result == "" then "" else " (" <> result <> ")" - where - result = T.intercalate " + " $ concat - [ ["lib" | taskAllInOne && hasLib] - , ["internal-lib" | taskAllInOne && hasSubLib] - , ["exe" | taskAllInOne && hasExe] - , ["test" | enableTests] - , ["bench" | enableBenchmarks] - ] - (hasLib, hasSubLib, hasExe) = case taskType of - TTLocalMutable lp -> - let package = lpPackage lp - hasLibrary = - case packageLibraries package of - NoLibraries -> False - HasLibraries _ -> True - hasSubLibrary = not . Set.null $ packageInternalLibraries package - hasExecutables = not . Set.null $ exesToBuild executableBuildStatuses lp - in (hasLibrary, hasSubLibrary, hasExecutables) - -- This isn't true, but we don't want to have this info for - -- upstream deps. - _ -> (False, False, False) - - getPrecompiled cache = - case taskType of - TTRemotePackage Immutable _ loc -> do - mpc <- readPrecompiledCache - loc - (configCacheOpts cache) - (configCacheHaddock cache) - (configCacheDeps cache) - case mpc of - Nothing -> return Nothing - -- Only pay attention to precompiled caches that refer to packages within - -- the snapshot. 
- Just pc | maybe False - (bcoSnapInstallRoot eeBaseConfigOpts `isProperPrefixOf`) - (pcLibrary pc) -> - return Nothing - -- If old precompiled cache files are left around but snapshots are deleted, - -- it is possible for the precompiled file to refer to the very library - -- we're building, and if flags are changed it may try to copy the library - -- to itself. This check prevents that from happening. - Just pc -> do - let allM _ [] = return True - allM f (x:xs) = do - b <- f x - if b then allM f xs else return False - b <- liftIO $ allM doesFileExist $ maybe id (:) (pcLibrary pc) $ pcExes pc - return $ if b then Just pc else Nothing - _ -> return Nothing - - copyPreCompiled (PrecompiledCache mlib sublibs exes) = do - wc <- view $ actualCompilerVersionL.whichCompilerL - announceTask ee task "using precompiled package" - - -- We need to copy .conf files for the main library and all sublibraries which exist in the cache, - -- from their old snapshot to the new one. However, we must unregister any such library in the new - -- snapshot, in case it was built with different flags. - let - subLibNames = map T.unpack . Set.toList $ case taskType of - TTLocalMutable lp -> packageInternalLibraries $ lpPackage lp - TTRemotePackage _ p _ -> packageInternalLibraries p - PackageIdentifier name version = taskProvides - mainLibName = packageNameString name - mainLibVersion = versionString version - pkgName = mainLibName ++ "-" ++ mainLibVersion - -- z-package-z-internal for internal lib internal of package package - toCabalInternalLibName n = concat ["z-", mainLibName, "-z-", n, "-", mainLibVersion] - allToUnregister = map (const pkgName) (maybeToList mlib) ++ map toCabalInternalLibName subLibNames - allToRegister = maybeToList mlib ++ sublibs - - unless (null allToRegister) $ do - withMVar eeInstallLock $ \() -> do - -- We want to ignore the global and user databases. - -- Unfortunately, ghc-pkg doesn't take such arguments on the - -- command line. 
Instead, we'll set GHC_PACKAGE_PATH. See: - -- https://github.com/commercialhaskell/stack/issues/1146 - - let modifyEnv = Map.insert - (ghcPkgPathEnvVar wc) - (T.pack $ toFilePathNoTrailingSep $ bcoSnapDB eeBaseConfigOpts) - - withModifyEnvVars modifyEnv $ do - GhcPkgExe ghcPkgExe <- getGhcPkgExe - - -- first unregister everything that needs to be unregistered - forM_ allToUnregister $ \packageName -> catchAny - (readProcessNull (toFilePath ghcPkgExe) [ "unregister", "--force", packageName]) - (const (return ())) - - -- now, register the cached conf files - forM_ allToRegister $ \libpath -> - proc (toFilePath ghcPkgExe) [ "register", "--force", toFilePath libpath] readProcess_ - - liftIO $ forM_ exes $ \exe -> do - ensureDir bindir - let dst = bindir filename exe - createLink (toFilePath exe) (toFilePath dst) `catchIO` \_ -> copyFile exe dst - case (mlib, exes) of - (Nothing, _:_) -> markExeInstalled (taskLocation task) taskProvides - _ -> return () - - -- Find the package in the database - let pkgDbs = [bcoSnapDB eeBaseConfigOpts] - - case mlib of - Nothing -> return $ Just $ Executable taskProvides - Just _ -> do - mpkgid <- loadInstalledPkg pkgDbs eeSnapshotDumpPkgs pname - - return $ Just $ - case mpkgid of - Nothing -> assert False $ Executable taskProvides - Just pkgid -> Library taskProvides pkgid Nothing - where - bindir = bcoSnapInstallRoot eeBaseConfigOpts bindirSuffix - - realConfigAndBuild cache mcurator allDepsMap = withSingleContext ac ee task allDepsMap Nothing - $ \package cabalfp pkgDir cabal0 announce _outputType -> do - let cabal = cabal0 CloseOnException - executableBuildStatuses <- getExecutableBuildStatuses package pkgDir - when (not (cabalIsSatisfied executableBuildStatuses) && taskIsTarget task) - (logInfo - ("Building all executables for `" <> fromString (packageNameString (packageName package)) <> - "' once. 
After a successful build of all of them, only specified executables will be rebuilt.")) - - _neededConfig <- ensureConfig cache pkgDir ee (announce ("configure" <> RIO.display (annSuffix executableBuildStatuses))) cabal cabalfp task - let installedMapHasThisPkg :: Bool - installedMapHasThisPkg = - case Map.lookup (packageName package) installedMap of - Just (_, Library ident _ _) -> ident == taskProvides - Just (_, Executable _) -> True - _ -> False - - case ( boptsCLIOnlyConfigure eeBuildOptsCLI - , boptsCLIInitialBuildSteps eeBuildOptsCLI && taskIsTarget task) of - -- A full build is done if there are downstream actions, - -- because their configure step will require that this - -- package is built. See - -- https://github.com/commercialhaskell/stack/issues/2787 - (True, _) | null acDownstream -> return Nothing - (_, True) | null acDownstream || installedMapHasThisPkg -> do - initialBuildSteps executableBuildStatuses cabal announce - return Nothing - _ -> fulfillCuratorBuildExpectations pname mcurator enableTests enableBenchmarks Nothing $ - Just <$> realBuild cache package pkgDir cabal0 announce executableBuildStatuses - - initialBuildSteps executableBuildStatuses cabal announce = do - announce ("initial-build-steps" <> RIO.display (annSuffix executableBuildStatuses)) - cabal KeepTHLoading ["repl", "stack-initial-build-steps"] - - realBuild - :: ConfigCache - -> Package - -> Path Abs Dir - -> (KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ()) - -> (Utf8Builder -> RIO env ()) - -> Map Text ExecutableBuildStatus - -> RIO env Installed - realBuild cache package pkgDir cabal0 announce executableBuildStatuses = do - let cabal = cabal0 CloseOnException - wc <- view $ actualCompilerVersionL.whichCompilerL - - markExeNotInstalled (taskLocation task) taskProvides - case taskType of - TTLocalMutable lp -> do - when enableTests $ setTestStatus pkgDir TSUnknown - caches <- runMemoizedWith $ lpNewBuildCaches lp - mapM_ (uncurry (writeBuildCache pkgDir)) - 
(Map.toList caches) - TTRemotePackage{} -> return () - - -- FIXME: only output these if they're in the build plan. - - let postBuildCheck _succeeded = do - mlocalWarnings <- case taskType of - TTLocalMutable lp -> do - warnings <- checkForUnlistedFiles taskType pkgDir - -- TODO: Perhaps only emit these warnings for non extra-dep? - return (Just (lpCabalFile lp, warnings)) - _ -> return Nothing - -- NOTE: once - -- https://github.com/commercialhaskell/stack/issues/2649 - -- is resolved, we will want to partition the warnings - -- based on variety, and output in different lists. - let showModuleWarning (UnlistedModulesWarning comp modules) = - "- In" <+> - fromString (T.unpack (renderComponent comp)) <> - ":" <> line <> - indent 4 (mconcat $ intersperse line $ map (style Good . fromString . C.display) modules) - forM_ mlocalWarnings $ \(cabalfp, warnings) -> do - unless (null warnings) $ prettyWarn $ - "The following modules should be added to exposed-modules or other-modules in" <+> - pretty cabalfp <> ":" <> line <> - indent 4 (mconcat $ intersperse line $ map showModuleWarning warnings) <> - line <> line <> - "Missing modules in the cabal file are likely to cause undefined reference errors from the linker, along with other problems." - - () <- announce ("build" <> RIO.display (annSuffix executableBuildStatuses)) - config <- view configL - extraOpts <- extraBuildOptions wc eeBuildOpts - let stripTHLoading - | configHideTHLoading config = ExcludeTHLoading - | otherwise = KeepTHLoading - cabal stripTHLoading (("build" :) $ (++ extraOpts) $ - case (taskType, taskAllInOne, isFinalBuild) of - (_, True, True) -> error "Invariant violated: cannot have an all-in-one build that also has a final build step." 
- (TTLocalMutable lp, False, False) -> primaryComponentOptions executableBuildStatuses lp - (TTLocalMutable lp, False, True) -> finalComponentOptions lp - (TTLocalMutable lp, True, False) -> primaryComponentOptions executableBuildStatuses lp ++ finalComponentOptions lp - (TTRemotePackage{}, _, _) -> []) - `catch` \ex -> case ex of - CabalExitedUnsuccessfully{} -> postBuildCheck False >> throwM ex - _ -> throwM ex - postBuildCheck True - - mcurator <- view $ buildConfigL.to bcCurator - when (doHaddock mcurator package) $ do - announce "haddock" - sourceFlag <- if not (boptsHaddockHyperlinkSource eeBuildOpts) then return [] else do - -- See #2429 for why the temp dir is used - ec - <- withWorkingDir (toFilePath eeTempDir) - $ proc "haddock" ["--hyperlinked-source"] - $ \pc -> withProcessWait - (setStdout createSource $ setStderr createSource pc) $ \p -> - runConcurrently - $ Concurrently (runConduit $ getStdout p .| CL.sinkNull) - *> Concurrently (runConduit $ getStderr p .| CL.sinkNull) - *> Concurrently (waitExitCode p) - case ec of - -- Fancy crosslinked source - ExitSuccess -> return ["--haddock-option=--hyperlinked-source"] - -- Older hscolour colouring - ExitFailure _ -> do - hscolourExists <- doesExecutableExist "HsColour" - unless hscolourExists $ logWarn - ("Warning: haddock not generating hyperlinked sources because 'HsColour' not\n" <> - "found on PATH (use 'stack install hscolour' to install).") - return ["--hyperlink-source" | hscolourExists] - - -- For GHC 8.4 and later, provide the --quickjump option. 
- actualCompiler <- view actualCompilerVersionL - let quickjump = - case actualCompiler of - ACGhc ghcVer - | ghcVer >= mkVersion [8, 4] -> ["--haddock-option=--quickjump"] - _ -> [] - - fulfillHaddockExpectations mcurator $ \keep -> cabal0 keep KeepTHLoading $ concat - [ ["haddock", "--html", "--hoogle", "--html-location=../$pkg-$version/"] - , sourceFlag - , ["--internal" | boptsHaddockInternal eeBuildOpts] - , [ "--haddock-option=" <> opt - | opt <- hoAdditionalArgs (boptsHaddockOpts eeBuildOpts) ] - , quickjump - ] - - let hasLibrary = - case packageLibraries package of - NoLibraries -> False - HasLibraries _ -> True - packageHasComponentSet f = not $ Set.null $ f package - hasInternalLibrary = packageHasComponentSet packageInternalLibraries - hasExecutables = packageHasComponentSet packageExes - shouldCopy = not isFinalBuild && (hasLibrary || hasInternalLibrary || hasExecutables) - when shouldCopy $ withMVar eeInstallLock $ \() -> do - announce "copy/register" - eres <- try $ cabal KeepTHLoading ["copy"] - case eres of - Left err@CabalExitedUnsuccessfully{} -> - throwM $ CabalCopyFailed (packageBuildType package == C.Simple) (show err) - _ -> return () - when hasLibrary $ cabal KeepTHLoading ["register"] - - -- copy ddump-* files - case T.unpack <$> boptsDdumpDir eeBuildOpts of - Just ddumpPath | buildingFinals && not (null ddumpPath) -> do - distDir <- distRelativeDir - ddumpDir <- parseRelDir ddumpPath - - logDebug $ fromString ("ddump-dir: " <> toFilePath ddumpDir) - logDebug $ fromString ("dist-dir: " <> toFilePath distDir) - - runConduitRes - $ CF.sourceDirectoryDeep False (toFilePath distDir) - .| CL.filter (isInfixOf ".dump-") - .| CL.mapM_ (\src -> liftIO $ do - parentDir <- parent <$> parseRelDir src - destBaseDir <- (ddumpDir ) <$> stripProperPrefix distDir parentDir - -- exclude .stack-work dir - unless (".stack-work" `isInfixOf` toFilePath destBaseDir) $ do - ensureDir destBaseDir - src' <- parseRelFile src - copyFile src' (destBaseDir filename 
src')) - _ -> pure () - - let (installedPkgDb, installedDumpPkgsTVar) = - case taskLocation task of - Snap -> - ( bcoSnapDB eeBaseConfigOpts - , eeSnapshotDumpPkgs ) - Local -> - ( bcoLocalDB eeBaseConfigOpts - , eeLocalDumpPkgs ) - let ident = PackageIdentifier (packageName package) (packageVersion package) - -- only return the sublibs to cache them if we also cache the main lib (that is, if it exists) - (mpkgid, sublibsPkgIds) <- case packageLibraries package of - HasLibraries _ -> do - sublibsPkgIds <- fmap catMaybes $ - forM (Set.toList $ packageInternalLibraries package) $ \sublib -> do - -- z-haddock-library-z-attoparsec for internal lib attoparsec of haddock-library - let sublibName = T.concat ["z-", T.pack $ packageNameString $ packageName package, "-z-", sublib] - case parsePackageName $ T.unpack sublibName of - Nothing -> return Nothing -- invalid lib, ignored - Just subLibName -> loadInstalledPkg [installedPkgDb] installedDumpPkgsTVar subLibName - - mpkgid <- loadInstalledPkg [installedPkgDb] installedDumpPkgsTVar (packageName package) - case mpkgid of - Nothing -> throwM $ Couldn'tFindPkgId $ packageName package - Just pkgid -> return (Library ident pkgid Nothing, sublibsPkgIds) - NoLibraries -> do - markExeInstalled (taskLocation task) taskProvides -- TODO unify somehow with writeFlagCache? - return (Executable ident, []) -- don't return sublibs in this case - - case taskType of - TTRemotePackage Immutable _ loc -> - writePrecompiledCache - eeBaseConfigOpts - loc - (configCacheOpts cache) - (configCacheHaddock cache) - (configCacheDeps cache) - mpkgid sublibsPkgIds (packageExes package) - _ -> return () - - case taskType of - -- For packages from a package index, pkgDir is in the tmp - -- directory. We eagerly delete it if no other tasks - -- require it, to reduce space usage in tmp (#3018). 
- TTRemotePackage{} -> do - let remaining = filter (\(ActionId x _) -> x == taskProvides) (Set.toList acRemaining) - when (null remaining) $ removeDirRecur pkgDir - TTLocalMutable{} -> return () - - return mpkgid - - loadInstalledPkg pkgDbs tvar name = do - pkgexe <- getGhcPkgExe - dps <- ghcPkgDescribe pkgexe name pkgDbs $ conduitDumpPackage .| CL.consume - case dps of - [] -> return Nothing - [dp] -> do - liftIO $ atomically $ modifyTVar' tvar (Map.insert (dpGhcPkgId dp) dp) - return $ Just (dpGhcPkgId dp) - _ -> error $ "singleBuild: invariant violated: multiple results when describing installed package " ++ show (name, dps) - --- | Get the build status of all the package executables. Do so by --- testing whether their expected output file exists, e.g. --- --- .stack-work/dist/x86_64-osx/Cabal-1.22.4.0/build/alpha/alpha --- .stack-work/dist/x86_64-osx/Cabal-1.22.4.0/build/alpha/alpha.exe --- .stack-work/dist/x86_64-osx/Cabal-1.22.4.0/build/alpha/alpha.jsexe/ (NOTE: a dir) -getExecutableBuildStatuses - :: HasEnvConfig env - => Package -> Path Abs Dir -> RIO env (Map Text ExecutableBuildStatus) -getExecutableBuildStatuses package pkgDir = do - distDir <- distDirFromDir pkgDir - platform <- view platformL - fmap - M.fromList - (mapM (checkExeStatus platform distDir) (Set.toList (packageExes package))) - --- | Check whether the given executable is defined in the given dist directory. 
-checkExeStatus - :: HasLogFunc env - => Platform - -> Path b Dir - -> Text - -> RIO env (Text, ExecutableBuildStatus) -checkExeStatus platform distDir name = do - exename <- parseRelDir (T.unpack name) - exists <- checkPath (distDir relDirBuild exename) - pure - ( name - , if exists - then ExecutableBuilt - else ExecutableNotBuilt) - where - checkPath base = - case platform of - Platform _ Windows -> do - fileandext <- parseRelFile (file ++ ".exe") - doesFileExist (base fileandext) - _ -> do - fileandext <- parseRelFile file - doesFileExist (base fileandext) - where - file = T.unpack name - --- | Check if any unlisted files have been found, and add them to the build cache. -checkForUnlistedFiles :: HasEnvConfig env => TaskType -> Path Abs Dir -> RIO env [PackageWarning] -checkForUnlistedFiles (TTLocalMutable lp) pkgDir = do - caches <- runMemoizedWith $ lpNewBuildCaches lp - (addBuildCache,warnings) <- - addUnlistedToBuildCache - (lpPackage lp) - (lpCabalFile lp) - (lpComponents lp) - caches - forM_ (M.toList addBuildCache) $ \(component, newToCache) -> do - let cache = Map.findWithDefault Map.empty component caches - writeBuildCache pkgDir component $ - Map.unions (cache : newToCache) - return warnings -checkForUnlistedFiles TTRemotePackage{} _ = return [] - --- | Implements running a package's tests. Also handles producing --- coverage reports if coverage is enabled. -singleTest :: HasEnvConfig env - => TestOpts - -> [Text] - -> ActionContext - -> ExecuteEnv - -> Task - -> InstalledMap - -> RIO env () -singleTest topts testsToRun ac ee task installedMap = do - -- FIXME: Since this doesn't use cabal, we should be able to avoid using a - -- fullblown 'withSingleContext'. 
- (allDepsMap, _cache) <- getConfigCache ee task installedMap True False - mcurator <- view $ buildConfigL.to bcCurator - let pname = pkgName $ taskProvides task - expectFailure = expectTestFailure pname mcurator - withSingleContext ac ee task allDepsMap (Just "test") $ \package _cabalfp pkgDir _cabal announce outputType -> do - config <- view configL - let needHpc = toCoverage topts - - toRun <- - if toDisableRun topts - then do - announce "Test running disabled by --no-run-tests flag." - return False - else if toRerunTests topts - then return True - else do - status <- getTestStatus pkgDir - case status of - TSSuccess -> do - unless (null testsToRun) $ announce "skipping already passed test" - return False - TSFailure - | expectFailure -> do - announce "skipping already failed test that's expected to fail" - return False - | otherwise -> do - announce "rerunning previously failed test" - return True - TSUnknown -> return True - - when toRun $ do - buildDir <- distDirFromDir pkgDir - hpcDir <- hpcDirFromDir pkgDir - when needHpc (ensureDir hpcDir) - - let suitesToRun - = [ testSuitePair - | testSuitePair <- Map.toList $ packageTests package - , let testName = fst testSuitePair - , testName `elem` testsToRun - ] - - errs <- liftM Map.unions $ forM suitesToRun $ \(testName, suiteInterface) -> do - let stestName = T.unpack testName - (testName', isTestTypeLib) <- - case suiteInterface of - C.TestSuiteLibV09{} -> return (stestName ++ "Stub", True) - C.TestSuiteExeV10{} -> return (stestName, False) - interface -> throwM (TestSuiteTypeUnsupported interface) - - let exeName = testName' ++ - case configPlatform config of - Platform _ Windows -> ".exe" - _ -> "" - tixPath <- liftM (pkgDir ) $ parseRelFile $ exeName ++ ".tix" - exePath <- liftM (buildDir ) $ parseRelFile $ "build/" ++ testName' ++ "/" ++ exeName - exists <- doesFileExist exePath - -- in Stack.Package.packageFromPackageDescription we filter out - -- package itself of any dependencies so any tests requiring 
loading - -- of their own package library will fail - -- so to prevent this we return it back here but unfortunately unconditionally - installed <- case Map.lookup pname installedMap of - Just (_, installed) -> pure $ Just installed - Nothing -> do - idMap <- liftIO $ readTVarIO (eeGhcPkgIds ee) - pure $ Map.lookup (taskProvides task) idMap - let pkgGhcIdList = case installed of - Just (Library _ ghcPkgId _) -> [ghcPkgId] - _ -> [] - -- doctest relies on template-haskell in QuickCheck-based tests - thGhcId <- case find ((== "template-haskell") . pkgName . dpPackageIdent. snd) - (Map.toList $ eeGlobalDumpPkgs ee) of - Just (ghcId, _) -> return ghcId - Nothing -> error "template-haskell is a wired-in GHC boot library but it wasn't found" - -- env variable GHC_ENVIRONMENT is set for doctest so module names for - -- packages with proper dependencies should no longer get ambiguous - -- see e.g. https://github.com/doctest/issues/119 - -- also we set HASKELL_DIST_DIR to a package dist directory so - -- doctest will be able to load modules autogenerated by Cabal - let setEnv f pc = modifyEnvVars pc $ \envVars -> - Map.insert "HASKELL_DIST_DIR" (T.pack $ toFilePath buildDir) $ - Map.insert "GHC_ENVIRONMENT" (T.pack f) envVars - fp = toFilePath $ eeTempDir ee testGhcEnvRelFile - snapDBPath = toFilePathNoTrailingSep (bcoSnapDB $ eeBaseConfigOpts ee) - localDBPath = toFilePathNoTrailingSep (bcoLocalDB $ eeBaseConfigOpts ee) - ghcEnv = - "clear-package-db\n" <> - "global-package-db\n" <> - "package-db " <> fromString snapDBPath <> "\n" <> - "package-db " <> fromString localDBPath <> "\n" <> - foldMap (\ghcId -> "package-id " <> RIO.display (unGhcPkgId ghcId) <> "\n") - (pkgGhcIdList ++ thGhcId:M.elems allDepsMap) - writeFileUtf8Builder fp ghcEnv - menv <- liftIO $ setEnv fp =<< configProcessContextSettings config EnvSettings - { esIncludeLocals = taskLocation task == Local - , esIncludeGhcPackagePath = True - , esStackExe = True - , esLocaleUtf8 = False - , esKeepGhcRts = False 
- } - let emptyResult = Map.singleton testName Nothing - withProcessContext menv $ if exists - then do - -- We clear out the .tix files before doing a run. - when needHpc $ do - tixexists <- doesFileExist tixPath - when tixexists $ - logWarn ("Removing HPC file " <> fromString (toFilePath tixPath)) - liftIO $ ignoringAbsence (removeFile tixPath) - - let args = toAdditionalArgs topts - argsDisplay = case args of - [] -> "" - _ -> ", args: " <> T.intercalate " " (map showProcessArgDebug args) - announce $ "test (suite: " <> RIO.display testName <> RIO.display argsDisplay <> ")" - - -- Clear "Progress: ..." message before - -- redirecting output. - case outputType of - OTConsole _ -> do - logStickyDone "" - liftIO $ hFlush stdout - liftIO $ hFlush stderr - OTLogFile _ _ -> pure () - - let output = - case outputType of - OTConsole Nothing -> Nothing <$ inherit - OTConsole (Just prefix) -> fmap - (\src -> Just $ runConduit $ src .| - CT.decodeUtf8Lenient .| - CT.lines .| - CL.map stripCR .| - CL.mapM_ (\t -> logInfo $ prefix <> RIO.display t)) - createSource - OTLogFile _ h -> Nothing <$ useHandleOpen h - optionalTimeout action - | Just maxSecs <- toMaximumTimeSeconds topts, maxSecs > 0 = do - timeout (maxSecs * 1000000) action - | otherwise = Just <$> action - - mec <- withWorkingDir (toFilePath pkgDir) $ - optionalTimeout $ proc (toFilePath exePath) args $ \pc0 -> do - stdinBS <- - if isTestTypeLib - then do - logPath <- buildLogPath package (Just stestName) - ensureDir (parent logPath) - pure $ BL.fromStrict - $ encodeUtf8 $ fromString $ - show (logPath, mkUnqualComponentName (T.unpack testName)) - else pure mempty - let pc = setStdin (byteStringInput stdinBS) - $ setStdout output - $ setStderr output - pc0 - withProcessWait pc $ \p -> do - case (getStdout p, getStderr p) of - (Nothing, Nothing) -> pure () - (Just x, Just y) -> concurrently_ x y - (x, y) -> assert False $ concurrently_ (fromMaybe (pure ()) x) (fromMaybe (pure ()) y) - waitExitCode p - -- Add a 
trailing newline, incase the test - -- output didn't finish with a newline. - case outputType of - OTConsole Nothing -> logInfo "" - _ -> pure () - -- Move the .tix file out of the package - -- directory into the hpc work dir, for - -- tidiness. - when needHpc $ - updateTixFile (packageName package) tixPath testName' - let announceResult result = announce $ "Test suite " <> RIO.display testName <> " " <> result - case mec of - Just ExitSuccess -> do - announceResult "passed" - return Map.empty - Nothing -> do - announceResult "timed out" - if expectFailure - then return Map.empty - else return $ Map.singleton testName Nothing - Just ec -> do - announceResult "failed" - if expectFailure - then return Map.empty - else return $ Map.singleton testName (Just ec) - else do - unless expectFailure $ logError $ displayShow $ TestSuiteExeMissing - (packageBuildType package == C.Simple) - exeName - (packageNameString (packageName package)) - (T.unpack testName) - return emptyResult - - when needHpc $ do - let testsToRun' = map f testsToRun - f tName = - case Map.lookup tName (packageTests package) of - Just C.TestSuiteLibV09{} -> tName <> "Stub" - _ -> tName - generateHpcReport pkgDir package testsToRun' - - bs <- liftIO $ - case outputType of - OTConsole _ -> return "" - OTLogFile logFile h -> do - hClose h - S.readFile $ toFilePath logFile - - let succeeded = Map.null errs - unless (succeeded || expectFailure) $ throwM $ TestSuiteFailure - (taskProvides task) - errs - (case outputType of - OTLogFile fp _ -> Just fp - OTConsole _ -> Nothing) - bs - - setTestStatus pkgDir $ if succeeded then TSSuccess else TSFailure - --- | Implements running a package's benchmarks. 
-singleBench :: HasEnvConfig env - => BenchmarkOpts - -> [Text] - -> ActionContext - -> ExecuteEnv - -> Task - -> InstalledMap - -> RIO env () -singleBench beopts benchesToRun ac ee task installedMap = do - (allDepsMap, _cache) <- getConfigCache ee task installedMap False True - withSingleContext ac ee task allDepsMap (Just "bench") $ \_package _cabalfp _pkgDir cabal announce _outputType -> do - let args = map T.unpack benchesToRun <> maybe [] - ((:[]) . ("--benchmark-options=" <>)) - (beoAdditionalArgs beopts) - - toRun <- - if beoDisableRun beopts - then do - announce "Benchmark running disabled by --no-run-benchmarks flag." - return False - else do - return True - - when toRun $ do - announce "benchmarks" - cabal CloseOnException KeepTHLoading ("bench" : args) - -data ExcludeTHLoading = ExcludeTHLoading | KeepTHLoading -data ConvertPathsToAbsolute = ConvertPathsToAbsolute | KeepPathsAsIs --- | special marker for expected failures in curator builds, using those --- we need to keep log handle open as build continues further even after a failure -data KeepOutputOpen = KeepOpen | CloseOnException deriving Eq - --- | Strip Template Haskell "Loading package" lines and making paths absolute. -mungeBuildOutput :: forall m. MonadIO m - => ExcludeTHLoading -- ^ exclude TH loading? - -> ConvertPathsToAbsolute -- ^ convert paths to absolute? - -> Path Abs Dir -- ^ package's root directory - -> ActualCompiler -- ^ compiler we're building with - -> ConduitM Text Text m () -mungeBuildOutput excludeTHLoading makeAbsolute pkgDir compilerVer = void $ - CT.lines - .| CL.map stripCR - .| CL.filter (not . isTHLoading) - .| filterLinkerWarnings - .| toAbsolute - where - -- | Is this line a Template Haskell "Loading package" line - -- ByteString - isTHLoading :: Text -> Bool - isTHLoading = case excludeTHLoading of - KeepTHLoading -> const False - ExcludeTHLoading -> \bs -> - "Loading package " `T.isPrefixOf` bs && - ("done." 
`T.isSuffixOf` bs || "done.\r" `T.isSuffixOf` bs) - - filterLinkerWarnings :: ConduitM Text Text m () - filterLinkerWarnings - -- Check for ghc 7.8 since it's the only one prone to producing - -- linker warnings on Windows x64 - | getGhcVersion compilerVer >= mkVersion [7, 8] = doNothing - | otherwise = CL.filter (not . isLinkerWarning) - - isLinkerWarning :: Text -> Bool - isLinkerWarning str = - ("ghc.exe: warning:" `T.isPrefixOf` str || "ghc.EXE: warning:" `T.isPrefixOf` str) && - "is linked instead of __imp_" `T.isInfixOf` str - - -- | Convert GHC error lines with file paths to have absolute file paths - toAbsolute :: ConduitM Text Text m () - toAbsolute = case makeAbsolute of - KeepPathsAsIs -> doNothing - ConvertPathsToAbsolute -> CL.mapM toAbsolutePath - - toAbsolutePath :: Text -> m Text - toAbsolutePath bs = do - let (x, y) = T.break (== ':') bs - mabs <- - if isValidSuffix y - then liftIO $ liftM (fmap ((T.takeWhile isSpace x <>) . T.pack . toFilePath)) $ - forgivingAbsence (resolveFile pkgDir (T.unpack $ T.dropWhile isSpace x)) `catch` - \(_ :: PathException) -> return Nothing - else return Nothing - case mabs of - Nothing -> return bs - Just fp -> return $ fp `T.append` y - - doNothing :: ConduitM Text Text m () - doNothing = awaitForever yield - - -- | Match the error location format at the end of lines - isValidSuffix = isRight . parseOnly lineCol - lineCol = char ':' - >> choice - [ num >> char ':' >> num >> optional (char '-' >> num) >> return () - , char '(' >> num >> char ',' >> num >> string ")-(" >> num >> char ',' >> num >> char ')' >> return () - ] - >> char ':' - >> return () - where num = some digit - --- | Whether to prefix log lines with timestamps. -data PrefixWithTimestamps = PrefixWithTimestamps | WithoutTimestamps - --- | Write stream of lines to handle, but adding timestamps. 
-sinkWithTimestamps :: MonadIO m => PrefixWithTimestamps -> Handle -> ConduitT ByteString Void m () -sinkWithTimestamps prefixWithTimestamps h = - case prefixWithTimestamps of - PrefixWithTimestamps -> - CB.lines .| CL.mapM addTimestamp .| CL.map (<> "\n") .| sinkHandle h - WithoutTimestamps -> sinkHandle h - where - addTimestamp theLine = do - now <- liftIO getZonedTime - pure (formatZonedTimeForLog now <> " " <> theLine) - --- | Format a time in ISO8601 format. We choose ZonedTime over UTCTime --- because a user expects to see logs in their local time, and would --- be confused to see UTC time. Stack's debug logs also use the local --- time zone. -formatZonedTimeForLog :: ZonedTime -> ByteString -formatZonedTimeForLog = S8.pack . formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%S%6Q" - --- | Find the Setup.hs or Setup.lhs in the given directory. If none exists, --- throw an exception. -getSetupHs :: Path Abs Dir -- ^ project directory - -> IO (Path Abs File) -getSetupHs dir = do - exists1 <- doesFileExist fp1 - if exists1 - then return fp1 - else do - exists2 <- doesFileExist fp2 - if exists2 - then return fp2 - else throwM $ NoSetupHsFound dir - where - fp1 = dir relFileSetupHs - fp2 = dir relFileSetupLhs - --- Do not pass `-hpcdir` as GHC option if the coverage is not enabled. --- This helps running stack-compiled programs with dynamic interpreters like `hint`. --- Cfr: https://github.com/commercialhaskell/stack/issues/997 -extraBuildOptions :: (HasEnvConfig env, HasRunner env) - => WhichCompiler -> BuildOpts -> RIO env [String] -extraBuildOptions wc bopts = do - colorOpt <- appropriateGhcColorFlag - let optsFlag = compilerOptionsCabalFlag wc - baseOpts = maybe "" (" " ++) colorOpt - if toCoverage (boptsTestOpts bopts) - then do - hpcIndexDir <- toFilePathNoTrailingSep <$> hpcRelativeDir - return [optsFlag, "-hpcdir " ++ hpcIndexDir ++ baseOpts] - else - return [optsFlag, baseOpts] - --- Library, internal and foreign libraries and executable build components. 
-primaryComponentOptions :: Map Text ExecutableBuildStatus -> LocalPackage -> [String] -primaryComponentOptions executableBuildStatuses lp = - -- TODO: get this information from target parsing instead, - -- which will allow users to turn off library building if - -- desired - (case packageLibraries package of - NoLibraries -> [] - HasLibraries names -> - map T.unpack - $ T.append "lib:" (T.pack (packageNameString (packageName package))) - : map (T.append "flib:") (Set.toList names)) ++ - map (T.unpack . T.append "lib:") (Set.toList $ packageInternalLibraries package) ++ - map (T.unpack . T.append "exe:") (Set.toList $ exesToBuild executableBuildStatuses lp) - where - package = lpPackage lp - --- | History of this function: --- --- * Normally it would do either all executables or if the user --- specified requested components, just build them. Afterwards, due --- to this Cabal bug , --- we had to make Stack build all executables every time. --- --- * In this --- was flagged up as very undesirable behavior on a large project, --- hence the behavior below that we build all executables once --- (modulo success), and thereafter pay attention to user-wanted --- components. --- -exesToBuild :: Map Text ExecutableBuildStatus -> LocalPackage -> Set Text -exesToBuild executableBuildStatuses lp = - if cabalIsSatisfied executableBuildStatuses && lpWanted lp - then exeComponents (lpComponents lp) - else packageExes (lpPackage lp) - --- | Do the current executables satisfy Cabal's bugged out requirements? -cabalIsSatisfied :: Map k ExecutableBuildStatus -> Bool -cabalIsSatisfied = all (== ExecutableBuilt) . M.elems - --- Test-suite and benchmark build components. -finalComponentOptions :: LocalPackage -> [String] -finalComponentOptions lp = - map (T.unpack . 
renderComponent) $ - Set.toList $ - Set.filter (\c -> isCTest c || isCBench c) (lpComponents lp) + ) $ + -- These are the "final" actions - running test suites and benchmarks, + -- unless --no-run-tests or --no-run-benchmarks is enabled. + ( if Set.null tests || not runTests + then id + else (:) Action + { actionId = ActionId pkgId ATRunTests + , actionDeps = finalDeps + , action = \ac -> withLock mtestLock $ runInBase $ + singleTest topts (Set.toList tests) ac ee task installedMap + -- Always allow tests tasks to run concurrently with other tasks, + -- particularly build tasks. Note that 'mtestLock' can optionally + -- make it so that only one test is run at a time. + , concurrency = ConcurrencyAllowed + } + ) $ + ( if Set.null benches || not runBenchmarks + then id + else (:) Action + { actionId = ActionId pkgId ATRunBenchmarks + , actionDeps = finalDeps + , action = \ac -> runInBase $ + singleBench + beopts + (Set.toList benches) + ac + ee + task + installedMap + -- Never run benchmarks concurrently with any other task, see + -- #3663 + , concurrency = ConcurrencyDisallowed + } + ) + [] + where + pkgId = taskProvides task + comps = taskComponents task + tests = testComponents comps + benches = benchComponents comps + finalDeps = + if task.allInOne + then addBuild mempty + else Set.singleton (ActionId pkgId ATBuildFinal) + addBuild = + case mbuild of + Nothing -> id + Just _ -> Set.insert $ ActionId pkgId ATBuild + withLock Nothing f = f + withLock (Just lock) f = withMVar lock $ \() -> f + bopts = ee.buildOpts + topts = bopts.testOpts + beopts = bopts.benchmarkOpts + runTests = topts.runTests + runBenchmarks = beopts.runBenchmarks taskComponents :: Task -> Set NamedComponent taskComponents task = - case taskType task of - TTLocalMutable lp -> lpComponents lp -- FIXME probably just want lpWanted - TTRemotePackage{} -> Set.empty - -expectTestFailure :: PackageName -> Maybe Curator -> Bool -expectTestFailure pname mcurator = - maybe False (Set.member pname . 
curatorExpectTestFailure) mcurator - -expectBenchmarkFailure :: PackageName -> Maybe Curator -> Bool -expectBenchmarkFailure pname mcurator = - maybe False (Set.member pname . curatorExpectBenchmarkFailure) mcurator - -fulfillCuratorBuildExpectations :: - (HasLogFunc env, HasCallStack) - => PackageName - -> Maybe Curator - -> Bool - -> Bool - -> b - -> RIO env b - -> RIO env b -fulfillCuratorBuildExpectations pname mcurator enableTests _ defValue action | enableTests && - expectTestFailure pname mcurator = do - eres <- tryAny action - case eres of - Right res -> do - logWarn $ fromString (packageNameString pname) <> ": unexpected test build success" - return res - Left _ -> return defValue -fulfillCuratorBuildExpectations pname mcurator _ enableBench defValue action | enableBench && - expectBenchmarkFailure pname mcurator = do - eres <- tryAny action - case eres of - Right res -> do - logWarn $ fromString (packageNameString pname) <> ": unexpected benchmark build success" - return res - Left _ -> return defValue -fulfillCuratorBuildExpectations _ _ _ _ _ action = do - action + case task.taskType of + TTLocalMutable lp -> lp.components -- FIXME probably just want lpWanted + TTRemotePackage{} -> Set.empty diff --git a/src/Stack/Build/ExecuteEnv.hs b/src/Stack/Build/ExecuteEnv.hs new file mode 100644 index 0000000000..dfee2c548e --- /dev/null +++ b/src/Stack/Build/ExecuteEnv.hs @@ -0,0 +1,1097 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +{-| +Module : Stack.Build.ExecuteEnv +License : BSD-3-Clause + +Provides all the necessary types and functions for running cabal Setup.hs +commands. Only used in the "Execute" and "ExecutePackage" modules. +-} + +module Stack.Build.ExecuteEnv + ( ExecuteEnv (..) + , withExecuteEnv + , withSingleContext + , ExcludeTHLoading (..) + , KeepOutputOpen (..) 
+ , OutputType (..) + ) where + +import Control.Concurrent.Companion ( Companion, withCompanion ) +import Control.Concurrent.Execute + ( ActionContext (..), ActionId (..), Concurrency (..) ) +import Control.Monad.Extra ( whenJust ) +import Crypto.Hash ( SHA256 (..), hashWith ) +import Data.Attoparsec.Text ( char, choice, digit, parseOnly ) +import qualified Data.Attoparsec.Text as P ( string ) +import qualified Data.ByteArray as Mem ( convert ) +import qualified Data.ByteString as S +import qualified Data.ByteString.Base64.URL as B64URL +import qualified Data.ByteString.Builder ( toLazyByteString ) +import qualified Data.ByteString.Char8 as S8 +import Data.Char ( isSpace ) +import Conduit + ( ConduitT, awaitForever, sinkHandle, withSinkFile + , withSourceFile, yield + ) +import qualified Data.Conduit.Binary as CB +import qualified Data.Conduit.List as CL +import qualified Data.Conduit.Text as CT +import qualified Data.List as L +import qualified Data.Map.Strict as Map +import qualified Data.Set as Set +import qualified Data.Text as T +import Data.Text.Encoding ( decodeUtf8 ) +import Data.Time + ( ZonedTime, defaultTimeLocale, formatTime, getZonedTime ) +import qualified Distribution.PackageDescription as C +import qualified Distribution.Simple.Build.Macros as C +import Distribution.System ( OS (..), Platform (..) 
) +import Distribution.Types.PackageName ( mkPackageName ) +import Distribution.Verbosity ( showForCabal ) +import Distribution.Version ( mkVersion ) +import Path + ( PathException, (), parent, parseRelDir, parseRelFile ) +import Path.Extra ( forgivingResolveFile, toFilePathNoTrailingSep ) +import Path.IO + ( doesDirExist, doesFileExist, ensureDir, ignoringAbsence + , removeFile, renameDir, renameFile + ) +import RIO.Process + ( eceExitCode, proc, runProcess_, setStdout, useHandleOpen + , withWorkingDir + ) +import Stack.Config ( checkOwnership ) +import Stack.Constants + ( cabalPackageName, relDirDist, relDirSetup + , relDirSetupExeCache, relDirSetupExeSrc, relFileBuildLock + , relFileSetupHs, relFileSetupLhs, relFileSetupLower + , relFileSetupMacrosH, setupGhciShimCode, stackProgName + ) +import Stack.Constants.Config ( distDirFromDir, distRelativeDir ) +import Stack.Package ( buildLogPath ) +import Stack.Prelude +import Stack.Types.ApplyGhcOptions ( ApplyGhcOptions (..) ) +import Stack.Types.Build + ( ConvertPathsToAbsolute (..), ExcludeTHLoading (..) + , KeepOutputOpen (..) + ) +import Stack.Types.Build.Exception + ( BuildException (..), BuildPrettyException (..) ) +import Stack.Types.BuildOpts ( BuildOpts (..) ) +import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..) ) +import Stack.Types.BuildOptsMonoid ( CabalVerbosity (..) ) +import Stack.Types.Compiler + ( WhichCompiler (..), compilerVersionString + , getGhcVersion, whichCompilerL + ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), HasCompiler (..), cabalVersionL + , getCompilerPath + ) +import Stack.Types.Config + ( Config (..), HasConfig (..), stackRootL ) +import Stack.Types.ConfigureOpts ( BaseConfigOpts (..) ) +import Stack.Types.Dependency ( DepValue(..) ) +import Stack.Types.DumpLogs ( DumpLogs (..) ) +import Stack.Types.DumpPackage ( DumpPackage (..) 
) +import Stack.Types.EnvConfig + ( HasEnvConfig (..), actualCompilerVersionL + , platformGhcRelDir, shouldForceGhcColorFlag + ) +import Stack.Types.EnvSettings ( EnvSettings (..) ) +import Stack.Types.GhcPkgId ( GhcPkgId, ghcPkgIdString ) +import Stack.Types.Installed ( InstallLocation (..), Installed (..) ) +import Stack.Types.Package + ( LocalPackage (..), Package (..), packageIdentifier ) +import Stack.Types.Plan + ( TaskType (..), taskTypeLocation, taskTypePackageIdentifier + ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.Version ( withinRange ) +import qualified System.Directory as D +import System.Environment ( lookupEnv ) +import System.FileLock + ( SharedExclusive (..), withFileLock, withTryFileLock ) +import System.Semaphore + ( Semaphore, destroySemaphore, freshSemaphore ) + +-- | Type representing environments in which the @Setup.hs@ commands of Cabal +-- (the library) can be executed. +data ExecuteEnv = ExecuteEnv + { installLock :: !(MVar ()) + , buildOpts :: !BuildOpts + , buildOptsCLI :: !BuildOptsCLI + , baseConfigOpts :: !BaseConfigOpts + , ghcPkgIds :: !(TVar (Map PackageIdentifier Installed)) + , tempDir :: !(Path Abs Dir) + , setupHs :: !(Path Abs File) + -- ^ Temporary Setup.hs for simple builds + , setupShimHs :: !(Path Abs File) + -- ^ Temporary SetupShim.hs, to provide access to initial-build-steps + , setupExe :: !(Maybe (Path Abs File)) + -- ^ Compiled version of eeSetupHs + , cabalPkgVer :: !Version + -- ^ The version of the compiler's Cabal boot package. + , totalWanted :: !Int + , locals :: ![LocalPackage] + , globalDB :: !(Path Abs Dir) + , globalDumpPkgs :: !(Map GhcPkgId DumpPackage) + , snapshotDumpPkgs :: !(TVar (Map GhcPkgId DumpPackage)) + , localDumpPkgs :: !(TVar (Map GhcPkgId DumpPackage)) + , logFiles :: !(TChan (Path Abs Dir, Path Abs File)) + , customBuilt :: !(IORef (Set PackageName)) + -- ^ Stores which packages with custom-setup have already had their + -- Setup.hs built. 
+ , largestPackageName :: !(Maybe Int) + -- ^ For nicer interleaved output: track the largest package name size + , pathEnvVar :: !Text + -- ^ Value of the PATH environment variable + , semaphore :: !(Maybe Semaphore) + -- ^ The semaphore that is used for job control, if --semaphore is given + } + +-- | Type representing setup executable circumstances. +data SetupExe + = SimpleSetupExe !(Path Abs File) + -- ^ The build type is Simple and there is a path to an existing setup + -- executable. + | OtherSetupHs !(Path Abs File) + -- ^ Other circumstances with a path to the source code for the setup + -- executable. + +buildSetupArgs :: [String] +buildSetupArgs = + [ "-rtsopts" + , "-threaded" + , "-clear-package-db" + , "-global-package-db" + , "-hide-all-packages" + , "-package" + , "base" + , "-main-is" + , "StackSetupShim.mainOverride" + ] + +simpleSetupCode :: Builder +simpleSetupCode = "import Distribution.Simple\nmain = defaultMain" + +simpleSetupHash :: String +simpleSetupHash = + T.unpack + $ decodeUtf8 + $ S.take 8 + $ B64URL.encode + $ Mem.convert + $ hashWith SHA256 + $ toStrictBytes + $ Data.ByteString.Builder.toLazyByteString + $ encodeUtf8Builder (T.pack (unwords buildSetupArgs)) + <> setupGhciShimCode + <> simpleSetupCode + +-- | Get a compiled Setup exe +getSetupExe :: + HasEnvConfig env + => Path Abs File + -- ^ Setup.hs input file + -> Path Abs File + -- ^ SetupShim.hs input file + -> Path Abs Dir + -- ^ temporary directory + -> RIO env (Maybe (Path Abs File)) +getSetupExe setupHs setupShimHs tmpdir = do + wc <- view $ actualCompilerVersionL . whichCompilerL + platformDir <- platformGhcRelDir + config <- view configL + cabalVersionString <- view $ cabalVersionL . to versionString + actualCompilerVersionString <- + view $ actualCompilerVersionL . 
to compilerVersionString + platform <- view platformL + let baseNameS = concat + [ "Cabal-simple_" + , simpleSetupHash + , "_" + , cabalVersionString + , "_" + , actualCompilerVersionString + ] + exeNameS = baseNameS ++ + case platform of + Platform _ Windows -> ".exe" + _ -> "" + outputNameS = + case wc of + Ghc -> exeNameS + setupDir = + view stackRootL config + relDirSetupExeCache + platformDir + + exePath <- (setupDir ) <$> parseRelFile exeNameS + + exists <- liftIO $ D.doesFileExist $ toFilePath exePath + + if exists + then pure $ Just exePath + else do + tmpExePath <- fmap (setupDir ) $ parseRelFile $ "tmp-" ++ exeNameS + tmpOutputPath <- + fmap (setupDir ) $ parseRelFile $ "tmp-" ++ outputNameS + ensureDir setupDir + let args = buildSetupArgs ++ + [ "-package" + , "Cabal-" ++ cabalVersionString + , toFilePath setupHs + , toFilePath setupShimHs + , "-o" + , toFilePath tmpOutputPath + ] + compilerPath <- getCompilerPath + withWorkingDir (toFilePath tmpdir) $ + proc (toFilePath compilerPath) args (\pc0 -> do + let pc = setStdout (useHandleOpen stderr) pc0 + runProcess_ pc) + `catch` \ece -> + prettyThrowM $ SetupHsBuildFailure + (eceExitCode ece) Nothing compilerPath args Nothing [] + renameFile tmpExePath exePath + pure $ Just exePath + +semaphorePrefix :: String +semaphorePrefix = "stack" + +-- | Execute a function that takes an t'ExecuteEnv'. +withExecuteEnv :: + forall env a. 
HasEnvConfig env + => BuildOpts + -> BuildOptsCLI + -> BaseConfigOpts + -> [LocalPackage] + -> [DumpPackage] + -- ^ global packages + -> [DumpPackage] + -- ^ snapshot packages + -> [DumpPackage] + -- ^ project packages and local extra-deps + -> Maybe Int + -- ^ largest package name, for nicer interleaved output + -> (ExecuteEnv -> RIO env a) + -> RIO env a +withExecuteEnv + buildOpts + buildOptsCLI + baseConfigOpts + locals + globalPackages + snapshotPackages + localPackages + largestPackageName + inner + = createTempDirFunction stackProgName $ \tempDir -> do + installLock <- liftIO $ newMVar () + ghcPkgIds <- liftIO $ newTVarIO Map.empty + config <- view configL + customBuilt <- newIORef Set.empty + -- Create files for simple setup and setup shim, if necessary + let setupSrcDir = + view stackRootL config + relDirSetupExeSrc + ensureDir setupSrcDir + let setupStub = "setup-" ++ simpleSetupHash + setupFileName <- parseRelFile (setupStub ++ ".hs") + setupHiName <- parseRelFile (setupStub ++ ".hi") + setupOName <- parseRelFile (setupStub ++ ".o") + let setupHs = setupSrcDir setupFileName + setupHi = setupSrcDir setupHiName + setupO = setupSrcDir setupOName + setupHsExists <- doesFileExist setupHs + unless setupHsExists $ writeBinaryFileAtomic setupHs simpleSetupCode + let setupShimStub = "setup-shim-" ++ simpleSetupHash + setupShimFileName <- parseRelFile (setupShimStub ++ ".hs") + setupShimHiName <- parseRelFile (setupShimStub ++ ".hi") + setupShimOName <- parseRelFile (setupShimStub ++ ".o") + let setupShimHs = setupSrcDir setupShimFileName + setupShimHi = setupSrcDir setupShimHiName + setupShimO = setupSrcDir setupShimOName + setupShimHsExists <- doesFileExist setupShimHs + unless setupShimHsExists $ + writeBinaryFileAtomic setupShimHs setupGhciShimCode + setupExe <- getSetupExe setupHs setupShimHs tempDir + -- See https://github.com/commercialhaskell/stack/issues/6267. Remove any + -- historical *.hi or *.o files. 
This can be dropped when Stack drops + -- support for the problematic versions of GHC. + ignoringAbsence (removeFile setupHi) + ignoringAbsence (removeFile setupO) + ignoringAbsence (removeFile setupShimHi) + ignoringAbsence (removeFile setupShimO) + compilerVersion <- view actualCompilerVersionL + let ghcVersion = getGhcVersion compilerVersion + cabalPkgVer <- view cabalVersionL + globalDB <- view $ compilerPathsL . to (.globalDB) + let globalDumpPkgs = toDumpPackagesByGhcPkgId globalPackages + snapshotDumpPkgs <- + liftIO $ newTVarIO (toDumpPackagesByGhcPkgId snapshotPackages) + localDumpPkgs <- + liftIO $ newTVarIO (toDumpPackagesByGhcPkgId localPackages) + logFiles <- liftIO $ atomically newTChan + let totalWanted = length $ filter (.wanted) locals + pathEnvVar <- liftIO $ maybe mempty T.pack <$> lookupEnv "PATH" + jobs <- view $ configL . to (.jobs) + let semaphoreSupported = + (cabalPkgVer >= mkVersion [3, 12, 0, 0]) + && (ghcVersion >= mkVersion [9, 8, 1]) + semaphoreUnsupportedWarning = + prettyWarnL + [ "The" + , style Shell "--semaphore" + , flow "flag was specified, which is supported by GHC 9.8.1 or \ + \later with Cabal 3.12.0.0 (a boot package of GHC 9.10.1) \ + \or later. GHC version" + , fromString (versionString ghcVersion) + , flow "and Cabal version" + , fromString (versionString cabalPkgVer) + , flow "was found. The flag will be ignored." + ] + semaphore <- if not buildOpts.semaphore + then pure Nothing + else if semaphoreSupported + then Just <$> liftIO (freshSemaphore semaphorePrefix jobs) + else semaphoreUnsupportedWarning >> pure Nothing + inner ExecuteEnv + { buildOpts + , buildOptsCLI + -- Uncertain as to why we cannot run configures in parallel. This + -- appears to be a Cabal library bug. Original issue: + -- https://github.com/commercialhaskell/stack/issues/84. Ideally + -- we'd be able to remove this. 
+ , installLock + , baseConfigOpts + , ghcPkgIds + , tempDir + , setupHs + , setupShimHs + , setupExe + , cabalPkgVer + , totalWanted + , locals + , globalDB + , globalDumpPkgs + , snapshotDumpPkgs + , localDumpPkgs + , logFiles + , customBuilt + , largestPackageName + , pathEnvVar + , semaphore + } `finally` do + liftIO (whenJust semaphore destroySemaphore) + dumpLogs logFiles totalWanted + where + toDumpPackagesByGhcPkgId = Map.fromList . map (\dp -> (dp.ghcPkgId, dp)) + + createTempDirFunction + | buildOpts.keepTmpFiles = withKeepSystemTempDir + | otherwise = withSystemTempDir + + dumpLogs :: TChan (Path Abs Dir, Path Abs File) -> Int -> RIO env () + dumpLogs chan totalWanted = do + allLogs <- fmap reverse $ liftIO $ atomically drainChan + case allLogs of + -- No log files generated, nothing to dump + [] -> pure () + firstLog:_ -> do + view (configL . to (.dumpLogs)) >>= \case + DumpAllLogs -> mapM_ (dumpLog "") allLogs + DumpWarningLogs -> mapM_ dumpLogIfWarning allLogs + DumpNoLogs + | totalWanted > 1 -> + prettyInfoL + [ flow "Build output has been captured to log files, use" + , style Shell "--dump-logs" + , flow "to see it on the console." + ] + | otherwise -> pure () + prettyInfoL + [ flow "Log files have been written to:" + , pretty (parent (snd firstLog)) + ] + + -- We only strip the colors /after/ we've dumped logs, so that we get pretty + -- colors in our dump output on the terminal. + colors <- shouldForceGhcColorFlag + when colors $ liftIO $ mapM_ (stripColors . 
snd) allLogs + where + drainChan :: STM [(Path Abs Dir, Path Abs File)] + drainChan = + tryReadTChan chan >>= \case + Nothing -> pure [] + Just x -> do + xs <- drainChan + pure $ x:xs + + dumpLogIfWarning :: (Path Abs Dir, Path Abs File) -> RIO env () + dumpLogIfWarning (pkgDir, filepath) = do + firstWarning <- withSourceFile (toFilePath filepath) $ \src -> + runConduit + $ src + .| CT.decodeUtf8Lenient + .| CT.lines + .| CL.map stripCR + .| CL.filter isWarning + .| CL.take 1 + unless (null firstWarning) $ dumpLog " due to warnings" (pkgDir, filepath) + + isWarning :: Text -> Bool + isWarning t = ": Warning:" `T.isSuffixOf` t -- prior to GHC 8 + || ": warning:" `T.isInfixOf` t -- GHC 8 is slightly different + || "mwarning:" `T.isInfixOf` t -- colorized output + + dumpLog :: String -> (Path Abs Dir, Path Abs File) -> RIO env () + dumpLog msgSuffix (pkgDir, filepath) = do + prettyNote $ + fillSep + ( ( fillSep + ( flow "Dumping log file" + : [ flow msgSuffix | not (L.null msgSuffix) ] + ) + <> ":" + ) + : [ pretty filepath <> "." ] + ) + <> line + withSourceFile (toFilePath filepath) $ \src -> + runConduit + $ src + .| CT.decodeUtf8Lenient + .| mungeBuildOutput ExcludeTHLoading ConvertPathsToAbsolute pkgDir + .| CL.mapM_ (logInfo . display) + prettyNote $ + fillSep + [ flow "End of log file:" + , pretty filepath <> "." 
+ ] + <> line + + stripColors :: Path Abs File -> IO () + stripColors fp = do + let colorfp = toFilePath fp ++ "-color" + withSourceFile (toFilePath fp) $ \src -> + withSinkFile colorfp $ \sink -> + runConduit $ src .| sink + withSourceFile colorfp $ \src -> + withSinkFile (toFilePath fp) $ \sink -> + runConduit $ src .| noColors .| sink + + where + noColors = do + CB.takeWhile (/= 27) -- ESC + mnext <- CB.head + whenJust mnext $ \x -> assert (x == 27) $ do + -- Color sequences always end with an m + CB.dropWhile (/= 109) -- m + CB.drop 1 -- drop the m itself + noColors + +-- | Make a padded prefix for log messages +packageNamePrefix :: ExecuteEnv -> PackageName -> String +packageNamePrefix ee name' = + let name = packageNameString name' + paddedName = + case ee.largestPackageName of + Nothing -> name + Just len -> + assert (len >= length name) $ take len $ name ++ L.repeat ' ' + in paddedName <> "> " + +announceTask :: + HasLogFunc env + => ExecuteEnv + -> TaskType + -> Utf8Builder + -> RIO env () +announceTask ee taskType action = logInfo $ + fromString + (packageNamePrefix ee (pkgName (taskTypePackageIdentifier taskType))) + <> action + +prettyAnnounceTask :: + HasTerm env + => ExecuteEnv + -> TaskType + -> StyleDoc + -> RIO env () +prettyAnnounceTask ee taskType action = prettyInfo $ + fromString + (packageNamePrefix ee (pkgName (taskTypePackageIdentifier taskType))) + <> action + +-- | Ensure we're the only action using the directory. See +-- +withLockedDistDir :: + forall env a. 
HasEnvConfig env + => (StyleDoc -> RIO env ()) + -- ^ A pretty announce function + -> Path Abs Dir + -- ^ root directory for package + -> RIO env a + -> RIO env a +withLockedDistDir announce root inner = do + distDir <- distRelativeDir + let lockFP = root distDir relFileBuildLock + ensureDir $ parent lockFP + + mres <- + withRunInIO $ \run -> + withTryFileLock (toFilePath lockFP) Exclusive $ \_lock -> + run inner + + case mres of + Just res -> pure res + Nothing -> do + let complainer :: Companion (RIO env) + complainer delay = do + delay 5000000 -- 5 seconds + announce $ fillSep + [ flow "blocking for directory lock on" + , pretty lockFP + ] + forever $ do + delay 30000000 -- 30 seconds + announce $ fillSep + [ flow "still blocking for directory lock on" + , pretty lockFP <> ";" + , flow "maybe another Stack process is running?" + ] + withCompanion complainer $ + \stopComplaining -> + withRunInIO $ \run -> + withFileLock (toFilePath lockFP) Exclusive $ \_ -> + run $ stopComplaining *> inner + +-- | How we deal with output from GHC, either dumping to a log file or the +-- console (with some prefix). +data OutputType + = OTLogFile !(Path Abs File) !Handle + | OTConsole !(Maybe Utf8Builder) + +-- | This sets up a context for executing build steps which need to run +-- Cabal (via a compiled Setup.hs). In particular it does the following: +-- +-- * Ensures the package exists in the file system, downloading if necessary. +-- +-- * Opens a log file if the built output shouldn't go to stderr. +-- +-- * Ensures that either a simple Setup.hs is built, or the package's +-- custom setup is built. +-- +-- * Provides the user a function with which run the Cabal process. +withSingleContext :: + forall env a. HasEnvConfig env + => ActionContext + -> ExecuteEnv + -> TaskType + -> Map PackageIdentifier GhcPkgId + -- ^ All dependencies' package ids to provide to Setup.hs. 
 -> Maybe String
 -> ( Package -- Package info
 -> Path Abs File -- Cabal file path
 -> Path Abs Dir -- Package root directory file path
 -- Note that the `Path Abs Dir` argument is redundant with the
 -- `Path Abs File` argument, but we provide both to avoid recalculating
 -- `parent` of the `File`.
 -> (KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ())
 -- Function to run Cabal with args
 -> (Utf8Builder -> RIO env ())
 -- A plain 'announce' function, for different build phases
 -> OutputType
 -> RIO env a)
 -> RIO env a
withSingleContext
 ac
 ee
 taskType
 allDeps
 msuffix
 inner0
 = withPackage $ \package cabalFP pkgDir ->
 withOutputType pkgDir package $ \outputType ->
 withCabal package pkgDir outputType $ \cabal ->
 inner0 package cabalFP pkgDir cabal announce outputType
 where
 pkgId = taskTypePackageIdentifier taskType
 announce = announceTask ee taskType
 prettyAnnounce = prettyAnnounceTask ee taskType

 wanted =
 case taskType of
 TTLocalMutable lp -> lp.wanted
 TTRemotePackage{} -> False

 -- Output to the console if this is the last task, and the user asked to build
 -- it specifically. When the action is a 'ConcurrencyDisallowed' action
 -- (benchmarks), then we can also be sure to have exclusive access to the
 -- console, so output is also sent to the console in this case.
 --
 -- See the discussion on #426 for thoughts on sending output to the console
 -- from concurrent tasks. 
+ console = + ( wanted + && all + (\(ActionId ident _) -> ident == pkgId) + (Set.toList ac.remaining) + && ee.totalWanted == 1 + ) + || ac.concurrency == ConcurrencyDisallowed + + withPackage inner = + case taskType of + TTLocalMutable lp -> do + let root = parent lp.cabalFP + withLockedDistDir prettyAnnounce root $ + inner lp.package lp.cabalFP root + TTRemotePackage _ package pkgloc -> do + suffix <- + parseRelDir $ packageIdentifierString $ packageIdentifier package + let dir = ee.tempDir suffix + unpackPackageLocation dir pkgloc + + -- See: https://github.com/commercialhaskell/stack/issues/157 + distDir <- distRelativeDir + let oldDist = dir relDirDist + newDist = dir distDir + exists <- doesDirExist oldDist + when exists $ do + -- Previously used takeDirectory, but that got confused + -- by trailing slashes, see: + -- https://github.com/commercialhaskell/stack/issues/216 + -- + -- Instead, use Path which is a bit more resilient + ensureDir $ parent newDist + renameDir oldDist newDist + + let name = pkgName pkgId + cabalfpRel <- parseRelFile $ packageNameString name ++ ".cabal" + let cabalFP = dir cabalfpRel + inner package cabalFP dir + + withOutputType pkgDir package inner + -- Not in interleaved mode. When building a single wanted package, dump + -- to the console with no prefix. + | console = inner $ OTConsole Nothing + + -- If the user requested interleaved output, dump to the console with a + -- prefix. + | ee.buildOpts.interleavedOutput = inner $ + OTConsole $ Just $ fromString (packageNamePrefix ee package.name) + + -- Neither condition applies, dump to a file. 
+ | otherwise = do + logPath <- buildLogPath package msuffix + ensureDir (parent logPath) + let fp = toFilePath logPath + + -- We only want to dump logs for local non-dependency packages + case taskType of + TTLocalMutable lp | lp.wanted -> + liftIO $ atomically $ writeTChan ee.logFiles (pkgDir, logPath) + _ -> pure () + + withBinaryFile fp WriteMode $ \h -> inner $ OTLogFile logPath h + + withCabal :: + Package + -> Path Abs Dir + -> OutputType + -> ( (KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ()) + -> RIO env a + ) + -> RIO env a + withCabal package pkgDir outputType inner = do + config <- view configL + unless config.allowDifferentUser $ + checkOwnership (pkgDir config.workDir) + let envSettings = EnvSettings + { includeLocals = taskTypeLocation taskType == Local + , includeGhcPackagePath = False + , stackExe = False + , localeUtf8 = True + , keepGhcRts = False + } + menv <- liftIO $ config.processContextSettings envSettings + distRelativeDir' <- distRelativeDir + setupexehs <- + -- Avoid broken Setup.hs files causing problems for simple build + -- types, see: + -- https://github.com/commercialhaskell/stack/issues/370 + case (package.buildType, ee.setupExe) of + (C.Simple, Just setupExe) -> pure $ SimpleSetupExe setupExe + _ -> liftIO $ OtherSetupHs <$> getSetupHs pkgDir + inner $ \keepOutputOpen stripTHLoading args -> do + let cabalPackageArg + -- Omit cabal package dependency when building + -- Cabal. See + -- https://github.com/commercialhaskell/stack/issues/1356 + | package.name == mkPackageName "Cabal" = [] + | otherwise = + ["-package=" ++ packageIdentifierString + (PackageIdentifier cabalPackageName + ee.cabalPkgVer)] + packageDBArgs = + ( "-clear-package-db" + : "-global-package-db" + : map + (("-package-db=" ++) . 
toFilePathNoTrailingSep) + ee.baseConfigOpts.extraDBs + ) ++ + ( ( "-package-db=" + ++ toFilePathNoTrailingSep ee.baseConfigOpts.snapDB + ) + : ( "-package-db=" + ++ toFilePathNoTrailingSep ee.baseConfigOpts.localDB + ) + : ["-hide-all-packages"] + ) + + warnCustomNoDeps :: RIO env () + warnCustomNoDeps = + case (taskType, package.buildType) of + (TTLocalMutable lp, C.Custom) | lp.wanted -> + prettyWarnL + [ flow "Package" + , fromPackageName package.name + , flow "uses a custom Cabal build, but does not use a \ + \custom-setup stanza" + ] + _ -> pure () + + getPackageArgs :: Path Abs Dir -> RIO env [String] + getPackageArgs setupDir = + case package.setupDeps of + -- The package is using the Cabal custom-setup configuration + -- introduced in Cabal 1.24. In this case, the package is + -- providing an explicit list of dependencies, and we should + -- simply use all of them. + Just customSetupDeps -> do + cabalPackageArg' <- + if Map.member (mkPackageName "Cabal") customSetupDeps + then pure [] + else do + prettyWarnL + [ style Current (fromPackageName package.name) + , flow "has a" + , style Shell "setup-depends" + , flow "field, but it does not mention a" + , style Current "Cabal" + , flow "dependency. Stack customizes setup using \ + \Cabal, so it has added the GHC boot package as \ + \a dependency." + ] + pure cabalPackageArg + matchedDeps <- + forM (Map.toList customSetupDeps) $ \(name, depValue) -> do + let matches (PackageIdentifier name' version) = + name == name' + && version `withinRange` depValue.versionRange + case filter (matches . fst) (Map.toList allDeps) of + x:xs -> do + unless (null xs) $ + prettyWarnL + [ flow "Found multiple installed packages for \ + \custom-setup dep:" + , style Current (fromPackageName name) <> "." + ] + pure ("-package-id=" ++ ghcPkgIdString (snd x), Just (fst x)) + [] -> do + prettyWarnL + [ flow "Could not find custom-setup dep:" + , style Current (fromPackageName name) <> "." 
+ ] + pure ("-package=" ++ packageNameString name, Nothing) + let depsArgs = map fst matchedDeps + -- Generate setup_macros.h and provide it to ghc + let macroDeps = mapMaybe snd matchedDeps + cppMacrosFile = setupDir relFileSetupMacrosH + cppArgs = + ["-optP-include", "-optP" ++ toFilePath cppMacrosFile] + writeBinaryFileAtomic + cppMacrosFile + ( encodeUtf8Builder + ( T.pack + ( C.generatePackageVersionMacros + package.version + macroDeps + ) + ) + ) + pure (packageDBArgs ++ depsArgs ++ cabalPackageArg' ++ cppArgs) + + -- This branch is usually taken for builds, and is always taken + -- for `stack sdist`. + -- + -- This approach is debatable. It adds access to the snapshot + -- package database for Cabal. There are two possible objections: + -- + -- 1. This doesn't isolate the build enough; arbitrary other + -- packages available could cause the build to succeed or fail. + -- + -- 2. This doesn't provide enough packages: we should also + -- include the local database when building local packages. + -- + -- Currently, this branch is only taken via `stack sdist` or when + -- explicitly requested in the stack.yaml file. + Nothing -> do + warnCustomNoDeps + let packageDBArgs' = case package.buildType of + -- The Configure build type is very similar to Simple. As + -- such, Stack builds the setup executable in much the + -- same way as it would in the case of Simple. + C.Configure -> + [ "-hide-all-packages" + , "-package base" + ] + -- NOTE: This is different from packageDBArgs above in + -- that it does not include the local database and does + -- not pass in the -hide-all-packages argument + _ -> + map + (("-package-db=" ++) . 
toFilePathNoTrailingSep) + ee.baseConfigOpts.extraDBs + <> [ "-package-db=" + <> toFilePathNoTrailingSep ee.baseConfigOpts.snapDB + ] + pure $ + [ "-clear-package-db" + , "-global-package-db" + ] + <> packageDBArgs' + <> cabalPackageArg + + setupArgs = + ("--builddir=" ++ toFilePathNoTrailingSep distRelativeDir') : args + + runExe :: Path Abs File -> [String] -> RIO env () + runExe exeName fullArgs = do + runAndOutput `catch` \ece -> do + (mlogFile, bss) <- + case outputType of + OTConsole _ -> pure (Nothing, []) + OTLogFile logFile h -> + if keepOutputOpen == KeepOpen + then + pure (Nothing, []) -- expected failure build continues further + else do + liftIO $ hClose h + fmap (Just logFile,) $ withSourceFile (toFilePath logFile) $ + \src -> + runConduit + $ src + .| CT.decodeUtf8Lenient + .| mungeBuildOutput stripTHLoading makeAbsolute pkgDir + .| CL.consume + prettyThrowM $ CabalExitedUnsuccessfully + (eceExitCode ece) pkgId exeName fullArgs mlogFile bss + where + runAndOutput :: RIO env () + runAndOutput = withWorkingDir (toFilePath pkgDir) $ + withProcessContext menv $ case outputType of + OTLogFile _ h -> do + let prefixWithTimestamps = + if config.prefixTimestamps + then PrefixWithTimestamps + else WithoutTimestamps + void $ sinkProcessStderrStdout (toFilePath exeName) fullArgs + (sinkWithTimestamps prefixWithTimestamps h) + (sinkWithTimestamps prefixWithTimestamps h) + OTConsole mprefix -> + let prefix = fromMaybe mempty mprefix + in void $ sinkProcessStderrStdout + (toFilePath exeName) + fullArgs + (outputSink KeepTHLoading LevelWarn prefix) + (outputSink stripTHLoading LevelInfo prefix) + outputSink :: + HasCallStack + => ExcludeTHLoading + -> LogLevel + -> Utf8Builder + -> ConduitM S.ByteString Void (RIO env) () + outputSink excludeTH level prefix = + CT.decodeUtf8Lenient + .| mungeBuildOutput excludeTH makeAbsolute pkgDir + .| CL.mapM_ (logGeneric "" level . (prefix <>) . 
display) + -- If users want control, we should add a config option for this + makeAbsolute :: ConvertPathsToAbsolute + makeAbsolute = case stripTHLoading of + ExcludeTHLoading -> ConvertPathsToAbsolute + KeepTHLoading -> KeepPathsAsIs + + exeName <- case setupexehs of + SimpleSetupExe setupExe -> pure setupExe + OtherSetupHs setuphs -> do + distDir <- distDirFromDir pkgDir + let setupDir = distDir relDirSetup + outputFile = setupDir relFileSetupLower + customBuilt <- liftIO $ readIORef ee.customBuilt + if Set.member package.name customBuilt + then pure outputFile + else do + ensureDir setupDir + compilerPath <- view $ compilerPathsL . to (.compiler) + packageArgs <- getPackageArgs setupDir + runExe compilerPath $ + [ "--make" + , "-odir", toFilePathNoTrailingSep setupDir + , "-hidir", toFilePathNoTrailingSep setupDir + , "-i", "-i." + ] + <> packageArgs + <> [ toFilePath setuphs + , toFilePath ee.setupShimHs + , "-main-is" + , "StackSetupShim.mainOverride" + , "-o", toFilePath outputFile + , "-threaded" + ] + -- Apply GHC options + -- https://github.com/commercialhaskell/stack/issues/4526 + <> map + T.unpack + ( Map.findWithDefault + [] + AGOEverything + config.ghcOptionsByCat + <> case config.applyGhcOptions of + AGOEverything -> ee.buildOptsCLI.ghcOptions + AGOTargets -> [] + AGOLocals -> [] + ) + + liftIO $ atomicModifyIORef' ee.customBuilt $ + \oldCustomBuilt -> + (Set.insert package.name oldCustomBuilt, ()) + pure outputFile + let cabalVerboseArg = + let CabalVerbosity cv = ee.buildOpts.cabalVerbose + in "--verbose=" <> showForCabal cv + runExe exeName $ cabalVerboseArg:setupArgs + +-- | Strip Template Haskell "Loading package" lines and making paths absolute. +mungeBuildOutput :: + forall m. (MonadIO m, MonadUnliftIO m) + => ExcludeTHLoading + -- ^ exclude TH loading? + -> ConvertPathsToAbsolute + -- ^ convert paths to absolute? 
 -> Path Abs Dir
 -- ^ package's root directory
 -> ConduitM Text Text m ()
mungeBuildOutput excludeTHLoading makeAbsolute pkgDir = void $
 CT.lines
 .| CL.map stripCR
 .| CL.filter (not . isTHLoading)
 .| filterLinkerWarnings
 .| toAbsolute
 where
 -- | Is this line a Template Haskell "Loading package" line?
 -- (matches both "done." and CR-terminated "done.\r" endings)
 isTHLoading :: Text -> Bool
 isTHLoading = case excludeTHLoading of
 KeepTHLoading -> const False
 ExcludeTHLoading -> \bs ->
 "Loading package " `T.isPrefixOf` bs &&
 ("done." `T.isSuffixOf` bs || "done.\r" `T.isSuffixOf` bs)

 filterLinkerWarnings :: ConduitM Text Text m ()
 filterLinkerWarnings =
 -- Currently a pass-through; the ghc 7.8 Windows x64 linker-warning
 -- filtering that once lived here is no longer performed.
 doNothing

 -- | Convert GHC error lines with file paths to have absolute file paths
 toAbsolute :: ConduitM Text Text m ()
 toAbsolute = case makeAbsolute of
 KeepPathsAsIs -> doNothing
 ConvertPathsToAbsolute -> CL.mapM toAbsolutePath

 toAbsolutePath :: Text -> m Text
 toAbsolutePath bs = do
 let (x, y) = T.break (== ':') bs
 mabs <-
 if isValidSuffix y
 then
 fmap (fmap ((T.takeWhile isSpace x <>) . T.pack . toFilePath)) $
 forgivingResolveFile pkgDir (T.unpack $ T.dropWhile isSpace x) `catch`
 \(_ :: PathException) -> pure Nothing
 else pure Nothing
 case mabs of
 Nothing -> pure bs
 Just fp -> pure $ fp `T.append` y

 doNothing :: ConduitM Text Text m ()
 doNothing = awaitForever yield

 -- | Match the error location format at the end of lines
 isValidSuffix = isRight . parseOnly lineCol
 lineCol = char ':'
 >> choice
 [ num >> char ':' >> num >> optional (char '-' >> num) >> pure ()
 , char '(' >> num >> char ',' >> num >> P.string ")-(" >> num >>
 char ',' >> num >> char ')' >> pure ()
 ]
 >> char ':'
 >> pure ()
 where
 num = some digit

-- | Whether to prefix log lines with timestamps. 
+data PrefixWithTimestamps + = PrefixWithTimestamps + | WithoutTimestamps + +-- | Write stream of lines to handle, but adding timestamps. +sinkWithTimestamps :: + MonadIO m + => PrefixWithTimestamps + -> Handle + -> ConduitT ByteString Void m () +sinkWithTimestamps prefixWithTimestamps h = + case prefixWithTimestamps of + PrefixWithTimestamps -> + CB.lines .| CL.mapM addTimestamp .| CL.map (<> "\n") .| sinkHandle h + WithoutTimestamps -> sinkHandle h + where + addTimestamp theLine = do + now <- liftIO getZonedTime + pure (formatZonedTimeForLog now <> " " <> theLine) + +-- | Format a time in ISO8601 format. We choose ZonedTime over UTCTime +-- because a user expects to see logs in their local time, and would +-- be confused to see UTC time. Stack's debug logs also use the local +-- time zone. +formatZonedTimeForLog :: ZonedTime -> ByteString +formatZonedTimeForLog = + S8.pack . formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%S%6Q" + +-- | Find the Setup.hs or Setup.lhs in the given directory. If none exists, +-- throw an exception. +getSetupHs :: + Path Abs Dir -- ^ project directory + -> IO (Path Abs File) +getSetupHs dir = do + exists1 <- doesFileExist fp1 + if exists1 + then pure fp1 + else do + exists2 <- doesFileExist fp2 + if exists2 + then pure fp2 + else throwM $ NoSetupHsFound dir + where + fp1 = dir relFileSetupHs + fp2 = dir relFileSetupLhs diff --git a/src/Stack/Build/ExecutePackage.hs b/src/Stack/Build/ExecutePackage.hs new file mode 100644 index 0000000000..903a647b77 --- /dev/null +++ b/src/Stack/Build/ExecutePackage.hs @@ -0,0 +1,1424 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +{-| +Module : Stack.Build.ExecutePackage +Description : Perform a build. +License : BSD-3-Clause + +Perform a build. 
+-} + +module Stack.Build.ExecutePackage + ( singleBuild + , singleTest + , singleBench + ) where + +import Control.Concurrent.Execute + ( ActionContext (..), ActionId (..) ) +import Control.Monad.Extra ( whenJust ) +import qualified Data.ByteString as S +import qualified Data.ByteString.Char8 as S8 +import qualified Data.ByteString.Lazy as BL +import Conduit ( runConduitRes ) +import qualified Data.Conduit.Filesystem as CF +import qualified Data.Conduit.List as CL +import Data.Conduit.Process.Typed ( createSource ) +import qualified Data.Conduit.Text as CT +import qualified Data.List as L +import qualified Data.Map.Strict as Map +import qualified Data.Set as Set +import qualified Data.Text as T +import qualified Distribution.PackageDescription as C +import Distribution.System ( OS (..), Platform (..) ) +import qualified Distribution.Text as C +import Distribution.Types.MungedPackageName + ( encodeCompatPackageName ) +import Path + ( (), addExtension, filename, isProperPrefixOf, parent + , parseRelDir, parseRelFile, stripProperPrefix + ) +import Path.Extra ( toFilePathNoTrailingSep ) +import Path.IO + ( copyFile, doesFileExist, ensureDir, ignoringAbsence + , removeDirRecur, removeFile + ) +import RIO.NonEmpty ( nonEmpty ) +import RIO.Process + ( HasProcessContext, byteStringInput, findExecutable + , getStderr, getStdout, inherit, modifyEnvVars, proc + , setStderr, setStdin, setStdout, showProcessArgDebug + , useHandleOpen, waitExitCode, withModifyEnvVars + , withProcessWait, withWorkingDir + ) +import Stack.Build.Cache + ( TestStatus (..), deleteCaches, getTestStatus + , markExeInstalled, markExeNotInstalled, readPrecompiledCache + , setTestStatus, tryGetCabalMod, tryGetConfigCache + , tryGetPackageProjectRoot, tryGetSetupConfigMod + , writeBuildCache, writeCabalMod, writeConfigCache + , writeFlagCache, writePrecompiledCache + , writePackageProjectRoot, writeSetupConfigMod + ) +import Stack.Build.ExecuteEnv + ( ExcludeTHLoading (..), ExecuteEnv (..), KeepOutputOpen 
(..) + , OutputType (..), withSingleContext + ) +import Stack.Build.TestSuiteTimeout + ( forceKill, prepareForEscalation, terminateGracefully ) +import Stack.Build.Source ( addUnlistedToBuildCache ) +import Stack.Config.ConfigureScript ( ensureConfigureScript ) +import Stack.ConfigureOpts + ( configureOptsFromBase, renderConfigureOpts ) +import Stack.Constants + ( bindirSuffix, compilerOptionsCabalFlag, testGhcEnvRelFile ) +import Stack.Constants.Config + ( distDirFromDir, distRelativeDir, hpcDirFromDir + , hpcRelativeDir, setupConfigFromDir + ) +import Stack.Coverage ( generateHpcReport, updateTixFile ) +import Stack.GhcPkg ( ghcPkg, ghcPkgPathEnvVar, unregisterGhcPkgIds ) +import Stack.Package + ( buildLogPath, buildableExes, buildableSubLibs + , hasBuildableMainLibrary + ) +import Stack.PackageDump ( conduitDumpPackage, ghcPkgDescribe ) +import Stack.Prelude +import Stack.Types.Build.Exception + ( BuildException (..), BuildPrettyException (..) ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..), configFileRootL ) +import Stack.Types.BuildOpts + ( BenchmarkOpts (..), BuildOpts (..), HaddockOpts (..) + , TestOpts (..) + ) +import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..) ) +import Stack.Types.Cache + ( ConfigCache (..), PrecompiledCache (..) ) +import qualified Stack.Types.Cache as ConfigCache ( ConfigCache (..) ) +import Stack.Types.CompCollection + ( collectionKeyValueList, collectionLookup + , foldComponentToAnotherCollection, getBuildableListText + ) +import Stack.Types.Compiler + ( WhichCompiler (..), whichCompiler, whichCompilerL ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), GhcPkgExe (..), HasCompiler (..) + , cpWhich, getGhcPkgExe + ) +import qualified Stack.Types.Component as Component +import Stack.Types.ComponentUtils + ( StackUnqualCompName, toCabalName, unqualCompToString + , unqualCompToText + ) +import Stack.Types.Config ( Config (..), HasConfig (..) 
) +import Stack.Types.ConfigureOpts + ( BaseConfigOpts (..), ConfigureOpts (..) ) +import Stack.Types.Curator ( Curator (..) ) +import Stack.Types.DumpPackage ( DumpPackage (..) ) +import Stack.Types.EnvConfig + ( EnvConfig (..), HasEnvConfig (..), actualCompilerVersionL + , appropriateGhcColorFlag + ) +import Stack.Types.EnvSettings ( EnvSettings (..) ) +import Stack.Types.GhcPkgId ( GhcPkgId, ghcPkgIdToText ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.Installed + ( InstallLocation (..), Installed (..), InstalledMap + , InstalledLibraryInfo (..) + ) +import Stack.Types.IsMutable ( IsMutable (..) ) +import Stack.Types.NamedComponent + ( NamedComponent, exeComponents, isCBench, isCTest + , renderComponent + ) +import Stack.Types.Package + ( LocalPackage (..), Package (..), installedPackageToGhcPkgId + , runMemoizedWith, simpleInstalledLib + , toCabalMungedPackageName + ) +import Stack.Types.PackageFile ( PackageWarning (..) ) +import Stack.Types.Plan + ( Task (..), TaskConfigOpts (..), TaskType (..), taskIsTarget + , taskLocation, taskProvides, taskTargetIsMutable + , taskTypePackageIdentifier + ) +import Stack.Types.Runner ( HasRunner, globalOptsL ) +import Stack.Types.SourceMap ( SourceMap (..) ) +import System.IO.Error ( isDoesNotExistError ) +import System.PosixCompat.Files + ( createLink, getFileStatus, modificationTime ) +import System.Random ( randomIO ) +import System.Semaphore ( Semaphore (..), SemaphoreName (..) ) + +-- | Generate the t'ConfigCache' value. +getConfigCache :: + HasEnvConfig env + => ExecuteEnv + -> Task + -> InstalledMap + -> Bool + -> Bool + -> RIO env (Map PackageIdentifier GhcPkgId, ConfigCache) +getConfigCache ee task installedMap enableTest enableBench = do + let extra = + -- We enable tests if the test suite dependencies are already + -- installed, so that we avoid unnecessary recompilation based on + -- cabal_macros.h changes when switching between 'stack build' and + -- 'stack test'. 
See: + -- https://github.com/commercialhaskell/stack/issues/805 + case task.taskType of + TTLocalMutable _ -> + -- FIXME: make this work with exact-configuration. + -- Not sure how to plumb the info atm. See + -- https://github.com/commercialhaskell/stack/issues/2049 + [ "--enable-tests" | enableTest] ++ + [ "--enable-benchmarks" | enableBench] + TTRemotePackage{} -> [] + idMap <- liftIO $ readTVarIO ee.ghcPkgIds + let getMissing ident = + case Map.lookup ident idMap of + Nothing + -- Expect to instead find it in installedMap if it's + -- an initialBuildSteps target. + | ee.buildOptsCLI.initialBuildSteps && taskIsTarget task + , Just (_, installed) <- Map.lookup (pkgName ident) installedMap + -> pure $ installedPackageToGhcPkgId ident installed + Just installed -> pure $ installedPackageToGhcPkgId ident installed + _ -> throwM $ PackageIdMissingBug ident + let cOpts = task.configOpts + missingMapList <- traverse getMissing $ toList cOpts.missing + let pcOpts = cOpts.pkgConfigOpts + missing' = Map.unions missingMapList + -- Historically the leftermost was missing' for union preference in case of + -- collision for the return here. But unifying things with configureOpts + -- where it was the opposite resulted in this. It doesn't seem to make any + -- difference anyway. + allDepsMap = Map.union missing' task.present + configureOpts' = configureOptsFromBase + cOpts.envConfig + cOpts.baseConfigOpts + allDepsMap + cOpts.isLocalNonExtraDep + cOpts.isMutable + pcOpts + configureOpts = configureOpts' + { nonPathRelated = configureOpts'.nonPathRelated ++ map T.unpack extra } + deps = Set.fromList $ Map.elems missing' ++ Map.elems task.present + components = case task.taskType of + TTLocalMutable lp -> + Set.map (encodeUtf8 . 
renderComponent) lp.components + TTRemotePackage{} -> Set.empty + cache = ConfigCache + { configureOpts + , deps + , components + , buildHaddocks = task.buildHaddocks + , pkgSrc = task.cachePkgSrc + , pathEnvVar = ee.pathEnvVar + } + pure (allDepsMap, cache) + +-- | Ensure that the configuration for the package matches what is given +ensureConfig :: + HasEnvConfig env + => ConfigCache + -- ^ newConfigCache + -> Path Abs Dir + -- ^ package directory + -> BuildOpts + -> RIO env () + -- ^ announce + -> (ExcludeTHLoading -> [String] -> RIO env ()) + -- ^ cabal + -> Path Abs File + -- ^ Cabal file + -> Task + -> RIO env Bool +ensureConfig newConfigCache pkgDir buildOpts announce cabal cabalFP task = do + newCabalMod <- + liftIO $ modificationTime <$> getFileStatus (toFilePath cabalFP) + setupConfigfp <- setupConfigFromDir pkgDir + let getNewSetupConfigMod = + liftIO $ either (const Nothing) (Just . modificationTime) <$> + tryJust + (guard . isDoesNotExistError) + (getFileStatus (toFilePath setupConfigfp)) + newSetupConfigMod <- getNewSetupConfigMod + newConfigFileRoot <- S8.pack . toFilePath <$> view configFileRootL + needConfig <- + if buildOpts.reconfigure + -- The reason 'taskAnyMissing' is necessary is a bug in Cabal. See: + -- . + -- The problem is that Cabal may end up generating the same package ID + -- for a dependency, even if the ABI has changed. As a result, without + -- check, Stack would think that a reconfigure is unnecessary, when in + -- fact we _do_ need to reconfigure. The details here suck. We really + -- need proper hashes for package identifiers. + then pure True + else do + -- We can ignore the components field of the Cabal configuration cache, + -- because it is only used to inform 'construct plan' that we need to + -- plan to build additional components. These components don't affect + -- the Cabal configuration for the package. 
+ let ignoreComponents :: ConfigCache -> ConfigCache + ignoreComponents cc = cc { ConfigCache.components = Set.empty } + -- Determine the old and new Cabal configuration for the package + -- directory, to determine if we need to reconfigure. + mOldConfigCache <- tryGetConfigCache pkgDir + + mOldCabalMod <- tryGetCabalMod pkgDir + + -- Cabal's setup-config is created per OS/Cabal version, multiple + -- projects using the same package could get a conflict because of this + mOldSetupConfigMod <- tryGetSetupConfigMod pkgDir + mOldProjectRoot <- tryGetPackageProjectRoot pkgDir + + pure $ + fmap ignoreComponents mOldConfigCache + /= Just (ignoreComponents newConfigCache) + || mOldCabalMod /= Just newCabalMod + || mOldSetupConfigMod /= newSetupConfigMod + || mOldProjectRoot /= Just newConfigFileRoot + + when task.buildTypeConfig $ + -- When build-type is Configure, we need to have a configure script in the + -- local directory. If it doesn't exist, build it with autoreconf -i. See: + -- https://github.com/commercialhaskell/stack/issues/3534 + ensureConfigureScript pkgDir + + when needConfig $ do + deleteCaches pkgDir + announce + cp <- view compilerPathsL + let (GhcPkgExe pkgPath) = cp.pkg + let programNames = + case cpWhich cp of + Ghc -> + [ ("ghc", toFilePath cp.compiler) + , ("ghc-pkg", toFilePath pkgPath) + ] + exes <- forM programNames $ \(name, file) -> + findExecutable file <&> \case + Left _ -> [] + Right x -> pure $ concat ["--with-", name, "=", x] + let allOpts = + concat exes + <> renderConfigureOpts newConfigCache.configureOpts + -- Configure cabal with arguments determined by + -- Stack.Types.Build.configureOpts + cabal KeepTHLoading $ "configure" : allOpts + -- Only write the cache for local packages. Remote packages are built in a + -- temporary directory so the cache would never be used anyway. 
+ case task.taskType of + TTLocalMutable{} -> writeConfigCache pkgDir newConfigCache + TTRemotePackage{} -> pure () + writeCabalMod pkgDir newCabalMod + -- This file gets updated one more time by the configure step, so get the + -- most recent value. We could instead change our logic above to check if + -- our config mod file is newer than the file above, but this seems + -- reasonable too. + getNewSetupConfigMod >>= writeSetupConfigMod pkgDir + writePackageProjectRoot pkgDir newConfigFileRoot + pure needConfig + +-- | Make a padded prefix for log messages +packageNamePrefix :: ExecuteEnv -> PackageName -> String +packageNamePrefix ee name' = + let name = packageNameString name' + paddedName = + case ee.largestPackageName of + Nothing -> name + Just len -> + assert (len >= length name) $ take len $ name ++ L.repeat ' ' + in paddedName <> "> " + +announceTask :: + HasLogFunc env + => ExecuteEnv + -> TaskType + -> Utf8Builder + -> RIO env () +announceTask ee taskType action = logInfo $ + fromString + (packageNamePrefix ee (pkgName (taskTypePackageIdentifier taskType))) + <> action + +-- | Implements running a package's build, used to implement +-- 'Control.Concurrent.Execute.ATBuild' and +-- 'Control.Concurrent.Execute.ATBuildFinal' tasks. The latter is a task for +-- building a package's benchmarks and test-suites. +-- +-- In particular this does the following: +-- +-- * Checks if the package exists in the precompiled cache, and if so, add it to +-- the database instead of performing the build. +-- +-- * Runs the configure step if needed (@ensureConfig@) +-- +-- * Runs the build step +-- +-- * Generates haddocks +-- +-- * Registers the library and copies the built executables into the local +-- install directory. Note that this is literally invoking Cabal with @copy@, +-- and not the copying done by @stack install@ - that is handled by +-- 'Stack.Build.copyExecutables'. +singleBuild :: + forall env. 
     (HasEnvConfig env, HasRunner env)
  => ActionContext
  -> ExecuteEnv
  -> Task
  -> InstalledMap
  -> Bool
     -- ^ Is this a final build?
  -> RIO env ()
singleBuild
    ac
    ee
    task
    installedMap
    isFinalBuild
  = do
    (allDepsMap, cache) <-
      getConfigCache ee task installedMap enableTests enableBenchmarks
    let bcoSnapInstallRoot = ee.baseConfigOpts.snapInstallRoot
    -- Prefer a precompiled result over a real configure-and-build.
    mprecompiled <- getPrecompiled cache task.taskType bcoSnapInstallRoot
    minstalled <-
      case mprecompiled of
        Just precompiled -> copyPreCompiled ee task pkgId precompiled
        Nothing -> do
          curator <- view $ buildConfigL . to (.curator)
          realConfigAndBuild
            ac
            ee
            task
            installedMap
            (enableTests, enableBenchmarks)
            (isFinalBuild, buildingFinals)
            cache
            curator
            allDepsMap
    -- Record the result so later tasks (and Haddock generation) can see it.
    whenJust minstalled $ \installed -> do
      writeFlagCache installed cache
      liftIO $ atomically $ modifyTVar ee.ghcPkgIds $ Map.insert pkgId installed
 where
  pkgId = taskProvides task
  -- Finals (tests/benchmarks) are built either in a dedicated final build or
  -- together with everything else when the task is all-in-one.
  buildingFinals = isFinalBuild || task.allInOne
  enableTests = buildingFinals && any isCTest (taskComponents task)
  enableBenchmarks = buildingFinals && any isCBench (taskComponents task)

-- | Configure the package (via 'ensureConfig') and, unless only the configure
-- step or initial build steps were requested, run the real build through
-- @realBuild@, honouring any curator expectations.
realConfigAndBuild ::
     forall env a.
     HasEnvConfig env
  => ActionContext
  -> ExecuteEnv
  -> Task
  -> Map PackageName (a, Installed)
  -> (Bool, Bool)
     -- ^ (enableTests, enableBenchmarks)
  -> (Bool, Bool)
     -- ^ (isFinalBuild, buildingFinals)
  -> ConfigCache
  -> Maybe Curator
  -> Map PackageIdentifier GhcPkgId
  -> RIO env (Maybe Installed)
realConfigAndBuild
    ac
    ee
    task
    installedMap
    (enableTests, enableBenchmarks)
    (isFinalBuild, buildingFinals)
    cache
    mcurator0
    allDepsMap
  = withSingleContext ac ee task.taskType allDepsMap Nothing $
      \package cabalFP pkgDir cabal0 announce _outputType -> do
        let cabal = cabal0 CloseOnException
        _neededConfig <-
          ensureConfig
            cache
            pkgDir
            ee.buildOpts
            (announce ("configure" <> display annSuffix))
            cabal
            cabalFP
            task
        let installedMapHasThisPkg :: Bool
            installedMapHasThisPkg =
              case Map.lookup package.name installedMap of
                Just (_, Library ident _) -> ident == pkgId
                Just (_, Executable _) -> True
                _ -> False

        case ( ee.buildOptsCLI.onlyConfigure
             , ee.buildOptsCLI.initialBuildSteps && taskIsTarget task
             ) of
          -- A full build is done if there are downstream actions,
          -- because their configure step will require that this
          -- package is built. See
          -- https://github.com/commercialhaskell/stack/issues/2787
          (True, _) | null ac.downstream -> pure Nothing
          (_, True) | null ac.downstream || installedMapHasThisPkg -> do
            initialBuildSteps cabal announce
            pure Nothing
          _ -> fulfillCuratorBuildExpectations
                 pname
                 mcurator0
                 enableTests
                 enableBenchmarks
                 Nothing
                 (Just <$> realBuild package pkgDir cabal0 announce)
 where
  pkgId = taskProvides task
  PackageIdentifier pname _ = pkgId
  -- Whether to generate Haddocks for this build.
  doHaddock curator =
       task.buildHaddocks
    && not isFinalBuild
       -- Special help for the curator tool to avoid haddocks that are known
       -- to fail
    && maybe True (Set.notMember pname . (.skipHaddock)) curator

  -- Suffix such as " (lib + exe)" describing which components this pass
  -- covers; empty when nothing applies.
  annSuffix = if result == "" then "" else " (" <> result <> ")"
   where
    result = T.intercalate " + " $ concat
      [ ["lib" | task.allInOne && hasLib]
      , ["sub-lib" | task.allInOne && hasSubLib]
      , ["exe" | task.allInOne && hasExe]
      , ["test" | enableTests]
      , ["bench" | enableBenchmarks]
      ]
    (hasLib, hasSubLib, hasExe) = case task.taskType of
      TTLocalMutable lp ->
        let package = lp.package
            hasLibrary = hasBuildableMainLibrary package
            hasSubLibraries = not $ null package.subLibraries
            hasExecutables = not . Set.null $ exesToBuild lp
        in  (hasLibrary, hasSubLibraries, hasExecutables)
      -- This isn't true, but we don't want to have this info for upstream
      -- deps.
      _ -> (False, False, False)

  -- Run the "repl stack-initial-build-steps" trick, which causes Cabal to
  -- perform its pre-build steps without building.
  initialBuildSteps cabal announce = do
    announce ("initial-build-steps" <> display annSuffix)
    cabal KeepTHLoading ["repl", "stack-initial-build-steps"]

  realBuild ::
       Package
    -> Path Abs Dir
    -> (KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ())
    -> (Utf8Builder -> RIO env ())
       -- ^ A plain 'announce' function
    -> RIO env Installed
  realBuild package pkgDir cabal0 announce = do
    let cabal = cabal0 CloseOnException
    wc <- view $ actualCompilerVersionL . whichCompilerL

    -- Mark as not installed up front; re-marked installed on success.
    markExeNotInstalled (taskLocation task) pkgId
    case task.taskType of
      TTLocalMutable lp -> do
        when enableTests $ setTestStatus pkgDir TSUnknown
        caches <- runMemoizedWith lp.newBuildCaches
        mapM_
          (uncurry (writeBuildCache pkgDir))
          (Map.toList caches)
      TTRemotePackage{} -> pure ()

    -- FIXME: only output these if they're in the build plan.
    let postBuildCheck _succeeded = do
          mlocalWarnings <- case task.taskType of
            TTLocalMutable lp -> do
              warnings <- checkForUnlistedFiles task.taskType pkgDir
              -- TODO: Perhaps only emit these warnings for non extra-dep?
              pure (Just (lp.cabalFP, warnings))
            _ -> pure Nothing
          -- NOTE: once
          -- https://github.com/commercialhaskell/stack/issues/2649
          -- is resolved, we will want to partition the warnings
          -- based on variety, and output in different lists.
          let showModuleWarning (UnlistedModulesWarning comp modules) =
                "- In" <+>
                fromString (T.unpack (renderComponent comp)) <>
                ":" <> line <>
                indent 4 ( mconcat
                         $ L.intersperse line
                         $ map
                             (style Good . fromString . C.display)
                             modules
                         )
          forM_ mlocalWarnings $ \(cabalFP, warnings) ->
            unless (null warnings) $ prettyWarn $
              flow "The following modules should be added to \
                   \exposed-modules or other-modules in" <+>
              pretty cabalFP
              <> ":"
              <> line
              <> indent 4 ( mconcat
                          $ L.intersperse line
                          $ map showModuleWarning warnings
                          )
              <> blankLine
              <> flow "Missing modules in the Cabal file are likely to cause \
                      \undefined reference errors from the linker, along with \
                      \other problems."

    actualCompiler <- view actualCompilerVersionL
    () <- announce
      ( "build"
      <> display annSuffix
      <> " with "
      <> display actualCompiler
      )
    config <- view configL
    extraOpts <- extraBuildOptions wc ee.buildOpts ee.semaphore
    let stripTHLoading
          | config.hideTHLoading = ExcludeTHLoading
          | otherwise = KeepTHLoading
    -- Choose the components to build now and the components to "copy" later,
    -- depending on whether this is an all-in-one or a final build.
    (buildOpts, copyOpts) <-
      case (task.taskType, task.allInOne, isFinalBuild) of
        (_, True, True) -> throwM AllInOneBuildBug
        (TTLocalMutable lp, False, False) ->
          let componentOpts = primaryComponentOptions lp
          in  pure (componentOpts, componentOpts)
        (TTLocalMutable lp, False, True) -> pure (finalComponentOptions lp, [])
        (TTLocalMutable lp, True, False) ->
          let componentOpts = primaryComponentOptions lp
          in  pure (componentOpts <> finalComponentOptions lp, componentOpts)
        (TTRemotePackage{}, _, _) -> pure ([], [])
    -- Run the post-build check even on Cabal failure, so unlisted-module
    -- warnings accompany the error.
    cabal stripTHLoading ("build" : buildOpts <> extraOpts)
      `catch` \ex -> case ex of
        CabalExitedUnsuccessfully{} ->
          postBuildCheck False >> prettyThrowM ex
        _ -> throwM ex
    postBuildCheck True

    mcurator <- view $ buildConfigL . to (.curator)
    when (doHaddock mcurator) $ do
      let isTaskTargetMutable = taskTargetIsMutable task == Mutable
          isHaddockForHackage =
            ee.buildOpts.haddockForHackage && isTaskTargetMutable
      announce $ if isHaddockForHackage
        then "haddock for Hackage"
        else "haddock"

      -- For GHC 8.4 and later, provide the --quickjump option.
      let quickjump = ["--haddock-option=--quickjump"]

      fulfillHaddockExpectations pname mcurator $ \keep -> do
        let args = concat
              ( ( if isHaddockForHackage
                    then
                      [ [ "--for-hackage" ] ]
                    else
                      [ [ "--html"
                        , "--hoogle"
                        , "--html-location=../$pkg-$version/"
                        ]
                      , [ "--haddock-option=--hyperlinked-source"
                        | ee.buildOpts.haddockHyperlinkSource
                        ]
                      , [ "--executables" | ee.buildOpts.haddockExecutables ]
                      , [ "--tests" | ee.buildOpts.haddockTests ]
                      , [ "--benchmarks" | ee.buildOpts.haddockBenchmarks ]
                      , [ "--internal" | ee.buildOpts.haddockInternal ]
                      , quickjump
                      ]
                )
                <> [ [ "--haddock-option=" <> opt
                     | opt <- ee.buildOpts.haddockOpts.additionalArgs
                     ]
                   ]
              )

        cabal0 keep KeepTHLoading $ "haddock" : args

    let hasLibrary = hasBuildableMainLibrary package
        hasSubLibraries = not $ null package.subLibraries
        hasExecutables = not $ null package.executables
        shouldCopy =
             not isFinalBuild
          && (hasLibrary || hasSubLibraries || hasExecutables)
    -- Copying/registering mutates the shared package database, so take the
    -- install lock.
    when shouldCopy $ withMVar ee.installLock $ \() -> do
      announce "copy/register"
      try (cabal KeepTHLoading $ "copy" : copyOpts) >>= \case
        Left err@CabalExitedUnsuccessfully{} ->
          prettyThrowM $ CabalCopyFailed
            (package.buildType == C.Simple)
            err
        _ -> pure ()
      when (hasLibrary || hasSubLibraries) $ cabal KeepTHLoading ["register"]

    copyDdumpFilesIfNeeded buildingFinals ee.buildOpts.ddumpDir
    installedPkg <-
      fetchAndMarkInstalledPackage ee (taskLocation task) package pkgId
    postProcessRemotePackage
      task.taskType
      ac
      cache
      ee
      installedPkg
      package
      pkgId
      pkgDir
    pure
installedPkg

-- | Action in the case that the task relates to a remote package.
postProcessRemotePackage ::
     (HasEnvConfig env)
  => TaskType
  -> ActionContext
  -> ConfigCache
  -> ExecuteEnv
  -> Installed
  -> Package
  -> PackageIdentifier
  -> Path b Dir
     -- ^ The package's build directory (a tmp directory for index packages).
  -> RIO env ()
postProcessRemotePackage
    taskType
    ac
    cache
    ee
    installedPackage
    package
    pkgId
    pkgDir
  = case taskType of
      TTRemotePackage isMutable _ loc -> do
        -- Only immutable remote packages can be reused via the precompiled
        -- cache, since mutable ones may change between builds.
        when (isMutable == Immutable) $ writePrecompiledCache
          ee.baseConfigOpts
          loc
          cache.configureOpts
          cache.buildHaddocks
          installedPackage
          (buildableExes package)
        -- For packages from a package index, pkgDir is in the tmp directory.
        -- We eagerly delete it if no other tasks require it, to reduce space
        -- usage in tmp (#3018).
        let remaining =
              Set.filter
                (\(ActionId x _) -> x == pkgId)
                ac.remaining
        when (null remaining) $ removeDirRecur pkgDir
      -- Nothing to do for local mutable packages.
      _ -> pure ()

-- | Once all the Cabal-related tasks have run for a package, we should be able
-- to gather the information needed to create an 'Installed' package value. For
-- now, either there's a main library (in which case we consider the 'GhcPkgId'
-- values of the package's libraries) or we just consider it's an executable
-- (and mark all the executables as installed, if any).
--
-- Note that this also modifies the installedDumpPkgsTVar which is used for
-- generating Haddocks.
--
fetchAndMarkInstalledPackage ::
     (HasEnvConfig env, HasTerm env)
  => ExecuteEnv
  -> InstallLocation
  -> Package
  -> PackageIdentifier
  -> RIO env Installed
fetchAndMarkInstalledPackage ee taskInstallLocation package pkgId = do
  let ghcPkgIdLoader = fetchGhcPkgIdForLib ee taskInstallLocation package.name
  -- Only pure the sub-libraries to cache them if we also cache the main
  -- library (that is, if it exists)
  if hasBuildableMainLibrary package
    then do
      -- Accumulate a map from sub-library name to its GhcPkgId, skipping
      -- sub-libraries whose id cannot be found in the database.
      let foldSubLibToMap subLib mapInMonad = do
            maybeGhcpkgId <- ghcPkgIdLoader (Just subLib.name)
            mapInMonad <&> case maybeGhcpkgId of
              Just v -> Map.insert subLib.name v
              _ -> id
      subLibsPkgIds <- foldComponentToAnotherCollection
        package.subLibraries
        foldSubLibToMap
        mempty
      -- The main library's id is mandatory; failing to find it is an error.
      ghcPkgIdLoader Nothing >>= \case
        Nothing -> throwM $ Couldn'tFindPkgId package.name
        Just ghcPkgId -> pure $ simpleInstalledLib pkgId ghcPkgId subLibsPkgIds
    else do
      markExeInstalled taskInstallLocation pkgId -- TODO unify somehow
                                                -- with writeFlagCache?
      pure $ Executable pkgId

-- | Look up the 'GhcPkgId' of a package's main library ('Nothing' for the
-- component name) or of one of its sub-libraries (munged package name), in the
-- package database corresponding to the install location. As a side effect,
-- 'loadInstalledPkg' records the dump in the relevant dump-packages TVar.
fetchGhcPkgIdForLib ::
     (HasTerm env, HasEnvConfig env)
  => ExecuteEnv
  -> InstallLocation
  -> PackageName
  -> Maybe Component.StackUnqualCompName
  -> RIO env (Maybe GhcPkgId)
fetchGhcPkgIdForLib ee installLocation pkgName libName = do
  let baseConfigOpts = ee.baseConfigOpts
      -- Choose the database and the dump TVar matching the install location.
      (installedPkgDb, installedDumpPkgsTVar) =
        case installLocation of
          Snap ->
            ( baseConfigOpts.snapDB
            , ee.snapshotDumpPkgs )
          Local ->
            ( baseConfigOpts.localDB
            , ee.localDumpPkgs )
  let commonLoader = loadInstalledPkg [installedPkgDb] installedDumpPkgsTVar
  case libName of
    Nothing -> commonLoader pkgName
    Just v -> do
      -- Sub-libraries are registered under a munged (compat-encoded) name.
      let mungedName =
            encodeCompatPackageName $ toCabalMungedPackageName pkgName v
      commonLoader mungedName

-- | Copy ddump-* files, if we are building finals and a non-empty ddump-dir
-- has been specified.
copyDdumpFilesIfNeeded :: HasEnvConfig env => Bool -> Maybe Text -> RIO env ()
copyDdumpFilesIfNeeded buildingFinals mDdumpPath = when buildingFinals $
  whenJust mDdumpPath $ \ddumpPath -> unless (T.null ddumpPath) $ do
    distDir <- distRelativeDir
    ddumpRelDir <- parseRelDir $ T.unpack ddumpPath
    prettyDebugL
      [ "ddump-dir:"
      , pretty ddumpRelDir
      ]
    prettyDebugL
      [ "dist-dir:"
      , pretty distDir
      ]
    -- Walk the dist directory and copy every *.dump-* file into the
    -- ddump dir, mirroring the directory layout.
    runConduitRes
      $ CF.sourceDirectoryDeep False (toFilePath distDir)
      .| CL.filter (L.isInfixOf ".dump-")
      .| CL.mapM_ (\src -> liftIO $ do
           parentDir <- parent <$> parseRelDir src
           destBaseDir <-
             (ddumpRelDir </>) <$> stripProperPrefix distDir parentDir
           -- exclude .stack-work dir
           unless (".stack-work" `L.isInfixOf` toFilePath destBaseDir) $ do
             ensureDir destBaseDir
             src' <- parseRelFile src
             copyFile src' (destBaseDir </> filename src'))

-- | Yield the precompiled cache entry for an immutable remote package, if one
-- exists, refers outside the current snapshot install root, and all of its
-- recorded files still exist on disk. Any other task type yields 'Nothing'.
getPrecompiled ::
     HasEnvConfig env
  => ConfigCache
  -> TaskType
  -> Path Abs Dir
  -> RIO env (Maybe (PrecompiledCache Abs))
getPrecompiled cache taskType bcoSnapInstallRoot =
  case taskType of
    TTRemotePackage Immutable _ loc ->
      readPrecompiledCache loc cache.configureOpts cache.buildHaddocks >>= \case
        Nothing -> pure Nothing
        -- Only pay attention to precompiled caches that refer to packages
        -- within the snapshot.
        Just pc
          | maybe False
              (bcoSnapInstallRoot `isProperPrefixOf`)
              pc.library -> pure Nothing
        -- If old precompiled cache files are left around but snapshots are
        -- deleted, it is possible for the precompiled file to refer to the
        -- very library we're building, and if flags are changed it may try to
        -- copy the library to itself. This check prevents that from
        -- happening.
        Just pc -> do
          -- Short-circuiting monadic "all": stop at the first missing file.
          let allM _ [] = pure True
              allM f (x:xs) = do
                b <- f x
                if b then allM f xs else pure False
          b <- liftIO $
            allM doesFileExist $ maybe id (:) pc.library pc.exes
          pure $ if b then Just pc else Nothing
    _ -> pure Nothing

-- | Reuse a precompiled cache entry instead of building: re-register the main
-- library and sub-libraries into the snapshot database and link (or copy) the
-- cached executables into the snapshot bin directory.
copyPreCompiled ::
     ( HasLogFunc env
     , HasCompiler env
     , HasTerm env
     , HasProcessContext env
     , HasEnvConfig env
     )
  => ExecuteEnv
  -> Task
  -> PackageIdentifier
  -> PrecompiledCache b0
  -> RIO env (Maybe Installed)
copyPreCompiled ee task pkgId (PrecompiledCache mlib subLibs exes) = do
  let PackageIdentifier pname pversion = pkgId
  announceTask ee task.taskType "using precompiled package"

  -- We need to copy .conf files for the main library and all sub-libraries
  -- which exist in the cache, from their old snapshot to the new one.
  -- However, we must unregister any such library in the new snapshot, in case
  -- it was built with different flags.
  let
    subLibNames = Set.toList $ buildableSubLibs $ case task.taskType of
      TTLocalMutable lp -> lp.package
      TTRemotePackage _ p _ -> p
    -- Sub-libraries are registered under munged package names.
    toMungedPackageId :: StackUnqualCompName -> MungedPackageId
    toMungedPackageId subLib =
      let subLibName = LSubLibName $ toCabalName subLib
      in  MungedPackageId (MungedPackageName pname subLibName) pversion
    toPackageId :: MungedPackageId -> PackageIdentifier
    toPackageId (MungedPackageId n v) =
      PackageIdentifier (encodeCompatPackageName n) v
    allToUnregister :: [Either PackageIdentifier GhcPkgId]
    allToUnregister = mcons
      (Left pkgId <$ mlib)
      (map (Left . toPackageId . toMungedPackageId) subLibNames)
    allToRegister = mcons mlib subLibs

  unless (null allToRegister) $
    withMVar ee.installLock $ \() -> do
      -- We want to ignore the global and user package databases. ghc-pkg
      -- allows us to specify --no-user-package-db and --package-db= on
      -- the command line.
      let pkgDb = ee.baseConfigOpts.snapDB
      ghcPkgExe <- getGhcPkgExe
      -- First unregister, silently, everything that needs to be
      -- unregistered. Failures here are deliberately ignored (best effort).
      whenJust (nonEmpty allToUnregister) $ \allToUnregister' -> do
        logLevel <- view $ globalOptsL . to (.logLevel)
        let isDebug = logLevel == LevelDebug
        catchAny
          (unregisterGhcPkgIds isDebug ghcPkgExe pkgDb allToUnregister')
          (const (pure ()))
      -- There appears to be a bug in the ghc-pkg executable such that, on
      -- Windows only, it cannot register a package into a package database
      -- that is also listed in the GHC_PACKAGE_PATH environment variable.
      -- See: https://gitlab.haskell.org/ghc/ghc/-/issues/25962. We work
      -- around that by removing GHC_PACKAGE_PATH from the environment for the
      -- register step.
      wc <- view $ envConfigL . to (.sourceMap.compiler) . to whichCompiler
      withModifyEnvVars (Map.delete $ ghcPkgPathEnvVar wc) $
        forM_ allToRegister $ \libpath -> do
          let args = ["register", "--force", toFilePath libpath]
          ghcPkg ghcPkgExe [pkgDb] args >>= \case
            Left e -> prettyWarn $
                 "[S-4541]"
              <> line
              <> fillSep
                   [ flow "While registering"
                   , pretty libpath
                   , "in"
                   , pretty pkgDb <> ","
                   , flow "Stack encountered the following error:"
                   ]
              <> blankLine
              <> string (displayException e)
            Right _ -> pure ()
  -- Hard-link executables into the snapshot bin dir; fall back to copying if
  -- linking fails (e.g. across filesystems).
  liftIO $ forM_ exes $ \exe -> do
    ensureDir bindir
    let dst = bindir </> filename exe
    createLink (toFilePath exe) (toFilePath dst) `catchIO` \_ -> copyFile exe dst
  case (mlib, exes) of
    (Nothing, _:_) -> markExeInstalled (taskLocation task) pkgId
    _ -> pure ()

  -- Find the package in the database
  let pkgDbs = [ee.baseConfigOpts.snapDB]

  case mlib of
    Nothing -> pure $ Just $ Executable pkgId
    Just _ -> do
      mpkgid <- loadInstalledPkg pkgDbs ee.snapshotDumpPkgs pname

      pure $ Just $
        case mpkgid of
          -- The library was just registered; not finding it would be a bug.
          Nothing -> assert False $ Executable pkgId
          Just pkgid -> simpleInstalledLib pkgId pkgid mempty
 where
  bindir = ee.baseConfigOpts.snapInstallRoot </> bindirSuffix

-- | Query ghc-pkg for the named package in the given databases. On a unique
-- hit, record its dump in the given TVar and yield its 'GhcPkgId'; no hit
-- yields 'Nothing'; multiple hits are an internal error.
loadInstalledPkg ::
     (HasCompiler env, HasProcessContext env, HasTerm env)
  => [Path Abs Dir]
  -> TVar (Map GhcPkgId DumpPackage)
  -> PackageName
  -> RIO env (Maybe GhcPkgId)
loadInstalledPkg pkgDbs tvar name = do
  pkgexe <- getGhcPkgExe
  dps <- ghcPkgDescribe pkgexe name pkgDbs $ conduitDumpPackage .| CL.consume
  case dps of
    [] -> pure Nothing
    [dp] -> do
      liftIO $ atomically $ modifyTVar' tvar (Map.insert dp.ghcPkgId dp)
      pure $ Just dp.ghcPkgId
    _ -> throwM $ MultipleResultsBug name dps

-- | Run the Haddock action. If the curator expects Haddock to fail for this
-- package, keep the output open, swallow failures, and warn on unexpected
-- success; otherwise run the action normally.
fulfillHaddockExpectations ::
     (MonadUnliftIO m, HasTerm env, MonadReader env m)
  => PackageName
  -> Maybe Curator
  -> (KeepOutputOpen -> m ())
  -> m ()
fulfillHaddockExpectations pname mcurator action
  | expectHaddockFailure mcurator =
      tryAny (action KeepOpen) >>= \case
        Right () -> prettyWarnL
          [ style Current (fromPackageName pname) <> ":"
          , flow "unexpected Haddock success."
          ]
        Left _ -> pure ()
 where
  expectHaddockFailure =
    maybe False (Set.member pname . (.expectHaddockFailure))
fulfillHaddockExpectations _ _ action = action CloseOnException

-- | Check if any unlisted files have been found, and add them to the build
-- cache.
checkForUnlistedFiles ::
     HasEnvConfig env
  => TaskType
  -> Path Abs Dir
  -> RIO env [PackageWarning]
checkForUnlistedFiles (TTLocalMutable lp) pkgDir = do
  caches <- runMemoizedWith lp.newBuildCaches
  (addBuildCache, warnings) <-
    addUnlistedToBuildCache
      lp.package
      lp.cabalFP
      lp.components
      caches
  -- Merge the newly discovered files into each component's build cache.
  forM_ (Map.toList addBuildCache) $ \(component, newToCache) -> do
    let cache = Map.findWithDefault Map.empty component caches
    writeBuildCache pkgDir component $
      Map.unions (cache : newToCache)
  pure warnings
checkForUnlistedFiles TTRemotePackage{} _ = pure []

-- | Implements running a package's tests. Also handles producing
-- coverage reports if coverage is enabled.
singleTest ::
     HasEnvConfig env
  => TestOpts
  -> [StackUnqualCompName]
     -- ^ Test suites to run (already filtered to this package's suites).
  -> ActionContext
  -> ExecuteEnv
  -> Task
  -> InstalledMap
  -> RIO env ()
singleTest topts testsToRun ac ee task installedMap = do
  -- FIXME: Since this doesn't use cabal, we should be able to avoid using a
  -- full blown 'withSingleContext'.
  (allDepsMap, _cache) <- getConfigCache ee task installedMap True False
  mcurator <- view $ buildConfigL . to (.curator)
  let pname = pkgName $ taskProvides task
      expectFailure = expectTestFailure pname mcurator
  withSingleContext ac ee task.taskType allDepsMap (Just "test") $
    \package _cabalfp pkgDir _cabal announce outputType -> do
      config <- view configL
      let needHpc = topts.coverage
      -- Decide whether to run at all, based on --rerun-tests and the
      -- recorded status of the previous run.
      toRun <-
        if topts.runTests
          then if topts.rerunTests
            then pure True
            else
              getTestStatus pkgDir >>= \case
                TSSuccess -> do
                  unless (null testsToRun) $
                    announce "skipping already passed test"
                  pure False
                TSFailure
                  | expectFailure -> do
                      announce
                        "skipping already failed test that's expected to fail"
                      pure False
                  | otherwise -> do
                      announce "rerunning previously failed test"
                      pure True
                TSUnknown -> pure True
          else prettyThrowM $ ActionNotFilteredBug "singleTest"
      when toRun $ do
        buildDir <- distDirFromDir pkgDir
        hpcDir <- hpcDirFromDir pkgDir
        when needHpc (ensureDir hpcDir)

        -- Pair each requested suite name with its Cabal test interface.
        let suitesToRun
              = [ testSuitePair
                | testSuitePair <-
                    ((fmap . fmap) (.interface) <$> collectionKeyValueList)
                      package.testSuites
                , let testName = fst testSuitePair
                , testName `elem` testsToRun
                ]

        -- Collect, per suite, the failing exit code (if any). An empty map
        -- means complete success.
        errs <- fmap Map.unions $ forM suitesToRun $
          \(testName, suiteInterface) -> do
            let stestName = unqualCompToString testName
            -- detailed-0.9 ("LibV09") suites are run via a generated Stub
            -- executable.
            (testName', isTestTypeLib) <-
              case suiteInterface of
                C.TestSuiteLibV09{} -> pure (stestName ++ "Stub", True)
                C.TestSuiteExeV10{} -> pure (stestName, False)
                interface -> throwM (TestSuiteTypeUnsupported interface)

            let exeName = testName' ++
                  case config.platform of
                    Platform _ Windows -> ".exe"
                    _ -> ""
            tixPath <- fmap (pkgDir </>) $ parseRelFile $ exeName ++ ".tix"
            exePath <-
              fmap (buildDir </>) $ parseRelFile $
                "build/" ++ testName' ++ "/" ++ exeName
            exists <- doesFileExist exePath
            -- in Stack.Package.packageFromPackageDescription we filter out
            -- package itself of any dependencies so any tests requiring
            -- loading of their own package library will fail so to prevent
            -- this we return it back here but unfortunately unconditionally
            installed <- case Map.lookup pname installedMap of
              Just (_, installed) -> pure $ Just installed
              Nothing -> do
                idMap <- liftIO $ readTVarIO ee.ghcPkgIds
                pure $ Map.lookup (taskProvides task) idMap
            let pkgGhcIdList = case installed of
                  Just (Library _ libInfo) -> [libInfo.ghcPkgId]
                  _ -> []
            -- doctest relies on template-haskell in QuickCheck-based tests
            thGhcId <-
              case L.find
                     ((== "template-haskell") . pkgName . (.packageIdent) . snd)
                     (Map.toList ee.globalDumpPkgs) of
                Just (ghcId, _) -> pure ghcId
                Nothing -> throwIO TemplateHaskellNotFoundBug
            -- env variable GHC_ENVIRONMENT is set for doctest so module names
            -- for packages with proper dependencies should no longer get
            -- ambiguous see e.g. https://github.com/doctest/issues/119
            -- also we set HASKELL_DIST_DIR to a package dist directory so
            -- doctest will be able to load modules autogenerated by Cabal
            let setEnv f pc = modifyEnvVars pc $ \envVars ->
                  Map.insert "HASKELL_DIST_DIR" (T.pack $ toFilePath buildDir) $
                  Map.insert "GHC_ENVIRONMENT" (T.pack f) envVars
                fp' = ee.tempDir </> testGhcEnvRelFile
            -- Add a random suffix to avoid conflicts between parallel jobs
            -- See https://github.com/commercialhaskell/stack/issues/5024
            randomInt <- liftIO (randomIO :: IO Int)
            let randomSuffix = "." <> show (abs randomInt)
            fp <- toFilePath <$> addExtension randomSuffix fp'
            -- Write a GHC package environment file exposing the snapshot and
            -- local databases plus the exact package-ids of all dependencies.
            let snapDBPath =
                  toFilePathNoTrailingSep ee.baseConfigOpts.snapDB
                localDBPath =
                  toFilePathNoTrailingSep ee.baseConfigOpts.localDB
                ghcEnv =
                     "clear-package-db\n"
                  <> "global-package-db\n"
                  <> "package-db "
                  <> fromString snapDBPath
                  <> "\n"
                  <> "package-db "
                  <> fromString localDBPath
                  <> "\n"
                  <> foldMap
                       ( \ghcId ->
                              "package-id "
                           <> display (ghcPkgIdToText ghcId)
                           <> "\n"
                       )
                       (pkgGhcIdList ++ thGhcId:Map.elems allDepsMap)
            writeFileUtf8Builder fp ghcEnv
            menv <- liftIO $
              setEnv fp =<< config.processContextSettings EnvSettings
                { includeLocals = taskLocation task == Local
                , includeGhcPackagePath = True
                , stackExe = True
                , localeUtf8 = False
                , keepGhcRts = False
                }
            let emptyResult = Map.singleton testName Nothing
            withProcessContext menv $ if exists
              then do
                -- We clear out the .tix files before doing a run.
                when needHpc $ do
                  tixexists <- doesFileExist tixPath
                  when tixexists $
                    prettyWarnL
                      [ flow "Removing HPC file"
                      , pretty tixPath <> "."
                      ]
                  liftIO $ ignoringAbsence (removeFile tixPath)

                let args = topts.additionalArgs
                    argsDisplay = case args of
                      [] -> ""
                      _ -> ", args: "
                        <> T.intercalate " " (map showProcessArgDebug args)
                announce $
                     "test (suite: "
                  <> display (unqualCompToText testName)
                  <> display argsDisplay
                  <> ")"

                -- Clear "Progress: ..." message before
                -- redirecting output.
                case outputType of
                  OTConsole _ -> do
                    logStickyDone ""
                    liftIO $ hFlush stdout
                    liftIO $ hFlush stderr
                  OTLogFile _ _ -> pure ()

                -- Route the suite's stdout/stderr: inherit, prefix each line
                -- to the console, or append to the log file.
                let output = case outputType of
                      OTConsole Nothing -> Nothing <$ inherit
                      OTConsole (Just prefix) -> fmap
                        ( \src -> Just $
                            runConduit $ src
                              .| CT.decodeUtf8Lenient
                              .| CT.lines
                              .| CL.map stripCR
                              .| CL.mapM_ (\t -> logInfo $ prefix <> display t)
                        )
                        createSource
                      OTLogFile _ h -> Nothing <$ useHandleOpen h
                    runOutput p =
                      case (getStdout p, getStderr p) of
                        (Nothing, Nothing) -> pure ()
                        (Just x, Just y) -> concurrently_ x y
                        (x, y) -> assert False $
                          concurrently_
                            (fromMaybe (pure ()) x)
                            (fromMaybe (pure ()) y)
                    -- Wait up to maxSecs, then terminate gracefully and allow
                    -- graceSecs before force-killing. 'Nothing' means timed
                    -- out.
                    timeoutWithGrace p maxSecs graceSecs = do
                      mExit <- timeout (maxSecs * 1000000) (waitExitCode p)
                      case mExit of
                        Just ec -> pure (Just ec)
                        Nothing -> do
                          terminateGracefully p
                          mGraceExit <- timeout (graceSecs * 1000000)
                            (waitExitCode p)
                          case mGraceExit of
                            Just _ -> pure Nothing
                            Nothing -> do
                              forceKill p
                              void $ waitExitCode p
                              pure Nothing
                    runWithTimeout pc
                      | Just maxSecs <- topts.maximumTimeSeconds, maxSecs > 0
                      , Just graceSecs <- topts.timeoutGraceSeconds
                      , graceSecs > 0 =
                          withProcessWait (prepareForEscalation pc) $ \p -> do
                            (_, mec') <- concurrently
                              (runOutput p)
                              (timeoutWithGrace p maxSecs graceSecs)
                            pure mec'
                      | Just maxSecs <- topts.maximumTimeSeconds, maxSecs > 0 =
                          timeout (maxSecs * 1000000) $
                            withProcessWait pc $ \p -> do
                              runOutput p
                              waitExitCode p
                      | otherwise =
                          Just <$>
                            withProcessWait pc
                              (\p -> runOutput p *> waitExitCode p)

                mec <- withWorkingDir (toFilePath pkgDir) $
                  proc (toFilePath exePath) args $ \pc0 -> do
                    -- LibV09 stubs read a (logPath, testName) pair from
                    -- stdin; otherwise stdin is inherited only for
                    -- interactive terminals when --allow-stdin.
                    changeStdin <-
                      if isTestTypeLib
                        then do
                          logPath <- buildLogPath package (Just stestName)
                          ensureDir (parent logPath)
                          pure $
                              setStdin
                            $ byteStringInput
                            $ BL.fromStrict
                            $ encodeUtf8 $ fromString $
                                show ( logPath
                                     , toCabalName testName
                                     )
                        else do
                          isTerminal <- view $ globalOptsL . to (.terminal)
                          if topts.allowStdin && isTerminal
                            then pure id
                            else pure $ setStdin $ byteStringInput mempty
                    let pc = changeStdin
                          $ setStdout output
                          $ setStderr output
                            pc0
                    runWithTimeout pc
                -- Add a trailing newline, incase the test
                -- output didn't finish with a newline.
                case outputType of
                  OTConsole Nothing -> prettyInfo blankLine
                  _ -> pure ()
                -- Move the .tix file out of the package
                -- directory into the hpc work dir, for
                -- tidiness.
                when needHpc $
                  updateTixFile package.name tixPath testName'
                let announceResult result =
                      announce $
                           "Test suite "
                        <> display (unqualCompToText testName)
                        <> " "
                        <> result
                case mec of
                  Just ExitSuccess -> do
                    announceResult "passed"
                    pure Map.empty
                  Nothing -> do
                    announceResult "timed out"
                    if expectFailure
                      then pure Map.empty
                      else pure $ Map.singleton testName Nothing
                  Just ec -> do
                    announceResult "failed"
                    if expectFailure
                      then pure Map.empty
                      else pure $ Map.singleton testName (Just ec)
              else do
                unless expectFailure $
                  prettyError $
                    pretty $ TestSuiteExeMissing
                      (package.buildType == C.Simple)
                      exeName
                      package.name
                      testName
                pure emptyResult

        when needHpc $ do
          -- LibV09 suites report coverage under the "Stub" name.
          let testsToRun' = map f testsToRun
              f tName =
                case (.interface) <$> mComponent of
                  Just C.TestSuiteLibV09{} -> unqualCompToText tName <> "Stub"
                  _ -> unqualCompToText tName
               where
                mComponent = collectionLookup tName package.testSuites
          generateHpcReport pkgDir package testsToRun'

        bs <- liftIO $
          case outputType of
            OTConsole _ -> pure ""
            OTLogFile logFile h -> do
              hClose h
              S.readFile $ toFilePath logFile

        let succeeded = Map.null errs
        unless (succeeded || expectFailure) $
          throwM $ TestSuiteFailure
            (taskProvides task)
            errs
            (case outputType of
               OTLogFile fp _ -> Just fp
               OTConsole _ -> Nothing)
            bs

        -- Persist the outcome so unchanged suites can be skipped next run.
        setTestStatus pkgDir $ if succeeded then TSSuccess else TSFailure

-- | Implements running a package's benchmarks.
singleBench ::
     HasEnvConfig env
  => BenchmarkOpts
  -> [StackUnqualCompName]
     -- ^ Benchmarks to run.
  -> ActionContext
  -> ExecuteEnv
  -> Task
  -> InstalledMap
  -> RIO env ()
singleBench beopts benchesToRun ac ee task installedMap = do
  (allDepsMap, _cache) <- getConfigCache ee task installedMap False True
  withSingleContext ac ee task.taskType allDepsMap (Just "bench") $
    \_package _cabalfp _pkgDir cabal announce _outputType -> do
      -- Benchmarks are run via "cabal bench"; any extra options are forwarded
      -- through a single --benchmark-options argument.
      let args = map unqualCompToString benchesToRun <> maybe []
            ((:[]) . ("--benchmark-options=" <>))
            beopts.additionalArgs
      toRun <-
        if beopts.runBenchmarks
          then pure True
          -- An unfiltered action reaching this point is an internal error.
          else prettyThrowM $ ActionNotFilteredBug "singleBench"
      when toRun $ do
        announce "benchmarks"
        cabal CloseOnException KeepTHLoading ("bench" : args)

-- Do not pass `-hpcdir` as GHC option if the coverage is not enabled.
-- This helps running stack-compiled programs with dynamic interpreters like
-- `hint`. Cfr: https://github.com/commercialhaskell/stack/issues/997
extraBuildOptions ::
     (HasEnvConfig env, HasRunner env)
  => WhichCompiler
  -> BuildOpts
  -> Maybe Semaphore
  -> RIO env [String]
extraBuildOptions wc bopts semaphore = do
  colorOpt <- appropriateGhcColorFlag
  let optsFlag = compilerOptionsCabalFlag wc
      -- Forward the GHC jobserver semaphore name, when one is in use.
      semaphoreFlag = maybe
        []
        (("--semaphore":) . L.singleton . getSemaphoreName . semaphoreName)
        semaphore
      -- Colour flag (if any) appended, space-separated, to the GHC options
      -- value.
      baseOpts = maybe "" (" " ++) colorOpt
  if bopts.testOpts.coverage
    then do
      hpcIndexDir <- toFilePathNoTrailingSep <$> hpcRelativeDir
      pure $ semaphoreFlag ++ [optsFlag, "-hpcdir " ++ hpcIndexDir ++ baseOpts]
    else
      pure $ semaphoreFlag ++ [optsFlag, baseOpts]

-- Library, sub-library, foreign library and executable build components.
-- | Cabal component arguments (@lib:@, @flib:@ and @exe:@ prefixed names) for
-- a package's primary (non-test, non-benchmark) components.
primaryComponentOptions :: LocalPackage -> [String]
primaryComponentOptions lp =
  -- TODO: get this information from target parsing instead, which will allow
  -- users to turn off library building if desired
     ( if hasBuildableMainLibrary package
         -- Foreign libraries are only built when the main library is.
         then map T.unpack
           $ T.append "lib:" (T.pack (packageNameString package.name))
           : map
               (T.append "flib:")
               (getBuildableListText package.foreignLibraries)
         else []
     )
  ++ map
       (T.unpack . T.append "lib:")
       (getBuildableListText package.subLibraries)
  ++ Set.toList
       ( Set.mapMonotonic
           (\s -> "exe:" ++ unqualCompToString s)
           (exesToBuild lp)
       )
 where
  package = lp.package

-- | Either build all executables or, if the user specifies requested
-- components, just build them.
exesToBuild :: LocalPackage -> Set StackUnqualCompName
exesToBuild lp = if lp.wanted
  then exeComponents lp.components
  else buildableExes lp.package

-- Test-suite and benchmark build components.
finalComponentOptions :: LocalPackage -> [String]
finalComponentOptions lp =
  map (T.unpack . renderComponent) $
    Set.toList $
      Set.filter (\c -> isCTest c || isCBench c) lp.components

-- | The components a task covers: a local package's components, or none for a
-- remote package.
taskComponents :: Task -> Set NamedComponent
taskComponents task =
  case task.taskType of
    TTLocalMutable lp -> lp.components -- FIXME probably just want lpWanted
    TTRemotePackage{} -> Set.empty

-- | Does the curator expect this package's tests to fail?
expectTestFailure :: PackageName -> Maybe Curator -> Bool
expectTestFailure pname =
  maybe False (Set.member pname . (.expectTestFailure))

-- | Does the curator expect this package's benchmarks to fail?
expectBenchmarkFailure :: PackageName -> Maybe Curator -> Bool
expectBenchmarkFailure pname =
  maybe False (Set.member pname .
(.expectBenchmarkFailure)) + +fulfillCuratorBuildExpectations :: + (HasCallStack, HasTerm env) + => PackageName + -> Maybe Curator + -> Bool + -> Bool + -> b + -> RIO env b + -> RIO env b +fulfillCuratorBuildExpectations pname mcurator enableTests _ defValue action + | enableTests && expectTestFailure pname mcurator = + tryAny action >>= \case + Right res -> do + prettyWarnL + [ style Current (fromPackageName pname) <> ":" + , flow "unexpected test build success." + ] + pure res + Left _ -> pure defValue +fulfillCuratorBuildExpectations pname mcurator _ enableBench defValue action + | enableBench && expectBenchmarkFailure pname mcurator = + tryAny action >>= \case + Right res -> do + prettyWarnL + [ style Current (fromPackageName pname) <> ":" + , flow "unexpected benchmark build success." + ] + pure res + Left _ -> pure defValue +fulfillCuratorBuildExpectations _ _ _ _ _ action = action diff --git a/src/Stack/Build/Haddock.hs b/src/Stack/Build/Haddock.hs index 74bc200eea..9b1712c261 100644 --- a/src/Stack/Build/Haddock.hs +++ b/src/Stack/Build/Haddock.hs @@ -1,292 +1,476 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Build.Haddock +Description : Generate Haddock documentation. +License : BSD-3-Clause + +Generate Haddock documentation. 
+-} --- | Generate haddocks module Stack.Build.Haddock - ( generateLocalHaddockIndex - , generateDepsHaddockIndex - , generateSnapHaddockIndex - , openHaddocksInBrowser - , shouldHaddockPackage - , shouldHaddockDeps - ) where + ( generateDepsHaddockIndex + , generateLocalHaddockIndex + , generateSnapHaddockIndex + , openHaddocksInBrowser + , shouldHaddockDeps + , shouldHaddockPackage + , generateLocalHaddockForHackageArchives + ) where -import Stack.Prelude +import qualified Codec.Archive.Tar as Tar +import qualified Codec.Compression.GZip as GZip import qualified Data.Foldable as F import qualified Data.HashSet as HS import qualified Data.Map.Strict as Map import qualified Data.Set as Set -import Data.Time (UTCTime) +import qualified Data.Text as T +import Distribution.Text ( display ) import Path + ( (), addExtension, dirname, fileExtension, filename + , fromAbsDir, fromAbsFile, fromRelDir, parent, parseRelDir + , parseRelFile + ) import Path.Extra + ( parseCollapsedAbsFile, toFilePathNoTrailingSep + , tryGetModificationTime + ) import Path.IO -import RIO.List (intercalate) -import RIO.PrettyPrint + ( copyDirRecur, copyDirRecur', doesDirExist, doesFileExist + , ensureDir, ignoringAbsence, listDir, removeDirRecur + ) +import qualified RIO.ByteString.Lazy as BL +import RIO.List ( intercalate, intersperse ) +import RIO.Process ( HasProcessContext, withWorkingDir ) import Stack.Constants -import Stack.PackageDump -import Stack.Types.Build -import Stack.Types.Config -import Stack.Types.GhcPkgId + ( docDirSuffix, htmlDirSuffix, relDirAll, relFileIndexHtml ) +import Stack.Constants.Config ( distDirFromDir ) +import Stack.Prelude hiding ( Display (..) ) +import Stack.Types.Build.Exception ( BuildException (..) ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), HasCompiler (..) ) +import Stack.Types.ConfigureOpts ( BaseConfigOpts (..) ) +import Stack.Types.BuildOpts ( BuildOpts (..), HaddockOpts (..) 
) +import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..), BuildSubset (BSOnlyDependencies, BSOnlySnapshot) ) +import Stack.Types.DumpPackage ( DumpPackage (..) ) +import Stack.Types.EnvConfig ( EnvConfig (..), HasEnvConfig (..) ) +import Stack.Types.GhcPkgId ( GhcPkgId ) +import Stack.Types.InterfaceOpt ( InterfaceOpt (..) ) import Stack.Types.Package + ( InstallLocation (..), LocalPackage (..), Package (..) ) import qualified System.FilePath as FP -import RIO.Process -import Web.Browser (openBrowser) +import Web.Browser ( openBrowser ) +import RIO.FilePath (dropTrailingPathSeparator) -openHaddocksInBrowser - :: HasTerm env - => BaseConfigOpts - -> Map PackageName (PackageIdentifier, InstallLocation) - -- ^ Available packages and their locations for the current project - -> Set PackageName - -- ^ Build targets as determined by 'Stack.Build.Source.loadSourceMap' - -> RIO env () +openHaddocksInBrowser :: + HasTerm env + => BaseConfigOpts + -> Map PackageName (PackageIdentifier, InstallLocation) + -- ^ Available packages and their locations for the current project + -> Set PackageName + -- ^ Build targets as determined by 'Stack.Build.Source.loadSourceMap' + -> RIO env () openHaddocksInBrowser bco pkgLocations buildTargets = do - let cliTargets = (boptsCLITargets . bcoBuildOptsCLI) bco - getDocIndex = do - let localDocs = haddockIndexFile (localDepsDocDir bco) - localExists <- doesFileExist localDocs - if localExists - then return localDocs - else do - let snapDocs = haddockIndexFile (snapDocDir bco) - snapExists <- doesFileExist snapDocs - if snapExists - then return snapDocs - else throwString "No local or snapshot doc index found to open." - docFile <- - case (cliTargets, map (`Map.lookup` pkgLocations) (Set.toList buildTargets)) of - ([_], [Just (pkgId, iloc)]) -> do - pkgRelDir <- (parseRelDir . 
packageIdentifierString) pkgId - let docLocation = - case iloc of - Snap -> snapDocDir bco - Local -> localDocDir bco - let docFile = haddockIndexFile (docLocation pkgRelDir) - exists <- doesFileExist docFile - if exists - then return docFile - else do - logWarn $ - "Expected to find documentation at " <> - fromString (toFilePath docFile) <> - ", but that file is missing. Opening doc index instead." - getDocIndex - _ -> getDocIndex - prettyInfo $ "Opening" <+> pretty docFile <+> "in the browser." - _ <- liftIO $ openBrowser (toFilePath docFile) - return () + let cliTargets = bco.buildOptsCLI.targetsCLI + getDocIndex = do + let localDocs = haddockIndexFile (localDepsDocDir bco) + localExists <- doesFileExist localDocs + if localExists + then pure localDocs + else do + let snapDocs = haddockIndexFile (snapDocDir bco) + snapExists <- doesFileExist snapDocs + if snapExists + then pure snapDocs + else throwIO HaddockIndexNotFound + docFile <- + case (cliTargets, map (`Map.lookup` pkgLocations) (Set.toList buildTargets)) of + ([_], [Just (pkgId, iloc)]) -> do + pkgRelDir <- (parseRelDir . packageIdentifierString) pkgId + let docLocation = + case iloc of + Snap -> snapDocDir bco + Local -> localDocDir bco + let docFile = haddockIndexFile (docLocation pkgRelDir) + exists <- doesFileExist docFile + if exists + then pure docFile + else do + prettyWarnL + [ flow "Expected to find documentation at" + , pretty docFile <> "," + , flow "but that file is missing. Opening doc index instead." + ] + getDocIndex + _ -> getDocIndex + prettyInfo $ "Opening" <+> pretty docFile <+> "in the browser." + void $ liftIO $ openBrowser (toFilePath docFile) -- | Determine whether we should haddock for a package. 
-shouldHaddockPackage :: BuildOpts - -> Set PackageName -- ^ Packages that we want to generate haddocks for - -- in any case (whether or not we are going to generate - -- haddocks for dependencies) - -> PackageName - -> Bool +shouldHaddockPackage :: + BuildOpts + -> Set PackageName + -- ^ Packages that we want to generate haddocks for in any case (whether or + -- not we are going to generate haddocks for dependencies) + -> PackageName + -> Bool shouldHaddockPackage bopts wanted name = - if Set.member name wanted - then boptsHaddock bopts - else shouldHaddockDeps bopts + if Set.member name wanted + then bopts.buildHaddocks + else shouldHaddockDeps bopts -- | Determine whether to build haddocks for dependencies. shouldHaddockDeps :: BuildOpts -> Bool -shouldHaddockDeps bopts = fromMaybe (boptsHaddock bopts) (boptsHaddockDeps bopts) +shouldHaddockDeps bopts = fromMaybe bopts.buildHaddocks bopts.haddockDeps --- | Generate Haddock index and contents for local packages. -generateLocalHaddockIndex - :: (HasProcessContext env, HasLogFunc env, HasCompiler env) - => BaseConfigOpts - -> Map GhcPkgId DumpPackage -- ^ Local package dump - -> [LocalPackage] - -> RIO env () +-- | Generate Haddock index and contents for project packages. +generateLocalHaddockIndex :: + (HasCompiler env, HasProcessContext env, HasTerm env) + => BaseConfigOpts + -> Map GhcPkgId DumpPackage -- ^ Local package dump + -> [LocalPackage] + -> RIO env () generateLocalHaddockIndex bco localDumpPkgs locals = do - let dumpPackages = - mapMaybe - (\LocalPackage{lpPackage = Package{..}} -> - F.find - (\dp -> dpPackageIdent dp == PackageIdentifier packageName packageVersion) - localDumpPkgs) - locals - generateHaddockIndex - "local packages" - bco - dumpPackages - "." 
- (localDocDir bco) + let dumpPackages = + mapMaybe + ( \LocalPackage {package = Package {name, version}} -> + F.find + ( \dp -> dp.packageIdent == + PackageIdentifier name version + ) + localDumpPkgs + ) + locals + generateHaddockIndex + "project packages" + bco + dumpPackages + "." + (localDocDir bco) --- | Generate Haddock index and contents for local packages and their dependencies. -generateDepsHaddockIndex - :: (HasProcessContext env, HasLogFunc env, HasCompiler env) - => BaseConfigOpts - -> Map GhcPkgId DumpPackage -- ^ Global dump information - -> Map GhcPkgId DumpPackage -- ^ Snapshot dump information - -> Map GhcPkgId DumpPackage -- ^ Local dump information - -> [LocalPackage] - -> RIO env () +-- | Generate Haddock index and contents for project packages and their +-- dependencies. +generateDepsHaddockIndex :: + (HasCompiler env, HasProcessContext env, HasTerm env) + => BaseConfigOpts + -> Map GhcPkgId DumpPackage -- ^ Global dump information + -> Map GhcPkgId DumpPackage -- ^ Snapshot dump information + -> Map GhcPkgId DumpPackage -- ^ Local dump information + -> [LocalPackage] + -> RIO env () generateDepsHaddockIndex bco globalDumpPkgs snapshotDumpPkgs localDumpPkgs locals = do - let deps = (mapMaybe (`lookupDumpPackage` allDumpPkgs) . nubOrd . findTransitiveDepends . mapMaybe getGhcPkgId) locals - depDocDir = localDepsDocDir bco - generateHaddockIndex - "local packages and dependencies" - bco - deps - ".." - depDocDir - where - getGhcPkgId :: LocalPackage -> Maybe GhcPkgId - getGhcPkgId LocalPackage{lpPackage = Package{..}} = - let pkgId = PackageIdentifier packageName packageVersion - mdpPkg = F.find (\dp -> dpPackageIdent dp == pkgId) localDumpPkgs - in fmap dpGhcPkgId mdpPkg - findTransitiveDepends :: [GhcPkgId] -> [GhcPkgId] - findTransitiveDepends = (`go` HS.empty) . 
HS.fromList - where - go todo checked = - case HS.toList todo of - [] -> HS.toList checked - (ghcPkgId:_) -> - let deps = - case lookupDumpPackage ghcPkgId allDumpPkgs of - Nothing -> HS.empty - Just pkgDP -> HS.fromList (dpDepends pkgDP) - deps' = deps `HS.difference` checked - todo' = HS.delete ghcPkgId (deps' `HS.union` todo) - checked' = HS.insert ghcPkgId checked - in go todo' checked' - allDumpPkgs = [localDumpPkgs, snapshotDumpPkgs, globalDumpPkgs] + let deps = ( mapMaybe + (`lookupDumpPackage` allDumpPkgs) + . nubOrd + . findTransitiveDepends + . mapMaybe getGhcPkgId + ) locals + depDocDir = localDepsDocDir bco + generateHaddockIndex + "project packages and dependencies" + bco + deps + ".." + depDocDir + where + getGhcPkgId :: LocalPackage -> Maybe GhcPkgId + getGhcPkgId LocalPackage {package = Package {name, version}} = + let pkgId = PackageIdentifier name version + mdpPkg = F.find (\dp -> dp.packageIdent == pkgId) localDumpPkgs + in fmap (.ghcPkgId) mdpPkg + findTransitiveDepends :: [GhcPkgId] -> [GhcPkgId] + findTransitiveDepends = (`go` HS.empty) . HS.fromList + where + go todo checked = + case HS.toList todo of + [] -> HS.toList checked + (ghcPkgId:_) -> + let deps = case lookupDumpPackage ghcPkgId allDumpPkgs of + Nothing -> HS.empty + Just pkgDP -> HS.fromList pkgDP.depends + deps' = deps `HS.difference` checked + todo' = HS.delete ghcPkgId (deps' `HS.union` todo) + checked' = HS.insert ghcPkgId checked + in go todo' checked' + allDumpPkgs = [localDumpPkgs, snapshotDumpPkgs, globalDumpPkgs] -- | Generate Haddock index and contents for all snapshot packages. 
-generateSnapHaddockIndex - :: (HasProcessContext env, HasLogFunc env, HasCompiler env) - => BaseConfigOpts - -> Map GhcPkgId DumpPackage -- ^ Global package dump - -> Map GhcPkgId DumpPackage -- ^ Snapshot package dump - -> RIO env () +generateSnapHaddockIndex :: + (HasCompiler env, HasProcessContext env, HasTerm env) + => BaseConfigOpts + -> Map GhcPkgId DumpPackage -- ^ Global package dump + -> Map GhcPkgId DumpPackage -- ^ Snapshot package dump + -> RIO env () generateSnapHaddockIndex bco globalDumpPkgs snapshotDumpPkgs = - generateHaddockIndex - "snapshot packages" - bco - (Map.elems snapshotDumpPkgs ++ Map.elems globalDumpPkgs) - "." - (snapDocDir bco) + generateHaddockIndex + "snapshot packages" + bco + (Map.elems snapshotDumpPkgs ++ Map.elems globalDumpPkgs) + "." + (snapDocDir bco) -- | Generate Haddock index and contents for specified packages. -generateHaddockIndex - :: (HasProcessContext env, HasLogFunc env, HasCompiler env) - => Text - -> BaseConfigOpts - -> [DumpPackage] - -> FilePath - -> Path Abs Dir - -> RIO env () +generateHaddockIndex :: + (HasCompiler env, HasProcessContext env, HasTerm env) + => Text + -> BaseConfigOpts + -> [DumpPackage] + -> FilePath + -> Path Abs Dir + -> RIO env () generateHaddockIndex descr bco dumpPackages docRelFP destDir = do - ensureDir destDir - interfaceOpts <- (liftIO . fmap nubOrd . mapMaybeM toInterfaceOpt) dumpPackages - unless (null interfaceOpts) $ do - let destIndexFile = haddockIndexFile destDir - eindexModTime <- liftIO (tryGetModificationTime destIndexFile) - let needUpdate = - case eindexModTime of - Left _ -> True - Right indexModTime -> - or [mt > indexModTime | (_,mt,_,_) <- interfaceOpts] - if needUpdate - then do - logInfo $ - "Updating Haddock index for " <> - Stack.Prelude.display descr <> - " in\n" <> - fromString (toFilePath destIndexFile) - liftIO (mapM_ copyPkgDocs interfaceOpts) - haddockExeName <- view $ compilerPathsL.to (toFilePath . 
cpHaddock) - withWorkingDir (toFilePath destDir) $ readProcessNull - haddockExeName - (map (("--optghc=-package-db=" ++ ) . toFilePathNoTrailingSep) - [bcoSnapDB bco, bcoLocalDB bco] ++ - hoAdditionalArgs (boptsHaddockOpts (bcoBuildOpts bco)) ++ - ["--gen-contents", "--gen-index"] ++ - [x | (xs,_,_,_) <- interfaceOpts, x <- xs]) - else - logInfo $ - "Haddock index for " <> - Stack.Prelude.display descr <> - " already up to date at:\n" <> - fromString (toFilePath destIndexFile) - where - toInterfaceOpt :: DumpPackage -> IO (Maybe ([String], UTCTime, Path Abs File, Path Abs File)) - toInterfaceOpt DumpPackage {..} = - case dpHaddockInterfaces of - [] -> return Nothing - srcInterfaceFP:_ -> do - srcInterfaceAbsFile <- parseCollapsedAbsFile srcInterfaceFP - let (PackageIdentifier name _) = dpPackageIdent - destInterfaceRelFP = - docRelFP FP. - packageIdentifierString dpPackageIdent FP. - (packageNameString name FP.<.> "haddock") - docPathRelFP = - fmap ((docRelFP FP.) . FP.takeFileName) dpHaddockHtml - interfaces = intercalate "," $ - maybeToList docPathRelFP ++ [srcInterfaceFP] - - destInterfaceAbsFile <- parseCollapsedAbsFile (toFilePath destDir FP. destInterfaceRelFP) - esrcInterfaceModTime <- tryGetModificationTime srcInterfaceAbsFile - return $ - case esrcInterfaceModTime of - Left _ -> Nothing - Right srcInterfaceModTime -> - Just - ( [ "-i", interfaces ] - , srcInterfaceModTime - , srcInterfaceAbsFile - , destInterfaceAbsFile ) - copyPkgDocs :: (a, UTCTime, Path Abs File, Path Abs File) -> IO () - copyPkgDocs (_,srcInterfaceModTime,srcInterfaceAbsFile,destInterfaceAbsFile) = do - -- Copy dependencies' haddocks to documentation directory. This way, relative @../$pkg-$ver@ - -- links work and it's easy to upload docs to a web server or otherwise view them in a - -- non-local-filesystem context. 
We copy instead of symlink for two reasons: (1) symlinks - -- aren't reliably supported on Windows, and (2) the filesystem containing dependencies' - -- docs may not be available where viewing the docs (e.g. if building in a Docker - -- container). - edestInterfaceModTime <- tryGetModificationTime destInterfaceAbsFile - case edestInterfaceModTime of - Left _ -> doCopy - Right destInterfaceModTime - | destInterfaceModTime < srcInterfaceModTime -> doCopy - | otherwise -> return () - where - doCopy = do - ignoringAbsence (removeDirRecur destHtmlAbsDir) - ensureDir destHtmlAbsDir - onException - (copyDirRecur' (parent srcInterfaceAbsFile) destHtmlAbsDir) - (ignoringAbsence (removeDirRecur destHtmlAbsDir)) - destHtmlAbsDir = parent destInterfaceAbsFile + ensureDir destDir + interfaceOpts <- + (liftIO . fmap nubOrd . mapMaybeM toInterfaceOpt) dumpPackages + unless (null interfaceOpts) $ do + let destIndexFile = haddockIndexFile destDir + prettyDescr = style Current (fromString $ T.unpack descr) + needUpdate <- liftIO (tryGetModificationTime destIndexFile) <&> \case + Left _ -> True + Right indexModTime -> + or [ mt > indexModTime + | mt <- map (.srcInterfaceFileModTime) interfaceOpts + ] + if needUpdate + then do + prettyInfo $ + fillSep + [ flow "Updating Haddock index for" + , prettyDescr + , "in:" + ] + <> line + <> pretty destIndexFile + liftIO (mapM_ copyPkgDocs interfaceOpts) + haddockExeName <- view $ compilerPathsL . to (toFilePath . (.haddock)) + withWorkingDir (toFilePath destDir) $ readProcessNull + haddockExeName + ( map + (("--optghc=-package-db=" ++ ) . 
toFilePathNoTrailingSep) + [bco.snapDB, bco.localDB] + ++ bco.buildOpts.haddockOpts.additionalArgs + ++ ["--gen-contents", "--gen-index"] + ++ [x | xs <- map (.readInterfaceArgs) interfaceOpts, x <- xs] + ) + else + prettyInfo $ + fillSep + [ flow "Haddock index for" + , prettyDescr + , flow "already up to date at:" + ] + <> line + <> pretty destIndexFile + where + toInterfaceOpt :: + DumpPackage + -> IO (Maybe InterfaceOpt) + toInterfaceOpt dp = + case dp.haddockInterfaces of + [] -> pure Nothing + srcInterfaceFP:_ -> do + srcInterfaceFile <- parseCollapsedAbsFile srcInterfaceFP + let (PackageIdentifier name _) = dp.packageIdent + srcInterfaceDir = parent srcInterfaceFile + compInterfaceDirsAndFiles <- do + -- It is possible that the *.haddock file specified by the + -- haddock-interfaces key for an installed package may not exist. For + -- example, with GHC 9.6.6 on Windows, there is no + -- + -- ${pkgroot}/../doc/html/libraries/rts-1.0.2\rts.haddock + (srcInterfaceSubDirs, _) <- doesDirExist srcInterfaceDir >>= \case + True -> listDir srcInterfaceDir + False -> pure ([], []) + -- This assumes that Cabal (the library) `haddock --executables` for + -- component my-component of package my-package puts one *.haddock + -- file and associated files in directory: + -- + -- my-package/my-component + -- + -- Not all directories in directory my-package relate to components. + -- For example, my-package/src relates to the files for the + -- colourised code of the main library of package my-package. + let isCompInterfaceDir dir = do + (_, files) <- listDir dir + pure $ (dir, ) <$> F.find isInterface files + where + isInterface file = fileExtension file == Just ".haddock" + mapMaybeM isCompInterfaceDir srcInterfaceSubDirs + -- Lift a copy of the component's Haddock directory up to the same level + -- as the main library's Haddock directory. For compontent my-component + -- of package my-package we name the directory my-package_my-component. 
+ let liftcompInterfaceDir dir file = do + let parentDir = parent dir + parentName = dirname parentDir + compName = dirname dir + uniqueName <- do + let parentName' = + dropTrailingPathSeparator $ toFilePath parentName + compName' = + dropTrailingPathSeparator $ toFilePath compName + parseRelDir $ parentName' <> "_" <> compName' + let destCompDir = parent parentDir uniqueName + destCompFile = destCompDir filename file + ignoringAbsence (removeDirRecur destCompDir) + ensureDir destCompDir + onException + (copyDirRecur dir destCompDir) + (ignoringAbsence (removeDirRecur destCompDir)) + pure (destCompFile, uniqueName) + destInterfaceRelFP = + docRelFP FP. + packageIdentifierString dp.packageIdent FP. + (packageNameString name FP.<.> "haddock") + docPathRelFP = + fmap ((docRelFP FP.) . FP.takeFileName) dp.haddockHtml + mkInterface :: Maybe FilePath -> FilePath -> String + mkInterface mDocPath file = + intercalate "," $ mcons mDocPath [file] + compInterface :: (Path Abs Dir, Path Abs File) -> IO String + compInterface (dir, file) = do + (file', uniqueName) <- liftcompInterfaceDir dir file + let compDir = dropTrailingPathSeparator $ toFilePath uniqueName + docDir = docRelFP FP. compDir + pure $ mkInterface (Just docDir) (toFilePath file') + interfaces = mkInterface docPathRelFP srcInterfaceFP + compInterfaces <- forM compInterfaceDirsAndFiles compInterface + let readInterfaceArgs = + "-i" : intersperse "-i" (interfaces : compInterfaces) + destInterfaceFile <- + parseCollapsedAbsFile (toFilePath destDir FP. destInterfaceRelFP) + tryGetModificationTime srcInterfaceFile <&> \case + Left _ -> Nothing + Right srcInterfaceFileModTime -> + Just InterfaceOpt + { readInterfaceArgs + , srcInterfaceFileModTime + , srcInterfaceFile + , destInterfaceFile + } + copyPkgDocs :: InterfaceOpt -> IO () + copyPkgDocs opts = + -- Copy dependencies' haddocks to documentation directory. 
This way, + -- relative @../$pkg-$ver@ links work and it's easy to upload docs to a web + -- server or otherwise view them in a non-local-filesystem context. We copy + -- instead of symlink for two reasons: (1) symlinks aren't reliably supported + -- on Windows, and (2) the filesystem containing dependencies' docs may not be + -- available where viewing the docs (e.g. if building in a Docker container). + tryGetModificationTime opts.destInterfaceFile >>= \case + Left _ -> doCopy + Right destInterfaceModTime + | destInterfaceModTime < opts.srcInterfaceFileModTime -> doCopy + | otherwise -> pure () + where + doCopy = do + ignoringAbsence (removeDirRecur destHtmlAbsDir) + ensureDir destHtmlAbsDir + onException + (copyDirRecur' (parent opts.srcInterfaceFile) destHtmlAbsDir) + (ignoringAbsence (removeDirRecur destHtmlAbsDir)) + destHtmlAbsDir = parent opts.destInterfaceFile -- | Find first DumpPackage matching the GhcPkgId -lookupDumpPackage :: GhcPkgId - -> [Map GhcPkgId DumpPackage] - -> Maybe DumpPackage +lookupDumpPackage :: + GhcPkgId + -> [Map GhcPkgId DumpPackage] + -> Maybe DumpPackage lookupDumpPackage ghcPkgId dumpPkgs = - listToMaybe $ mapMaybe (Map.lookup ghcPkgId) dumpPkgs + listToMaybe $ mapMaybe (Map.lookup ghcPkgId) dumpPkgs -- | Path of haddock index file. haddockIndexFile :: Path Abs Dir -> Path Abs File haddockIndexFile destDir = destDir relFileIndexHtml --- | Path of local packages documentation directory. +-- | Path of project packages documentation directory. localDocDir :: BaseConfigOpts -> Path Abs Dir -localDocDir bco = bcoLocalInstallRoot bco docDirSuffix +localDocDir bco = bco.localInstallRoot docDirSuffix --- | Path of documentation directory for the dependencies of local packages +-- | Path of documentation directory for the dependencies of project packages localDepsDocDir :: BaseConfigOpts -> Path Abs Dir localDepsDocDir bco = localDocDir bco relDirAll -- | Path of snapshot packages documentation directory. 
snapDocDir :: BaseConfigOpts -> Path Abs Dir -snapDocDir bco = bcoSnapInstallRoot bco docDirSuffix +snapDocDir bco = bco.snapInstallRoot docDirSuffix + +generateLocalHaddockForHackageArchives :: + (HasEnvConfig env, HasTerm env) + => [LocalPackage] + -> RIO env () +generateLocalHaddockForHackageArchives lps = do + buildSubset <- view $ envConfigL . to (.buildOptsCLI.buildSubset) + let localsExcluded = + buildSubset == BSOnlyDependencies || buildSubset == BSOnlySnapshot + unless localsExcluded $ + forM_ lps $ \lp -> + let pkg = lp.package + pkgId = PackageIdentifier pkg.name pkg.version + pkgDir = parent lp.cabalFP + in when lp.wanted $ + generateLocalHaddockForHackageArchive pkgDir pkgId + +-- | Generate an archive file containing local Haddock documentation for +-- Hackage, in a form accepted by Hackage. +generateLocalHaddockForHackageArchive :: + (HasEnvConfig env, HasTerm env) + => Path Abs Dir + -- ^ The package directory. + -> PackageIdentifier + -- ^ The package name and version. + -> RIO env () +generateLocalHaddockForHackageArchive pkgDir pkgId = do + distDir <- distDirFromDir pkgDir + let pkgIdName = display pkgId + name = pkgIdName <> "-docs" + (nameRelDir, tarGzFileName) = fromMaybe + (error "impossible") + ( do relDir <- parseRelDir name + nameRelFile <- parseRelFile name + tarGz <- addExtension ".gz" =<< addExtension ".tar" nameRelFile + pure (relDir, tarGz) + ) + tarGzFile = distDir tarGzFileName + docDir = distDir docDirSuffix htmlDirSuffix + tarGzFileCreated <- createTarGzFile tarGzFile docDir nameRelDir + if tarGzFileCreated + then + prettyInfo $ + fillSep + [ flow "Archive of Haddock documentation for Hackage for" + , style Current (fromString pkgIdName) + , flow "created at:" + ] + <> line + <> pretty tarGzFile + else + prettyWarnL + [ flow "No Haddock documentation for Hackage available for" + , style Error (fromString pkgIdName) <> "." 
+ ] + +createTarGzFile :: + Path Abs File + -- ^ Full path to archive file + -> Path Abs Dir + -- ^ Base directory + -> Path Rel Dir + -- ^ Directory to archive, relative to base directory + -> RIO env Bool +createTarGzFile tar base dir = do + dirExists <- doesDirExist $ base dir + if dirExists + then do + entries <- liftIO $ Tar.pack base' [dir'] + if null entries + then pure False + else do + ensureDir $ parent tar + BL.writeFile tar' $ GZip.compress $ Tar.write entries + pure True + else pure False + where + base' = fromAbsDir base + dir' = fromRelDir dir + tar' = fromAbsFile tar diff --git a/src/Stack/Build/Installed.hs b/src/Stack/Build/Installed.hs index 25571e8387..1e2d4d5b45 100644 --- a/src/Stack/Build/Installed.hs +++ b/src/Stack/Build/Installed.hs @@ -1,237 +1,335 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} --- Determine which packages are already installed + +{-| +Module : Stack.Build.Installed +Description : Determine which packages are already installed. +License : BSD-3-Clause + +Determine which packages are already installed. +-} + module Stack.Build.Installed - ( InstalledMap - , Installed (..) 
- , getInstalled - , InstallMap - , toInstallMap - ) where - -import Data.Conduit + ( getInstalled + , toInstallMap + ) where + +import Data.Conduit ( ZipSink (..), getZipSink ) import qualified Data.Conduit.List as CL import qualified Data.Set as Set -import Data.List import qualified Data.Map.Strict as Map -import Path -import Stack.Build.Cache -import Stack.Constants +import Stack.Build.Cache ( getInstalledExes ) +import Stack.Constants ( wiredInPackages ) import Stack.PackageDump + ( conduitDumpPackage, ghcPkgDump, pruneDeps ) import Stack.Prelude -import Stack.SourceMap (getPLIVersion, loadVersion) -import Stack.Types.Build -import Stack.Types.Config -import Stack.Types.GhcPkgId -import Stack.Types.Package +import Stack.SourceMap ( getPLIVersion, loadVersion ) +import Stack.Types.Compiler ( ActualCompiler ) +import Stack.Types.CompilerPaths ( getGhcPkgExe ) +import Stack.Types.DumpPackage + ( DumpPackage (..), SublibDump (..), sublibParentPkgId ) +import Stack.Types.EnvConfig + ( HasEnvConfig, HasSourceMap (..), packageDatabaseDeps + , packageDatabaseExtra, packageDatabaseLocal + ) +import Stack.Types.GhcPkgId ( GhcPkgId ) +import Stack.Types.Installed + ( InstallLocation (..), InstallMap, Installed (..) + , InstalledLibraryInfo (..), InstalledMap + , InstalledPackageLocation (..), PackageDatabase (..) + , PackageDbVariety (..), toPackageDbVariety + ) import Stack.Types.SourceMap + ( DepPackage (..), ProjectPackage (..), SourceMap (..) ) +-- | For the given t'SourceMap', yield a dictionary of package names for a +-- project's packages and dependencies, and pairs of their relevant database +-- (write-only or mutable) and package versions. 
toInstallMap :: MonadIO m => SourceMap -> m InstallMap toInstallMap sourceMap = do - projectInstalls <- - for (smProject sourceMap) $ \pp -> do - version <- loadVersion (ppCommon pp) - return (Local, version) - depInstalls <- - for (smDeps sourceMap) $ \dp -> - case dpLocation dp of - PLImmutable pli -> pure (Snap, getPLIVersion pli) - PLMutable _ -> do - version <- loadVersion (dpCommon dp) - return (Local, version) - return $ projectInstalls <> depInstalls + projectInstalls <- + for sourceMap.project $ \pp -> do + version <- loadVersion pp.projectCommon + pure (Local, version) + depInstalls <- + for sourceMap.deps $ \dp -> + case dp.location of + PLImmutable pli -> pure (Snap, getPLIVersion pli) + PLMutable _ -> do + version <- loadVersion dp.depCommon + pure (Local, version) + pure $ projectInstalls <> depInstalls -- | Returns the new InstalledMap and all of the locally registered packages. -getInstalled :: HasEnvConfig env - => InstallMap -- ^ does not contain any installed information - -> RIO env - ( InstalledMap - , [DumpPackage] -- globally installed - , [DumpPackage] -- snapshot installed - , [DumpPackage] -- locally installed - ) +getInstalled :: + HasEnvConfig env + => InstallMap -- ^ does not contain any installed information + -> RIO env + ( InstalledMap + , [DumpPackage] -- globally installed + , [DumpPackage] -- snapshot installed + , [DumpPackage] -- locally installed + ) getInstalled {-opts-} installMap = do - logDebug "Finding out which packages are already installed" - snapDBPath <- packageDatabaseDeps - localDBPath <- packageDatabaseLocal - extraDBPaths <- packageDatabaseExtra - - let loadDatabase' = loadDatabase {-opts mcache-} installMap - - (installedLibs0, globalDumpPkgs) <- loadDatabase' Nothing [] - (installedLibs1, _extraInstalled) <- - foldM (\lhs' pkgdb -> - loadDatabase' (Just (ExtraGlobal, pkgdb)) (fst lhs') - ) (installedLibs0, globalDumpPkgs) extraDBPaths - (installedLibs2, snapshotDumpPkgs) <- - loadDatabase' (Just (InstalledTo 
Snap, snapDBPath)) installedLibs1 - (installedLibs3, localDumpPkgs) <- - loadDatabase' (Just (InstalledTo Local, localDBPath)) installedLibs2 - let installedLibs = Map.fromList $ map lhPair installedLibs3 - - -- Add in the executables that are installed, making sure to only trust a - -- listed installation under the right circumstances (see below) - let exesToSM loc = Map.unions . map (exeToSM loc) - exeToSM loc (PackageIdentifier name version) = - case Map.lookup name installMap of - -- Doesn't conflict with anything, so that's OK - Nothing -> m - Just (iLoc, iVersion) - -- Not the version we want, ignore it - | version /= iVersion || mismatchingLoc loc iLoc -> Map.empty - - | otherwise -> m - where - m = Map.singleton name (loc, Executable $ PackageIdentifier name version) - mismatchingLoc installed target | target == installed = False - | installed == Local = False -- snapshot dependency could end up - -- in a local install as being mutable - | otherwise = True - exesSnap <- getInstalledExes Snap - exesLocal <- getInstalledExes Local - let installedMap = Map.unions - [ exesToSM Local exesLocal - , exesToSM Snap exesSnap - , installedLibs - ] - - return ( installedMap - , globalDumpPkgs - , snapshotDumpPkgs - , localDumpPkgs - ) - --- | Outputs both the modified InstalledMap and the Set of all installed packages in this database + logDebug "Finding out which packages are already installed" + snapDBPath <- packageDatabaseDeps + localDBPath <- packageDatabaseLocal + extraDBPaths <- packageDatabaseExtra + + let loadDatabase' = loadDatabase {-opts mcache-} installMap + + (installedLibs0, globalDumpPkgs) <- loadDatabase' GlobalPkgDb [] + (installedLibs1, _extraInstalled) <- + foldM (\lhs' pkgdb -> + loadDatabase' (UserPkgDb ExtraPkgDb pkgdb) (fst lhs') + ) (installedLibs0, globalDumpPkgs) extraDBPaths + (installedLibs2, snapshotDumpPkgs) <- + loadDatabase' (UserPkgDb (InstalledTo Snap) snapDBPath) installedLibs1 + (installedLibs3, localDumpPkgs) <- + loadDatabase' 
(UserPkgDb (InstalledTo Local) localDBPath) installedLibs2 + let installedLibs = + foldr' gatherAndTransformSubLoadHelper mempty installedLibs3 + + -- Add in the executables that are installed, making sure to only trust a + -- listed installation under the right circumstances (see below) + let exesToSM loc = Map.unions . map (exeToSM loc) + exeToSM loc (PackageIdentifier name version) = + case Map.lookup name installMap of + -- Doesn't conflict with anything, so that's OK + Nothing -> m + Just (iLoc, iVersion) + -- Not the version we want, ignore it + | version /= iVersion || mismatchingLoc loc iLoc -> Map.empty + | otherwise -> m + where + m = Map.singleton name (loc, Executable $ PackageIdentifier name version) + mismatchingLoc installed target + | target == installed = False + | installed == Local = False -- snapshot dependency could end up + -- in a local install as being mutable + | otherwise = True + exesSnap <- getInstalledExes Snap + exesLocal <- getInstalledExes Local + let installedMap = Map.unions + [ exesToSM Local exesLocal + , exesToSM Snap exesSnap + , installedLibs + ] + + pure ( installedMap + , globalDumpPkgs + , snapshotDumpPkgs + , localDumpPkgs + ) + +-- | Outputs both the modified InstalledMap and the Set of all installed +-- packages in this database -- -- The goal is to ascertain that the dependencies for a package are present, -- that it has profiling if necessary, and that it matches the version and --- location needed by the SourceMap -loadDatabase :: HasEnvConfig env - => InstallMap -- ^ to determine which installed things we should include - -> Maybe (InstalledPackageLocation, Path Abs Dir) -- ^ package database, Nothing for global - -> [LoadHelper] -- ^ from parent databases - -> RIO env ([LoadHelper], [DumpPackage]) -loadDatabase installMap mdb lhs0 = do - pkgexe <- getGhcPkgExe - (lhs1', dps) <- ghcPkgDump pkgexe (fmap snd (maybeToList mdb)) - $ conduitDumpPackage .| sink - lhs1 <- mapMaybeM (processLoadResult mdb) lhs1' - let lhs = 
pruneDeps - id - lhId - lhDeps - const - (lhs0 ++ lhs1) - return (map (\lh -> lh { lhDeps = [] }) $ Map.elems lhs, dps) - where - mloc = fmap fst mdb - sinkDP = CL.map (isAllowed installMap mloc &&& toLoadHelper mloc) - .| CL.consume - sink = getZipSink $ (,) - <$> ZipSink sinkDP - <*> ZipSink CL.consume - -processLoadResult :: HasLogFunc env - => Maybe (InstalledPackageLocation, Path Abs Dir) - -> (Allowed, LoadHelper) - -> RIO env (Maybe LoadHelper) -processLoadResult _ (Allowed, lh) = return (Just lh) -processLoadResult mdb (reason, lh) = do +-- location needed by the SourceMap. +loadDatabase :: + forall env. HasEnvConfig env + => InstallMap + -- ^ to determine which installed things we should include + -> PackageDatabase + -- ^ package database. + -> [LoadHelper] + -- ^ from parent databases + -> RIO env ([LoadHelper], [DumpPackage]) +loadDatabase installMap db lhs0 = do + sourceMap <- view sourceMapL + let compiler = sourceMap.compiler + pkgexe <- getGhcPkgExe + (lhs1', dps) <- ghcPkgDump pkgexe pkgDb $ conduitDumpPackage .| sink compiler + lhs1 <- mapMaybeM processLoadResult lhs1' + let lhs = pruneDeps id (.ghcPkgId) (.depsGhcPkgId) const (lhs0 ++ lhs1) + pure (map (\lh -> lh { depsGhcPkgId = [] }) $ Map.elems lhs, dps) + where + pkgDb = case db of + GlobalPkgDb -> [] + UserPkgDb _ fp -> [fp] + + sinkDP compiler = + CL.map (isAllowed installMap db' &&& toLoadHelper compiler db') + .| CL.consume + where + db' = toPackageDbVariety db + sink compiler = getZipSink $ (,) + <$> ZipSink (sinkDP compiler) + <*> ZipSink CL.consume + + processLoadResult :: (Allowed, LoadHelper) -> RIO env (Maybe LoadHelper) + processLoadResult (Allowed, lh) = pure (Just lh) + processLoadResult (reason, lh) = do logDebug $ - "Ignoring package " <> - fromString (packageNameString (fst (lhPair lh))) <> - maybe mempty (\db -> ", from " <> displayShow db <> ",") mdb <> - " due to" <> - case reason of - Allowed -> " the impossible?!?!" 
- UnknownPkg -> " it being unknown to the resolver / extra-deps." - WrongLocation mloc loc -> " wrong location: " <> displayShow (mloc, loc) - WrongVersion actual wanted -> - " wanting version " <> - fromString (versionString wanted) <> - " instead of " <> - fromString (versionString actual) - return Nothing + "Ignoring package " + <> fromPackageName (fst lh.pair) + <> case db of + GlobalPkgDb -> mempty + UserPkgDb loc fp -> ", from " <> displayShow (loc, fp) <> "," + <> " due to" + <> case reason of + UnknownPkg -> " it being unknown to the snapshot or extra-deps." + WrongLocation db' loc -> + " wrong location: " <> displayShow (db', loc) + WrongVersion actual wanted -> + " wanting version " + <> fromString (versionString wanted) + <> " instead of " + <> fromString (versionString actual) + pure Nothing +-- | Type representing results of 'isAllowed'. data Allowed - = Allowed - | UnknownPkg - | WrongLocation (Maybe InstalledPackageLocation) InstallLocation - | WrongVersion Version Version - deriving (Eq, Show) - --- | Check if a can be included in the set of installed packages or not, based --- on the package selections made by the user. This does not perform any --- dirtiness or flag change checks. 
-isAllowed :: InstallMap - -> Maybe InstalledPackageLocation - -> DumpPackage - -> Allowed -isAllowed installMap mloc dp = - case Map.lookup name installMap of - Nothing -> - -- If the sourceMap has nothing to say about this package, - -- check if it represents a sublibrary first - -- See: https://github.com/commercialhaskell/stack/issues/3899 - case dpParentLibIdent dp of - Just (PackageIdentifier parentLibName version') -> - case Map.lookup parentLibName installMap of - Nothing -> checkNotFound - Just instInfo - | version' == version -> checkFound instInfo - | otherwise -> checkNotFound -- different versions - Nothing -> checkNotFound - Just pii -> checkFound pii - where - PackageIdentifier name version = dpPackageIdent dp - -- Ensure that the installed location matches where the sourceMap says it - -- should be installed - checkLocation Snap = True -- snapshot deps could become mutable after getting any mutable dependency - checkLocation Local = mloc == Just (InstalledTo Local) || mloc == Just ExtraGlobal -- 'locally' installed snapshot packages can come from extra dbs - -- Check if a package is allowed if it is found in the sourceMap - checkFound (installLoc, installVer) - | not (checkLocation installLoc) = WrongLocation mloc installLoc - | version /= installVer = WrongVersion version installVer - | otherwise = Allowed - -- check if a package is allowed if it is not found in the sourceMap - checkNotFound = case mloc of - -- The sourceMap has nothing to say about this global package, so we can use it - Nothing -> Allowed - Just ExtraGlobal -> Allowed - -- For non-global packages, don't include unknown packages. - -- See: https://github.com/commercialhaskell/stack/issues/292 - Just _ -> UnknownPkg + = Allowed + -- ^ The installed package can be included in the set of relevant installed + -- packages. + | UnknownPkg + -- ^ The installed package cannot be included in the set of relevant + -- installed packages because the package is unknown. 
+ | WrongLocation PackageDbVariety InstallLocation + -- ^ The installed package cannot be included in the set of relevant + -- installed packages because the package is in the wrong package database. + | WrongVersion Version Version + -- ^ The installed package cannot be included in the set of relevant + -- installed packages because the package has the wrong version. + deriving (Eq, Show) + +-- | Check if an installed package can be included in the set of relevant +-- installed packages or not, based on the package selections made by the user. +-- This does not perform any dirtiness or flag change checks. +isAllowed :: + InstallMap + -> PackageDbVariety + -- ^ The package database providing the installed package. + -> DumpPackage + -- ^ The installed package to check. + -> Allowed +isAllowed installMap pkgDb dp = case Map.lookup name installMap of + Nothing -> + -- If the sourceMap has nothing to say about this package, + -- check if it represents a sub-library first + -- See: https://github.com/commercialhaskell/stack/issues/3899 + case sublibParentPkgId dp of + Just (PackageIdentifier parentLibName version') -> + case Map.lookup parentLibName installMap of + Nothing -> checkNotFound + Just instInfo + | version' == version -> checkFound instInfo + | otherwise -> checkNotFound -- different versions + Nothing -> checkNotFound + Just pii -> checkFound pii + where + PackageIdentifier name version = dp.packageIdent + -- Ensure that the installed location matches where the sourceMap says it + -- should be installed. + checkLocation Snap = + -- snapshot deps could become mutable after getting any mutable dependency. + True + checkLocation Local = case pkgDb of + GlobalDb -> False + -- 'locally' installed snapshot packages can come from 'extra' package + -- databases. + ExtraDb -> True + WriteOnlyDb -> False + MutableDb -> True + -- Check if an installed package is allowed if it is found in the sourceMap. 
+ checkFound (installLoc, installVer) + | not (checkLocation installLoc) = WrongLocation pkgDb installLoc + | version /= installVer = WrongVersion version installVer + | otherwise = Allowed + -- Check if an installed package is allowed if it is not found in the + -- sourceMap. + checkNotFound = case pkgDb of + -- The sourceMap has nothing to say about this global package, so we can use + -- it. + GlobalDb -> Allowed + ExtraDb -> Allowed + -- For non-global packages, don't include unknown packages. + -- See: https://github.com/commercialhaskell/stack/issues/292 + WriteOnlyDb -> UnknownPkg + MutableDb -> UnknownPkg +-- | Type representing certain information about an installed package. data LoadHelper = LoadHelper - { lhId :: !GhcPkgId - , lhDeps :: ![GhcPkgId] - , lhPair :: !(PackageName, (InstallLocation, Installed)) - } - deriving Show - -toLoadHelper :: Maybe InstalledPackageLocation -> DumpPackage -> LoadHelper -toLoadHelper mloc dp = LoadHelper - { lhId = gid - , lhDeps = - -- We always want to consider the wired in packages as having all - -- of their dependencies installed, since we have no ability to - -- reinstall them. This is especially important for using different - -- minor versions of GHC, where the dependencies of wired-in - -- packages may change slightly and therefore not match the - -- snapshot. - if name `Set.member` wiredInPackages - then [] - else dpDepends dp - , lhPair = (name, (toPackageLocation mloc, Library ident gid (Right <$> dpLicense dp))) - } - where - gid = dpGhcPkgId dp - ident@(PackageIdentifier name _) = dpPackageIdent dp - -toPackageLocation :: Maybe InstalledPackageLocation -> InstallLocation -toPackageLocation Nothing = Snap -toPackageLocation (Just ExtraGlobal) = Snap -toPackageLocation (Just (InstalledTo loc)) = loc + { ghcPkgId :: !GhcPkgId + -- ^ The package's id. 
+ , subLibDump :: !(Maybe SublibDump) + , depsGhcPkgId :: ![GhcPkgId] + -- ^ Unless the package's name is that of a 'wired-in' package, a list of + -- the ids of the installed packages that are the package's dependencies. + , pair :: !(PackageName, (InstallLocation, Installed)) + -- ^ A pair of (a) the package's name and (b) a pair of the relevant + -- database (write-only or mutable) and information about the library + -- installed. + } + deriving Show + +toLoadHelper :: ActualCompiler -> PackageDbVariety -> DumpPackage -> LoadHelper +toLoadHelper compiler pkgDb dp = LoadHelper + { ghcPkgId + , depsGhcPkgId + , subLibDump = dp.sublib + , pair + } + where + ghcPkgId = dp.ghcPkgId + ident@(PackageIdentifier name _) = dp.packageIdent + depsGhcPkgId = + -- We always want to consider the wired in packages as having all of their + -- dependencies installed, since we have no ability to reinstall them. This + -- is especially important for using different minor versions of GHC, where + -- the dependencies of wired-in packages may change slightly and therefore + -- not match the snapshot. + if name `Set.member` wiredInPackages compiler + then [] + else dp.depends + installedLibInfo = InstalledLibraryInfo ghcPkgId (Right <$> dp.license) mempty + + toInstallLocation :: PackageDbVariety -> InstallLocation + toInstallLocation GlobalDb = Snap + toInstallLocation ExtraDb = Snap + toInstallLocation WriteOnlyDb = Snap + toInstallLocation MutableDb = Local + + pair = (name, (toInstallLocation pkgDb, Library ident installedLibInfo)) + +-- | This is where sublibraries and main libraries are assembled into a single +-- entity Installed package, where all ghcPkgId live. 
+gatherAndTransformSubLoadHelper :: + LoadHelper + -> Map PackageName (InstallLocation, Installed) + -> Map PackageName (InstallLocation, Installed) +gatherAndTransformSubLoadHelper lh = + Map.insertWith onPreviousLoadHelper key value + where + -- Here we assume that both have the same location which already was a prior + -- assumption in Stack. + onPreviousLoadHelper + (pLoc, Library pn incomingLibInfo) + (_, Library _ existingLibInfo) + = ( pLoc + , Library pn existingLibInfo + { subLib = Map.union + incomingLibInfo.subLib + existingLibInfo.subLib + , ghcPkgId = if isJust lh.subLibDump + then existingLibInfo.ghcPkgId + else incomingLibInfo.ghcPkgId + } + ) + onPreviousLoadHelper newVal _oldVal = newVal + (key, value) = case lh.subLibDump of + Nothing -> (rawPackageName, rawValue) + Just sd -> (sd.packageName, updateAsSublib sd <$> rawValue) + (rawPackageName, rawValue) = lh.pair + updateAsSublib + sd + (Library (PackageIdentifier _sublibMungedPackageName version) libInfo) + = Library + (PackageIdentifier key version) + libInfo { subLib = Map.singleton sd.libraryName libInfo.ghcPkgId } + updateAsSublib _ v = v diff --git a/src/Stack/Build/Source.hs b/src/Stack/Build/Source.hs index 4e06bd5917..f981877298 100644 --- a/src/Stack/Build/Source.hs +++ b/src/Stack/Build/Source.hs @@ -1,490 +1,585 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE ConstraintKinds #-} --- Load information on package sources + +{-| +Module : Stack.Build.Source +Description : Load information on package sources. +License : BSD-3-Clause + +Load information on package sources. 
+-} + module Stack.Build.Source - ( projectLocalPackages - , localDependencies - , loadCommonPackage - , loadLocalPackage - , loadSourceMap - , getLocalFlags - , addUnlistedToBuildCache - , hashSourceMapData - ) where + ( projectLocalPackages + , localDependencies + , loadCommonPackage + , loadLocalPackage + , loadSourceMap + , addUnlistedToBuildCache + , hashSourceMapData + ) where -import Stack.Prelude -import qualified Pantry.SHA256 as SHA256 -import Data.ByteString.Builder (toLazyByteString) -import Conduit (ZipSink (..), withSourceFile) -import qualified Distribution.PackageDescription as C -import Data.List -import qualified Data.Map as Map -import qualified Data.Map.Strict as M -import qualified Data.Set as Set -import Stack.Build.Cache -import Stack.Build.Haddock (shouldHaddockDeps) -import Stack.Build.Target -import Stack.Package -import Stack.SourceMap -import Stack.Types.Build -import Stack.Types.Config -import Stack.Types.NamedComponent -import Stack.Types.Package -import Stack.Types.SourceMap -import System.FilePath (takeFileName) -import System.IO.Error (isDoesNotExistError) +import Data.ByteString.Builder ( toLazyByteString ) +import qualified Data.List as L +import qualified Data.Map as Map +import qualified Data.Map.Merge.Lazy as Map +import qualified Data.Map.Strict as M +import qualified Data.Set as Set +import qualified Distribution.PackageDescription as C +import qualified Pantry.SHA256 as SHA256 +import Stack.Build.Cache ( tryGetBuildCache ) +import Stack.Build.Haddock ( shouldHaddockDeps ) +import Stack.Package + ( buildableBenchmarks, buildableExes, buildableTestSuites + , hasBuildableMainLibrary, resolvePackage + ) +import Stack.PackageFile ( getPackageFile ) +import Stack.Prelude +import Stack.SourceMap + ( getCompilerInfo, immutableLocSha, mkProjectPackage + , pruneGlobals + ) +import Stack.Types.ApplyGhcOptions ( ApplyGhcOptions (..) ) +import Stack.Types.ApplyProgOptions ( ApplyProgOptions (..) 
) +import Stack.Types.Build.Exception ( BuildPrettyException (..) ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..) ) +import Stack.Types.BuildOpts ( BuildOpts (..), TestOpts (..) ) +import Stack.Types.BuildOptsCLI + ( ApplyCLIFlag (..), BuildOptsCLI (..) + , boptsCLIAllProgOptions + ) +import Stack.Types.CabalConfigKey ( CabalConfigKey (..) ) +import Stack.Types.Cache ( FileCache, FileCacheInfo (..) ) +import Stack.Types.CompilerPaths ( HasCompiler, getCompilerPath ) +import Stack.Types.Config ( Config (..), HasConfig (..), buildOptsL ) +import Stack.Types.Curator ( Curator (..) ) +import Stack.Types.DumpPackage ( DumpedGlobalPackage ) +import Stack.Types.EnvConfig + ( EnvConfig (..), HasEnvConfig (..), HasSourceMap (..) + , actualCompilerVersionL + ) +import Stack.Types.FileDigestCache ( readFileDigest ) +import Stack.Types.NamedComponent + ( NamedComponent (..), isCSubLib, splitComponents ) +import Stack.Types.Package + ( LocalPackage (..), Package (..), PackageConfig (..) + , dotCabalGetPath, memoizeRefWith, runMemoizedWith + ) +import Stack.Types.PackageFile + ( PackageComponentFile (..), PackageWarning ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.SourceMap + ( CommonPackage (..), DepPackage (..), ProjectPackage (..) + , SMActual (..), SMTargets (..), SourceMap (..) + , SourceMapHash (..), Target (..), ppRoot + ) +import Stack.Types.UnusedFlags ( FlagSource (..), UnusedFlags (..) ) +import System.FilePath ( takeFileName ) +import System.IO.Error ( isDoesNotExistError ) -- | loads and returns project packages -projectLocalPackages :: HasEnvConfig env - => RIO env [LocalPackage] +projectLocalPackages :: HasEnvConfig env => RIO env [LocalPackage] projectLocalPackages = do - sm <- view $ envConfigL.to envConfigSourceMap - for (toList $ smProject sm) loadLocalPackage + sm <- view $ envConfigL . 
to (.sourceMap) + for (toList sm.project) loadLocalPackage -- | loads all local dependencies - project packages and local extra-deps localDependencies :: HasEnvConfig env => RIO env [LocalPackage] localDependencies = do - bopts <- view $ configL.to configBuild - sourceMap <- view $ envConfigL . to envConfigSourceMap - forMaybeM (Map.elems $ smDeps sourceMap) $ \dp -> - case dpLocation dp of - PLMutable dir -> do - pp <- mkProjectPackage YesPrintWarnings dir (shouldHaddockDeps bopts) - Just <$> loadLocalPackage pp - _ -> return Nothing + bopts <- view $ configL . to (.build) + sourceMap <- view $ envConfigL . to (.sourceMap) + forMaybeM (Map.elems sourceMap.deps) $ \dp -> + case dp.location of + PLMutable dir -> do + pp <- mkProjectPackage YesPrintWarnings dir (shouldHaddockDeps bopts) + Just <$> loadLocalPackage pp + _ -> pure Nothing --- | Given the parsed targets and build command line options constructs --- a source map -loadSourceMap :: HasBuildConfig env - => SMTargets - -> BuildOptsCLI - -> SMActual DumpedGlobalPackage - -> RIO env SourceMap -loadSourceMap smt boptsCli sma = do - bconfig <- view buildConfigL - let compiler = smaCompiler sma - project = M.map applyOptsFlagsPP $ smaProject sma - bopts = configBuild (bcConfig bconfig) - applyOptsFlagsPP p@ProjectPackage{ppCommon = c} = - p{ppCommon = applyOptsFlags (M.member (cpName c) (smtTargets smt)) True c} - deps0 = smtDeps smt <> smaDeps sma - deps = M.map applyOptsFlagsDep deps0 - applyOptsFlagsDep d@DepPackage{dpCommon = c} = - d{dpCommon = applyOptsFlags (M.member (cpName c) (smtDeps smt)) False c} - applyOptsFlags isTarget isProjectPackage common = - let name = cpName common - flags = getLocalFlags boptsCli name - ghcOptions = - generalGhcOptions bconfig boptsCli isTarget isProjectPackage - cabalConfigOpts = - loadCabalConfigOpts bconfig (cpName common) isTarget isProjectPackage - in common - { cpFlags = - if M.null flags - then cpFlags common - else flags - , cpGhcOptions = - ghcOptions ++ 
cpGhcOptions common - , cpCabalConfigOpts = - cabalConfigOpts ++ cpCabalConfigOpts common - , cpHaddocks = - if isTarget - then boptsHaddock bopts - else shouldHaddockDeps bopts - } - packageCliFlags = Map.fromList $ - mapMaybe maybeProjectFlags $ - Map.toList (boptsCLIFlags boptsCli) - maybeProjectFlags (ACFByName name, fs) = Just (name, fs) - maybeProjectFlags _ = Nothing - globals = pruneGlobals (smaGlobal sma) (Map.keysSet deps) - logDebug "Checking flags" - checkFlagsUsedThrowing packageCliFlags FSCommandLine project deps - logDebug "SourceMap constructed" - return - SourceMap - { smTargets = smt - , smCompiler = compiler - , smProject = project - , smDeps = deps - , smGlobal = globals - } +-- | Given the parsed targets and build command line options constructs a source +-- map +loadSourceMap :: + forall env. HasBuildConfig env + => SMTargets + -> BuildOptsCLI + -> SMActual DumpedGlobalPackage + -> RIO env SourceMap +loadSourceMap targets boptsCli sma = do + logDebug "Applying and checking flags" + let errsPackages = mapMaybe checkPackage packagesWithCliFlags + eProject <- mapM applyOptsFlagsPP (M.toList sma.project) + eDeps <- mapM applyOptsFlagsDep (M.toList targetsAndSmaDeps) + let (errsProject, project') = partitionEithers eProject + (errsDeps, deps') = partitionEithers eDeps + errs = errsPackages <> errsProject <> errsDeps + unless (null errs) $ prettyThrowM $ InvalidFlagSpecification errs + let compiler = sma.compiler + project = M.fromList project' + deps = M.fromList deps' + globalPkgs = pruneGlobals sma.globals (Map.keysSet deps) + logDebug "SourceMap constructed" + pure SourceMap + { targets + , compiler + , project + , deps + , globalPkgs + } + where + cliFlags = boptsCli.flags + targetsAndSmaDeps = targets.deps <> sma.deps + packagesWithCliFlags = mapMaybe maybeProjectWithCliFlags $ Map.toList cliFlags + where + maybeProjectWithCliFlags (ACFByName name, _) = Just name + maybeProjectWithCliFlags _ = Nothing + checkPackage :: PackageName -> Maybe 
UnusedFlags + checkPackage name = + let maybeCommon = + fmap (.projectCommon) (Map.lookup name sma.project) + <|> fmap (.depCommon) (Map.lookup name targetsAndSmaDeps) + in maybe + (Just $ UFNoPackage FSCommandLine name) + (const Nothing) + maybeCommon + applyOptsFlagsPP :: + (a, ProjectPackage) + -> RIO env (Either UnusedFlags (a, ProjectPackage)) + applyOptsFlagsPP (name, p@ProjectPackage{ projectCommon = common }) = do + let isTarget = M.member common.name targets.targets + eCommon <- applyOptsFlags isTarget True common + pure $ (\common' -> (name, p { projectCommon = common' })) <$> eCommon + applyOptsFlagsDep :: + (a, DepPackage) + -> RIO env (Either UnusedFlags (a, DepPackage)) + applyOptsFlagsDep (name, d@DepPackage{ depCommon = common }) = do + let isTarget = M.member common.name targets.deps + eCommon <- applyOptsFlags isTarget False common + pure $ (\common' -> (name, d { depCommon = common' })) <$> eCommon + applyOptsFlags :: + Bool + -> Bool + -> CommonPackage + -> RIO env (Either UnusedFlags CommonPackage) + applyOptsFlags isTarget isProjectPackage common = do + let name = common.name + cliFlagsByName = Map.findWithDefault Map.empty (ACFByName name) cliFlags + cliFlagsAll = + Map.findWithDefault Map.empty ACFAllProjectPackages cliFlags + noOptsToApply = Map.null cliFlagsByName && Map.null cliFlagsAll + (flags, unusedByName, pkgFlags) <- if noOptsToApply + then + pure (Map.empty, Set.empty, Set.empty) + else do + gpd <- + -- This action is expensive. We want to avoid it if we can. 
+ liftIO common.gpd + let pkgFlags = Set.fromList $ map C.flagName $ C.genPackageFlags gpd + unusedByName = Map.keysSet $ Map.withoutKeys cliFlagsByName pkgFlags + cliFlagsAllRelevant = + Map.filterWithKey (\k _ -> k `Set.member` pkgFlags) cliFlagsAll + flags = cliFlagsByName <> cliFlagsAllRelevant + pure (flags, unusedByName, pkgFlags) + if Set.null unusedByName + -- All flags are defined, nothing to do + then do + bconfig <- view buildConfigL + let bopts = bconfig.config.build + ghcOptions = + generalGhcOptions bconfig boptsCli isTarget isProjectPackage + cabalConfigOpts = generalCabalConfigOpts + bconfig + boptsCli + name + isTarget + isProjectPackage + pure $ Right common + { flags = + if M.null flags + then common.flags + else flags + , ghcOptions = + ghcOptions ++ common.ghcOptions + , cabalConfigOpts = + cabalConfigOpts ++ common.cabalConfigOpts + , buildHaddocks = + if isTarget + then bopts.buildHaddocks + else shouldHaddockDeps bopts + } + -- Error about the undefined flags + else + pure $ Left $ UFFlagsNotDefined FSCommandLine name pkgFlags unusedByName --- | Get a 'SourceMapHash' for a given 'SourceMap' +-- | Get a t'SourceMapHash' for a given t'SourceMap' -- -- Basic rules: -- --- * If someone modifies a GHC installation in any way after Stack --- looks at it, they voided the warranty. This includes installing a --- brand new build to the same directory, or registering new --- packages to the global database. +-- * If someone modifies a GHC installation in any way after Stack looks at it, +-- they voided the warranty. This includes installing a brand new build to the +-- same directory, or registering new packages to the global database. -- --- * We should include everything in the hash that would relate to --- immutable packages and identifying the compiler itself. Mutable --- packages (both project packages and dependencies) will never make --- it into the snapshot database, and can be ignored. 
+-- * We should include everything in the hash that would relate to immutable +-- packages and identifying the compiler itself. Mutable packages (both +-- project packages and dependencies) will never make it into the snapshot +-- database, and can be ignored. -- --- * Target information is only relevant insofar as it effects the --- dependency map. The actual current targets for this build are --- irrelevant to the cache mechanism, and can be ignored. +-- * Target information is only relevant insofar as it effects the dependency +-- map. The actual current targets for this build are irrelevant to the cache +-- mechanism, and can be ignored. -- -- * Make sure things like profiling and haddocks are included in the hash -- -hashSourceMapData - :: (HasBuildConfig env, HasCompiler env) - => BuildOptsCLI - -> SourceMap - -> RIO env SourceMapHash +hashSourceMapData :: + (HasBuildConfig env, HasCompiler env) + => BuildOptsCLI + -> SourceMap + -> RIO env SourceMapHash hashSourceMapData boptsCli sm = do - compilerPath <- getUtf8Builder . fromString . toFilePath <$> getCompilerPath - compilerInfo <- getCompilerInfo - immDeps <- forM (Map.elems (smDeps sm)) depPackageHashableContent - bc <- view buildConfigL - let -- extra bytestring specifying GHC options supposed to be applied to - -- GHC boot packages so we'll have differrent hashes when bare - -- resolver 'ghc-X.Y.Z' is used, no extra-deps and e.g. user wants builds - -- with profiling or without - bootGhcOpts = map display (generalGhcOptions bc boptsCli False False) - hashedContent = toLazyByteString $ compilerPath <> compilerInfo <> - getUtf8Builder (mconcat bootGhcOpts) <> mconcat immDeps - return $ SourceMapHash (SHA256.hashLazyBytes hashedContent) + compilerPath <- getUtf8Builder . fromString . 
toFilePath <$> getCompilerPath + compilerInfo <- getCompilerInfo + immDeps <- forM (Map.elems sm.deps) depPackageHashableContent + bc <- view buildConfigL + let -- extra bytestring specifying GHC options supposed to be applied to GHC + -- boot packages so we'll have different hashes when bare snapshot + -- 'ghc-X.Y.Z' is used, no extra-deps and e.g. user wants builds with + -- profiling or without + bootGhcOpts = map display (generalGhcOptions bc boptsCli False False) + hashedContent = + toLazyByteString $ compilerPath + <> compilerInfo + <> getUtf8Builder (mconcat bootGhcOpts) + <> mconcat immDeps + pure $ SourceMapHash (SHA256.hashLazyBytes hashedContent) depPackageHashableContent :: (HasConfig env) => DepPackage -> RIO env Builder -depPackageHashableContent DepPackage {..} = do - case dpLocation of - PLMutable _ -> return "" - PLImmutable pli -> do - let flagToBs (f, enabled) = - if enabled - then "" - else "-" <> fromString (C.unFlagName f) - flags = map flagToBs $ Map.toList (cpFlags dpCommon) - ghcOptions = map display (cpGhcOptions dpCommon) - cabalConfigOpts = map display (cpCabalConfigOpts dpCommon) - haddocks = if cpHaddocks dpCommon then "haddocks" else "" - hash = immutableLocSha pli - return $ hash <> haddocks <> getUtf8Builder (mconcat flags) <> - getUtf8Builder (mconcat ghcOptions) <> - getUtf8Builder (mconcat cabalConfigOpts) - --- | All flags for a local package. 
-getLocalFlags - :: BuildOptsCLI - -> PackageName - -> Map FlagName Bool -getLocalFlags boptsCli name = Map.unions - [ Map.findWithDefault Map.empty (ACFByName name) cliFlags - , Map.findWithDefault Map.empty ACFAllProjectPackages cliFlags - ] - where - cliFlags = boptsCLIFlags boptsCli +depPackageHashableContent dp = + case dp.location of + PLMutable _ -> pure "" + PLImmutable pli -> do + let flagToBs (f, enabled) = + (if enabled then "" else "-") <> fromString (C.unFlagName f) + flags = map flagToBs $ Map.toList dp.depCommon.flags + ghcOptions = map display dp.depCommon.ghcOptions + cabalConfigOpts = map display dp.depCommon.cabalConfigOpts + haddocks = if dp.depCommon.buildHaddocks then "haddocks" else "" + hash = immutableLocSha pli + pure + $ hash + <> haddocks + <> getUtf8Builder (mconcat flags) + <> getUtf8Builder (mconcat ghcOptions) + <> getUtf8Builder (mconcat cabalConfigOpts) -- | Get the options to pass to @./Setup.hs configure@ -loadCabalConfigOpts :: BuildConfig -> PackageName -> Bool -> Bool -> [Text] -loadCabalConfigOpts bconfig name isTarget isLocal = concat - [ Map.findWithDefault [] CCKEverything (configCabalConfigOpts config) - , if isLocal - then Map.findWithDefault [] CCKLocals (configCabalConfigOpts config) - else [] - , if isTarget - then Map.findWithDefault [] CCKTargets (configCabalConfigOpts config) - else [] - , Map.findWithDefault [] (CCKPackage name) (configCabalConfigOpts config) - ] - where - config = view configL bconfig +generalCabalConfigOpts :: + BuildConfig + -> BuildOptsCLI + -> PackageName + -> Bool + -> Bool + -> [Text] +generalCabalConfigOpts bconfig boptsCli name isTarget isLocal = concat + [ Map.findWithDefault [] CCKEverything config.cabalConfigOpts + , if isLocal + then Map.findWithDefault [] CCKLocals config.cabalConfigOpts + else [] + , if isTarget + then Map.findWithDefault [] CCKTargets config.cabalConfigOpts + else [] + , Map.findWithDefault [] (CCKPackage name) config.cabalConfigOpts + , if includeExtraOptions + 
then boptsCLIAllProgOptions boptsCli + else [] + ] + where + config = view configL bconfig + includeExtraOptions = + case config.applyProgOptions of + APOTargets -> isTarget + APOLocals -> isLocal + APOEverything -> True -- | Get the configured options to pass from GHC, based on the build -- configuration and commandline. generalGhcOptions :: BuildConfig -> BuildOptsCLI -> Bool -> Bool -> [Text] generalGhcOptions bconfig boptsCli isTarget isLocal = concat - [ Map.findWithDefault [] AGOEverything (configGhcOptionsByCat config) - , if isLocal - then Map.findWithDefault [] AGOLocals (configGhcOptionsByCat config) - else [] - , if isTarget - then Map.findWithDefault [] AGOTargets (configGhcOptionsByCat config) - else [] - , concat [["-fhpc"] | isLocal && toCoverage (boptsTestOpts bopts)] - , if boptsLibProfile bopts || boptsExeProfile bopts - then ["-fprof-auto","-fprof-cafs"] - else [] - , if not $ boptsLibStrip bopts || boptsExeStrip bopts - then ["-g"] - else [] - , if includeExtraOptions - then boptsCLIGhcOptions boptsCli - else [] - ] - where - bopts = configBuild config - config = view configL bconfig - includeExtraOptions = - case configApplyGhcOptions config of - AGOTargets -> isTarget - AGOLocals -> isLocal - AGOEverything -> True - -splitComponents :: [NamedComponent] - -> (Set Text, Set Text, Set Text) -splitComponents = - go id id id - where - go a b c [] = (Set.fromList $ a [], Set.fromList $ b [], Set.fromList $ c []) - go a b c (CLib:xs) = go a b c xs - go a b c (CInternalLib x:xs) = go (a . (x:)) b c xs - go a b c (CExe x:xs) = go (a . (x:)) b c xs - go a b c (CTest x:xs) = go a (b . (x:)) c xs - go a b c (CBench x:xs) = go a b (c . 
(x:)) xs + [ Map.findWithDefault [] AGOEverything config.ghcOptionsByCat + , if isLocal + then Map.findWithDefault [] AGOLocals config.ghcOptionsByCat + else [] + , if isTarget + then Map.findWithDefault [] AGOTargets config.ghcOptionsByCat + else [] + , concat [["-fhpc"] | isLocal && bopts.testOpts.coverage] + , if bopts.libProfile || bopts.exeProfile + then ["-fprof-auto", "-fprof-cafs"] + else [] + , [ "-g" | not $ bopts.libStrip || bopts.exeStrip ] + , if includeExtraOptions + then boptsCli.ghcOptions + else [] + ] + where + bopts = config.build + config = view configL bconfig + includeExtraOptions = + case config.applyGhcOptions of + AGOTargets -> isTarget + AGOLocals -> isLocal + AGOEverything -> True +-- | Yield a t'Package' from the settings common to dependency and project +-- packages. loadCommonPackage :: - forall env. (HasBuildConfig env, HasSourceMap env) - => CommonPackage - -> RIO env Package + forall env. (HasBuildConfig env, HasSourceMap env) + => CommonPackage + -> RIO env Package loadCommonPackage common = do - config <- getPackageConfig (cpFlags common) (cpGhcOptions common) (cpCabalConfigOpts common) - gpkg <- liftIO $ cpGPD common - return $ resolvePackage config gpkg + (_, _, pkg) <- loadCommonPackage' common + pure pkg + +loadCommonPackage' :: + forall env. (HasBuildConfig env, HasSourceMap env) + => CommonPackage + -> RIO env (PackageConfig, C.GenericPackageDescription, Package) +loadCommonPackage' common = do + config <- + getPackageConfig + common.flags + common.ghcOptions + common.cabalConfigOpts + gpkg <- liftIO common.gpd + pure (config, gpkg, resolvePackage config gpkg) -- | Upgrade the initial project package info to a full-blown @LocalPackage@ -- based on the selected components loadLocalPackage :: - forall env. (HasBuildConfig env, HasSourceMap env) - => ProjectPackage - -> RIO env LocalPackage + forall env. 
(HasBuildConfig env, HasSourceMap env) + => ProjectPackage + -> RIO env LocalPackage loadLocalPackage pp = do - sm <- view sourceMapL - let common = ppCommon pp - bopts <- view buildOptsL - mcurator <- view $ buildConfigL.to bcCurator - config <- getPackageConfig (cpFlags common) (cpGhcOptions common) (cpCabalConfigOpts common) - gpkg <- ppGPD pp - let name = cpName common - mtarget = M.lookup name (smtTargets $ smTargets sm) - (exeCandidates, testCandidates, benchCandidates) = - case mtarget of - Just (TargetComps comps) -> splitComponents $ Set.toList comps - Just (TargetAll _packageType) -> - ( packageExes pkg - , if boptsTests bopts && maybe True (Set.notMember name . curatorSkipTest) mcurator - then Map.keysSet (packageTests pkg) - else Set.empty - , if boptsBenchmarks bopts && maybe True (Set.notMember name . curatorSkipBenchmark) mcurator - then packageBenchmarks pkg - else Set.empty - ) - Nothing -> mempty + sm <- view sourceMapL + let common = pp.projectCommon + bopts <- view buildOptsL + mcurator <- view $ buildConfigL . to (.curator) + (config, gpkg, pkg) <- loadCommonPackage' common + let name = common.name + mtarget = M.lookup name sm.targets.targets + (exeCandidates, testCandidates, benchCandidates) = + case mtarget of + Just (TargetComps comps) -> + -- Currently, a named library component (a sub-library) cannot be + -- specified as a build target. + let (_s, e, t, b) = splitComponents $ Set.toList comps + in (e, t, b) + Just (TargetAll _packageType) -> + ( buildableExes pkg + , if bopts.tests + && maybe True (Set.notMember name . (.skipTest)) mcurator + then buildableTestSuites pkg + else Set.empty + , if bopts.benchmarks + && maybe + True + (Set.notMember name . 
(.skipBenchmark)) + mcurator + then buildableBenchmarks pkg + else Set.empty + ) + Nothing -> mempty - -- See https://github.com/commercialhaskell/stack/issues/2862 - isWanted = case mtarget of - Nothing -> False - -- FIXME: When issue #1406 ("stack 0.1.8 lost ability to - -- build individual executables or library") is resolved, - -- 'hasLibrary' is only relevant if the library is - -- part of the target spec. - Just _ -> - let hasLibrary = - case packageLibraries pkg of - NoLibraries -> False - HasLibraries _ -> True - in hasLibrary - || not (Set.null nonLibComponents) - || not (Set.null $ packageInternalLibraries pkg) + -- See https://github.com/commercialhaskell/stack/issues/2862 + isWanted = case mtarget of + Nothing -> False + -- FIXME: When issue #1406 ("stack 0.1.8 lost ability to build + -- individual executables or library") is resolved, 'hasLibrary' is only + -- relevant if the library is part of the target spec. + Just _ -> + hasBuildableMainLibrary pkg + || not (Set.null nonLibComponents) + || not (null pkg.subLibraries) - filterSkippedComponents = Set.filter (not . (`elem` boptsSkipComponents bopts)) + filterSkippedComponents = + Set.filter (not . 
(`elem` bopts.skipComponents)) - (exes, tests, benches) = (filterSkippedComponents exeCandidates, - filterSkippedComponents testCandidates, - filterSkippedComponents benchCandidates) + (exes, tests, benches) = ( filterSkippedComponents exeCandidates + , filterSkippedComponents testCandidates + , filterSkippedComponents benchCandidates + ) - nonLibComponents = toComponents exes tests benches + nonLibComponents = toComponents exes tests benches - toComponents e t b = Set.unions - [ Set.map CExe e - , Set.map CTest t - , Set.map CBench b - ] + toComponents e t b = Set.unions + [ Set.map CExe e + , Set.map CTest t + , Set.map CBench b + ] - btconfig = config - { packageConfigEnableTests = not $ Set.null tests - , packageConfigEnableBenchmarks = not $ Set.null benches - } - testconfig = config - { packageConfigEnableTests = True - , packageConfigEnableBenchmarks = False - } - benchconfig = config - { packageConfigEnableTests = False - , packageConfigEnableBenchmarks = True - } + btconfig = config + { enableTests = not $ Set.null tests + , enableBenchmarks = not $ Set.null benches + } - -- We resolve the package in 4 different configurations: - -- - -- - pkg doesn't have tests or benchmarks enabled. - -- - -- - btpkg has them enabled if they are present. - -- - -- - testpkg has tests enabled, but not benchmarks. - -- - -- - benchpkg has benchmarks enablde, but not tests. - -- - -- The latter two configurations are used to compute the deps - -- when --enable-benchmarks or --enable-tests are configured. - -- This allows us to do an optimization where these are passed - -- if the deps are present. This can avoid doing later - -- unnecessary reconfigures. 
- pkg = resolvePackage config gpkg - btpkg - | Set.null tests && Set.null benches = Nothing - | otherwise = Just (resolvePackage btconfig gpkg) - testpkg = resolvePackage testconfig gpkg - benchpkg = resolvePackage benchconfig gpkg + -- We resolve the package in 2 different configurations: + -- + -- - pkg doesn't have tests or benchmarks enabled. + -- + -- - btpkg has them enabled if they are present. + -- + -- The latter configuration is used to compute the deps when + -- --enable-benchmarks or --enable-tests are configured. This allows us to + -- do an optimization where these are passed if the deps are present. This + -- can avoid doing later unnecessary reconfigures. + btpkg + | Set.null tests && Set.null benches = Nothing + | otherwise = Just (resolvePackage btconfig gpkg) - componentFiles <- memoizeRefWith $ fst <$> getPackageFilesForTargets pkg (ppCabalFP pp) nonLibComponents + componentFiles <- memoizeRefWith $ + fst <$> getPackageFilesForTargets pkg pp.cabalFP nonLibComponents - checkCacheResults <- memoizeRefWith $ do - componentFiles' <- runMemoizedWith componentFiles - forM (Map.toList componentFiles') $ \(component, files) -> do - mbuildCache <- tryGetBuildCache (ppRoot pp) component - checkCacheResult <- checkBuildCache - (fromMaybe Map.empty mbuildCache) - (Set.toList files) - return (component, checkCacheResult) + checkCacheResults <- memoizeRefWith $ do + componentFiles' <- runMemoizedWith componentFiles + forM (Map.toList componentFiles') $ \(component, files) -> do + mbuildCache <- tryGetBuildCache (ppRoot pp) component + checkCacheResult <- checkBuildCache + (fromMaybe Map.empty mbuildCache) + (Set.toList files) + pure (component, checkCacheResult) - let dirtyFiles = do - checkCacheResults' <- checkCacheResults - let allDirtyFiles = Set.unions $ map (\(_, (x, _)) -> x) checkCacheResults' - pure $ - if not (Set.null allDirtyFiles) - then let tryStripPrefix y = - fromMaybe y (stripPrefix (toFilePath $ ppRoot pp) y) - in Just $ Set.map 
tryStripPrefix allDirtyFiles - else Nothing - newBuildCaches = - M.fromList . map (\(c, (_, cache)) -> (c, cache)) - <$> checkCacheResults + let dirtyFiles = do + checkCacheResults' <- checkCacheResults + let allDirtyFiles = + Set.unions $ map (\(_, (x, _)) -> x) checkCacheResults' + pure $ + if not (Set.null allDirtyFiles) + then let tryStripPrefix y = + fromMaybe y (L.stripPrefix (toFilePath $ ppRoot pp) y) + in Just $ Set.map tryStripPrefix allDirtyFiles + else Nothing + newBuildCaches = + M.fromList . map (\(c, (_, cache)) -> (c, cache)) <$> checkCacheResults - return LocalPackage - { lpPackage = pkg - , lpTestDeps = dvVersionRange <$> packageDeps testpkg - , lpBenchDeps = dvVersionRange <$> packageDeps benchpkg - , lpTestBench = btpkg - , lpComponentFiles = componentFiles - , lpBuildHaddocks = cpHaddocks (ppCommon pp) - , lpForceDirty = boptsForceDirty bopts - , lpDirtyFiles = dirtyFiles - , lpNewBuildCaches = newBuildCaches - , lpCabalFile = ppCabalFP pp - , lpWanted = isWanted - , lpComponents = nonLibComponents - -- TODO: refactor this so that it's easier to be sure that these - -- components are indeed unbuildable. - -- - -- The reasoning here is that if the STLocalComps specification - -- made it through component parsing, but the components aren't - -- present, then they must not be buildable. - , lpUnbuildable = toComponents - (exes `Set.difference` packageExes pkg) - (tests `Set.difference` Map.keysSet (packageTests pkg)) - (benches `Set.difference` packageBenchmarks pkg) - } + pure LocalPackage + { package = pkg + , testBench = btpkg + , componentFiles + , buildHaddocks = pp.projectCommon.buildHaddocks + , forceDirty = bopts.forceDirty + , dirtyFiles + , newBuildCaches + , cabalFP = pp.cabalFP + , wanted = isWanted + , components = nonLibComponents + -- TODO: refactor this so that it's easier to be sure that these + -- components are indeed unbuildable. 
+ -- + -- The reasoning here is that if the STLocalComps specification made it + -- through component parsing, but the components aren't present, then they + -- must not be buildable. + , unbuildable = toComponents + (exes `Set.difference` buildableExes pkg) + (tests `Set.difference` buildableTestSuites pkg) + (benches `Set.difference` buildableBenchmarks pkg) + } -- | Compare the current filesystem state to the cached information, and -- determine (1) if the files are dirty, and (2) the new cache values. -checkBuildCache :: forall m. (MonadIO m) - => Map FilePath FileCacheInfo -- ^ old cache - -> [Path Abs File] -- ^ files in package - -> m (Set FilePath, Map FilePath FileCacheInfo) +checkBuildCache :: + HasEnvConfig env + => FileCache -- ^ old cache + -> [Path Abs File] -- ^ files in package + -> RIO env (Set FilePath, FileCache) checkBuildCache oldCache files = do - fileTimes <- liftM Map.fromList $ forM files $ \fp -> do - mdigest <- liftIO (getFileDigestMaybe (toFilePath fp)) - return (toFilePath fp, mdigest) - liftM (mconcat . Map.elems) $ sequence $ - Map.mergeWithKey - (\fp mdigest fci -> Just (go fp mdigest (Just fci))) - (Map.mapWithKey (\fp mdigest -> go fp mdigest Nothing)) - (Map.mapWithKey (\fp fci -> go fp Nothing (Just fci))) - fileTimes - oldCache - where - go :: FilePath -> Maybe SHA256 -> Maybe FileCacheInfo -> m (Set FilePath, Map FilePath FileCacheInfo) - -- Filter out the cabal_macros file to avoid spurious recompilations - go fp _ _ | takeFileName fp == "cabal_macros.h" = return (Set.empty, Map.empty) - -- Common case where it's in the cache and on the filesystem. - go fp (Just digest') (Just fci) - | fciHash fci == digest' = return (Set.empty, Map.singleton fp fci) - | otherwise = return (Set.singleton fp, Map.singleton fp $ FileCacheInfo digest') - -- Missing file. Add it to dirty files, but no FileCacheInfo. - go fp Nothing _ = return (Set.singleton fp, Map.empty) - -- Missing cache. Add it to dirty files and compute FileCacheInfo. 
- go fp (Just digest') Nothing = - return (Set.singleton fp, Map.singleton fp $ FileCacheInfo digest') + fileDigests <- fmap Map.fromList $ forM files $ \fp -> do + mdigest <- getFileDigestMaybe (toFilePath fp) + pure (toFilePath fp, mdigest) + fmap (mconcat . Map.elems) $ sequence $ + Map.merge + (Map.mapMissing (\fp mdigest -> go fp mdigest Nothing)) + (Map.mapMissing (\fp fci -> go fp Nothing (Just fci))) + (Map.zipWithMatched (\fp mdigest fci -> go fp mdigest (Just fci))) + fileDigests + oldCache + where + go :: + FilePath + -> Maybe SHA256 + -> Maybe FileCacheInfo + -> RIO env (Set FilePath, FileCache) + -- Filter out the cabal_macros file to avoid spurious recompilations + go fp _ _ | takeFileName fp == "cabal_macros.h" = pure (Set.empty, Map.empty) + -- Common case where it's in the cache and on the filesystem. + go fp (Just digest') (Just fci) + | fci.hash == digest' = pure (Set.empty, Map.singleton fp fci) + | otherwise = + pure (Set.singleton fp, Map.singleton fp $ FileCacheInfo digest') + -- Missing file. Add it to dirty files, but no FileCacheInfo. + go fp Nothing _ = pure (Set.singleton fp, Map.empty) + -- Missing cache. Add it to dirty files and compute FileCacheInfo. 
+ go fp (Just digest') Nothing = + pure (Set.singleton fp, Map.singleton fp $ FileCacheInfo digest') --- | Returns entries to add to the build cache for any newly found unlisted modules -addUnlistedToBuildCache - :: HasEnvConfig env - => Package - -> Path Abs File - -> Set NamedComponent - -> Map NamedComponent (Map FilePath a) - -> RIO env (Map NamedComponent [Map FilePath FileCacheInfo], [PackageWarning]) +-- | Returns entries to add to the build cache for any newly found unlisted +-- modules +addUnlistedToBuildCache :: + HasEnvConfig env + => Package + -> Path Abs File + -> Set NamedComponent + -> Map NamedComponent (Map FilePath a) + -> RIO env (Map NamedComponent [FileCache], [PackageWarning]) addUnlistedToBuildCache pkg cabalFP nonLibComponents buildCaches = do - (componentFiles, warnings) <- getPackageFilesForTargets pkg cabalFP nonLibComponents - results <- forM (M.toList componentFiles) $ \(component, files) -> do - let buildCache = M.findWithDefault M.empty component buildCaches - newFiles = - Set.toList $ - Set.map toFilePath files `Set.difference` Map.keysSet buildCache - addBuildCache <- mapM addFileToCache newFiles - return ((component, addBuildCache), warnings) - return (M.fromList (map fst results), concatMap snd results) - where - addFileToCache fp = do - mdigest <- getFileDigestMaybe fp - case mdigest of - Nothing -> return Map.empty - Just digest' -> return . 
Map.singleton fp $ FileCacheInfo digest' + (componentFiles, warnings) <- + getPackageFilesForTargets pkg cabalFP nonLibComponents + results <- forM (M.toList componentFiles) $ \(component, files) -> do + let buildCache = M.findWithDefault M.empty component buildCaches + newFiles = + Set.toList $ + Set.map toFilePath files `Set.difference` Map.keysSet buildCache + addBuildCache <- mapM addFileToCache newFiles + pure ((component, addBuildCache), warnings) + pure (M.fromList (map fst results), concatMap snd results) + where + addFileToCache fp = + getFileDigestMaybe fp >>= \case + Nothing -> pure Map.empty + Just digest' -> pure $ Map.singleton fp $ FileCacheInfo digest' -- | Gets list of Paths for files relevant to a set of components in a package. --- Note that the library component, if any, is always automatically added to the --- set of components. -getPackageFilesForTargets - :: HasEnvConfig env - => Package - -> Path Abs File - -> Set NamedComponent - -> RIO env (Map NamedComponent (Set (Path Abs File)), [PackageWarning]) +-- Note that the library component, if any, is always automatically added to the +-- set of components. 
+getPackageFilesForTargets :: + HasEnvConfig env + => Package + -> Path Abs File + -> Set NamedComponent + -> RIO env (Map NamedComponent (Set (Path Abs File)), [PackageWarning]) getPackageFilesForTargets pkg cabalFP nonLibComponents = do - (components',compFiles,otherFiles,warnings) <- - getPackageFiles (packageFiles pkg) cabalFP - let necessaryComponents = Set.insert CLib $ Set.filter isCInternalLib (M.keysSet components') - components = necessaryComponents `Set.union` nonLibComponents - componentsFiles = - M.map (\files -> Set.union otherFiles (Set.map dotCabalGetPath $ Set.fromList files)) $ - M.filterWithKey (\component _ -> component `elem` components) compFiles - return (componentsFiles, warnings) + PackageComponentFile components' compFiles otherFiles warnings <- + getPackageFile pkg cabalFP + let necessaryComponents = + Set.insert CLib $ Set.filter isCSubLib (M.keysSet components') + components = necessaryComponents `Set.union` nonLibComponents + componentsFiles = M.map + (\files -> + Set.union otherFiles (Set.map dotCabalGetPath $ Set.fromList files) + ) + $ M.filterWithKey (\component _ -> component `elem` components) compFiles + pure (componentsFiles, warnings) -- | Get file digest, if it exists -getFileDigestMaybe :: MonadIO m => FilePath -> m (Maybe SHA256) +getFileDigestMaybe :: HasEnvConfig env => FilePath -> RIO env (Maybe SHA256) getFileDigestMaybe fp = do - liftIO - (catch - (liftM Just . withSourceFile fp $ getDigest) - (\e -> - if isDoesNotExistError e - then return Nothing - else throwM e)) - where - getDigest src = runConduit $ src .| getZipSink (ZipSink SHA256.sinkHash) + cache <- view $ envConfigL . to (.fileDigestCache) + catch + (Just <$> readFileDigest cache fp) + (\e -> if isDoesNotExistError e then pure Nothing else throwM e) --- | Get 'PackageConfig' for package given its name. -getPackageConfig - :: (HasBuildConfig env, HasSourceMap env) +-- | Get t'PackageConfig' for package given its name. 
+getPackageConfig :: + (HasBuildConfig env, HasSourceMap env) => Map FlagName Bool -> [Text] -- ^ GHC options -> [Text] -- ^ cabal config opts @@ -492,12 +587,12 @@ getPackageConfig getPackageConfig flags ghcOptions cabalConfigOpts = do platform <- view platformL compilerVersion <- view actualCompilerVersionL - return PackageConfig - { packageConfigEnableTests = False - , packageConfigEnableBenchmarks = False - , packageConfigFlags = flags - , packageConfigGhcOptions = ghcOptions - , packageConfigCabalConfigOpts = cabalConfigOpts - , packageConfigCompilerVersion = compilerVersion - , packageConfigPlatform = platform + pure PackageConfig + { enableTests = False + , enableBenchmarks = False + , flags = flags + , ghcOptions = ghcOptions + , cabalConfigOpts = cabalConfigOpts + , compilerVersion = compilerVersion + , platform = platform } diff --git a/src/Stack/Build/Target.hs b/src/Stack/Build/Target.hs index 11defe344a..26cf63ee21 100644 --- a/src/Stack/Build/Target.hs +++ b/src/Stack/Build/Target.hs @@ -1,113 +1,132 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE ViewPatterns #-} --- | Parsing command line targets --- --- There are two relevant data sources for performing this parsing: --- the project configuration, and command line arguments. Project --- configurations includes the resolver (defining a LoadedSnapshot of --- global and snapshot packages), local dependencies, and project --- packages. It also defines local flag overrides. --- --- The command line arguments specify both additional local flag --- overrides and targets in their raw form. --- --- Flags are simple: we just combine CLI flags with config flags and --- make one big map of flags, preferring CLI flags when present. 
--- --- Raw targets can be a package name, a package name with component, --- just a component, or a package name and version number. We first --- must resolve these raw targets into both simple targets and --- additional dependencies. This works as follows: --- --- * If a component is specified, find a unique project package which --- defines that component, and convert it into a name+component --- target. --- --- * Ensure that all name+component values refer to valid components --- in the given project package. --- --- * For names, check if the name is present in the snapshot, local --- deps, or project packages. If it is not, then look up the most --- recent version in the package index and convert to a --- name+version. --- --- * For name+version, first ensure that the name is not used by a --- project package. Next, if that name+version is present in the --- snapshot or local deps _and_ its location is PLIndex, we have the --- package. Otherwise, add to local deps with the appropriate --- PLIndex. --- --- If in either of the last two bullets we added a package to local --- deps, print a warning to the user recommending modifying the --- extra-deps. --- --- Combine the various 'ResolveResults's together into 'Target' --- values, by combining various components for a single package and --- ensuring that no conflicting statements were made about targets. --- --- At this point, we now have a Map from package name to SimpleTarget, --- and an updated Map of local dependencies. We still have the --- aggregated flags, and the snapshot and project packages. --- --- Finally, we upgrade the snapshot by using --- calculatePackagePromotion. +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} + +{-| +Module : Stack.Build.Target +Description : Parsing command line targets. 
+License : BSD-3-Clause + +Parsing command line targets + +There are two relevant data sources for performing this parsing: the project +configuration, and command line arguments. The project configuration includes the +snapshot (defining a LoadedSnapshot of global and snapshot packages), local +dependencies, and project packages. It also defines local flag overrides. + +The command line arguments specify both additional local flag overrides and +targets in their raw form. + +Flags are simple: we just combine CLI flags with config flags and make one big +map of flags, preferring CLI flags when present. + +Raw targets can be a package name, a package name with component, just a +component, or a package name and version number. We first must resolve these raw +targets into both simple targets and additional dependencies. This works as +follows: + +* If a component is specified, find a unique project package which defines that + component, and convert it into a name+component target. + +* Ensure that all name+component values refer to valid components in the given + project package. + +* For names, check if the name is present in the snapshot, local deps, or + project packages. If it is not, then look up the most recent version in the + package index and convert to a name+version. + +* For name+version, first ensure that the name is not used by a project + package. Next, if that name+version is present in the snapshot or local deps + _and_ its location is PLIndex, we have the package. Otherwise, add to local + deps with the appropriate PLIndex. + +If in either of the last two bullets we added a package to local deps, print a +warning to the user recommending modifying the extra-deps. + +Combine the various t'ResolveResult's together into t'Target' values, by +combining various components for a single package and ensuring that no +conflicting statements were made about targets. 
+ +At this point, we now have a Map from package name to SimpleTarget, and an +updated Map of local dependencies. We still have the aggregated flags, and the +snapshot and project packages. + +Finally, we upgrade the snapshot by using calculatePackagePromotion. +-} + module Stack.Build.Target - ( -- * Types - Target (..) - , NeedTargets (..) - , PackageType (..) - , parseTargets - -- * Convenience helpers - , gpdVersion - -- * Test suite exports - , parseRawTarget - , RawTarget (..) - , UnresolvedComponent (..) - ) where + ( -- * Types + Target (..) + , NeedTargets (..) + , parseTargets + -- * Convenience helpers + , gpdVersion + -- * Test suite exports + , parseRawTarget + , RawTarget (..) + , UnresolvedComponent (..) + , ComponentName + , ResolveResult + ) where -import Stack.Prelude import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Text as T -import Path -import Path.Extra (rejectMissingDir) -import Path.IO -import RIO.Process (HasProcessContext) -import Stack.SourceMap -import Stack.Types.Config +import Path ( isProperPrefixOf ) +import Path.Extra ( forgivingResolveDir, rejectMissingDir ) +import Path.IO ( getCurrentDir ) +import RIO.Process ( HasProcessContext ) +import Stack.SourceMap ( additionalDepPackage ) +import Stack.Prelude +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..) ) +import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..) ) +import Stack.Types.ComponentUtils ( unqualCompFromText ) +import Stack.Types.Config ( Config (..) ) import Stack.Types.NamedComponent -import Stack.Types.Build + ( NamedComponent (..), renderComponent ) +import Stack.Types.Build.Exception ( BuildPrettyException (..) ) +import Stack.Types.ProjectConfig ( ProjectConfig (..) ) import Stack.Types.SourceMap + ( DepPackage (..), GlobalPackage (..), PackageType (..) + , ProjectPackage, SMActual (..), SMTargets (..) + , SMWanted (..), Target (..), ppComponents, ppRoot + ) -- | Do we need any targets? 
For example, `stack build` will fail if -- no targets are provided. -data NeedTargets = NeedTargets | AllowNoTargets +data NeedTargets + = NeedTargets + | AllowNoTargets ---------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- -- Get the RawInput ---------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- -- | Raw target information passed on the command line. -newtype RawInput = RawInput { unRawInput :: Text } +newtype RawInput = RawInput { rawInput :: Text } -getRawInput :: BuildOptsCLI -> Map PackageName ProjectPackage -> ([Text], [RawInput]) +getRawInput :: + BuildOptsCLI + -> Map PackageName ProjectPackage + -> ([Text], [RawInput]) getRawInput boptscli locals = - let textTargets' = boptsCLITargets boptscli - textTargets = - -- Handle the no targets case, which means we pass in the names of all project packages - if null textTargets' - then map (T.pack . packageNameString) (Map.keys locals) - else textTargets' - in (textTargets', map RawInput textTargets) - ---------------------------------------------------------------------------------- + let textTargets' = boptscli.targetsCLI + textTargets = + -- Handle the no targets case, which means we pass in the names of all + -- project packages + if null textTargets' + then map (T.pack . 
packageNameString) (Map.keys locals) + else textTargets' + in (textTargets', map RawInput textTargets) + +-------------------------------------------------------------------------------- -- Turn RawInput into RawTarget ---------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- -- | The name of a component, which applies to executables, test -- suites, and benchmarks @@ -116,337 +135,444 @@ type ComponentName = Text -- | Either a fully resolved component, or a component name that could be -- either an executable, test, or benchmark data UnresolvedComponent - = ResolvedComponent !NamedComponent - | UnresolvedComponent !ComponentName - deriving (Show, Eq, Ord) + = ResolvedComponent !NamedComponent + | UnresolvedComponent !ComponentName + deriving (Eq, Ord, Show) -- | Raw command line input, without checking against any databases or list of -- locals. Does not deal with directories data RawTarget - = RTPackageComponent !PackageName !UnresolvedComponent - | RTComponent !ComponentName - | RTPackage !PackageName - -- Explicitly _not_ supporting revisions on the command line. If - -- you want that, you should be modifying your stack.yaml! (In - -- fact, you should probably do that anyway, we're just letting - -- people be lazy, since we're Haskeletors.) - | RTPackageIdentifier !PackageIdentifier - deriving (Show, Eq) + = RTPackageComponent !PackageName !UnresolvedComponent + | RTComponent !ComponentName + | RTPackage !PackageName + -- Explicitly _not_ supporting revisions on the command line. If you want + -- that, you should be modifying your stack.yaml! (In fact, you should + -- probably do that anyway, we're just letting people be lazy, since we're + -- Haskeletors.) + | RTPackageIdentifier !PackageIdentifier + deriving (Eq, Show) -- | Same as @parseRawTarget@, but also takes directories into account. 
-parseRawTargetDirs :: MonadIO m - => Path Abs Dir -- ^ current directory - -> Map PackageName ProjectPackage - -> RawInput -- ^ raw target information from the commandline - -> m (Either Text [(RawInput, RawTarget)]) +parseRawTargetDirs :: + MonadIO m + => Path Abs Dir -- ^ current directory + -> Map PackageName ProjectPackage + -> RawInput -- ^ raw target information from the commandline + -> m (Either StyleDoc [(RawInput, RawTarget)]) parseRawTargetDirs root locals ri = - case parseRawTarget t of - Just rt -> return $ Right [(ri, rt)] - Nothing -> do - mdir <- liftIO $ forgivingAbsence (resolveDir root (T.unpack t)) - >>= rejectMissingDir - case mdir of - Nothing -> return $ Left $ "Directory not found: " `T.append` t - Just dir -> - case mapMaybe (childOf dir) $ Map.toList locals of - [] -> return $ Left $ - "No local directories found as children of " `T.append` - t - names -> return $ Right $ map ((ri, ) . RTPackage) names - where - childOf dir (name, pp) = - if dir == ppRoot pp || isProperPrefixOf dir (ppRoot pp) - then Just name - else Nothing - - RawInput t = ri + case parseRawTarget t of + Just rt -> pure $ Right [(ri, rt)] + Nothing -> do + forgivingResolveDir root (T.unpack t) >>= rejectMissingDir >>= \case + Nothing -> pure $ Left $ + if | T.isPrefixOf "stack-yaml=" t -> projectOptionTypo + | T.isSuffixOf ".yaml" t -> projectYamlExtTypo + | otherwise -> + fillSep + [ flow "Directory not found:" + , style Dir (fromString $ T.unpack t) <> "." + ] + Just dir -> + case mapMaybe (childOf dir) $ Map.toList locals of + [] -> pure $ Left $ + fillSep + [ style Dir (fromString $ T.unpack t) + , flow "is not a local directory for a package and it is not a \ + \parent directory of any such directory." + ] + names -> pure $ Right $ map ((ri, ) . 
RTPackage) names + where + childOf dir (name, pp) = + if dir == ppRoot pp || isProperPrefixOf dir (ppRoot pp) + then Just name + else Nothing + + RawInput t = ri + + projectOptionTypo :: StyleDoc + projectOptionTypo = let o = "stack-yaml=" in projectTypo 2 (length o) o + + projectYamlExtTypo :: StyleDoc + projectYamlExtTypo = let o = "stack-yaml " in projectTypo (2 + length o) 0 o + + projectTypo :: Int -> Int -> String -> StyleDoc + projectTypo padLength dropLength option = + vsep + [ style Dir (fromString (replicate padLength ' ') <> fromString (T.unpack t)) + <> " is not a directory." + , style Highlight (fromString $ "--" <> option) + <> style Dir (fromString . drop dropLength $ T.unpack t) + <> " might work as a project option." + ] -- | If this function returns @Nothing@, the input should be treated as a -- directory. parseRawTarget :: Text -> Maybe RawTarget parseRawTarget t = - (RTPackageIdentifier <$> parsePackageIdentifier s) - <|> (RTPackage <$> parsePackageName s) - <|> (RTComponent <$> T.stripPrefix ":" t) - <|> parsePackageComponent - where - s = T.unpack t - - parsePackageComponent = - case T.splitOn ":" t of - [pname, "lib"] - | Just pname' <- parsePackageName (T.unpack pname) -> - Just $ RTPackageComponent pname' $ ResolvedComponent CLib - [pname, cname] - | Just pname' <- parsePackageName (T.unpack pname) -> - Just $ RTPackageComponent pname' $ UnresolvedComponent cname - [pname, typ, cname] - | Just pname' <- parsePackageName (T.unpack pname) - , Just wrapper <- parseCompType typ -> - Just $ RTPackageComponent pname' $ ResolvedComponent $ wrapper cname - _ -> Nothing - - parseCompType t' = - case t' of - "exe" -> Just CExe - "test" -> Just CTest - "bench" -> Just CBench - _ -> Nothing - ---------------------------------------------------------------------------------- + (RTPackageIdentifier <$> parsePackageIdentifier s) + <|> (RTPackage <$> parsePackageName s) + <|> (RTComponent <$> T.stripPrefix ":" t) + <|> parsePackageComponent + where + s = 
T.unpack t + + parsePackageComponent = + case T.splitOn ":" t of + [pname, "lib"] + | Just pname' <- parsePackageName (T.unpack pname) -> + Just $ RTPackageComponent pname' $ ResolvedComponent CLib + [pname, cname] + | Just pname' <- parsePackageName (T.unpack pname) -> + Just $ RTPackageComponent pname' $ UnresolvedComponent cname + [pname, typ, cname] + | Just pname' <- parsePackageName (T.unpack pname) + , Just wrapper <- parseCompType typ -> + Just $ RTPackageComponent pname' $ ResolvedComponent $ wrapper cname + _ -> Nothing + + parseCompType t' = + case t' of + "exe" -> Just (CExe . unqualCompFromText) + "test" -> Just (CTest . unqualCompFromText) + "bench" -> Just (CBench . unqualCompFromText) + _ -> Nothing + +-------------------------------------------------------------------------------- -- Resolve the raw targets ---------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- +-- | A type representing results of resolving a raw target. data ResolveResult = ResolveResult - { rrName :: !PackageName - , rrRaw :: !RawInput - , rrComponent :: !(Maybe NamedComponent) - -- ^ Was a concrete component specified? - , rrAddedDep :: !(Maybe PackageLocationImmutable) - -- ^ Only if we're adding this as a dependency - , rrPackageType :: !PackageType + { name :: !PackageName + , rawInput :: !RawInput + , component :: !(Maybe NamedComponent) + -- ^ Was a concrete component specified? + , addedDep :: !(Maybe PackageLocationImmutable) + -- ^ Only if we're adding this as a dependency + , packageType :: !PackageType } --- | Convert a 'RawTarget' into a 'ResolveResult' (see description on --- the module). +-- | Convert a 'RawTarget' into a t'ResolveResult' (see description on the +-- module). 
resolveRawTarget :: - (HasLogFunc env, HasPantryConfig env, HasProcessContext env) - => SMActual GlobalPackage - -> Map PackageName PackageLocation - -> (RawInput, RawTarget) - -> RIO env (Either Text ResolveResult) -resolveRawTarget sma allLocs (ri, rt) = + (HasLogFunc env, HasPantryConfig env, HasProcessContext env) + => SMActual GlobalPackage + -> Map PackageName PackageLocation + -> (RawInput, RawTarget) + -> RIO env (Either StyleDoc ResolveResult) +resolveRawTarget sma allLocs (rawInput, rt) = go rt - where - locals = smaProject sma - deps = smaDeps sma - globals = smaGlobal sma - -- Helper function: check if a 'NamedComponent' matches the given 'ComponentName' - isCompNamed :: ComponentName -> NamedComponent -> Bool - isCompNamed _ CLib = False - isCompNamed t1 (CInternalLib t2) = t1 == t2 - isCompNamed t1 (CExe t2) = t1 == t2 - isCompNamed t1 (CTest t2) = t1 == t2 - isCompNamed t1 (CBench t2) = t1 == t2 - - go (RTComponent cname) = do - -- Associated list from component name to package that defines - -- it. We use an assoc list and not a Map so we can detect - -- duplicates. - allPairs <- fmap concat $ flip Map.traverseWithKey locals - $ \name pp -> do - comps <- ppComponents pp - pure $ map (name, ) $ Set.toList comps - pure $ case filter (isCompNamed cname . snd) allPairs of - [] -> Left $ cname `T.append` " doesn't seem to be a local target. 
Run 'stack ide targets' for a list of available targets" - [(name, comp)] -> Right ResolveResult - { rrName = name - , rrRaw = ri - , rrComponent = Just comp - , rrAddedDep = Nothing - , rrPackageType = PTProject - } - matches -> Left $ T.concat - [ "Ambiugous component name " - , cname - , ", matches: " - , T.pack $ show matches - ] - go (RTPackageComponent name ucomp) = - case Map.lookup name locals of - Nothing -> pure $ Left $ T.pack $ "Unknown local package: " ++ packageNameString name - Just pp -> do - comps <- ppComponents pp - pure $ case ucomp of - ResolvedComponent comp - | comp `Set.member` comps -> Right ResolveResult - { rrName = name - , rrRaw = ri - , rrComponent = Just comp - , rrAddedDep = Nothing - , rrPackageType = PTProject - } - | otherwise -> Left $ T.pack $ concat - [ "Component " - , show comp - , " does not exist in package " - , packageNameString name - ] - UnresolvedComponent comp -> - case filter (isCompNamed comp) $ Set.toList comps of - [] -> Left $ T.concat - [ "Component " - , comp - , " does not exist in package " - , T.pack $ packageNameString name - ] - [x] -> Right ResolveResult - { rrName = name - , rrRaw = ri - , rrComponent = Just x - , rrAddedDep = Nothing - , rrPackageType = PTProject - } - matches -> Left $ T.concat - [ "Ambiguous component name " - , comp - , " for package " - , T.pack $ packageNameString name - , ": " - , T.pack $ show matches - ] - - go (RTPackage name) - | Map.member name locals = return $ Right ResolveResult - { rrName = name - , rrRaw = ri - , rrComponent = Nothing - , rrAddedDep = Nothing - , rrPackageType = PTProject - } - | Map.member name deps = - pure $ deferToConstructPlan name - | Just gp <- Map.lookup name globals = - case gp of - GlobalPackage _ -> pure $ deferToConstructPlan name - ReplacedGlobalPackage _ -> hackageLatest name - | otherwise = hackageLatest name - - -- Note that we use getLatestHackageRevision below, even though it's - -- non-reproducible, to avoid user confusion. 
In any event, - -- reproducible builds should be done by updating your config - -- files! - - go (RTPackageIdentifier ident@(PackageIdentifier name version)) - | Map.member name locals = return $ Left $ T.concat - [ tshow (packageNameString name) - , " target has a specific version number, but it is a local package." - , "\nTo avoid confusion, we will not install the specified version or build the local one." - , "\nTo build the local package, specify the target without an explicit version." - ] - | otherwise = - case Map.lookup name allLocs of - -- Installing it from the package index, so we're cool - -- with overriding it if necessary - Just (PLImmutable (PLIHackage (PackageIdentifier _name versionLoc) _cfKey _treeKey)) -> + where + locals = sma.project + deps = sma.deps + globals = sma.globals + -- Helper function: check if a 'NamedComponent' matches the given + -- 'ComponentName' + isCompNamed :: ComponentName -> NamedComponent -> Bool + isCompNamed _ CLib = False + isCompNamed t1 t2 = case t2 of + (CSubLib t2') -> t1' == t2' + (CExe t2') -> t1' == t2' + (CFlib t2') -> t1' == t2' + (CTest t2') -> t1' == t2' + (CBench t2') -> t1' == t2' + where + t1' = unqualCompFromText t1 + + go (RTComponent cname) = do + -- Associated list from component name to package that defines it. We use an + -- assoc list and not a Map so we can detect duplicates. + allPairs <- fmap concat $ flip Map.traverseWithKey locals + $ \name pp -> do + comps <- ppComponents pp + pure $ map (name, ) $ Set.toList comps + pure $ case filter (isCompNamed cname . snd) allPairs of + [] -> Left $ + fillSep + [ style Target . fromString . T.unpack $ cname + , flow "doesn't seem to be a local target. Run" + , style Shell $ flow "stack ide targets" + , flow "for a list of available targets." 
+ ] + [(name, component)] -> Right ResolveResult + { name + , rawInput + , component = Just component + , addedDep = Nothing + , packageType = PTProject + } + matches -> Left $ + fillSep + [ flow "Ambiguous component name" + , style Target (fromString $ T.unpack cname) <> "," + , "matches:" + ] + <> line + <> bulletedList + ( map + ( \(pn, nc) -> fillSep + [ "component" + , style + PkgComponent + (fromString $ T.unpack $ renderComponent nc) + , flow "of package" + , style PkgComponent (fromPackageName pn) + ] + ) + matches + ) + + go (RTPackageComponent name ucomp) = + case Map.lookup name locals of + Nothing -> pure $ Left $ + fillSep + [ flow "Unknown project package:" + , style Target (fromPackageName name) <> "." + ] + Just pp -> do + comps <- ppComponents pp + pure $ case ucomp of + ResolvedComponent component + | component `Set.member` comps -> Right ResolveResult + { name + , rawInput + , component = Just component + , addedDep = Nothing + , packageType = PTProject + } + | otherwise -> Left $ + fillSep + [ "Component" + , style + Target + (fromString $ T.unpack $ renderComponent component) + , flow "does not exist in package" + , style Target (fromPackageName name) <> "." + ] + UnresolvedComponent comp' -> + case filter (isCompNamed comp') $ Set.toList comps of + [] -> Left $ + fillSep + [ "Component" + , style Target (fromString $ T.unpack comp') + , flow "does not exist in package" + , style Target (fromPackageName name) <> "." + ] + [component] -> Right ResolveResult + { name + , rawInput + , component = Just component + , addedDep = Nothing + , packageType = PTProject + } + matches -> Left $ + fillSep + [ flow "Ambiguous component name" + , style Target (fromString $ T.unpack comp') + , flow "for package" + , style Target (fromPackageName name) + , flow "matches components:" + , fillSep $ + mkNarrativeList (Just PkgComponent) False + (map ncToStyleDoc matches) + ] + where + ncToStyleDoc :: NamedComponent -> StyleDoc + ncToStyleDoc = fromString . 
T.unpack . renderComponent + + go (RTPackage name) + | Map.member name locals = pure $ Right ResolveResult + { name + , rawInput + , component = Nothing + , addedDep = Nothing + , packageType = PTProject + } + | Map.member name deps = + pure $ deferToConstructPlan name + | Just gp <- Map.lookup name globals = + case gp of + GlobalPackage _ -> pure $ deferToConstructPlan name + ReplacedGlobalPackage _ -> hackageLatest name + | otherwise = hackageLatest name + + -- Note that we use getLatestHackageRevision below, even though it's + -- non-reproducible, to avoid user confusion. In any event, reproducible + -- builds should be done by updating your config files! + + go (RTPackageIdentifier ident@(PackageIdentifier name version)) + | Map.member name locals = pure $ Left $ + fillSep + [ style Target (fromPackageId ident) + , flow "is a specific package version, but" + , style Target (fromPackageName name) + , flow "is the name of a project package. To avoid confusion, Stack \ + \will not try to build the specified version or the project \ + \package. To build the project package, specify only" + , style Current (fromPackageName name) <> "." + ] + | otherwise = + case Map.lookup name allLocs of + -- Installing it from the package index, so we're cool with overriding + -- it if necessary + Just + ( PLImmutable + ( PLIHackage + (PackageIdentifier _name versionLoc) _cfKey _treeKey + ) + ) -> if version == versionLoc - then pure $ deferToConstructPlan name - else hackageLatestRevision name version - -- The package was coming from something besides the - -- index, so refuse to do the override - Just loc' -> pure $ Left $ T.concat - [ "Package with identifier was targeted on the command line: " - , T.pack $ packageIdentifierString ident - , ", but it was specified from a non-index location: " - , T.pack $ show loc' - , ".\nRecommendation: add the correctly desired version to extra-deps." 
+ then pure $ deferToConstructPlan name + else hackageLatestRevision name version versionLoc + -- The package was coming from something besides the index, so refuse + -- to do the override + Just loc' -> pure $ Left $ + fillSep + [ style Target (fromPackageId ident) + , flow "was specified from a non-index location, namely:" + , flow $ T.unpack $ textDisplay loc' <> "." + , flow "Recommendation: add the correctly desired version to \ + \extra-deps." ] - -- Not present at all, add it from Hackage - Nothing -> do - mrev <- getLatestHackageRevision YesRequireHackageIndex name version - pure $ case mrev of - Nothing -> deferToConstructPlan name - Just (_rev, cfKey, treeKey) -> Right ResolveResult - { rrName = name - , rrRaw = ri - , rrComponent = Nothing - , rrAddedDep = Just $ PLIHackage (PackageIdentifier name version) cfKey treeKey - , rrPackageType = PTDependency - } - - hackageLatest name = do - mloc <- getLatestHackageLocation YesRequireHackageIndex name UsePreferredVersions - pure $ case mloc of - Nothing -> deferToConstructPlan name - Just loc -> do - Right ResolveResult - { rrName = name - , rrRaw = ri - , rrComponent = Nothing - , rrAddedDep = Just loc - , rrPackageType = PTDependency - } - - hackageLatestRevision name version = do - mrev <- getLatestHackageRevision YesRequireHackageIndex name version - pure $ case mrev of - Nothing -> deferToConstructPlan name - Just (_rev, cfKey, treeKey) -> Right ResolveResult - { rrName = name - , rrRaw = ri - , rrComponent = Nothing - , rrAddedDep = Just $ PLIHackage (PackageIdentifier name version) cfKey treeKey - , rrPackageType = PTDependency - } - - -- This is actually an error case. We _could_ return a - -- Left value here, but it turns out to be better to defer - -- this until the ConstructPlan phase, and let it complain - -- about the missing package so that we get more errors - -- together, plus the fancy colored output from that - -- module. 
- deferToConstructPlan name = Right ResolveResult - { rrName = name - , rrRaw = ri - , rrComponent = Nothing - , rrAddedDep = Nothing - , rrPackageType = PTDependency - } ---------------------------------------------------------------------------------- + -- Not present at all, add it from Hackage + Nothing -> + getLatestHackageRevision YesRequireHackageIndex name version <&> \case + Nothing -> Left $ + fillSep + [ flow "Stack did not know the location of a package named" + , style Target (fromPackageName name) + , "and could not find" + , style Target (fromPackageId ident) + , flow "in the package index." + ] + Just (_rev, cfKey, treeKey) -> Right ResolveResult + { name + , rawInput + , component = Nothing + , addedDep = Just $ + PLIHackage (PackageIdentifier name version) cfKey treeKey + , packageType = PTDependency + } + + hackageLatest name = + getLatestHackageLocation YesRequireHackageIndex name UsePreferredVersions <&> \case + Nothing -> deferToConstructPlan name + Just loc -> + Right ResolveResult + { name + , rawInput + , component = Nothing + , addedDep = Just loc + , packageType = PTDependency + } + + hackageLatestRevision name version versionLoc = + getLatestHackageRevision YesRequireHackageIndex name version <&> \case + Nothing -> Left $ + fillSep + [ flow "Stack knows the location of" + , style Current (fromPackageId pkgId') + , flow "but did not know the location of" + , style Target (fromPackageId pkgId) <>"," + , flow "and did not find it in the package index." + ] + where + pkgId = PackageIdentifier name version + pkgId' = PackageIdentifier name versionLoc + Just (_rev, cfKey, treeKey) -> Right ResolveResult + { name + , rawInput + , component = Nothing + , addedDep = + Just $ PLIHackage (PackageIdentifier name version) cfKey treeKey + , packageType = PTDependency + } + + -- This is actually an error case. 
We _could_ pure a Left value here, but it + -- turns out to be better to defer this until the ConstructPlan phase, and let + -- it complain about the missing package so that we get more errors together, + -- plus the fancy colored output from that module. + deferToConstructPlan name = Right ResolveResult + { name + , rawInput + , component = Nothing + , addedDep = Nothing + , packageType = PTDependency + } +-------------------------------------------------------------------------------- -- Combine the ResolveResults ---------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- -combineResolveResults - :: forall env. HasLogFunc env +combineResolveResults :: + forall env. HasLogFunc env => [ResolveResult] - -> RIO env ([Text], Map PackageName Target, Map PackageName PackageLocationImmutable) + -> RIO + env + ( [StyleDoc] + , Map PackageName Target + , Map PackageName PackageLocationImmutable + ) combineResolveResults results = do - addedDeps <- fmap Map.unions $ forM results $ \result -> - case rrAddedDep result of - Nothing -> return Map.empty - Just pl -> do - return $ Map.singleton (rrName result) pl - - let m0 = Map.unionsWith (++) $ map (\rr -> Map.singleton (rrName rr) [rr]) results - (errs, ms) = partitionEithers $ flip map (Map.toList m0) $ \(name, rrs) -> - let mcomps = map rrComponent rrs in - -- Confirm that there is either exactly 1 with no component, or - -- that all rrs are components - case rrs of - [] -> assert False $ Left "Somehow got no rrComponent values, that can't happen" - [rr] | isNothing (rrComponent rr) -> Right $ Map.singleton name $ TargetAll $ rrPackageType rr - _ - | all isJust mcomps -> Right $ Map.singleton name $ TargetComps $ Set.fromList $ catMaybes mcomps - | otherwise -> Left $ T.concat - [ "The package " - , T.pack $ packageNameString name - , " was specified in multiple, incompatible ways: " - , T.unwords $ map (unRawInput . 
rrRaw) rrs - ] - - return (errs, Map.unions ms, addedDeps) - ---------------------------------------------------------------------------------- + addedDeps <- fmap Map.unions $ forM results $ \result -> + case result.addedDep of + Nothing -> pure Map.empty + Just pl -> pure $ Map.singleton result.name pl + + let m0 = Map.unionsWith (++) $ + map (\rr -> Map.singleton rr.name [rr]) results + (errs, ms) = partitionEithers $ flip map (Map.toList m0) $ + \(name, rrs) -> + let mcomps = map (.component) rrs in + -- Confirm that there is either exactly 1 with no component, or that + -- all rrs are components + case rrs of + [] -> assert False $ + Left $ + flow "Somehow got no rrComponent values, that can't happen." + [rr] | isNothing rr.component -> + Right $ Map.singleton name $ TargetAll rr.packageType + _ + | all isJust mcomps -> + Right $ Map.singleton name $ TargetComps $ Set.fromList $ + catMaybes mcomps + | otherwise -> Left $ fillSep + [ flow "The package" + , style Target $ fromPackageName name + , flow "was specified in multiple, incompatible ways:" + , fillSep $ + mkNarrativeList (Just Target) False + (map rrToStyleDoc rrs) + ] + pure (errs, Map.unions ms, addedDeps) + where + rrToStyleDoc :: ResolveResult -> StyleDoc + rrToStyleDoc = fromString . T.unpack . (.rawInput.rawInput) + +-------------------------------------------------------------------------------- -- OK, let's do it! ---------------------------------------------------------------------------------- - -parseTargets :: HasBuildConfig env - => NeedTargets - -> Bool - -> BuildOptsCLI - -> SMActual GlobalPackage - -> RIO env SMTargets +-------------------------------------------------------------------------------- + +-- | Parse targets and dependencies from the given command line arguments and +-- source map. +parseTargets :: + HasBuildConfig env + => NeedTargets + -> Bool + -- ^ Should Haddock documentation be built for the package? 
+ -> BuildOptsCLI + -> SMActual GlobalPackage + -> RIO env SMTargets parseTargets needTargets haddockDeps boptscli smActual = do logDebug "Parsing the targets" bconfig <- view buildConfigL workingDir <- getCurrentDir - locals <- view $ buildConfigL.to (smwProject . bcSMWanted) + locals <- view $ buildConfigL . to (.smWanted.project) let (textTargets', rawInput) = getRawInput boptscli locals (errs1, concat -> rawTargets) <- fmap partitionEithers $ forM rawInput $ parseRawTargetDirs workingDir locals - let depLocs = Map.map dpLocation $ smaDeps smActual + let depLocs = Map.map (.location) smActual.deps (errs2, resolveResults) <- fmap partitionEithers $ forM rawTargets $ resolveRawTarget smActual depLocs @@ -454,29 +580,38 @@ parseTargets needTargets haddockDeps boptscli smActual = do (errs3, targets, addedDeps) <- combineResolveResults resolveResults case concat [errs1, errs2, errs3] of - [] -> return () - errs -> throwIO $ TargetParseException errs + [] -> pure () + errs -> prettyThrowIO $ TargetParseException errs case (Map.null targets, needTargets) of - (False, _) -> return () - (True, AllowNoTargets) -> return () + (False, _) -> pure () + (True, AllowNoTargets) -> pure () (True, NeedTargets) - | null textTargets' && bcImplicitGlobal bconfig -> throwIO $ TargetParseException - ["The specified targets matched no packages.\nPerhaps you need to run 'stack init'?"] - | null textTargets' && Map.null locals -> throwIO $ TargetParseException - ["The project contains no local packages (packages not marked with 'extra-dep')"] - | otherwise -> throwIO $ TargetParseException - ["The specified targets matched no packages"] + | null textTargets' && bcImplicitGlobal bconfig -> + prettyThrowIO $ TargetParseException + [ fillSep + [ flow "The specified targets matched no packages. Perhaps you \ + \need to run" + , style Shell (flow "stack init") <> "?" 
+ ] + ] + | null textTargets' && Map.null locals -> + prettyThrowIO $ TargetParseException + [ flow "The project contains no project packages (packages other \ + \than extra-deps)." + ] + | otherwise -> prettyThrowIO $ TargetParseException + [ flow "The specified targets matched no packages." ] addedDeps' <- mapM (additionalDepPackage haddockDeps . PLImmutable) addedDeps - return SMTargets - { smtTargets = targets - , smtDeps = addedDeps' + pure SMTargets + { targets = targets + , deps = addedDeps' } - where - bcImplicitGlobal bconfig = - case configProject $ bcConfig bconfig of - PCProject _ -> False - PCGlobalProject -> True - PCNoProject _ -> False + where + bcImplicitGlobal bconfig = + case bconfig.config.project of + PCProject _ -> False + PCGlobalProject -> True + PCNoProject _ -> False diff --git a/src/Stack/BuildInfo.hs b/src/Stack/BuildInfo.hs new file mode 100644 index 0000000000..9600771a8c --- /dev/null +++ b/src/Stack/BuildInfo.hs @@ -0,0 +1,106 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverloadedStrings #-} + +#ifdef USE_GIT_INFO +{-# LANGUAGE TemplateHaskell #-} +#endif + +{-| +Module : Stack.BuildInfo +License : BSD-3-Clause + +Extracted from "Stack" so that module does not use CPP or Template Haskell, and +therefore doesn't need to be recompiled as often. +-} + +module Stack.BuildInfo + ( versionString' + , hpackVersion + , maybeGitHash + ) where + +#ifndef HIDE_DEP_VERSIONS +import qualified Build_stack +#endif +import Data.Version ( versionBranch ) +import Distribution.System ( buildArch ) +import qualified Distribution.Text as Cabal ( display ) +#ifdef USE_GIT_INFO +import GitHash ( giCommitCount, giHash, tGitInfoCwdTry ) +import Options.Applicative.Simple ( simpleVersion ) +#endif +import qualified Paths_stack as Meta +import Stack.Constants ( isStackUploadDisabled ) +import Stack.Prelude +#ifndef USE_GIT_INFO +import Stack.Types.Version ( showStackVersion ) +#endif + +-- | The output of @stack --version@. 
+versionString' :: String +#ifdef USE_GIT_INFO +versionString' = concat $ concat + [ [$(simpleVersion Meta.version)] + -- Leave out number of commits for --depth=1 clone + -- See https://github.com/commercialhaskell/stack/issues/792 + , case giCommitCount <$> $$tGitInfoCwdTry of + Left _ -> [] + Right 1 -> [] + Right count -> [" (", show count, " commits)"] + , [afterVersion] + ] +#else +versionString' = showStackVersion ++ afterVersion +#endif + where + afterVersion = concat + [ preReleaseString + , ' ' : Cabal.display buildArch + , depsString + , warningString + , stackUploadDisabledWarningString + ] + preReleaseString = + case versionBranch Meta.version of + (_:y:_) | even y -> " PRE-RELEASE" + (_:_:z:_) | even z -> " RELEASE-CANDIDATE" + _ -> "" +#ifdef HIDE_DEP_VERSIONS + depsString = " hpack-" ++ VERSION_hpack +#else + depsString = "\nCompiled with:\n" ++ unlines (map ("- " ++) Build_stack.deps) +#endif +#ifdef SUPPORTED_BUILD + warningString = "" +#else + warningString = unlines + [ "" + , "Warning: this is an unsupported build that may use different versions of" + , "dependencies and GHC than the officially released binaries, and therefore may" + , "not behave identically. If you encounter problems, please try the latest" + , "official build by running 'stack upgrade --force-download'." + ] +#endif + stackUploadDisabledWarningString = if isStackUploadDisabled + then unlines + [ "" + , "Warning: 'stack upload' is disabled and will not make HTTP request(s). It will" + , "output information about the HTTP request(s) that would have been made if it" + , "was enabled." + ] + else "" + +-- | Hpack version we're compiled against +hpackVersion :: String +hpackVersion = VERSION_hpack + +-- | If USE_GIT_INFO is enabled, the Git hash in the build directory, otherwise +-- Nothing. +maybeGitHash :: Maybe String +maybeGitHash = +#ifdef USE_GIT_INFO + (either (const Nothing) (Just . 
giHash) $$tGitInfoCwdTry) +#else + Nothing +#endif diff --git a/src/Stack/BuildOpts.hs b/src/Stack/BuildOpts.hs new file mode 100644 index 0000000000..b843a5b4a8 --- /dev/null +++ b/src/Stack/BuildOpts.hs @@ -0,0 +1,92 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.BuildOpts +Description : Default configuration options for building. +License : BSD-3-Clause + +Default configuration options for building. +-} + +module Stack.BuildOpts + ( defaultBuildOpts + , defaultTestOpts + , defaultHaddockOpts + , defaultBenchmarkOpts + ) where + +import Distribution.Verbosity ( normal ) +import Stack.Prelude +import Stack.Types.BuildOpts + ( BenchmarkOpts (..), BuildOpts (..), HaddockOpts (..) + , TestOpts (..) + ) +import Stack.Types.BuildOptsMonoid + ( BenchmarkOptsMonoid (..), BuildOptsMonoid (..) + , CabalVerbosity (..), ProgressBarFormat (..) + , TestOptsMonoid (..) + ) + +defaultBuildOpts :: BuildOpts +defaultBuildOpts = BuildOpts + { libProfile = defaultFirstFalse buildMonoid.libProfile + , exeProfile = defaultFirstFalse buildMonoid.exeProfile + , libStrip = defaultFirstTrue buildMonoid.libStrip + , exeStrip = defaultFirstTrue buildMonoid.exeStrip + , buildHaddocks = False + , haddockOpts = defaultHaddockOpts + , openHaddocks = defaultFirstFalse buildMonoid.openHaddocks + , haddockDeps = Nothing + , haddockExecutables = defaultFirstFalse buildMonoid.haddockExecutables + , haddockTests = defaultFirstFalse buildMonoid.haddockTests + , haddockBenchmarks = defaultFirstFalse buildMonoid.haddockBenchmarks + , haddockInternal = defaultFirstFalse buildMonoid.haddockInternal + , haddockHyperlinkSource = defaultFirstTrue buildMonoid.haddockHyperlinkSource + , haddockForHackage = defaultFirstFalse buildMonoid.haddockForHackage + , installExes = defaultFirstFalse buildMonoid.installExes + , installCompilerTool = defaultFirstFalse buildMonoid.installCompilerTool + , preFetch = defaultFirstFalse 
buildMonoid.preFetch + , keepGoing = Nothing + , keepTmpFiles = defaultFirstFalse buildMonoid.keepTmpFiles + , forceDirty = defaultFirstFalse buildMonoid.forceDirty + , tests = defaultFirstFalse buildMonoid.tests + , testOpts = defaultTestOpts + , benchmarks = defaultFirstFalse buildMonoid.benchmarks + , benchmarkOpts = defaultBenchmarkOpts + , reconfigure = defaultFirstFalse buildMonoid.reconfigure + , cabalVerbose = CabalVerbosity normal + , splitObjs = defaultFirstFalse buildMonoid.splitObjs + , skipComponents = [] + , interleavedOutput = defaultFirstTrue buildMonoid.interleavedOutput + , progressBar = CappedBar + , ddumpDir = Nothing + , semaphore = defaultFirstFalse buildMonoid.semaphore + } + where + buildMonoid = undefined :: BuildOptsMonoid + +defaultTestOpts :: TestOpts +defaultTestOpts = TestOpts + { rerunTests = defaultFirstTrue toMonoid.rerunTests + , additionalArgs = [] + , coverage = defaultFirstFalse toMonoid.coverage + , runTests = defaultFirstTrue toMonoid.runTests + , maximumTimeSeconds = Nothing + , timeoutGraceSeconds = Nothing + , allowStdin = defaultFirstTrue toMonoid.allowStdin + } + where + toMonoid = undefined :: TestOptsMonoid + +defaultHaddockOpts :: HaddockOpts +defaultHaddockOpts = HaddockOpts { additionalArgs = [] } + +defaultBenchmarkOpts :: BenchmarkOpts +defaultBenchmarkOpts = BenchmarkOpts + { additionalArgs = Nothing + , runBenchmarks = defaultFirstTrue beoMonoid.runBenchmarks + } + where + beoMonoid = undefined :: BenchmarkOptsMonoid diff --git a/src/Stack/BuildPlan.hs b/src/Stack/BuildPlan.hs index 36f82236e0..03a474d0a1 100644 --- a/src/Stack/BuildPlan.hs +++ b/src/Stack/BuildPlan.hs @@ -1,328 +1,369 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} - --- | Resolving a build plan for a set of packages in a 
given Stackage --- snapshot. +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.BuildPlan +License : BSD-3-Clause + +Resolving a build plan for a set of packages in a given Stackage snapshot. +-} module Stack.BuildPlan - ( BuildPlanException (..) - , BuildPlanCheck (..) - , checkSnapBuildPlan - , DepError(..) - , DepErrors - , removeSrcPkgDefaultFlags - , selectBestSnapshot - , showItems - ) where - -import Stack.Prelude hiding (Display (..)) + ( BuildPlanException (..) + , BuildPlanCheck (..) + , checkSnapBuildPlan + , DepError (..) + , DepErrors + , removeSrcPkgDefaultFlags + , selectBestSnapshot + ) where + import qualified Data.Foldable as F -import qualified Data.Set as Set import Data.List (intercalate) -import qualified Data.List.NonEmpty as NonEmpty import qualified Data.Map as Map +import qualified Data.Set as Set import qualified Data.Text as T import qualified Distribution.Package as C -import Distribution.PackageDescription (GenericPackageDescription, - flagDefault, flagManual, - flagName, genPackageFlags) +import Distribution.PackageDescription + ( GenericPackageDescription, flagDefault, flagName + , flagManual, genPackageFlags + ) import qualified Distribution.PackageDescription as C -import Distribution.System (Platform) -import Distribution.Text (display) -import Distribution.Types.UnqualComponentName (unUnqualComponentName) +import Distribution.System ( Platform ) +import Distribution.Text ( display ) +import Distribution.Types.UnqualComponentName + ( unUnqualComponentName ) import qualified Distribution.Version as C -import qualified RIO -import Stack.Constants +import qualified RIO.NonEmpty as NE +import Stack.Constants ( wiredInPackages ) import Stack.Package + ( PackageConfig (..), packageDependencies + , resolvePackageDescription + ) +import Stack.Prelude hiding ( Display (..) 
) import Stack.SourceMap -import Stack.Types.SourceMap -import Stack.Types.Version -import Stack.Types.Config + ( SnapshotCandidate, loadProjectSnapshotCandidate ) import Stack.Types.Compiler - + ( ActualCompiler, WhichCompiler (..), compilerVersionText + , whichCompiler + ) +import Stack.Types.Config ( HasConfig ) +import Stack.Types.GHCVariant ( HasGHCVariant ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.SourceMap + ( CommonPackage (..), DepPackage (..) + , GlobalPackageVersion (..), ProjectPackage (..) + , SMActual (..) + ) +import Stack.Types.Version ( VersionRange, withinRange ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.BuildPlan" module. data BuildPlanException - = UnknownPackages - (Path Abs File) -- stack.yaml file - (Map PackageName (Maybe Version, Set PackageName)) -- truly unknown - (Map PackageName (Set PackageIdentifier)) -- shadowed - | SnapshotNotFound SnapName - | NeitherCompilerOrResolverSpecified T.Text - deriving (Typeable) -instance Exception BuildPlanException -instance Show BuildPlanException where - show (SnapshotNotFound snapName) = unlines - [ "SnapshotNotFound " ++ snapName' - , "Non existing resolver: " ++ snapName' ++ "." - , "For a complete list of available snapshots see https://www.stackage.org/snapshots" + = UnknownPackages + (Path Abs File) -- stack.yaml file + (Map PackageName (Maybe Version, Set PackageName)) -- truly unknown + (Map PackageName (Set PackageIdentifier)) -- shadowed + | SnapshotNotFound SnapName + | NeitherCompilerOrSnapshotSpecified T.Text + | DuplicatePackagesBug + deriving Show + +instance Exception BuildPlanException where + displayException (SnapshotNotFound snapName) = unlines + [ "Error: [S-2045]" + , "SnapshotNotFound " ++ snapName' + , "Non existing snapshot: " ++ snapName' ++ "." 
+ , "For a complete list of available snapshots see https://www.stackage.org/snapshots" + ] + where + snapName' = show snapName + displayException (UnknownPackages stackYaml unknown shadowed) = + "Error: [S-7571]\n" + ++ unlines (unknown' ++ shadowed') + where + unknown' :: [String] + unknown' + | Map.null unknown = [] + | otherwise = concat + [ ["The following packages do not exist in the build plan:"] + , map go (Map.toList unknown) + , case mapMaybe goRecommend $ Map.toList unknown of + [] -> [] + rec -> + ("Recommended action: modify the value of the extra-deps key of " ++ + toFilePath stackYaml ++ + " to include the following:") + : (rec + ++ ["Note: further dependencies may need to be added"]) + , case mapMaybe getNoKnown $ Map.toList unknown of + [] -> [] + noKnown -> + [ "There are no known versions of the following packages:" + , intercalate ", " $ map packageNameString noKnown + ] + ] + where + go (dep, (_, users)) | Set.null users = packageNameString dep + go (dep, (_, users)) = concat + [ packageNameString dep + , " (used by " + , intercalate ", " $ map packageNameString $ Set.toList users + , ")" ] - where snapName' = show snapName - show (UnknownPackages stackYaml unknown shadowed) = - unlines $ unknown' ++ shadowed' - where - unknown' :: [String] - unknown' - | Map.null unknown = [] - | otherwise = concat - [ ["The following packages do not exist in the build plan:"] - , map go (Map.toList unknown) - , case mapMaybe goRecommend $ Map.toList unknown of - [] -> [] - rec -> - ("Recommended action: modify the extra-deps field of " ++ - toFilePath stackYaml ++ - " to include the following:") - : (rec - ++ ["Note: further dependencies may need to be added"]) - , case mapMaybe getNoKnown $ Map.toList unknown of - [] -> [] - noKnown -> - [ "There are no known versions of the following packages:" - , intercalate ", " $ map packageNameString noKnown - ] - ] - where - go (dep, (_, users)) | Set.null users = packageNameString dep - go (dep, (_, users)) = concat 
- [ packageNameString dep - , " (used by " - , intercalate ", " $ map packageNameString $ Set.toList users - , ")" - ] - - goRecommend (name, (Just version, _)) = - Just $ "- " ++ packageIdentifierString (PackageIdentifier name version) - goRecommend (_, (Nothing, _)) = Nothing - - getNoKnown (name, (Nothing, _)) = Just name - getNoKnown (_, (Just _, _)) = Nothing - - shadowed' :: [String] - shadowed' - | Map.null shadowed = [] - | otherwise = concat - [ ["The following packages are shadowed by local packages:"] - , map go (Map.toList shadowed) - , ["Recommended action: modify the extra-deps field of " ++ - toFilePath stackYaml ++ - " to include the following:"] - , extraDeps - , ["Note: further dependencies may need to be added"] - ] - where - go (dep, users) | Set.null users = packageNameString dep ++ " (internal stack error: this should never be null)" - go (dep, users) = concat - [ packageNameString dep - , " (used by " - , intercalate ", " - $ map (packageNameString . pkgName) - $ Set.toList users - , ")" - ] - - extraDeps = map (\ident -> "- " ++ packageIdentifierString ident) - $ Set.toList - $ Set.unions - $ Map.elems shadowed - show (NeitherCompilerOrResolverSpecified url) = - "Failed to load custom snapshot at " ++ - T.unpack url ++ - ", because no 'compiler' or 'resolver' is specified." 
+ + goRecommend (name, (Just version, _)) = + Just $ "- " ++ packageIdentifierString (PackageIdentifier name version) + goRecommend (_, (Nothing, _)) = Nothing + + getNoKnown (name, (Nothing, _)) = Just name + getNoKnown (_, (Just _, _)) = Nothing + + shadowed' :: [String] + shadowed' + | Map.null shadowed = [] + | otherwise = concat + [ ["The following packages are shadowed by project packages:"] + , map go (Map.toList shadowed) + , ["Recommended action: modify the value of the extra-deps key of " ++ + toFilePath stackYaml ++ + " to include the following:"] + , extraDeps + , ["Note: further dependencies may need to be added"] + ] + where + go (dep, users) | Set.null users = packageNameString dep ++ " (internal Stack error: this should never be null)" + go (dep, users) = concat + [ packageNameString dep + , " (used by " + , intercalate ", " + $ map (packageNameString . pkgName) + $ Set.toList users + , ")" + ] + + extraDeps = map (\ident -> "- " ++ packageIdentifierString ident) + $ Set.toList + $ Set.unions + $ Map.elems shadowed + displayException (NeitherCompilerOrSnapshotSpecified url) = concat + [ "Error: [S-8559]\n" + , "Failed to load custom snapshot at " + , T.unpack url + , ", because no 'compiler' or 'snapshot' is specified." + ] + displayException DuplicatePackagesBug = bugReport "[S-5743]" + "Duplicate packages are not expected here." gpdPackages :: [GenericPackageDescription] -> Map PackageName Version gpdPackages = Map.fromList . map (toPair . C.package . C.packageDescription) - where - toPair (C.PackageIdentifier name version) = (name, version) - -gpdPackageDeps - :: GenericPackageDescription - -> ActualCompiler - -> Platform - -> Map FlagName Bool - -> Map PackageName VersionRange -gpdPackageDeps gpd ac platform flags = - Map.filterWithKey (const . not . 
isLocalLibrary) (packageDependencies pkgConfig pkgDesc) - where - isLocalLibrary name' = name' == name || name' `Set.member` subs - - name = gpdPackageName gpd - subs = Set.fromList - $ map (C.mkPackageName . unUnqualComponentName . fst) - $ C.condSubLibraries gpd - - -- Since tests and benchmarks are both enabled, doesn't matter - -- if we choose modified or unmodified - pkgDesc = pdpModifiedBuildable $ resolvePackageDescription pkgConfig gpd - pkgConfig = PackageConfig - { packageConfigEnableTests = True - , packageConfigEnableBenchmarks = True - , packageConfigFlags = flags - , packageConfigGhcOptions = [] - , packageConfigCabalConfigOpts = [] - , packageConfigCompilerVersion = ac - , packageConfigPlatform = platform - } - --- Remove any src package flags having default values --- Remove any package entries with no flags set -removeSrcPkgDefaultFlags :: [C.GenericPackageDescription] - -> Map PackageName (Map FlagName Bool) - -> Map PackageName (Map FlagName Bool) + where + toPair (C.PackageIdentifier name version) = (name, version) + +gpdPackageDeps :: + GenericPackageDescription + -> ActualCompiler + -> Platform + -> Map FlagName Bool + -> Map PackageName VersionRange +gpdPackageDeps gpd compilerVersion platform flags = + Map.filterWithKey (const . not . isLocalLibrary) (packageDependencies pkgDesc) + where + isLocalLibrary name' = name' == name || name' `Set.member` subs + + name = gpdPackageName gpd + subs = Set.fromList + $ map (C.mkPackageName . unUnqualComponentName . 
fst) + $ C.condSubLibraries gpd + + -- Since tests and benchmarks are both enabled, doesn't matter if we choose + -- modified or unmodified + pkgDesc = resolvePackageDescription pkgConfig gpd + pkgConfig = PackageConfig + { enableTests = True + , enableBenchmarks = True + , flags + , ghcOptions = [] + , cabalConfigOpts = [] + , compilerVersion + , platform + } + +-- | For the given list of packages and dictionary of packages and Cabal flags, +-- remove flags that have defaults and packages with no remaining flags. +removeSrcPkgDefaultFlags :: + [C.GenericPackageDescription] + -> Map PackageName (Map FlagName Bool) + -> Map PackageName (Map FlagName Bool) removeSrcPkgDefaultFlags gpds flags = - let defaults = Map.unions (map gpdDefaultFlags gpds) - flags' = Map.differenceWith removeSame flags defaults - in Map.filter (not . Map.null) flags' - where - removeSame f1 f2 = - let diff v v' = if v == v' then Nothing else Just v - in Just $ Map.differenceWith diff f1 f2 - - gpdDefaultFlags gpd = - let tuples = map getDefault (C.genPackageFlags gpd) - in Map.singleton (gpdPackageName gpd) (Map.fromList tuples) - - getDefault f - | C.flagDefault f = (C.flagName f, True) - | otherwise = (C.flagName f, False) + let defaults = Map.unions (map gpdDefaultFlags gpds) + flags' = Map.differenceWith removeSame flags defaults + in Map.filter (not . Map.null) flags' + where + removeSame f1 f2 = + let diff v v' = if v == v' then Nothing else Just v + in Just $ Map.differenceWith diff f1 f2 + + gpdDefaultFlags gpd = + let pairs = map (C.flagName &&& C.flagDefault) (C.genPackageFlags gpd) + in Map.singleton (gpdPackageName gpd) (Map.fromList pairs) -- | Find the set of @FlagName@s necessary to get the given -- @GenericPackageDescription@ to compile against the given @BuildPlan@. Will -- only modify non-manual flags, and will prefer default values for flags. 
-- Returns the plan which produces least number of dep errors -selectPackageBuildPlan - :: Platform - -> ActualCompiler - -> Map PackageName Version - -> GenericPackageDescription - -> (Map PackageName (Map FlagName Bool), DepErrors) +selectPackageBuildPlan :: + Platform + -> ActualCompiler + -> Map PackageName Version + -> GenericPackageDescription + -> (Map PackageName (Map FlagName Bool), DepErrors) selectPackageBuildPlan platform compiler pool gpd = - (selectPlan . limitSearchSpace . NonEmpty.map makePlan) flagCombinations - where - selectPlan :: NonEmpty (a, DepErrors) -> (a, DepErrors) - selectPlan = F.foldr1 fewerErrors - where - fewerErrors p1 p2 - | nErrors p1 == 0 = p1 - | nErrors p1 <= nErrors p2 = p1 - | otherwise = p2 - where nErrors = Map.size . snd - - -- Avoid exponential complexity in flag combinations making us sad pandas. - -- See: https://github.com/commercialhaskell/stack/issues/543 - limitSearchSpace :: NonEmpty a -> NonEmpty a - limitSearchSpace (x :| xs) = x :| take (maxFlagCombinations - 1) xs - where maxFlagCombinations = 128 - - makePlan :: [(FlagName, Bool)] -> (Map PackageName (Map FlagName Bool), DepErrors) - makePlan flags = checkPackageBuildPlan platform compiler pool (Map.fromList flags) gpd - - flagCombinations :: NonEmpty [(FlagName, Bool)] - flagCombinations = mapM getOptions (genPackageFlags gpd) - where - getOptions :: C.Flag -> NonEmpty (FlagName, Bool) - getOptions f - | flagManual f = (fname, flagDefault f) :| [] - | flagDefault f = (fname, True) :| [(fname, False)] - | otherwise = (fname, False) :| [(fname, True)] - where fname = flagName f + (selectPlan . limitSearchSpace . NE.map makePlan) flagCombinations + where + selectPlan :: NonEmpty (a, DepErrors) -> (a, DepErrors) + selectPlan = F.foldr1 fewerErrors + where + fewerErrors p1 p2 + | nErrors p1 == 0 = p1 + | nErrors p1 <= nErrors p2 = p1 + | otherwise = p2 + where + nErrors = Map.size . snd + + -- Avoid exponential complexity in flag combinations making us sad pandas. 
+ -- See: https://github.com/commercialhaskell/stack/issues/543 + limitSearchSpace :: NonEmpty a -> NonEmpty a + limitSearchSpace (x :| xs) = x :| take (maxFlagCombinations - 1) xs + where + maxFlagCombinations = 128 + + makePlan :: + [(FlagName, Bool)] + -> (Map PackageName (Map FlagName Bool), DepErrors) + makePlan flags = + checkPackageBuildPlan platform compiler pool (Map.fromList flags) gpd + + flagCombinations :: NonEmpty [(FlagName, Bool)] + flagCombinations = mapM getOptions (genPackageFlags gpd) + where + getOptions :: C.PackageFlag -> NonEmpty (FlagName, Bool) + getOptions f + | flagManual f = (fname, flagDefault f) :| [] + | flagDefault f = (fname, True) :| [(fname, False)] + | otherwise = (fname, False) :| [(fname, True)] + where + fname = flagName f -- | Check whether with the given set of flags a package's dependency -- constraints can be satisfied against a given build plan or pool of packages. -checkPackageBuildPlan - :: Platform - -> ActualCompiler - -> Map PackageName Version - -> Map FlagName Bool - -> GenericPackageDescription - -> (Map PackageName (Map FlagName Bool), DepErrors) +checkPackageBuildPlan :: + Platform + -> ActualCompiler + -> Map PackageName Version + -> Map FlagName Bool + -> GenericPackageDescription + -> (Map PackageName (Map FlagName Bool), DepErrors) checkPackageBuildPlan platform compiler pool flags gpd = - (Map.singleton pkg flags, errs) - where - pkg = gpdPackageName gpd - errs = checkPackageDeps pkg constraints pool - constraints = gpdPackageDeps gpd compiler platform flags + (Map.singleton pkg flags, errs) + where + pkg = gpdPackageName gpd + errs = checkPackageDeps pkg constraints pool + constraints = gpdPackageDeps gpd compiler platform flags -- | Checks if the given package dependencies can be satisfied by the given set -- of packages. Will fail if a package is either missing or has a version -- outside of the version range. 
-checkPackageDeps - :: PackageName -- ^ package using dependencies, for constructing DepErrors - -> Map PackageName VersionRange -- ^ dependency constraints - -> Map PackageName Version -- ^ Available package pool or index - -> DepErrors +checkPackageDeps :: + PackageName -- ^ package using dependencies, for constructing DepErrors + -> Map PackageName VersionRange -- ^ dependency constraints + -> Map PackageName Version -- ^ Available package pool or index + -> DepErrors checkPackageDeps myName deps packages = - Map.unionsWith combineDepError $ map go $ Map.toList deps - where - go :: (PackageName, VersionRange) -> DepErrors - go (name, range) = - case Map.lookup name packages of - Nothing -> Map.singleton name DepError - { deVersion = Nothing - , deNeededBy = Map.singleton myName range - } - Just v - | withinRange v range -> Map.empty - | otherwise -> Map.singleton name DepError - { deVersion = Just v - , deNeededBy = Map.singleton myName range - } + Map.unionsWith combineDepError $ map go $ Map.toList deps + where + go :: (PackageName, VersionRange) -> DepErrors + go (name, range) = + case Map.lookup name packages of + Nothing -> Map.singleton name DepError + { version = Nothing + , neededBy = Map.singleton myName range + } + Just v + | withinRange v range -> Map.empty + | otherwise -> Map.singleton name DepError + { version = Just v + , neededBy = Map.singleton myName range + } +-- | A type synoynm for a dictionary of packages and failures to satisfy +-- packages' dependency constraints. type DepErrors = Map PackageName DepError -data DepError = DepError - { deVersion :: !(Maybe Version) - , deNeededBy :: !(Map PackageName VersionRange) - } deriving Show --- | Combine two 'DepError's for the same 'Version'. +-- | A type representing failures to satisfy packages' dependency constraints. +data DepError = DepError + { version :: !(Maybe Version) + -- ^ If available, the available version of the package. 
+ , neededBy :: !(Map PackageName VersionRange) + -- ^ A dictionary of the packages requiring the package and the permitted + -- range of versions. + } + deriving Show + +-- | Combine two t'DepError's for the same 'Version'. combineDepError :: DepError -> DepError -> DepError combineDepError (DepError a x) (DepError b y) = - assert (a == b) $ DepError a (Map.unionWith C.intersectVersionRanges x y) + assert (a == b) $ DepError a (Map.unionWith C.intersectVersionRanges x y) -- | Given a bundle of packages (a list of @GenericPackageDescriptions@'s) to -- build and an available package pool (snapshot) check whether the bundle's -- dependencies can be satisfied. If flags is passed as Nothing flag settings -- will be chosen automatically. -checkBundleBuildPlan - :: Platform - -> ActualCompiler - -> Map PackageName Version - -> Maybe (Map PackageName (Map FlagName Bool)) - -> [GenericPackageDescription] - -> (Map PackageName (Map FlagName Bool), DepErrors) +checkBundleBuildPlan :: + Platform + -> ActualCompiler + -> Map PackageName Version + -> Maybe (Map PackageName (Map FlagName Bool)) + -> [GenericPackageDescription] + -> (Map PackageName (Map FlagName Bool), DepErrors) checkBundleBuildPlan platform compiler pool flags gpds = - (Map.unionsWith dupError (map fst plans) - , Map.unionsWith combineDepError (map snd plans)) - - where - plans = map (pkgPlan flags) gpds - pkgPlan Nothing gpd = - selectPackageBuildPlan platform compiler pool' gpd - pkgPlan (Just f) gpd = - checkPackageBuildPlan platform compiler pool' (flags' f gpd) gpd - flags' f gpd = fromMaybe Map.empty (Map.lookup (gpdPackageName gpd) f) - pool' = Map.union (gpdPackages gpds) pool - - dupError _ _ = error "Bug: Duplicate packages are not expected here" - -data BuildPlanCheck = - BuildPlanCheckOk (Map PackageName (Map FlagName Bool)) - | BuildPlanCheckPartial (Map PackageName (Map FlagName Bool)) DepErrors - | BuildPlanCheckFail (Map PackageName (Map FlagName Bool)) DepErrors - ActualCompiler + ( 
Map.unionsWith dupError (map fst plans) + , Map.unionsWith combineDepError (map snd plans) + ) + where + plans = map (pkgPlan flags) gpds + pkgPlan Nothing gpd = + selectPackageBuildPlan platform compiler pool' gpd + pkgPlan (Just f) gpd = + checkPackageBuildPlan platform compiler pool' (flags' f gpd) gpd + flags' f gpd = fromMaybe Map.empty (Map.lookup (gpdPackageName gpd) f) + pool' = Map.union (gpdPackages gpds) pool + + dupError _ _ = impureThrow DuplicatePackagesBug + +-- | A type representing the results of evaluating how well a snapshot satisfies +-- the dependencies of a set of packages and a set of Cabal flags. +data BuildPlanCheck + = BuildPlanCheckOk (Map PackageName (Map FlagName Bool)) + | BuildPlanCheckPartial (Map PackageName (Map FlagName Bool)) DepErrors + | BuildPlanCheckFail + (Map PackageName (Map FlagName Bool)) + DepErrors + ActualCompiler -- | Compare 'BuildPlanCheck', where GT means a better plan. compareBuildPlanCheck :: BuildPlanCheck -> BuildPlanCheck -> Ordering -compareBuildPlanCheck (BuildPlanCheckPartial _ e1) (BuildPlanCheckPartial _ e2) = +compareBuildPlanCheck + (BuildPlanCheckPartial _ e1) + (BuildPlanCheckPartial _ e2) + = -- Note: order of comparison flipped, since it's better to have fewer errors. 
compare (Map.size e2) (Map.size e1) compareBuildPlanCheck (BuildPlanCheckFail _ e1 _) (BuildPlanCheckFail _ e2 _) = - let numUserPkgs e = Map.size $ Map.unions (Map.elems (fmap deNeededBy e)) - in compare (numUserPkgs e2) (numUserPkgs e1) + let numUserPkgs e = Map.size $ Map.unions (Map.elems (fmap (.neededBy) e)) + in compare (numUserPkgs e2) (numUserPkgs e1) compareBuildPlanCheck BuildPlanCheckOk{} BuildPlanCheckOk{} = EQ compareBuildPlanCheck BuildPlanCheckOk{} BuildPlanCheckPartial{} = GT compareBuildPlanCheck BuildPlanCheckOk{} BuildPlanCheckFail{} = GT @@ -330,167 +371,181 @@ compareBuildPlanCheck BuildPlanCheckPartial{} BuildPlanCheckFail{} = GT compareBuildPlanCheck _ _ = LT instance Show BuildPlanCheck where - show BuildPlanCheckOk {} = "" - show (BuildPlanCheckPartial f e) = T.unpack $ showDepErrors f e - show (BuildPlanCheckFail f e c) = T.unpack $ showCompilerErrors f e c + show BuildPlanCheckOk {} = "" + show (BuildPlanCheckPartial f e) = T.unpack $ showDepErrors f e + show (BuildPlanCheckFail f e c) = T.unpack $ showCompilerErrors f e c -- | Check a set of 'GenericPackageDescription's and a set of flags against a -- given snapshot. Returns how well the snapshot satisfies the dependencies of -- the packages. -checkSnapBuildPlan - :: (HasConfig env, HasGHCVariant env) - => [ResolvedPath Dir] - -> Maybe (Map PackageName (Map FlagName Bool)) - -> SnapshotCandidate env - -> RIO env BuildPlanCheck +checkSnapBuildPlan :: + (HasConfig env, HasGHCVariant env) + => [ResolvedPath Dir] + -> Maybe (Map PackageName (Map FlagName Bool)) + -> SnapshotCandidate env + -> RIO env BuildPlanCheck checkSnapBuildPlan pkgDirs flags snapCandidate = do - platform <- view platformL - sma <- snapCandidate pkgDirs - gpds <- liftIO $ forM (Map.elems $ smaProject sma) (cpGPD . 
ppCommon) - - let - compiler = smaCompiler sma - globalVersion (GlobalPackageVersion v) = v - depVersion dep | PLImmutable loc <- dpLocation dep = - Just $ packageLocationVersion loc - | otherwise = - Nothing - snapPkgs = Map.union - (Map.mapMaybe depVersion $ smaDeps sma) - (Map.map globalVersion $ smaGlobal sma) - (f, errs) = checkBundleBuildPlan platform compiler snapPkgs flags gpds - cerrs = compilerErrors compiler errs - - if Map.null errs then - return $ BuildPlanCheckOk f - else if Map.null cerrs then do - return $ BuildPlanCheckPartial f errs - else - return $ BuildPlanCheckFail f cerrs compiler - where - compilerErrors compiler errs - | whichCompiler compiler == Ghc = ghcErrors errs - | otherwise = Map.empty - - isGhcWiredIn p _ = p `Set.member` wiredInPackages - ghcErrors = Map.filterWithKey isGhcWiredIn + platform <- view platformL + sma <- snapCandidate pkgDirs + gpds <- liftIO $ forM (Map.elems sma.project) (.projectCommon.gpd) + + let compiler = sma.compiler + globalVersion (GlobalPackageVersion v) = v + depVersion dep + | PLImmutable loc <- dep.location = Just $ packageLocationVersion loc + | otherwise = Nothing + snapPkgs = Map.union + (Map.mapMaybe depVersion sma.deps) + (Map.map globalVersion sma.globals) + (f, errs) = checkBundleBuildPlan platform compiler snapPkgs flags gpds + cerrs = compilerErrors compiler errs + + if Map.null errs + then pure $ BuildPlanCheckOk f + else if Map.null cerrs + then pure $ BuildPlanCheckPartial f errs + else pure $ BuildPlanCheckFail f cerrs compiler + where + compilerErrors compiler errs + | whichCompiler compiler == Ghc = ghcErrors compiler errs + | otherwise = Map.empty + + isGhcWiredIn compiler p _ = p `Set.member` wiredInPackages compiler + ghcErrors compiler = Map.filterWithKey (isGhcWiredIn compiler) -- | Find a snapshot and set of flags that is compatible with and matches as -- best as possible with the given 'GenericPackageDescription's. 
-selectBestSnapshot - :: (HasConfig env, HasGHCVariant env) - => [ResolvedPath Dir] - -> NonEmpty SnapName - -> RIO env (SnapshotCandidate env, RawSnapshotLocation, BuildPlanCheck) +selectBestSnapshot :: + (HasConfig env, HasGHCVariant env) + => [ResolvedPath Dir] + -> NonEmpty SnapName + -> RIO env (SnapshotCandidate env, RawSnapshotLocation, BuildPlanCheck) selectBestSnapshot pkgDirs snaps = do - logInfo $ "Selecting the best among " - <> displayShow (NonEmpty.length snaps) - <> " snapshots...\n" - F.foldr1 go (NonEmpty.map (getResult <=< snapshotLocation) snaps) - where - go mold mnew = do - old@(_snap, _loc, bpc) <- mold - case bpc of - BuildPlanCheckOk {} -> return old - _ -> fmap (betterSnap old) mnew - - getResult loc = do - candidate <- loadProjectSnapshotCandidate loc NoPrintWarnings False - result <- checkSnapBuildPlan pkgDirs Nothing candidate - reportResult result loc - return (candidate, loc, result) - - betterSnap (s1, l1, r1) (s2, l2, r2) - | compareBuildPlanCheck r1 r2 /= LT = (s1, l1, r1) - | otherwise = (s2, l2, r2) - - reportResult BuildPlanCheckOk {} loc = do - logInfo $ "* Matches " <> RIO.display loc - logInfo "" - - reportResult r@BuildPlanCheckPartial {} loc = do - logWarn $ "* Partially matches " <> RIO.display loc - logWarn $ RIO.display $ indent $ T.pack $ show r - - reportResult r@BuildPlanCheckFail {} loc = do - logWarn $ "* Rejected " <> RIO.display loc - logWarn $ RIO.display $ indent $ T.pack $ show r - - indent t = T.unlines $ fmap (" " <>) (T.lines t) + prettyInfo $ + fillSep + [ flow "Selecting the best among" + , fromString $ show (NE.length snaps) + , "snapshots..." 
+ ] + <> line + F.foldr1 go (NE.map (getResult <=< snapshotLocation) snaps) + where + go mold mnew = do + old@(_snap, _loc, bpc) <- mold + case bpc of + BuildPlanCheckOk {} -> pure old + _ -> fmap (betterSnap old) mnew + + getResult loc = do + candidate <- loadProjectSnapshotCandidate loc NoPrintWarnings False + result <- checkSnapBuildPlan pkgDirs Nothing candidate + reportResult result loc + pure (candidate, loc, result) + + betterSnap (s1, l1, r1) (s2, l2, r2) + | compareBuildPlanCheck r1 r2 /= LT = (s1, l1, r1) + | otherwise = (s2, l2, r2) + + reportResult BuildPlanCheckOk {} loc = + prettyNote $ + fillSep + [ flow "Matches" + , pretty $ PrettyRawSnapshotLocation loc + ] + <> line + + reportResult r@BuildPlanCheckPartial {} loc = + prettyWarn $ + fillSep + [ flow "Partially matches" + , pretty $ PrettyRawSnapshotLocation loc + ] + <> blankLine + <> indent 4 (string (show r)) + + reportResult r@BuildPlanCheckFail {} loc = + prettyWarn $ + fillSep + [ flow "Rejected" + , pretty $ PrettyRawSnapshotLocation loc + ] + <> blankLine + <> indent 4 (string (show r)) showItems :: [String] -> Text showItems items = T.concat (map formatItem items) - where - formatItem item = T.concat - [ " - " - , T.pack item - , "\n" - ] + where + formatItem item = T.concat + [ " - " + , T.pack item + , "\n" + ] showPackageFlags :: PackageName -> Map FlagName Bool -> Text showPackageFlags pkg fl = - if not $ Map.null fl then - T.concat - [ " - " - , T.pack $ packageNameString pkg - , ": " - , T.pack $ intercalate ", " - $ map formatFlags (Map.toList fl) - , "\n" - ] + if not $ Map.null fl + then + T.concat + [ " - " + , T.pack $ packageNameString pkg + , ": " + , T.pack $ intercalate ", " + $ map formatFlags (Map.toList fl) + , "\n" + ] else "" - where - formatFlags (f, v) = show f ++ " = " ++ show v + where + formatFlags (f, v) = show f ++ " = " ++ show v showMapPackages :: Map PackageName a -> Text showMapPackages mp = showItems $ map packageNameString $ Map.keys mp -showCompilerErrors 
- :: Map PackageName (Map FlagName Bool) - -> DepErrors - -> ActualCompiler - -> Text +showCompilerErrors :: + Map PackageName (Map FlagName Bool) + -> DepErrors + -> ActualCompiler + -> Text showCompilerErrors flags errs compiler = - T.concat - [ compilerVersionText compiler - , " cannot be used for these packages:\n" - , showMapPackages $ Map.unions (Map.elems (fmap deNeededBy errs)) - , showDepErrors flags errs -- TODO only in debug mode - ] + T.concat + [ compilerVersionText compiler + , " cannot be used for these packages:\n" + , showMapPackages $ Map.unions (Map.elems (fmap (.neededBy) errs)) + , showDepErrors flags errs -- TODO only in debug mode + ] showDepErrors :: Map PackageName (Map FlagName Bool) -> DepErrors -> Text showDepErrors flags errs = - T.concat - [ T.concat $ map formatError (Map.toList errs) - , if T.null flagVals then "" - else "Using package flags:\n" <> flagVals - ] - where - formatError (depName, DepError mversion neededBy) = T.concat - [ showDepVersion depName mversion - , T.concat (map showRequirement (Map.toList neededBy)) - ] - - showDepVersion depName mversion = T.concat - [ T.pack $ packageNameString depName - , case mversion of - Nothing -> " not found" - Just version -> T.concat - [ " version " - , T.pack $ versionString version - , " found" - ] - , "\n" + T.concat + [ T.concat $ map formatError (Map.toList errs) + , if T.null flagVals then "" + else "Using package flags:\n" <> flagVals + ] + where + formatError (depName, DepError mversion neededBy) = T.concat + [ showDepVersion depName mversion + , T.concat (map showRequirement (Map.toList neededBy)) + ] + + showDepVersion depName mversion = T.concat + [ T.pack $ packageNameString depName + , case mversion of + Nothing -> " not found" + Just version -> T.concat + [ " version " + , T.pack $ versionString version + , " found" ] - - showRequirement (user, range) = T.concat - [ " - " - , T.pack $ packageNameString user - , " requires " - , T.pack $ display range - , "\n" - ] - - 
flagVals = T.concat (map showFlags userPkgs) - userPkgs = Map.keys $ Map.unions (Map.elems (fmap deNeededBy errs)) - showFlags pkg = maybe "" (showPackageFlags pkg) (Map.lookup pkg flags) + , "\n" + ] + + showRequirement (user, range) = T.concat + [ " - " + , T.pack $ packageNameString user + , " requires " + , T.pack $ display range + , "\n" + ] + + flagVals = T.concat (map showFlags userPkgs) + userPkgs = Map.keys $ Map.unions (Map.elems (fmap (.neededBy) errs)) + showFlags pkg = maybe "" (showPackageFlags pkg) (Map.lookup pkg flags) diff --git a/src/Stack/CLI.hs b/src/Stack/CLI.hs new file mode 100644 index 0000000000..1d0a15e3b3 --- /dev/null +++ b/src/Stack/CLI.hs @@ -0,0 +1,744 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.CLI +License : BSD-3-Clause +-} + +module Stack.CLI + ( commandLineHandler + ) where + +import Data.Attoparsec.Interpreter ( getInterpreterArgs ) +import Data.Char ( toLower ) +import qualified Data.List as L +import Data.List.NonEmpty ( prependList ) +import Options.Applicative + ( Parser, ParserFailure, ParserHelp, ParserResult (..) 
+ , handleParseResult, help, helpError, idm, long, metavar + , overFailure, renderFailure, strArgument, switch + ) +import Options.Applicative.Help ( errorHelp, stringChunk, vcatChunks ) +import Options.Applicative.Builder.Extra + ( boolFlags, extraHelpOption ) +import Options.Applicative.Complicated + ( addCommand, addSubCommands, complicatedOptions ) +import Path ( filename ) +import RIO.NonEmpty ( (<|) ) +import qualified RIO.NonEmpty as NE +import qualified RIO.Process ( exec ) +import RIO.Process ( withProcessContextNoLogging ) +import Stack.Build ( buildCmd ) +import Stack.BuildInfo ( hpackVersion, versionString' ) +import Stack.Clean ( CleanCommand (..), cleanCmd ) +import Stack.ConfigCmd + ( cfgCmdBuildFiles, cfgCmdBuildFilesName, cfgCmdEnv + , cfgCmdEnvName, cfgCmdName, cfgCmdSet, cfgCmdSetName + ) +import Stack.Constants + ( globalFooter, osIsWindows, relFileStack, relFileStackDotExe + , stackProgName + ) +import Stack.Coverage ( hpcReportCmd ) +import Stack.Docker + ( dockerCmdName, dockerHelpOptName, dockerPullCmdName ) +import Stack.DockerCmd ( dockerPullCmd, dockerResetCmd ) +import Stack.Dot ( dotCmd ) +import Stack.Exec ( SpecialExecCmd (..), execCmd ) +import Stack.Eval ( evalCmd ) +import Stack.Ghci ( ghciCmd ) +import Stack.Hoogle ( hoogleCmd ) +import Stack.IDE ( idePackagesCmd, ideTargetsCmd ) +import Stack.Init ( initCmd ) +import Stack.List ( listCmd ) +import Stack.Ls ( lsCmd ) +import Stack.New ( newCmd ) +import qualified Stack.Nix as Nix +import Stack.Options.BuildParser ( buildOptsParser ) +import Stack.Options.CleanParser ( cleanOptsParser ) +import Stack.Options.ConfigEnvParser ( configCmdEnvParser ) +import Stack.Options.ConfigSetParser ( configCmdSetParser ) +import Stack.Options.DotParser ( dotOptsParser ) +import Stack.Options.EvalParser ( evalOptsParser ) +import Stack.Options.ExecParser ( execOptsParser ) +import Stack.Options.GhciParser ( ghciOptsParser ) +import Stack.Options.GlobalParser ( globalOptsParser ) +import 
Stack.Options.HpcReportParser ( hpcReportOptsParser ) +import Stack.Options.IdeParser ( idePackagesParser, ideTargetsParser ) +import Stack.Options.InitParser ( initOptsParser ) +import Stack.Options.LsParser ( lsOptsParser ) +import Stack.Options.NewParser ( newOptsParser ) +import Stack.Options.PathParser ( pathParser ) +import Stack.Options.SDistParser ( sdistOptsParser ) +import Stack.Options.ScriptParser ( scriptOptsParser ) +import Stack.Options.SetupParser ( setupOptsParser ) +import Stack.Options.UnpackParser ( unpackOptsParser ) +import Stack.Options.UpgradeParser ( upgradeOptsParser ) +import Stack.Options.UploadParser ( uploadOptsParser ) +import Stack.Options.Utils ( GlobalOptsContext (..) ) +import qualified Stack.Path ( path ) +import Stack.Prelude +import Stack.Query ( queryCmd ) +import Stack.Runners + ( ShouldReexec (..), withBuildConfig, withConfig + , withDefaultEnvConfig + ) +import Stack.SDist ( sdistCmd ) +import Stack.Script ( ScriptOpts (..), scriptCmd ) +import Stack.SetupCmd ( setupCmd ) +import Stack.Templates ( templatesCmd ) +import Stack.Types.AddCommand ( AddCommand ) +import Stack.Types.BuildOptsCLI ( BuildCommand (..) ) +import Stack.Types.GlobalOptsMonoid ( GlobalOptsMonoid (..) ) +import Stack.Types.Runner ( Runner ) +import Stack.Types.Version ( stackVersion ) +import Stack.Uninstall ( uninstallCmd ) +import Stack.Unpack ( unpackCmd ) +import Stack.Update ( updateCmd ) +import Stack.Upgrade ( upgradeCmd ) +import Stack.Upload ( uploadCmd ) +import qualified System.Directory as D +import System.Environment ( withArgs ) +import System.FilePath ( pathSeparator, takeDirectory ) + +-- | Type representing \'pretty\' exceptions thrown by functions in the +-- "Stack.CLI" module. +data CliPrettyException + = NoArgumentsBug + deriving Show + +instance Pretty CliPrettyException where + pretty NoArgumentsBug = bugPrettyReport "[S-4639]" $ + flow "commandLineHandler: no command line arguments on event of failure." 
+ +instance Exception CliPrettyException + +-- | Stack's command line handler. +commandLineHandler :: + FilePath + -> String + -- ^ The name of the current Stack executable, as it was invoked. + -> Maybe (Path Abs File) + -- ^ The path to the current Stack executable, if the operating system + -- provides a reliable way to determine it and where a result was + -- available. + -> Bool + -> IO (GlobalOptsMonoid, RIO Runner ()) +commandLineHandler currentDir progName mExecutablePath isInterpreter = + -- Append the relevant default (potentially affecting the LogLevel) *after* + -- appending the global options of the `stack` command to the global options + -- of the subcommand - see #5326. + first (<> defaultGlobalOpts) <$> complicatedOptions + stackVersion + (Just versionString') + hpackVersion + "stack - The Haskell Tool Stack" + "" + ("Stack's documentation is available at https://docs.haskellstack.org/. \ + \Command '" <> progName <> " COMMAND --help' for help about a Stack command. Stack also \ + \supports the Haskell Error Index at https://errors.haskell.org/.") + (globalOpts OuterGlobalOpts) + (Just failureCallback) + addCommands + where + defaultGlobalOpts = if isInterpreter + then + -- Silent except when errors occur - see #2879 + mempty { logLevel = First (Just LevelError) } + else mempty + failureCallback f args = + case L.stripPrefix "Invalid argument" (fst (renderFailure f "")) of + Just _ -> maybe + (prettyThrowIO NoArgumentsBug) + ( \args' -> if isInterpreter + then + parseResultHandler (NE.toList args') f + else + secondaryCommandHandler args' f + >>= interpreterHandler progName mExecutablePath currentDir args' + ) + (NE.nonEmpty args) + Nothing -> parseResultHandler args f + + parseResultHandler args f = + if isInterpreter + then do + let hlp = errorHelp $ stringChunk + (unwords ["Error executing interpreter command:" + , progName + , unwords args]) + handleParseResult (overFailure (vcatErrorHelp hlp) (Failure f)) + else handleParseResult (Failure f) + 
+ -- The order of commands below determines the order in which they are listed + -- in `stack --help`. + addCommands = do + unless isInterpreter $ do + build + install + uninstall + test + bench + haddock + new + templates + init + setup + path + ls + unpack + update + upgrade + upload + sdist + dot + ghc + hoogle + -- These are the only commands allowed in interpreter mode as well + exec + run + ghci + repl + runghc + runhaskell + script + unless isInterpreter $ do + eval + clean + purge + query + list + ide + docker + config + hpc + + -- Stack's subcommands are listed below in alphabetical order + + bench = addBuildCommand' + "bench" + "Shortcut for 'build --bench'." + buildCmd + (buildOptsParser Bench) + + build = addBuildCommand' + "build" + "Build the package(s) in this directory/configuration." + buildCmd + (buildOptsParser Build) + + clean = addCommand' + "clean" + "Delete build artefacts for project packages." + cleanCmd + (cleanOptsParser Clean) + + config = addSubCommands' + cfgCmdName + "Subcommands for accessing and modifying configuration values." + ( do + addCommand' + cfgCmdSetName + "Set a key in a configuration file to value." + (withConfig NoReexec . cfgCmdSet) + configCmdSetParser + addCommandWithLocalInstallRootFooter + cfgCmdEnvName + "Print environment variables for use in a shell." + (withConfig YesReexec . withDefaultEnvConfig . cfgCmdEnv) + configCmdEnvParser + addCommand' + cfgCmdBuildFilesName + "Generate (when applicable) a Cabal file from a package \ + \ description in the Hpack format and/or a lock file for Stack's \ + \project-level configuration." + -- It is withBuildConfig that yields the desired actions; + -- cfgCmdBuildFiles itself yields nothing of interest. + (withConfig YesReexec . withBuildConfig . cfgCmdBuildFiles) + (pure ()) + ) + + docker = addSubCommands' + dockerCmdName + "Subcommands specific to Docker use." + ( do + addCommand' + dockerPullCmdName + "Pull latest version of Docker image from registry." 
+ dockerPullCmd + (pure ()) + addCommand' + "reset" + "Reset the Docker sandbox." + dockerResetCmd + ( switch + ( long "keep-home" + <> help "Do not delete sandbox's home directory." + ) + ) + ) + + dot = addCommand' + "dot" + "Visualize your project's dependency graph using Graphviz dot." + dotCmd + (dotOptsParser False) -- Default for --external is False. + + eval = addCommand' + "eval" + "Evaluate some Haskell code inline. Shortcut for \ + \'stack exec ghc -- -e CODE'." + evalCmd + (evalOptsParser "CODE") + + exec = addCommandWithLocalInstallRootFooter + "exec" + "Execute a command. If the command is absent, the first of any arguments \ + \is taken as the command." + execCmd + (execOptsParser Nothing) + + ghc = addCommand' + "ghc" + "Run ghc." + execCmd + (execOptsParser $ Just ExecGhc) + + ghci = addGhciCommand' + "ghci" + "Run ghci in the context of package(s)." + ghciCmd + ghciOptsParser + + haddock = addBuildCommand' + "haddock" + "Shortcut for 'build --haddock'." + buildCmd + (buildOptsParser Haddock) + + hoogle = addCommand' + "hoogle" + "Run hoogle, the Haskell API search engine. Use the '-- ARGUMENT(S)' \ + \syntax to pass Hoogle arguments, e.g. 'stack hoogle -- --count=20', \ + \or 'stack hoogle -- server --local'." + hoogleCmd + ( (,,,) + <$> many (strArgument + ( metavar "-- ARGUMENT(S) (e.g. 'stack hoogle -- server --local')" + )) + <*> boolFlags + True + "setup" + "If needed: install Hoogle, build Haddock documentation and \ + \generate a Hoogle database." + idm + <*> switch + ( long "rebuild" + <> help "Rebuild the Hoogle database." + ) + <*> switch + ( long "server" + <> help "Start local Hoogle server." + ) + ) + + hpc = addSubCommands' + "hpc" + "Subcommands specific to Haskell Program Coverage." + ( addCommand' + "report" + "Generate unified HPC coverage report from tix files and project \ + \targets." + hpcReportCmd + hpcReportOptsParser + ) + + ide = addSubCommands' + "ide" + "IDE-specific commands." 
+ ( do + addCommand' + "packages" + "List all available local loadable packages." + idePackagesCmd + idePackagesParser + addCommand' + "targets" + "List all targets or pick component types to list." + ideTargetsCmd + ideTargetsParser + ) + + init = addCommand' + "init" + "Create Stack project configuration from Cabal or Hpack package \ + \specifications. If a snapshot is specified at the command line, the \ + \command will try to use it." + initCmd + initOptsParser + + install = addBuildCommand' + "install" + "Shortcut for 'build --copy-bins'." + buildCmd + (buildOptsParser Install) + + list = addCommand' + "list" + "List package versions included in the package index, or in a specified \ + \snapshot (directly or indirectly)." + listCmd + (many $ strArgument $ metavar "PACKAGE") + + ls = addCommand' + "ls" + "List command. (Supports snapshots, global packages, dependencies, Stack's \ + \styles and installed tools.)" + lsCmd + lsOptsParser + + new = addCommand' + "new" + "Create a new project from a template. Run 'stack templates' to see \ + \available templates. A local file or a remote URL can be specified as a \ + \template. Will initialise if there is no stack.yaml file. Initialisation \ + \may be forced. If a snapshot is specified at the command line, \ + \initialisation will try to use it." + newCmd + newOptsParser + + path = addCommandWithLocalInstallRootFooter + "path" + "Print out handy path information." + Stack.Path.path + pathParser + + purge = addCommand' + "purge" + "Delete the project Stack working directories (.stack-work by \ + \default). Shortcut for 'stack clean --full'." + cleanCmd + (cleanOptsParser Purge) + + query = addCommand' + "query" + "Query general build information (experimental)." + queryCmd + (many $ strArgument $ metavar "SELECTOR...") + + repl = addGhciCommand' + "repl" + "Run ghci in the context of package(s) (alias for 'ghci')." + ghciCmd + ghciOptsParser + + run = addCommand' + "run" + "Build and run an executable. 
Defaults to the first available \ + \executable if none is provided as the first argument." + execCmd + (execOptsParser $ Just ExecRun) + + runghc = addCommand' + "runghc" + "Run runghc." + execCmd + (execOptsParser $ Just ExecRunGhc) + + runhaskell = addCommand' + "runhaskell" + "Run runghc (alias for 'runghc')." + execCmd + (execOptsParser $ Just ExecRunGhc) + + script = addCommand + "script" + "Run a Stack script." + globalFooter + scriptCmd + (\so gom -> gom { snapshotRoot = First $ Just $ takeDirectory so.file }) + (globalOpts OtherCmdGlobalOpts) + scriptOptsParser + + sdist = addCommand' + "sdist" + "Create source distribution tarballs." + sdistCmd + sdistOptsParser + + setup = addCommand' + "setup" + "Get the appropriate GHC for your project." + setupCmd + setupOptsParser + + templates = addCommand' + "templates" + "Show how to find templates available for 'stack new'. 'stack new' \ + \can accept a template from a remote repository (default: github), \ + \local file or remote URL. Note: this downloads the help file." + templatesCmd + (pure ()) + + test = addBuildCommand' + "test" + "Shortcut for 'build --test'." + buildCmd + (buildOptsParser Test) + + uninstall = addCommand' + "uninstall" + "Show how to uninstall Stack or a Stack-supplied tool. This command does \ + \not itself uninstall Stack or a Stack-supplied tool." + uninstallCmd + (pure ()) + + unpack = addCommand' + "unpack" + "Unpack one or more packages, or one or more package candidates, locally." + unpackCmd + unpackOptsParser + + update = addCommand' + "update" + "Update the package index." + updateCmd + (pure ()) + + upgrade = addCommand'' + "upgrade" + "Upgrade Stack, installing to Stack's local-bin directory and, if \ + \different and permitted, the directory of the current Stack \ + \executable." + upgradeCmd + "Warning: if you use GHCup to install Stack, use only GHCup to \ + \upgrade Stack." 
+ (upgradeOptsParser onlyLocalBins) + where + isProgNameStack = + (lowercase progName == lowercase stackProgName) + || ( osIsWindows + && lowercase progName == lowercase (stackProgName <> ".EXE") + ) + isRelFileNameStack relFile = + (relFile == relFileStack) + || (osIsWindows && relFile == relFileStackDotExe ) + isExecutableNameStack = + let mExecutableName = filename <$> mExecutablePath + in maybe False isRelFileNameStack mExecutableName + onlyLocalBins = not (isProgNameStack && isExecutableNameStack) + lowercase = map toLower + + upload = addCommand' + "upload" + "Upload one or more packages, or documentation for one or more packages, \ + \to Hackage." + uploadCmd + uploadOptsParser + + -- addCommand hiding global options + addCommand' :: + String + -> String + -> (a -> RIO Runner ()) + -> Parser a + -> AddCommand + addCommand' cmd title constr = + addCommand + cmd + title + globalFooter + constr + (\_ gom -> gom) + (globalOpts OtherCmdGlobalOpts) + + -- addCommand with custom footer hiding global options + addCommand'' :: + String + -> String + -> (a -> RIO Runner ()) + -> String + -> Parser a + -> AddCommand + addCommand'' cmd title constr cmdFooter = + addCommand + cmd + title + (globalFooter <> " " <> cmdFooter) + constr + (\_ gom -> gom) + (globalOpts OtherCmdGlobalOpts) + + -- addCommand with custom footer about options affecting the local install + --root and hiding global options + addCommandWithLocalInstallRootFooter :: + String + -> String + -> (a -> RIO Runner ()) + -> Parser a + -> AddCommand + addCommandWithLocalInstallRootFooter cmd title constr = + addCommand'' + cmd + title + constr + "This command also accepts 'stack build' flags and options that affect \ + \the location of the local project installation root directory." 
+ + addSubCommands' :: + String + -> String + -> AddCommand + -> AddCommand + addSubCommands' cmd title = + addSubCommands + cmd + title + globalFooter + (globalOpts OtherCmdGlobalOpts) + + -- Additional helper that hides global options and shows build options + addBuildCommand' :: + String + -> String + -> (a -> RIO Runner ()) + -> Parser a + -> AddCommand + addBuildCommand' cmd title constr = + addCommand + cmd + title + globalFooter + constr + (\_ gom -> gom) + (globalOpts BuildCmdGlobalOpts) + + -- Additional helper that hides global options and shows some ghci options + addGhciCommand' :: + String + -> String + -> (a -> RIO Runner ()) + -> Parser a + -> AddCommand + addGhciCommand' cmd title constr = + addCommand + cmd + title + globalFooter + constr + (\_ gom -> gom) + (globalOpts GhciCmdGlobalOpts) + + globalOpts :: GlobalOptsContext -> Parser GlobalOptsMonoid + globalOpts kind = + extraHelpOption + hide + progName + (dockerCmdName ++ "*") + dockerHelpOptName + <*> extraHelpOption + hide + progName + (Nix.nixCmdName ++ "*") + Nix.nixHelpOptName + <*> globalOptsParser currentDir kind + where + hide = kind /= OuterGlobalOpts + +-- | fall-through to external executables in \'git\' style if they exist +-- (i.e. @stack something@ looks for @stack-something@ before failing with +-- "Invalid argument \'something\'".) 
+secondaryCommandHandler :: + NonEmpty String + -> ParserFailure ParserHelp + -> IO (ParserFailure ParserHelp) +secondaryCommandHandler args f = + -- don't even try when the argument looks like a path or flag + if elem pathSeparator cmd || "-" `L.isPrefixOf` NE.head args + then pure f + else do + D.findExecutable cmd >>= \case + Just ex -> withProcessContextNoLogging $ do + -- TODO show the command in verbose mode + -- hPutStrLn stderr $ unwords $ + -- ["Running", "[" ++ ex, unwords (tail args) ++ "]"] + _ <- RIO.Process.exec ex (NE.tail args) + pure f + Nothing -> pure $ fmap (vcatErrorHelp (noSuchCmd cmd)) f + where + -- FIXME this is broken when any options are specified before the command + -- e.g. stack --verbosity silent cmd + cmd = stackProgName <> "-" <> NE.head args + noSuchCmd name = errorHelp $ stringChunk + ("Auxiliary command not found in path '" ++ name ++ "'.") + +interpreterHandler :: + Monoid t + => String + -- ^ The name of the current Stack executable, as it was invoked. + -> Maybe (Path Abs File) + -- ^ The path to the current Stack executable, if the operating system + -- provides a reliable way to determine it and where a result was + -- available. + -> FilePath + -> NonEmpty String + -> ParserFailure ParserHelp + -> IO (GlobalOptsMonoid, (RIO Runner (), t)) +interpreterHandler progName mExecutablePath currentDir args f = do + -- args can include top-level config such as --extra-lib-dirs=... (set by + -- nix-shell) - we need to find the first argument which is a file, everything + -- afterwards is an argument to the script, everything before is an argument + -- to Stack + (stackArgs, fileArgs) <- spanM (fmap not . 
D.doesFileExist) args + case fileArgs of + (file:fileArgs') -> runInterpreterCommand file stackArgs fileArgs' + [] -> parseResultHandler (errorCombine (noSuchFile firstArg)) + where + firstArg = NE.head args + + spanM p xs@(x :| rest) = do + r <- p x + if r + then case rest of + [] -> pure ([x], []) + (x': rest') -> do + (ys, zs) <- spanM p (x' :| rest') + pure (x : ys, zs) + else + pure ([], NE.toList xs) + + -- if the first argument contains a path separator then it might be a file, + -- or a Stack option referencing a file. In that case we only show the + -- interpreter error message and exclude the command related error messages. + errorCombine = + if pathSeparator `elem` firstArg + then overrideErrorHelp + else vcatErrorHelp + + overrideErrorHelp h1 h2 = h2 { helpError = helpError h1 } + + parseResultHandler fn = handleParseResult (overFailure fn (Failure f)) + noSuchFile name = errorHelp $ stringChunk + ("File does not exist or is not a regular file '" ++ name ++ "'.") + + runInterpreterCommand path stackArgs fileArgs = do + iargs <- getInterpreterArgs path + let parseCmdLine = + commandLineHandler currentDir progName mExecutablePath True + -- Implicit file arguments are put before other arguments that + -- occur after "--". See #3658 + cmdArgs = prependList stackArgs $ case NE.break (== "--") iargs of + (beforeSep, []) -> prependList beforeSep $ "--" <| path :| fileArgs + (beforeSep, optSep : afterSep) -> + prependList beforeSep $ optSep <| path :| fileArgs <> afterSep + -- TODO show the command in verbose mode + -- hPutStrLn stderr $ unwords $ + -- ["Running", "[" ++ progName, unwords cmdArgs ++ "]"] + (a,b) <- withArgs (NE.toList cmdArgs) parseCmdLine + pure (a,(b,mempty)) + +-- Vertically combine only the error component of the first argument with the +-- error component of the second. 
+vcatErrorHelp :: ParserHelp -> ParserHelp -> ParserHelp +vcatErrorHelp h1 h2 = h2 { helpError = vcatChunks [helpError h2, helpError h1] } diff --git a/src/Stack/Clean.hs b/src/Stack/Clean.hs index ee27229272..0778bfc672 100644 --- a/src/Stack/Clean.hs +++ b/src/Stack/Clean.hs @@ -1,82 +1,174 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Clean +Description : Types and functions related to Stack's @clean@ and @purge@ commands. +License : BSD-3-Clause + +Types and functions related to Stack's @clean@ and @purge@ commands. +-} --- | Clean a project. module Stack.Clean - (clean - ,CleanOpts(..) - ,CleanCommand(..) - ,StackCleanException(..) - ) where + ( CleanOpts (..) + , CleanDepth (..) + , CleanCommand (..) + , cleanCmd + , clean + ) where -import Stack.Prelude -import Data.List ((\\),intercalate) +import Control.Monad.Extra ( concatMapM ) +import Data.List ( (\\) ) import qualified Data.Map.Strict as Map -import Path.IO (ignoringAbsence, removeDirRecur) -import Stack.Constants.Config (rootDistDirFromDir, workDirFromDir) -import Stack.Types.Config -import Stack.Types.SourceMap +import Path ( (), isProperPrefixOf ) +import Path.IO ( ignoringAbsence, listDirRecur, removeDirRecur ) +import Stack.Config ( withBuildConfig ) +import Stack.Constants.Config + ( distRelativeDir, rootDistDirFromDir, workDirFromDir ) +import Stack.Prelude +import Stack.Runners + ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..), getWorkDir ) +import Stack.Types.Config ( Config ) +import Stack.Types.EnvConfig ( EnvConfig ) +import Stack.Types.Runner ( Runner ) +import Stack.Types.SourceMap ( ProjectPackage, SMWanted (..), ppRoot ) --- | Deletes build 
artifacts in the current project. --- --- Throws 'StackCleanException'. -clean :: HasBuildConfig env => CleanOpts -> RIO env () -clean cleanOpts = do - toDelete <- dirsToDelete cleanOpts - logDebug $ "Need to delete: " <> fromString (show (map toFilePath toDelete)) - failures <- mapM cleanDir toDelete - when (or failures) exitFailure - where - cleanDir dir = do - logDebug $ "Deleting directory: " <> fromString (toFilePath dir) - liftIO (ignoringAbsence (removeDirRecur dir) >> return False) `catchAny` \ex -> do - logError $ "Exception while recursively deleting " <> fromString (toFilePath dir) <> "\n" <> displayShow ex - logError "Perhaps you do not have permission to delete these files or they are in use?" - return True - -dirsToDelete :: HasBuildConfig env => CleanOpts -> RIO env [Path Abs Dir] -dirsToDelete cleanOpts = do - packages <- view $ buildConfigL.to (smwProject . bcSMWanted) - case cleanOpts of - CleanShallow [] -> - -- Filter out packages listed as extra-deps - mapM (rootDistDirFromDir . ppRoot) $ Map.elems packages - CleanShallow targets -> do - let localPkgNames = Map.keys packages - getPkgDir pkgName' = fmap ppRoot (Map.lookup pkgName' packages) - case targets \\ localPkgNames of - [] -> mapM rootDistDirFromDir (mapMaybe getPkgDir targets) - xs -> throwM (NonLocalPackages xs) - CleanFull -> do - pkgWorkDirs <- mapM (workDirFromDir . ppRoot) $ Map.elems packages - projectWorkDir <- getProjectWorkDir - return (projectWorkDir : pkgWorkDirs) - --- | Options for @stack clean@. -data CleanOpts - = CleanShallow [PackageName] - -- ^ Delete the "dist directories" as defined in 'Stack.Constants.Config.distRelativeDir' - -- for the given local packages. If no packages are given, all project packages - -- should be cleaned. - | CleanFull +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Clean" module. 
+data CleanPrettyException + = NonLocalPackages [PackageName] + | DeletionFailures [(Path Abs Dir, SomeException)] + deriving Show + +instance Pretty CleanPrettyException where + pretty (NonLocalPackages pkgs) = + "[S-9463]" + <> line + <> fillSep + ( flow "The following are not project packages:" + : mkNarrativeList (Just Current) False + (map fromPackageName pkgs :: [StyleDoc]) + ) + pretty (DeletionFailures failures) = + "[S-6321]" + <> line + <> flow "Exception while recursively deleting:" + <> line + <> mconcat (map prettyFailure failures) + <> flow "Perhaps you do not have permission to delete these files or they \ + \are in use?" + where + prettyFailure (dir, e) = + pretty dir + <> line + <> string (displayException e) + <> line + +instance Exception CleanPrettyException + +-- | Type representing command line options for the @stack clean@ command. +data CleanOpts = CleanOpts + { depth :: !CleanDepth + , omitThis :: !Bool + } + +-- | Type representing depths of cleaning for the @stack clean@ command. +data CleanDepth + = CleanShallow [PackageName] + -- ^ Delete the "dist directories" as defined in + -- 'Stack.Constants.Config.distRelativeDir' for the given project packages. + -- If no project packages are given, all project packages should be cleaned. + | CleanFull -- ^ Delete all work directories in the project. --- | Clean commands +-- | Type representing Stack's cleaning commands. data CleanCommand - = Clean - | Purge + = Clean + | Purge + +-- | Function underlying the @stack clean@ command. +cleanCmd :: CleanOpts -> RIO Runner () +cleanCmd = withConfig NoReexec . clean + +-- | Deletes build artifacts in the current project. 
+clean :: CleanOpts -> RIO Config () +clean cleanOpts = do + toDelete <- if cleanOpts.omitThis + then + withDefaultEnvConfig $ dirsToDeleteGivenConfig cleanOpts.depth + else + withBuildConfig $ dirsToDeleteSimple cleanOpts.depth + logDebug $ "Need to delete: " <> fromString (show (map toFilePath toDelete)) + failures <- catMaybes <$> mapM cleanDir toDelete + case failures of + [] -> pure () + _ -> prettyThrowIO $ DeletionFailures failures + +cleanDir :: Path Abs Dir -> RIO Config (Maybe (Path Abs Dir, SomeException)) +cleanDir dir = do + logDebug $ "Deleting directory: " <> fromString (toFilePath dir) + liftIO (ignoringAbsence (removeDirRecur dir) >> pure Nothing) `catchAny` \ex -> + pure $ Just (dir, ex) + +dirsToDeleteSimple :: CleanDepth -> RIO BuildConfig [Path Abs Dir] +dirsToDeleteSimple depth = do + packages <- view $ buildConfigL . to (.smWanted.project) + case depth of + CleanShallow [] -> do + -- Filter out packages listed as extra-deps + let pkgNames = Map.elems packages + mapM (rootDistDirFromDir . ppRoot) pkgNames + CleanShallow targets -> do + let localPkgNames = Map.keys packages + getPkgDir pkgName' = fmap ppRoot (Map.lookup pkgName' packages) + pkgNames = mapMaybe getPkgDir targets + case targets \\ localPkgNames of + [] -> mapM rootDistDirFromDir pkgNames + xs -> prettyThrowM (NonLocalPackages xs) + CleanFull -> allWorkDirs $ Map.elems packages + +dirsToDeleteGivenConfig :: CleanDepth -> RIO EnvConfig [Path Abs Dir] +dirsToDeleteGivenConfig depth = do + packages <- view $ buildConfigL . to (.smWanted.project) + case depth of + CleanShallow [] -> do + -- Filter out packages listed as extra-deps + let pkgNames = Map.elems packages + concatMapM (unusedRootDistDirsFromDir . 
ppRoot) pkgNames + CleanShallow targets -> do + let localPkgNames = Map.keys packages + getPkgDir pkgName' = fmap ppRoot (Map.lookup pkgName' packages) + pkgNames = mapMaybe getPkgDir targets + case targets \\ localPkgNames of + [] -> concatMapM unusedRootDistDirsFromDir pkgNames + xs -> prettyThrowM (NonLocalPackages xs) + CleanFull -> allWorkDirs $ Map.elems packages --- | Exceptions during cleanup. -newtype StackCleanException - = NonLocalPackages [PackageName] - deriving (Typeable) +allWorkDirs :: HasBuildConfig env => [ProjectPackage] -> RIO env [Path Abs Dir] +allWorkDirs pps = do + pkgWorkDirs <- mapM (workDirFromDir . ppRoot) pps + projectWorkDir <- getWorkDir + pure (projectWorkDir : pkgWorkDirs) -instance Show StackCleanException where - show (NonLocalPackages pkgs) = - "The following packages are not part of this project: " ++ - intercalate ", " (map show pkgs) +unusedRootDistDirsFromDir :: Path Abs Dir -> RIO EnvConfig [Path Abs Dir] +unusedRootDistDirsFromDir pkgDir = do + rootDistDir <- rootDistDirFromDir pkgDir + omitDir <- fmap (pkgDir ) distRelativeDir + allDirsOmittingDirs rootDistDir omitDir -instance Exception StackCleanException +allDirsOmittingDirs :: + MonadIO m + => Path Abs Dir + -> Path Abs Dir + -> m [Path Abs Dir] +allDirsOmittingDirs topDir subDir = do + allDirs <- (topDir :) . fst <$> listDirRecur topDir + let isNotInSubDir dir = not + ( isProperPrefixOf dir subDir + || subDir == dir + || isProperPrefixOf subDir dir + ) + pure $ filter isNotInSubDir allDirs diff --git a/src/Stack/Component.hs b/src/Stack/Component.hs new file mode 100644 index 0000000000..175ed76e02 --- /dev/null +++ b/src/Stack/Component.hs @@ -0,0 +1,213 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Component +License : BSD-3-Clause + +All utility functions for Components in Stack (library, internal library, +foreign library, executable, tests, benchmarks). 
In particular, this module +gathers all the Cabal-to-Stack component translations, which previously occurred +in the "Stack.Package" module. See "Stack.Types.Component" for more details +about the design choices. +-} + +module Stack.Component + ( isComponentBuildable + , stackLibraryFromCabal + , stackExecutableFromCabal + , stackForeignLibraryFromCabal + , stackBenchmarkFromCabal + , stackTestFromCabal + , foldOnNameAndBuildInfo + , componentDependencyMap + , fromCabalName + ) where + +import qualified Data.Map as Map +import qualified Data.Set as Set +import Data.Text ( pack ) +import Distribution.PackageDescription + ( Benchmark (..), Executable, ForeignLib, Library (..) + , TestSuite (..) + ) +import Distribution.Types.BuildInfo ( BuildInfo ) +import Distribution.Package ( mkPackageName ) +import qualified Distribution.PackageDescription as Cabal +import Distribution.Utils.Path (interpretSymbolicPathCWD) +import GHC.Records ( HasField ) +import Stack.Prelude +import Stack.Types.Component + ( HasBuildInfo, StackBenchmark (..), StackBuildInfo (..) + , StackExecutable (..), StackForeignLibrary (..) + , StackLibrary (..), StackTestSuite (..) + , StackUnqualCompName (..) 
+ ) +import Stack.Types.ComponentUtils ( fromCabalName ) +import Stack.Types.Dependency ( cabalExeToStackDep, cabalToStackDep ) + +foldOnNameAndBuildInfo :: + ( HasField "buildInfo" a StackBuildInfo + , HasField "name" a StackUnqualCompName + , Foldable c + ) + => c a + -> (StackUnqualCompName -> StackBuildInfo -> t -> t) + -> t + -> t +foldOnNameAndBuildInfo initialCollection accumulator input = + foldr' iterator input initialCollection + where + iterator comp = accumulator comp.name comp.buildInfo + +stackLibraryFromCabal :: Library -> StackLibrary +stackLibraryFromCabal cabalLib = StackLibrary + { name = case cabalLib.libName of + LMainLibName -> StackUnqualCompName mempty + LSubLibName v -> fromCabalName v + , buildInfo = stackBuildInfoFromCabal cabalLib.libBuildInfo + , exposedModules = cabalLib.exposedModules + } + +stackExecutableFromCabal :: Executable -> StackExecutable +stackExecutableFromCabal cabalExecutable = StackExecutable + { name = fromCabalName cabalExecutable.exeName + , buildInfo = stackBuildInfoFromCabal cabalExecutable.buildInfo + , modulePath = interpretSymbolicPathCWD cabalExecutable.modulePath + } + +stackForeignLibraryFromCabal :: ForeignLib -> StackForeignLibrary +stackForeignLibraryFromCabal cabalForeignLib = StackForeignLibrary + { name = fromCabalName cabalForeignLib.foreignLibName + , buildInfo=stackBuildInfoFromCabal cabalForeignLib.foreignLibBuildInfo + } + +stackBenchmarkFromCabal :: Benchmark -> StackBenchmark +stackBenchmarkFromCabal cabalBenchmark = StackBenchmark + { name = fromCabalName cabalBenchmark.benchmarkName + , interface = cabalBenchmark.benchmarkInterface + , buildInfo = stackBuildInfoFromCabal cabalBenchmark.benchmarkBuildInfo + } + +stackTestFromCabal :: TestSuite -> StackTestSuite +stackTestFromCabal cabalTest = StackTestSuite + { name = fromCabalName cabalTest.testName + , interface = cabalTest.testInterface + , buildInfo = stackBuildInfoFromCabal cabalTest.testBuildInfo + } + +isComponentBuildable :: HasBuildInfo 
component => component -> Bool +isComponentBuildable componentRec = componentRec.buildInfo.buildable + +stackBuildInfoFromCabal :: BuildInfo -> StackBuildInfo +stackBuildInfoFromCabal buildInfoV = gatherComponentToolsAndDepsFromCabal + buildInfoV.buildTools + buildInfoV.buildToolDepends + buildInfoV.targetBuildDepends + StackBuildInfo + { buildable = buildInfoV.buildable + , otherModules = buildInfoV.otherModules + , jsSources = map interpretSymbolicPathCWD buildInfoV.jsSources + , hsSourceDirs = buildInfoV.hsSourceDirs + , cSources = map interpretSymbolicPathCWD buildInfoV.cSources + , dependency = mempty + , unknownTools = mempty + , cppOptions = buildInfoV.cppOptions + , targetBuildDepends = buildInfoV.targetBuildDepends + , options = buildInfoV.options + , allLanguages = Cabal.allLanguages buildInfoV + , usedExtensions = Cabal.usedExtensions buildInfoV + , includeDirs = map interpretSymbolicPathCWD buildInfoV.includeDirs + , extraLibs = buildInfoV.extraLibs + , extraLibDirs = map interpretSymbolicPathCWD buildInfoV.extraLibDirs + , frameworks = map interpretSymbolicPathCWD buildInfoV.frameworks + } + +-- | Iterate on all three dependency list given, and transform and sort them +-- between 'Stack.Types.Component.unknownTools' and +-- legitimate t'Stack.Types.Dependency.DepValue' +-- 'Stack.Types.Component.dependency'. Bear in mind that this only gathers the +-- component level dependencies. +gatherComponentToolsAndDepsFromCabal :: + [Cabal.LegacyExeDependency] + -- ^ Legacy build tools dependency from + -- 'Distribution.Types.BuildInfo.buildTools'. + -> [Cabal.ExeDependency] + -- ^ Build tools dependency from + -- `Distribution.Types.BuildInfo.buildToolDepends'. + -> [Cabal.Dependency] + -- ^ Cabal-syntax defines + -- 'Distribution.Types.BuildInfo.targetBuildDepends'. 
These are the + -- simplest dependencies for a component extracted from the Cabal file such + -- as: + -- @ + -- build-depends: + -- foo ^>= 1.2.3.4, + -- bar ^>= 1 + -- @ + -> StackBuildInfo + -> StackBuildInfo +gatherComponentToolsAndDepsFromCabal legacyBuildTools buildTools targetDeps = + gatherTargetDependency . gatherToolsDependency . gatherUnknownTools + where + gatherUnknownTools sbi = foldl' processLegacyExeDepency sbi legacyBuildTools + gatherToolsDependency sbi = foldl' processExeDependency sbi buildTools + gatherTargetDependency sbi = foldl' processDependency sbi targetDeps + -- This is similar to Cabal's + -- 'Distribution.Simple.BuildToolDepends.desugarBuildTool', however it uses + -- our own hard-coded map which drops tools shipped with GHC (like hsc2hs), + -- and includes some tools from Stackage. + processLegacyExeDepency sbi (Cabal.LegacyExeDependency exeName range) = + case isKnownLegacyExe exeName of + Just pName -> + processExeDependency + sbi + (Cabal.ExeDependency pName (Cabal.mkUnqualComponentName exeName) range) + Nothing -> sbi + { unknownTools = Set.insert (pack exeName) sbi.unknownTools } + processExeDependency sbi exeDep@(Cabal.ExeDependency pName _ _) + | isPreInstalledPackages pName = sbi + | otherwise = sbi + { dependency = + Map.insert pName (cabalExeToStackDep exeDep) sbi.dependency + } + processDependency sbi dep@(Cabal.Dependency pName _ _) = sbi + { dependency = Map.insert pName (cabalToStackDep dep) sbi.dependency } + +componentDependencyMap :: + (HasField "buildInfo" r1 r2, HasField "dependency" r2 a) + => r1 + -> a +componentDependencyMap component = component.buildInfo.dependency + +-- | A hard-coded map for tool dependencies. If a dependency is within this map +-- it's considered "known" (the exe will be found at the execution stage). The +-- corresponding Cabal function is +-- 'Distribution.Simple.BuildToolDepends.desugarBuildTool'. 
+isKnownLegacyExe :: String -> Maybe PackageName +isKnownLegacyExe input = case input of + "alex" -> justPck "alex" + "happy" -> justPck "happy" + "cpphs" -> justPck "cpphs" + "greencard" -> justPck "greencard" + "c2hs" -> justPck "c2hs" + "hscolour" -> justPck "hscolour" + "hspec-discover" -> justPck "hspec-discover" + "hsx2hs" -> justPck "hsx2hs" + "gtk2hsC2hs" -> justPck "gtk2hs-buildtools" + "gtk2hsHookGenerator" -> justPck "gtk2hs-buildtools" + "gtk2hsTypeGen" -> justPck "gtk2hs-buildtools" + _ -> Nothing + where + justPck = Just . mkPackageName + +-- | Executable-only packages which come pre-installed with GHC and do not need +-- to be built. Without this exception, we would either end up unnecessarily +-- rebuilding these packages, or failing because the packages do not appear in +-- the Stackage snapshot. +isPreInstalledPackages :: PackageName -> Bool +isPreInstalledPackages input = case input of + "hsc2hs" -> True + "haddock" -> True + _ -> False diff --git a/src/Stack/ComponentFile.hs b/src/Stack/ComponentFile.hs new file mode 100644 index 0000000000..d8bf8ad49a --- /dev/null +++ b/src/Stack/ComponentFile.hs @@ -0,0 +1,634 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.ComponentFile +License : BSD-3-Clause + +A module which exports all component-level file-gathering logic. It also +includes utility functions for handling paths and directories. +-} + +module Stack.ComponentFile + ( resolveOrWarn + , componentOutputDir + , componentBuildDir + , packageAutogenDir + , buildDir + , componentAutogenDir + , ComponentFile (..) 
+ , stackLibraryFiles + , stackExecutableFiles + , stackTestSuiteFiles + , stackBenchmarkFiles + ) where + +import Control.Exception ( throw ) +import Data.Foldable ( foldrM ) +import Data.List ( find, isPrefixOf ) +import qualified Data.Map.Strict as M +import qualified Data.Set as S +import qualified Data.Text as T +import Distribution.ModuleName ( ModuleName ) +import qualified Distribution.ModuleName as Cabal +import Distribution.PackageDescription + ( BenchmarkInterface (..), TestSuiteInterface (..) ) +import Distribution.Text ( display ) +import Distribution.Utils.Path + ( Pkg, Source, SymbolicPath, getSymbolicPath ) +import qualified Distribution.Utils.Path as Cabal +import GHC.Records ( HasField ) +import qualified HiFileParser as Iface +import Path + ( (), filename, isProperPrefixOf, parent, parseRelDir + , stripProperPrefix + ) +import Path.Extra + ( forgivingResolveDir, forgivingResolveFile + , parseCollapsedAbsFile, rejectMissingDir, rejectMissingFile + ) +import Path.IO + ( doesDirExist, doesFileExist, getCurrentDir, listDir ) +import Stack.Constants + ( haskellDefaultPreprocessorExts, haskellFileExts + , relDirAutogen, relDirBuild, relDirGlobalAutogen + ) +import Stack.Prelude hiding ( Display (..) ) +import Stack.Types.Component + ( StackBenchmark (..), StackBuildInfo (..) + , StackExecutable (..), StackLibrary (..) + , StackTestSuite (..), StackUnqualCompName (..) + ) +import Stack.Types.ComponentUtils + ( emptyCompName, unqualCompToString ) +import Stack.Types.Config + ( Config (..), HasConfig (..), prettyStackDevL ) +import Stack.Types.NamedComponent ( NamedComponent (..) ) +import Stack.Types.Package ( PackageException (..), dotCabalModule ) +import Stack.Types.PackageFile + ( GetPackageFileContext (..), DotCabalDescriptor (..) + , DotCabalPath (..), PackageWarning (..) 
+ ) +import qualified System.Directory as D ( doesFileExist ) +import qualified System.FilePath as FilePath + +data ComponentFile = ComponentFile + { moduleFileMap :: !(Map ModuleName (Path Abs File)) + , otherFile :: ![DotCabalPath] + , packageWarning :: ![PackageWarning] + } + +-- | Get all files referenced by the benchmark. +stackBenchmarkFiles :: + StackBenchmark + -> RIO GetPackageFileContext (NamedComponent, ComponentFile) +stackBenchmarkFiles bench = + resolveComponentFiles (CBench bench.name) build names + where + names :: [DotCabalDescriptor] + names = bnames <> exposed + + exposed :: [DotCabalDescriptor] + exposed = + case bench.interface of + BenchmarkExeV10 _ fp -> [DotCabalMain $ getSymbolicPath fp] + BenchmarkUnsupported _ -> [] + + bnames :: [DotCabalDescriptor] + bnames = map DotCabalModule build.otherModules + + build :: StackBuildInfo + build = bench.buildInfo + +-- | Get all files referenced by the test. +stackTestSuiteFiles :: + StackTestSuite + -> RIO GetPackageFileContext (NamedComponent, ComponentFile) +stackTestSuiteFiles test = + resolveComponentFiles (CTest test.name) build names + where + names :: [DotCabalDescriptor] + names = bnames <> exposed + + exposed :: [DotCabalDescriptor] + exposed = + case test.interface of + TestSuiteExeV10 _ fp -> [DotCabalMain $ getSymbolicPath fp] + TestSuiteLibV09 _ mn -> [DotCabalModule mn] + TestSuiteUnsupported _ -> [] + + bnames :: [DotCabalDescriptor] + bnames = map DotCabalModule build.otherModules + + build :: StackBuildInfo + build = test.buildInfo + +-- | Get all files referenced by the executable. 
+stackExecutableFiles :: + StackExecutable + -> RIO GetPackageFileContext (NamedComponent, ComponentFile) +stackExecutableFiles exe = + resolveComponentFiles (CExe exe.name) build names + where + build :: StackBuildInfo + build = exe.buildInfo + + names :: [DotCabalDescriptor] + names = + map DotCabalModule build.otherModules ++ [DotCabalMain exe.modulePath] + +-- | Get all files referenced by the library. Handle all libraries (CLib and +-- SubLib), based on empty name or not. +stackLibraryFiles :: + StackLibrary + -> RIO GetPackageFileContext (NamedComponent, ComponentFile) +stackLibraryFiles lib = + resolveComponentFiles componentName build names + where + componentRawName :: StackUnqualCompName + componentRawName = lib.name + + componentName :: NamedComponent + componentName + | componentRawName == emptyCompName = CLib + | otherwise = CSubLib componentRawName + + build :: StackBuildInfo + build = lib.buildInfo + + names :: [DotCabalDescriptor] + names = bnames ++ exposed + + exposed :: [DotCabalDescriptor] + exposed = map DotCabalModule lib.exposedModules + + bnames :: [DotCabalDescriptor] + bnames = map DotCabalModule build.otherModules + +-- | Get all files referenced by the component. +resolveComponentFiles :: + ( CAndJsSources rec + , HasField "hsSourceDirs" rec [SymbolicPath Pkg (Cabal.Dir Source)] + ) + => NamedComponent + -> rec + -> [DotCabalDescriptor] + -> RIO GetPackageFileContext (NamedComponent, ComponentFile) +resolveComponentFiles component build names = do + dirs <- mapMaybeM (resolveDirOrWarn . getSymbolicPath) build.hsSourceDirs + dir <- asks (parent . 
(.file)) + agdirs <- autogenDirs + (modules,files,warnings) <- + resolveFilesAndDeps + component + ((if null dirs then [dir] else dirs) ++ agdirs) + names + cfiles <- buildOtherSources build + pure (component, ComponentFile modules (files <> cfiles) warnings) + where + autogenDirs :: RIO GetPackageFileContext [Path Abs Dir] + autogenDirs = do + distDir <- asks (.distDir) + let compDir = componentAutogenDir component distDir + pkgDir = [packageAutogenDir distDir] + filterM doesDirExist $ compDir : pkgDir + +-- | Try to resolve the list of base names in the given directory by looking for +-- unique instances of base names applied with the given extensions, plus find +-- any of their module and TemplateHaskell dependencies. +resolveFilesAndDeps :: + NamedComponent -- ^ Package component name + -> [Path Abs Dir] -- ^ Directories to look in. + -> [DotCabalDescriptor] -- ^ Base names. + -> RIO + GetPackageFileContext + (Map ModuleName (Path Abs File), [DotCabalPath], [PackageWarning]) +resolveFilesAndDeps component dirs names0 = do + (dotCabalPaths, foundModules, missingModules, _) <- loop names0 S.empty M.empty + warnings <- + liftM2 (++) (warnUnlisted foundModules) (warnMissing missingModules) + pure (foundModules, dotCabalPaths, warnings) + where + loop :: + [DotCabalDescriptor] + -> Set ModuleName + -> Map FilePath (Path Abs File) + -- ^ Known file usages, where the file path has already been resolved. 
+ -> RIO + GetPackageFileContext + ( [DotCabalPath] + , Map ModuleName (Path Abs File) + , [ModuleName] + , Map k a + ) + loop [] _ _ = pure ([], M.empty, [], M.empty) + loop names doneModules0 knownUsages = do + resolved <- resolveFiles dirs names + let foundFiles = mapMaybe snd resolved + foundModules = mapMaybe toResolvedModule resolved + missingModules = mapMaybe toMissingModule resolved + getDependenciesFold c (ps, ku) = do + p <- getDependencies ku component dirs c + pure (p : ps, ku <> snd p) + (pairs, foundUsages) <- foldrM getDependenciesFold ([], knownUsages) foundFiles + let doneModules = S.union + doneModules0 + (S.fromList (mapMaybe dotCabalModule names)) + moduleDeps = S.unions (map fst pairs) + thDepFiles = concatMap (M.elems . snd) pairs + modulesRemaining = S.difference moduleDeps doneModules + -- Ignore missing modules discovered as dependencies - they may + -- have been deleted. + (resolvedFiles, resolvedModules, _, foundUsages') <- + loop (map DotCabalModule (S.toList modulesRemaining)) doneModules foundUsages + pure + ( nubOrd $ foundFiles <> map DotCabalFilePath thDepFiles <> resolvedFiles + , M.union (M.fromList foundModules) resolvedModules + , missingModules + , foundUsages' + ) + warnUnlisted foundModules = do + let unlistedModules = + foundModules `M.difference` + M.fromList (mapMaybe (fmap (, ()) . dotCabalModule) names0) + pure $ + [ UnlistedModulesWarning + component + (map fst (M.toList unlistedModules)) + | not (M.null unlistedModules) + ] + warnMissing _missingModules = + pure [] + -- TODO: bring this back - see + -- https://github.com/commercialhaskell/stack/issues/2649 + {- + cabalfp <- asks ctxFile + pure $ + if null missingModules + then [] + else [ MissingModulesWarning + cabalfp + component + missingModules] + -} + -- TODO: In usages of toResolvedModule / toMissingModule, some sort + -- of map + partition would probably be better. 
+ toResolvedModule :: + (DotCabalDescriptor, Maybe DotCabalPath) + -> Maybe (ModuleName, Path Abs File) + toResolvedModule (DotCabalModule mn, Just (DotCabalModulePath fp)) = + Just (mn, fp) + toResolvedModule _ = + Nothing + toMissingModule :: + (DotCabalDescriptor, Maybe DotCabalPath) + -> Maybe ModuleName + toMissingModule (DotCabalModule mn, Nothing) = + Just mn + toMissingModule _ = + Nothing + +-- | Get the dependencies of a Haskell module file. +getDependencies :: + Map FilePath (Path Abs File) + -- ^ Known file usages, where the file path has already been resolved. + -> NamedComponent + -> [Path Abs Dir] + -> DotCabalPath + -> RIO GetPackageFileContext (Set ModuleName, Map FilePath (Path Abs File)) +getDependencies knownUsages component dirs dotCabalPath = + case dotCabalPath of + DotCabalModulePath resolvedFile -> readResolvedHi resolvedFile + DotCabalMainPath resolvedFile -> readResolvedHi resolvedFile + DotCabalFilePath{} -> pure (S.empty, M.empty) + DotCabalCFilePath{} -> pure (S.empty, M.empty) + where + readResolvedHi resolvedFile = do + dumpHIDir <- componentOutputDir component <$> asks (.distDir) + dir <- asks (parent . (.file)) + let sourceDir = fromMaybe dir $ find (`isProperPrefixOf` resolvedFile) dirs + stripSourceDir d = stripProperPrefix d resolvedFile + case stripSourceDir sourceDir of + Nothing -> pure (S.empty, M.empty) + Just fileRel -> do + let hiPath = FilePath.replaceExtension + (toFilePath (dumpHIDir fileRel)) + ".hi" + dumpHIExists <- liftIO $ D.doesFileExist hiPath + if dumpHIExists + then parseHI knownUsages hiPath + else pure (S.empty, M.empty) + +-- | Parse a .hi file into a set of modules and files (a map from a given path +-- to a file to the resolved absolute path to the file). +parseHI :: + Map FilePath (Path Abs File) + -- ^ Known file usages, where the file path has already been resolved. 
+ -> FilePath + -- ^ The path to the *.hi file to be parsed + -> RIO GetPackageFileContext (Set ModuleName, Map FilePath (Path Abs File)) +parseHI knownUsages hiPath = do + dir <- asks (parent . (.file)) + result <- + liftIO $ catchAnyDeep + (Iface.fromFile hiPath) + (pure . Left . displayException) + case result of + Left msg -> do + prettyStackDevL + [ flow "Failed to decode module interface:" + , style File $ fromString hiPath + , flow "Decoding failure:" + , style Error $ fromString msg + ] + pure (S.empty, M.empty) + Right iface -> do + let moduleNames = fmap (fromString . T.unpack . decodeUtf8Lenient . fst) . + Iface.unList . Iface.dmods . Iface.deps + resolveFileDependency file = + case M.lookup file knownUsages of + Just p -> + pure $ Just (file, p) + Nothing -> do + resolved <- forgivingResolveFile dir file >>= rejectMissingFile + when (isNothing resolved) $ + prettyWarnL + [ flow "Dependent file listed in:" + , style File $ fromString hiPath + , flow "does not exist:" + , style File $ fromString file + ] + pure $ (file,) <$> resolved + resolveUsages = traverse + (resolveFileDependency . Iface.unUsage) . Iface.unList . Iface.usage + resolvedUsages <- catMaybes <$> resolveUsages iface + pure (S.fromList $ moduleNames iface, M.fromList resolvedUsages) + +-- | The directory where generated files are put like .o or .hs (from .x files). +componentOutputDir :: NamedComponent -> Path Abs Dir -> Path Abs Dir +componentOutputDir namedComponent distDir = + case namedComponent of + CLib -> buildDir distDir + CSubLib name -> makeTmp name + CFlib name -> makeTmp name + CExe name -> makeTmp name + CTest name -> makeTmp name + CBench name -> makeTmp name + where + makeTmp name = + buildDir distDir componentNameToDirNormOrTmp True name + +-- | Try to resolve the list of base names in the given directory by +-- looking for unique instances of base names applied with the given +-- extensions. +resolveFiles :: + [Path Abs Dir] -- ^ Directories to look in. 
+ -> [DotCabalDescriptor] -- ^ Base names. + -> RIO GetPackageFileContext [(DotCabalDescriptor, Maybe DotCabalPath)] +resolveFiles dirs names = + forM names (\name -> fmap (name, ) (findCandidate dirs name)) + +-- | Find a candidate for the given module-or-filename from the list +-- of directories and given extensions. +findCandidate :: + [Path Abs Dir] + -> DotCabalDescriptor + -> RIO GetPackageFileContext (Maybe DotCabalPath) +findCandidate dirs name = do + pkg <- asks (.file) >>= parsePackageNameFromFilePath + customPreprocessorExts <- view $ configL . to (.customPreprocessorExts) + let haskellPreprocessorExts = + haskellDefaultPreprocessorExts ++ customPreprocessorExts + liftIO (makeNameCandidates haskellPreprocessorExts) >>= \case + [candidate] -> pure (Just (cons candidate)) + [] -> do + case name of + DotCabalModule mn + | display mn /= paths_pkg pkg -> logPossibilities dirs mn + _ -> pure () + pure Nothing + (candidate:rest) -> do + warnMultiple name candidate rest + pure (Just (cons candidate)) + where + cons = + case name of + DotCabalModule{} -> DotCabalModulePath + DotCabalMain{} -> DotCabalMainPath + DotCabalFile{} -> DotCabalFilePath + DotCabalCFile{} -> DotCabalCFilePath + paths_pkg pkg = "Paths_" ++ packageNameString pkg + makeNameCandidates haskellPreprocessorExts = + fmap + (nubOrd . concat) + (mapM (makeDirCandidates haskellPreprocessorExts) dirs) + makeDirCandidates :: + [Text] + -> Path Abs Dir + -> IO [Path Abs File] + makeDirCandidates haskellPreprocessorExts dir = + case name of + DotCabalMain fp -> resolveCandidate dir fp + DotCabalFile fp -> resolveCandidate dir fp + DotCabalCFile fp -> resolveCandidate dir fp + DotCabalModule mn -> do + let perExt ext = + resolveCandidate + dir (Cabal.toFilePath mn ++ "." 
++ T.unpack ext) + withHaskellExts <- mapM perExt haskellFileExts + withPPExts <- mapM perExt haskellPreprocessorExts + pure $ + case (concat withHaskellExts, concat withPPExts) of + -- If we have exactly 1 Haskell extension and exactly + -- 1 preprocessor extension, assume the former file is + -- generated from the latter + -- + -- See https://github.com/commercialhaskell/stack/issues/4076 + ([_], [y]) -> [y] + -- Otherwise, return everything + (xs, ys) -> xs ++ ys + resolveCandidate dir = fmap maybeToList . resolveDirFile dir + +-- | Log that we couldn't find a candidate, but there are +-- possibilities for custom preprocessor extensions. +-- +-- For example: .erb for a Ruby file might exist in one of the +-- directories. +logPossibilities :: HasTerm env => [Path Abs Dir] -> ModuleName -> RIO env () +logPossibilities dirs mn = do + possibilities <- fmap concat (makePossibilities mn) + unless (null possibilities) $ prettyWarnL + [ flow "Unable to find a known candidate for the Cabal entry" + , (style Module . fromString $ display mn) <> "," + , flow "but did find:" + , line <> bulletedList (map pretty possibilities) + , flow "If you are using a custom preprocessor for this module" + , flow "with its own file extension, consider adding the extension" + , flow "to the value of the" + , style Shell "custom-preprocessor-extensions" + , flow "key in Stack's project-level configuration file" + , "(" <> style File "stack.yaml" <> ")." + ] + where + makePossibilities name = + mapM + ( \dir -> do + (_,files) <- listDir dir + pure + ( map + filename + ( filter + (isPrefixOf (display name) . toFilePath . filename) + files + ) + ) + ) + dirs + +type CAndJsSources rec = + (HasField "cSources" rec [FilePath], HasField "jsSources" rec [FilePath]) + +-- | Get all C sources and extra source files in a build. 
+buildOtherSources :: + CAndJsSources rec + => rec + -> RIO GetPackageFileContext [DotCabalPath] +buildOtherSources build = do + cwd <- liftIO getCurrentDir + dir <- asks (parent . (.file)) + file <- asks (.file) + let resolveDirFiles files toCabalPath = + forMaybeM files $ \fp -> + resolveDirFile dir fp >>= \case + Nothing -> do + warnMissingFile "File" cwd fp file + pure Nothing + Just p -> pure $ Just (toCabalPath p) + csources <- resolveDirFiles build.cSources DotCabalCFilePath + jsources <- resolveDirFiles build.jsSources DotCabalFilePath + pure (csources <> jsources) + +-- | Resolve file as a child of a specified directory, symlinks +-- don't get followed. +resolveDirFile :: + (MonadIO m, MonadThrow m) + => Path Abs Dir + -> FilePath.FilePath + -> m (Maybe (Path Abs File)) +resolveDirFile x y = do + -- The standard canonicalizePath does not work for this case + p <- parseCollapsedAbsFile (toFilePath x FilePath. y) + exists <- doesFileExist p + pure $ if exists then Just p else Nothing + +-- | Warn the user that multiple candidates are available for an +-- entry, but that we picked one anyway and continued. +warnMultiple :: + DotCabalDescriptor + -> Path b t + -> [Path b t] + -> RIO GetPackageFileContext () +warnMultiple name candidate rest = + -- TODO: figure out how to style 'name' and the dispOne stuff + prettyWarnL + [ flow "There were multiple candidates for the Cabal entry" + , fromString . showName $ name + , line <> bulletedList (map dispOne (candidate:rest)) + , line <> flow "picking:" + , dispOne candidate + ] + where + showName (DotCabalModule name') = display name' + showName (DotCabalMain fp) = fp + showName (DotCabalFile fp) = fp + showName (DotCabalCFile fp) = fp + dispOne = fromString . toFilePath + -- TODO: figure out why dispOne can't be just `display` + -- (remove the .hlint.yaml exception if it can be) + +-- | Parse a package name from a file path. 
+parsePackageNameFromFilePath :: MonadThrow m => Path a File -> m PackageName +parsePackageNameFromFilePath fp = do + base <- clean $ toFilePath $ filename fp + case parsePackageName base of + Nothing -> throwM $ CabalFileNameInvalidPackageName $ toFilePath fp + Just x -> pure x + where + clean = fmap reverse . strip . reverse + strip ('l':'a':'b':'a':'c':'.':xs) = pure xs + strip _ = throwM (CabalFileNameParseFail (toFilePath fp)) + +-- | Resolve the directory, if it can't be resolved, warn for the user +-- (purely to be helpful). +resolveDirOrWarn :: + FilePath.FilePath + -> RIO GetPackageFileContext (Maybe (Path Abs Dir)) +resolveDirOrWarn = resolveOrWarn "Directory" f + where + f p x = forgivingResolveDir p x >>= rejectMissingDir + +-- | Make the global autogen dir if Cabal version is new enough. +packageAutogenDir :: Path Abs Dir -> Path Abs Dir +packageAutogenDir distDir = buildDir distDir relDirGlobalAutogen + +-- | Make the autogen dir. +componentAutogenDir :: NamedComponent -> Path Abs Dir -> Path Abs Dir +componentAutogenDir component distDir = + componentBuildDir component distDir relDirAutogen + +-- | Make the build dir. Note that Cabal >= 2.0 uses the +-- 'componentBuildDir' above for some things. +buildDir :: Path Abs Dir -> Path Abs Dir +buildDir distDir = distDir relDirBuild + +-- NOTE: don't export this, only use it for valid paths based on +-- component names. 
+componentNameToDir :: StackUnqualCompName -> Path Rel Dir +componentNameToDir = componentNameToDirNormOrTmp False + +componentNameToDirNormOrTmp :: Bool -> StackUnqualCompName -> Path Rel Dir +componentNameToDirNormOrTmp isTemp name = + fromMaybe (throw $ ComponentNotParsedBug sName) (parseRelDir fullName) + where + fullName = if isTemp then sName <> "/" <> sName <> "-tmp" else sName + sName = unqualCompToString name + +-- | See 'Distribution.Simple.LocalBuildInfo.componentBuildDir' +componentBuildDir :: NamedComponent -> Path Abs Dir -> Path Abs Dir +componentBuildDir component distDir = case component of + CLib -> buildDir distDir + CSubLib name -> buildDir distDir componentNameToDir name + CFlib name -> buildDir distDir componentNameToDir name + CExe name -> buildDir distDir componentNameToDir name + CTest name -> buildDir distDir componentNameToDir name + CBench name -> buildDir distDir componentNameToDir name + +-- Internal helper to define resolveFileOrWarn and resolveDirOrWarn +resolveOrWarn :: + Text + -> (Path Abs Dir -> String -> RIO GetPackageFileContext (Maybe a)) + -> FilePath.FilePath + -> RIO GetPackageFileContext (Maybe a) +resolveOrWarn subject resolver path = do + cwd <- liftIO getCurrentDir + file <- asks (.file) + dir <- asks (parent . (.file)) + result <- resolver dir path + when (isNothing result) $ warnMissingFile subject cwd path file + pure result + +warnMissingFile :: + Text + -> Path Abs Dir + -> FilePath + -> Path Abs File + -> RIO GetPackageFileContext () +warnMissingFile subject cwd path fromFile = + prettyWarnL + [ fromString . T.unpack $ subject -- TODO: needs style? + , flow "listed in" + , maybe (pretty fromFile) pretty (stripProperPrefix cwd fromFile) + , flow "file does not exist:" + , style Dir . 
fromString $ path + ] diff --git a/src/Stack/Config.hs b/src/Stack/Config.hs index cf41dbad9a..151ec82e6a 100644 --- a/src/Stack/Config.hs +++ b/src/Stack/Config.hs @@ -1,430 +1,680 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE TypeFamilies #-} - --- | The general Stack configuration that starts everything off. This should --- be smart to falback if there is no stack.yaml, instead relying on --- whatever files are available. --- --- If there is no stack.yaml, and there is a cabal.config, we --- read in those constraints, and if there's a cabal.sandbox.config, --- we read any constraints from there and also find the package --- database from there, etc. And if there's nothing, we should --- probably default to behaving like cabal, possibly with spitting out --- a warning that "you should run `stk init` to make things better". +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +{-| +Module : Stack.Config +Description : The general Stack configuration. +License : BSD-3-Clause + +The general Stack configuration that starts everything off. This should be smart +to fallback if there is no stack.yaml, instead relying on whatever files are +available. + +If there is no stack.yaml, and there is a cabal.config, we read in those +constraints, and if there's a cabal.sandbox.config, we read any constraints from +there and also find the package database from there, etc. And if there's +nothing, we should probably default to behaving like cabal, possibly with +spitting out a warning that "you should run `stk init` to make things better". 
+-} + module Stack.Config - (loadConfig - ,loadConfigYaml - ,packagesParser - ,getImplicitGlobalProjectDir - ,getSnapshots - ,makeConcreteResolver - ,checkOwnership - ,getInContainer - ,getInNixShell - ,defaultConfigYaml - ,getProjectConfig - ,withBuildConfig - ,withNewLogFunc + ( loadConfig + , loadConfigYaml + , getImplicitGlobalProjectDir + , getSnapshots + , makeConcreteSnapshot + , getRawSnapshot + , checkOwnership + , getInContainer + , getInNixShell + , defaultConfigYaml + , getProjectConfig + , withBuildConfig + , withNewLogFunc + , determineStackRootAndOwnership ) where -import Control.Monad.Extra (firstJustM) -import Stack.Prelude -import Pantry.Internal.AesonExtended -import Data.Array.IArray ((!), (//)) +import Control.Monad.Extra ( firstJustM ) +import Data.Aeson.Types ( Value ) +import Data.Aeson.WarningParser + ( WithJSONWarnings (..), logJSONWarnings ) +import Data.Array.IArray ( (!), (//) ) import qualified Data.ByteString as S -import Data.ByteString.Builder (byteString) -import Data.Coerce (coerce) +import Data.ByteString.Builder ( byteString ) +import Data.Char ( isLatin1 ) +import Data.Coerce ( coerce ) +import qualified Data.Either.Extra as EE import qualified Data.IntMap as IntMap import qualified Data.Map as Map import qualified Data.Map.Merge.Strict as MS import qualified Data.Monoid -import Data.Monoid.Map (MonoidMap(..)) +import Data.Monoid.Map ( MonoidMap (..) 
) +import qualified Data.Set as Set import qualified Data.Text as T import qualified Data.Yaml as Yaml -import Distribution.System (OS (..), Platform (..), buildPlatform, Arch(OtherArch)) -import qualified Distribution.Text -import Distribution.Version (simplifyVersionRange, mkVersion') -import GHC.Conc (getNumProcessors) -import Lens.Micro ((.~)) -import Network.HTTP.StackClient (httpJSON, parseUrlThrow, getResponseBody) -import Options.Applicative (Parser, help, long, metavar, strOption) +import qualified Data.Yaml.Include as YamlInclude +import qualified Distribution.PackageDescription as PD +import Distribution.System + ( Arch (..), OS (..), Platform (..), buildPlatform ) +import qualified Distribution.Text ( simpleParse ) +import Distribution.Version ( simplifyVersionRange ) +import qualified Hpack +import GHC.Conc ( getNumProcessors ) +import Network.HTTP.StackClient + ( httpJSON, parseUrlThrow, getResponseBody ) +import Pantry ( loadSnapshot ) import Path -import Path.Extra (toFilePathNoTrailingSep) -import Path.Find (findInParents) + ( PathException (..), (), parent, parseAbsDir + , parseAbsFile, parseRelDir, stripProperPrefix + ) +import Path.Extra ( toFilePathNoTrailingSep ) +import Path.Find ( findInParents ) import Path.IO -import qualified Paths_stack as Meta -import Stack.Config.Build -import Stack.Config.Docker -import Stack.Config.Nix + ( XdgDirectory (..), canonicalizePath, doesFileExist + , ensureDir, forgivingAbsence, getAppUserDataDir + , getCurrentDir, getXdgDir, resolveDir, resolveDir' + , resolveFile, resolveFile' + ) +import RIO.List ( unzip, intersperse ) +import RIO.Process + ( HasProcessContext (..), ProcessContext, augmentPathMap + , envVarsL + , mkProcessContext + ) +import RIO.Time ( toGregorian ) +import Stack.Build.Haddock ( shouldHaddockDeps ) +import Stack.Config.Build ( buildOptsFromMonoid ) +import Stack.Config.Docker ( dockerOptsFromMonoid ) +import Stack.Config.Nix ( nixOptsFromMonoid ) import Stack.Constants -import 
Stack.Build.Haddock (shouldHaddockDeps) -import Stack.Lock (lockCachedWanted) -import Stack.Storage.Project (initProjectStorage) -import Stack.Storage.User (initUserStorage) -import Stack.SourceMap -import Stack.Types.Build + ( defaultGlobalConfigPath, defaultUserConfigPath + , implicitGlobalProjectDir, inContainerEnvVar + , inNixShellEnvVar, osIsWindows, pantryRootEnvVar + , platformVariantEnvVar, relDirBin, relDirStackWork + , relFileReadmeTxt, relFileStorage, relDirPantry + , relDirPrograms, relDirStackProgName, relDirUpperPrograms + , stackDeveloperModeDefault, stackDotYaml, stackProgName + , stackRootEnvVar, stackWorkEnvVar, stackXdgEnvVar + ) +import qualified Stack.Constants as Constants +import Stack.Lock ( lockCachedWanted ) +import Stack.Prelude +import Stack.SourceMap ( additionalDepPackage, mkProjectPackage ) +import Stack.Storage.Project ( initProjectStorage ) +import Stack.Storage.User ( initUserStorage ) +import Stack.Storage.Util ( handleMigrationException ) +import Stack.Types.AllowNewerDeps ( AllowNewerDeps (..) ) +import Stack.Types.ApplyGhcOptions ( ApplyGhcOptions (..) ) +import Stack.Types.ApplyProgOptions ( ApplyProgOptions (..) ) +import Stack.Types.Build.Exception + ( BuildException (..), BuildPrettyException (..) ) +import Stack.Types.BuildConfig ( BuildConfig (..) ) +import Stack.Types.BuildOpts ( BuildOpts (..) ) +import Stack.Types.ColorWhen ( ColorWhen (..) ) import Stack.Types.Compiler + ( defaultCompilerBindistPath, defaultCompilerRepository + , defaultCompilerTarget + ) import Stack.Types.Config -import Stack.Types.Docker -import Stack.Types.Nix -import Stack.Types.Resolver + ( Config (..), HasConfig (..), askLatestSnapshotUrl + , configProjectRoot, stackRootL, workDirL + ) +import Stack.Types.Config.Exception + ( ConfigException (..), ConfigPrettyException (..) + , ParseAbsolutePathException (..) + ) +import Stack.Types.ConfigMonoid + ( ConfigMonoid (..), parseConfigMonoid ) +import Stack.Types.Casa ( CasaOptsMonoid (..) 
) +import Stack.Types.Docker ( DockerOpts (..), DockerOptsMonoid (..) ) +import Stack.Types.DumpLogs ( DumpLogs (..) ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.MsysEnvironment + ( MsysEnvironment (..), msysEnvArch ) +import Stack.Types.Nix ( NixOpts (..) ) +import Stack.Types.Platform + ( PlatformVariant (..), platformOnlyRelDir ) +import Stack.Types.Project ( Project (..) ) +import qualified Stack.Types.Project as Project ( Project (..) ) +import Stack.Types.ProjectAndConfigMonoid + ( ProjectAndConfigMonoid (..), parseProjectAndConfigMonoid ) +import Stack.Types.ProjectConfig ( ProjectConfig (..) ) +import Stack.Types.PvpBounds ( PvpBounds (..), PvpBoundsType (..) ) +import Stack.Types.Runner + ( HasRunner (..), Runner (..), globalOptsL, terminalL ) +import Stack.Types.Snapshot ( AbstractSnapshot (..), Snapshots (..) ) import Stack.Types.SourceMap + ( CommonPackage (..), DepPackage (..), ProjectPackage (..) + , SMWanted (..) + ) +import Stack.Types.StackYamlLoc ( StackYamlLoc (..) ) +import Stack.Types.UnusedFlags ( FlagSource (..), UnusedFlags (..) ) import Stack.Types.Version -import System.Console.ANSI (hSupportsANSIWithoutEmulation, setSGRCode) -import System.Environment -import System.Info.ShortPathName (getShortPathName) -import System.PosixCompat.Files (fileOwner, getFileStatus) -import System.PosixCompat.User (getEffectiveUserID) -import RIO.List (unzip) -import RIO.PrettyPrint (Style (Highlight, Secondary), - logLevelToStyle, stylesUpdateL, useColorL) -import RIO.PrettyPrint.StylesUpdate (StylesUpdate (..)) -import RIO.PrettyPrint.DefaultStyles (defaultStyles) -import RIO.Process -import RIO.Time (toGregorian) - --- | If deprecated path exists, use it and print a warning. --- Otherwise, return the new path. 
-tryDeprecatedPath - :: HasLogFunc env - => Maybe T.Text -- ^ Description of file for warning (if Nothing, no deprecation warning is displayed) - -> (Path Abs a -> RIO env Bool) -- ^ Test for existence - -> Path Abs a -- ^ New path - -> Path Abs a -- ^ Deprecated path - -> RIO env (Path Abs a, Bool) -- ^ (Path to use, whether it already exists) -tryDeprecatedPath mWarningDesc exists new old = do - newExists <- exists new - if newExists - then return (new, True) - else do - oldExists <- exists old - if oldExists - then do - case mWarningDesc of - Nothing -> return () - Just desc -> - logWarn $ - "Warning: Location of " <> display desc <> " at '" <> - fromString (toFilePath old) <> - "' is deprecated; rename it to '" <> - fromString (toFilePath new) <> - "' instead" - return (old, True) - else return (new, False) + ( IntersectingVersionRange (..), VersionCheck (..) + , stackVersion, withinRange + ) +import System.Console.ANSI ( hNowSupportsANSI, setSGRCode ) +import System.Environment ( getEnvironment, lookupEnv ) +import System.Info.ShortPathName ( getShortPathName ) +import System.PosixCompat.Files ( fileOwner, getFileStatus ) +import System.Posix.User ( getEffectiveUserID ) -- | Get the location of the implicit global project directory. --- If the directory already exists at the deprecated location, its location is returned. --- Otherwise, the new location is returned. -getImplicitGlobalProjectDir - :: HasLogFunc env - => Config -> RIO env (Path Abs Dir) -getImplicitGlobalProjectDir config = - --TEST no warning printed - liftM fst $ tryDeprecatedPath - Nothing - doesDirExist - (implicitGlobalProjectDir stackRoot) - (implicitGlobalProjectDirDeprecated stackRoot) - where - stackRoot = view stackRootL config - --- | Download the 'Snapshots' value from stackage.org. +getImplicitGlobalProjectDir :: HasConfig env => RIO env (Path Abs Dir) +getImplicitGlobalProjectDir = view $ stackRootL . 
to implicitGlobalProjectDir + +-- | Download the t'Snapshots' value from stackage.org. getSnapshots :: HasConfig env => RIO env Snapshots getSnapshots = do - latestUrlText <- askLatestSnapshotUrl - latestUrl <- parseUrlThrow (T.unpack latestUrlText) - logDebug $ "Downloading snapshot versions file from " <> display latestUrlText - result <- httpJSON latestUrl - logDebug "Done downloading and parsing snapshot versions file" - return $ getResponseBody result - --- | Turn an 'AbstractResolver' into a 'Resolver'. -makeConcreteResolver - :: HasConfig env - => AbstractResolver - -> RIO env RawSnapshotLocation -makeConcreteResolver (ARResolver r) = pure r -makeConcreteResolver ar = do - r <- - case ar of - ARResolver r -> assert False $ makeConcreteResolver (ARResolver r) - ARGlobal -> do - config <- view configL - implicitGlobalDir <- getImplicitGlobalProjectDir config - let fp = implicitGlobalDir stackDotYaml - iopc <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp - ProjectAndConfigMonoid project _ <- liftIO iopc - return $ projectResolver project - ARLatestNightly -> RSLSynonym . Nightly . snapshotsNightly <$> getSnapshots - ARLatestLTSMajor x -> do - snapshots <- getSnapshots - case IntMap.lookup x $ snapshotsLts snapshots of - Nothing -> throwString $ "No LTS release found with major version " ++ show x - Just y -> return $ RSLSynonym $ LTS x y - ARLatestLTS -> do - snapshots <- getSnapshots - if IntMap.null $ snapshotsLts snapshots - then throwString "No LTS releases found" - else let (x, y) = IntMap.findMax $ snapshotsLts snapshots - in return $ RSLSynonym $ LTS x y - logInfo $ "Selected resolver: " <> display r - return r - --- | Get the latest snapshot resolver available. 
-getLatestResolver :: HasConfig env => RIO env RawSnapshotLocation -getLatestResolver = do - snapshots <- getSnapshots - let mlts = uncurry LTS <$> - listToMaybe (reverse (IntMap.toList (snapshotsLts snapshots))) - pure $ RSLSynonym $ fromMaybe (Nightly (snapshotsNightly snapshots)) mlts + latestUrlText <- askLatestSnapshotUrl + latestUrl <- parseUrlThrow (T.unpack latestUrlText) + logDebug $ "Downloading snapshot versions file from " <> display latestUrlText + result <- httpJSON latestUrl + logDebug "Done downloading and parsing snapshot versions file" + pure $ getResponseBody result + +-- | Turn an 'AbstractSnapshot' into a 'RawSnapshotLocation'. +makeConcreteSnapshot :: + HasConfig env + => AbstractSnapshot + -> RIO env RawSnapshotLocation +makeConcreteSnapshot (ASSnapshot s) = pure s +makeConcreteSnapshot as = do + s <- + case as of + ASGlobal -> do + fp <- getImplicitGlobalProjectDir <&> ( stackDotYaml) + iopc <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp + ProjectAndConfigMonoid project _ <- liftIO iopc + pure project.snapshot + ASLatestNightly -> + RSLSynonym . Nightly . (.nightly) <$> getSnapshots + ASLatestLTSMajor x -> do + snapshots <- getSnapshots + case IntMap.lookup x snapshots.lts of + Nothing -> throwIO $ NoLTSWithMajorVersion x + Just y -> pure $ RSLSynonym $ LTS x y + ASLatestLTS -> do + snapshots <- getSnapshots + if IntMap.null snapshots.lts + then throwIO NoLTSFound + else let (x, y) = IntMap.findMax snapshots.lts + in pure $ RSLSynonym $ LTS x y + prettyInfoL + [ flow "Selected snapshot:" + , style Current (fromString $ T.unpack $ textDisplay s) <> "." + ] + pure s + +-- | Get the raw snapshot from the global options. +getRawSnapshot :: HasConfig env => RIO env (Maybe RawSnapshot) +getRawSnapshot = do + mASnapshot <- view $ globalOptsL . 
to (.snapshot) + forM mASnapshot $ \aSnapshot -> do + concrete <- makeConcreteSnapshot aSnapshot + loc <- completeSnapshotLocation concrete + loadSnapshot loc + +-- | Get the latest snapshot available. +getLatestSnapshot :: HasConfig env => RIO env RawSnapshotLocation +getLatestSnapshot = do + snapshots <- getSnapshots + let mlts = uncurry LTS <$> + listToMaybe (reverse (IntMap.toList snapshots.lts)) + pure $ RSLSynonym $ fromMaybe (Nightly snapshots.nightly) mlts -- Interprets ConfigMonoid options. +configFromConfigMonoid :: + (HasRunner env, HasTerm env) + => Path Abs Dir -- ^ Stack root, e.g. ~/.stack + -> Path Abs File + -- ^ User-specific global configuration file. + -> Maybe AbstractSnapshot + -> ProjectConfig (Project, Path Abs File) + -> ConfigMonoid + -> (Config -> RIO env a) + -> RIO env a configFromConfigMonoid - :: HasRunner env - => Path Abs Dir -- ^ stack root, e.g. ~/.stack - -> Path Abs File -- ^ user config file path, e.g. ~/.stack/config.yaml - -> Maybe AbstractResolver - -> ProjectConfig (Project, Path Abs File) - -> ConfigMonoid - -> (Config -> RIO env a) - -> RIO env a -configFromConfigMonoid - configStackRoot configUserConfigPath configResolver - configProject ConfigMonoid{..} inner = do - -- If --stack-work is passed, prefer it. Otherwise, if STACK_WORK - -- is set, use that. If neither, use the default ".stack-work" - mstackWorkEnv <- liftIO $ lookupEnv stackWorkEnvVar - let mproject = - case configProject of - PCProject pair -> Just pair - PCGlobalProject -> Nothing - PCNoProject _deps -> Nothing - configAllowLocals = - case configProject of - PCProject _ -> True - PCGlobalProject -> True - PCNoProject _ -> False - configWorkDir0 <- maybe (return relDirStackWork) (liftIO . 
parseRelDir) mstackWorkEnv - let configWorkDir = fromFirst configWorkDir0 configMonoidWorkDir - configLatestSnapshot = fromFirst - "https://s3.amazonaws.com/haddock.stackage.org/snapshots.json" - configMonoidLatestSnapshot - clConnectionCount = fromFirst 8 configMonoidConnectionCount - configHideTHLoading = fromFirstTrue configMonoidHideTHLoading - configPrefixTimestamps = fromFirst False configMonoidPrefixTimestamps - - configGHCVariant = getFirst configMonoidGHCVariant - configCompilerRepository = fromFirst - defaultCompilerRepository - configMonoidCompilerRepository - configGHCBuild = getFirst configMonoidGHCBuild - configInstallGHC = fromFirstTrue configMonoidInstallGHC - configSkipGHCCheck = fromFirstFalse configMonoidSkipGHCCheck - configSkipMsys = fromFirstFalse configMonoidSkipMsys - - configExtraIncludeDirs = configMonoidExtraIncludeDirs - configExtraLibDirs = configMonoidExtraLibDirs - configOverrideGccPath = getFirst configMonoidOverrideGccPath - - -- Only place in the codebase where platform is hard-coded. In theory - -- in the future, allow it to be configured. 
- (Platform defArch defOS) = buildPlatform - arch = fromMaybe defArch - $ getFirst configMonoidArch >>= Distribution.Text.simpleParse - os = defOS - configPlatform = Platform arch os - - configRequireStackVersion = simplifyVersionRange (getIntersectingVersionRange configMonoidRequireStackVersion) - - configCompilerCheck = fromFirst MatchMinor configMonoidCompilerCheck - - case arch of - OtherArch "aarch64" -> return () - OtherArch unk -> logWarn $ "Warning: Unknown value for architecture setting: " <> displayShow unk - _ -> return () - - configPlatformVariant <- liftIO $ - maybe PlatformVariantNone PlatformVariant <$> lookupEnv platformVariantEnvVar - - let configBuild = buildOptsFromMonoid configMonoidBuildOpts - configDocker <- - dockerOptsFromMonoid (fmap fst mproject) configResolver configMonoidDockerOpts - configNix <- nixOptsFromMonoid configMonoidNixOpts os - - configSystemGHC <- - case (getFirst configMonoidSystemGHC, nixEnable configNix) of - (Just False, True) -> - throwM NixRequiresSystemGhc - _ -> - return - (fromFirst - (dockerEnable configDocker || nixEnable configNix) - configMonoidSystemGHC) - - when (isJust configGHCVariant && configSystemGHC) $ - throwM ManualGHCVariantSettingsAreIncompatibleWithSystemGHC - - rawEnv <- liftIO getEnvironment - pathsEnv <- either throwM return - $ augmentPathMap (map toFilePath configMonoidExtraPath) - (Map.fromList (map (T.pack *** T.pack) rawEnv)) - origEnv <- mkProcessContext pathsEnv - let configProcessContextSettings _ = return origEnv - - configLocalProgramsBase <- case getFirst configMonoidLocalProgramsBase of - Nothing -> getDefaultLocalProgramsBase configStackRoot configPlatform origEnv - Just path -> return path - let localProgramsFilePath = toFilePath configLocalProgramsBase - when (osIsWindows && ' ' `elem` localProgramsFilePath) $ do - ensureDir configLocalProgramsBase - -- getShortPathName returns the long path name when a short name does not - -- exist. 
- shortLocalProgramsFilePath <- - liftIO $ getShortPathName localProgramsFilePath - when (' ' `elem` shortLocalProgramsFilePath) $ do - logError $ "Stack's 'programs' path contains a space character and " <> - "has no alternative short ('8 dot 3') name. This will cause " <> - "problems with packages that use the GNU project's 'configure' " <> - "shell script. Use the 'local-programs-path' configuration option " <> - "to specify an alternative path. The current path is: " <> - display (T.pack localProgramsFilePath) - platformOnlyDir <- runReaderT platformOnlyRelDir (configPlatform, configPlatformVariant) - let configLocalPrograms = configLocalProgramsBase platformOnlyDir - - configLocalBin <- - case getFirst configMonoidLocalBinPath of - Nothing -> do - localDir <- getAppUserDataDir "local" - return $ localDir relDirBin - Just userPath -> - (case mproject of - -- Not in a project - Nothing -> resolveDir' userPath - -- Resolves to the project dir and appends the user path if it is relative - Just (_, configYaml) -> resolveDir (parent configYaml) userPath) - -- TODO: Either catch specific exceptions or add a - -- parseRelAsAbsDirMaybe utility and use it along with - -- resolveDirMaybe. 
- `catchAny` - const (throwIO (NoSuchDirectory userPath)) - - configJobs <- - case getFirst configMonoidJobs of - Nothing -> liftIO getNumProcessors - Just i -> return i - let configConcurrentTests = fromFirst True configMonoidConcurrentTests - - let configTemplateParams = configMonoidTemplateParameters - configScmInit = getFirst configMonoidScmInit - configCabalConfigOpts = coerce configMonoidCabalConfigOpts - configGhcOptionsByName = coerce configMonoidGhcOptionsByName - configGhcOptionsByCat = coerce configMonoidGhcOptionsByCat - configSetupInfoLocations = configMonoidSetupInfoLocations - configSetupInfoInline = configMonoidSetupInfoInline - configPvpBounds = fromFirst (PvpBounds PvpBoundsNone False) configMonoidPvpBounds - configModifyCodePage = fromFirstTrue configMonoidModifyCodePage - configRebuildGhcOptions = fromFirstFalse configMonoidRebuildGhcOptions - configApplyGhcOptions = fromFirst AGOLocals configMonoidApplyGhcOptions - configAllowNewer = fromFirst False configMonoidAllowNewer - configDefaultTemplate = getFirst configMonoidDefaultTemplate - configDumpLogs = fromFirst DumpWarningLogs configMonoidDumpLogs - configSaveHackageCreds = fromFirst True configMonoidSaveHackageCreds - configHackageBaseUrl = fromFirst "https://hackage.haskell.org/" configMonoidHackageBaseUrl - configHideSourcePaths = fromFirstTrue configMonoidHideSourcePaths - configRecommendUpgrade = fromFirstTrue configMonoidRecommendUpgrade - - configAllowDifferentUser <- - case getFirst configMonoidAllowDifferentUser of - Just True -> return True - _ -> getInContainer - - configRunner' <- view runnerL - - useAnsi <- liftIO $ fromMaybe True <$> - hSupportsANSIWithoutEmulation stderr - - let stylesUpdate' = (configRunner' ^. 
stylesUpdateL) <> - configMonoidStyles - useColor' = runnerUseColor configRunner' - mUseColor = do - colorWhen <- getFirst configMonoidColorWhen - return $ case colorWhen of - ColorNever -> False - ColorAlways -> True - ColorAuto -> useAnsi - useColor'' = fromMaybe useColor' mUseColor - configRunner'' = configRunner' - & processContextL .~ origEnv - & stylesUpdateL .~ stylesUpdate' - & useColorL .~ useColor'' - go = runnerGlobalOpts configRunner' - - hsc <- - case getFirst configMonoidPackageIndices of - Nothing -> pure defaultHackageSecurityConfig - Just [hsc] -> pure hsc - Just x -> error $ "When overriding the default package index, you must provide exactly one value, received: " ++ show x - mpantryRoot <- liftIO $ lookupEnv "PANTRY_ROOT" - pantryRoot <- - case mpantryRoot of - Just dir -> - case parseAbsDir dir of - Nothing -> throwString $ "Failed to parse PANTRY_ROOT environment variable (expected absolute directory): " ++ show dir - Just x -> pure x - Nothing -> pure $ configStackRoot relDirPantry - - let snapLoc = - case getFirst configMonoidSnapshotLocation of - Nothing -> defaultSnapshotLocation - Just addr -> customSnapshotLocation - where - customSnapshotLocation (LTS x y) = - mkRSLUrl $ addr' - <> "/lts/" <> display x - <> "/" <> display y <> ".yaml" - customSnapshotLocation (Nightly date) = - let (year, month, day) = toGregorian date - in mkRSLUrl $ addr' - <> "/nightly/" - <> display year - <> "/" <> display month - <> "/" <> display day <> ".yaml" - mkRSLUrl builder = RSLUrl (utf8BuilderToText builder) Nothing - addr' = display $ T.dropWhileEnd (=='/') addr - - let configStackDeveloperMode = fromFirst stackDeveloperModeDefault configMonoidStackDeveloperMode - - withNewLogFunc go useColor'' stylesUpdate' $ \logFunc -> do - let configRunner = configRunner'' & logFuncL .~ logFunc - withLocalLogFunc logFunc $ withPantryConfig - pantryRoot - hsc - (maybe HpackBundled HpackCommand $ getFirst configMonoidOverrideHpack) - clConnectionCount - (fromFirst 
defaultCasaRepoPrefix configMonoidCasaRepoPrefix) - defaultCasaMaxPerRequest - snapLoc - (\configPantryConfig -> initUserStorage - (configStackRoot relFileStorage) - (\configUserStorage -> inner Config {..})) + stackRoot + userGlobalConfigFile + snapshot + project + configMonoid + inner + = do + -- If --stack-work is passed, prefer it. Otherwise, if STACK_WORK + -- is set, use that. If neither, use the default ".stack-work" + mstackWorkEnv <- liftIO $ lookupEnv stackWorkEnvVar + let mproject = + case project of + PCProject pair -> Just pair + PCGlobalProject -> Nothing + PCNoProject _deps -> Nothing + allowLocals = + case project of + PCProject _ -> True + PCGlobalProject -> True + PCNoProject _ -> False + configWorkDir0 <- + let parseStackWorkEnv x = + catch + (parseRelDir x) + ( \e -> case e of + InvalidRelDir _ -> + prettyThrowIO $ StackWorkEnvNotRelativeDir x + _ -> throwIO e + ) + in maybe (pure relDirStackWork) (liftIO . parseStackWorkEnv) mstackWorkEnv + let workDir = fromFirst configWorkDir0 configMonoid.workDir + -- The history of the URL below is as follows: + -- + -- * Before Stack 1.3.0 it was + -- https://www.stackage.org/download/snapshots.json. + -- * From Stack 1.3.0 to 2.15.3 it was + -- https://s3.amazonaws.com/haddock.stackage.org/snapshots.json. The + -- change was made because S3 was expected to have greater uptime than + -- stackage.org. + -- * In early 2024, the Stackage project was handed over to the Haskell + -- Foundation. Following that handover, the URL below was considered + -- the most reliable source of the file in question. 
+ latestSnapshot = fromFirst + "https://stackage-haddock.haskell.org/snapshots.json" + configMonoid.latestSnapshot + recentSnapshots = fromFirst + "https://www.stackage.org/api/v1/snapshots" + configMonoid.recentSnapshots + clConnectionCount = fromFirst 8 configMonoid.connectionCount + hideTHLoading = fromFirstTrue configMonoid.hideTHLoading + prefixTimestamps = fromFirst False configMonoid.prefixTimestamps + ghcVariant = getFirst configMonoid.ghcVariant + compilerRepository = fromFirst + defaultCompilerRepository + configMonoid.compilerRepository + compilerTarget = fromFirst + defaultCompilerTarget + configMonoid.compilerTarget + compilerBindistPath = fromFirst + defaultCompilerBindistPath + configMonoid.compilerBindistPath + ghcBuild = getFirst configMonoid.ghcBuild + installGHC = fromFirstTrue configMonoid.installGHC + installMsys = fromFirst installGHC configMonoid.installMsys + skipGHCCheck = fromFirstFalse configMonoid.skipGHCCheck + skipMsys = fromFirstFalse configMonoid.skipMsys + defMsysEnvironment = case platform of + Platform I386 Windows -> Just MINGW32 + Platform X86_64 Windows -> Just MINGW64 + _ -> Nothing + extraIncludeDirs = configMonoid.extraIncludeDirs + extraLibDirs = configMonoid.extraLibDirs + customPreprocessorExts = configMonoid.customPreprocessorExts + overrideGccPath = getFirst configMonoid.overrideGccPath + -- Only place in the codebase where platform is hard-coded. In theory in + -- the future, allow it to be configured. + (Platform defArch defOS) = buildPlatform + arch = fromMaybe defArch + $ getFirst configMonoid.arch >>= Distribution.Text.simpleParse + os = defOS + platform = Platform arch os + requireStackVersion = simplifyVersionRange + configMonoid.requireStackVersion.intersectingVersionRange + compilerCheck = fromFirst MatchMinor configMonoid.compilerCheck + msysEnvironment <- case defMsysEnvironment of + -- Ignore the configuration setting if there is no default for the + -- platform. 
+ Nothing -> pure Nothing + Just defMsysEnv -> do + let msysEnv = fromFirst defMsysEnv configMonoid.msysEnvironment + if msysEnvArch msysEnv == arch + then pure $ Just msysEnv + else prettyThrowM $ BadMsysEnvironment msysEnv arch + platformVariant <- liftIO $ + maybe PlatformVariantNone PlatformVariant <$> lookupEnv platformVariantEnvVar + let build = buildOptsFromMonoid configMonoid.buildOpts + docker <- + dockerOptsFromMonoid (fmap fst mproject) snapshot configMonoid.dockerOpts + nix <- nixOptsFromMonoid configMonoid.nixOpts os + systemGHC <- + case (getFirst configMonoid.systemGHC, nix.enable) of + (Just False, True) -> + throwM NixRequiresSystemGhc + _ -> + pure + (fromFirst + (docker.enable || nix.enable) + configMonoid.systemGHC) + when (isJust ghcVariant && systemGHC) $ + throwM ManualGHCVariantSettingsAreIncompatibleWithSystemGHC + rawEnv <- liftIO getEnvironment + pathsEnv <- either throwM pure + $ augmentPathMap (map toFilePath configMonoid.extraPath) + (Map.fromList (map (T.pack *** T.pack) rawEnv)) + origEnv <- mkProcessContext pathsEnv + let processContextSettings _ = pure origEnv + localProgramsBase <- case getFirst configMonoid.localProgramsBase of + Nothing -> getDefaultLocalProgramsBase stackRoot platform origEnv + Just path -> pure path + let localProgramsFilePath = toFilePath localProgramsBase + spaceInLocalProgramsPath = ' ' `elem` localProgramsFilePath + nonLatin1InLocalProgramsPath = not $ all isLatin1 localProgramsFilePath + problematicLocalProgramsPath = + nonLatin1InLocalProgramsPath + || (osIsWindows && spaceInLocalProgramsPath) + when problematicLocalProgramsPath $ do + let msgSpace = + [ flow "It contains a space character. This will prevent building \ + \with GHC 9.4.1 or later." + | osIsWindows && spaceInLocalProgramsPath + ] + msgNoShort <- if osIsWindows && spaceInLocalProgramsPath + then do + ensureDir localProgramsBase + -- getShortPathName returns the long path name when a short name does not + -- exist. 
+ shortLocalProgramsFilePath <- + liftIO $ getShortPathName localProgramsFilePath + pure [ flow "It also has no alternative short ('8 dot 3') name. This \ + \will cause problems with packages that use the GNU \ + \project's 'configure' shell script." + | ' ' `elem` shortLocalProgramsFilePath + ] + else pure [] + let msgNonLatin1 = if nonLatin1InLocalProgramsPath + then + [ flow "It contains at least one non-ISO/IEC 8859-1 (Latin-1) \ + \character (Unicode code point > 255). This will cause \ + \problems with packages that build using the" + , style Shell "hsc2hs" + , flow "tool with its default template" + , style Shell "template-hsc.h" <> "." + ] + else [] + prettyWarn $ + "[S-8432]" + <> line + <> fillSep + ( [ flow "Stack's 'programs' path is" + , style File (fromString localProgramsFilePath) <> "." + ] + <> msgSpace + <> msgNoShort + <> msgNonLatin1 + ) + <> blankLine + <> fillSep + [ flow "To avoid such problems, use the" + , style Shell "local-programs-path" + , flow "non-project specific configuration option to specify an \ + \alternative path without those characteristics." + ] + <> line + platformOnlyDir <- + runReaderT platformOnlyRelDir (platform, platformVariant) + let localPrograms = localProgramsBase platformOnlyDir + localBin <- + case getFirst configMonoid.localBinPath of + Nothing -> do + localDir <- getAppUserDataDir "local" + pure $ localDir relDirBin + Just userPath -> + (case mproject of + -- Not in a project + Nothing -> resolveDir' userPath + -- Resolves to the project dir and appends the user path if it is + -- relative + Just (_, configYaml) -> resolveDir (parent configYaml) userPath) + -- TODO: Either catch specific exceptions or add a + -- parseRelAsAbsDirMaybe utility and use it along with + -- resolveDirMaybe. 
+ `catchAny` + const (throwIO (NoSuchDirectory userPath)) + fileWatchHook <- + case getFirst configMonoid.fileWatchHook of + Nothing -> pure Nothing + Just userPath -> + ( case mproject of + -- Not in a project + Nothing -> Just <$> resolveFile' userPath + -- Resolves to the project dir and appends the user path if it is + -- relative + Just (_, configYaml) -> + Just <$> resolveFile (parent configYaml) userPath + ) + -- TODO: Either catch specific exceptions or add a + -- parseRelAsAbsFileMaybe utility and use it along with + -- resolveFileMaybe. + `catchAny` + const (throwIO (NoSuchFile userPath)) + jobs <- + case getFirst configMonoid.jobs of + Nothing -> liftIO getNumProcessors + Just i -> pure i + let concurrentTests = + fromFirst True configMonoid.concurrentTests + templateParams = configMonoid.templateParameters + scmInit = getFirst configMonoid.scmInit + cabalConfigOpts = coerce configMonoid.cabalConfigOpts + ghcOptionsByName = coerce configMonoid.ghcOptionsByName + ghcOptionsByCat = coerce configMonoid.ghcOptionsByCat + setupInfoLocations = configMonoid.setupInfoLocations + setupInfoInline = configMonoid.setupInfoInline + pvpBounds = + fromFirst (PvpBounds PvpBoundsNone False) configMonoid.pvpBounds + modifyCodePage = fromFirstTrue configMonoid.modifyCodePage + rebuildGhcOptions = + fromFirstFalse configMonoid.rebuildGhcOptions + applyGhcOptions = + fromFirst AGOLocals configMonoid.applyGhcOptions + applyProgOptions = + fromFirst APOLocals configMonoid.applyProgOptions + allowNewer = configMonoid.allowNewer + allowNewerDeps = coerce configMonoid.allowNewerDeps + defaultInitSnapshot <- do + root <- getCurrentDir + let resolve = (First <$>) . traverse (resolvePaths (Just root)) . 
getFirst + resolve configMonoid.defaultInitSnapshot + let defaultTemplate = getFirst configMonoid.defaultTemplate + dumpLogs = fromFirst DumpWarningLogs configMonoid.dumpLogs + saveHackageCreds = configMonoid.saveHackageCreds + hackageBaseUrl = + fromFirst Constants.hackageBaseUrl configMonoid.hackageBaseUrl + hideSourcePaths = fromFirstTrue configMonoid.hideSourcePaths + recommendStackUpgrade = fromFirstTrue configMonoid.recommendStackUpgrade + notifyIfNixOnPath = fromFirstTrue configMonoid.notifyIfNixOnPath + notifyIfGhcUntested = fromFirstTrue configMonoid.notifyIfGhcUntested + notifyIfCabalUntested = fromFirstTrue configMonoid.notifyIfCabalUntested + notifyIfArchUnknown = fromFirstTrue configMonoid.notifyIfArchUnknown + notifyIfNoRunTests = fromFirstTrue configMonoid.notifyIfNoRunTests + notifyIfNoRunBenchmarks = + fromFirstTrue configMonoid.notifyIfNoRunBenchmarks + notifyIfBaseNotBoot = + fromFirstTrue configMonoid.notifyIfBaseNotBoot + noRunCompile = fromFirstFalse configMonoid.noRunCompile + allowDifferentUser <- + case getFirst configMonoid.allowDifferentUser of + Just True -> pure True + _ -> getInContainer + configRunner' <- view runnerL + useAnsi <- liftIO $ hNowSupportsANSI stderr + let stylesUpdate' = (configRunner' ^. 
stylesUpdateL) <> + configMonoid.styles + useColor' = configRunner'.useColor + mUseColor = + getFirst configMonoid.colorWhen <&> \case + ColorNever -> False + ColorAlways -> True + ColorAuto -> useAnsi + useColor'' = fromMaybe useColor' mUseColor + configRunner'' = configRunner' + & processContextL .~ origEnv + & stylesUpdateL .~ stylesUpdate' + & useColorL .~ useColor'' + go = configRunner'.globalOpts + pic = fromFirst defaultPackageIndexConfig configMonoid.packageIndex + pantryRoot <- liftIO (lookupEnv pantryRootEnvVar) >>= \case + Just dir -> + case parseAbsDir dir of + Nothing -> throwIO $ ParseAbsolutePathException pantryRootEnvVar dir + Just x -> pure x + Nothing -> pure $ stackRoot relDirPantry + let snapLoc = + case getFirst configMonoid.snapshotLocation of + Nothing -> defaultSnapshotLocation + Just addr -> + customSnapshotLocation + where + customSnapshotLocation (LTS x y) = + mkRSLUrl $ addr' + <> "/lts/" <> display x + <> "/" <> display y <> ".yaml" + customSnapshotLocation (Nightly date) = + let (year, month, day) = toGregorian date + in mkRSLUrl $ addr' + <> "/nightly/" + <> display year + <> "/" <> display month + <> "/" <> display day <> ".yaml" + mkRSLUrl builder = RSLUrl (utf8BuilderToText builder) Nothing + addr' = display $ T.dropWhileEnd (=='/') addr + globalHintsLoc <- case getFirst configMonoid.globalHintsLocation of + Nothing -> pure defaultGlobalHintsLocation + Just unresolverGlobalHintsLoc -> do + resolvedGlobalHintsLocation <- + resolvePaths (Just stackRoot) unresolverGlobalHintsLoc + pure $ const resolvedGlobalHintsLocation + let stackDeveloperMode = fromFirst + stackDeveloperModeDefault + configMonoid.stackDeveloperMode + hpackForce = if fromFirstFalse configMonoid.hpackForce + then Hpack.Force + else Hpack.NoForce + casa = + if fromFirstTrue configMonoid.casaOpts.enable + then + let casaRepoPrefix = fromFirst + (fromFirst defaultCasaRepoPrefix configMonoid.casaRepoPrefix) + configMonoid.casaOpts.repoPrefix + casaMaxKeysPerRequest = 
fromFirst + defaultCasaMaxPerRequest + configMonoid.casaOpts.maxKeysPerRequest + in Just (casaRepoPrefix, casaMaxKeysPerRequest) + else Nothing + withNewLogFunc go useColor'' stylesUpdate' $ \logFunc -> do + let runner = configRunner'' & logFuncL .~ logFunc + withLocalLogFunc logFunc $ handleMigrationException $ do + logDebug $ case casa of + Nothing -> "Use of Casa server disabled." + Just (repoPrefix, maxKeys) -> + "Use of Casa server enabled: (" + <> fromString (show repoPrefix) + <> ", " + <> fromString (show maxKeys) + <> ")." + withPantryConfig' + pantryRoot + pic + (maybe HpackBundled HpackCommand $ getFirst configMonoid.overrideHpack) + hpackForce + clConnectionCount + casa + snapLoc + globalHintsLoc + (\pantryConfig -> initUserStorage + (stackRoot relFileStorage) + ( \userStorage -> inner Config + { workDir + , userGlobalConfigFile + , build + , docker + , nix + , processContextSettings + , localProgramsBase + , localPrograms + , hideTHLoading + , prefixTimestamps + , platform + , platformVariant + , ghcVariant + , ghcBuild + , latestSnapshot + , recentSnapshots + , systemGHC + , installGHC + , installMsys + , skipGHCCheck + , skipMsys + , msysEnvironment + , compilerCheck + , compilerRepository + , compilerTarget + , compilerBindistPath + , localBin + , fileWatchHook + , requireStackVersion + , jobs + , overrideGccPath + , extraIncludeDirs + , extraLibDirs + , customPreprocessorExts + , concurrentTests + , templateParams + , scmInit + , ghcOptionsByName + , ghcOptionsByCat + , cabalConfigOpts + , setupInfoLocations + , setupInfoInline + , pvpBounds + , modifyCodePage + , rebuildGhcOptions + , applyGhcOptions + , applyProgOptions + , allowNewer + , allowNewerDeps + , defaultInitSnapshot + , defaultTemplate + , allowDifferentUser + , dumpLogs + , project + , allowLocals + , saveHackageCreds + , hackageBaseUrl + , runner + , pantryConfig + , stackRoot + , snapshot + , userStorage + , hideSourcePaths + , recommendStackUpgrade + , notifyIfNixOnPath + , 
notifyIfGhcUntested + , notifyIfCabalUntested + , notifyIfArchUnknown + , notifyIfNoRunTests + , notifyIfNoRunBenchmarks + , notifyIfBaseNotBoot + , noRunCompile + , stackDeveloperMode + , casa + } + ) + ) -- | Runs the provided action with the given 'LogFunc' in the environment withLocalLogFunc :: HasLogFunc env => LogFunc -> RIO env a -> RIO env a withLocalLogFunc logFunc = local (set logFuncL logFunc) --- | Runs the provided action with a new 'LogFunc', given a 'StylesUpdate'. -withNewLogFunc :: MonadUnliftIO m - => GlobalOpts - -> Bool -- ^ Use color - -> StylesUpdate - -> (LogFunc -> m a) - -> m a +-- | Runs the provided action with a new 'LogFunc', given a t'StylesUpdate'. +withNewLogFunc :: + MonadUnliftIO m + => GlobalOpts + -> Bool -- ^ Use color + -> StylesUpdate + -> (LogFunc -> m a) + -> m a withNewLogFunc go useColor (StylesUpdate update) inner = do logOptions0 <- logOptionsHandle stderr False let logOptions @@ -432,10 +682,10 @@ withNewLogFunc go useColor (StylesUpdate update) inner = do $ setLogLevelColors logLevelColors $ setLogSecondaryColor secondaryColor $ setLogAccentColors (const highlightColor) - $ setLogUseTime (globalTimeInLog go) - $ setLogMinLevel (globalLogLevel go) - $ setLogVerboseFormat (globalLogLevel go <= LevelDebug) - $ setLogTerminal (globalTerminal go) + $ setLogUseTime go.timeInLog + $ setLogMinLevel go.logLevel + $ setLogVerboseFormat (go.logLevel <= LevelDebug) + $ setLogTerminal go.terminal logOptions0 withLogFunc logOptions inner where @@ -447,521 +697,617 @@ withNewLogFunc go useColor (StylesUpdate update) inner = do highlightColor = fromString $ setSGRCode $ snd $ styles ! Highlight -- | Get the default location of the local programs directory. 
-getDefaultLocalProgramsBase :: MonadThrow m - => Path Abs Dir - -> Platform - -> ProcessContext - -> m (Path Abs Dir) +getDefaultLocalProgramsBase :: + MonadThrow m + => Path Abs Dir + -> Platform + -> ProcessContext + -> m (Path Abs Dir) getDefaultLocalProgramsBase configStackRoot configPlatform override = - let - defaultBase = configStackRoot relDirPrograms - in - case configPlatform of - -- For historical reasons, on Windows a subdirectory of LOCALAPPDATA is - -- used instead of a subdirectory of STACK_ROOT. Unifying the defaults would - -- mean that Windows users would manually have to move data from the old - -- location to the new one, which is undesirable. - Platform _ Windows -> - case Map.lookup "LOCALAPPDATA" $ view envVarsL override of - Just t -> - case parseAbsDir $ T.unpack t of - Nothing -> throwM $ stringException ("Failed to parse LOCALAPPDATA environment variable (expected absolute directory): " ++ show t) - Just lad -> - return $ lad relDirUpperPrograms relDirStackProgName - Nothing -> return defaultBase - _ -> return defaultBase + case configPlatform of + -- For historical reasons, on Windows a subdirectory of LOCALAPPDATA is + -- used instead of a subdirectory of STACK_ROOT. Unifying the defaults would + -- mean that Windows users would manually have to move data from the old + -- location to the new one, which is undesirable. + Platform _ Windows -> do + let envVars = view envVarsL override + case T.unpack <$> Map.lookup "LOCALAPPDATA" envVars of + Just t -> case parseAbsDir t of + Nothing -> + throwM $ ParseAbsolutePathException "LOCALAPPDATA" t + Just lad -> + pure $ lad relDirUpperPrograms relDirStackProgName + Nothing -> pure defaultBase + _ -> pure defaultBase + where + defaultBase = configStackRoot relDirPrograms -- | Load the configuration, using current directory, environment variables, -- and defaults as necessary. 
-loadConfig :: HasRunner env => (Config -> RIO env a) -> RIO env a +loadConfig :: + (HasRunner env, HasTerm env) + => (Config -> RIO env a) + -> RIO env a loadConfig inner = do - mstackYaml <- view $ globalOptsL.to globalStackYaml - mproject <- loadProjectConfig mstackYaml - mresolver <- view $ globalOptsL.to globalResolver - configArgs <- view $ globalOptsL.to globalConfigMonoid - (stackRoot, userOwnsStackRoot) <- determineStackRootAndOwnership configArgs - - let (mproject', addConfigMonoid) = - case mproject of - PCProject (proj, fp, cm) -> (PCProject (proj, fp), (cm:)) - PCGlobalProject -> (PCGlobalProject, id) - PCNoProject deps -> (PCNoProject deps, id) - - userConfigPath <- getDefaultUserConfigPath stackRoot - extraConfigs0 <- getExtraConfigs userConfigPath >>= - mapM (\file -> loadConfigYaml (parseConfigMonoid (parent file)) file) - let extraConfigs = - -- non-project config files' existence of a docker section should never default docker - -- to enabled, so make it look like they didn't exist - map (\c -> c {configMonoidDockerOpts = - (configMonoidDockerOpts c) {dockerMonoidDefaultEnable = Any False}}) - extraConfigs0 - - let withConfig = - configFromConfigMonoid - stackRoot - userConfigPath - mresolver - mproject' - (mconcat $ configArgs : addConfigMonoid extraConfigs) - - withConfig $ \config -> do - unless (mkVersion' Meta.version `withinRange` configRequireStackVersion config) - (throwM (BadStackVersionException (configRequireStackVersion config))) - unless (configAllowDifferentUser config) $ do - unless userOwnsStackRoot $ - throwM (UserDoesn'tOwnDirectory stackRoot) - forM_ (configProjectRoot config) $ \dir -> - checkOwnership (dir configWorkDir config) - inner config - --- | Load the build configuration, adds build-specific values to config loaded by @loadConfig@. --- values. -withBuildConfig - :: RIO BuildConfig a - -> RIO Config a + mstackYaml <- view $ globalOptsL . 
to (.stackYaml)
+  mproject <- loadProjectConfig mstackYaml
+  mASnapshot <- view $ globalOptsL . to (.snapshot)
+  configArgs <- view $ globalOptsL . to (.configMonoid)
+  (configRoot, stackRoot, userOwnsStackRoot) <-
+    determineStackRootAndOwnership configArgs
+
+  let (mproject', addConfigMonoid) =
+        case mproject of
+          PCProject (proj, fp, cm) -> (PCProject (proj, fp), (cm:))
+          PCGlobalProject -> (PCGlobalProject, id)
+          PCNoProject deps -> (PCNoProject deps, id)
+
+  userConfigPath <- getDefaultUserConfigPath configRoot
+  extraConfigs0 <- getExtraConfigs userConfigPath >>=
+    mapM (\file -> loadConfigYaml (parseConfigMonoid (parent file)) file)
+  let extraConfigs =
+        -- non-project config files' existence of a docker section should never
+        -- default docker to enabled, so make it look like they didn't exist
+        map
+          (\c -> c {dockerOpts = c.dockerOpts { defaultEnable = Any False }})
+          extraConfigs0
+
+  let withConfig =
+        configFromConfigMonoid
+          stackRoot
+          userConfigPath
+          mASnapshot
+          mproject'
+          (mconcat $ configArgs : addConfigMonoid extraConfigs)
+
+  withConfig $ \config -> do
+    let Platform arch _ = config.platform
+    case arch of
+      OtherArch unknownArch
+        | config.notifyIfArchUnknown ->
+            prettyWarnL
+              [ flow "Unknown value for architecture setting:"
+              , style Shell (fromString unknownArch) <> "."
+              , flow "To mute this message in future, set"
+              , style Shell (flow "notify-if-arch-unknown: false")
+              , flow "in Stack's configuration."
+              ]
+      _ -> pure ()
+    unless (stackVersion `withinRange` config.requireStackVersion)
+      (throwM (BadStackVersionException config.requireStackVersion))
+    unless config.allowDifferentUser $ do
+      unless userOwnsStackRoot $
+        throwM (UserDoesn'tOwnDirectory stackRoot)
+      forM_ (configProjectRoot config) $ \dir ->
+        checkOwnership (dir </> config.workDir)
+    inner config
+
+-- | Load the build configuration, adds build-specific values to config loaded
+-- by @loadConfig@.
+withBuildConfig :: RIO BuildConfig a -> RIO Config a withBuildConfig inner = do - config <- ask - - -- If provided, turn the AbstractResolver from the command line - -- into a Resolver that can be used below. - - -- The configResolver and mcompiler are provided on the command - -- line. In order to properly deal with an AbstractResolver, we - -- need a base directory (to deal with custom snapshot relative - -- paths). We consider the current working directory to be the - -- correct base. Let's calculate the mresolver first. - mresolver <- forM (configResolver config) $ \aresolver -> do - logDebug ("Using resolver: " <> display aresolver <> " specified on command line") - makeConcreteResolver aresolver - - (project', stackYamlFP) <- case configProject config of - PCProject (project, fp) -> do - forM_ (projectUserMsg project) (logWarn . fromString) - return (project, fp) - PCNoProject extraDeps -> do - p <- - case mresolver of - Nothing -> throwIO NoResolverWhenUsingNoProject - Just _ -> getEmptyProject mresolver extraDeps - return (p, configUserConfigPath config) - PCGlobalProject -> do - logDebug "Run from outside a project, using implicit global project config" - destDir <- getImplicitGlobalProjectDir config - let dest :: Path Abs File - dest = destDir stackDotYaml - dest' :: FilePath - dest' = toFilePath dest - ensureDir destDir - exists <- doesFileExist dest - if exists - then do - iopc <- loadConfigYaml (parseProjectAndConfigMonoid destDir) dest - ProjectAndConfigMonoid project _ <- liftIO iopc - when (view terminalL config) $ - case configResolver config of - Nothing -> - logDebug $ - "Using resolver: " <> - display (projectResolver project) <> - " from implicit global project's config file: " <> - fromString dest' - Just _ -> return () - return (project, dest) - else do - logInfo ("Writing implicit global project config file to: " <> fromString dest') - logInfo "Note: You can change the snapshot via the resolver field there." 
- p <- getEmptyProject mresolver [] - liftIO $ do - writeBinaryFileAtomic dest $ byteString $ S.concat - [ "# This is the implicit global project's config file, which is only used when\n" - , "# 'stack' is run outside of a real project. Settings here do _not_ act as\n" - , "# defaults for all projects. To change stack's default settings, edit\n" - , "# '", encodeUtf8 (T.pack $ toFilePath $ configUserConfigPath config), "' instead.\n" - , "#\n" - , "# For more information about stack's configuration, see\n" - , "# http://docs.haskellstack.org/en/stable/yaml_configuration/\n" - , "#\n" - , Yaml.encode p] - writeBinaryFileAtomic (parent dest relFileReadmeTxt) - "This is the implicit global project, which is used only when 'stack' is run\n\ - \outside of a real project.\n" - return (p, dest) - mcompiler <- view $ globalOptsL.to globalCompiler - let project = project' - { projectCompiler = mcompiler <|> projectCompiler project' - , projectResolver = fromMaybe (projectResolver project') mresolver - } - extraPackageDBs <- mapM resolveDir' (projectExtraPackageDBs project) - - wanted <- lockCachedWanted stackYamlFP (projectResolver project) $ - fillProjectWanted stackYamlFP config project - - -- Unfortunately redoes getProjectWorkDir, since we don't have a BuildConfig yet - workDir <- view workDirL - let projectStorageFile = parent stackYamlFP workDir relFileStorage - - initProjectStorage projectStorageFile $ \projectStorage -> do - let bc = BuildConfig - { bcConfig = config - , bcSMWanted = wanted - , bcExtraPackageDBs = extraPackageDBs - , bcStackYaml = stackYamlFP - , bcCurator = projectCurator project - , bcProjectStorage = projectStorage - } - runRIO bc inner - where - getEmptyProject :: Maybe RawSnapshotLocation -> [PackageIdentifierRevision] -> RIO Config Project - getEmptyProject mresolver extraDeps = do - r <- case mresolver of - Just resolver -> do - logInfo ("Using resolver: " <> display resolver <> " specified on command line") - return resolver - Nothing -> do 
- r'' <- getLatestResolver - logInfo ("Using latest snapshot resolver: " <> display r'') - return r'' - return Project - { projectUserMsg = Nothing - , projectPackages = [] - , projectDependencies = map (RPLImmutable . flip RPLIHackage Nothing) extraDeps - , projectFlags = mempty - , projectResolver = r - , projectCompiler = Nothing - , projectExtraPackageDBs = [] - , projectCurator = Nothing - , projectDropPackages = mempty + config <- ask + + -- If provided, turn the AbstractSnapshot from the command line into a + -- snapshot that can be used below. + + -- The snapshot and mcompiler are provided on the command line. In order + -- to properly deal with an AbstractSnapshot, we need a base directory (to + -- deal with custom snapshot relative paths). We consider the current working + -- directory to be the correct base. Let's calculate the mSnapshot first. + mSnapshot <- forM config.snapshot $ \aSnapshot -> do + logDebug $ + "Using snapshot: " + <> display aSnapshot + <> " specified on command line" + makeConcreteSnapshot aSnapshot + + (project', configFile) <- case config.project of + PCProject (project, fp) -> do + forM_ project.userMsg prettyUserMessage + pure (project, Right fp) + PCNoProject extraDeps -> do + p <- + case mSnapshot of + Nothing -> throwIO NoSnapshotWhenUsingNoProject + Just _ -> getEmptyProject mSnapshot extraDeps + pure (p, Left config.userGlobalConfigFile) + PCGlobalProject -> do + logDebug "Run from outside a project, using implicit global project config" + destDir <- getImplicitGlobalProjectDir + let dest :: Path Abs File + dest = destDir stackDotYaml + dest' :: FilePath + dest' = toFilePath dest + ensureDir destDir + exists <- doesFileExist dest + if exists + then do + iopc <- loadConfigYaml (parseProjectAndConfigMonoid destDir) dest + ProjectAndConfigMonoid project _ <- liftIO iopc + when (view terminalL config) $ + case config.snapshot of + Nothing -> + logDebug $ + "Using snapshot: " + <> display project.snapshot + <> " from implicit 
global project's config file: " + <> fromString dest' + Just _ -> pure () + pure (project, Right dest) + else do + prettyInfoL + [ flow "Writing the configuration file for the implicit \ + \global project to:" + , pretty dest <> "." + , flow "Note: You can change the snapshot via the" + , style Shell "snapshot" + , flow "key there." + ] + p <- getEmptyProject mSnapshot [] + liftIO $ do + writeBinaryFileAtomic dest $ byteString $ S.concat + [ "# This is the implicit global project's configuration file, which is only used\n" + , "# when 'stack' is run outside of a real project. Settings here do _not_ act as\n" + , "# defaults for all projects. To change Stack's default settings, edit\n" + , "# '", encodeUtf8 (T.pack $ toFilePath config.userGlobalConfigFile), "' instead.\n" + , "#\n" + , "# For more information about Stack's configuration, see\n" + , "# http://docs.haskellstack.org/en/stable/configure/yaml/\n" + , "#\n" + , Yaml.encode p] + writeBinaryFileAtomic (parent dest relFileReadmeTxt) $ + "This is the implicit global project, which is " <> + "used only when 'stack' is run\noutside of a " <> + "real project.\n" + pure (p, Right dest) + mcompiler <- view $ globalOptsL . to (.compiler) + let project :: Project + project = project' + { Project.compiler = mcompiler <|> project'.compiler + , Project.snapshot = fromMaybe project'.snapshot mSnapshot } + -- We are indifferent as to whether the configuration file is a + -- user-specific global or a project-level one. 
+ eitherConfigFile = EE.fromEither configFile + extraPackageDBs <- mapM resolveDir' project.extraPackageDBs + + smWanted <- lockCachedWanted eitherConfigFile project.snapshot $ + fillProjectWanted eitherConfigFile config project + + -- Unfortunately redoes getWorkDir, since we don't have a BuildConfig yet + workDir <- view workDirL + let projectStorageFile = parent eitherConfigFile workDir relFileStorage + + initProjectStorage projectStorageFile $ \projectStorage -> do + let bc = BuildConfig + { config + , smWanted + , extraPackageDBs + , configFile + , curator = project.curator + , projectStorage + } + runRIO bc inner + where + getEmptyProject :: + Maybe RawSnapshotLocation + -> [RawPackageLocationImmutable] + -> RIO Config Project + getEmptyProject mSnapshot extraDeps = do + snapshot <- case mSnapshot of + Just snapshot -> do + prettyInfoL + [ flow "Using the snapshot" + , style Current (fromString $ T.unpack $ textDisplay snapshot) + , flow "specified on the command line." + ] + pure snapshot + Nothing -> do + r'' <- getLatestSnapshot + prettyInfoL + [ flow "Using the latest snapshot" + , style Current (fromString $ T.unpack $ textDisplay r'') <> "." + ] + pure r'' + pure Project + { userMsg = Nothing + , packages = [] + , extraDeps = map RPLImmutable extraDeps + , flagsByPkg = mempty + , snapshot + , compiler = Nothing + , extraPackageDBs = [] + , curator = Nothing + , dropPackages = mempty + } + prettyUserMessage :: String -> RIO Config () + prettyUserMessage userMsg = do + let userMsgs = map flow $ splitAtLineEnds userMsg + warningDoc = mconcat $ intersperse blankLine userMsgs + prettyWarn warningDoc + where + splitAtLineEnds = reverse . map reverse . 
go [] + where + go :: [String] -> String -> [String] + go ss [] = ss + go ss s = case go' [] s of + ([], rest) -> go ss rest + (s', rest) -> go (s' : ss) rest + go' :: String -> String -> (String, String) + go' s [] = (s, []) + go' s [c] = (c:s, []) + go' s "\n\n" = (s, []) + go' s [c1, c2] = (c2:c1:s, []) + go' s ('\n':'\n':rest) = (s, stripLineEnds rest) + go' s ('\n':'\r':'\n':rest) = (s, stripLineEnds rest) + go' s ('\r':'\n':'\n':rest) = (s, stripLineEnds rest) + go' s ('\r':'\n':'\r':'\n':rest) = (s, stripLineEnds rest) + go' s (c:rest) = go' (c:s) rest + stripLineEnds :: String -> String + stripLineEnds ('\n':rest) = stripLineEnds rest + stripLineEnds ('\r':'\n':rest) = stripLineEnds rest + stripLineEnds rest = rest fillProjectWanted :: - (HasProcessContext env, HasLogFunc env, HasPantryConfig env) - => Path Abs t - -> Config - -> Project - -> Map RawPackageLocationImmutable PackageLocationImmutable - -> WantedCompiler - -> Map PackageName (Bool -> RIO env DepPackage) - -> RIO env (SMWanted, [CompletedPLI]) -fillProjectWanted stackYamlFP config project locCache snapCompiler snapPackages = do - let bopts = configBuild config - - packages0 <- for (projectPackages project) $ \fp@(RelFilePath t) -> do - abs' <- resolveDir (parent stackYamlFP) (T.unpack t) - let resolved = ResolvedPath fp abs' - pp <- mkProjectPackage YesPrintWarnings resolved (boptsHaddock bopts) - pure (cpName $ ppCommon pp, pp) - - (deps0, mcompleted) <- fmap unzip . 
forM (projectDependencies project) $ \rpl -> do - (pl, mCompleted) <- case rpl of - RPLImmutable rpli -> do - (compl, mcompl) <- - case Map.lookup rpli locCache of - Just compl -> pure (compl, Just compl) - Nothing -> do - cpl <- completePackageLocation rpli - if cplHasCabalFile cpl - then pure (cplComplete cpl, Just $ cplComplete cpl) - else do - warnMissingCabalFile rpli - pure (cplComplete cpl, Nothing) - pure (PLImmutable compl, CompletedPLI rpli <$> mcompl) - RPLMutable p -> - pure (PLMutable p, Nothing) - dp <- additionalDepPackage (shouldHaddockDeps bopts) pl - pure ((cpName $ dpCommon dp, dp), mCompleted) - - checkDuplicateNames $ - map (second (PLMutable . ppResolvedDir)) packages0 ++ - map (second dpLocation) deps0 - - let packages1 = Map.fromList packages0 - snPackages = snapPackages - `Map.difference` packages1 - `Map.difference` Map.fromList deps0 - `Map.withoutKeys` projectDropPackages project - - snDeps <- for snPackages $ \getDep -> getDep (shouldHaddockDeps bopts) - - let deps1 = Map.fromList deps0 `Map.union` snDeps - - let mergeApply m1 m2 f = - MS.merge MS.preserveMissing MS.dropMissing (MS.zipWithMatched f) m1 m2 - pFlags = projectFlags project - packages2 = mergeApply packages1 pFlags $ - \_ p flags -> p{ppCommon=(ppCommon p){cpFlags=flags}} - deps2 = mergeApply deps1 pFlags $ - \_ d flags -> d{dpCommon=(dpCommon d){cpFlags=flags}} - - checkFlagsUsedThrowing pFlags FSStackYaml packages1 deps1 - - let pkgGhcOptions = configGhcOptionsByName config - deps = mergeApply deps2 pkgGhcOptions $ - \_ d options -> d{dpCommon=(dpCommon d){cpGhcOptions=options}} - packages = mergeApply packages2 pkgGhcOptions $ - \_ p options -> p{ppCommon=(ppCommon p){cpGhcOptions=options}} - unusedPkgGhcOptions = pkgGhcOptions `Map.restrictKeys` Map.keysSet packages2 + (HasLogFunc env, HasPantryConfig env, HasProcessContext env) + => Path Abs File + -- ^ Location of the configuration file, which may be either a + -- user-specific global or a project-level one. 
+ -> Config + -> Project + -> Map RawPackageLocationImmutable PackageLocationImmutable + -> WantedCompiler + -> Map PackageName (Bool -> RIO env DepPackage) + -> RIO env (SMWanted, [CompletedPLI]) +fillProjectWanted configFile config project locCache snapCompiler snapPackages = do + let bopts = config.build + + packages0 <- for project.packages $ \fp@(RelFilePath t) -> do + abs' <- resolveDir (parent configFile) (T.unpack t) + let resolved = ResolvedPath fp abs' + pp <- mkProjectPackage YesPrintWarnings resolved bopts.buildHaddocks + pure (pp.projectCommon.name, pp) + + -- prefetch git repos to avoid cloning per subdirectory + -- see https://github.com/commercialhaskell/stack/issues/5411 + let gitRepos = mapMaybe + ( \case + (RPLImmutable (RPLIRepo repo rpm)) -> Just (repo, rpm) + _ -> Nothing + ) + project.extraDeps + logDebug ("Prefetching git repos: " <> display (T.pack (show gitRepos))) + fetchReposRaw gitRepos + + (deps0, mcompleted) <- fmap unzip . forM project.extraDeps $ \rpl -> do + (pl, mCompleted) <- case rpl of + RPLImmutable rpli -> do + (compl, mcompl) <- + case Map.lookup rpli locCache of + Just compl -> pure (compl, Just compl) + Nothing -> do + cpl <- completePackageLocation rpli + if cplHasCabalFile cpl + then pure (cplComplete cpl, Just $ cplComplete cpl) + else do + warnMissingCabalFile rpli + pure (cplComplete cpl, Nothing) + pure (PLImmutable compl, CompletedPLI rpli <$> mcompl) + RPLMutable p -> + pure (PLMutable p, Nothing) + dp <- additionalDepPackage (shouldHaddockDeps bopts) pl + pure ((dp.depCommon.name, dp), mCompleted) + + checkDuplicateNames $ + map (second (PLMutable . 
(.resolvedDir))) packages0 ++ + map (second (.location)) deps0 + + let packages1 = Map.fromList packages0 + snPackages = snapPackages + `Map.difference` packages1 + `Map.difference` Map.fromList deps0 + `Map.withoutKeys` project.dropPackages + + snDeps <- for snPackages $ \getDep -> getDep (shouldHaddockDeps bopts) + + let deps1 = Map.fromList deps0 `Map.union` snDeps + + let mergeApply m1 m2 f = + MS.merge MS.preserveMissing MS.dropMissing (MS.zipWithMatched f) m1 m2 + pFlags = project.flagsByPkg + packages2 = mergeApply packages1 pFlags $ \_ p flags -> + p { projectCommon = p.projectCommon { flags = flags } } + deps2 = mergeApply deps1 pFlags $ \_ d flags -> + d { depCommon = d.depCommon { flags = flags } } + + checkFlagsUsedThrowing pFlags packages1 deps1 + + let pkgGhcOptions = config.ghcOptionsByName + deps = mergeApply deps2 pkgGhcOptions $ \_ d options -> + d { depCommon = d.depCommon { ghcOptions = options } } + packages = mergeApply packages2 pkgGhcOptions $ \_ p options -> + p { projectCommon = p.projectCommon { ghcOptions = options } } + unusedPkgGhcOptions = + pkgGhcOptions `Map.restrictKeys` Map.keysSet packages2 `Map.restrictKeys` Map.keysSet deps2 - unless (Map.null unusedPkgGhcOptions) $ - throwM $ InvalidGhcOptionsSpecification (Map.keys unusedPkgGhcOptions) + unless (Map.null unusedPkgGhcOptions) $ + throwM $ InvalidGhcOptionsSpecification (Map.keys unusedPkgGhcOptions) - let wanted = SMWanted - { smwCompiler = fromMaybe snapCompiler (projectCompiler project) - , smwProject = packages - , smwDeps = deps - , smwSnapshotLocation = projectResolver project - } - - pure (wanted, catMaybes mcompleted) + let wanted = SMWanted + { compiler = fromMaybe snapCompiler project.compiler + , project = packages + , deps = deps + , snapshotLocation = project.snapshot + } + pure (wanted, catMaybes mcompleted) + +-- | Check if a package is a project package or a dependency and, if it is, +-- if all the specified flags are defined in the package's Cabal file. 
+checkFlagsUsedThrowing :: + forall m. (MonadIO m, MonadThrow m) + => Map PackageName (Map FlagName Bool) + -> Map PackageName ProjectPackage + -> Map PackageName DepPackage + -> m () +checkFlagsUsedThrowing packageFlags projectPackages deps = do + unusedFlags <- forMaybeM (Map.toList packageFlags) getUnusedPackageFlags + unless (null unusedFlags) $ + prettyThrowM $ InvalidFlagSpecification unusedFlags + where + getUnusedPackageFlags :: + (PackageName, Map FlagName Bool) + -> m (Maybe UnusedFlags) + getUnusedPackageFlags (name, userFlags) = case maybeCommon of + -- Package is not available as project or dependency + Nothing -> pure $ Just $ UFNoPackage FSStackYaml name + -- Package exists, let's check if the flags are defined + Just common -> do + gpd <- liftIO common.gpd + let pname = pkgName $ PD.package $ PD.packageDescription gpd + pkgFlags = Set.fromList $ map PD.flagName $ PD.genPackageFlags gpd + unused = Map.keysSet $ Map.withoutKeys userFlags pkgFlags + pure $ if Set.null unused + -- All flags are defined, nothing to do + then Nothing + -- Error about the undefined flags + else Just $ UFFlagsNotDefined FSStackYaml pname pkgFlags unused + where + maybeCommon = fmap (.projectCommon) (Map.lookup name projectPackages) + <|> fmap (.depCommon) (Map.lookup name deps) -- | Check if there are any duplicate package names and, if so, throw an -- exception. checkDuplicateNames :: MonadThrow m => [(PackageName, PackageLocation)] -> m () checkDuplicateNames locals = - case filter hasMultiples $ Map.toList $ Map.fromListWith (++) $ map (second return) locals of - [] -> return () - x -> throwM $ DuplicateLocalPackageNames x - where - hasMultiples (_, _:_:_) = True - hasMultiples _ = False - + case filter hasMultiples $ Map.toList $ Map.fromListWith (++) $ map (second pure) locals of + [] -> pure () + x -> prettyThrowM $ DuplicateLocalPackageNames x + where + hasMultiples (_, _:_:_) = True + hasMultiples _ = False --- | Get the stack root, e.g. 
@~/.stack@, and determine whether the user owns it. +-- | Get the Stack root, e.g. @~/.stack@, and determine whether the user owns it. -- -- On Windows, the second value is always 'True'. -determineStackRootAndOwnership - :: (MonadIO m) - => ConfigMonoid - -- ^ Parsed command-line arguments - -> m (Path Abs Dir, Bool) +determineStackRootAndOwnership :: + MonadIO m + => ConfigMonoid + -- ^ Parsed command-line arguments + -> m (Path Abs Dir, Path Abs Dir, Bool) determineStackRootAndOwnership clArgs = liftIO $ do - stackRoot <- do - case getFirst (configMonoidStackRoot clArgs) of - Just x -> return x - Nothing -> do - mstackRoot <- lookupEnv stackRootEnvVar - case mstackRoot of - Nothing -> getAppUserDataDir stackProgName - Just x -> case parseAbsDir x of - Nothing -> throwString ("Failed to parse STACK_ROOT environment variable (expected absolute directory): " ++ show x) - Just parsed -> return parsed - - (existingStackRootOrParentDir, userOwnsIt) <- do - mdirAndOwnership <- findInParents getDirAndOwnership stackRoot - case mdirAndOwnership of - Just x -> return x - Nothing -> throwIO (BadStackRoot stackRoot) - - when (existingStackRootOrParentDir /= stackRoot) $ - if userOwnsIt - then ensureDir stackRoot - else throwIO $ - Won'tCreateStackRootInDirectoryOwnedByDifferentUser - stackRoot - existingStackRootOrParentDir - - stackRoot' <- canonicalizePath stackRoot - return (stackRoot', userOwnsIt) - --- | @'checkOwnership' dir@ throws 'UserDoesn'tOwnDirectory' if @dir@ --- isn't owned by the current user. 
+ (configRoot, stackRoot) <- + case getFirst clArgs.stackRoot of + Just x -> pure (x, x) + Nothing -> + lookupEnv stackRootEnvVar >>= \case + Nothing -> do + wantXdg <- fromMaybe "" <$> lookupEnv stackXdgEnvVar + if not (null wantXdg) + then do + xdgRelDir <- parseRelDir stackProgName + (,) + <$> getXdgDir XdgConfig (Just xdgRelDir) + <*> getXdgDir XdgData (Just xdgRelDir) + else do + oldStyleRoot <- getAppUserDataDir stackProgName + pure (oldStyleRoot, oldStyleRoot) + Just x -> case parseAbsDir x of + Nothing -> + throwIO $ ParseAbsolutePathException stackRootEnvVar x + Just parsed -> pure (parsed, parsed) + + (existingStackRootOrParentDir, userOwnsIt) <- + findInParents getDirAndOwnership stackRoot >>= \case + Just x -> pure x + Nothing -> throwIO (BadStackRoot stackRoot) + + when (existingStackRootOrParentDir /= stackRoot) $ + if userOwnsIt + then ensureDir stackRoot + else throwIO $ + Won'tCreateStackRootInDirectoryOwnedByDifferentUser + stackRoot + existingStackRootOrParentDir + + configRoot' <- canonicalizePath configRoot + stackRoot' <- canonicalizePath stackRoot + pure (configRoot', stackRoot', userOwnsIt) + +-- | @'checkOwnership' dir@ throws 'UserDoesn'tOwnDirectory' if @dir@ isn't +-- owned by the current user. -- -- If @dir@ doesn't exist, its parent directory is checked instead. --- If the parent directory doesn't exist either, @'NoSuchDirectory' ('parent' dir)@ --- is thrown. -checkOwnership :: (MonadIO m) => Path Abs Dir -> m () -checkOwnership dir = do - mdirAndOwnership <- firstJustM getDirAndOwnership [dir, parent dir] - case mdirAndOwnership of - Just (_, True) -> return () - Just (dir', False) -> throwIO (UserDoesn'tOwnDirectory dir') - Nothing -> - (throwIO . NoSuchDirectory) $ (toFilePathNoTrailingSep . parent) dir +-- If the parent directory doesn't exist either, +-- @'NoSuchDirectory' ('parent' dir)@ is thrown. 
+checkOwnership :: MonadIO m => Path Abs Dir -> m ()
+checkOwnership dir =
+  firstJustM getDirAndOwnership [dir, parent dir] >>= \case
+    Just (_, True) -> pure ()
+    Just (dir', False) -> throwIO (UserDoesn'tOwnDirectory dir')
+    Nothing ->
+      throwIO . NoSuchDirectory $ (toFilePathNoTrailingSep . parent) dir
 
 -- | @'getDirAndOwnership' dir@ returns @'Just' (dir, 'True')@ when @dir@
 -- exists and the current user owns it in the sense of 'isOwnedByUser'.
-getDirAndOwnership
-  :: (MonadIO m)
-  => Path Abs Dir
-  -> m (Maybe (Path Abs Dir, Bool))
+getDirAndOwnership ::
+     MonadIO m
+  => Path Abs Dir
+  -> m (Maybe (Path Abs Dir, Bool))
 getDirAndOwnership dir = liftIO $ forgivingAbsence $ do
   ownership <- isOwnedByUser dir
-  return (dir, ownership)
+  pure (dir, ownership)
 
--- | Check whether the current user (determined with 'getEffectiveUserId') is
--- the owner for the given path.
+-- | Check whether the current user (determined with
+-- 'System.Posix.User.getEffectiveUserId') is the owner for the given path.
 --
--- Will always return 'True' on Windows.
+-- Will always return 'True' on Windows.
 isOwnedByUser :: MonadIO m => Path Abs t -> m Bool
-isOwnedByUser path = liftIO $ do
-  if osIsWindows
-    then return True
-    else do
-      fileStatus <- getFileStatus (toFilePath path)
-      user <- getEffectiveUserID
-      return (user == fileOwner fileStatus)
+isOwnedByUser path = liftIO $
+  if osIsWindows
+    then pure True
+    else do
+      fileStatus <- getFileStatus (toFilePath path)
+      user <- getEffectiveUserID
+      pure (user == fileOwner fileStatus)
 
 -- | 'True' if we are currently running inside a Docker container.
-getInContainer :: (MonadIO m) => m Bool
+getInContainer :: MonadIO m => m Bool
 getInContainer = liftIO (isJust <$> lookupEnv inContainerEnvVar)
 
 -- | 'True' if we are currently running inside a Nix.
-getInNixShell :: (MonadIO m) => m Bool +getInNixShell :: MonadIO m => m Bool getInNixShell = liftIO (isJust <$> lookupEnv inNixShellEnvVar) -- | Determine the extra config file locations which exist. -- -- Returns most local first -getExtraConfigs :: HasLogFunc env - => Path Abs File -- ^ use config path - -> RIO env [Path Abs File] -getExtraConfigs userConfigPath = do - defaultStackGlobalConfigPath <- getDefaultGlobalConfigPath - liftIO $ do - env <- getEnvironment - mstackConfig <- - maybe (return Nothing) (fmap Just . parseAbsFile) - $ lookup "STACK_CONFIG" env - mstackGlobalConfig <- - maybe (return Nothing) (fmap Just . parseAbsFile) - $ lookup "STACK_GLOBAL_CONFIG" env - filterM doesFileExist - $ fromMaybe userConfigPath mstackConfig - : maybe [] return (mstackGlobalConfig <|> defaultStackGlobalConfigPath) +getExtraConfigs :: + HasTerm env + => Path Abs File -- ^ use config path + -> RIO env [Path Abs File] +getExtraConfigs userConfigPath = liftIO $ do + env <- getEnvironment + mstackConfig <- + maybe (pure Nothing) (fmap Just . parseAbsFile) + $ lookup "STACK_CONFIG" env + mstackGlobalConfig <- + maybe (pure Nothing) (fmap Just . parseAbsFile) + $ lookup "STACK_GLOBAL_CONFIG" env + filterM doesFileExist + $ fromMaybe userConfigPath mstackConfig + : maybe [] pure (mstackGlobalConfig <|> defaultGlobalConfigPath) -- | Load and parse YAML from the given config file. Throws -- 'ParseConfigFileException' when there's a decoding error. 
-loadConfigYaml - :: HasLogFunc env - => (Value -> Yaml.Parser (WithJSONWarnings a)) -> Path Abs File -> RIO env a -loadConfigYaml parser path = do - eres <- loadYaml parser path - case eres of - Left err -> liftIO $ throwM (ParseConfigFileException path err) - Right res -> return res +loadConfigYaml :: + HasLogFunc env + => (Value -> Yaml.Parser (WithJSONWarnings a)) + -> Path Abs File -> RIO env a +loadConfigYaml parser path = loadYaml parser path >>= \case + Left err -> prettyThrowM (ParseConfigFileException path err) + Right res -> pure res -- | Load and parse YAML from the given file. -loadYaml - :: HasLogFunc env - => (Value -> Yaml.Parser (WithJSONWarnings a)) -> Path Abs File -> RIO env (Either Yaml.ParseException a) -loadYaml parser path = do - eres <- liftIO $ Yaml.decodeFileEither (toFilePath path) - case eres of - Left err -> return (Left err) - Right val -> - case Yaml.parseEither parser val of - Left err -> return (Left (Yaml.AesonException err)) - Right (WithJSONWarnings res warnings) -> do - logJSONWarnings (toFilePath path) warnings - return (Right res) +loadYaml :: + HasLogFunc env + => (Value -> Yaml.Parser (WithJSONWarnings a)) + -> Path Abs File + -> RIO env (Either Yaml.ParseException a) +loadYaml parser path = + liftIO (YamlInclude.decodeFileEither (toFilePath path)) >>= \case + Left err -> pure (Left err) + Right val -> + case Yaml.parseEither parser val of + Left err -> pure (Left (Yaml.AesonException err)) + Right (WithJSONWarnings res warnings) -> do + logJSONWarnings (toFilePath path) warnings + pure (Right res) -- | Get the location of the project config file, if it exists. 
-getProjectConfig :: HasLogFunc env - => StackYamlLoc - -- ^ Override stack.yaml - -> RIO env (ProjectConfig (Path Abs File)) -getProjectConfig (SYLOverride stackYaml) = return $ PCProject stackYaml -getProjectConfig SYLGlobalProject = return PCGlobalProject +getProjectConfig :: + HasTerm env + => StackYamlLoc + -- ^ Override stack.yaml + -> RIO env (ProjectConfig (Path Abs File)) +getProjectConfig (SYLOverride stackYaml) = pure $ PCProject stackYaml +getProjectConfig SYLGlobalProject = pure PCGlobalProject getProjectConfig SYLDefault = do - env <- liftIO getEnvironment - case lookup "STACK_YAML" env of - Just fp -> do - logInfo "Getting project config file from STACK_YAML environment" - liftM PCProject $ resolveFile' fp - Nothing -> do - currDir <- getCurrentDir - maybe PCGlobalProject PCProject <$> findInParents getStackDotYaml currDir - where - getStackDotYaml dir = do - let fp = dir stackDotYaml - fp' = toFilePath fp - logDebug $ "Checking for project config at: " <> fromString fp' - exists <- doesFileExist fp - if exists - then return $ Just fp - else return Nothing -getProjectConfig (SYLNoProject extraDeps) = return $ PCNoProject extraDeps + env <- liftIO getEnvironment + case lookup "STACK_YAML" env of + Just fp -> do + prettyInfoS + "Getting the project-level configuration file from the \ + \STACK_YAML environment variable." + PCProject <$> resolveFile' fp + Nothing -> do + currDir <- getCurrentDir + maybe PCGlobalProject PCProject <$> findInParents getStackDotYaml currDir + where + getStackDotYaml dir = do + let fp = dir stackDotYaml + fp' = toFilePath fp + logDebug $ "Checking for project config at: " <> fromString fp' + exists <- doesFileExist fp + if exists + then pure $ Just fp + else pure Nothing +getProjectConfig (SYLNoProject extraDeps) = pure $ PCNoProject extraDeps -- | Find the project config file location, respecting environment variables -- and otherwise traversing parents. 
If no config is found, we supply a default -- based on current directory. -loadProjectConfig :: HasLogFunc env - => StackYamlLoc - -- ^ Override stack.yaml - -> RIO env (ProjectConfig (Project, Path Abs File, ConfigMonoid)) -loadProjectConfig mstackYaml = do - mfp <- getProjectConfig mstackYaml - case mfp of - PCProject fp -> do - currDir <- getCurrentDir - logDebug $ "Loading project config file " <> - fromString (maybe (toFilePath fp) toFilePath (stripProperPrefix currDir fp)) - PCProject <$> load fp - PCGlobalProject -> do - logDebug "No project config file found, using defaults." - return PCGlobalProject - PCNoProject extraDeps -> do - logDebug "Ignoring config files" - return $ PCNoProject extraDeps - where - load fp = do - iopc <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp - ProjectAndConfigMonoid project config <- liftIO iopc - return (project, fp, config) - --- | Get the location of the default stack configuration file. --- If a file already exists at the deprecated location, its location is returned. --- Otherwise, the new location is returned. -getDefaultGlobalConfigPath - :: HasLogFunc env - => RIO env (Maybe (Path Abs File)) -getDefaultGlobalConfigPath = - case (defaultGlobalConfigPath, defaultGlobalConfigPathDeprecated) of - (Just new,Just old) -> - liftM (Just . fst ) $ - tryDeprecatedPath - (Just "non-project global configuration file") - doesFileExist - new - old - (Just new,Nothing) -> return (Just new) - _ -> return Nothing - --- | Get the location of the default user configuration file. --- If a file already exists at the deprecated location, its location is returned. --- Otherwise, the new location is returned. 
-getDefaultUserConfigPath - :: HasLogFunc env - => Path Abs Dir -> RIO env (Path Abs File) -getDefaultUserConfigPath stackRoot = do - (path, exists) <- tryDeprecatedPath - (Just "non-project configuration file") - doesFileExist - (defaultUserConfigPath stackRoot) - (defaultUserConfigPathDeprecated stackRoot) - unless exists $ do - ensureDir (parent path) - liftIO $ writeBinaryFileAtomic path defaultConfigYaml - return path - -packagesParser :: Parser [String] -packagesParser = many (strOption - (long "package" <> - metavar "PACKAGE(S)" <> - help "Additional package(s) that must be installed")) - -defaultConfigYaml :: IsString s => s +loadProjectConfig :: + HasTerm env + => StackYamlLoc + -- ^ Override stack.yaml + -> RIO env (ProjectConfig (Project, Path Abs File, ConfigMonoid)) +loadProjectConfig mstackYaml = getProjectConfig mstackYaml >>= \case + PCProject fp -> do + currDir <- getCurrentDir + logDebug $ + "Loading project config file " + <> fromString + (maybe (toFilePath fp) toFilePath (stripProperPrefix currDir fp)) + PCProject <$> load fp + PCGlobalProject -> do + logDebug "No project config file found, using defaults." + pure PCGlobalProject + PCNoProject extraDeps -> do + logDebug "Ignoring config files" + pure $ PCNoProject extraDeps + where + load fp = do + iopc <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp + ProjectAndConfigMonoid project config <- liftIO iopc + pure (project, fp, config) + +-- | Get the location of the default user global configuration file. +getDefaultUserConfigPath :: + HasTerm env + => Path Abs Dir + -> RIO env (Path Abs File) +getDefaultUserConfigPath configRoot = do + let userConfigPath = defaultUserConfigPath configRoot + userConfigExists <- doesFileExist userConfigPath + unless userConfigExists $ do + ensureDir (parent userConfigPath) + liftIO $ writeBinaryFileAtomic userConfigPath defaultConfigYaml + pure userConfigPath + +-- | The contents of the default Stack global configuration file. 
+defaultConfigYaml :: (IsString s, Semigroup s) => s defaultConfigYaml = - "# This file contains default non-project-specific settings for 'stack', used\n\ - \# in all projects. For more information about stack's configuration, see\n\ - \# http://docs.haskellstack.org/en/stable/yaml_configuration/\n\ + "# This file contains default non-project-specific settings for Stack, used\n\ + \# in all projects. For more information about Stack's configuration, see\n\ + \# http://docs.haskellstack.org/en/stable/configure/yaml/\n\ \\n\ - \# The following parameters are used by \"stack new\" to automatically fill fields\n\ - \# in the cabal config. We recommend uncommenting them and filling them out if\n\ + \# The following parameters are used by 'stack new' to automatically fill fields\n\ + \# in the Cabal file. We recommend uncommenting them and filling them out if\n\ \# you intend to use 'stack new'.\n\ - \# See https://docs.haskellstack.org/en/stable/yaml_configuration/#templates\n\ + \# See https://docs.haskellstack.org/en/stable/configure/yaml/non-project/#templates\n\ \templates:\n\ \ params:\n\ \# author-name:\n\ @@ -969,9 +1315,9 @@ defaultConfigYaml = \# copyright:\n\ \# github-username:\n\ \\n\ - \# The following parameter specifies stack's output styles; STYLES is a\n\ + \# The following parameter specifies Stack's output styles; STYLES is a\n\ \# colon-delimited sequence of key=value, where 'key' is a style name and\n\ \# 'value' is a semicolon-delimited list of 'ANSI' SGR (Select Graphic\n\ - \# Rendition) control codes (in decimal). Use \"stack ls stack-colors --basic\"\n\ + \# Rendition) control codes (in decimal). 
Use 'stack ls stack-colors --basic'\n\ \# to see the current sequence.\n\ \# stack-colors: STYLES\n" diff --git a/src/Stack/Config/Build.hs b/src/Stack/Config/Build.hs index 826c913711..3731527744 100644 --- a/src/Stack/Config/Build.hs +++ b/src/Stack/Config/Build.hs @@ -1,94 +1,155 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} --- | Build configuration -module Stack.Config.Build where +{-| +Module : Stack.Config.Build +Description : Build configuration. +License : BSD-3-Clause +Build configuration. +-} + +module Stack.Config.Build + ( buildOptsFromMonoid + , haddockOptsFromMonoid + , testOptsFromMonoid + , benchmarkOptsFromMonoid + ) where + +import Distribution.Verbosity ( normal ) +import Stack.BuildOpts + ( defaultBenchmarkOpts, defaultHaddockOpts, defaultTestOpts ) import Stack.Prelude -import Stack.Types.Config +import Stack.Types.BuildOpts + ( BenchmarkOpts (..), BuildOpts (..), HaddockOpts (..) + , TestOpts (..) + ) +import qualified Stack.Types.BuildOpts as BenchmarkOpts ( BenchmarkOpts (..) ) +import qualified Stack.Types.BuildOpts as HaddockOpts ( HaddockOpts (..) ) +import qualified Stack.Types.BuildOpts as TestOpts ( TestOpts (..) ) +import Stack.Types.BuildOptsMonoid + ( BenchmarkOptsMonoid (..), BuildOptsMonoid (..) + , CabalVerbosity (..), HaddockOptsMonoid (..) + , ProgressBarFormat (..), TestOptsMonoid (..) + ) -- | Interprets BuildOptsMonoid options. 
buildOptsFromMonoid :: BuildOptsMonoid -> BuildOpts -buildOptsFromMonoid BuildOptsMonoid{..} = BuildOpts - { boptsLibProfile = fromFirstFalse - (buildMonoidLibProfile <> - FirstFalse (if tracing || profiling then Just True else Nothing)) - , boptsExeProfile = fromFirstFalse - (buildMonoidExeProfile <> - FirstFalse (if tracing || profiling then Just True else Nothing)) - , boptsLibStrip = fromFirstTrue - (buildMonoidLibStrip <> - FirstTrue (if noStripping then Just False else Nothing)) - , boptsExeStrip = fromFirstTrue - (buildMonoidExeStrip <> - FirstTrue (if noStripping then Just False else Nothing)) - , boptsHaddock = fromFirstFalse buildMonoidHaddock - , boptsHaddockOpts = haddockOptsFromMonoid buildMonoidHaddockOpts - , boptsOpenHaddocks = fromFirstFalse buildMonoidOpenHaddocks - , boptsHaddockDeps = getFirst buildMonoidHaddockDeps - , boptsHaddockInternal = fromFirstFalse buildMonoidHaddockInternal - , boptsHaddockHyperlinkSource = fromFirstTrue buildMonoidHaddockHyperlinkSource - , boptsInstallExes = fromFirstFalse buildMonoidInstallExes - , boptsInstallCompilerTool = fromFirstFalse buildMonoidInstallCompilerTool - , boptsPreFetch = fromFirstFalse buildMonoidPreFetch - , boptsKeepGoing = getFirst buildMonoidKeepGoing - , boptsKeepTmpFiles = fromFirstFalse buildMonoidKeepTmpFiles - , boptsForceDirty = fromFirstFalse buildMonoidForceDirty - , boptsTests = fromFirstFalse buildMonoidTests - , boptsTestOpts = - testOptsFromMonoid buildMonoidTestOpts additionalArgs - , boptsBenchmarks = fromFirstFalse buildMonoidBenchmarks - , boptsBenchmarkOpts = - benchmarkOptsFromMonoid buildMonoidBenchmarkOpts additionalArgs - , boptsReconfigure = fromFirstFalse buildMonoidReconfigure - , boptsCabalVerbose = fromFirstFalse buildMonoidCabalVerbose - , boptsSplitObjs = fromFirstFalse buildMonoidSplitObjs - , boptsSkipComponents = buildMonoidSkipComponents - , boptsInterleavedOutput = fromFirstTrue buildMonoidInterleavedOutput - , boptsDdumpDir = getFirst buildMonoidDdumpDir - } - 
where - -- These options are not directly used in bopts, instead they - -- transform other options. - tracing = getAny buildMonoidTrace - profiling = getAny buildMonoidProfile - noStripping = getAny buildMonoidNoStrip - -- Additional args for tracing / profiling - additionalArgs = - if tracing || profiling - then Just $ "+RTS" : catMaybes [trac, prof, Just "-RTS"] - else Nothing - trac = - if tracing - then Just "-xc" - else Nothing - prof = - if profiling - then Just "-p" - else Nothing +buildOptsFromMonoid buildMonoid = BuildOpts + { libProfile = fromFirstFalse + ( buildMonoid.libProfile + <> FirstFalse (if tracing || profiling then Just True else Nothing) + ) + , exeProfile = fromFirstFalse + ( buildMonoid.exeProfile + <> FirstFalse (if tracing || profiling then Just True else Nothing) + ) + , libStrip = fromFirstTrue + ( buildMonoid.libStrip + <> FirstTrue (if noStripping then Just False else Nothing) + ) + , exeStrip = fromFirstTrue + ( buildMonoid.exeStrip + <> FirstTrue (if noStripping then Just False else Nothing) + ) + , buildHaddocks = fromFirstFalse buildMonoid.buildHaddocks + , haddockOpts = haddockOptsFromMonoid buildMonoid.haddockOpts + , openHaddocks = + not isHaddockFromHackage + && fromFirstFalse buildMonoid.openHaddocks + , haddockDeps = if isHaddockFromHackage + then Nothing + else getFirst buildMonoid.haddockDeps + , haddockExecutables = + not isHaddockFromHackage + && fromFirstFalse buildMonoid.haddockExecutables + , haddockTests = + not isHaddockFromHackage + && fromFirstFalse buildMonoid.haddockTests + , haddockBenchmarks = + not isHaddockFromHackage + && fromFirstFalse buildMonoid.haddockBenchmarks + , haddockInternal = + not isHaddockFromHackage + && fromFirstFalse buildMonoid.haddockInternal + , haddockHyperlinkSource = + isHaddockFromHackage + || fromFirstTrue buildMonoid.haddockHyperlinkSource + , haddockForHackage = isHaddockFromHackage + , installExes = fromFirstFalse buildMonoid.installExes + , installCompilerTool = fromFirstFalse 
buildMonoid.installCompilerTool + , preFetch = fromFirstFalse buildMonoid.preFetch + , keepGoing = getFirst buildMonoid.keepGoing + , keepTmpFiles = fromFirstFalse buildMonoid.keepTmpFiles + , forceDirty = isHaddockFromHackage || fromFirstFalse buildMonoid.forceDirty + , tests = fromFirstFalse buildMonoid.tests + , testOpts = testOptsFromMonoid buildMonoid.testOpts additionalArgs + , benchmarks = fromFirstFalse buildMonoid.benchmarks + , benchmarkOpts = + benchmarkOptsFromMonoid buildMonoid.benchmarkOpts additionalArgs + , reconfigure = fromFirstFalse buildMonoid.reconfigure + , cabalVerbose = fromFirst (CabalVerbosity normal) buildMonoid.cabalVerbose + , splitObjs = fromFirstFalse buildMonoid.splitObjs + , skipComponents = buildMonoid.skipComponents + , interleavedOutput = fromFirstTrue buildMonoid.interleavedOutput + , progressBar = fromFirst CappedBar buildMonoid.progressBar + , ddumpDir = getFirst buildMonoid.ddumpDir + , semaphore = fromFirstFalse buildMonoid.semaphore + } + where + isHaddockFromHackage = fromFirstFalse buildMonoid.haddockForHackage + -- These options are not directly used in bopts, instead they + -- transform other options. + tracing = getAny buildMonoid.trace + profiling = getAny buildMonoid.profile + noStripping = getAny buildMonoid.noStrip + -- Additional args for tracing / profiling + additionalArgs = + if tracing || profiling + then Just $ "+RTS" : catMaybes [trac, prof, Just "-RTS"] + else Nothing + trac = + if tracing + then Just "-xc" + else Nothing + prof = + if profiling + then Just "-p" + else Nothing +-- | Interprets HaddockOptsMonoid options. haddockOptsFromMonoid :: HaddockOptsMonoid -> HaddockOpts -haddockOptsFromMonoid HaddockOptsMonoid{..} = - defaultHaddockOpts - {hoAdditionalArgs = hoMonoidAdditionalArgs} +haddockOptsFromMonoid hoMonoid = defaultHaddockOpts + { HaddockOpts.additionalArgs = hoMonoid.additionalArgs } +-- | Interprets TestOptsMonoid options. 
testOptsFromMonoid :: TestOptsMonoid -> Maybe [String] -> TestOpts -testOptsFromMonoid TestOptsMonoid{..} madditional = - defaultTestOpts - { toRerunTests = fromFirstTrue toMonoidRerunTests - , toAdditionalArgs = fromMaybe [] madditional <> toMonoidAdditionalArgs - , toCoverage = fromFirstFalse toMonoidCoverage - , toDisableRun = fromFirstFalse toMonoidDisableRun - , toMaximumTimeSeconds = fromFirst (toMaximumTimeSeconds defaultTestOpts) toMonoidMaximumTimeSeconds - } +testOptsFromMonoid toMonoid madditional = defaultTestOpts + { TestOpts.rerunTests = fromFirstTrue toMonoid.rerunTests + , TestOpts.additionalArgs = + fromMaybe [] madditional <> toMonoid.additionalArgs + , TestOpts.coverage = fromFirstFalse toMonoid.coverage + , TestOpts.runTests = fromFirstTrue toMonoid.runTests + , TestOpts.maximumTimeSeconds = + fromFirst + defaultTestOpts.maximumTimeSeconds + toMonoid.maximumTimeSeconds + , TestOpts.timeoutGraceSeconds = + fromFirst + defaultTestOpts.timeoutGraceSeconds + toMonoid.timeoutGraceSeconds + , TestOpts.allowStdin = fromFirstTrue toMonoid.allowStdin + } -benchmarkOptsFromMonoid :: BenchmarkOptsMonoid -> Maybe [String] -> BenchmarkOpts -benchmarkOptsFromMonoid BenchmarkOptsMonoid{..} madditional = - defaultBenchmarkOpts - { beoAdditionalArgs = - fmap (\args -> unwords args <> " ") madditional <> - getFirst beoMonoidAdditionalArgs - , beoDisableRun = fromFirst - (beoDisableRun defaultBenchmarkOpts) - beoMonoidDisableRun - } +-- | Interprets BenchmarkOptsMonoid options. 
+benchmarkOptsFromMonoid :: + BenchmarkOptsMonoid + -> Maybe [String] + -> BenchmarkOpts +benchmarkOptsFromMonoid beoMonoid madditional = defaultBenchmarkOpts + { BenchmarkOpts.additionalArgs = + fmap (\args -> unwords args <> " ") madditional <> + getFirst beoMonoid.additionalArgs + , BenchmarkOpts.runBenchmarks = fromFirstTrue beoMonoid.runBenchmarks + } diff --git a/src/Stack/Config/ConfigureScript.hs b/src/Stack/Config/ConfigureScript.hs new file mode 100644 index 0000000000..62ef8beeb8 --- /dev/null +++ b/src/Stack/Config/ConfigureScript.hs @@ -0,0 +1,112 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Config.ConfigureScript +License : BSD-3-Clause +-} + +module Stack.Config.ConfigureScript + ( ensureConfigureScript + ) where + +import Path ( () ) +import Path.IO ( doesFileExist ) +import Stack.Constants ( osIsWindows, relFileConfigure ) +import Stack.DefaultColorWhen ( defaultColorWhen ) +import Stack.Prelude +import RIO.Process ( HasProcessContext, withWorkingDir ) + +-- | For the given directory, yields an action that trys to generate a +-- @configure@ script with @autoreconf@, if one does not exist in the directory. +ensureConfigureScript :: + (HasProcessContext env, HasTerm env) + => Path b Dir + -> RIO env () +ensureConfigureScript dir = do + let fp = dir relFileConfigure + exists <- doesFileExist fp + unless exists $ do + prettyInfoL + [ flow "Trying to generate" + , style Shell "configure" + , "with" + , style Shell "autoreconf" + , "in" + , pretty dir <> "." + ] + let autoreconf = if osIsWindows + then readProcessNull "sh" ["autoreconf", "-i"] + else readProcessNull "autoreconf" ["-i"] + -- On Windows 10, an upstream issue with the `sh autoreconf -i` + -- command means that command clears, but does not then restore, the + -- ENABLE_VIRTUAL_TERMINAL_PROCESSING flag for native terminals. The + -- following hack re-enables the lost ANSI-capability. 
+ fixupOnWindows = when osIsWindows (void $ liftIO defaultColorWhen) + withWorkingDir (toFilePath dir) $ autoreconf `catchAny` \ex -> do + fixupOnWindows + prettyWarn $ + fillSep + [ flow "Stack failed to run" + , style Shell "autoreconf" <> "." + ] + <> blankLine + <> flow "Stack encountered the following error:" + <> blankLine + <> string (displayException ex) + when osIsWindows $ do + prettyInfo $ + fillSep + [ flow "Check that executable" + , style File "perl" + , flow "is on the path in Stack's MSYS2" + , style Dir "\\usr\\bin" + , flow "folder, and working, and that script files" + , style File "autoreconf" + , "and" + , style File "aclocal" + , flow "are on the path in that location. To check that" + , style File "perl" <> "," + , style File "autoreconf" + , "or" + , style File "aclocal" + , flow "are on the path in the required location, run commands:" + ] + <> blankLine + <> indent 4 (style Shell $ flow "stack exec where.exe -- perl") + <> line + <> indent 4 (style Shell $ flow "stack exec where.exe -- autoreconf") + <> line + <> indent 4 (style Shell $ flow "stack exec where.exe -- aclocal") + <> blankLine + <> fillSep + [ "If" + , style File "perl" <> "," + , style File "autoreconf" + , "or" + , style File "aclocal" + , flow "is not on the path in the required location, add them \ + \with command (note that the relevant package name is" + , style File "autotools" + , "not" + , style File "autoreconf" <> "):" + ] + <> blankLine + <> indent 4 + (style Shell $ flow "stack exec pacman -- --sync --refresh mingw-w64-x86_64-autotools") + <> blankLine + <> fillSep + [ flow "Some versions of" + , style File "perl" + , flow "from MSYS2 are broken. See" + , style Url "https://github.com/msys2/MSYS2-packages/issues/1611" + , "and" + , style Url "https://github.com/commercialhaskell/stack/pull/4781" <> "." 
+ , "To test if" + , style File "perl" + , flow "in the required location is working, try command:" + ] + <> blankLine + <> indent 4 (style Shell $ flow "stack exec perl -- --version") + <> blankLine + fixupOnWindows diff --git a/src/Stack/Config/Docker.hs b/src/Stack/Config/Docker.hs index 5c25e77b92..bc91cb037a 100644 --- a/src/Stack/Config/Docker.hs +++ b/src/Stack/Config/Docker.hs @@ -1,99 +1,140 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE DeriveDataTypeable, RecordWildCards #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} --- | Docker configuration -module Stack.Config.Docker where +{-| +Module : Stack.Config.Docker +Description : Docker configuration. +License : BSD-3-Clause -import Stack.Prelude -import Data.List (find) +Docker configuration. +-} + +module Stack.Config.Docker + ( ConfigDockerException (..) + , addDefaultTag + , dockerOptsFromMonoid + ) where + +import Data.List ( find ) import qualified Data.Text as T -import Distribution.Version (simplifyVersionRange) -import Stack.Types.Version -import Stack.Types.Config +import Distribution.Version ( simplifyVersionRange ) +import Stack.Prelude +import Stack.Types.Project ( Project (..) ) import Stack.Types.Docker -import Stack.Types.Resolver + ( DockerOpts (..), DockerMonoidRepoOrImage (..) + , DockerOptsMonoid (..), dockerImageArgName + ) +import Stack.Types.Snapshot ( AbstractSnapshot (..) ) +import Stack.Types.Version ( IntersectingVersionRange (..) ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Config.Docker" module. +data ConfigDockerException + = SnapshotNotSupportedException !(Maybe Project) !(Maybe AbstractSnapshot) + -- ^ Only LTS snapshots are supported for default image tag. 
+ deriving Show + +instance Exception ConfigDockerException where + displayException (SnapshotNotSupportedException mproject mASnapshot) = + concat + [ "Error: [S-8575]\n" + , "Snapshot not supported for Docker images:\n " + , case (mproject, mASnapshot) of + (Nothing, Nothing) -> "no snapshot specified" + (_, Just aSnapshot) -> + T.unpack $ utf8BuilderToText $ display aSnapshot + (Just project, Nothing) -> + T.unpack $ utf8BuilderToText $ display project.snapshot + , "\nUse an LTS snapshot, or set the '" + , T.unpack dockerImageArgName + , "' explicitly, in your configuration file."] -- | Add a default Docker tag name to a given base image. -addDefaultTag - :: MonadThrow m +addDefaultTag :: + MonadThrow m => String -- ^ base -> Maybe Project - -> Maybe AbstractResolver + -> Maybe AbstractSnapshot -> m String -addDefaultTag base mproject maresolver = do - let exc = throwM $ ResolverNotSupportedException mproject maresolver - lts <- case maresolver of - Just (ARResolver (RSLSynonym lts@(LTS _ _))) -> return lts - Just _aresolver -> exc +addDefaultTag base mproject mASnapshot = do + let exc = throwM $ SnapshotNotSupportedException mproject mASnapshot + lts <- case mASnapshot of + Just (ASSnapshot (RSLSynonym lts@(LTS _ _))) -> pure lts + Just _aSnapshot -> exc Nothing -> - case projectResolver <$> mproject of - Just (RSLSynonym lts@(LTS _ _)) -> return lts + case (.snapshot) <$> mproject of + Just (RSLSynonym lts@(LTS _ _)) -> pure lts _ -> exc - return $ base ++ ":" ++ show lts + pure $ base ++ ":" ++ show lts -- | Interprets DockerOptsMonoid options. 
-dockerOptsFromMonoid - :: MonadThrow m - => Maybe Project - -> Maybe AbstractResolver - -> DockerOptsMonoid - -> m DockerOpts -dockerOptsFromMonoid mproject maresolver DockerOptsMonoid{..} = do - let dockerImage = - case getFirst dockerMonoidRepoOrImage of - Nothing -> addDefaultTag "fpco/stack-build" mproject maresolver - Just (DockerMonoidImage image) -> pure image - Just (DockerMonoidRepo repo) -> - case find (`elem` (":@" :: String)) repo of - Nothing -> addDefaultTag repo mproject maresolver - -- Repo already specified a tag or digest, so don't append default - Just _ -> pure repo - let dockerEnable = - fromFirst (getAny dockerMonoidDefaultEnable) dockerMonoidEnable - dockerRegistryLogin = - fromFirst - (isJust (emptyToNothing (getFirst dockerMonoidRegistryUsername))) - dockerMonoidRegistryLogin - dockerRegistryUsername = emptyToNothing (getFirst dockerMonoidRegistryUsername) - dockerRegistryPassword = emptyToNothing (getFirst dockerMonoidRegistryPassword) - dockerAutoPull = fromFirstTrue dockerMonoidAutoPull - dockerDetach = fromFirstFalse dockerMonoidDetach - dockerPersist = fromFirstFalse dockerMonoidPersist - dockerContainerName = emptyToNothing (getFirst dockerMonoidContainerName) - dockerNetwork = emptyToNothing (getFirst dockerMonoidNetwork) - dockerRunArgs = dockerMonoidRunArgs - dockerMount = dockerMonoidMount - dockerMountMode = emptyToNothing (getFirst dockerMonoidMountMode) - dockerEnv = dockerMonoidEnv - dockerSetUser = getFirst dockerMonoidSetUser - dockerRequireDockerVersion = - simplifyVersionRange (getIntersectingVersionRange dockerMonoidRequireDockerVersion) - dockerStackExe = getFirst dockerMonoidStackExe - - return DockerOpts{..} - where emptyToNothing Nothing = Nothing - emptyToNothing (Just s) | null s = Nothing - | otherwise = Just s - --- | Exceptions thrown by Stack.Docker.Config. 
-data StackDockerConfigException - = ResolverNotSupportedException !(Maybe Project) !(Maybe AbstractResolver) - -- ^ Only LTS resolvers are supported for default image tag. - deriving (Typeable) - --- | Exception instance for StackDockerConfigException. -instance Exception StackDockerConfigException - --- | Show instance for StackDockerConfigException. -instance Show StackDockerConfigException where - show (ResolverNotSupportedException mproject maresolver) = - concat - [ "Resolver not supported for Docker images:\n " - , case (mproject, maresolver) of - (Nothing, Nothing) -> "no resolver specified" - (_, Just aresolver) -> T.unpack $ utf8BuilderToText $ display aresolver - (Just project, Nothing) -> T.unpack $ utf8BuilderToText $ display $ projectResolver project - , "\nUse an LTS resolver, or set the '" - , T.unpack dockerImageArgName - , "' explicitly, in your configuration file."] +dockerOptsFromMonoid :: + MonadThrow m + => Maybe Project + -> Maybe AbstractSnapshot + -> DockerOptsMonoid + -> m DockerOpts +dockerOptsFromMonoid mproject mASnapshot dockerMonoid = do + let image = + case getFirst dockerMonoid.repoOrImage of + Nothing -> addDefaultTag "fpco/stack-build" mproject mASnapshot + Just (DockerMonoidImage image') -> pure image' + Just (DockerMonoidRepo repo) -> + case find (`elem` (":@" :: String)) repo of + Nothing -> addDefaultTag repo mproject mASnapshot + -- Repo already specified a tag or digest, so don't append default + Just _ -> pure repo + let enable = + fromFirst + (getAny dockerMonoid.defaultEnable) + dockerMonoid.enable + registryLogin = + fromFirst + (isJust (emptyToNothing (getFirst dockerMonoid.registryUsername))) + dockerMonoid.registryLogin + registryUsername = + emptyToNothing (getFirst dockerMonoid.registryUsername) + registryPassword = + emptyToNothing (getFirst dockerMonoid.registryPassword) + autoPull = fromFirstTrue dockerMonoid.autoPull + detach = fromFirstFalse dockerMonoid.detach + persist = fromFirstFalse dockerMonoid.persist + 
containerName = + emptyToNothing (getFirst dockerMonoid.containerName) + network = emptyToNothing (getFirst dockerMonoid.network) + runArgs = dockerMonoid.runArgs + mount = dockerMonoid.mount + mountMode = + emptyToNothing (getFirst dockerMonoid.mountMode) + env = dockerMonoid.env + setUser = getFirst dockerMonoid.setUser + requireDockerVersion = + simplifyVersionRange + dockerMonoid.requireDockerVersion.intersectingVersionRange + stackExe = getFirst dockerMonoid.stackExe + pure DockerOpts + { enable + , image + , registryLogin + , registryUsername + , registryPassword + , autoPull + , detach + , persist + , containerName + , network + , runArgs + , mount + , mountMode + , env + , stackExe + , setUser + , requireDockerVersion + } + where + emptyToNothing Nothing = Nothing + emptyToNothing (Just s) + | null s = Nothing + | otherwise = Just s diff --git a/src/Stack/Config/Nix.hs b/src/Stack/Config/Nix.hs index 97d847ede5..90a64d35f7 100644 --- a/src/Stack/Config/Nix.hs +++ b/src/Stack/Config/Nix.hs @@ -1,58 +1,121 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE RecordWildCards, DeriveDataTypeable, OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Config.Nix +Description : Nix configuration. +License : BSD-3-Clause + +Nix configuration. +-} --- | Nix configuration module Stack.Config.Nix - (nixOptsFromMonoid - ,nixCompiler - ,nixCompilerVersion - ,StackNixException(..) 
- ) where + ( ConfigNixPrettyException + , nixCompiler + , nixCompilerVersion + , nixOptsFromMonoid + ) where -import Stack.Prelude -import Control.Monad.Extra (ifM) +import Control.Monad.Extra ( ifM, whenJust ) import qualified Data.Text as T import qualified Data.Text.IO as TIO -import Distribution.System (OS (..)) -import Stack.Constants -import Stack.Types.Config -import Stack.Types.Nix -import System.Directory (doesFileExist) +import Distribution.System ( OS (..) ) +import Stack.Constants ( osIsWindows ) +import Stack.Prelude +import Stack.Types.Runner ( HasRunner ) +import Stack.Types.Nix ( NixOpts (..), NixOptsMonoid (..) ) +import System.Directory ( doesFileExist ) + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Config.Nix" module. +data ConfigNixPrettyException + = NixCannotUseShellFileAndPackagesException !FilePath ![Text] + -- ^ Nix can't be given packages and a shell file at the same time + | GHCMajorVersionUnspecified + | OnlyGHCSupported + deriving Show + +instance Pretty ConfigNixPrettyException where + pretty (NixCannotUseShellFileAndPackagesException initFile packages) = + "[S-2726]" + <> line + <> flow "The configuration of Stack's Nix integration cannot specify both \ + \a Nix shell file and Nix packages. You have specified:" + <> blankLine + <> spacedBulletedList + [ fillSep + [ flow "Shell file:" + , style File (fromString initFile) <> ";" + , "and" + ] + , fillSep $ + flow "Nix packages:" + : mkNarrativeList (Just Shell) False prettyPackages + ] + where + prettyPackages :: [StyleDoc] + prettyPackages = map (fromString . T.unpack) packages + pretty GHCMajorVersionUnspecified = + "[S-9317]" + <> line + <> flow "Stack's Nix integration requires at least a major version of GHC \ + \to be specified. No major version is specified." + pretty OnlyGHCSupported = + "[S-8605]" + <> line + <> flow "Stack's Nix integration supports only GHC binary distributions as \ + \compiler." 
+ +instance Exception ConfigNixPrettyException -- | Interprets NixOptsMonoid options. -nixOptsFromMonoid - :: HasRunner env - => NixOptsMonoid - -> OS - -> RIO env NixOpts -nixOptsFromMonoid NixOptsMonoid{..} os = do - let defaultPure = case os of - OSX -> False - _ -> True - nixPureShell = fromFirst defaultPure nixMonoidPureShell - nixPackages = fromFirst [] nixMonoidPackages - nixInitFile = getFirst nixMonoidInitFile - nixShellOptions = fromFirst [] nixMonoidShellOptions - ++ prefixAll (T.pack "-I") (fromFirst [] nixMonoidPath) - nixAddGCRoots = fromFirstFalse nixMonoidAddGCRoots +nixOptsFromMonoid :: + (HasRunner env, HasTerm env) + => NixOptsMonoid + -> OS + -> RIO env NixOpts +nixOptsFromMonoid nixMonoid os = do + let defaultPure = case os of + OSX -> False + _ -> True + pureShell = fromFirst defaultPure nixMonoid.pureShell + packages = fromFirst [] nixMonoid.packages + initFile = getFirst nixMonoid.initFile + shellOptions = + fromFirst [] nixMonoid.shellOptions + ++ prefixAll (T.pack "-I") (fromFirst [] nixMonoid.path) + addGCRoots = fromFirstFalse nixMonoid.addGCRoots - -- Enable Nix-mode by default on NixOS, unless Docker-mode was specified - osIsNixOS <- isNixOS - let nixEnable0 = fromFirst osIsNixOS nixMonoidEnable + -- Enable Nix-mode by default on NixOS, unless Docker-mode was specified + osIsNixOS <- isNixOS + let nixEnable0 = fromFirst osIsNixOS nixMonoid.enable - nixEnable <- case () of _ - | nixEnable0 && osIsWindows -> do - logInfo "Note: Disabling nix integration, since this is being run in Windows" - return False - | otherwise -> return nixEnable0 + enable <- + if nixEnable0 && osIsWindows + then do + prettyNoteS + "Disabling Nix integration, since this is being run in Windows." 
+ pure False + else pure nixEnable0 - when (not (null nixPackages) && isJust nixInitFile) $ - throwIO NixCannotUseShellFileAndPackagesException - return NixOpts{..} - where prefixAll p (x:xs) = p : x : prefixAll p xs - prefixAll _ _ = [] + unless (null packages) $ whenJust initFile $ \fp -> + prettyThrowIO $ NixCannotUseShellFileAndPackagesException fp packages -nixCompiler :: WantedCompiler -> Either StringException T.Text + pure NixOpts + { enable + , pureShell + , packages + , initFile + , shellOptions + , addGCRoots + } + where + prefixAll p (x:xs) = p : x : prefixAll p xs + prefixAll _ _ = [] + +nixCompiler :: WantedCompiler -> Either ConfigNixPrettyException T.Text nixCompiler compilerVersion = case compilerVersion of WCGhc version -> @@ -74,35 +137,23 @@ nixCompiler compilerVersion = <> T.pack (versionString version) <> "\"\ \else haskell.compiler.${builtins.head compilers})" _ -> "haskell.compiler.ghc" <> T.concat (x : y : minor) - _ -> Left $ stringException "GHC major version not specified" - WCGhcjs{} -> Left $ stringException "Only GHC is supported by stack --nix" - WCGhcGit{} -> Left $ stringException "Only GHC is supported by stack --nix" + _ -> Left GHCMajorVersionUnspecified + WCGhcjs{} -> Left OnlyGHCSupported + WCGhcGit{} -> Left OnlyGHCSupported -nixCompilerVersion :: WantedCompiler -> Either StringException T.Text +nixCompilerVersion :: WantedCompiler -> Either ConfigNixPrettyException T.Text nixCompilerVersion compilerVersion = case compilerVersion of WCGhc version -> case T.split (== '.') (fromString $ versionString version) of x : y : minor -> Right $ "ghc" <> T.concat (x : y : minor) - _ -> Left $ stringException "GHC major version not specified" - WCGhcjs{} -> Left $ stringException "Only GHC is supported by stack --nix" - WCGhcGit{} -> Left $ stringException "Only GHC is supported by stack --nix" - --- Exceptions thown specifically by Stack.Nix -data StackNixException - = NixCannotUseShellFileAndPackagesException - -- ^ Nix can't be given 
packages and a shell file at the same time - deriving (Typeable) - -instance Exception StackNixException - -instance Show StackNixException where - show NixCannotUseShellFileAndPackagesException = - "You cannot have packages and a shell-file filled at the same time in your nix-shell configuration." + _ -> Left GHCMajorVersionUnspecified + WCGhcjs{} -> Left OnlyGHCSupported + WCGhcGit{} -> Left OnlyGHCSupported isNixOS :: MonadIO m => m Bool isNixOS = liftIO $ do - let fp = "/etc/os-release" - ifM (doesFileExist fp) - (T.isInfixOf "ID=nixos" <$> TIO.readFile fp) - (return False) + let fp = "/etc/os-release" + ifM (doesFileExist fp) + (T.isInfixOf "ID=nixos" <$> TIO.readFile fp) + (pure False) diff --git a/src/Stack/ConfigCmd.hs b/src/Stack/ConfigCmd.hs index 353f3480d2..635cd1b645 100644 --- a/src/Stack/ConfigCmd.hs +++ b/src/Stack/ConfigCmd.hs @@ -1,180 +1,361 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE GADTs #-} - --- | Make changes to project or global configuration. +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedLists #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.ConfigCmd +Description : Make changes to project or global configuration. +License : BSD-3-Clause + +Make changes to project or global configuration. +-} + module Stack.ConfigCmd - (ConfigCmdSet(..) 
- ,configCmdSetParser - ,cfgCmdSet - ,cfgCmdSetName - ,configCmdEnvParser - ,cfgCmdEnv - ,cfgCmdEnvName - ,cfgCmdName) where + ( cfgCmdSet + , cfgCmdSetName + , cfgCmdEnv + , cfgCmdEnvName + , cfgCmdBuildFiles + , cfgCmdBuildFilesName + , cfgCmdName + , yamlContainsInclude + ) where -import Stack.Prelude -import Data.ByteString.Builder (byteString) +import qualified Data.Aeson.Key as Key +import qualified Data.Aeson.KeyMap as KeyMap +import Data.Attoparsec.Text as P + ( Parser, parseOnly, skip, string, takeText, takeWhile + , takeWhile1 + ) import qualified Data.Map.Merge.Strict as Map -import qualified Data.HashMap.Strict as HMap import qualified Data.Text as T import qualified Data.Yaml as Yaml -import qualified Options.Applicative as OA -import qualified Options.Applicative.Types as OA -import Options.Applicative.Builder.Extra -import Pantry (loadSnapshot) -import Path +import Pantry ( loadSnapshot ) +import Path ( (), parent ) import qualified RIO.Map as Map -import RIO.Process (envVarsL) -import Stack.Config (makeConcreteResolver, getProjectConfig, getImplicitGlobalProjectDir) -import Stack.Constants -import Stack.Types.Config -import Stack.Types.Resolver -import System.Environment (getEnvironment) - -data ConfigCmdSet - = ConfigCmdSetResolver (Unresolved AbstractResolver) - | ConfigCmdSetSystemGhc CommandScope - Bool - | ConfigCmdSetInstallGhc CommandScope - Bool - -data CommandScope - = CommandScopeGlobal - -- ^ Apply changes to the global configuration, - -- typically at @~/.stack/config.yaml@. - | CommandScopeProject - -- ^ Apply changes to the project @stack.yaml@. 
-
-configCmdSetScope :: ConfigCmdSet -> CommandScope
-configCmdSetScope (ConfigCmdSetResolver _) = CommandScopeProject
-configCmdSetScope (ConfigCmdSetSystemGhc scope _) = scope
-configCmdSetScope (ConfigCmdSetInstallGhc scope _) = scope
-
-cfgCmdSet
-    :: (HasConfig env, HasGHCVariant env)
-    => ConfigCmdSet -> RIO env ()
+import RIO.NonEmpty ( nonEmpty )
+import qualified RIO.NonEmpty as NE
+import RIO.Process ( envVarsL )
+import Stack.Config
+         ( makeConcreteSnapshot, getProjectConfig
+         , getImplicitGlobalProjectDir
+         )
+import Stack.Constants ( stackDotYaml )
+import Stack.Prelude
+import Stack.Types.BuildConfig ( BuildConfig )
+import Stack.Types.Config ( Config (..), HasConfig (..) )
+import Stack.Types.ConfigMonoid
+         ( configMonoidInstallGHCName
+         , configMonoidInstallMsysName
+         , configMonoidRecommendStackUpgradeName
+         , configMonoidSystemGHCName
+         )
+import Stack.Types.ConfigSetOpts
+         ( CommandScope (..), ConfigCmdSet (..) ,configCmdSetScope )
+import Stack.Types.EnvConfig ( EnvConfig )
+import Stack.Types.EnvSettings ( EnvSettings (..) )
+import Stack.Types.GHCVariant ( HasGHCVariant )
+import Stack.Types.GlobalOpts ( GlobalOpts (..) )
+import Stack.Types.ProjectConfig ( ProjectConfig (..) )
+import Stack.Types.Runner ( globalOptsL )
+import Stack.Types.Snapshot ( AbstractSnapshot )
+import System.Environment ( getEnvironment )
+
+-- | Type representing \'pretty\' exceptions thrown by functions exported by the
+-- "Stack.ConfigCmd" module.
+data ConfigCmdPrettyException
+  = NoProjectConfigAvailable
+  | ConfigFileContainsIncludes !(Path Abs File)
+  deriving Show
+
+instance Pretty ConfigCmdPrettyException where
+  pretty NoProjectConfigAvailable =
+    "[S-3136]"
+    <> line
+    <> fillSep
+         [ style Shell "config"
+         , flow "command used when no project configuration available."
+ ] + pretty (ConfigFileContainsIncludes configFile) = + "[S-6088]" + <> line + <> fillSep + [ "The" + , style Shell "config set" + , flow "command cannot add a new key to a configuration file that uses" + , style Shell "!include" + , "directives:" + , pretty configFile + ] + +instance Exception ConfigCmdPrettyException + +-- | Function underlying Stack's @config set@ command. +cfgCmdSet :: + (HasConfig env, HasGHCVariant env) + => ConfigCmdSet -> RIO env () cfgCmdSet cmd = do - conf <- view configL - configFilePath <- - case configCmdSetScope cmd of - CommandScopeProject -> do - mstackYamlOption <- view $ globalOptsL.to globalStackYaml - mstackYaml <- getProjectConfig mstackYamlOption - case mstackYaml of - PCProject stackYaml -> return stackYaml - PCGlobalProject -> liftM ( stackDotYaml) (getImplicitGlobalProjectDir conf) - PCNoProject _extraDeps -> throwString "config command used when no project configuration available" -- maybe modify the ~/.stack/config.yaml file instead? - CommandScopeGlobal -> return (configUserConfigPath conf) - -- We don't need to worry about checking for a valid yaml here - (config :: Yaml.Object) <- - liftIO (Yaml.decodeFileEither (toFilePath configFilePath)) >>= either throwM return - newValue <- cfgCmdSetValue (parent configFilePath) cmd - let cmdKey = cfgCmdSetOptionName cmd - config' = HMap.insert cmdKey newValue config - if config' == config - then logInfo - (fromString (toFilePath configFilePath) <> - " already contained the intended configuration and remains unchanged.") - else do - writeBinaryFileAtomic configFilePath (byteString (Yaml.encode config')) - logInfo (fromString (toFilePath configFilePath) <> " has been updated.") - -cfgCmdSetValue - :: (HasConfig env, HasGHCVariant env) - => Path Abs Dir -- ^ root directory of project - -> ConfigCmdSet -> RIO env Yaml.Value -cfgCmdSetValue root (ConfigCmdSetResolver newResolver) = do - newResolver' <- resolvePaths (Just root) newResolver - concreteResolver <- makeConcreteResolver 
newResolver' - -- Check that the snapshot actually exists - void $ loadSnapshot =<< completeSnapshotLocation concreteResolver - return (Yaml.toJSON concreteResolver) -cfgCmdSetValue _ (ConfigCmdSetSystemGhc _ bool') = - return (Yaml.Bool bool') -cfgCmdSetValue _ (ConfigCmdSetInstallGhc _ bool') = - return (Yaml.Bool bool') - -cfgCmdSetOptionName :: ConfigCmdSet -> Text -cfgCmdSetOptionName (ConfigCmdSetResolver _) = "resolver" -cfgCmdSetOptionName (ConfigCmdSetSystemGhc _ _) = configMonoidSystemGHCName -cfgCmdSetOptionName (ConfigCmdSetInstallGhc _ _) = configMonoidInstallGHCName + conf <- view configL + configFilePath <- + case configCmdSetScope cmd of + CommandScopeProject -> do + mstackYamlOption <- view $ globalOptsL . to (.stackYaml) + mstackYaml <- getProjectConfig mstackYamlOption + case mstackYaml of + PCProject stackYaml -> pure stackYaml + PCGlobalProject -> getImplicitGlobalProjectDir <&> ( stackDotYaml) + PCNoProject _extraDeps -> prettyThrowIO NoProjectConfigAvailable + -- maybe modify the ~/.stack/config.yaml file instead? + CommandScopeGlobal -> pure conf.userGlobalConfigFile + rawConfig <- liftIO (readFileUtf8 (toFilePath configFilePath)) + config <- either throwM pure (Yaml.decodeEither' $ encodeUtf8 rawConfig) + newValue <- cfgCmdSetValue (parent configFilePath) cmd + let yamlLines = T.lines rawConfig + cmdKeys = cfgCmdSetKeys cmd -- Text + newValue' = T.stripEnd $ + decodeUtf8With lenientDecode $ Yaml.encode newValue -- Text + file = toFilePath configFilePath -- String + hits = catMaybes $ NE.toList $ NE.map (inConfig config) cmdKeys + primaryCmdKey = NE.last $ NE.head cmdKeys + newYamlLines <- case hits of + [] -> do + when (yamlContainsInclude rawConfig) $ + prettyThrowIO (ConfigFileContainsIncludes configFilePath) + prettyInfoL + [ pretty configFilePath + , flow "has been extended." 
+ ] + pure $ writeLines yamlLines "" (NE.head cmdKeys) newValue' + [(cmdKey, oldValue)] -> if oldValue == newValue && cmdKey == primaryCmdKey + then do + prettyInfoL + [ pretty configFilePath + , flow "already contained the intended configuration and remains \ + \unchanged." + ] + pure yamlLines + else do + when (cmdKey /= primaryCmdKey) $ + prettyWarn $ + fillSep + [ pretty configFilePath + , flow "contained a synonym for" + , style Target (fromString $ T.unpack primaryCmdKey) + , parens (style Current (fromString $ T.unpack cmdKey)) + , flow "which has been replaced." + ] + <> line + switchLine configFilePath cmdKey primaryCmdKey newValue' [] yamlLines + _ -> do + -- In practice, this warning should not be encountered because with + -- snapshot and resolver (deprecated) present, Stack will not parse the + -- YAML file. + prettyWarnL + [ pretty configFilePath + , flow "contains more than one possible existing configuration and, \ + \consequently, remains unchanged." + ] + pure yamlLines + liftIO $ writeFileUtf8 file (T.unlines newYamlLines) + where + -- This assumes that if the key does not exist, the lines that can be + -- appended to include it are of a form like: + -- + -- key1: + -- key2: + -- key3: value + -- + writeLines yamlLines spaces cmdKeys value = + case nonEmpty $ NE.tail cmdKeys of + Nothing -> yamlLines <> [spaces <> NE.head cmdKeys <> ": " <> value] + Just ks -> writeLines + (yamlLines <> [spaces <> NE.head cmdKeys <> ":"]) + (spaces <> " ") + ks + value + + inConfig v cmdKeys = case v of + Yaml.Object obj -> + let cmdKey = NE.head cmdKeys + in case KeyMap.lookup (Key.fromText cmdKey) obj of + Nothing -> Nothing + Just v' -> case nonEmpty $ NE.tail cmdKeys of + Nothing -> Just (cmdKey, v') + Just ks -> inConfig v' ks + _ -> Nothing + + switchLine file cmdKey _ _ searched [] = do + prettyWarnL + [ style Current (fromString $ T.unpack cmdKey) + , flow "was not found in YAML file" + , pretty file + , flow "in the form" + , style Shell "key: value" + , 
flow "on a single line. Multi-line formats for existing keys are not \ + \supported by the" + , style Shell "config set" + , flow "commands. The file's contents have not been changed." + ] + pure $ reverse searched + switchLine file cmdKey cmdKey' newValue searched (oldLine:rest) = + case parseOnly (parseLine cmdKey) oldLine of + Left _ -> switchLine file cmdKey cmdKey' newValue (oldLine:searched) rest + Right (kt, spaces1, spaces2, spaces3, comment) -> do + let newLine = spaces1 <> renderKey cmdKey' kt <> spaces2 <> + ":" <> spaces3 <> newValue <> comment + prettyInfoL + [ pretty file + , flow "has been updated." + ] + pure $ reverse searched <> (newLine:rest) + + parseLine :: Text -> Parser (KeyType, Text, Text, Text, Text) + parseLine key = do + spaces1 <- P.takeWhile (== ' ') + kt <- parseKey key + spaces2 <- P.takeWhile (== ' ') + skip (== ':') + spaces3 <- P.takeWhile1 (== ' ') + -- This assumes that the existing value contains no space characters, which + -- is tolerable for current purposes. + void $ takeWhile1 (/= ' ') + -- This assumes that anything that follows the existing value is a comment, + -- which is tolerable for current purposes. + comment <- takeText + pure (kt, spaces1, spaces2, spaces3, comment) + + -- If the key is, for example, install-ghc, this recognises install-ghc, + -- 'install-ghc' or "install-ghc". 
+ parseKey :: Text -> Parser KeyType + parseKey k = parsePlainKey k + <|> parseSingleQuotedKey k + <|> parseDoubleQuotedKey k + parsePlainKey :: Text -> Parser KeyType + parsePlainKey key = do + _ <- P.string key + pure PlainKey + + parseSingleQuotedKey :: Text -> Parser KeyType + parseSingleQuotedKey = parseQuotedKey SingleQuotedKey '\'' + + parseDoubleQuotedKey :: Text -> Parser KeyType + parseDoubleQuotedKey = parseQuotedKey DoubleQuotedKey '"' + + parseQuotedKey :: KeyType -> Char -> Text -> Parser KeyType + parseQuotedKey kt c key = do + skip (==c) + _ <- P.string key + skip (==c) + pure kt + + renderKey :: Text -> KeyType -> Text + renderKey key kt = case kt of + PlainKey -> key + SingleQuotedKey -> '\'' `T.cons` key `T.snoc` '\'' + DoubleQuotedKey -> '"' `T.cons` key `T.snoc` '"' + +-- |Type representing types of representations of keys in YAML files. +data KeyType + = PlainKey -- ^ For example: install-ghc + | SingleQuotedKey -- ^ For example: 'install-ghc' + | DoubleQuotedKey -- ^ For example: "install-ghc" + deriving (Eq, Show) + +cfgCmdSetValue :: + (HasConfig env, HasGHCVariant env) + => Path Abs Dir -- ^ root directory of project + -> ConfigCmdSet -> RIO env Yaml.Value +cfgCmdSetValue root (ConfigCmdSetSnapshot newSnapshot) = + snapshotValue root newSnapshot +cfgCmdSetValue root (ConfigCmdSetResolver newSnapshot) = + snapshotValue root newSnapshot +cfgCmdSetValue _ (ConfigCmdSetSystemGhc _ bool') = pure $ Yaml.Bool bool' +cfgCmdSetValue _ (ConfigCmdSetInstallGhc _ bool') = pure $ Yaml.Bool bool' +cfgCmdSetValue _ (ConfigCmdSetInstallMsys _ bool') = pure $ Yaml.Bool bool' +cfgCmdSetValue _ (ConfigCmdSetRecommendStackUpgrade _ bool') = + pure $ Yaml.Bool bool' +cfgCmdSetValue _ (ConfigCmdSetDownloadPrefix _ url) = pure $ Yaml.String url + +snapshotValue :: + HasConfig env + => Path Abs Dir -- ^ root directory of project + -> Unresolved AbstractSnapshot + -> RIO env Yaml.Value +snapshotValue root snapshot = do + snapshot' <- resolvePaths (Just root) 
snapshot + concreteSnapshot <- makeConcreteSnapshot snapshot' + -- Check that the snapshot actually exists + void $ loadSnapshot =<< completeSnapshotLocation concreteSnapshot + rslValue concreteSnapshot + +rslValue :: HasConfig env => RawSnapshotLocation -> RIO env Yaml.Value +rslValue (RSLCompiler compiler) = pure $ Yaml.toJSON compiler +rslValue (RSLUrl url Nothing) = pure $ Yaml.toJSON url +rslValue (RSLUrl url _) = do + -- I can't see how this would ever arise, but it is added for completeness: + prettyWarnL + [ flow "The specified snapshot value is a URL. The associated SHA256 hash \ + \and file size will be ignored." + ] + pure $ Yaml.toJSON url +rslValue (RSLFilePath rp) = pure $ Yaml.toJSON $ resolvedRelative rp +rslValue rsl = pure $ Yaml.toJSON rsl + +cfgCmdSetKeys :: ConfigCmdSet -> NonEmpty (NonEmpty Text) +cfgCmdSetKeys (ConfigCmdSetSnapshot _) = [["snapshot"], ["resolver"]] +cfgCmdSetKeys (ConfigCmdSetResolver _) = [["resolver"], ["snapshot"]] +cfgCmdSetKeys (ConfigCmdSetSystemGhc _ _) = [[configMonoidSystemGHCName]] +cfgCmdSetKeys (ConfigCmdSetInstallGhc _ _) = [[configMonoidInstallGHCName]] +cfgCmdSetKeys (ConfigCmdSetInstallMsys _ _) = [[configMonoidInstallMsysName]] +cfgCmdSetKeys (ConfigCmdSetRecommendStackUpgrade _ _) = + [[configMonoidRecommendStackUpgradeName]] +cfgCmdSetKeys (ConfigCmdSetDownloadPrefix _ _) = + [["package-index", "download-prefix"]] + +-- | Check if YAML content contains a @!include@ directive in value position. +-- This covers both inline values (e.g. @key: !include path@) and values on +-- the next line after indentation. Stack config keys do not contain spaces or +-- colons, so the first @:@ is always the value separator. 
+yamlContainsInclude :: Text -> Bool +yamlContainsInclude = + let lineContainsInclude yamlLine = + let stripped = T.stripStart yamlLine + in includeAsValue stripped || includeOnOwnLine stripped + + includeAsValue strippedLine = + let (_key, rest) = T.breakOn ":" strippedLine + in "!include" `T.isPrefixOf` T.stripStart (T.drop 1 rest) + + includeOnOwnLine strippedLine = + "!include" `T.isPrefixOf` strippedLine + in any lineContainsInclude . T.lines + +-- | The name of Stack's @config@ command. cfgCmdName :: String cfgCmdName = "config" +-- | The name of Stack's @config@ command's @set@ subcommand. cfgCmdSetName :: String cfgCmdSetName = "set" +-- | The name of Stack's @config@ command's @env@ subcommand. cfgCmdEnvName :: String cfgCmdEnvName = "env" -configCmdSetParser :: OA.Parser ConfigCmdSet -configCmdSetParser = - OA.hsubparser $ - mconcat - [ OA.command - "resolver" - (OA.info - (ConfigCmdSetResolver <$> - OA.argument - readAbstractResolver - (OA.metavar "RESOLVER" <> - OA.help "E.g. \"nightly\" or \"lts-7.2\"")) - (OA.progDesc - "Change the resolver of the current project. 
See https://docs.haskellstack.org/en/stable/yaml_configuration/#resolver for more info.")) - , OA.command - (T.unpack configMonoidSystemGHCName) - (OA.info - (ConfigCmdSetSystemGhc <$> scopeFlag <*> boolArgument) - (OA.progDesc - "Configure whether stack should use a system GHC installation or not.")) - , OA.command - (T.unpack configMonoidInstallGHCName) - (OA.info - (ConfigCmdSetInstallGhc <$> scopeFlag <*> boolArgument) - (OA.progDesc - "Configure whether stack should automatically install GHC when necessary.")) - ] - -scopeFlag :: OA.Parser CommandScope -scopeFlag = - OA.flag - CommandScopeProject - CommandScopeGlobal - (OA.long "global" <> - OA.help - "Modify the global configuration (typically at \"~/.stack/config.yaml\") instead of the project stack.yaml.") - -readBool :: OA.ReadM Bool -readBool = do - s <- OA.readerAsk - case s of - "true" -> return True - "false" -> return False - _ -> OA.readerError ("Invalid value " ++ show s ++ ": Expected \"true\" or \"false\"") - -boolArgument :: OA.Parser Bool -boolArgument = OA.argument readBool (OA.metavar "true|false" <> OA.completeWith ["true", "false"]) - -configCmdEnvParser :: OA.Parser EnvSettings -configCmdEnvParser = EnvSettings - <$> boolFlags True "locals" "include local package information" mempty - <*> boolFlags True "ghc-package-path" "set GHC_PACKAGE_PATH variable" mempty - <*> boolFlags True "stack-exe" "set STACK_EXE environment variable" mempty - <*> boolFlags False "locale-utf8" "set the GHC_CHARENC environment variable to UTF8" mempty - <*> boolFlags False "keep-ghc-rts" "keep any GHC_RTS environment variables" mempty +-- | The name of Stack's @config@ command's @build-files@ subcommand. +cfgCmdBuildFilesName :: String +cfgCmdBuildFilesName = "build-files" data EnvVarAction = EVASet !Text | EVAUnset deriving Show +-- | Function underlying Stack's @config env@ command. cfgCmdEnv :: EnvSettings -> RIO EnvConfig () cfgCmdEnv es = do origEnv <- liftIO $ Map.fromList . 
map (first fromString) <$> getEnvironment - mkPC <- view $ configL.to configProcessContextSettings + mkPC <- view $ configL . to (.processContextSettings) pc <- liftIO $ mkPC es let newEnv = pc ^. envVarsL actions = Map.merge @@ -194,4 +375,9 @@ cfgCmdEnv es = do encodeUtf8Builder key <> ";\n" escape '\'' = "'\"'\"'" escape c = T.singleton c - hPutBuilder stdout $ Map.foldMapWithKey toLine actions + putBuilder $ Map.foldMapWithKey toLine actions + +-- | This function takes no settings and yields no action of interest. It is +-- 'Stack.Config.withBuildConfig' that yields the desired actions. +cfgCmdBuildFiles :: () -> RIO BuildConfig () +cfgCmdBuildFiles () = pure () diff --git a/src/Stack/ConfigureOpts.hs b/src/Stack/ConfigureOpts.hs new file mode 100644 index 0000000000..539d12ddcc --- /dev/null +++ b/src/Stack/ConfigureOpts.hs @@ -0,0 +1,215 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.ConfigureOpts +License : BSD-3-Clause +-} + +module Stack.ConfigureOpts + ( configureOptsFromBase + , configureOptsFromDb + , renderConfigureOpts + , packageConfigureOptsFromPackage + ) where + +import qualified Data.Map as Map +import qualified Data.Text as T +import Database.Persist ( Entity, entityVal ) +import Distribution.Types.MungedPackageName + ( decodeCompatPackageName ) +import Distribution.Types.PackageName ( unPackageName ) +import Distribution.Types.UnqualComponentName + ( unUnqualComponentName ) +import GHC.Records ( HasField ) +import Path ( (), parseRelDir ) +import Path.Extra ( toFilePathNoTrailingSep ) +import Stack.Constants + ( bindirSuffix, compilerOptionsCabalFlag, docDirSuffix + , relDirEtc, relDirLib, relDirLibexec, relDirShare + ) +import Stack.Prelude +import Stack.Types.BuildOpts ( BuildOpts (..) ) +import Stack.Types.Compiler ( whichCompiler ) +import Stack.Types.Config ( Config (..), HasConfig (..) 
) +import Stack.Types.ConfigureOpts + ( BaseConfigOpts (..), ConfigureOpts (..) + , PackageConfigureOpts (..) ) +import Stack.Types.EnvConfig ( EnvConfig, actualCompilerVersionL ) +import Stack.Types.GhcPkgId ( GhcPkgId, ghcPkgIdString ) +import Stack.Types.IsMutable ( IsMutable (..) ) +import Stack.Types.Package ( Package(..), packageIdentifier ) +import System.FilePath ( pathSeparator ) + +packageConfigureOptsFromPackage :: + Package + -> PackageConfigureOpts +packageConfigureOptsFromPackage pkg = PackageConfigureOpts + { pkgCabalConfigOpts = pkg.cabalConfigOpts + , pkgGhcOptions = pkg.ghcOptions + , pkgFlags = pkg.flags + , pkgDefaultFlags = pkg.defaultFlags + , pkgIdentifier = packageIdentifier pkg + } + +configureOptsFromDb :: + ( HasField "configCacheDirOptionValue" b1 String + , HasField "configCacheNoDirOptionValue" b2 String + ) + => [Entity b1] + -> [Entity b2] + -> ConfigureOpts +configureOptsFromDb x y = ConfigureOpts + { pathRelated = map ((.configCacheDirOptionValue) . entityVal) x + , nonPathRelated = map ((.configCacheNoDirOptionValue) . entityVal) y + } + +-- | Render a @BaseConfigOpts@ to an actual list of options +configureOptsFromBase :: + EnvConfig + -> BaseConfigOpts + -> Map PackageIdentifier GhcPkgId -- ^ dependencies + -> Bool -- ^ local non-extra-dep? + -> IsMutable + -> PackageConfigureOpts + -> ConfigureOpts +configureOptsFromBase econfig bco deps isLocal isMutable pkgConfigureOpts = + ConfigureOpts + { pathRelated = configureOptsPathRelated bco isMutable pkgConfigureOpts + , nonPathRelated = + configureOptsNonPathRelated econfig bco deps isLocal pkgConfigureOpts + } + +configureOptsPathRelated :: + BaseConfigOpts + -> IsMutable + -> PackageConfigureOpts + -> [String] +configureOptsPathRelated bco isMutable pkgOpts = concat + [ ["--user", "--package-db=clear", "--package-db=global"] + , map (("--package-db=" ++) . 
toFilePathNoTrailingSep) $ case isMutable of + Immutable -> bco.extraDBs ++ [bco.snapDB] + Mutable -> bco.extraDBs ++ [bco.snapDB] ++ [bco.localDB] + , [ "--libdir=" ++ toFilePathNoTrailingSep (installRoot relDirLib) + , "--bindir=" ++ toFilePathNoTrailingSep (installRoot bindirSuffix) + , "--datadir=" ++ toFilePathNoTrailingSep (installRoot relDirShare) + , "--libexecdir=" ++ toFilePathNoTrailingSep (installRoot relDirLibexec) + , "--sysconfdir=" ++ toFilePathNoTrailingSep (installRoot relDirEtc) + , "--docdir=" ++ toFilePathNoTrailingSep docDir + , "--htmldir=" ++ toFilePathNoTrailingSep docDir + , "--haddockdir=" ++ toFilePathNoTrailingSep docDir] + ] + where + installRoot = + case isMutable of + Immutable -> bco.snapInstallRoot + Mutable -> bco.localInstallRoot + docDir = + case pkgVerDir of + Nothing -> installRoot docDirSuffix + Just dir -> installRoot docDirSuffix dir + pkgVerDir = parseRelDir + ( packageIdentifierString pkgOpts.pkgIdentifier + ++ [pathSeparator] + ) + +-- | Same as 'configureOpts', but does not include directory path options +configureOptsNonPathRelated :: + EnvConfig + -> BaseConfigOpts + -> Map PackageIdentifier GhcPkgId -- ^ Dependencies. + -> Bool -- ^ Is this a local, non-extra-dep? 
+ -> PackageConfigureOpts + -> [String] +configureOptsNonPathRelated econfig bco deps isLocal package = concat + [ depOptions + , [ "--enable-library-profiling" + | bopts.libProfile || bopts.exeProfile + ] + , ["--enable-profiling" | bopts.exeProfile && isLocal] + , ["--enable-split-objs" | bopts.splitObjs] + , [ "--disable-library-stripping" + | not $ bopts.libStrip || bopts.exeStrip + ] + , ["--disable-executable-stripping" | not bopts.exeStrip && isLocal] + , flags + , map T.unpack package.pkgCabalConfigOpts + , processGhcOptions package.pkgGhcOptions + , map ("--extra-include-dirs=" ++) config.extraIncludeDirs + , map ("--extra-lib-dirs=" ++) config.extraLibDirs + , maybe + [] + (\customGcc -> ["--with-gcc=" ++ toFilePath customGcc]) + config.overrideGccPath + , ["--exact-configuration"] + , ["--ghc-option=-fhide-source-paths" | hideSourcePaths] + ] + where + -- This function parses the GHC options that are providing in the + -- stack.yaml file. In order to handle RTS arguments correctly, we need + -- to provide the RTS arguments as a single argument. + processGhcOptions :: [Text] -> [String] + processGhcOptions args = + let (preRtsArgs, mid) = break ("+RTS" ==) args + (rtsArgs, end) = break ("-RTS" ==) mid + fullRtsArgs = + case rtsArgs of + [] -> + -- This means that we didn't have any RTS args - no `+RTS` - and + -- therefore no need for a `-RTS`. + [] + _ -> + -- In this case, we have some RTS args. `break` puts the `"-RTS"` + -- string in the `snd` list, so we want to append it on the end of + -- `rtsArgs` here. + -- + -- We're not checking that `-RTS` is the first element of `end`. + -- This is because the GHC RTS allows you to omit a trailing -RTS + -- if that's the last of the arguments. This permits a GHC options + -- in stack.yaml that matches what you might pass directly to GHC. 
+ [T.unwords $ rtsArgs ++ ["-RTS"]] + -- We drop the first element from `end`, because it is always either + -- `"-RTS"` (and we don't want that as a separate argument) or the list + -- is empty (and `drop _ [] = []`). + postRtsArgs = drop 1 end + newArgs = concat [preRtsArgs, fullRtsArgs, postRtsArgs] + in concatMap (\x -> [compilerOptionsCabalFlag wc, T.unpack x]) newArgs + + wc = view (actualCompilerVersionL . to whichCompiler) econfig + + hideSourcePaths = config.hideSourcePaths + + config = view configL econfig + bopts = bco.buildOpts + mapAndAppend fn = Map.foldrWithKey' (fmap (:) . fn) + -- Unioning atop defaults is needed so that all flags are specified with + -- --exact-configuration. + flags = mapAndAppend + renderFlags + [] + (package.pkgFlags `Map.union` package.pkgDefaultFlags) + renderFlags name enabled = + "-f" + <> (if enabled then "" else "-") + <> flagNameString name + + depOptions = mapAndAppend toDepOption [] deps + + toDepOption (PackageIdentifier name _) gid = concat + [ "--dependency=" + , depOptionKey + , "=" + , ghcPkgIdString gid + ] + where + MungedPackageName subPkgName lib = decodeCompatPackageName name + depOptionKey = case lib of + LMainLibName -> unPackageName name + LSubLibName cn -> + unPackageName subPkgName <> ":" <> unUnqualComponentName cn + +-- | Render configure options as a single list of options. +renderConfigureOpts :: ConfigureOpts -> [String] +renderConfigureOpts copts = copts.pathRelated ++ copts.nonPathRelated diff --git a/src/Stack/Constants.hs b/src/Stack/Constants.hs index 13fe9d40ce..6ddfea5b19 100644 --- a/src/Stack/Constants.hs +++ b/src/Stack/Constants.hs @@ -1,155 +1,210 @@ -{-# LANGUAGE CPP #-} {-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TemplateHaskell #-} -- keep TH usage here +{-# LANGUAGE TemplateHaskell #-} -- keep TH usage here --- | Constants used throughout the project. 
+{-| +Module : Stack.Constants +Description : Constants used throughout the project. +License : BSD-3-Clause + +Constants used throughout the project. +-} module Stack.Constants - (buildPlanDir - ,buildPlanCacheDir - ,haskellFileExts - ,haskellPreprocessorExts - ,stackDotYaml - ,stackWorkEnvVar - ,stackRootEnvVar - ,stackRootOptionName - ,deprecatedStackRootOptionName - ,inContainerEnvVar - ,inNixShellEnvVar - ,stackProgNameUpper - ,wiredInPackages - ,cabalPackageName - ,implicitGlobalProjectDirDeprecated - ,implicitGlobalProjectDir - ,defaultUserConfigPathDeprecated - ,defaultUserConfigPath - ,defaultGlobalConfigPathDeprecated - ,defaultGlobalConfigPath - ,platformVariantEnvVar - ,compilerOptionsCabalFlag - ,ghcColorForceFlag - ,minTerminalWidth - ,maxTerminalWidth - ,defaultTerminalWidth - ,osIsWindows - ,relFileSetupHs - ,relFileSetupLhs - ,relFileHpackPackageConfig - ,relDirGlobalAutogen - ,relDirAutogen - ,relDirLogs - ,relFileCabalMacrosH - ,relDirBuild - ,relDirBin - ,relDirPantry - ,relDirPrograms - ,relDirUpperPrograms - ,relDirStackProgName - ,relDirStackWork - ,relFileReadmeTxt - ,relDirScript - ,relFileConfigYaml - ,relDirSnapshots - ,relDirGlobalHints - ,relFileGlobalHintsYaml - ,relDirInstall - ,relDirCompilerTools - ,relDirHoogle - ,relFileDatabaseHoo - ,relDirPkgdb - ,relFileStorage - ,relDirLoadedSnapshotCache - ,bindirSuffix - ,docDirSuffix - ,relDirHpc - ,relDirLib - ,relDirShare - ,relDirLibexec - ,relDirEtc - ,setupGhciShimCode - ,relDirSetupExeCache - ,relDirSetupExeSrc - ,relFileConfigure - ,relDirDist - ,relFileSetupMacrosH - ,relDirSetup - ,relFileSetupLower - ,relDirMingw - ,relDirMingw32 - ,relDirMingw64 - ,relDirLocal - ,relDirUsr - ,relDirInclude - ,relFileIndexHtml - ,relDirAll - ,relFilePackageCache - ,relFileDockerfile - ,relDirHaskellStackGhci - ,relFileGhciScript - ,relDirCombined - ,relFileHpcIndexHtml - ,relDirCustom - ,relDirPackageConfInplace - ,relDirExtraTixFiles - ,relDirInstalledPackages - ,backupUrlRelPath - 
,relDirDotLocal - ,relDirDotSsh - ,relDirDotStackProgName - ,relDirUnderHome - ,relDirSrc - ,relFileLibtinfoSo5 - ,relFileLibtinfoSo6 - ,relFileLibncurseswSo6 - ,relFileLibgmpSo10 - ,relFileLibgmpSo3 - ,relDirNewCabal - ,relFileSetupExe - ,relFileSetupUpper - ,relFile7zexe - ,relFile7zdll - ,relFileMainHs - ,relFileStack - ,relFileStackDotExe - ,relFileStackDotTmpDotExe - ,relFileStackDotTmp - ,ghcShowOptionsOutput - ,hadrianCmdWindows - ,hadrianCmdPosix - ,usrLibDirs - ,testGhcEnvRelFile - ,relFileBuildLock - ,stackDeveloperModeDefault - ) - where - -import Data.ByteString.Builder (byteString) -import Data.Char (toUpper) -import Data.FileEmbed (embedFile, makeRelativeToProject) + ( buildPlanDir + , buildPlanCacheDir + , haskellFileExts + , haskellDefaultPreprocessorExts + , stackProgName + , stackProgName' + , nixProgName + , stackDotYaml + , stackWorkEnvVar + , stackRootEnvVar + , stackXdgEnvVar + , stackRootOptionName + , stackGlobalConfigOptionName + , pantryRootEnvVar + , inContainerEnvVar + , inNixShellEnvVar + , stackProgNameUpper + , wiredInPackages + , allWiredInPackages + , cabalPackageName + , implicitGlobalProjectDir + , defaultUserConfigPath + , defaultGlobalConfigPath + , platformVariantEnvVar + , compilerOptionsCabalFlag + , ghcColorForceFlag + , minTerminalWidth + , maxTerminalWidth + , defaultTerminalWidth + , osIsMacOS + , osIsWindows + , relFileSetupHs + , relFileSetupLhs + , relFileHpackPackageConfig + , relDirGlobalAutogen + , relDirAutogen + , relDirLogs + , relFileCabalMacrosH + , relDirBuild + , relDirBin + , relDirGhci + , relDirGhciScript + , relDirPantry + , relDirPrograms + , relDirRoot + , relDirUpperPrograms + , relDirStackProgName + , relDirStackWork + , relFileReadmeTxt + , relDirScript + , relDirScripts + , relFileConfigYaml + , relDirSnapshots + , relDirGlobalHints + , relFileGlobalHintsYaml + , relDirInstall + , relDirCompilerTools + , relDirHoogle + , relFileDatabaseHoo + , relDirPkgdb + , relFileStorage + , 
relDirLoadedSnapshotCache + , bindirSuffix + , docDirSuffix + , htmlDirSuffix + , relDirHpc + , relDirLib + , relDirShare + , relDirLibexec + , relDirEtc + , setupGhciShimCode + , relDirSetupExeCache + , relDirSetupExeSrc + , relFileConfigure + , relDirDist + , relFileSetupMacrosH + , relDirSetup + , relFileSetupLower + , relDirMingw + , relDirMingw32 + , relDirMingw64 + , relDirClang32 + , relDirClang64 + , relDirClangArm64 + , relDirUcrt64 + , relDirLocal + , relDirUsr + , relDirInclude + , relFileIndexHtml + , relDirAll + , relFilePackageCache + , relFileDockerfile + , relFileGhciScript + , relDirCombined + , relFileHpcIndexHtml + , relDirCustom + , relDirPackageConfInplace + , relDirExtraTixFiles + , relDirInstalledPackages + , backupUrlRelPath + , relDirDotLocal + , relDirDotSsh + , relDirDotStackProgName + , relDirUnderHome + , relDirSrc + , relFileLibcMuslx86_64So1 + , relFileLibtinfoSo5 + , relFileLibtinfoSo6 + , relFileLibncurseswSo6 + , relFileLibgmpSo10 + , relFileLibgmpSo3 + , relDirNewCabal + , relFileSetupExe + , relFileSetupUpper + , relFile7zexe + , relFile7zdll + , relFileMainHs + , relFileStack + , relFileStackDotExe + , relFileStackDotTmpDotExe + , relFileStackDotTmp + , ghcShowOptionsOutput + , ghcBootScript + , ghcConfigureScript + , ghcConfigureWindows + , ghcConfigureMacOS + , ghcConfigurePosix + , relDirHadrian + , relFileHadrianStackDotYaml + , hadrianScriptsWindows + , hadrianScriptsPosix + , libDirs + , usrLibDirs + , testGhcEnvRelFile + , relFileBuildLock + , stackDeveloperModeDefault + , isStackUploadDisabled + , globalFooter + , gitHubBasicAuthType + , gitHubTokenEnvVar + , altGitHubTokenEnvVar + , hackageBaseUrl + ) where + +import Data.ByteString.Builder ( byteString ) +import Data.Char ( toUpper ) +import Data.FileEmbed ( embedFile, makeRelativeToProject ) import qualified Data.Set as Set -import Distribution.Package (mkPackageName) -import qualified Hpack.Config as Hpack -import qualified Language.Haskell.TH.Syntax as TH (runIO, 
lift) -import Path as FL +import qualified Data.Text as T +import Distribution.Package ( mkPackageName ) +import Distribution.Version ( mkVersion ) +import Hpack.Config ( packageConfig ) +import qualified Language.Haskell.TH.Syntax as TH ( runIO, lift ) +import Path ( (), mkRelDir, mkRelFile, parseAbsFile ) +import Stack.Constants.StackProgName ( stackProgName ) +import Stack.Constants.UsrLibDirs ( libDirs, usrLibDirs ) import Stack.Prelude import Stack.Types.Compiler -import System.Permissions (osIsWindows) -import System.Process (readProcess) + ( ActualCompiler (..), WhichCompiler (..) ) +import System.Permissions ( osIsMacOS, osIsWindows ) +import System.Process ( readProcess ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Constants" module. +data ConstantsException + = WiredInPackagesNotParsedBug + deriving Show + +instance Exception ConstantsException where + displayException WiredInPackagesNotParsedBug = bugReport "[S-6057]" + "Parse error in wiredInPackages." + +-- | Name of the Stack program. +stackProgName' :: Text +stackProgName' = T.pack stackProgName + +-- | Name of the Nix package manager command +nixProgName :: String +nixProgName = "nix" -- | Extensions used for Haskell modules. Excludes preprocessor ones. haskellFileExts :: [Text] haskellFileExts = ["hs", "hsc", "lhs"] -- | Extensions for modules that are preprocessed by common preprocessors. -haskellPreprocessorExts :: [Text] -haskellPreprocessorExts = ["gc", "chs", "hsc", "x", "y", "ly", "cpphs"] +haskellDefaultPreprocessorExts :: [Text] +haskellDefaultPreprocessorExts = ["gc", "chs", "hsc", "x", "y", "ly", "cpphs"] --- | Name of the 'stack' program, uppercased +-- | Name of the \'stack\' program, uppercased stackProgNameUpper :: String stackProgNameUpper = map toUpper stackProgName --- | The filename used for the stack config file. +-- | The filename used for the Stack project-level configuration file. 
stackDotYaml :: Path Rel File stackDotYaml = $(mkRelFile "stack.yaml") @@ -161,95 +216,135 @@ stackWorkEnvVar = "STACK_WORK" stackRootEnvVar :: String stackRootEnvVar = "STACK_ROOT" --- | Option name for the global stack root. +-- | Environment variable used to indicate XDG directories should be used. +stackXdgEnvVar :: String +stackXdgEnvVar = "STACK_XDG" + +-- | Option name for the global Stack root. stackRootOptionName :: String stackRootOptionName = "stack-root" --- | Deprecated option name for the global stack root. --- --- Deprecated since stack-1.1.0. --- --- TODO: Remove occurrences of this variable and use 'stackRootOptionName' only --- after an appropriate deprecation period. -deprecatedStackRootOptionName :: String -deprecatedStackRootOptionName = "global-stack-root" +-- | Option name for the global Stack configuration file. +stackGlobalConfigOptionName :: String +stackGlobalConfigOptionName = "global-config" + +-- | Environment variable used to override the location of the Pantry store +pantryRootEnvVar :: String +pantryRootEnvVar = "PANTRY_ROOT" --- | Environment variable used to indicate stack is running in container. +-- | Environment variable used to indicate Stack is running in container. inContainerEnvVar :: String inContainerEnvVar = stackProgNameUpper ++ "_IN_CONTAINER" --- | Environment variable used to indicate stack is running in container. +-- | Environment variable used to indicate Stack is running in container. -- although we already have STACK_IN_NIX_EXTRA_ARGS that is set in the same conditions, -- it can happen that STACK_IN_NIX_EXTRA_ARGS is set to empty. 
inNixShellEnvVar :: String inNixShellEnvVar = map toUpper stackProgName ++ "_IN_NIX_SHELL" --- See https://downloads.haskell.org/~ghc/7.10.1/docs/html/libraries/ghc/src/Module.html#integerPackageKey -wiredInPackages :: Set PackageName -wiredInPackages = - maybe (error "Parse error in wiredInPackages") Set.fromList mparsed - where - mparsed = mapM parsePackageName - [ "ghc-prim" - , "integer-gmp" - , "integer-simple" - , "base" - , "rts" - , "template-haskell" - , "dph-seq" - , "dph-par" - , "ghc" - , "interactive" - ] +-- | The comment to \'see +-- https://downloads.haskell.org/~ghc/7.10.1/docs/html/libraries/ghc/src/Module.html#integerPackageKey\' +-- appears to be out of date. +-- +-- See \'Note [About units]\' and \'Wired-in units\' at +-- https://gitlab.haskell.org/ghc/ghc/-/blob/master/compiler/GHC/Unit.hs. +-- +-- The \'wired-in packages\' appear to have been replaced by those that have (e.g) +-- +-- > ghc-options: -this-unit-id ghc-prim +-- +-- in their Cabal file because they are \'magic\'. +-- +-- Cabal (the tool) also treats certain packages as non-reinstallable. See +-- @Distribution.Client.Dependency.nonReinstallablePackages@. +wiredInPackages :: + ActualCompiler + -- ^ The actual compiler being used. (Not yet implemented.) + -> Set PackageName +-- When building the GHC compiler from source, we know nothing about its +-- wired-in packages. We take a cautious approach. +wiredInPackages (ACGhcGit _ _) = allWiredInPackages +wiredInPackages (ACGhc ghcVersion) = case mparsed of + Just parsed -> Set.fromList parsed + Nothing -> impureThrow WiredInPackagesNotParsedBug + where + mparsed = mapM parsePackageName $ + [ "rts" + -- Said to be not a \'real\' package. + , "ghc" + -- A magic package. + , "ghc-prim" + -- A magic package. + , "integer-simple" + -- A magic package. + , "interactive" + -- Type and class declarations at the GHCi command prompt are treated + -- as if they were defined in modules all sharing a common package + -- interactive. 
See 'Note [The interactive package]' at + -- https://gitlab.haskell.org/ghc/ghc/-/blob/master/compiler/GHC/Runtime/Context.hs + -- With GHC 9.10.3 at least, there seems to be no problem in using it. + ] + <> [ "base" | ghcVersion < mkVersion [9, 12] ] + -- A formerly magic package. + <> [ "ghc-bignum" | ghcVersion >= mkVersion [9, 0, 1] ] + -- A magic package from GHC 9.0.1. + <> [ "ghc-internal" | ghcVersion >= mkVersion [9, 10, 1] ] + -- A magic package from GHC 9.10.1. + <> [ "integer-gmp" | ghcVersion < mkVersion [9, 0] ] + -- No longer magic > 1.0.3.0 (GHC >= 9.0) and deprecated in favour of + -- ghc-bignum. + <> [ "template-haskell" | ghcVersion < mkVersion [9, 12] ] + -- No longer magic > 2.22.0.0 (GHC >= 9.12). + +-- | A set of all package names that have been GHC wired-in packages for +-- versions of GHC supported by Stack. +allWiredInPackages :: Set PackageName +allWiredInPackages = Set.unions $ map wiredInPackages supportedCompilers + where + supportedCompilers = map (ACGhc . mkVersion) + [ [8, 4, 4] + , [8, 6, 5] + , [8, 8, 4] + , [8, 10, 7] + , [9, 0, 2] + , [9, 2, 8] + , [9, 4, 8] + , [9, 6, 7] + , [9, 8, 4] + , [9, 10, 2] + , [9, 12, 2] + ] -- | Just to avoid repetition and magic strings. cabalPackageName :: PackageName cabalPackageName = mkPackageName "Cabal" --- | Deprecated implicit global project directory used when outside of a project. -implicitGlobalProjectDirDeprecated :: Path Abs Dir -- ^ Stack root. - -> Path Abs Dir -implicitGlobalProjectDirDeprecated p = - p - $(mkRelDir "global") - -- | Implicit global project directory used when outside of a project. -- Normally, @getImplicitGlobalProjectDir@ should be used instead. -implicitGlobalProjectDir :: Path Abs Dir -- ^ Stack root. - -> Path Abs Dir -implicitGlobalProjectDir p = - p - $(mkRelDir "global-project") - --- | Deprecated default global config path. 
-defaultUserConfigPathDeprecated :: Path Abs Dir -> Path Abs File -defaultUserConfigPathDeprecated = ( $(mkRelFile "stack.yaml")) - --- | Default global config path. --- Normally, @getDefaultUserConfigPath@ should be used instead. +implicitGlobalProjectDir :: + Path Abs Dir -- ^ Stack root. + -> Path Abs Dir +implicitGlobalProjectDir p = p $(mkRelDir "global-project") + +-- | Default user global configuration path. Normally, +-- @getDefaultUserConfigPath@ should be used instead. defaultUserConfigPath :: Path Abs Dir -> Path Abs File defaultUserConfigPath = ( $(mkRelFile "config.yaml")) --- | Deprecated default global config path. --- Note that this will be @Nothing@ on Windows, which is by design. -defaultGlobalConfigPathDeprecated :: Maybe (Path Abs File) -defaultGlobalConfigPathDeprecated = parseAbsFile "/etc/stack/config" - --- | Default global config path. --- Normally, @getDefaultGlobalConfigPath@ should be used instead. --- Note that this will be @Nothing@ on Windows, which is by design. +-- | Default global config path. On Windows, by design, this will be @Nothing@. defaultGlobalConfigPath :: Maybe (Path Abs File) defaultGlobalConfigPath = parseAbsFile "/etc/stack/config.yaml" -- | Path where build plans are stored. -buildPlanDir :: Path Abs Dir -- ^ Stack root - -> Path Abs Dir +buildPlanDir :: + Path Abs Dir -- ^ Stack root + -> Path Abs Dir buildPlanDir = ( $(mkRelDir "build-plan")) -- | Path where binary caches of the build plans are stored. 
-buildPlanCacheDir - :: Path Abs Dir -- ^ Stack root +buildPlanCacheDir :: + Path Abs Dir -- ^ Stack root -> Path Abs Dir buildPlanCacheDir = ( $(mkRelDir "build-plan-cache")) @@ -287,7 +382,7 @@ relFileSetupLhs :: Path Rel File relFileSetupLhs = $(mkRelFile "Setup.lhs") relFileHpackPackageConfig :: Path Rel File -relFileHpackPackageConfig = $(mkRelFile Hpack.packageConfig) +relFileHpackPackageConfig = $(mkRelFile packageConfig) relDirGlobalAutogen :: Path Rel Dir relDirGlobalAutogen = $(mkRelDir "global-autogen") @@ -307,12 +402,21 @@ relDirBuild = $(mkRelDir "build") relDirBin :: Path Rel Dir relDirBin = $(mkRelDir "bin") +relDirGhci :: Path Rel Dir +relDirGhci = $(mkRelDir "ghci") + +relDirGhciScript :: Path Rel Dir +relDirGhciScript = $(mkRelDir "ghci-script") + relDirPantry :: Path Rel Dir relDirPantry = $(mkRelDir "pantry") relDirPrograms :: Path Rel Dir relDirPrograms = $(mkRelDir "programs") +relDirRoot :: Path Rel Dir +relDirRoot = $(mkRelDir ".") + relDirUpperPrograms :: Path Rel Dir relDirUpperPrograms = $(mkRelDir "Programs") @@ -328,6 +432,9 @@ relFileReadmeTxt = $(mkRelFile "README.txt") relDirScript :: Path Rel Dir relDirScript = $(mkRelDir "script") +relDirScripts :: Path Rel Dir +relDirScripts = $(mkRelDir "scripts") + relFileConfigYaml :: Path Rel File relFileConfigYaml = $(mkRelFile "config.yaml") @@ -369,6 +476,10 @@ bindirSuffix = relDirBin docDirSuffix :: Path Rel Dir docDirSuffix = $(mkRelDir "doc") +-- | Suffix applied to a path to get the @html@ directory. 
+htmlDirSuffix :: Path Rel Dir +htmlDirSuffix = $(mkRelDir "html") + relDirHpc :: Path Rel Dir relDirHpc = $(mkRelDir "hpc") @@ -419,6 +530,18 @@ relDirMingw32 = $(mkRelDir "mingw32") relDirMingw64 :: Path Rel Dir relDirMingw64 = $(mkRelDir "mingw64") +relDirClang32 :: Path Rel Dir +relDirClang32 = $(mkRelDir "clang32") + +relDirClang64 :: Path Rel Dir +relDirClang64 = $(mkRelDir "clang64") + +relDirClangArm64 :: Path Rel Dir +relDirClangArm64 = $(mkRelDir "clangarm64") + +relDirUcrt64 :: Path Rel Dir +relDirUcrt64 = $(mkRelDir "ucrt64") + relDirLocal :: Path Rel Dir relDirLocal = $(mkRelDir "local") @@ -440,9 +563,6 @@ relFilePackageCache = $(mkRelFile "package.cache") relFileDockerfile :: Path Rel File relFileDockerfile = $(mkRelFile "Dockerfile") -relDirHaskellStackGhci :: Path Rel Dir -relDirHaskellStackGhci = $(mkRelDir "haskell-stack-ghci") - relFileGhciScript :: Path Rel File relFileGhciScript = $(mkRelFile "ghci-script") @@ -482,6 +602,9 @@ relDirUnderHome = $(mkRelDir "_home") relDirSrc :: Path Rel Dir relDirSrc = $(mkRelDir "src") +relFileLibcMuslx86_64So1 :: Path Rel File +relFileLibcMuslx86_64So1 = $(mkRelFile "libc.musl-x86_64.so.1") + relFileLibtinfoSo5 :: Path Rel File relFileLibtinfoSo5 = $(mkRelFile "libtinfo.so.5") @@ -533,21 +656,48 @@ ghcShowOptionsOutput :: [String] ghcShowOptionsOutput = $(TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift . 
lines) --- | Relative path inside a GHC repo to the Hadrian build batch script -hadrianCmdWindows :: Path Rel File -hadrianCmdWindows = $(mkRelFile "hadrian/build.stack.bat") - --- | Relative path inside a GHC repo to the Hadrian build shell script -hadrianCmdPosix :: Path Rel File -hadrianCmdPosix = $(mkRelFile "hadrian/build.stack.sh") - --- | Used in Stack.Setup for detecting libtinfo, see comments at use site -usrLibDirs :: [Path Abs Dir] -#if WINDOWS -usrLibDirs = [] -#else -usrLibDirs = [$(mkAbsDir "/usr/lib"),$(mkAbsDir "/usr/lib64")] -#endif +-- | Relative paths inside a GHC repo to the boot script. +ghcBootScript :: Path Rel File +ghcBootScript = $(mkRelFile "boot") + +-- | Relative paths inside a GHC repo to the configure script. +ghcConfigureScript :: Path Rel File +ghcConfigureScript = $(mkRelFile "configure") + +-- | Command applicable to GHC's configure script on Windows. See: +-- https://gitlab.haskell.org/ghc/ghc/-/blob/master/hadrian/README.md +ghcConfigureWindows :: [String] +ghcConfigureWindows = ["sh", "configure", "--enable-tarballs-autodownload"] + +-- | Command applicable to GHC's configure script on macOS. See: +-- https://gitlab.haskell.org/ghc/ghc/-/blob/master/hadrian/README.md +ghcConfigureMacOS :: [String] +ghcConfigureMacOS = ["./configure", "--with-intree-gmp"] + +-- | Command applicable to GHC's configure script on non-Windows, non-macOS. +-- See: https://gitlab.haskell.org/ghc/ghc/-/blob/master/hadrian/README.md +ghcConfigurePosix :: [String] +ghcConfigurePosix = ["./configure"] + +relDirHadrian :: Path Rel Dir +relDirHadrian = $(mkRelDir "hadrian") + +relFileHadrianStackDotYaml :: Path Rel File +relFileHadrianStackDotYaml = relDirHadrian stackDotYaml + +-- | Relative paths inside a GHC repo to the Hadrian build batch script. +-- The second path is maintained for compatibility with older GHC versions. 
+hadrianScriptsWindows :: [Path Rel File] +hadrianScriptsWindows = + [ $(mkRelFile "hadrian/build-stack.bat") + , $(mkRelFile "hadrian/build.stack.bat") + ] + +-- | Relative paths inside a GHC repo to the Hadrian build shell script +-- The second path is maintained for compatibility with older GHC versions. +hadrianScriptsPosix :: [Path Rel File] +hadrianScriptsPosix = + [$(mkRelFile "hadrian/build-stack"), $(mkRelFile "hadrian/build.stack.sh")] -- | Relative file path for a temporary GHC environment file for tests testGhcEnvRelFile :: Path Rel File @@ -560,3 +710,30 @@ relFileBuildLock = $(mkRelFile "build-lock") -- | What should the default be for stack-developer-mode stackDeveloperModeDefault :: Bool stackDeveloperModeDefault = STACK_DEVELOPER_MODE_DEFAULT + +-- | What should the default be for stack-developer-mode +isStackUploadDisabled :: Bool +isStackUploadDisabled = STACK_DISABLE_STACK_UPLOAD + +-- | The footer to the help for Stack's subcommands +globalFooter :: String +globalFooter = + "Command 'stack --help' (or '-h') for global options that apply to all " + <> "subcommands." + +-- | The type for GitHub REST API HTTP \'Basic\' authentication. +gitHubBasicAuthType :: ByteString +gitHubBasicAuthType = "Bearer" + +-- | Environment variable to hold credentials for GitHub REST API HTTP \'Basic\' +-- authentication. +gitHubTokenEnvVar :: String +gitHubTokenEnvVar = "GH_TOKEN" + +-- | Alternate environment variable to hold credentials for GitHub REST API HTTP +-- \'Basic\' authentication. 
+altGitHubTokenEnvVar :: String +altGitHubTokenEnvVar = "GITHUB_TOKEN" + +hackageBaseUrl :: Text +hackageBaseUrl = "https://hackage.haskell.org/" diff --git a/src/Stack/Constants/Config.hs b/src/Stack/Constants/Config.hs index 1c1f8a3418..0140c1b8ce 100644 --- a/src/Stack/Constants/Config.hs +++ b/src/Stack/Constants/Config.hs @@ -1,141 +1,173 @@ {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TemplateHaskell #-} + +{-| +Module : Stack.Constants.Config +License : BSD-3-Clause +-} + module Stack.Constants.Config - ( distDirFromDir - , rootDistDirFromDir - , setupConfigFromDir - , workDirFromDir - , distRelativeDir - , imageStagingDir - , projectDockerSandboxDir + ( buildCachesDir , configCabalMod + , configPackageProjectRoot , configSetupConfigMod - , buildCachesDir - , testSuccessFile - , testBuiltFile - , hpcRelativeDir + , distDirFromDir + , distRelativeDir + , ghciDirL , hpcDirFromDir + , hpcRelativeDir + , imageStagingDir , objectInterfaceDirL - , ghciDirL + , projectDockerSandboxDir + , rootDistDirFromDir + , setupConfigFromDir , templatesDir + , testBuiltFile + , testSuccessFile + , workDirFromDir ) where -import Stack.Prelude -import Stack.Constants -import Stack.Types.Config -import Path +import Path ( (), mkRelDir, mkRelFile, parseRelDir ) +import Stack.Constants ( relDirDist, relDirGhci, relDirHpc ) +import Stack.Prelude +import Stack.Types.BuildConfig ( HasBuildConfig, configFileRootL ) +import Stack.Types.Compiler ( compilerVersionString ) +import Stack.Types.CompilerPaths ( compilerVersionL ) +import Stack.Types.Config ( Config, HasConfig, stackRootL, workDirL ) +import Stack.Types.EnvConfig + ( HasEnvConfig, platformGhcRelDir, useShaPathOnWindows ) -- | Output .o/.hi directory. objectInterfaceDirL :: HasBuildConfig env => Getting r env (Path Abs Dir) -objectInterfaceDirL = to $ \env -> -- FIXME is this idomatic lens code? 
+objectInterfaceDirL = to $ \env -> -- FIXME is this idiomatic lens code? let workDir = view workDirL env - root = view projectRootL env - in root workDir $(mkRelDir "odir/") + configFileRoot = view configFileRootL env + in configFileRoot workDir $(mkRelDir "odir/") -- | GHCi files directory. ghciDirL :: HasBuildConfig env => Getting r env (Path Abs Dir) -ghciDirL = to $ \env -> -- FIXME is this idomatic lens code? +ghciDirL = to $ \env -> -- FIXME is this idiomatic lens code? let workDir = view workDirL env - root = view projectRootL env - in root workDir $(mkRelDir "ghci/") - --- | The directory containing the files used for dirtiness check of source files. -buildCachesDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) - => Path Abs Dir -- ^ Package directory. - -> m (Path Abs Dir) + configFileRoot = view configFileRootL env + in configFileRoot workDir relDirGhci + +-- | The directory containing the files used for dirtiness check of source +-- files. +buildCachesDir :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => Path Abs Dir -- ^ Package directory. + -> m (Path Abs Dir) buildCachesDir dir = - liftM - ( $(mkRelDir "stack-build-caches")) - (distDirFromDir dir) - --- | The filename used to mark tests as having succeeded -testSuccessFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env) - => Path Abs Dir -- ^ Package directory - -> m (Path Abs File) + fmap + ( $(mkRelDir "stack-build-caches")) + (distDirFromDir dir) + +-- | The filename used to mark tests as having succeeded. 
+testSuccessFile :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => Path Abs Dir -- ^ Package directory + -> m (Path Abs File) testSuccessFile dir = - liftM - ( $(mkRelFile "stack-test-success")) - (distDirFromDir dir) - --- | The filename used to mark tests as having built -testBuiltFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env) - => Path Abs Dir -- ^ Package directory - -> m (Path Abs File) + fmap + ( $(mkRelFile "stack-test-success")) + (distDirFromDir dir) + +-- | The filename used to mark tests as having built. +testBuiltFile :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => Path Abs Dir -- ^ Package directory + -> m (Path Abs File) testBuiltFile dir = - liftM - ( $(mkRelFile "stack-test-built")) - (distDirFromDir dir) - --- | The filename used for modification check of .cabal -configCabalMod :: (MonadThrow m, MonadReader env m, HasEnvConfig env) - => Path Abs Dir -- ^ Package directory. - -> m (Path Abs File) + fmap + ( $(mkRelFile "stack-test-built")) + (distDirFromDir dir) + +-- | The filename used for modification check of a Cabal file. +configCabalMod :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => Path Abs Dir -- ^ Package directory. + -> m (Path Abs File) configCabalMod dir = - liftM - ( $(mkRelFile "stack-cabal-mod")) - (distDirFromDir dir) - --- | The filename used for modification check of setup-config -configSetupConfigMod :: (MonadThrow m, MonadReader env m, HasEnvConfig env) - => Path Abs Dir -- ^ Package directory. - -> m (Path Abs File) + fmap + ( $(mkRelFile "stack-cabal-mod")) + (distDirFromDir dir) + +-- | The filename used for modification check of setup-config. +configSetupConfigMod :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => Path Abs Dir -- ^ Package directory. 
+ -> m (Path Abs File) configSetupConfigMod dir = - liftM - ( $(mkRelFile "stack-setup-config-mod")) - (distDirFromDir dir) + fmap + ( $(mkRelFile "stack-setup-config-mod")) + (distDirFromDir dir) + +-- | The filename used for the project root from the last build of a package. +configPackageProjectRoot :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => Path Abs Dir -- ^ Package directory. + -> m (Path Abs File) +configPackageProjectRoot dir = + fmap + ( $(mkRelFile "stack-project-root")) + (distDirFromDir dir) -- | Directory for HPC work. -hpcDirFromDir - :: (MonadThrow m, MonadReader env m, HasEnvConfig env) - => Path Abs Dir -- ^ Package directory. - -> m (Path Abs Dir) +hpcDirFromDir :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => Path Abs Dir -- ^ Package directory. + -> m (Path Abs Dir) hpcDirFromDir fp = - liftM (fp ) hpcRelativeDir + fmap (fp ) hpcRelativeDir -- | Relative location of directory for HPC work. -hpcRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) - => m (Path Rel Dir) +hpcRelativeDir :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => m (Path Rel Dir) hpcRelativeDir = - liftM ( $(mkRelDir "hpc")) distRelativeDir + fmap ( relDirHpc) distRelativeDir --- | Package's setup-config storing Cabal configuration -setupConfigFromDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) - => Path Abs Dir - -> m (Path Abs File) +-- | Package's setup-config storing Cabal configuration. +setupConfigFromDir :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => Path Abs Dir + -> m (Path Abs File) setupConfigFromDir fp = do - dist <- distDirFromDir fp - return $ dist $(mkRelFile "setup-config") + dist <- distDirFromDir fp + pure $ dist $(mkRelFile "setup-config") -- | Package's build artifacts directory. 
-distDirFromDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) - => Path Abs Dir - -> m (Path Abs Dir) +distDirFromDir :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => Path Abs Dir + -> m (Path Abs Dir) distDirFromDir fp = - liftM (fp ) distRelativeDir + fmap (fp ) distRelativeDir -- | The directory containing all dist directories, including all --- different GHC/Cabal combos. -rootDistDirFromDir - :: (MonadReader env m, HasConfig env) +-- different platform/compiler combinations. +rootDistDirFromDir :: + (HasConfig env, MonadReader env m) => Path Abs Dir -> m (Path Abs Dir) rootDistDirFromDir fp = - liftM (fp ) rootDistRelativeDir + fmap (fp ) rootDistRelativeDir -- | Relative directory to the top dist directory, containing --- individual GHC/Cabal combo as subdirs. -rootDistRelativeDir - :: (MonadReader env m, HasConfig env) +-- individual platform/compiler combinations as subdirs. +rootDistRelativeDir :: + (HasConfig env, MonadReader env m) => m (Path Rel Dir) rootDistRelativeDir = do - workDir <- view workDirL - return $ workDir $(mkRelDir "dist") + workDir <- view workDirL + pure $ workDir relDirDist -- | Package's working directory. -workDirFromDir :: (MonadReader env m, HasConfig env) - => Path Abs Dir - -> m (Path Abs Dir) +workDirFromDir :: + (HasConfig env, MonadReader env m) + => Path Abs Dir + -> m (Path Abs Dir) workDirFromDir fp = view $ workDirL.to (fp ) -- | Directory for project templates. @@ -143,34 +175,34 @@ templatesDir :: Config -> Path Abs Dir templatesDir config = view stackRootL config $(mkRelDir "templates") -- | Relative location of build artifacts. 
-distRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) - => m (Path Rel Dir) +distRelativeDir :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => m (Path Rel Dir) distRelativeDir = do - cabalPkgVer <- view cabalVersionL - platform <- platformGhcRelDir - -- Cabal version - envDir <- - parseRelDir $ - packageIdentifierString $ - PackageIdentifier cabalPackageName cabalPkgVer - platformAndCabal <- useShaPathOnWindows (platform envDir) - allDist <- rootDistRelativeDir - return $ allDist platformAndCabal + compilerVer <- view compilerVersionL + platform <- platformGhcRelDir + -- Compiler version: allows build artefacts to be distinguished by compiler + -- version, which will also distinguish one Cabal version from another. + compilerDir <- parseRelDir $ compilerVersionString compilerVer + platformAndCompiler <- useShaPathOnWindows (platform compilerDir) + allDist <- rootDistRelativeDir + pure $ allDist platformAndCompiler -- | Docker sandbox from project root. -projectDockerSandboxDir :: (MonadReader env m, HasConfig env) +projectDockerSandboxDir :: (HasConfig env, MonadReader env m) => Path Abs Dir -- ^ Project root -> m (Path Abs Dir) -- ^ Docker sandbox projectDockerSandboxDir projectRoot = do workDir <- view workDirL - return $ projectRoot workDir $(mkRelDir "docker/") + pure $ projectRoot workDir $(mkRelDir "docker/") -- | Image staging dir from project root. 
-imageStagingDir :: (MonadReader env m, HasConfig env, MonadThrow m) +imageStagingDir :: + (HasConfig env, MonadReader env m, MonadThrow m) => Path Abs Dir -- ^ Project root -> Int -- ^ Index of image -> m (Path Abs Dir) -- ^ Docker sandbox imageStagingDir projectRoot imageIdx = do workDir <- view workDirL idxRelDir <- parseRelDir (show imageIdx) - return $ projectRoot workDir $(mkRelDir "image") idxRelDir + pure $ projectRoot workDir $(mkRelDir "image") idxRelDir diff --git a/src/Stack/Constants/StackProgName.hs b/src/Stack/Constants/StackProgName.hs new file mode 100644 index 0000000000..0387d2e29a --- /dev/null +++ b/src/Stack/Constants/StackProgName.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Constants.StackProgName +License : BSD-3-Clause + +'stackProgName' is defined in this module rather than in "Stack.Constants", due +to GHC stage restrictions and the use of Template Haskell. +-} + +module Stack.Constants.StackProgName + ( stackProgName + ) where + +import Stack.Prelude ( String ) + +-- | Name of the Stack program. +stackProgName :: String +stackProgName = "stack" diff --git a/src/Stack/Coverage.hs b/src/Stack/Coverage.hs index c08f15eda6..db804c0b4f 100644 --- a/src/Stack/Coverage.hs +++ b/src/Stack/Coverage.hs @@ -1,405 +1,591 @@ {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE TupleSections #-} --- | Generate HPC (Haskell Program Coverage) reports +{-| +Module : Stack.Coverage +Description : Generate HPC (Haskell Program Coverage) reports. +License : BSD-3-Clause + +Generate HPC (Haskell Program Coverage) reports. 
+-} + module Stack.Coverage - ( deleteHpcReports - , updateTixFile - , generateHpcReport - , HpcReportOpts(..) - , generateHpcReportForTargets - , generateHpcUnifiedReport - , generateHpcMarkupIndex - ) where + ( hpcReportCmd + , deleteHpcReports + , updateTixFile + , generateHpcReport + , generateHpcUnifiedReport + , generateHpcMarkupIndex + ) where -import Stack.Prelude hiding (Display (..)) -import qualified Data.ByteString.Char8 as S8 -import qualified Data.ByteString.Lazy as BL -import Data.List +import qualified Data.ByteString.Lazy.Char8 as L8 +import Data.Conduit ( await ) +import qualified Data.List as L import qualified Data.Map.Strict as Map import qualified Data.Set as Set import qualified Data.Text as T import qualified Data.Text.Lazy as LT -import Distribution.Version (mkVersion) +import Distribution.Types.MungedPackageId ( computeCompatPackageId ) +import Distribution.Types.UnqualComponentName + ( mkUnqualComponentName ) import Path -import Path.Extra (toFilePathNoTrailingSep) + ( (), dirname, parent, parseAbsFile, parseRelDir + , parseRelFile, stripProperPrefix + ) +import Path.Extra ( toFilePathNoTrailingSep ) import Path.IO -import Stack.Build.Target + ( copyFile, doesDirExist, doesFileExist, ensureDir + , ignoringAbsence, listDir, removeDirRecur, removeFile + , resolveDir', resolveFile' + ) +import RIO.ByteString.Lazy ( putStrLn ) +import RIO.Process + ( ExitCodeException, ProcessException, proc, readProcess_ ) +import Stack.Build.Target ( NeedTargets (..) 
) import Stack.Constants -import Stack.Constants.Config -import Stack.Package -import Stack.Types.Compiler -import Stack.Types.Config -import Stack.Types.NamedComponent -import Stack.Types.Package + ( relDirAll, relDirCombined, relDirCustom + , relDirExtraTixFiles, relDirPackageConfInplace + , relFileHpcIndexHtml, relFileIndexHtml + ) +import Stack.Constants.Config ( distDirFromDir, hpcRelativeDir ) +import Stack.Package ( hasBuildableMainLibrary ) +import Stack.PackageDump ( ghcPkgField ) +import Stack.Prelude +import Stack.Runners ( ShouldReexec (..), withConfig, withEnvConfig ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..) ) +import Stack.Types.CompilerPaths ( getGhcPkgExe ) +import Stack.Types.CompCollection ( getBuildableSetText ) +import Stack.Types.ComponentUtils ( unqualCompToString ) +import Stack.Types.BuildOptsCLI + ( BuildOptsCLI (..), defaultBuildOptsCLI ) +import Stack.Types.EnvConfig + ( EnvConfig (..), HasEnvConfig (..), hpcReportDir ) +import Stack.Types.HpcReportOpts ( HpcReportOpts (..) ) +import Stack.Types.NamedComponent ( NamedComponent (..) ) +import Stack.Types.Package ( Package (..), packageIdentifier ) +import Stack.Types.Runner ( Runner ) import Stack.Types.SourceMap -import System.FilePath (isPathSeparator) -import qualified RIO -import RIO.PrettyPrint -import RIO.Process -import Trace.Hpc.Tix -import Web.Browser (openBrowser) + ( PackageType (..), SMTargets (..), SMWanted (..) + , SourceMap (..), Target (..), ppRoot + ) +import System.FilePath ( isPathSeparator ) +import Trace.Hpc.Tix ( Tix (..), TixModule (..), readTix, writeTix ) +import Web.Browser ( openBrowser ) -newtype CoverageException = NonTestSuiteTarget PackageName deriving Typeable +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Coverage" module. 
+data CoveragePrettyException + = NonTestSuiteTarget PackageName + | NoTargetsOrTixSpecified + | NotLocalPackage PackageName + deriving Show -instance Exception CoverageException -instance Show CoverageException where - show (NonTestSuiteTarget name) = - "Can't specify anything except test-suites as hpc report targets (" ++ - packageNameString name ++ - " is used with a non test-suite target)" +instance Pretty CoveragePrettyException where + pretty (NonTestSuiteTarget name) = + "[S-6361]" + <> line + <> fillSep + [ flow "Can't specify anything except test-suites as hpc report \ + \targets" + , parens (style Target . fromPackageName $ name) + , flow "is used with a non test-suite target." + ] + pretty NoTargetsOrTixSpecified = + "[S-2321]" + <> line + <> flow "Not generating combined report, because no targets or tix files \ + \are specified." + pretty (NotLocalPackage name) = + "[S-9975]" + <> line + <> fillSep + [ flow "Expected a project package, but" + , style Target . fromPackageName $ name + , flow "is either an extra-dep or in the snapshot." + ] + +instance Exception CoveragePrettyException + +-- | Function underlying the @stack hpc report@ command. +hpcReportCmd :: HpcReportOpts -> RIO Runner () +hpcReportCmd hropts = do + let (tixFiles, targetNames) = + L.partition (".tix" `T.isSuffixOf`) hropts.inputs + boptsCLI = defaultBuildOptsCLI + { targetsCLI = if hropts.all then [] else targetNames } + withConfig YesReexec $ withEnvConfig AllowNoTargets boptsCLI $ + generateHpcReportForTargets hropts tixFiles targetNames -- | Invoked at the beginning of running with "--coverage" deleteHpcReports :: HasEnvConfig env => RIO env () deleteHpcReports = do - hpcDir <- hpcReportDir - liftIO $ ignoringAbsence (removeDirRecur hpcDir) + hpcDir <- hpcReportDir + liftIO $ ignoringAbsence (removeDirRecur hpcDir) --- | Move a tix file into a sub-directory of the hpc report directory. Deletes the old one if one is --- present. 
-updateTixFile :: HasEnvConfig env => PackageName -> Path Abs File -> String -> RIO env () +-- | Move a tix file into a sub-directory of the hpc report directory. Deletes +-- the old one if one is present. +updateTixFile :: + HasEnvConfig env + => PackageName + -> Path Abs File + -> String + -> RIO env () updateTixFile pkgName' tixSrc testName = do - exists <- doesFileExist tixSrc - when exists $ do - tixDest <- tixFilePath pkgName' testName - liftIO $ ignoringAbsence (removeFile tixDest) - ensureDir (parent tixDest) - -- Remove exe modules because they are problematic. This could be revisited if there's a GHC - -- version that fixes https://ghc.haskell.org/trac/ghc/ticket/1853 - mtix <- readTixOrLog tixSrc - case mtix of - Nothing -> logError $ "Failed to read " <> fromString (toFilePath tixSrc) - Just tix -> do - liftIO $ writeTix (toFilePath tixDest) (removeExeModules tix) - -- TODO: ideally we'd do a file move, but IIRC this can - -- have problems. Something about moving between drives - -- on windows? - copyFile tixSrc =<< parseAbsFile (toFilePath tixDest ++ ".premunging") - liftIO $ ignoringAbsence (removeFile tixSrc) + exists <- doesFileExist tixSrc + when exists $ do + tixDest <- tixFilePath pkgName' testName + liftIO $ ignoringAbsence (removeFile tixDest) + ensureDir (parent tixDest) + -- Remove exe modules because they are problematic. This could be + -- revisited if there's a GHC version that fixes + -- https://ghc.haskell.org/trac/ghc/ticket/1853 + readTixOrLog tixSrc >>= \case + Nothing -> prettyError $ + "[S-2887]" + <> line + <> fillSep + [ flow "Failed to read" + , pretty tixSrc <> "." + ] + Just tix -> do + liftIO $ writeTix (toFilePath tixDest) (removeExeModules tix) + -- TODO: ideally we'd do a file move, but IIRC this can + -- have problems. Something about moving between drives + -- on windows? 
+ copyFile tixSrc =<< parseAbsFile (toFilePath tixDest ++ ".premunging") + liftIO $ ignoringAbsence (removeFile tixSrc) -- | Get the directory used for hpc reports for the given pkgId. hpcPkgPath :: HasEnvConfig env => PackageName -> RIO env (Path Abs Dir) hpcPkgPath pkgName' = do - outputDir <- hpcReportDir - pkgNameRel <- parseRelDir (packageNameString pkgName') - return (outputDir pkgNameRel) + outputDir <- hpcReportDir + pkgNameRel <- parseRelDir (packageNameString pkgName') + pure (outputDir pkgNameRel) --- | Get the tix file location, given the name of the file (without extension), and the package --- identifier string. +-- | Get the tix file location, given the name of the file (without extension), +-- and the package identifier string. tixFilePath :: HasEnvConfig env => PackageName -> String -> RIO env (Path Abs File) tixFilePath pkgName' testName = do - pkgPath <- hpcPkgPath pkgName' - tixRel <- parseRelFile (testName ++ "/" ++ testName ++ ".tix") - return (pkgPath tixRel) + pkgPath <- hpcPkgPath pkgName' + tixRel <- parseRelFile (testName ++ "/" ++ testName ++ ".tix") + pure (pkgPath tixRel) --- | Generates the HTML coverage report and shows a textual coverage summary for a package. +-- | Generates the HTML coverage report and shows a textual coverage summary for +-- a package. generateHpcReport :: HasEnvConfig env => Path Abs Dir -> Package -> [Text] -> RIO env () generateHpcReport pkgDir package tests = do - compilerVersion <- view actualCompilerVersionL - -- If we're using > GHC 7.10, the hpc 'include' parameter must specify a ghc package key. 
See - -- https://github.com/commercialhaskell/stack/issues/785 - let pkgName' = T.pack $ packageNameString (packageName package) - pkgId = packageIdentifierString (packageIdentifier package) - ghcVersion = getGhcVersion compilerVersion - hasLibrary = - case packageLibraries package of - NoLibraries -> False - HasLibraries _ -> True - internalLibs = packageInternalLibraries package - eincludeName <- - -- Pre-7.8 uses plain PKG-version in tix files. - if ghcVersion < mkVersion [7, 10] then return $ Right $ Just [pkgId] - -- We don't expect to find a package key if there is no library. - else if not hasLibrary && Set.null internalLibs then return $ Right Nothing - -- Look in the inplace DB for the package key. - -- See https://github.com/commercialhaskell/stack/issues/1181#issuecomment-148968986 - else do + -- If we're using > GHC 7.10, the hpc 'include' parameter must specify a ghc + -- package key. See + -- https://github.com/commercialhaskell/stack/issues/785 + let pkgName' = packageNameString package.name + hasLibrary = hasBuildableMainLibrary package + subLibs = package.subLibraries + eincludeName <- + if not hasLibrary && null subLibs + -- We don't expect to find a package key if there is no library. + then pure $ Right Nothing + -- Look in the inplace DB for the package key. + -- See https://github.com/commercialhaskell/stack/issues/1181#issuecomment-148968986 + else do + eincludeName <- + findPackageFieldForBuiltPackage + pkgDir + (packageIdentifier package) + (getBuildableSetText subLibs) -- GHC 8.0 uses package id instead of package key. 
-- See https://github.com/commercialhaskell/stack/issues/2424 - let hpcNameField = if ghcVersion >= mkVersion [8, 0] then "id" else "key" - eincludeName <- findPackageFieldForBuiltPackage pkgDir (packageIdentifier package) internalLibs hpcNameField - case eincludeName of - Left err -> do - logError $ RIO.display err - return $ Left err - Right includeNames -> return $ Right $ Just $ map T.unpack includeNames - forM_ tests $ \testName -> do - tixSrc <- tixFilePath (packageName package) (T.unpack testName) - let report = "coverage report for " <> pkgName' <> "'s test-suite \"" <> testName <> "\"" - reportDir = parent tixSrc + "id" case eincludeName of - Left err -> generateHpcErrorReport reportDir (RIO.display (sanitize (T.unpack err))) - -- Restrict to just the current library code, if there is a library in the package (see - -- #634 - this will likely be customizable in the future) - Right mincludeName -> do - let extraArgs = case mincludeName of - Just includeNames -> "--include" : intersperse "--include" (map (\n -> n ++ ":") includeNames) - Nothing -> [] - mreportPath <- generateHpcReportInternal tixSrc reportDir report extraArgs extraArgs - forM_ mreportPath (displayReportPath report . 
pretty) + Left err -> do + logError $ display err + pure $ Left err + Right includeNames -> pure $ Right $ Just $ map T.unpack includeNames + forM_ tests $ \testName -> do + tixSrc <- tixFilePath package.name (T.unpack testName) + let report = fillSep + [ flow "coverage report for" + , style Current (fromString pkgName') <> "'s" + , "test-suite" + , style PkgComponent (fromString $ T.unpack testName) + ] + reportHtml = + "coverage report for" + <> T.pack pkgName' + <> "'s test-suite \"" + <> testName + <> "\"" + reportDir = parent tixSrc + case eincludeName of + Left err -> generateHpcErrorReport reportDir (display (sanitize (T.unpack err))) + -- Restrict to just the current library code, if there is a library in the package (see + -- #634 - this will likely be customizable in the future) + Right mincludeName -> do + let extraArgs = case mincludeName of + Nothing -> [] + Just includeNames -> + "--include" + : L.intersperse "--include" (map (++ ":") includeNames) + mreportPath <- + generateHpcReportInternal tixSrc reportDir report reportHtml extraArgs extraArgs + forM_ mreportPath (displayReportPath "The" report . pretty) -generateHpcReportInternal :: HasEnvConfig env - => Path Abs File -> Path Abs Dir -> Text -> [String] -> [String] - -> RIO env (Maybe (Path Abs File)) -generateHpcReportInternal tixSrc reportDir report extraMarkupArgs extraReportArgs = do - -- If a .tix file exists, move it to the HPC output directory and generate a report for it. 
- tixFileExists <- doesFileExist tixSrc - if not tixFileExists +generateHpcReportInternal :: + HasEnvConfig env + => Path Abs File + -> Path Abs Dir + -> StyleDoc + -- ^ The pretty name for the report + -> Text + -- ^ The plain name for the report, used in HTML output + -> [String] + -> [String] + -> RIO env (Maybe (Path Abs File)) +generateHpcReportInternal + tixSrc + reportDir + report + reportHtml + extraMarkupArgs + extraReportArgs + = do + -- If a .tix file exists, move it to the HPC output directory and generate + -- a report for it. + tixFileExists <- doesFileExist tixSrc + if not tixFileExists then do - logError $ - "Didn't find .tix for " <> - RIO.display report <> - " - expected to find it at " <> - fromString (toFilePath tixSrc) <> - "." - return Nothing + prettyError $ + "[S-4634]" + <> line + <> fillSep + [ flow "Didn't find" + , style File ".tix" + , "for" + , report + , flow "- expected to find it at" + , pretty tixSrc <> "." + ] + pure Nothing else (`catch` \(err :: ProcessException) -> do - logError $ displayShow err - generateHpcErrorReport reportDir $ RIO.display $ sanitize $ show err - return Nothing) $ - (`onException` logError ("Error occurred while producing " <> RIO.display report)) $ do - -- Directories for .mix files. - hpcRelDir <- hpcRelativeDir - -- Compute arguments used for both "hpc markup" and "hpc report". - pkgDirs <- view $ buildConfigL.to (map ppRoot . Map.elems . smwProject . bcSMWanted) - let args = - -- Use index files from all packages (allows cross-package coverage results). - concatMap (\x -> ["--srcdir", toFilePathNoTrailingSep x]) pkgDirs ++ - -- Look for index files in the correct dir (relative to each pkgdir). - ["--hpcdir", toFilePathNoTrailingSep hpcRelDir, "--reset-hpcdirs"] - logInfo $ "Generating " <> RIO.display report - outputLines <- liftM (map (S8.filter (/= '\r')) . S8.lines . BL.toStrict . 
fst) $ - proc "hpc" - ( "report" + logError $ displayShow err + generateHpcErrorReport reportDir $ display $ sanitize $ + displayException err + pure Nothing) $ + (`onException` + prettyError + ( "[S-8215]" + <> line + <> fillSep + [ flow "Error occurred while producing" + , report <> "." + ] + )) $ do + -- Directories for .mix files. + hpcRelDir <- hpcRelativeDir + -- Compute arguments used for both "hpc markup" and "hpc report". + pkgDirs <- view $ buildConfigL . to + (map ppRoot . Map.elems . (.smWanted.project)) + let args = + -- Use index files from all packages (allows cross-package + -- coverage results). + concatMap (\x -> ["--srcdir", toFilePathNoTrailingSep x]) pkgDirs ++ + -- Look for index files in the correct dir (relative to each pkgdir). + ["--hpcdir", toFilePathNoTrailingSep hpcRelDir, "--reset-hpcdirs"] + prettyInfoL + [ "Generating" + , report <> "." + ] + -- Strip @\r@ characters because Windows. + outputLines <- map (L8.filter (/= '\r')) . L8.lines . fst <$> + proc "hpc" + ( "report" + : toFilePath tixSrc + : (args ++ extraReportArgs) + ) + readProcess_ + if all ("(0/0)" `L8.isSuffixOf`) outputLines + then do + let msgHtml = + "Error: [S-6829]\n\ + \The " + <> display reportHtml + <> " did not consider any code. One possible cause of this is \ + \if your test-suite builds the library code (see Stack \ + \\ + \issue #1008\ + \\ + \). It may also indicate a bug in Stack or the hpc program. \ + \Please report this issue if you think your coverage report \ + \should have meaningful results." + prettyError $ + "[S-6829]" + <> line + <> fillSep + [ "The" + , report + , flow "did not consider any code. One possible cause of this \ + \is if your test-suite builds the library code (see \ + \Stack issue #1008). It may also indicate a bug in \ + \Stack or the hpc program. Please report this issue if \ + \you think your coverage report should have meaningful \ + \results." 
+ ] + generateHpcErrorReport reportDir msgHtml + pure Nothing + else do + let reportPath = reportDir relFileHpcIndexHtml + -- Print the summary report to the standard output stream. + putUtf8Builder =<< displayWithColor + ( fillSep + [ "Summary" + , report <> ":" + ] + <> line + ) + forM_ outputLines putStrLn + -- Generate the HTML markup. + void $ proc "hpc" + ( "markup" : toFilePath tixSrc - : (args ++ extraReportArgs) + : ("--destdir=" ++ toFilePathNoTrailingSep reportDir) + : (args ++ extraMarkupArgs) ) readProcess_ - if all ("(0/0)" `S8.isSuffixOf`) outputLines - then do - let msg html = - "Error: The " <> - RIO.display report <> - " did not consider any code. One possible cause of this is" <> - " if your test-suite builds the library code (see stack " <> - (if html then "" else "") <> - "issue #1008" <> - (if html then "" else "") <> - "). It may also indicate a bug in stack or" <> - " the hpc program. Please report this issue if you think" <> - " your coverage report should have meaningful results." - logError (msg False) - generateHpcErrorReport reportDir (msg True) - return Nothing - else do - let reportPath = reportDir relFileHpcIndexHtml - -- Print output, stripping @\r@ characters because Windows. - forM_ outputLines (logInfo . displayBytesUtf8) - -- Generate the markup. - void $ proc "hpc" - ( "markup" - : toFilePath tixSrc - : ("--destdir=" ++ toFilePathNoTrailingSep reportDir) - : (args ++ extraMarkupArgs) - ) - readProcess_ - return (Just reportPath) - -data HpcReportOpts = HpcReportOpts - { hroptsInputs :: [Text] - , hroptsAll :: Bool - , hroptsDestDir :: Maybe String - , hroptsOpenBrowser :: Bool - } deriving (Show) + pure (Just reportPath) generateHpcReportForTargets :: HasEnvConfig env => HpcReportOpts -> [Text] -> [Text] -> RIO env () generateHpcReportForTargets opts tixFiles targetNames = do - targetTixFiles <- - -- When there aren't any package component arguments, and --all - -- isn't passed, default to not considering any targets. 
- if not (hroptsAll opts) && null targetNames - then return [] - else do - when (hroptsAll opts && not (null targetNames)) $ - logWarn $ "Since --all is used, it is redundant to specify these targets: " <> displayShow targetNames - targets <- view $ envConfigL.to envConfigSourceMap.to smTargets.to smtTargets - liftM concat $ forM (Map.toList targets) $ \(name, target) -> - case target of - TargetAll PTDependency -> throwString $ - "Error: Expected a local package, but " ++ - packageNameString name ++ - " is either an extra-dep or in the snapshot." - TargetComps comps -> do - pkgPath <- hpcPkgPath name - forM (toList comps) $ \nc -> - case nc of - CTest testName -> - liftM (pkgPath ) $ parseRelFile (T.unpack testName ++ "/" ++ T.unpack testName ++ ".tix") - _ -> throwIO $ NonTestSuiteTarget name - - TargetAll PTProject -> do - pkgPath <- hpcPkgPath name - exists <- doesDirExist pkgPath - if exists - then do - (dirs, _) <- listDir pkgPath - liftM concat $ forM dirs $ \dir -> do - (_, files) <- listDir dir - return (filter ((".tix" `isSuffixOf`) . toFilePath) files) - else return [] - tixPaths <- liftM (\xs -> xs ++ targetTixFiles) $ mapM (resolveFile' . T.unpack) tixFiles - when (null tixPaths) $ - throwString "Not generating combined report, because no targets or tix files are specified." - outputDir <- hpcReportDir - reportDir <- case hroptsDestDir opts of - Nothing -> return (outputDir relDirCombined relDirCustom) - Just destDir -> do - dest <- resolveDir' destDir - ensureDir dest - return dest - let report = "combined report" - mreportPath <- generateUnionReport report reportDir tixPaths - forM_ mreportPath $ \reportPath -> - if hroptsOpenBrowser opts - then do - prettyInfo $ "Opening" <+> pretty reportPath <+> "in the browser." 
- void $ liftIO $ openBrowser (toFilePath reportPath) - else displayReportPath report (pretty reportPath) + targetTixFiles <- + -- When there aren't any package component arguments, and --all + -- isn't passed, default to not considering any targets. + if not opts.all && null targetNames + then pure [] + else do + when (opts.all && not (null targetNames)) $ + prettyWarnL + $ "Since" + : style Shell "--all" + : flow "is used, it is redundant to specify these targets:" + : mkNarrativeList (Just Target) False + (map (fromString . T.unpack) targetNames :: [StyleDoc]) + targets <- + view $ envConfigL . to (.sourceMap.targets.targets) + fmap concat $ forM (Map.toList targets) $ \(name, target) -> + case target of + TargetAll PTDependency -> prettyThrowIO $ NotLocalPackage name + TargetComps comps -> do + pkgPath <- hpcPkgPath name + forM (toList comps) $ + \case + CTest testName -> (pkgPath ) <$> + parseRelFile + ( testName' + ++ "/" + ++ testName' + ++ ".tix" + ) + where + testName' = unqualCompToString testName + _ -> prettyThrowIO $ NonTestSuiteTarget name + TargetAll PTProject -> do + pkgPath <- hpcPkgPath name + exists <- doesDirExist pkgPath + if exists + then do + (dirs, _) <- listDir pkgPath + fmap concat $ forM dirs $ \dir -> do + (_, files) <- listDir dir + pure (filter ((".tix" `L.isSuffixOf`) . toFilePath) files) + else pure [] + tixPaths <- (++ targetTixFiles) <$> + mapM (resolveFile' . 
T.unpack) tixFiles + when (null tixPaths) $ prettyThrowIO NoTargetsOrTixSpecified + outputDir <- hpcReportDir + reportDir <- case opts.destDir of + Nothing -> pure (outputDir relDirCombined relDirCustom) + Just destDir -> do + dest <- resolveDir' destDir + ensureDir dest + pure dest + let report = flow "combined coverage report" + reportHtml = "combined coverage report" + mreportPath <- generateUnionReport report reportHtml reportDir tixPaths + forM_ mreportPath $ \reportPath -> + if opts.openBrowser + then do + prettyInfo $ "Opening" <+> pretty reportPath <+> "in the browser." + void $ liftIO $ openBrowser (toFilePath reportPath) + else displayReportPath "The" report (pretty reportPath) +-- | Generates the HTML unified coverage report. generateHpcUnifiedReport :: HasEnvConfig env => RIO env () generateHpcUnifiedReport = do - outputDir <- hpcReportDir - ensureDir outputDir - (dirs, _) <- listDir outputDir - tixFiles0 <- liftM (concat . concat) $ forM (filter (("combined" /=) . dirnameString) dirs) $ \dir -> do - (dirs', _) <- listDir dir - forM dirs' $ \dir' -> do - (_, files) <- listDir dir' - return (filter ((".tix" `isSuffixOf`) . toFilePath) files) - extraTixFiles <- findExtraTixFiles - let tixFiles = tixFiles0 ++ extraTixFiles - reportDir = outputDir relDirCombined relDirAll - if length tixFiles < 2 - then logInfo $ - (if null tixFiles then "No tix files" else "Only one tix file") <> - " found in " <> - fromString (toFilePath outputDir) <> - ", so not generating a unified coverage report." - else do - let report = "unified report" - mreportPath <- generateUnionReport report reportDir tixFiles - forM_ mreportPath (displayReportPath report . pretty) + outputDir <- hpcReportDir + ensureDir outputDir + (dirs, _) <- listDir outputDir + tixFiles0 <- + fmap (concat . concat) $ forM (filter (("combined" /=) . dirnameString) dirs) $ \dir -> do + (dirs', _) <- listDir dir + forM dirs' $ \dir' -> do + (_, files) <- listDir dir' + pure (filter ((".tix" `L.isSuffixOf`) . 
toFilePath) files) + extraTixFiles <- findExtraTixFiles + let tixFiles = tixFiles0 ++ extraTixFiles + reportDir = outputDir relDirCombined relDirAll +-- A single *.tix file does not necessarily mean that a unified coverage report +-- is redundant. For example, one package may test the library of another +-- package that does not test its own library. See +-- https://github.com/commercialhaskell/stack/issues/5713 +-- +-- As an interim solution, a unified coverage report will always be produced +-- even if it may be redundant in some circumstances. + if null tixFiles + then prettyInfoL + [ flow "No tix files found in" + , pretty outputDir <> "," + , flow "so not generating a unified coverage report." + ] + else do + let report = flow "unified coverage report" + reportHtml = "unified coverage report" + mreportPath <- generateUnionReport report reportHtml reportDir tixFiles + forM_ mreportPath (displayReportPath "The" report . pretty) -generateUnionReport :: HasEnvConfig env - => Text -> Path Abs Dir -> [Path Abs File] - -> RIO env (Maybe (Path Abs File)) -generateUnionReport report reportDir tixFiles = do - (errs, tix) <- fmap (unionTixes . map removeExeModules) (mapMaybeM readTixOrLog tixFiles) - logDebug $ "Using the following tix files: " <> fromString (show tixFiles) - unless (null errs) $ logWarn $ - "The following modules are left out of the " <> - RIO.display report <> - " due to version mismatches: " <> - mconcat (intersperse ", " (map fromString errs)) - tixDest <- liftM (reportDir ) $ parseRelFile (dirnameString reportDir ++ ".tix") - ensureDir (parent tixDest) - liftIO $ writeTix (toFilePath tixDest) tix - generateHpcReportInternal tixDest reportDir report [] [] +generateUnionReport :: + HasEnvConfig env + => StyleDoc + -- ^ Pretty description of the report. + -> Text + -- ^ Plain description of the report, used in HTML reporting. 
+ -> Path Abs Dir + -> [Path Abs File] + -> RIO env (Maybe (Path Abs File)) +generateUnionReport report reportHtml reportDir tixFiles = do + (errs, tix) <- fmap (unionTixes . map removeExeModules) (mapMaybeM readTixOrLog tixFiles) + logDebug $ "Using the following tix files: " <> fromString (show tixFiles) + unless (null errs) $ + prettyWarn $ + fillSep + [ flow "The following modules are left out of the" + , report + , flow "due to version mismatches:" + ] + <> line + <> bulletedList (map fromString errs :: [StyleDoc]) + tixDest <- + (reportDir ) <$> parseRelFile (dirnameString reportDir ++ ".tix") + ensureDir (parent tixDest) + liftIO $ writeTix (toFilePath tixDest) tix + generateHpcReportInternal tixDest reportDir report reportHtml [] [] -readTixOrLog :: HasLogFunc env => Path b File -> RIO env (Maybe Tix) +readTixOrLog :: HasTerm env => Path b File -> RIO env (Maybe Tix) readTixOrLog path = do - mtix <- liftIO (readTix (toFilePath path)) `catchAny` \errorCall -> do - logError $ "Error while reading tix: " <> fromString (show errorCall) - return Nothing - when (isNothing mtix) $ - logError $ "Failed to read tix file " <> fromString (toFilePath path) - return mtix + mtix <- liftIO (readTix (toFilePath path)) `catchAny` \errorCall -> do + prettyError $ + "[S-3521]" + <> line + <> flow "Error while reading tix:" + <> line + <> string (displayException errorCall) + pure Nothing + when (isNothing mtix) $ + prettyError $ + "[S-7786]" + <> line + <> fillSep + [ flow "Failed to read tix file" + , pretty path <> "." + ] + pure mtix --- | Module names which contain '/' have a package name, and so they weren't built into the --- executable. +-- | Module names which contain '/' have a package name, and so they weren't +-- built into the executable. 
removeExeModules :: Tix -> Tix -removeExeModules (Tix ms) = Tix (filter (\(TixModule name _ _ _) -> '/' `elem` name) ms) +removeExeModules (Tix ms) = + Tix (filter (\(TixModule name _ _ _) -> '/' `elem` name) ms) unionTixes :: [Tix] -> ([String], Tix) unionTixes tixes = (Map.keys errs, Tix (Map.elems outputs)) - where - (errs, outputs) = Map.mapEither id $ Map.unionsWith merge $ map toMap tixes - toMap (Tix ms) = Map.fromList (map (\x@(TixModule k _ _ _) -> (k, Right x)) ms) - merge (Right (TixModule k hash1 len1 tix1)) - (Right (TixModule _ hash2 len2 tix2)) - | hash1 == hash2 && len1 == len2 = Right (TixModule k hash1 len1 (zipWith (+) tix1 tix2)) - merge _ _ = Left () + where + (errs, outputs) = Map.mapEither id $ Map.unionsWith merge $ map toMap tixes + toMap (Tix ms) = Map.fromList (map (\x@(TixModule k _ _ _) -> (k, Right x)) ms) + merge (Right (TixModule k hash1 len1 tix1)) + (Right (TixModule _ hash2 len2 tix2)) + | hash1 == hash2 && len1 == len2 = + Right (TixModule k hash1 len1 (zipWith (+) tix1 tix2)) + merge _ _ = Left () +-- | Generates the HTML index report. generateHpcMarkupIndex :: HasEnvConfig env => RIO env () generateHpcMarkupIndex = do - outputDir <- hpcReportDir - let outputFile = outputDir relFileIndexHtml - ensureDir outputDir - (dirs, _) <- listDir outputDir - rows <- liftM (catMaybes . concat) $ forM dirs $ \dir -> do - (subdirs, _) <- listDir dir - forM subdirs $ \subdir -> do - let indexPath = subdir relFileHpcIndexHtml - exists' <- doesFileExist indexPath - if not exists' then return Nothing else do - relPath <- stripProperPrefix outputDir indexPath - let package = dirname dir - testsuite = dirname subdir - return $ Just $ T.concat - [ "" - , pathToHtml package - , "" - , pathToHtml testsuite - , "" - ] - writeBinaryFileAtomic outputFile $ - "" <> - -- Part of the css from HPC's output HTML - "" <> - "" <> - "" <> - (if null rows - then - "No hpc_index.html files found in \"" <> - encodeUtf8Builder (pathToHtml outputDir) <> - "\"." 
- else - "" <> - "

NOTE: This is merely a listing of the html files found in the coverage reports directory. Some of these reports may be old.

" <> - "" <> - foldMap encodeUtf8Builder rows <> - "
PackageTestSuiteModification Time
") <> - "" - unless (null rows) $ - logInfo $ "\nAn index of the generated HTML coverage reports is available at " <> - fromString (toFilePath outputFile) + outputDir <- hpcReportDir + let outputFile = outputDir relFileIndexHtml + ensureDir outputDir + (dirs, _) <- listDir outputDir + rows <- fmap (concatMap catMaybes) $ forM dirs $ \dir -> do + (subdirs, _) <- listDir dir + forM subdirs $ \subdir -> do + let indexPath = subdir relFileHpcIndexHtml + exists' <- doesFileExist indexPath + if not exists' then pure Nothing else do + relPath <- stripProperPrefix outputDir indexPath + let package = dirname dir + testsuite = dirname subdir + pure $ Just $ T.concat + [ "" + , pathToHtml package + , "" + , pathToHtml testsuite + , "" + ] + writeBinaryFileAtomic outputFile $ + "" + <> + -- Part of the css from HPC's output HTML + "" + <> "" + <> "" + <> ( if null rows + then + "No hpc_index.html files found in \"" + <> encodeUtf8Builder (pathToHtml outputDir) + <> "\"." + else + "" + <> "

NOTE: This is merely a listing of the html files found in the coverage reports directory. Some of these reports may be old.

" + <> "" + <> foldMap encodeUtf8Builder rows + <> "
PackageTestSuiteModification Time
" + ) + <> "" + unless (null rows) $ + displayReportPath + "\nAn" "index of the generated HTML coverage reports" + (pretty outputFile) generateHpcErrorReport :: MonadIO m => Path Abs Dir -> Utf8Builder -> m () generateHpcErrorReport dir err = do - ensureDir dir - let fp = toFilePath (dir relFileHpcIndexHtml) - writeFileUtf8Builder fp $ - "" <> - "

HPC Report Generation Error

" <> - "

" <> - err <> - "

" <> - "" + ensureDir dir + let fp = toFilePath (dir relFileHpcIndexHtml) + writeFileUtf8Builder fp $ + "" + <> "

HPC Report Generation Error

" + <> "

" + <> err + <> "

" + <> "" pathToHtml :: Path b t -> Text pathToHtml = T.dropWhileEnd (=='/') . sanitize . toFilePath @@ -407,88 +593,77 @@ pathToHtml = T.dropWhileEnd (=='/') . sanitize . toFilePath -- | Escape HTML symbols (copied from Text.Hastache) htmlEscape :: LT.Text -> LT.Text htmlEscape = LT.concatMap proc_ - where - proc_ '&' = "&" - proc_ '\\' = "\" - proc_ '"' = """ - proc_ '\'' = "'" - proc_ '<' = "<" - proc_ '>' = ">" - proc_ h = LT.singleton h + where + proc_ '&' = "&" + proc_ '\\' = "\" + proc_ '"' = """ + proc_ '\'' = "'" + proc_ '<' = "<" + proc_ '>' = ">" + proc_ h = LT.singleton h sanitize :: String -> Text sanitize = LT.toStrict . htmlEscape . LT.pack dirnameString :: Path r Dir -> String -dirnameString = dropWhileEnd isPathSeparator . toFilePath . dirname +dirnameString = L.dropWhileEnd isPathSeparator . toFilePath . dirname -findPackageFieldForBuiltPackage - :: HasEnvConfig env - => Path Abs Dir -> PackageIdentifier -> Set.Set Text -> Text - -> RIO env (Either Text [Text]) -findPackageFieldForBuiltPackage pkgDir pkgId internalLibs field = do - distDir <- distDirFromDir pkgDir - let inplaceDir = distDir relDirPackageConfInplace - pkgIdStr = packageIdentifierString pkgId - notFoundErr = return $ Left $ "Failed to find package key for " <> T.pack pkgIdStr - extractField path = do - contents <- readFileUtf8 (toFilePath path) - case asum (map (T.stripPrefix (field <> ": ")) (T.lines contents)) of - Just result -> return $ Right $ T.strip result - Nothing -> notFoundErr - cabalVer <- view cabalVersionL - if cabalVer < mkVersion [1, 24] - then do - -- here we don't need to handle internal libs - path <- liftM (inplaceDir ) $ parseRelFile (pkgIdStr ++ "-inplace.conf") - logDebug $ "Parsing config in Cabal < 1.24 location: " <> fromString (toFilePath path) - exists <- doesFileExist path - if exists then fmap (:[]) <$> extractField path else notFoundErr - else do - -- With Cabal-1.24, it's in a different location. 
- logDebug $ "Scanning " <> fromString (toFilePath inplaceDir) <> " for files matching " <> fromString pkgIdStr - (_, files) <- handleIO (const $ return ([], [])) $ listDir inplaceDir - logDebug $ displayShow files - -- From all the files obtained from the scanning process above, we - -- need to identify which are .conf files and then ensure that - -- there is at most one .conf file for each library and internal - -- library (some might be missing if that component has not been - -- built yet). We should error if there are more than one .conf - -- file for a component or if there are no .conf files at all in - -- the searched location. - let toFilename = T.pack . toFilePath . filename - -- strip known prefix and suffix from the found files to determine only the conf files - stripKnown = T.stripSuffix ".conf" <=< T.stripPrefix (T.pack (pkgIdStr ++ "-")) - stripped = mapMaybe (\file -> fmap (,file) . stripKnown . toFilename $ file) files - -- which component could have generated each of these conf files - stripHash n = let z = T.dropWhile (/= '-') n in if T.null z then "" else T.tail z - matchedComponents = map (\(n, f) -> (stripHash n, [f])) stripped - byComponents = Map.restrictKeys (Map.fromListWith (++) matchedComponents) $ Set.insert "" internalLibs - logDebug $ displayShow byComponents - if Map.null $ Map.filter (\fs -> length fs > 1) byComponents - then case concat $ Map.elems byComponents of - [] -> notFoundErr - -- for each of these files, we need to extract the requested field - paths -> do - (errors, keys) <- partitionEithers <$> traverse extractField paths - case errors of - (a:_) -> return $ Left a -- the first error only, since they're repeated anyway - [] -> return $ Right keys - else return $ Left $ "Multiple files matching " <> T.pack (pkgIdStr ++ "-*.conf") <> " found in " <> - T.pack (toFilePath inplaceDir) <> ". Maybe try 'stack clean' on this package?" 
+findPackageFieldForBuiltPackage :: + HasEnvConfig env + => Path Abs Dir -> PackageIdentifier -> Set.Set Text -> Text + -> RIO env (Either Text [Text]) +findPackageFieldForBuiltPackage pkgDir pkgId subLibs field = do + let subLibNames = + Set.map (LSubLibName . mkUnqualComponentName . T.unpack) subLibs + libraryNames = Set.insert LMainLibName subLibNames + mungedPackageIds = Set.map (computeCompatPackageId pkgId) libraryNames + distDir <- distDirFromDir pkgDir + ghcPkgExe <- getGhcPkgExe + let inplaceDir = distDir relDirPackageConfInplace + pkgIdStr = packageIdentifierString pkgId + notFoundErr = pure $ + Left $ "Failed to find package key for " <> T.pack pkgIdStr + extractField mungedPkgId = do + mContents <- catch + (ghcPkgField ghcPkgExe inplaceDir mungedPkgId (T.unpack field) await) + -- A .conf file may not exist in the package database for a library or + -- sub-library, if that component has not been built yet. + (\(_ :: ExitCodeException) -> pure Nothing) + case mContents of + Just result -> pure $ Right $ T.strip result + Nothing -> notFoundErr + logDebug $ + "Scanning " + <> fromString (toFilePath inplaceDir) + <> " for munged packages matching " + <> fromString pkgIdStr + (errors, keys) <- + partitionEithers <$> traverse extractField (Set.toList mungedPackageIds) + case errors of + (a:_) -> pure $ Left a -- the first error only, since they're repeated anyway + [] -> pure $ Right keys -displayReportPath :: (HasTerm env) - => Text -> StyleDoc -> RIO env () -displayReportPath report reportPath = - prettyInfo $ "The" <+> fromString (T.unpack report) <+> "is available at" <+> reportPath +displayReportPath :: + HasTerm env + => StyleDoc + -> StyleDoc + -> StyleDoc + -> RIO env () +displayReportPath prefix report reportPath = + prettyInfoL + [ prefix + , report + , flow "is available at" + , reportPath <> "." 
+ ] findExtraTixFiles :: HasEnvConfig env => RIO env [Path Abs File] findExtraTixFiles = do - outputDir <- hpcReportDir - let dir = outputDir relDirExtraTixFiles - dirExists <- doesDirExist dir - if dirExists - then do - (_, files) <- listDir dir - return $ filter ((".tix" `isSuffixOf`) . toFilePath) files - else return [] + outputDir <- hpcReportDir + let dir = outputDir relDirExtraTixFiles + dirExists <- doesDirExist dir + if dirExists + then do + (_, files) <- listDir dir + pure $ filter ((".tix" `L.isSuffixOf`) . toFilePath) files + else pure [] diff --git a/src/Stack/DefaultColorWhen.hs b/src/Stack/DefaultColorWhen.hs index 8c9e4f2a55..5c9440a71b 100644 --- a/src/Stack/DefaultColorWhen.hs +++ b/src/Stack/DefaultColorWhen.hs @@ -1,27 +1,23 @@ +{-| +Module : Stack.DefaultColorWhen +License : BSD-3-Clause +-} + module Stack.DefaultColorWhen ( defaultColorWhen ) where -import Stack.Prelude (stdout) -import Stack.Types.Config (ColorWhen (ColorAuto, ColorNever)) - -import System.Console.ANSI (hSupportsANSIWithoutEmulation) -import System.Environment (lookupEnv) +import Stack.Prelude ( stdout ) +import Stack.Types.ColorWhen ( ColorWhen (..) ) +import System.Console.ANSI ( hNowSupportsANSI ) +import System.Environment ( lookupEnv ) -- | The default adopts the standard proposed at http://no-color.org/, that -- color should not be added by default if the @NO_COLOR@ environment variable -- is present. defaultColorWhen :: IO ColorWhen -defaultColorWhen = do - -- On Windows, 'hSupportsANSIWithoutEmulation' has the side effect of enabling - -- ANSI for ANSI-capable native (ConHost) terminals, if not already - -- ANSI-enabled. Consequently, it is actioned even if @NO_COLOR@ might exist, - -- as @NO_COLOR@ might be overridden in a yaml configuration file or at the - -- command line. 
- supportsANSI <- hSupportsANSIWithoutEmulation stdout - mIsNoColor <- lookupEnv "NO_COLOR" - return $ case mIsNoColor of - Just _ -> ColorNever - _ -> case supportsANSI of - Just False -> ColorNever - _ -> ColorAuto +defaultColorWhen = lookupEnv "NO_COLOR" >>= \case + Just _ -> pure ColorNever + _ -> hNowSupportsANSI stdout >>= \case + False -> pure ColorNever + _ -> pure ColorAuto diff --git a/src/Stack/DependencyGraph.hs b/src/Stack/DependencyGraph.hs new file mode 100644 index 0000000000..b5a484d572 --- /dev/null +++ b/src/Stack/DependencyGraph.hs @@ -0,0 +1,449 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.DependencyGraph +License : BSD-3-Clause + +Module exporting a function to create a pruned dependency graph given +a t'DotOpts' value. +-} + +module Stack.DependencyGraph + ( createPrunedDependencyGraph + , resolveDependencies + , pruneGraph + ) where + +import qualified Data.Foldable as F +import qualified Data.Set as Set +import qualified Data.Map as Map +import qualified Data.Traversable as T +import Distribution.License ( License (..) 
) +import qualified Distribution.PackageDescription as PD +import Distribution.Types.PackageName ( mkPackageName ) +import Path ( parent ) +import Stack.Build ( loadPackage ) +import Stack.Build.Installed ( getInstalled, toInstallMap ) +import Stack.Build.Source + ( loadCommonPackage, loadLocalPackage, loadSourceMap ) +import Stack.Build.Target( NeedTargets (..), parseTargets ) +import Stack.Package ( Package (..), setOfPackageDeps ) +import Stack.Prelude hiding ( Display (..), pkgName, loadPackage ) +import qualified Stack.Prelude ( pkgName ) +import Stack.Runners + ( ShouldReexec (..), withBuildConfig, withConfig + , withEnvConfig + ) +import Stack.SourceMap + ( globalsFromHints, mkProjectPackage, pruneGlobals ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..) ) +import Stack.Types.BuildOptsCLI + ( BuildOptsCLI (..), defaultBuildOptsCLI ) +import Stack.Types.BuildOptsMonoid + ( buildOptsMonoidBenchmarksL, buildOptsMonoidTestsL ) +import Stack.Types.Compiler ( ActualCompiler, wantedToActual ) +import Stack.Types.DependencyTree ( DependencyGraph, DotPayload (..) ) +import Stack.Types.DotConfig ( DotConfig (..) ) +import Stack.Types.DotOpts ( DotOpts (..) ) +import Stack.Types.DumpPackage ( DumpPackage (..) ) +import Stack.Types.EnvConfig ( EnvConfig (..), HasSourceMap (..) ) +import Stack.Types.GhcPkgId + ( GhcPkgId, ghcPkgIdString, parseGhcPkgId ) +import Stack.Types.GlobalOpts ( globalOptsBuildOptsMonoidL ) +import Stack.Types.Package ( LocalPackage (..) ) +import Stack.Types.Runner ( Runner, globalOptsL ) +import Stack.Types.SourceMap + ( CommonPackage (..), DepPackage (..), ProjectPackage (..) + , SMActual (..), SMWanted (..), SourceMap (..) + ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.DependencyGraph" module. 
+newtype DependencyGraphException + = DependencyNotFoundBug GhcPkgId + deriving Show + +instance Exception DependencyGraphException where + displayException (DependencyNotFoundBug depId) = bugReport "[S-7071]" $ concat + [ "Expected to find " + , ghcPkgIdString depId + , " in global DB." + ] + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.DependencyGraph" module. +newtype DependencyGraphPrettyException + = PackageNotFound PackageName + deriving Show + +instance Pretty DependencyGraphPrettyException where + + pretty (PackageNotFound pkgName) = + "[S-7151]" + <> line + <> fillSep + [ flow "The package" + , style Error . fromPackageName $ pkgName + , flow "was not identified as a project package, an extra-dep, or a \ + \package specified by the snapshot." + ] + <> blankLine + <> fillSep + [ "Command" + , style Shell "stack build --dry-run" + , flow "for information about why Stack fails to construct a build \ + \plan." + ] + +instance Exception DependencyGraphPrettyException + +-- | Create the dependency graph and also prune it as specified in the dot +-- options. Returns a set of local names and a map from package names to +-- dependencies. +createPrunedDependencyGraph :: + DotOpts + -> RIO + Runner + ( ActualCompiler + , Set PackageName + , DependencyGraph + ) +createPrunedDependencyGraph dotOpts = withDotConfig dotOpts $ do + localNames <- view $ buildConfigL . to (Map.keysSet . 
(.smWanted.project)) + logDebug "Creating dependency graph" + (compiler, resultGraph) <- createDependencyGraph dotOpts + let pkgsToPrune = Set.union + ( if dotOpts.includeBase + then dotOpts.prune + else Set.insert "base" dotOpts.prune + ) + (nonParentDependencies resultGraph dotOpts.reach) + prunedGraph = pruneGraph localNames pkgsToPrune resultGraph + logDebug "Returning pruned dependency graph" + pure (compiler, localNames, prunedGraph) + +-- Plumbing for --test and --bench flags +withDotConfig :: + DotOpts + -> RIO DotConfig a + -> RIO Runner a +withDotConfig opts inner = + local (over globalOptsL modifyGO) $ + if opts.globalHints + then withConfig NoReexec $ withBuildConfig withGlobalHints + else withConfig YesReexec withReal + where + withGlobalHints = do + buildConfig <- view buildConfigL + globals <- globalsFromHints buildConfig.smWanted.compiler + fakeGhcPkgId <- parseGhcPkgId "ignored" + actual <- either throwIO pure $ + wantedToActual buildConfig.smWanted.compiler + let smActual = SMActual + { compiler = actual + , project = buildConfig.smWanted.project + , deps = buildConfig.smWanted.deps + , globals = Map.mapWithKey toDump globals + } + toDump :: PackageName -> Version -> DumpPackage + toDump pkgName pkgVersion = DumpPackage + { ghcPkgId = fakeGhcPkgId + , packageIdent = PackageIdentifier + { pkgName + , pkgVersion + } + , sublib = Nothing + , license = Nothing + , libDirs = [] + , libraries = [] + , hasExposedModules = True + , exposedModules = mempty + , depends = [] + , haddockInterfaces = [] + , haddockHtml = Nothing + , isExposed = True + } + actualPkgs = + Map.keysSet smActual.deps <> Map.keysSet smActual.project + prunedActual = smActual + { globals = pruneGlobals smActual.globals actualPkgs } + targets <- parseTargets NeedTargets False boptsCLI prunedActual + logDebug "Loading source map" + sourceMap <- loadSourceMap targets boptsCLI smActual + let dc = DotConfig + { buildConfig + , sourceMap + , globalDump = toList smActual.globals + } + 
logDebug "DotConfig fully loaded" + runRIO dc inner + + withReal = withEnvConfig NeedTargets boptsCLI $ do + envConfig <- ask + let sourceMap = envConfig.sourceMap + installMap <- toInstallMap sourceMap + (_, globalDump, _, _) <- getInstalled installMap + let dc = DotConfig + { buildConfig = envConfig.buildConfig + , sourceMap + , globalDump + } + runRIO dc inner + + boptsCLI = defaultBuildOptsCLI + { targetsCLI = opts.dotTargets + , flags = opts.flags + } + modifyGO = + ( if opts.testTargets + then + set + (globalOptsBuildOptsMonoidL . buildOptsMonoidTestsL) + (Just True) + else id + ) + . ( if opts.benchTargets + then + set + (globalOptsBuildOptsMonoidL . buildOptsMonoidBenchmarksL) + (Just True) + else id + ) + +-- | Create the dependency graph, the result is a map from a package name to a +-- tuple of dependencies and payload if available. This function mainly gathers +-- the required arguments for @resolveDependencies@. +createDependencyGraph :: + DotOpts + -> RIO DotConfig (ActualCompiler, DependencyGraph) +createDependencyGraph dotOpts = do + sourceMap <- view sourceMapL + locals <- for (toList sourceMap.project) loadLocalPackage + let graph = Map.fromList $ + projectPackageDependencies dotOpts (filter (.wanted) locals) + globalDump <- view $ to (.globalDump) + -- TODO: Can there be multiple entries for wired-in-packages? If so, this will + -- choose one arbitrarily.. 
+ let globalDumpMap = Map.fromList $ + map (\dp -> (Stack.Prelude.pkgName dp.packageIdent, dp)) globalDump + globalIdMap = + Map.fromList $ map ((.ghcPkgId) &&& (.packageIdent)) globalDump + let depLoader = + createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps + loadPackageDeps name version loc flags ghcOptions cabalConfigOpts + -- Skip packages that can't be loaded - see + -- https://github.com/commercialhaskell/stack/issues/2967 + | name `elem` [mkPackageName "rts", mkPackageName "ghc"] = + pure + ( Set.empty + , DotPayload + { version = Just version + , license = Just $ Right BSD3 + , location = Nothing + } + ) + | otherwise = fmap + (setOfPackageDeps &&& makePayload loc) + (loadPackage loc flags ghcOptions cabalConfigOpts) + resultGraph <- resolveDependencies dotOpts.dependencyDepth graph depLoader + pure (sourceMap.compiler, resultGraph) + where + makePayload loc pkg = DotPayload + { version = Just pkg.version + , license = Just pkg.license + , location = Just $ PLImmutable loc + } + +-- | Resolve the direct (depth 0) external dependencies of the given local +-- packages (assumed to come from project packages) +projectPackageDependencies :: + DotOpts + -> [LocalPackage] + -> [(PackageName, (Set PackageName, DotPayload))] +projectPackageDependencies dotOpts locals = + map + ( \lp -> let pkg = localPackageToPackage lp + pkgDir = parent lp.cabalFP + packageDepsSet = setOfPackageDeps pkg + loc = PLMutable $ ResolvedPath + { resolvedRelative = RelFilePath "N/A" + , resolvedAbsolute = pkgDir + } + in (pkg.name, (deps pkg packageDepsSet, lpPayload pkg loc)) + ) + locals + where + deps pkg packageDepsSet = if dotOpts.includeExternal + then Set.delete pkg.name packageDepsSet + else Set.intersection localNames packageDepsSet + localNames = Set.fromList $ map (.package.name) locals + lpPayload pkg loc = DotPayload + { version = Just pkg.version + , license = Just pkg.license + , location = Just loc + } + +-- | Given a SourceMap and a dependency loader, 
load the set of dependencies for +-- a package +createDepLoader :: + SourceMap + -> Map PackageName DumpPackage + -> Map GhcPkgId PackageIdentifier + -> ( PackageName + -> Version + -> PackageLocationImmutable + -> Map FlagName Bool + -> [Text] + -> [Text] + -> RIO DotConfig (Set PackageName, DotPayload) + ) + -> PackageName + -> RIO DotConfig (Set PackageName, DotPayload) +createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps pkgName = + fromMaybe (prettyThrowIO $ PackageNotFound pkgName) + (projectPackageDeps <|> dependencyDeps <|> globalDeps) + where + projectPackageDeps = loadDeps <$> Map.lookup pkgName sourceMap.project + where + loadDeps pp = do + pkg <- loadCommonPackage pp.projectCommon + pure (setOfPackageDeps pkg, payloadFromLocal pkg Nothing) + + dependencyDeps = + loadDeps <$> Map.lookup pkgName sourceMap.deps + where + loadDeps DepPackage{ location = PLMutable dir } = do + pp <- mkProjectPackage YesPrintWarnings dir False + pkg <- loadCommonPackage pp.projectCommon + pure (setOfPackageDeps pkg, payloadFromLocal pkg (Just $ PLMutable dir)) + + loadDeps dp@DepPackage{ location = PLImmutable loc } = do + let common = dp.depCommon + gpd <- liftIO common.gpd + let PackageIdentifier name version = + PD.package $ PD.packageDescription gpd + flags = common.flags + ghcOptions = common.ghcOptions + cabalConfigOpts = common.cabalConfigOpts + assert + (pkgName == name) + (loadPackageDeps pkgName version loc flags ghcOptions cabalConfigOpts) + + -- If package is a global package, use info from ghc-pkg (#4324, #3084) + globalDeps = + pure . 
getDepsFromDump <$> Map.lookup pkgName globalDumpMap + where + getDepsFromDump dump = (Set.fromList deps, payloadFromDump dump) + where + deps = map ghcIdToPackageName dump.depends + ghcIdToPackageName depId = + maybe + (impureThrow $ DependencyNotFoundBug depId) + Stack.Prelude.pkgName + (Map.lookup depId globalIdMap) + + payloadFromLocal pkg location = DotPayload + { version = Just pkg.version + , license = Just pkg.license + , location + } + + payloadFromDump dp = DotPayload + { version = Just $ pkgVersion dp.packageIdent + , license = Right <$> dp.license + , location = Nothing + } + +-- | Resolve the dependency graph up to (Just depth) or until fixpoint is +-- reached +resolveDependencies :: + (Applicative m, Monad m) + => Maybe Int + -> DependencyGraph + -> (PackageName -> m (Set PackageName, DotPayload)) + -> m DependencyGraph +resolveDependencies (Just 0) graph _ = pure graph +resolveDependencies limit graph loadPackageDeps = do + let values = Set.unions (fst <$> Map.elems graph) + keys = Map.keysSet graph + next = Set.difference values keys + if Set.null next + then pure graph + else do + x <- T.traverse (\name -> (name,) <$> loadPackageDeps name) (F.toList next) + resolveDependencies + (subtract 1 <$> limit) + (Map.unionWith unifier graph (Map.fromList x)) + loadPackageDeps + where + unifier (pkgs1, v1) (pkgs2, _) = (Set.union pkgs1 pkgs2, v1) + +-- | @pruneGraph dontPrune toPrune graph@ prunes all packages in @graph@ with a +-- name in @toPrune@ and removes resulting orphans unless they are in +-- @dontPrune@ +pruneGraph :: + (F.Foldable f, F.Foldable g, Eq a) + => f PackageName + -> g PackageName + -> Map PackageName (Set PackageName, a) + -> Map PackageName (Set PackageName, a) +pruneGraph dontPrune names = + pruneUnreachable dontPrune . 
Map.mapMaybeWithKey (\pkg (pkgDeps, x) -> + if pkg `F.elem` names + then Nothing + else let filtered = Set.filter (`F.notElem` names) pkgDeps + in Just (filtered, x)) + +-- | Make sure that all unreachable nodes (orphans) are pruned +pruneUnreachable :: + (Eq a, F.Foldable f) + => f PackageName + -> Map PackageName (Set PackageName, a) + -> Map PackageName (Set PackageName, a) +pruneUnreachable dontPrune = fixpoint prune + where + fixpoint :: Eq a => (a -> a) -> a -> a + fixpoint f v = if f v == v then v else fixpoint f (f v) + prune graph' = Map.filterWithKey (\k _ -> reachable k) graph' + where + reachable k = k `F.elem` dontPrune || k `Set.member` reachables + reachables = F.fold (fst <$> graph') + +nonParentDependencies :: + F.Foldable f + => Map PackageName (Set PackageName, a) + -> f PackageName + -> Set PackageName +nonParentDependencies graph names + | F.null names = Set.empty + | otherwise = + Set.difference (Map.keysSet graph) (backwardReachable names graph) + +backwardReachable :: + F.Foldable f + => f PackageName + -> Map PackageName (Set PackageName, a) + -> Set PackageName +backwardReachable names graph = go Set.empty (F.toList names) + where + reverseGraph = + Map.fromListWith Set.union + [ (dep, Set.singleton name) + | (name, (deps, _)) <- Map.toList graph + , dep <- Set.toList deps + ] + + go seen [] = seen + go seen (x : xs) + | x `Set.member` seen = go seen xs + | otherwise = + let parents = Map.findWithDefault Set.empty x reverseGraph + in go (Set.insert x seen) (Set.toList parents <> xs) + +localPackageToPackage :: LocalPackage -> Package +localPackageToPackage lp = fromMaybe lp.package lp.testBench diff --git a/src/Stack/Docker.hs b/src/Stack/Docker.hs index 85273650f7..bacb1dcb80 100644 --- a/src/Stack/Docker.hs +++ b/src/Stack/Docker.hs @@ -1,122 +1,157 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE CPP #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE NamedFieldPuns #-} -{-# LANGUAGE 
OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Docker +Description : Run commands in Docker containers. +License : BSD-3-Clause + +Run commands in Docker containers. +-} --- | Run commands in Docker containers module Stack.Docker - (dockerCmdName - ,dockerHelpOptName - ,dockerPullCmdName - ,entrypoint - ,preventInContainer - ,pull - ,reset - ,reExecArgName - ,StackDockerException(..) - ,getProjectRoot - ,runContainerAndExit + ( dockerCmdName + , dockerHelpOptName + , dockerPullCmdName + , entrypoint + , preventInContainer + , pull + , reset + , reExecArgName + , DockerException (..) + , getProjectRoot + , runContainerAndExit ) where -import Stack.Prelude -import qualified Crypto.Hash as Hash (Digest, MD5, hash) -import Pantry.Internal.AesonExtended (FromJSON(..),(.:),(.:?),(.!=),eitherDecode) +import Control.Monad.Extra ( whenJust ) +import qualified Crypto.Hash as Hash ( Digest, MD5, hash ) +import Data.Aeson ( eitherDecode ) +import Data.Aeson.Types ( FromJSON (..), (.!=) ) +import Data.Aeson.WarningParser ( (.:), (.:?) 
) import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy as BL import qualified Data.ByteString.Lazy.Char8 as LBS -import Data.Char (isAscii,isDigit) -import Data.Conduit.List (sinkNull) -import Data.Conduit.Process.Typed hiding (proc) -import Data.List (dropWhileEnd,isPrefixOf,isInfixOf) -import Data.List.Extra (trim) +import Data.Char ( isAscii, isDigit ) +import Data.Conduit.List ( sinkNull ) +import Data.List ( dropWhileEnd, isInfixOf, isPrefixOf ) +import Data.List.Extra ( trim ) import qualified Data.Map.Strict as Map import qualified Data.Text as T import qualified Data.Text.Encoding as T -import Data.Time (UTCTime) -import qualified Data.Version (showVersion, parseVersion) -import Distribution.Version (mkVersion, mkVersion') +import Data.Time ( UTCTime ) +import qualified Data.Version ( parseVersion ) +import Distribution.Version ( mkVersion, mkVersion' ) import Path -import Path.Extra (toFilePathNoTrailingSep) -import Path.IO hiding (canonicalizePath) -import qualified Paths_stack as Meta -import Stack.Config (getInContainer) + ( (), dirname, filename, parent, parseAbsDir + , splitExtension + ) +import Path.Extra ( toFilePathNoTrailingSep ) +import Path.IO + ( copyFile, doesDirExist, doesFileExist, ensureDir + , getCurrentDir, getHomeDir, getModificationTime, listDir + , removeDirRecur, removeFile + ) +import qualified RIO.Directory ( makeAbsolute ) +import RIO.Process + ( ExitCodeException (..), HasProcessContext, augmentPath + , closed, doesExecutableExist, proc, processContextL + , readProcessStdout_, readProcess_, runProcess, runProcess_ + , setStderr, setStdin, setStdout, useHandleOpen + , withWorkingDir + ) +import Stack.Config ( getInContainer ) import Stack.Constants -import Stack.Constants.Config -import Stack.Setup (ensureDockerStackExe) -import Stack.Storage.User (loadDockerImageExeCache,saveDockerImageExeCache) -import Stack.Types.Version + ( buildPlanDir, inContainerEnvVar, platformVariantEnvVar + , relDirBin, 
relDirDotLocal, relDirDotSsh + , relDirDotStackProgName, relDirUnderHome, stackRootEnvVar + ) +import Stack.Constants.Config ( projectDockerSandboxDir ) +import Stack.Docker.Handlers ( handleSetGroups, handleSignals ) +import Stack.Prelude +import Stack.Setup ( ensureDockerStackExe ) +import Stack.Storage.User + ( loadDockerImageExeCache, saveDockerImageExeCache ) import Stack.Types.Config + ( Config (..), HasConfig (..), configProjectRoot, stackRootL + ) import Stack.Types.Docker -import System.Environment (getEnv,getEnvironment,getProgName,getArgs,getExecutablePath) + ( DockerException (..), DockerOpts (..), DockerStackExe (..) + , Mount (..), dockerCmdName, dockerContainerPlatform + , dockerEntrypointArgName, dockerHelpOptName + , dockerPullCmdName, reExecArgName + ) +import Stack.Types.DockerEntrypoint + ( DockerEntrypoint (..), DockerUser (..) ) +import Stack.Types.Runner + ( HasDockerEntrypointMVar (..), progNameL, terminalL + , viewExecutablePath + ) +import Stack.Types.Version ( showStackVersion, withinRange ) +import System.Environment ( getArgs, getEnv, getEnvironment ) import qualified System.FilePath as FP -import System.IO.Error (isDoesNotExistError) -import System.IO.Unsafe (unsafePerformIO) -import qualified System.PosixCompat.User as User +import System.IO.Error ( isDoesNotExistError ) +import qualified System.Posix.User as User import qualified System.PosixCompat.Files as Files -import System.Terminal (hIsTerminalDeviceOrMinTTY) -import Text.ParserCombinators.ReadP (readP_to_S) -import RIO.Process -import qualified RIO.Directory - -#ifndef WINDOWS -import System.Posix.Signals -import qualified System.Posix.User as PosixUser -#endif +import System.Terminal ( hIsTerminalDeviceOrMinTTY ) +import Text.ParserCombinators.ReadP ( readP_to_S ) -- | Function to get command and arguments to run in Docker container -getCmdArgs - :: HasConfig env +getCmdArgs :: + HasConfig env => DockerOpts -> Inspect -> Bool -> RIO env 
(FilePath,[String],[(String,String)],[Mount]) getCmdArgs docker imageInfo isRemoteDocker = do config <- view configL - deUser <- - if fromMaybe (not isRemoteDocker) (dockerSetUser docker) + user <- + if fromMaybe (not isRemoteDocker) docker.setUser then liftIO $ do - duUid <- User.getEffectiveUserID - duGid <- User.getEffectiveGroupID - duGroups <- nubOrd <$> User.getGroups - duUmask <- Files.setFileCreationMask 0o022 + uid <- User.getEffectiveUserID + gid <- User.getEffectiveGroupID + groups <- nubOrd <$> User.getGroups + umask <- Files.setFileCreationMask 0o022 -- Only way to get old umask seems to be to change it, so set it back afterward - _ <- Files.setFileCreationMask duUmask - return (Just DockerUser{..}) - else return Nothing + _ <- Files.setFileCreationMask umask + pure $ Just DockerUser + { uid + , gid + , groups + , umask + } + else pure Nothing args <- fmap - (["--" ++ reExecArgName ++ "=" ++ Data.Version.showVersion Meta.version - ,"--" ++ dockerEntrypointArgName - ,show DockerEntrypoint{..}] ++) - (liftIO getArgs) - case dockerStackExe (configDocker config) of + ( [ "--" ++ reExecArgName ++ "=" ++ showStackVersion + , "--" ++ dockerEntrypointArgName + , show DockerEntrypoint { user } + ] ++ + ) + (liftIO getArgs) + case config.docker.stackExe of Just DockerStackExeHost - | configPlatform config == dockerContainerPlatform -> do - exePath <- resolveFile' =<< liftIO getExecutablePath + | config.platform == dockerContainerPlatform -> do + exePath <- viewExecutablePath cmdArgs args exePath | otherwise -> throwIO UnsupportedStackExeHostPlatformException Just DockerStackExeImage -> do - progName <- liftIO getProgName - return (FP.takeBaseName progName, args, [], []) - Just (DockerStackExePath path) -> do - cmdArgs args path + progName <- view progNameL + pure (FP.takeBaseName progName, args, [], []) + Just (DockerStackExePath path) -> cmdArgs args path Just DockerStackExeDownload -> exeDownload args Nothing - | configPlatform config == 
dockerContainerPlatform -> do - (exePath,exeTimestamp,misCompatible) <- - do exePath <- resolveFile' =<< liftIO getExecutablePath + | config.platform == dockerContainerPlatform -> do + (exePath, exeTimestamp, misCompatible) <- + do exePath <- viewExecutablePath exeTimestamp <- getModificationTime exePath isKnown <- loadDockerImageExeCache - (iiId imageInfo) + imageInfo.iiId exePath exeTimestamp - return (exePath, exeTimestamp, isKnown) + pure (exePath, exeTimestamp, isKnown) case misCompatible of Just True -> cmdArgs args exePath Just False -> exeDownload args @@ -128,7 +163,7 @@ getCmdArgs docker imageInfo isRemoteDocker = do [ "run" , "-v" , toFilePath exePath ++ ":" ++ "/tmp/stack" - , T.unpack (iiId imageInfo) + , T.unpack imageInfo.iiId , "/tmp/stack" , "--version"] sinkNull @@ -138,7 +173,7 @@ getCmdArgs docker imageInfo isRemoteDocker = do Left ExitCodeException{} -> False Right _ -> True saveDockerImageExeCache - (iiId imageInfo) + imageInfo.iiId exePath exeTimestamp compatible @@ -154,16 +189,12 @@ getCmdArgs docker imageInfo isRemoteDocker = do -- MSS 2020-04-21 previously used replaceExtension, but semantics changed in path 0.7 -- In any event, I'm not even sure _why_ we need to drop a file extension here -- Originally introduced here: https://github.com/commercialhaskell/stack/commit/6218dadaf5fd7bf312bb1bd0db63b4784ba78cb2 -#if MIN_VERSION_path(0, 7, 0) let exeBase = case splitExtension exePath of Left _ -> exePath Right (x, _) -> x -#else - exeBase <- exePath -<.> "" -#endif let mountPath = hostBinDir FP. toFilePath (filename exeBase) - return (mountPath, args, [], [Mount (toFilePath exePath) mountPath]) + pure (mountPath, args, [], [Mount (toFilePath exePath) mountPath]) -- | Error if running in a container. preventInContainer :: MonadIO m => m () -> m () @@ -176,265 +207,274 @@ preventInContainer inner = -- | Run a command in a new Docker container, then exit the process. 
runContainerAndExit :: HasConfig env => RIO env void runContainerAndExit = do - config <- view configL - let docker = configDocker config - checkDockerVersion docker - (env,isStdinTerminal,isStderrTerminal,homeDir) <- liftIO $ - (,,,) - <$> getEnvironment - <*> hIsTerminalDeviceOrMinTTY stdin - <*> hIsTerminalDeviceOrMinTTY stderr - <*> getHomeDir - isStdoutTerminal <- view terminalL - let dockerHost = lookup "DOCKER_HOST" env - dockerCertPath = lookup "DOCKER_CERT_PATH" env - bamboo = lookup "bamboo_buildKey" env - jenkins = lookup "JENKINS_HOME" env - msshAuthSock = lookup "SSH_AUTH_SOCK" env - muserEnv = lookup "USER" env - isRemoteDocker = maybe False (isPrefixOf "tcp://") dockerHost - mstackYaml <- for (lookup "STACK_YAML" env) RIO.Directory.makeAbsolute - image <- either throwIO pure (dockerImage docker) - when (isRemoteDocker && - maybe False (isInfixOf "boot2docker") dockerCertPath) - (logWarn "Warning: Using boot2docker is NOT supported, and not likely to perform well.") - maybeImageInfo <- inspect image - imageInfo@Inspect{..} <- case maybeImageInfo of - Just ii -> return ii - Nothing - | dockerAutoPull docker -> - do pullImage docker image - mii2 <- inspect image - case mii2 of - Just ii2 -> return ii2 - Nothing -> throwM (InspectFailedException image) - | otherwise -> throwM (NotPulledException image) - projectRoot <- getProjectRoot - sandboxDir <- projectDockerSandboxDir projectRoot - let ImageConfig {..} = iiConfig - imageEnvVars = map (break (== '=')) icEnv - platformVariant = show $ hashRepoName image - stackRoot = view stackRootL config - sandboxHomeDir = sandboxDir homeDirName - isTerm = not (dockerDetach docker) && - isStdinTerminal && - isStdoutTerminal && - isStderrTerminal - keepStdinOpen = not (dockerDetach docker) && - -- Workaround for https://github.com/docker/docker/issues/12319 - -- This is fixed in Docker 1.9.1, but will leave the workaround - -- in place for now, for users who haven't upgraded yet. 
- (isTerm || (isNothing bamboo && isNothing jenkins)) - let mpath = T.pack <$> lookupImageEnv "PATH" imageEnvVars - when (isNothing mpath) $ do - logWarn "The Docker image does not set the PATH env var" - logWarn "This will likely fail, see https://github.com/commercialhaskell/stack/issues/2742" - newPathEnv <- either throwM return $ augmentPath - [ hostBinDir - , toFilePath (sandboxHomeDir relDirDotLocal relDirBin)] - mpath - (cmnd,args,envVars,extraMount) <- getCmdArgs docker imageInfo isRemoteDocker - pwd <- getCurrentDir - liftIO $ mapM_ ensureDir [sandboxHomeDir, stackRoot] - -- Since $HOME is now mounted in the same place in the container we can - -- just symlink $HOME/.ssh to the right place for the stack docker user - let sshDir = homeDir sshRelDir - sshDirExists <- doesDirExist sshDir - sshSandboxDirExists <- - liftIO - (Files.fileExist - (toFilePathNoTrailingSep (sandboxHomeDir sshRelDir))) - when (sshDirExists && not sshSandboxDirExists) - (liftIO - (Files.createSymbolicLink - (toFilePathNoTrailingSep sshDir) - (toFilePathNoTrailingSep (sandboxHomeDir sshRelDir)))) - let mountSuffix = maybe "" (":" ++) (dockerMountMode docker) - containerID <- withWorkingDir (toFilePath projectRoot) $ trim . 
decodeUtf8 <$> readDockerProcess - (concat - [["create" - ,"-e",inContainerEnvVar ++ "=1" - ,"-e",stackRootEnvVar ++ "=" ++ toFilePathNoTrailingSep stackRoot - ,"-e",platformVariantEnvVar ++ "=dk" ++ platformVariant - ,"-e","HOME=" ++ toFilePathNoTrailingSep sandboxHomeDir - ,"-e","PATH=" ++ T.unpack newPathEnv - ,"-e","PWD=" ++ toFilePathNoTrailingSep pwd - ,"-v",toFilePathNoTrailingSep homeDir ++ ":" ++ toFilePathNoTrailingSep homeDir ++ mountSuffix - ,"-v",toFilePathNoTrailingSep stackRoot ++ ":" ++ toFilePathNoTrailingSep stackRoot ++ mountSuffix - ,"-v",toFilePathNoTrailingSep projectRoot ++ ":" ++ toFilePathNoTrailingSep projectRoot ++ mountSuffix - ,"-v",toFilePathNoTrailingSep sandboxHomeDir ++ ":" ++ toFilePathNoTrailingSep sandboxHomeDir ++ mountSuffix - ,"-w",toFilePathNoTrailingSep pwd] - ,case dockerNetwork docker of + config <- view configL + let docker = config.docker + checkDockerVersion docker + (env, isStdinTerminal, isStderrTerminal, homeDir) <- liftIO $ + (,,,) + <$> getEnvironment + <*> hIsTerminalDeviceOrMinTTY stdin + <*> hIsTerminalDeviceOrMinTTY stderr + <*> getHomeDir + isStdoutTerminal <- view terminalL + let dockerHost = lookup "DOCKER_HOST" env + dockerCertPath = lookup "DOCKER_CERT_PATH" env + msshAuthSock = lookup "SSH_AUTH_SOCK" env + muserEnv = lookup "USER" env + isRemoteDocker = maybe False (isPrefixOf "tcp://") dockerHost + mstackYaml <- for (lookup "STACK_YAML" env) RIO.Directory.makeAbsolute + image <- either throwIO pure docker.image + when + ( isRemoteDocker && maybe False (isInfixOf "boot2docker") dockerCertPath ) + ( prettyWarnS + "Using boot2docker is NOT supported, and not likely to perform well." 
+ ) + imageInfo <- inspect image >>= \case + Just ii -> pure ii + Nothing + | docker.autoPull -> do + pullImage docker image + inspect image >>= \case + Just ii2 -> pure ii2 + Nothing -> throwM (InspectFailedException image) + | otherwise -> throwM (NotPulledException image) + projectRoot <- getProjectRoot + sandboxDir <- projectDockerSandboxDir projectRoot + let ic = imageInfo.config + imageEnvVars = map (break (== '=')) ic.env + platformVariant = show $ hashRepoName image + stackRoot = view stackRootL config + sandboxHomeDir = sandboxDir homeDirName + isTerm = isStdinTerminal && isStdoutTerminal && isStderrTerminal + allocatePseudoTty = not docker.detach && isTerm + keepStdinOpen = not docker.detach + let mpath = T.pack <$> lookupImageEnv "PATH" imageEnvVars + when (isNothing mpath) $ do + prettyWarnL + [ flow "The Docker image does not set the PATH environment variable. \ + \This will likely fail. For further information, see" + , style Url "https://github.com/commercialhaskell/stack/issues/2742" <> "." + ] + newPathEnv <- either throwM pure $ augmentPath + [ hostBinDir + , toFilePath (sandboxHomeDir relDirDotLocal relDirBin) + ] + mpath + (cmnd,args,envVars,extraMount) <- getCmdArgs docker imageInfo isRemoteDocker + pwd <- getCurrentDir + liftIO $ mapM_ ensureDir [sandboxHomeDir, stackRoot] + -- Since $HOME is now mounted in the same place in the container we can + -- just symlink $HOME/.ssh to the right place for the stack docker user + let sshDir = homeDir sshRelDir + sshDirExists <- doesDirExist sshDir + sshSandboxDirExists <- + liftIO + (Files.fileExist + (toFilePathNoTrailingSep (sandboxHomeDir sshRelDir))) + when (sshDirExists && not sshSandboxDirExists) + (liftIO + (Files.createSymbolicLink + (toFilePathNoTrailingSep sshDir) + (toFilePathNoTrailingSep (sandboxHomeDir sshRelDir)))) + let mountSuffix = maybe "" (":" ++) docker.mountMode + containerID <- withWorkingDir (toFilePath projectRoot) $ + trim . 
decodeUtf8 <$> readDockerProcess + ( concat + [ [ "create" + , "-e", inContainerEnvVar ++ "=1" + , "-e", stackRootEnvVar ++ "=" ++ toFilePathNoTrailingSep stackRoot + , "-e", platformVariantEnvVar ++ "=dk" ++ platformVariant + , "-e", "HOME=" ++ toFilePathNoTrailingSep sandboxHomeDir + , "-e", "PATH=" ++ T.unpack newPathEnv + , "-e", "PWD=" ++ toFilePathNoTrailingSep pwd + , "-v" + , toFilePathNoTrailingSep homeDir ++ ":" ++ + toFilePathNoTrailingSep homeDir ++ mountSuffix + , "-v" + , toFilePathNoTrailingSep stackRoot ++ ":" ++ + toFilePathNoTrailingSep stackRoot ++ mountSuffix + , "-v" + , toFilePathNoTrailingSep projectRoot ++ ":" ++ + toFilePathNoTrailingSep projectRoot ++ mountSuffix + , "-v" + , toFilePathNoTrailingSep sandboxHomeDir ++ ":" ++ + toFilePathNoTrailingSep sandboxHomeDir ++ mountSuffix + , "-w", toFilePathNoTrailingSep pwd + ] + , case docker.network of Nothing -> ["--net=host"] Just name -> ["--net=" ++ name] - ,case muserEnv of + , case muserEnv of Nothing -> [] Just userEnv -> ["-e","USER=" ++ userEnv] - ,case msshAuthSock of + , case msshAuthSock of Nothing -> [] Just sshAuthSock -> - ["-e","SSH_AUTH_SOCK=" ++ sshAuthSock - ,"-v",sshAuthSock ++ ":" ++ sshAuthSock] - ,case mstackYaml of + [ "-e","SSH_AUTH_SOCK=" ++ sshAuthSock + , "-v",sshAuthSock ++ ":" ++ sshAuthSock + ] + , case mstackYaml of Nothing -> [] Just stackYaml -> - ["-e","STACK_YAML=" ++ stackYaml - ,"-v",stackYaml++ ":" ++ stackYaml ++ ":ro"] + [ "-e","STACK_YAML=" ++ stackYaml + , "-v",stackYaml++ ":" ++ stackYaml ++ ":ro" + ] -- Disable the deprecated entrypoint in FP Complete-generated images - ,["--entrypoint=/usr/bin/env" - | isJust (lookupImageEnv oldSandboxIdEnvVar imageEnvVars) && - (icEntrypoint == ["/usr/local/sbin/docker-entrypoint"] || - icEntrypoint == ["/root/entrypoint.sh"])] - ,concatMap (\(k,v) -> ["-e", k ++ "=" ++ v]) envVars - ,concatMap (mountArg mountSuffix) (extraMount ++ dockerMount docker) - ,concatMap (\nv -> ["-e", nv]) (dockerEnv docker) - ,case 
dockerContainerName docker of + , [ "--entrypoint=/usr/bin/env" + | isJust (lookupImageEnv oldSandboxIdEnvVar imageEnvVars) + && ( ic.entrypoint == ["/usr/local/sbin/docker-entrypoint"] + || ic.entrypoint == ["/root/entrypoint.sh"] + ) + ] + , concatMap (\(k,v) -> ["-e", k ++ "=" ++ v]) envVars + , concatMap (mountArg mountSuffix) (extraMount ++ docker.mount) + , concatMap (\nv -> ["-e", nv]) docker.env + , case docker.containerName of Just name -> ["--name=" ++ name] Nothing -> [] - ,["-t" | isTerm] - ,["-i" | keepStdinOpen] - ,dockerRunArgs docker - ,[image] - ,[cmnd] - ,args]) --- MSS 2018-08-30 can the CPP below be removed entirely, and instead exec the --- `docker` process so that it can handle the signals directly? -#ifndef WINDOWS - run <- askRunInIO - oldHandlers <- forM [sigINT,sigABRT,sigHUP,sigPIPE,sigTERM,sigUSR1,sigUSR2] $ \sig -> do - let sigHandler = run $ do - readProcessNull "docker" ["kill","--signal=" ++ show sig,containerID] - when (sig `elem` [sigTERM,sigABRT]) $ do - -- Give the container 30 seconds to exit gracefully, then send a sigKILL to force it - threadDelay 30000000 - readProcessNull "docker" ["kill",containerID] - oldHandler <- liftIO $ installHandler sig (Catch sigHandler) Nothing - return (sig, oldHandler) -#endif - let args' = concat [["start"] - ,["-a" | not (dockerDetach docker)] - ,["-i" | keepStdinOpen] - ,[containerID]] - e <- try (proc "docker" args' $ runProcess_ . 
setDelegateCtlc False) - `finally` - (do unless (dockerPersist docker || dockerDetach docker) $ - readProcessNull "docker" ["rm","-f",containerID] - `catch` (\(_::ExitCodeException) -> return ()) -#ifndef WINDOWS - forM_ oldHandlers $ \(sig,oldHandler) -> - liftIO $ installHandler sig oldHandler Nothing -#endif - ) - case e of - Left ExitCodeException{eceExitCode} -> exitWith eceExitCode - Right () -> exitSuccess - where - -- This is using a hash of the Docker repository (without tag or digest) to ensure - -- binaries/libraries aren't shared between Docker and host (or incompatible Docker images) - hashRepoName :: String -> Hash.Digest Hash.MD5 - hashRepoName = Hash.hash . BS.pack . takeWhile (\c -> c /= ':' && c /= '@') - lookupImageEnv name vars = - case lookup name vars of - Just ('=':val) -> Just val - _ -> Nothing - mountArg mountSuffix (Mount host container) = - ["-v",host ++ ":" ++ container ++ mountSuffix] - sshRelDir = relDirDotSsh + , ["-t" | allocatePseudoTty] + , ["-i" | keepStdinOpen] + , docker.runArgs + , [image] + , [cmnd] + , args + ] + ) + handleSignals docker keepStdinOpen containerID >>= \case + Left ExitCodeException{eceExitCode} -> exitWith eceExitCode + Right () -> exitSuccess + where + -- This is using a hash of the Docker repository (without tag or digest) to + -- ensure binaries/libraries aren't shared between Docker and host (or + -- incompatible Docker images) + hashRepoName :: String -> Hash.Digest Hash.MD5 + hashRepoName = Hash.hash . BS.pack . takeWhile (\c -> c /= ':' && c /= '@') + lookupImageEnv name vars = + case lookup name vars of + Just ('=':val) -> Just val + _ -> Nothing + mountArg mountSuffix (Mount host container) = + ["-v",host ++ ":" ++ container ++ mountSuffix] + sshRelDir = relDirDotSsh -- | Inspect Docker image or container. 
-inspect :: (HasProcessContext env, HasLogFunc env) - => String -> RIO env (Maybe Inspect) -inspect image = - do results <- inspects [image] - case Map.toList results of - [] -> return Nothing - [(_,i)] -> return (Just i) - _ -> throwIO (InvalidInspectOutputException "expect a single result") +inspect :: + (HasProcessContext env, HasLogFunc env) + => String + -> RIO env (Maybe Inspect) +inspect image = do + results <- inspects [image] + case Map.toList results of + [] -> pure Nothing + [(_,i)] -> pure (Just i) + _ -> throwIO (InvalidInspectOutputException "expect a single result") -- | Inspect multiple Docker images and/or containers. -inspects :: (HasProcessContext env, HasLogFunc env) - => [String] -> RIO env (Map Text Inspect) -inspects [] = return Map.empty -inspects images = - do maybeInspectOut <- - -- not using 'readDockerProcess' as the error from a missing image - -- needs to be recovered. - try (BL.toStrict . fst <$> proc "docker" ("inspect" : images) readProcess_) - case maybeInspectOut of - Right inspectOut -> - -- filtering with 'isAscii' to workaround @docker inspect@ output containing invalid UTF-8 - case eitherDecode (LBS.pack (filter isAscii (decodeUtf8 inspectOut))) of - Left msg -> throwIO (InvalidInspectOutputException msg) - Right results -> return (Map.fromList (map (\r -> (iiId r,r)) results)) - Left ece - | any (`LBS.isPrefixOf` eceStderr ece) missingImagePrefixes -> return Map.empty - Left e -> throwIO e - where missingImagePrefixes = ["Error: No such image", "Error: No such object:"] +inspects :: + (HasProcessContext env, HasLogFunc env) + => [String] + -> RIO env (Map Text Inspect) +inspects [] = pure Map.empty +inspects images = do + maybeInspectOut <- + -- not using 'readDockerProcess' as the error from a missing image + -- needs to be recovered. + try (BL.toStrict . 
fst <$> proc "docker" ("inspect" : images) readProcess_) + case maybeInspectOut of + Right inspectOut -> + -- filtering with 'isAscii' to workaround @docker inspect@ output + -- containing invalid UTF-8 + case eitherDecode (LBS.pack (filter isAscii (decodeUtf8 inspectOut))) of + Left msg -> throwIO (InvalidInspectOutputException msg) + Right results -> pure (Map.fromList (map (\r -> (r.iiId, r)) results)) + Left ece + | any (`LBS.isPrefixOf` eceStderr ece) missingImagePrefixes -> + pure Map.empty + Left e -> throwIO e + where + missingImagePrefixes = + [ -- Docker >= 29.0.0. See: + -- https://github.com/docker/cli/commit/9ba1314d3acc5bd59417049c26275f33e3d54021 + "error: no such object:" + , "Error: No such image" + , "Error: No such object:" + ] -- | Pull latest version of configured Docker image from registry. pull :: HasConfig env => RIO env () -pull = - do config <- view configL - let docker = configDocker config - checkDockerVersion docker - either throwIO (pullImage docker) (dockerImage docker) +pull = do + config <- view configL + let docker = config.docker + checkDockerVersion docker + either throwIO (pullImage docker) docker.image -- | Pull Docker image from registry. -pullImage :: (HasProcessContext env, HasLogFunc env) - => DockerOpts -> String -> RIO env () -pullImage docker image = - do logInfo ("Pulling image from registry: '" <> fromString image <> "'") - when (dockerRegistryLogin docker) - (do logInfo "You may need to log in." - proc - "docker" - (concat - [["login"] - ,maybe [] (\n -> ["--username=" ++ n]) (dockerRegistryUsername docker) - ,maybe [] (\p -> ["--password=" ++ p]) (dockerRegistryPassword docker) - ,[takeWhile (/= '/') image]]) - runProcess_) - -- We redirect the stdout of the process to stderr so that the output - -- of @docker pull@ will not interfere with the output of other - -- commands when using --auto-docker-pull. See issue #2733. 
- ec <- proc "docker" ["pull", image] $ \pc0 -> do - let pc = setStdout (useHandleOpen stderr) - $ setStderr (useHandleOpen stderr) - $ setStdin closed - pc0 - runProcess pc - case ec of - ExitSuccess -> return () - ExitFailure _ -> throwIO (PullFailedException image) +pullImage :: + (HasProcessContext env, HasTerm env) + => DockerOpts + -> String + -> RIO env () +pullImage docker image = do + prettyInfoL + [ flow "Pulling image from registry:" + , style Current (fromString image) <> "." + ] + when docker.registryLogin $ do + prettyInfoS "You may need to log in." + proc + "docker" + ( concat + [ ["login"] + , maybe [] (\n -> ["--username=" ++ n]) docker.registryUsername + , maybe [] (\p -> ["--password=" ++ p]) docker.registryPassword + , [takeWhile (/= '/') image] + ] + ) + runProcess_ + -- We redirect the stdout of the process to stderr so that the output + -- of @docker pull@ will not interfere with the output of other + -- commands when using --auto-docker-pull. See issue #2733. + ec <- proc "docker" ["pull", image] $ \pc0 -> do + let pc = setStdout (useHandleOpen stderr) + $ setStderr (useHandleOpen stderr) + $ setStdin closed + pc0 + runProcess pc + case ec of + ExitSuccess -> pure () + ExitFailure _ -> throwIO (PullFailedException image) -- | Check docker version (throws exception if incorrect) -checkDockerVersion - :: (HasProcessContext env, HasLogFunc env) - => DockerOpts -> RIO env () -checkDockerVersion docker = - do dockerExists <- doesExecutableExist "docker" - unless dockerExists (throwIO DockerNotInstalledException) - dockerVersionOut <- readDockerProcess ["--version"] - case words (decodeUtf8 dockerVersionOut) of - (_:_:v:_) -> - case fmap mkVersion' $ parseVersion' $ stripVersion v of - Just v' - | v' < minimumDockerVersion -> - throwIO (DockerTooOldException minimumDockerVersion v') - | v' `elem` prohibitedDockerVersions -> - throwIO (DockerVersionProhibitedException prohibitedDockerVersions v') - | not (v' `withinRange` dockerRequireDockerVersion 
docker) -> - throwIO (BadDockerVersionException (dockerRequireDockerVersion docker) v') - | otherwise -> - return () - _ -> throwIO InvalidVersionOutputException - _ -> throwIO InvalidVersionOutputException - where minimumDockerVersion = mkVersion [1, 6, 0] - prohibitedDockerVersions = [] - stripVersion v = takeWhile (/= '-') (dropWhileEnd (not . isDigit) v) - -- version is parsed by Data.Version provided code to avoid - -- Cabal's Distribution.Version lack of support for leading zeros in version - parseVersion' = fmap fst . listToMaybe . reverse . readP_to_S Data.Version.parseVersion +checkDockerVersion :: + (HasProcessContext env, HasLogFunc env) + => DockerOpts + -> RIO env () +checkDockerVersion docker = do + dockerExists <- doesExecutableExist "docker" + unless dockerExists (throwIO DockerNotInstalledException) + dockerVersionOut <- readDockerProcess ["--version"] + case words (decodeUtf8 dockerVersionOut) of + (_:_:v:_) -> + case fmap mkVersion' $ parseVersion' $ stripVersion v of + Just v' + | v' < minimumDockerVersion -> + throwIO (DockerTooOldException minimumDockerVersion v') + | v' `elem` prohibitedDockerVersions -> + throwIO (DockerVersionProhibitedException prohibitedDockerVersions v') + | not (v' `withinRange` docker.requireDockerVersion) -> + throwIO (BadDockerVersionException docker.requireDockerVersion v') + | otherwise -> + pure () + _ -> throwIO InvalidVersionOutputException + _ -> throwIO InvalidVersionOutputException + where + minimumDockerVersion = mkVersion [1, 6, 0] + prohibitedDockerVersions = [] + stripVersion v = takeWhile (/= '-') (dropWhileEnd (not . isDigit) v) + -- version is parsed by Data.Version provided code to avoid + -- Cabal's Distribution.Version lack of support for leading zeros in version + parseVersion' = + fmap fst . listToMaybe . reverse . readP_to_S Data.Version.parseVersion -- | Remove the project's Docker sandbox. 
reset :: HasConfig env => Bool -> RIO env () @@ -446,11 +486,16 @@ reset keepHome = do [homeDirName | keepHome] []) --- | The Docker container "entrypoint": special actions performed when first entering --- a container, such as switching the UID/GID to the "outside-Docker" user's. -entrypoint :: (HasProcessContext env, HasLogFunc env) - => Config -> DockerEntrypoint -> RIO env () -entrypoint config@Config{..} DockerEntrypoint{..} = +-- | The Docker container "entrypoint": special actions performed when first +-- entering a container, such as switching the UID/GID to the "outside-Docker" +-- user's. +entrypoint :: + (HasDockerEntrypointMVar env, HasProcessContext env, HasLogFunc env) + => Config + -> DockerEntrypoint + -> RIO env () +entrypoint config@Config{} de = do + entrypointMVar <- view dockerEntrypointMVarL modifyMVar_ entrypointMVar $ \alreadyRan -> do -- Only run the entrypoint once unless alreadyRan $ do @@ -460,110 +505,111 @@ entrypoint config@Config{..} DockerEntrypoint{..} = estackUserEntry0 <- liftIO $ tryJust (guard . 
isDoesNotExistError) $ User.getUserEntryForName stackUserName -- Switch UID/GID if needed, and update user's home directory - case deUser of - Nothing -> return () - Just (DockerUser 0 _ _ _) -> return () - Just du -> withProcessContext envOverride $ updateOrCreateStackUser estackUserEntry0 homeDir du + whenJust de.user $ \du -> case du of + DockerUser 0 _ _ _ -> pure () + _ -> withProcessContext envOverride $ + updateOrCreateStackUser estackUserEntry0 homeDir du case estackUserEntry0 of - Left _ -> return () + Left _ -> pure () Right ue -> do - -- If the 'stack' user exists in the image, copy any build plans and package indices from - -- its original home directory to the host's stack root, to avoid needing to download them + -- If the 'stack' user exists in the image, copy any build plans and + -- package indices from its original home directory to the host's + -- Stack root, to avoid needing to download them origStackHomeDir <- liftIO $ parseAbsDir (User.homeDirectory ue) let origStackRoot = origStackHomeDir relDirDotStackProgName buildPlanDirExists <- doesDirExist (buildPlanDir origStackRoot) when buildPlanDirExists $ do (_, buildPlans) <- listDir (buildPlanDir origStackRoot) forM_ buildPlans $ \srcBuildPlan -> do - let destBuildPlan = buildPlanDir (view stackRootL config) filename srcBuildPlan + let destBuildPlan = + buildPlanDir (view stackRootL config) filename srcBuildPlan exists <- doesFileExist destBuildPlan unless exists $ do ensureDir (parent destBuildPlan) copyFile srcBuildPlan destBuildPlan - return True - where - updateOrCreateStackUser estackUserEntry homeDir DockerUser{..} = do - case estackUserEntry of - Left _ -> do - -- If no 'stack' user in image, create one with correct UID/GID and home directory - readProcessNull "groupadd" - ["-o" - ,"--gid",show duGid - ,stackUserName] - readProcessNull "useradd" - ["-oN" - ,"--uid",show duUid - ,"--gid",show duGid - ,"--home",toFilePathNoTrailingSep homeDir - ,stackUserName] - Right _ -> do - -- If there 
is already a 'stack' user in the image, adjust its UID/GID and home directory - readProcessNull "usermod" - ["-o" - ,"--uid",show duUid - ,"--home",toFilePathNoTrailingSep homeDir - ,stackUserName] - readProcessNull "groupmod" - ["-o" - ,"--gid",show duGid - ,stackUserName] - forM_ duGroups $ \gid -> do + pure True + where + updateOrCreateStackUser estackUserEntry homeDir du = do + case estackUserEntry of + Left _ -> do + -- If no 'stack' user in image, create one with correct UID/GID and home + -- directory readProcessNull "groupadd" - ["-o" - ,"--gid",show gid - ,"group" ++ show gid] - -- 'setuid' to the wanted UID and GID - liftIO $ do - User.setGroupID duGid -#ifndef WINDOWS - PosixUser.setGroups duGroups -#endif - User.setUserID duUid - _ <- Files.setFileCreationMask duUmask - return () - stackUserName = "stack"::String - --- | MVar used to ensure the Docker entrypoint is performed exactly once -entrypointMVar :: MVar Bool -{-# NOINLINE entrypointMVar #-} -entrypointMVar = unsafePerformIO (newMVar False) + [ "-o" + , "--gid",show du.gid + , stackUserName + ] + readProcessNull "useradd" + [ "-oN" + , "--uid", show du.uid + , "--gid", show du.gid + , "--home", toFilePathNoTrailingSep homeDir + , stackUserName + ] + Right _ -> do + -- If there is already a 'stack' user in the image, adjust its UID/GID + -- and home directory + readProcessNull "usermod" + [ "-o" + , "--uid", show du.uid + , "--home", toFilePathNoTrailingSep homeDir + , stackUserName + ] + readProcessNull "groupmod" + [ "-o" + , "--gid", show du.gid + , stackUserName + ] + forM_ du.groups $ \gid -> + readProcessNull "groupadd" + [ "-o" + , "--gid", show gid + , "group" ++ show gid + ] + -- 'setuid' to the wanted UID and GID + liftIO $ do + User.setGroupID du.gid + handleSetGroups du.groups + User.setUserID du.uid + void $ Files.setFileCreationMask du.umask + stackUserName = "stack" :: String -- | Remove the contents of a directory, without removing the directory itself. 
--- This is used instead of 'FS.removeTree' to clear bind-mounted directories, since --- removing the root of the bind-mount won't work. -removeDirectoryContents :: Path Abs Dir -- ^ Directory to remove contents of - -> [Path Rel Dir] -- ^ Top-level directory names to exclude from removal - -> [Path Rel File] -- ^ Top-level file names to exclude from removal - -> IO () -removeDirectoryContents path excludeDirs excludeFiles = - do isRootDir <- doesDirExist path - when isRootDir - (do (lsd,lsf) <- listDir path - forM_ lsd - (\d -> unless (dirname d `elem` excludeDirs) - (removeDirRecur d)) - forM_ lsf - (\f -> unless (filename f `elem` excludeFiles) - (removeFile f))) +-- This is used instead of 'FS.removeTree' to clear bind-mounted directories, +-- since removing the root of the bind-mount won't work. +removeDirectoryContents :: + Path Abs Dir -- ^ Directory to remove contents of + -> [Path Rel Dir] -- ^ Top-level directory names to exclude from removal + -> [Path Rel File] -- ^ Top-level file names to exclude from removal + -> IO () +removeDirectoryContents path excludeDirs excludeFiles = do + isRootDir <- doesDirExist path + when isRootDir $ do + (lsd,lsf) <- listDir path + forM_ lsd + (\d -> unless (dirname d `elem` excludeDirs) + (removeDirRecur d)) + forM_ lsf + (\f -> unless (filename f `elem` excludeFiles) + (removeFile f)) --- | Produce a strict 'S.ByteString' from the stdout of a --- process. Throws a 'ReadProcessException' exception if the --- process fails. +-- | Produce a strict 'S.ByteString' from the stdout of a process. Throws a +-- 'Rio.Process.ReadProcessException' exception if the process fails. -- --- The stderr output is passed straight through, which is desirable for some cases --- e.g. docker pull, in which docker uses stderr for progress output. +-- The stderr output is passed straight through, which is desirable for some +-- cases e.g. docker pull, in which docker uses stderr for progress output. 
-- -- Use 'readProcess_' directly to customize this. -readDockerProcess - :: (HasProcessContext env, HasLogFunc env) - => [String] -> RIO env BS.ByteString +readDockerProcess :: + (HasProcessContext env, HasLogFunc env) + => [String] -> RIO env BS.ByteString readDockerProcess args = BL.toStrict <$> proc "docker" args readProcessStdout_ -- | Name of home directory within docker sandbox. homeDirName :: Path Rel Dir homeDirName = relDirUnderHome --- | Directory where 'stack' executable is bind-mounted in Docker container +-- | Directory where \'stack\' executable is bind-mounted in Docker container -- This refers to a path in the Linux *container*, and so should remain a -- 'FilePath' (not 'Path Abs Dir') so that it works when the host runs Windows. hostBinDir :: FilePath @@ -576,7 +622,7 @@ decodeUtf8 bs = T.unpack (T.decodeUtf8 bs) -- | Fail with friendly error if project root not set. getProjectRoot :: HasConfig env => RIO env (Path Abs Dir) getProjectRoot = do - mroot <- view $ configL.to configProjectRoot + mroot <- view $ configL . to configProjectRoot maybe (throwIO CannotDetermineProjectRootException) pure mroot -- | Environment variable that contained the old sandbox ID. @@ -586,31 +632,34 @@ oldSandboxIdEnvVar = "DOCKER_SANDBOX_ID" -- | Parsed result of @docker inspect@. data Inspect = Inspect - {iiConfig :: ImageConfig - ,iiCreated :: UTCTime - ,iiId :: Text - ,iiVirtualSize :: Maybe Integer} - deriving (Show) + { config :: ImageConfig + , created :: UTCTime + , iiId :: Text + , virtualSize :: Maybe Integer + } + deriving Show -- | Parse @docker inspect@ output. instance FromJSON Inspect where - parseJSON v = - do o <- parseJSON v - Inspect <$> o .: "Config" - <*> o .: "Created" - <*> o .: "Id" - <*> o .:? "VirtualSize" + parseJSON v = do + o <- parseJSON v + Inspect + <$> o .: "Config" + <*> o .: "Created" + <*> o .: "Id" + <*> o .:? "VirtualSize" -- | Parsed @Config@ section of @docker inspect@ output. 
data ImageConfig = ImageConfig - {icEnv :: [String] - ,icEntrypoint :: [String]} - deriving (Show) + { env :: [String] + , entrypoint :: [String] + } + deriving Show -- | Parse @Config@ section of @docker inspect@ output. instance FromJSON ImageConfig where - parseJSON v = - do o <- parseJSON v - ImageConfig - <$> fmap join (o .:? "Env") .!= [] - <*> fmap join (o .:? "Entrypoint") .!= [] + parseJSON v = do + o <- parseJSON v + ImageConfig + <$> fmap join (o .:? "Env") .!= [] + <*> fmap join (o .:? "Entrypoint") .!= [] diff --git a/src/Stack/DockerCmd.hs b/src/Stack/DockerCmd.hs new file mode 100644 index 0000000000..be9f167001 --- /dev/null +++ b/src/Stack/DockerCmd.hs @@ -0,0 +1,33 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.DockerCmd +Description : Functions related to Stack's @docker pull@ and @docker reset@ + commands. +License : BSD-3-Clause + +Functions related to Stack's @docker pull@ and @docker reset@ commands. +-} + +module Stack.DockerCmd + ( dockerPullCmd + , dockerResetCmd + ) where + +import Stack.Docker ( preventInContainer, pull, reset ) +import Stack.Prelude +import Stack.Runners ( ShouldReexec (..), withConfig ) +import Stack.Types.Runner ( Runner ) + +-- | Function underlying the @stack docker pull@ command. Pull the current +-- Docker image. +dockerPullCmd :: () -> RIO Runner () +dockerPullCmd () = withConfig NoReexec $ preventInContainer pull + +-- | Function underlying the @stack docker reset@ command. Reset the Docker +-- sandbox. +dockerResetCmd :: + Bool + -- ^ Delete the sandbox's home directory? + -> RIO Runner () +dockerResetCmd = withConfig NoReexec . preventInContainer . 
reset diff --git a/src/Stack/Dot.hs b/src/Stack/Dot.hs index 0c517f5d63..adec65cc79 100644 --- a/src/Stack/Dot.hs +++ b/src/Stack/Dot.hs @@ -1,554 +1,136 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} -module Stack.Dot (dot - ,listDependencies - ,DotOpts(..) - ,DotPayload(..) - ,ListDepsOpts(..) - ,ListDepsFormat(..) - ,ListDepsFormatOpts(..) - ,resolveDependencies - ,printGraph - ,pruneGraph - ) where +{-| +Module : Stack.Dot +Description : Functions related to Stack's @dot@ command. +License : BSD-3-Clause + +Functions related to Stack's @dot@ command. +-} + +module Stack.Dot + ( dotCmd + , printGraph + ) where -import Data.Aeson -import qualified Data.ByteString.Lazy.Char8 as LBC8 import qualified Data.Foldable as F -import qualified Data.Sequence as Seq import qualified Data.Set as Set import qualified Data.Map as Map import qualified Data.Text as Text import qualified Data.Text.IO as Text -import qualified Data.Traversable as T -import Distribution.Text (display) -import qualified Distribution.PackageDescription as PD -import qualified Distribution.SPDX.License as SPDX -import Distribution.License (License(BSD3), licenseFromSPDX) -import Distribution.Types.PackageName (mkPackageName) -import qualified Path -import RIO.PrettyPrint (HasTerm (..), HasStylesUpdate (..)) -import RIO.Process (HasProcessContext (..)) -import Stack.Build (loadPackage) -import Stack.Build.Installed (getInstalled, toInstallMap) -import Stack.Build.Source -import Stack.Constants -import Stack.Package -import Stack.Prelude hiding (Display (..), pkgName, loadPackage) -import qualified Stack.Prelude (pkgName) -import Stack.Runners -import Stack.SourceMap -import Stack.Types.Build -import Stack.Types.Compiler (wantedToActual) -import Stack.Types.Config 
-import Stack.Types.GhcPkgId -import Stack.Types.SourceMap -import Stack.Build.Target(NeedTargets(..), parseTargets) - --- | Options record for @stack dot@ -data DotOpts = DotOpts - { dotIncludeExternal :: !Bool - -- ^ Include external dependencies - , dotIncludeBase :: !Bool - -- ^ Include dependencies on base - , dotDependencyDepth :: !(Maybe Int) - -- ^ Limit the depth of dependency resolution to (Just n) or continue until fixpoint - , dotPrune :: !(Set PackageName) - -- ^ Package names to prune from the graph - , dotTargets :: [Text] - -- ^ stack TARGETs to trace dependencies for - , dotFlags :: !(Map ApplyCLIFlag (Map FlagName Bool)) - -- ^ Flags to apply when calculating dependencies - , dotTestTargets :: Bool - -- ^ Like the "--test" flag for build, affects the meaning of 'dotTargets'. - , dotBenchTargets :: Bool - -- ^ Like the "--bench" flag for build, affects the meaning of 'dotTargets'. - , dotGlobalHints :: Bool - -- ^ Use global hints instead of relying on an actual GHC installation. - } - -data ListDepsFormatOpts = ListDepsFormatOpts { listDepsSep :: !Text - -- ^ Separator between the package name and details. - , listDepsLicense :: !Bool - -- ^ Print dependency licenses instead of versions. - } - -data ListDepsFormat = ListDepsText ListDepsFormatOpts - | ListDepsTree ListDepsFormatOpts - | ListDepsJSON - -data ListDepsOpts = ListDepsOpts - { listDepsFormat :: !ListDepsFormat - -- ^ Format of printing dependencies - , listDepsDotOpts :: !DotOpts - -- ^ The normal dot options. - } +import Stack.Constants ( wiredInPackages ) +import Stack.DependencyGraph ( createPrunedDependencyGraph ) +import Stack.Prelude +import Stack.Types.Compiler ( ActualCompiler ) +import Stack.Types.DependencyTree ( DependencyGraph ) +import Stack.Types.DotOpts ( DotOpts (..) 
) +import Stack.Types.Runner ( Runner ) -- | Visualize the project's dependencies as a graphviz graph -dot :: DotOpts -> RIO Runner () -dot dotOpts = do - (localNames, prunedGraph) <- createPrunedDependencyGraph dotOpts - printGraph dotOpts localNames prunedGraph - --- | Information about a package in the dependency graph, when available. -data DotPayload = DotPayload - { payloadVersion :: Maybe Version - -- ^ The package version. - , payloadLicense :: Maybe (Either SPDX.License License) - -- ^ The license the package was released under. - , payloadLocation :: Maybe PackageLocation - -- ^ The location of the package. - } deriving (Eq, Show) - --- | Create the dependency graph and also prune it as specified in the dot --- options. Returns a set of local names and and a map from package names to --- dependencies. -createPrunedDependencyGraph :: DotOpts - -> RIO Runner - (Set PackageName, - Map PackageName (Set PackageName, DotPayload)) -createPrunedDependencyGraph dotOpts = withDotConfig dotOpts $ do - localNames <- view $ buildConfigL.to (Map.keysSet . smwProject . bcSMWanted) - logDebug "Creating dependency graph" - resultGraph <- createDependencyGraph dotOpts - let pkgsToPrune = if dotIncludeBase dotOpts - then dotPrune dotOpts - else Set.insert "base" (dotPrune dotOpts) - prunedGraph = pruneGraph localNames pkgsToPrune resultGraph - logDebug "Returning prouned dependency graph" - return (localNames, prunedGraph) - --- | Create the dependency graph, the result is a map from a package --- name to a tuple of dependencies and payload if available. This --- function mainly gathers the required arguments for --- @resolveDependencies@. 
-createDependencyGraph - :: DotOpts - -> RIO DotConfig (Map PackageName (Set PackageName, DotPayload)) -createDependencyGraph dotOpts = do - sourceMap <- view sourceMapL - locals <- for (toList $ smProject sourceMap) loadLocalPackage - let graph = Map.fromList $ projectPackageDependencies dotOpts (filter lpWanted locals) - globalDump <- view $ to dcGlobalDump - -- TODO: Can there be multiple entries for wired-in-packages? If so, - -- this will choose one arbitrarily.. - let globalDumpMap = Map.fromList $ map (\dp -> (Stack.Prelude.pkgName (dpPackageIdent dp), dp)) globalDump - globalIdMap = Map.fromList $ map (\dp -> (dpGhcPkgId dp, dpPackageIdent dp)) globalDump - let depLoader = createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps - loadPackageDeps name version loc flags ghcOptions cabalConfigOpts - -- Skip packages that can't be loaded - see - -- https://github.com/commercialhaskell/stack/issues/2967 - | name `elem` [mkPackageName "rts", mkPackageName "ghc"] = - return (Set.empty, DotPayload (Just version) (Just $ Right BSD3) Nothing) - | otherwise = - fmap (packageAllDeps &&& makePayload loc) (loadPackage loc flags ghcOptions cabalConfigOpts) - resolveDependencies (dotDependencyDepth dotOpts) graph depLoader - where makePayload loc pkg = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg) (Just $ PLImmutable loc) - -listDependencies - :: ListDepsOpts - -> RIO Runner () -listDependencies opts = do - let dotOpts = listDepsDotOpts opts - (pkgs, resultGraph) <- createPrunedDependencyGraph dotOpts - liftIO $ case listDepsFormat opts of - ListDepsTree treeOpts -> Text.putStrLn "Packages" >> printTree treeOpts dotOpts 0 [] (treeRoots opts pkgs) resultGraph - ListDepsJSON -> printJSON pkgs resultGraph - ListDepsText textOpts -> void (Map.traverseWithKey go (snd <$> resultGraph)) - where go name payload = Text.putStrLn $ listDepsLine textOpts name payload - -data DependencyTree = DependencyTree (Set PackageName) (Map PackageName (Set 
PackageName, DotPayload)) - -instance ToJSON DependencyTree where - toJSON (DependencyTree _ dependencyMap) = - toJSON $ foldToList dependencyToJSON dependencyMap - -foldToList :: (k -> a -> b) -> Map k a -> [b] -foldToList f = Map.foldrWithKey (\k a bs -> bs ++ [f k a]) [] - -dependencyToJSON :: PackageName -> (Set PackageName, DotPayload) -> Value -dependencyToJSON pkg (deps, payload) = let fieldsAlwaysPresent = [ "name" .= packageNameString pkg - , "license" .= licenseText payload - , "version" .= versionText payload - , "dependencies" .= Set.map packageNameString deps - ] - loc = catMaybes [("location" .=) . pkgLocToJSON <$> payloadLocation payload] - in object $ fieldsAlwaysPresent ++ loc - -pkgLocToJSON :: PackageLocation -> Value -pkgLocToJSON (PLMutable (ResolvedPath _ dir)) = object [ "type" .= ("project package" :: Text) - , "url" .= ("file://" ++ Path.toFilePath dir)] -pkgLocToJSON (PLImmutable (PLIHackage pkgid _ _)) = object [ "type" .= ("hackage" :: Text) - , "url" .= ("https://hackage.haskell.org/package/" ++ display pkgid)] -pkgLocToJSON (PLImmutable (PLIArchive archive _)) = let url = case archiveLocation archive of - ALUrl u -> u - ALFilePath (ResolvedPath _ path) -> Text.pack $ "file://" ++ Path.toFilePath path - in object [ "type" .= ("archive" :: Text) - , "url" .= url - , "sha256" .= archiveHash archive - , "size" .= archiveSize archive ] -pkgLocToJSON (PLImmutable (PLIRepo repo _)) = object [ "type" .= case repoType repo of - RepoGit -> "git" :: Text - RepoHg -> "hg" :: Text - , "url" .= repoUrl repo - , "commit" .= repoCommit repo - , "subdir" .= repoSubdir repo - ] - -printJSON :: Set PackageName - -> Map PackageName (Set PackageName, DotPayload) - -> IO () -printJSON pkgs dependencyMap = LBC8.putStrLn $ encode $ DependencyTree pkgs dependencyMap - -treeRoots :: ListDepsOpts -> Set PackageName -> Set PackageName -treeRoots opts projectPackages' = - let targets = dotTargets $ listDepsDotOpts opts - in if null targets - then projectPackages' 
- else Set.fromList $ map (mkPackageName . Text.unpack) targets - -printTree :: ListDepsFormatOpts - -> DotOpts - -> Int - -> [Int] - -> Set PackageName - -> Map PackageName (Set PackageName, DotPayload) - -> IO () -printTree opts dotOpts depth remainingDepsCounts packages dependencyMap = - F.sequence_ $ Seq.mapWithIndex go (toSeq packages) - where - toSeq = Seq.fromList . Set.toList - go index name = let newDepsCounts = remainingDepsCounts ++ [Set.size packages - index - 1] - in - case Map.lookup name dependencyMap of - Just (deps, payload) -> do - printTreeNode opts dotOpts depth newDepsCounts deps payload name - if Just depth == dotDependencyDepth dotOpts - then return () - else printTree opts dotOpts (depth + 1) newDepsCounts deps dependencyMap - -- TODO: Define this behaviour, maybe return an error? - Nothing -> return () - -printTreeNode :: ListDepsFormatOpts - -> DotOpts - -> Int - -> [Int] - -> Set PackageName - -> DotPayload - -> PackageName - -> IO () -printTreeNode opts dotOpts depth remainingDepsCounts deps payload name = - let remainingDepth = fromMaybe 999 (dotDependencyDepth dotOpts) - depth - hasDeps = not $ null deps - in Text.putStrLn $ treeNodePrefix "" remainingDepsCounts hasDeps remainingDepth <> " " <> listDepsLine opts name payload - -treeNodePrefix :: Text -> [Int] -> Bool -> Int -> Text -treeNodePrefix t [] _ _ = t -treeNodePrefix t [0] True 0 = t <> "└──" -treeNodePrefix t [_] True 0 = t <> "├──" -treeNodePrefix t [0] True _ = t <> "└─┬" -treeNodePrefix t [_] True _ = t <> "├─┬" -treeNodePrefix t [0] False _ = t <> "└──" -treeNodePrefix t [_] False _ = t <> "├──" -treeNodePrefix t (0:ns) d remainingDepth = treeNodePrefix (t <> " ") ns d remainingDepth -treeNodePrefix t (_:ns) d remainingDepth = treeNodePrefix (t <> "│ ") ns d remainingDepth - -listDepsLine :: ListDepsFormatOpts -> PackageName -> DotPayload -> Text -listDepsLine opts name payload = Text.pack (packageNameString name) <> listDepsSep opts <> payloadText opts payload - 
-payloadText :: ListDepsFormatOpts -> DotPayload -> Text -payloadText opts payload = - if listDepsLicense opts - then licenseText payload - else versionText payload - -licenseText :: DotPayload -> Text -licenseText payload = maybe "" (Text.pack . display . either licenseFromSPDX id) (payloadLicense payload) - -versionText :: DotPayload -> Text -versionText payload = maybe "" (Text.pack . display) (payloadVersion payload) - --- | @pruneGraph dontPrune toPrune graph@ prunes all packages in --- @graph@ with a name in @toPrune@ and removes resulting orphans --- unless they are in @dontPrune@ -pruneGraph :: (F.Foldable f, F.Foldable g, Eq a) - => f PackageName - -> g PackageName - -> Map PackageName (Set PackageName, a) - -> Map PackageName (Set PackageName, a) -pruneGraph dontPrune names = - pruneUnreachable dontPrune . Map.mapMaybeWithKey (\pkg (pkgDeps,x) -> - if pkg `F.elem` names - then Nothing - else let filtered = Set.filter (\n -> n `F.notElem` names) pkgDeps - in if Set.null filtered && not (Set.null pkgDeps) - then Nothing - else Just (filtered,x)) - --- | Make sure that all unreachable nodes (orphans) are pruned -pruneUnreachable :: (Eq a, F.Foldable f) - => f PackageName - -> Map PackageName (Set PackageName, a) - -> Map PackageName (Set PackageName, a) -pruneUnreachable dontPrune = fixpoint prune - where fixpoint :: Eq a => (a -> a) -> a -> a - fixpoint f v = if f v == v then v else fixpoint f (f v) - prune graph' = Map.filterWithKey (\k _ -> reachable k) graph' - where reachable k = k `F.elem` dontPrune || k `Set.member` reachables - reachables = F.fold (fst <$> graph') - - --- | Resolve the dependency graph up to (Just depth) or until fixpoint is reached -resolveDependencies :: (Applicative m, Monad m) - => Maybe Int - -> Map PackageName (Set PackageName, DotPayload) - -> (PackageName -> m (Set PackageName, DotPayload)) - -> m (Map PackageName (Set PackageName, DotPayload)) -resolveDependencies (Just 0) graph _ = return graph -resolveDependencies limit 
graph loadPackageDeps = do - let values = Set.unions (fst <$> Map.elems graph) - keys = Map.keysSet graph - next = Set.difference values keys - if Set.null next - then return graph - else do - x <- T.traverse (\name -> (name,) <$> loadPackageDeps name) (F.toList next) - resolveDependencies (subtract 1 <$> limit) - (Map.unionWith unifier graph (Map.fromList x)) - loadPackageDeps - where unifier (pkgs1,v1) (pkgs2,_) = (Set.union pkgs1 pkgs2, v1) - --- | Given a SourceMap and a dependency loader, load the set of dependencies for a package -createDepLoader :: SourceMap - -> Map PackageName DumpPackage - -> Map GhcPkgId PackageIdentifier - -> (PackageName -> Version -> PackageLocationImmutable -> - Map FlagName Bool -> [Text] -> [Text] -> RIO DotConfig (Set PackageName, DotPayload)) - -> PackageName - -> RIO DotConfig (Set PackageName, DotPayload) -createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps pkgName = do - fromMaybe noDepsErr - (projectPackageDeps <|> dependencyDeps <|> globalDeps) - where - projectPackageDeps = - loadDeps <$> Map.lookup pkgName (smProject sourceMap) - where - loadDeps pp = do - pkg <- loadCommonPackage (ppCommon pp) - pure (packageAllDeps pkg, payloadFromLocal pkg Nothing) - - dependencyDeps = - loadDeps <$> Map.lookup pkgName (smDeps sourceMap) - where - loadDeps DepPackage{dpLocation=PLMutable dir} = do - pp <- mkProjectPackage YesPrintWarnings dir False - pkg <- loadCommonPackage (ppCommon pp) - pure (packageAllDeps pkg, payloadFromLocal pkg (Just $ PLMutable dir)) - - loadDeps dp@DepPackage{dpLocation=PLImmutable loc} = do - let common = dpCommon dp - gpd <- liftIO $ cpGPD common - let PackageIdentifier name version = PD.package $ PD.packageDescription gpd - flags = cpFlags common - ghcOptions = cpGhcOptions common - cabalConfigOpts = cpCabalConfigOpts common - assert (pkgName == name) (loadPackageDeps pkgName version loc flags ghcOptions cabalConfigOpts) - - -- If package is a global package, use info from ghc-pkg (#4324, 
#3084) - globalDeps = - pure . getDepsFromDump <$> Map.lookup pkgName globalDumpMap - where - getDepsFromDump dump = - (Set.fromList deps, payloadFromDump dump) - where - deps = map ghcIdToPackageName (dpDepends dump) - ghcIdToPackageName depId = - let errText = "Invariant violated: Expected to find " - in maybe (error (errText ++ ghcPkgIdString depId ++ " in global DB")) - Stack.Prelude.pkgName - (Map.lookup depId globalIdMap) - - noDepsErr = error ("Invariant violated: The '" ++ packageNameString pkgName - ++ "' package was not found in any of the dependency sources") - - payloadFromLocal pkg loc = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg) loc - payloadFromDump dp = DotPayload (Just $ pkgVersion $ dpPackageIdent dp) (Right <$> dpLicense dp) Nothing - --- | Resolve the direct (depth 0) external dependencies of the given local packages (assumed to come from project packages) -projectPackageDependencies :: DotOpts -> [LocalPackage] -> [(PackageName, (Set PackageName, DotPayload))] -projectPackageDependencies dotOpts locals = - map (\lp -> let pkg = localPackageToPackage lp - pkgDir = Path.parent $ lpCabalFile lp - loc = PLMutable $ ResolvedPath (RelFilePath "N/A") pkgDir - in (packageName pkg, (deps pkg, lpPayload pkg loc))) - locals - where deps pkg = - if dotIncludeExternal dotOpts - then Set.delete (packageName pkg) (packageAllDeps pkg) - else Set.intersection localNames (packageAllDeps pkg) - localNames = Set.fromList $ map (packageName . 
lpPackage) locals - lpPayload pkg loc = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg) (Just loc) - --- | Print a graphviz graph of the edges in the Map and highlight the given local packages -printGraph :: (Applicative m, MonadIO m) - => DotOpts - -> Set PackageName -- ^ all locals - -> Map PackageName (Set PackageName, DotPayload) - -> m () -printGraph dotOpts locals graph = do +dotCmd :: DotOpts -> RIO Runner () +dotCmd dotOpts = do + (compiler, localNames, prunedGraph) <- createPrunedDependencyGraph dotOpts + printGraph dotOpts compiler localNames prunedGraph + +-- | Print a graphviz graph of the edges in the Map and highlight the given +-- project packages +printGraph :: + (Applicative m, MonadIO m) + => DotOpts + -> ActualCompiler + -> Set PackageName -- ^ All project packages. + -> DependencyGraph + -> m () +printGraph dotOpts compiler locals graph = do liftIO $ Text.putStrLn "strict digraph deps {" printLocalNodes dotOpts filteredLocals - printLeaves graph - void (Map.traverseWithKey printEdges (fst <$> graph)) + printLeaves compiler graph + let allNodes = Map.keysSet graph + void (Map.traverseWithKey (printEdges allNodes) (fst <$> graph)) liftIO $ Text.putStrLn "}" - where filteredLocals = Set.filter (\local' -> - local' `Set.notMember` dotPrune dotOpts) locals - --- | Print the local nodes with a different style depending on options -printLocalNodes :: (F.Foldable t, MonadIO m) - => DotOpts - -> t PackageName - -> m () -printLocalNodes dotOpts locals = liftIO $ Text.putStrLn (Text.intercalate "\n" lpNodes) - where applyStyle :: Text -> Text - applyStyle n = if dotIncludeExternal dotOpts - then n <> " [style=dashed];" - else n <> " [style=solid];" - lpNodes :: [Text] - lpNodes = map (applyStyle . nodeName) (F.toList locals) - --- | Print nodes without dependencies -printLeaves :: MonadIO m - => Map PackageName (Set PackageName, DotPayload) - -> m () -printLeaves = F.mapM_ printLeaf . Map.keysSet . Map.filter Set.null . 
fmap fst - --- | @printDedges p ps@ prints an edge from p to every ps -printEdges :: MonadIO m => PackageName -> Set PackageName -> m () -printEdges package deps = F.forM_ deps (printEdge package) + where + filteredLocals = + Set.filter (\local' -> local' `Set.notMember` dotOpts.prune) locals + +-- | Print the project packages nodes with a different style, depending on +-- options +printLocalNodes :: + (F.Foldable t, MonadIO m) + => DotOpts + -> t PackageName + -> m () +printLocalNodes dotOpts locals = + liftIO $ Text.putStrLn (Text.intercalate "\n" lpNodes) + where + applyStyle :: Text -> Text + applyStyle n = if dotOpts.includeExternal + then n <> " [style=dashed];" + else n <> " [style=solid];" + lpNodes :: [Text] + lpNodes = map (applyStyle . nodeName) (F.toList locals) + +-- | Print relevant nodes, based on their relevant attributes. +printLeaves :: MonadIO m => ActualCompiler -> DependencyGraph -> m () +printLeaves compiler graph = + F.mapM_ printLeaf (Map.mapWithKey nodeAttributes graph) + where + allNodes = Map.keysSet graph + hasNoNodes = F.all (`Set.notMember` allNodes) + nodeAttributes package (deps, _) = + let isWiredInPackage = isWiredIn compiler package + isBottomRow = hasNoNodes deps + in (package, isWiredInPackage, isBottomRow) + +-- | @printDedges ps p ps'@ prints an edge from @p@ to every @ps'@, if it is a +-- member of @ps@. +printEdges :: + MonadIO m + => Set PackageName + -- ^ The nodes in the graph. + -> PackageName + -- ^ The node in question. + -> Set PackageName + -- ^ The dependencies of the node in question. 
+ -> m () +printEdges nodes package deps = F.forM_ deps $ \dep -> + when (dep `elem` nodes) $ printEdge package dep -- | Print an edge between the two package names printEdge :: MonadIO m => PackageName -> PackageName -> m () -printEdge from to' = liftIO $ Text.putStrLn (Text.concat [ nodeName from, " -> ", nodeName to', ";"]) +printEdge from to' = + liftIO $ Text.putStrLn (Text.concat [ nodeName from + , " -> " + , nodeName to' + , ";" ]) -- | Convert a package name to a graph node name. nodeName :: PackageName -> Text nodeName name = "\"" <> Text.pack (packageNameString name) <> "\"" --- | Print a node with no dependencies -printLeaf :: MonadIO m => PackageName -> m () -printLeaf package = liftIO . Text.putStrLn . Text.concat $ - if isWiredIn package - then ["{rank=max; ", nodeName package, " [shape=box]; };"] - else ["{rank=max; ", nodeName package, "; };"] - --- | Check if the package is wired in (shipped with) ghc -isWiredIn :: PackageName -> Bool -isWiredIn = (`Set.member` wiredInPackages) - -localPackageToPackage :: LocalPackage -> Package -localPackageToPackage lp = - fromMaybe (lpPackage lp) (lpTestBench lp) - --- Plumbing for --test and --bench flags -withDotConfig - :: DotOpts - -> RIO DotConfig a - -> RIO Runner a -withDotConfig opts inner = - local (over globalOptsL modifyGO) $ - if dotGlobalHints opts - then withConfig NoReexec $ withBuildConfig withGlobalHints - else withConfig YesReexec withReal - where - withGlobalHints = do - bconfig <- view buildConfigL - globals <- globalsFromHints $ smwCompiler $ bcSMWanted bconfig - fakeGhcPkgId <- parseGhcPkgId "ignored" - actual <- either throwIO pure $ - wantedToActual $ smwCompiler $ - bcSMWanted bconfig - let smActual = SMActual - { smaCompiler = actual - , smaProject = smwProject $ bcSMWanted bconfig - , smaDeps = smwDeps $ bcSMWanted bconfig - , smaGlobal = Map.mapWithKey toDump globals - } - toDump :: PackageName -> Version -> DumpPackage - toDump name version = DumpPackage - { dpGhcPkgId = 
fakeGhcPkgId - , dpPackageIdent = PackageIdentifier name version - , dpParentLibIdent = Nothing - , dpLicense = Nothing - , dpLibDirs = [] - , dpLibraries = [] - , dpHasExposedModules = True - , dpExposedModules = mempty - , dpDepends = [] - , dpHaddockInterfaces = [] - , dpHaddockHtml = Nothing - , dpIsExposed = True - } - actualPkgs = Map.keysSet (smaDeps smActual) <> - Map.keysSet (smaProject smActual) - prunedActual = smActual { smaGlobal = pruneGlobals (smaGlobal smActual) actualPkgs } - targets <- parseTargets NeedTargets False boptsCLI prunedActual - logDebug "Loading source map" - sourceMap <- loadSourceMap targets boptsCLI smActual - let dc = DotConfig - { dcBuildConfig = bconfig - , dcSourceMap = sourceMap - , dcGlobalDump = toList $ smaGlobal smActual - } - logDebug "DotConfig fully loaded" - runRIO dc inner - - withReal = withEnvConfig NeedTargets boptsCLI $ do - envConfig <- ask - let sourceMap = envConfigSourceMap envConfig - installMap <- toInstallMap sourceMap - (_, globalDump, _, _) <- getInstalled installMap - let dc = DotConfig - { dcBuildConfig = envConfigBuildConfig envConfig - , dcSourceMap = sourceMap - , dcGlobalDump = globalDump - } - runRIO dc inner - - boptsCLI = defaultBuildOptsCLI - { boptsCLITargets = dotTargets opts - , boptsCLIFlags = dotFlags opts - } - modifyGO = - (if dotTestTargets opts then set (globalOptsBuildOptsMonoidL.buildOptsMonoidTestsL) (Just True) else id) . 
- (if dotBenchTargets opts then set (globalOptsBuildOptsMonoidL.buildOptsMonoidBenchmarksL) (Just True) else id) - -data DotConfig = DotConfig - { dcBuildConfig :: !BuildConfig - , dcSourceMap :: !SourceMap - , dcGlobalDump :: ![DumpPackage] - } -instance HasLogFunc DotConfig where - logFuncL = runnerL.logFuncL -instance HasPantryConfig DotConfig where - pantryConfigL = configL.pantryConfigL -instance HasTerm DotConfig where - useColorL = runnerL.useColorL - termWidthL = runnerL.termWidthL -instance HasStylesUpdate DotConfig where - stylesUpdateL = runnerL.stylesUpdateL -instance HasGHCVariant DotConfig -instance HasPlatform DotConfig -instance HasRunner DotConfig where - runnerL = configL.runnerL -instance HasProcessContext DotConfig where - processContextL = runnerL.processContextL -instance HasConfig DotConfig -instance HasBuildConfig DotConfig where - buildConfigL = lens dcBuildConfig (\x y -> x { dcBuildConfig = y }) -instance HasSourceMap DotConfig where - sourceMapL = lens dcSourceMap (\x y -> x { dcSourceMap = y }) +-- | Print a node if it (a) is a GHC wired-in package or (b) has no dependencies +-- that are also nodes. +printLeaf :: + MonadIO m + => ( PackageName + , Bool + -- Is package a GHC wired-in package? + , Bool + -- Does package have no dependencies that are are also nodes in the + -- graph? + ) + -> m () +printLeaf (package, isWiredInPackage, isBottomRow) = + when (isWiredInPackage || isBottomRow) $ + liftIO . Text.putStrLn . 
Text.concat $ + [ "{"] + <> [ "rank=max; " | isBottomRow ] + <> [ nodeName package ] + <> [ " [shape=box]" | isWiredInPackage ] + <> [ "; };" ] + +-- | Check if the package is a GHC wired-in package +isWiredIn :: ActualCompiler -> PackageName -> Bool +isWiredIn compiler package = + package `Set.member` wiredInPackages compiler diff --git a/src/Stack/Eval.hs b/src/Stack/Eval.hs new file mode 100644 index 0000000000..5b63a72de4 --- /dev/null +++ b/src/Stack/Eval.hs @@ -0,0 +1,42 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Eval +Description : Types and functions related to Stack's @eval@ command. +License : BSD-3-Clause + +Types and functions related to Stack's @eval@ command. +-} + +module Stack.Eval + ( EvalOpts (..) + , evalCmd + ) where + +import Stack.Exec + ( ExecOpts (..), ExecOptsExtra, SpecialExecCmd (..) + , execCmd + ) +import Stack.Prelude +import Stack.Types.Runner ( Runner ) + +-- | Type representing command line options for the @stack eval@ command. +data EvalOpts = EvalOpts + { arg :: !String + , extra :: !ExecOptsExtra + } + deriving Show + +-- | Function underlying the @stack eval@ command. Evaluate some Haskell code +-- inline. +evalCmd :: EvalOpts -> RIO Runner () +evalCmd eval = execCmd execOpts + where + execOpts = ExecOpts + { cmd = ExecGhc + , args = ["-e", eval.arg] + , extra = eval.extra + } diff --git a/src/Stack/Exec.hs b/src/Stack/Exec.hs new file mode 100644 index 0000000000..341673c68f --- /dev/null +++ b/src/Stack/Exec.hs @@ -0,0 +1,227 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Exec +Description : Types and function related to Stack's @exec@, @ghc@, @run@, + @runghc@ and @runhaskell@ commands. +License : BSD-3-Clause + +Types and function related to Stack's @exec@, @ghc@, @run@, @runghc@ and +@runhaskell@ commands. 
+-} + +module Stack.Exec + ( ExecOpts (..) + , SpecialExecCmd (..) + , ExecOptsExtra (..) + , execCmd + ) where + +import qualified Data.List as L +import qualified Data.Map.Strict as Map +import qualified Data.Set as Set +import qualified Data.Text as T +import Distribution.Types.PackageName ( unPackageName ) +import RIO.NonEmpty ( head, nonEmpty ) +import RIO.Process ( exec ) +import Stack.Build ( build ) +import Stack.Build.Target + ( NeedTargets (..), RawTarget (..), parseRawTarget ) +import Stack.GhcPkg ( findGhcPkgField ) +import Stack.Setup ( withNewLocalBuildTargets ) +import Stack.Prelude +import Stack.Runners ( ShouldReexec (..), withConfig, withEnvConfig ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..) ) +import Stack.Types.BuildOptsCLI + ( BuildOptsCLI (..), defaultBuildOptsCLI ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), HasCompiler (..), getGhcPkgExe ) +import Stack.Types.ComponentUtils + ( unqualCompFromString, unqualCompToText ) +import Stack.Types.Config ( Config (..), HasConfig (..) ) +import Stack.Types.EnvConfig ( EnvConfig ) +import Stack.Types.EnvSettings ( EnvSettings (..) ) +import Stack.Types.NamedComponent ( NamedComponent (..), isCExe ) +import Stack.Types.Runner ( Runner ) +import Stack.Types.SourceMap ( SMWanted (..), ppComponents ) +import System.Directory ( withCurrentDirectory ) +import System.FilePath ( isValid ) + +-- | Type representing exceptions thrown by functions in the "Stack.Exec" +-- module. +newtype ExecException + = InvalidPathForExec FilePath + deriving Show + +instance Exception ExecException where + displayException (InvalidPathForExec path) = concat + [ "Error: [S-1541]\n" + , "Got an invalid '--cwd' argument for 'stack exec' (" + , path + , ")." + ] + +-- | Type representing \'pretty\' exceptions thrown by functions in the +-- "Stack.Exec" module. 
+data ExecPrettyException + = PackageIdNotFoundBug !String + | ExecutableToRunNotFound + | NoPackageIdReportedBug + | InvalidExecTargets ![Text] + deriving Show + +instance Pretty ExecPrettyException where + pretty (PackageIdNotFoundBug name) = bugPrettyReport "[S-8251]" $ + fillSep + [ flow "Could not find the package id of the package" + , style Target (fromString name) <> "." + ] + pretty ExecutableToRunNotFound = + "[S-2483]" + <> line + <> flow "No executables found." + pretty NoPackageIdReportedBug = bugPrettyReport "S-8600" $ + flow "execCmd: findGhcPkgField returned Just \"\"." + pretty (InvalidExecTargets targets) = + "[S-7371]" + <> line + <> fillSep + [ flow "The following are invalid" + , style Shell "--package" + , "values for" + , style Shell (flow "stack ghc") <> "," + , style Shell (flow "stack runghc") <> "," + , "or" + , style Shell (flow "stack runhaskell") <> ":" + ] + <> line + <> bulletedList (map (style Target . string . T.unpack) targets ) + +instance Exception ExecPrettyException + +-- | Type representing Stack's execution commands. +data SpecialExecCmd + = ExecCmd String + -- ^ @stack exec@ command. + | ExecRun + -- ^ @stack run@ command. + | ExecGhc + -- ^ @stack ghc@ command. + | ExecRunGhc + -- ^ @stack runghc@ or @stack runhaskell@ command. + deriving (Eq, Show) + +-- | Type representing extra Stack options for Stack's execution commands. +data ExecOptsExtra = ExecOptsExtra + { envSettings :: !EnvSettings + , packages :: ![String] + , rtsOptions :: ![String] + , cwd :: !(Maybe FilePath) + } + deriving Show + +-- | Type representing options for Stack's execution commands. +data ExecOpts = ExecOpts + { cmd :: !SpecialExecCmd + , args :: ![String] + , extra :: !ExecOptsExtra + } + deriving Show + +-- | Type representing valid targets for @--package@ option. +data ExecTarget = ExecTarget PackageName (Maybe Version) + +-- | The function underlying Stack's @exec@, @ghc@, @run@, @runghc@ and +-- @runhaskell@ commands. Execute a command. 
+execCmd :: ExecOpts -> RIO Runner () +execCmd opts = + withConfig YesReexec $ withEnvConfig AllowNoTargets boptsCLI $ do + let (errs, execTargets) = partitionEithers $ map fromTarget targets + unless (null errs) $ prettyThrowM $ InvalidExecTargets errs + unless (null execTargets) $ build Nothing + + config <- view configL + menv <- liftIO $ config.processContextSettings eo.envSettings + withProcessContext menv $ do + -- Add RTS options to arguments + let argsWithRts args = if null eo.rtsOptions + then args :: [String] + else args ++ ["+RTS"] ++ eo.rtsOptions ++ ["-RTS"] + (cmd, args) <- case (opts.cmd, argsWithRts opts.args) of + (ExecCmd cmd, args) -> pure (cmd, args) + (ExecRun, args) -> getRunCmd args + (ExecGhc, args) -> getGhcCmd execTargets args + (ExecRunGhc, args) -> getRunGhcCmd execTargets args + + runWithPath eo.cwd $ exec cmd args + where + eo = opts.extra + + targets = concatMap (T.words . T.pack) eo.packages + boptsCLI = defaultBuildOptsCLI { targetsCLI = targets } + + fromTarget :: Text -> Either Text ExecTarget + fromTarget target = + case parseRawTarget target >>= toExecTarget of + Nothing -> Left target + Just execTarget -> Right execTarget + + toExecTarget :: RawTarget -> Maybe ExecTarget + toExecTarget (RTPackageComponent _ _) = Nothing + toExecTarget (RTComponent _) = Nothing + toExecTarget (RTPackage name) = Just $ ExecTarget name Nothing + toExecTarget (RTPackageIdentifier (PackageIdentifier name pkgId)) = + Just $ ExecTarget name (Just pkgId) + + -- return the package-id of the first package in GHC_PACKAGE_PATH + getPkgId (ExecTarget pkgName _) = do + let name = unPackageName pkgName + pkg <- getGhcPkgExe + findGhcPkgField pkg [] name "id" >>= \case + Just i -> maybe + (prettyThrowIO NoPackageIdReportedBug) + (pure . 
head) + (nonEmpty $ words $ T.unpack i) + -- should never happen as we have already installed the packages + _ -> prettyThrowIO (PackageIdNotFoundBug name) + + getPkgOpts pkgs = + map ("-package-id=" ++) <$> mapM getPkgId pkgs + + getRunCmd args = do + packages <- view $ buildConfigL . to (.smWanted.project) + pkgComponents <- for (Map.elems packages) ppComponents + let executables = concatMap (filter isCExe . Set.toList) pkgComponents + (exe, args') = case args of + [] -> (firstExe, args) + x:xs -> let matchesExecutable y = y == CExe (unqualCompFromString x) + in case L.find matchesExecutable executables of + Nothing -> (firstExe, args) + argExe -> (argExe, xs) + where + firstExe = listToMaybe executables + case exe of + Just (CExe exe') -> do + let textExeName = unqualCompToText exe' + withNewLocalBuildTargets [T.cons ':' textExeName] $ build Nothing + pure (T.unpack textExeName, args') + _ -> prettyThrowIO ExecutableToRunNotFound + + getGhcCmd pkgs args = do + pkgopts <- getPkgOpts pkgs + compiler <- view $ compilerPathsL . to (.compiler) + pure (toFilePath compiler, pkgopts ++ args) + + getRunGhcCmd pkgs args = do + pkgopts <- getPkgOpts pkgs + interpret <- view $ compilerPathsL . 
to (.interpreter) + pure (toFilePath interpret, pkgopts ++ args) + + runWithPath :: Maybe FilePath -> RIO EnvConfig () -> RIO EnvConfig () + runWithPath path callback = case path of + Nothing -> callback + Just p | not (isValid p) -> throwIO $ InvalidPathForExec p + Just p -> withUnliftIO $ \ul -> withCurrentDirectory p $ unliftIO ul callback diff --git a/src/Stack/FileWatch.hs b/src/Stack/FileWatch.hs index f977cbb2fc..9a8a0c3b2a 100644 --- a/src/Stack/FileWatch.hs +++ b/src/Stack/FileWatch.hs @@ -1,131 +1,219 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TupleSections #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.FileWatch +License : BSD-3-Clause +-} + module Stack.FileWatch - ( fileWatch - , fileWatchPoll - ) where + ( WatchMode (WatchModePoll) + , fileWatch + , fileWatchPoll + ) where -import Control.Concurrent.STM (check) -import Stack.Prelude +import Control.Concurrent.STM ( check ) +import qualified Data.Map.Merge.Strict as Map import qualified Data.Map.Strict as Map import qualified Data.Set as Set -import GHC.IO.Exception -import Path -import System.FSNotify -import System.IO (getLine) -import RIO.PrettyPrint hiding (line) - -fileWatch - :: (HasLogFunc env, HasTerm env) - => ((Set (Path Abs File) -> IO ()) -> RIO env ()) +import qualified Data.Text as T +import GHC.IO.Exception + ( IOErrorType (InvalidArgument), IOException (..) ) +import Path ( fileExtension, parent ) +import Path.IO ( doesFileExist, executable, getPermissions ) +import RIO.Process + ( EnvVars, HasProcessContext (..), proc, runProcess + , withModifyEnvVars + ) +import System.Permissions ( osIsWindows ) +import Stack.Prelude +import Stack.Types.Config ( Config (..), HasConfig (..) ) +import Stack.Types.Runner ( HasRunner (..), Runner (..) 
) +import System.FSNotify + ( WatchConfig, WatchMode (..), confWatchMode, defaultConfig + , eventPath, watchDir, withManagerConf + ) +import System.IO ( getLine ) + +fileWatch :: + (HasConfig env, HasTerm env) + => ((Set (Path Abs File) -> IO ()) -> RIO Runner ()) -> RIO env () fileWatch = fileWatchConf defaultConfig -fileWatchPoll - :: (HasLogFunc env, HasTerm env) - => ((Set (Path Abs File) -> IO ()) -> RIO env ()) +fileWatchPoll :: + (HasConfig env, HasTerm env) + => ((Set (Path Abs File) -> IO ()) -> RIO Runner ()) -> RIO env () -fileWatchPoll = fileWatchConf $ defaultConfig { confUsePolling = True } +fileWatchPoll = + fileWatchConf $ defaultConfig { confWatchMode = WatchModePoll 1000000 } -- | Run an action, watching for file changes -- -- The action provided takes a callback that is used to set the files to be -- watched. When any of those files are changed, we rerun the action again. -fileWatchConf - :: (HasLogFunc env, HasTerm env) +fileWatchConf :: + (HasConfig env, HasTerm env) => WatchConfig - -> ((Set (Path Abs File) -> IO ()) -> RIO env ()) + -> ((Set (Path Abs File) -> IO ()) -> RIO Runner ()) -> RIO env () -fileWatchConf cfg inner = withRunInIO $ \run -> withManagerConf cfg $ \manager -> do +fileWatchConf cfg inner = do + runner <- view runnerL + mHook <- view $ configL . 
to (.fileWatchHook) + withRunInIO $ \run -> withManagerConf cfg $ \manager -> do allFiles <- newTVarIO Set.empty dirtyVar <- newTVarIO True watchVar <- newTVarIO Map.empty let onChange event = atomically $ do - files <- readTVar allFiles - when (eventPath event `Set.member` files) (writeTVar dirtyVar True) + files <- readTVar allFiles + when (eventPath event `Set.member` files) (writeTVar dirtyVar True) setWatched :: Set (Path Abs File) -> IO () setWatched files = do - atomically $ writeTVar allFiles $ Set.map toFilePath files - watch0 <- readTVarIO watchVar - let actions = Map.mergeWithKey - keepListening - stopListening - startListening - watch0 - newDirs - watch1 <- forM (Map.toList actions) $ \(k, mmv) -> do - mv <- mmv - return $ - case mv of - Nothing -> Map.empty - Just v -> Map.singleton k v - atomically $ writeTVar watchVar $ Map.unions watch1 - where - newDirs = Map.fromList $ map (, ()) - $ Set.toList - $ Set.map parent files - - keepListening _dir listen () = Just $ return $ Just listen - stopListening = Map.map $ \f -> do - () <- f `catch` \ioe -> - -- Ignore invalid argument error - it can happen if - -- the directory is removed. 
- case ioe_type ioe of - InvalidArgument -> return () - _ -> throwIO ioe - return Nothing - startListening = Map.mapWithKey $ \dir () -> do - let dir' = fromString $ toFilePath dir - listen <- watchDir manager dir' (const True) onChange - return $ Just listen + atomically $ writeTVar allFiles $ Set.map toFilePath files + watch0 <- readTVarIO watchVar + let actions = Map.merge + (Map.mapMissing stopListening) + (Map.mapMissing startListening) + (Map.zipWithMatched keepListening) + watch0 + newDirs + watch1 <- forM (Map.toList actions) $ \(k, mmv) -> + mmv <&> \case + Nothing -> Map.empty + Just v -> Map.singleton k v + atomically $ writeTVar watchVar $ Map.unions watch1 + where + newDirs = Map.fromList $ map (, ()) + $ Set.toList + $ Set.map parent files + + keepListening _dir listen () = pure $ Just listen + stopListening _ f = do + () <- f `catch` \ioe -> + -- Ignore invalid argument error - it can happen if + -- the directory is removed. + case ioe_type ioe of + InvalidArgument -> pure () + _ -> throwIO ioe + pure Nothing + startListening dir () = do + let dir' = fromString $ toFilePath dir + listen <- watchDir manager dir' (const True) onChange + pure $ Just listen let watchInput = do - line <- getLine - unless (line == "quit") $ do - run $ case line of - "help" -> do - logInfo "" - logInfo "help: display this help" - logInfo "quit: exit" - logInfo "build: force a rebuild" - logInfo "watched: display watched files" - "build" -> atomically $ writeTVar dirtyVar True - "watched" -> do - watch <- readTVarIO allFiles - mapM_ (logInfo . fromString) (Set.toList watch) - "" -> atomically $ writeTVar dirtyVar True - _ -> logInfo $ - "Unknown command: " <> - displayShow line <> - ". Try 'help'" - - watchInput + l <- getLine + unless (l == "quit") $ do + run $ case l of + "help" -> do + prettyInfo $ + line + <> fillSep + [ style Shell "help" <> ":" + , flow "display this help." + ] + <> line + <> fillSep + [ style Shell "quit" <> ":" + , "exit." 
+ ] + <> line + <> fillSep + [ style Shell "build" <> ":" + , flow "force a rebuild." + ] + <> line + <> fillSep + [ style Shell "watched" <> ":" + , flow "display watched files." + ] + "build" -> atomically $ writeTVar dirtyVar True + "watched" -> do + watch <- readTVarIO allFiles + mapM_ (prettyInfo . style File . fromString) (Set.toList watch) + "" -> atomically $ writeTVar dirtyVar True + _ -> prettyInfoL + [ flow "Unknown command:" + , style Shell (fromString l) <> "." + , "Try" + , style Shell "help" <> "." + ] + + watchInput race_ watchInput $ run $ forever $ do - atomically $ do - dirty <- readTVar dirtyVar - check dirty - - eres <- tryAny $ inner setWatched - - -- Clear dirtiness flag after the build to avoid an infinite - -- loop caused by the build itself triggering dirtiness. This - -- could be viewed as a bug, since files changed during the - -- build will not trigger an extra rebuild, but overall seems - -- like better behavior. See - -- https://github.com/commercialhaskell/stack/issues/822 - atomically $ writeTVar dirtyVar False - - prettyInfo $ - case eres of + atomically $ do + dirty <- readTVar dirtyVar + check dirty + + eres <- tryAny $ runRIO runner (inner setWatched) + + -- Clear dirtiness flag after the build to avoid an infinite loop caused + -- by the build itself triggering dirtiness. This could be viewed as a + -- bug, since files changed during the build will not trigger an extra + -- rebuild, but overall seems like better behavior. See + -- https://github.com/commercialhaskell/stack/issues/822 + atomically $ writeTVar dirtyVar False + + let defaultAction = case eres of Left e -> - let theStyle = case fromException e of - Just ExitSuccess -> Good - _ -> Error - in style theStyle $ fromString $ show e - _ -> style Good "Success! Waiting for next file change." 
+ case fromException e of + Just ExitSuccess -> + prettyInfo $ style Good $ fromString $ displayException e + _ -> case fromException e :: Maybe PrettyException of + Just pe -> prettyError $ pretty pe + _ -> prettyInfo $ style Error $ fromString $ displayException e + _ -> prettyInfo $ + style Good (flow "Success! Waiting for next file change.") + + case mHook of + Nothing -> defaultAction + Just hook -> do + hookIsExecutable <- handleIO (\_ -> pure False) $ if osIsWindows + then + -- can't really detect executable on windows, only file extension + doesFileExist hook + else executable <$> getPermissions hook + if hookIsExecutable + then runFileWatchHook eres hook + else do + prettyWarn $ + flow "File watch hook not executable. Falling back on default." + defaultAction - logInfo "Type help for available commands. Press enter to force a rebuild." + prettyInfoL + [ "Type" + , style Shell "help" + , flow "for the available commands. Press enter to force a rebuild." + ] + +runFileWatchHook :: + (HasProcessContext env, HasTerm env) + => Either SomeException () + -> Path Abs File + -> RIO env () +runFileWatchHook buildResult hook = + withModifyEnvVars insertBuildResultInEnv $ do + let (cmd, args) = if osIsWindows && isShFile + then ("sh", [toFilePath hook]) + else (toFilePath hook, []) + menv <- view processContextL + withProcessContext menv $ proc cmd args runProcess >>= \case + ExitSuccess -> pure () + ExitFailure i -> do + prettyWarnL + [ flow "File watch hook exited with code:" + , style Error (fromString $ show i) <> "." 
+ ] + pure () + where + insertBuildResultInEnv :: EnvVars -> EnvVars + insertBuildResultInEnv = Map.insert "HOOK_FW_RESULT" $ case buildResult of + Left e -> T.pack $ displayException e + Right _ -> "" + isShFile = case fileExtension hook of + Just ".sh" -> True + _ -> False diff --git a/src/Stack/GhcPkg.hs b/src/Stack/GhcPkg.hs index 59d946e138..b4bb846d75 100644 --- a/src/Stack/GhcPkg.hs +++ b/src/Stack/GhcPkg.hs @@ -1,103 +1,129 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} --- | Functions for the GHC package database. +{-| +Module : Stack.GhcPkg +Description : Functions for the GHC package database. +License : BSD-3-Clause + +Functions for the GHC package database. +-} module Stack.GhcPkg - (getGlobalDB - ,findGhcPkgField - ,createDatabase - ,unregisterGhcPkgIds - ,ghcPkgPathEnvVar - ,mkGhcPackagePath) - where + ( createDatabase + , findGhcPkgField + , getGlobalDB + , ghcPkg + , ghcPkgPathEnvVar + , mkGhcPackagePath + , unregisterGhcPkgIds + ) where -import Stack.Prelude import qualified Data.ByteString.Char8 as S8 import qualified Data.ByteString.Lazy as BL -import Data.List +import qualified Data.List as L import qualified Data.Text as T import qualified Data.Text.Encoding as T -import Path (parent, ()) -import Path.Extra (toFilePathNoTrailingSep) +import GHC.Utils.GhcPkg.Main.Compat ( ghcPkgUnregisterForce ) +import Path ( (), parent ) +import Path.Extra ( toFilePathNoTrailingSep ) import Path.IO -import Stack.Constants -import Stack.Types.Config (GhcPkgExe (..)) -import Stack.Types.GhcPkgId -import Stack.Types.Compiler -import System.FilePath (searchPathSeparator) -import RIO.Process + ( doesDirExist, doesFileExist, ensureDir, resolveDir' ) +import RIO.Process ( HasProcessContext, proc, readProcess_ ) +import Stack.Constants ( relFilePackageCache ) +import 
Stack.Prelude +import Stack.Types.Compiler ( WhichCompiler (..) ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), GhcPkgExe (..), HasCompiler + , compilerPathsL + ) +import Stack.Types.GhcPkgExe ( GhcPkgPrettyException (..) ) +import Stack.Types.GhcPkgId ( GhcPkgId, ghcPkgIdString ) +import System.FilePath ( searchPathSeparator ) -- | Get the global package database -getGlobalDB - :: (HasProcessContext env, HasLogFunc env) +getGlobalDB :: + (HasProcessContext env, HasTerm env) => GhcPkgExe -> RIO env (Path Abs Dir) getGlobalDB pkgexe = do - logDebug "Getting global package database location" - -- This seems like a strange way to get the global package database - -- location, but I don't know of a better one - bs <- ghcPkg pkgexe [] ["list", "--global"] >>= either throwIO return - let fp = S8.unpack $ stripTrailingColon $ firstLine bs - liftIO $ resolveDir' fp - where - stripTrailingColon bs - | S8.null bs = bs - | S8.last bs == ':' = S8.init bs - | otherwise = bs - firstLine = S8.takeWhile (\c -> c /= '\r' && c /= '\n') + logDebug "Getting global package database location" + -- This seems like a strange way to get the global package database + -- location, but I don't know of a better one + bs <- ghcPkg pkgexe [] ["list", "--global"] >>= either throwIO pure + let fp = S8.unpack $ stripTrailingColon $ firstLine bs + liftIO $ resolveDir' fp + where + stripTrailingColon bs + | S8.null bs = bs + | S8.last bs == ':' = S8.init bs + | otherwise = bs + firstLine = S8.takeWhile (\c -> c /= '\r' && c /= '\n') -- | Run the ghc-pkg executable -ghcPkg - :: (HasProcessContext env, HasLogFunc env) +ghcPkg :: + (HasProcessContext env, HasTerm env) => GhcPkgExe -> [Path Abs Dir] -> [String] -> RIO env (Either SomeException S8.ByteString) ghcPkg pkgexe@(GhcPkgExe pkgPath) pkgDbs args = do - eres <- go - case eres of - Left _ -> do - mapM_ (createDatabase pkgexe) pkgDbs - go - Right _ -> return eres - where - pkg = toFilePath pkgPath - go = tryAny $ BL.toStrict . 
fst <$> proc pkg args' readProcess_ - args' = packageDbFlags pkgDbs ++ args + eres <- go + case eres of + Left e -> do + prettyDebug $ + fillSep + [ flow "While using" + , style Shell "ghc-pkg" <>"," + , flow "Stack encountered the following error:" + ] + <> blankLine + <> string (displayException e) + <> flow "Trying again after considering database creation..." + mapM_ (createDatabase pkgexe) pkgDbs + go + Right _ -> pure eres + where + pkg = toFilePath pkgPath + go = tryAny $ BL.toStrict . fst <$> proc pkg args' readProcess_ + args' = packageDbFlags pkgDbs ++ args -- | Create a package database in the given directory, if it doesn't exist. -createDatabase - :: (HasProcessContext env, HasLogFunc env) +createDatabase :: + (HasProcessContext env, HasTerm env) => GhcPkgExe -> Path Abs Dir -> RIO env () createDatabase (GhcPkgExe pkgPath) db = do - exists <- doesFileExist (db relFilePackageCache) - unless exists $ do - -- ghc-pkg requires that the database directory does not exist - -- yet. If the directory exists but the package.cache file - -- does, we're in a corrupted state. Check for that state. - dirExists <- doesDirExist db - args <- if dirExists - then do - logWarn $ - "The package database located at " <> - fromString (toFilePath db) <> - " is corrupted (missing its package.cache file)." - logWarn "Proceeding with a recache" - return ["--package-db", toFilePath db, "recache"] - else do - -- Creating the parent doesn't seem necessary, as ghc-pkg - -- seems to be sufficiently smart. But I don't feel like - -- finding out it isn't the hard way - ensureDir (parent db) - return ["init", toFilePath db] - void $ proc (toFilePath pkgPath) args $ \pc -> - readProcess_ pc `onException` - logError ("Unable to create package database at " <> fromString (toFilePath db)) + exists <- doesFileExist (db relFilePackageCache) + unless exists $ do + -- ghc-pkg requires that the database directory does not exist + -- yet. 
If the directory exists but the package.cache file + -- does, we're in a corrupted state. Check for that state. + dirExists <- doesDirExist db + args <- if dirExists + then do + prettyWarnL + [ flow "The package database located at" + , pretty db + , flow "is corrupted. It is missing its" + , style File "package.cache" + , flow "file. Stack is proceeding with a recache." + ] + pure ["--package-db", toFilePath db, "recache"] + else do + -- Creating the parent doesn't seem necessary, as ghc-pkg + -- seems to be sufficiently smart. But I don't feel like + -- finding out it isn't the hard way + ensureDir (parent db) + pure ["init", toFilePath db] + void $ proc (toFilePath pkgPath) args $ \pc -> + onException (readProcess_ pc) $ + logError $ + "Error: [S-9735]\n" <> + "Unable to create package database at " <> + fromString (toFilePath db) -- | Get the environment variable to use for the package DB paths. ghcPkgPathEnvVar :: WhichCompiler -> Text @@ -106,53 +132,77 @@ ghcPkgPathEnvVar Ghc = "GHC_PACKAGE_PATH" -- | Get the necessary ghc-pkg flags for setting up the given package database packageDbFlags :: [Path Abs Dir] -> [String] packageDbFlags pkgDbs = - "--no-user-package-db" - : map (\x -> "--package-db=" ++ toFilePath x) pkgDbs + "--no-user-package-db" + : map (\x -> "--package-db=" ++ toFilePath x) pkgDbs -- | Get the value of a field of the package. -findGhcPkgField - :: (HasProcessContext env, HasLogFunc env) - => GhcPkgExe - -> [Path Abs Dir] -- ^ package databases - -> String -- ^ package identifier, or GhcPkgId - -> Text - -> RIO env (Maybe Text) -findGhcPkgField pkgexe pkgDbs name field = do - result <- - ghcPkg - pkgexe - pkgDbs - ["field", "--simple-output", name, T.unpack field] - return $ - case result of - Left{} -> Nothing - Right bs -> - fmap (stripCR . 
T.decodeUtf8) $ listToMaybe $ S8.lines bs +findGhcPkgField :: + (HasProcessContext env, HasTerm env) + => GhcPkgExe + -> [Path Abs Dir] -- ^ package databases + -> String -- ^ package identifier, or GhcPkgId + -> Text + -> RIO env (Maybe Text) +findGhcPkgField pkgexe pkgDbs name field = + let cmd = ["field", "--simple-output", name, T.unpack field] + in ghcPkg pkgexe pkgDbs cmd <&> \case + Left _ -> Nothing + Right bs -> fmap (stripCR . T.decodeUtf8) $ listToMaybe $ S8.lines bs -- | unregister list of package ghcids, batching available from GHC 8.2.1, -- see https://github.com/commercialhaskell/stack/issues/2662#issuecomment-460342402 -- using GHC package id where available (from GHC 7.9) -unregisterGhcPkgIds - :: (HasProcessContext env, HasLogFunc env) - => GhcPkgExe +-- +-- The version of the ghc-pkg executable supplied with GHCs published before +-- 28 August 2023 does not efficiently bulk unregister. Until an \'efficient\' +-- ghc-pkg is available, this function no longer uses: +-- +-- > eres <- ghcPkg pkgexe [pkgDb] args +-- > where +-- > args = "unregister" : "--user" : "--force" : +-- > map packageIdentifierString idents ++ +-- > if null gids then [] else "--ipid" : map ghcPkgIdString gids +-- +-- but uses: +-- +-- > globalDb <- view $ compilerPathsL.to cpGlobalDB +-- > eres <- tryAny $ liftIO $ +-- > ghcPkgUnregisterUserForce globalDb pkgDb hasIpid pkgarg_strs +-- +unregisterGhcPkgIds :: + (HasCompiler env, HasProcessContext env, HasTerm env) + => Bool + -- ^ Report pretty exceptions as warnings? 
+ -> GhcPkgExe -> Path Abs Dir -- ^ package database -> NonEmpty (Either PackageIdentifier GhcPkgId) -> RIO env () -unregisterGhcPkgIds pkgexe pkgDb epgids = do - eres <- ghcPkg pkgexe [pkgDb] args - case eres of - Left e -> logWarn $ displayShow e - Right _ -> return () - where - (idents, gids) = partitionEithers $ toList epgids - args = "unregister" : "--user" : "--force" : - map packageIdentifierString idents ++ - if null gids then [] else "--ipid" : map ghcPkgIdString gids +unregisterGhcPkgIds isWarn pkgexe pkgDb epgids = do + globalDb <- view $ compilerPathsL . to (.globalDB) + try (ghcPkgUnregisterForce globalDb pkgDb hasIpid pkgarg_strs) >>= \case + Left (PrettyException e) -> when isWarn $ + prettyWarn $ + "[S-8729]" + <> line + <> flow "While unregistering packages, Stack encountered the following \ + \error:" + <> blankLine + <> pretty e + Right _ -> pure () + -- ghcPkgUnregisterForce does not perform an effective 'ghc-pkg recache', as + -- that depends on a specific version of the Cabal package. 
+ ghcPkg pkgexe [pkgDb] ["recache"] >>= \case + Left err -> prettyThrowM $ CannotRecacheAfterUnregister pkgDb err + Right _ -> pure () + where + (idents, gids) = partitionEithers $ toList epgids + hasIpid = not (null gids) + pkgarg_strs = map packageIdentifierString idents <> map ghcPkgIdString gids -- | Get the value for GHC_PACKAGE_PATH mkGhcPackagePath :: Bool -> Path Abs Dir -> Path Abs Dir -> [Path Abs Dir] -> Path Abs Dir -> Text mkGhcPackagePath locals localdb deps extras globaldb = - T.pack $ intercalate [searchPathSeparator] $ concat + T.pack $ L.intercalate [searchPathSeparator] $ concat [ [toFilePathNoTrailingSep localdb | locals] , [toFilePathNoTrailingSep deps] , [toFilePathNoTrailingSep db | db <- reverse extras] diff --git a/src/Stack/Ghci.hs b/src/Stack/Ghci.hs index d207b97332..8db0e71196 100644 --- a/src/Stack/Ghci.hs +++ b/src/Stack/Ghci.hs @@ -1,871 +1,1135 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ConstraintKinds #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} --- | Run a GHCi configured with the user's package(s). +{-| +Module : Stack.Ghci +License : BSD-3-Clause + +Types and functions related to Stack's @ghci@ and @repl@ commands. +-} module Stack.Ghci - ( GhciOpts(..) - , GhciPkgInfo(..) - , GhciException(..) - , ghci - ) where - -import Stack.Prelude hiding (Display (..)) -import Control.Monad.State.Strict (State, execState, get, modify) -import Data.ByteString.Builder (byteString) + ( GhciOpts (..) + , GhciPkgInfo (..) + , GhciPrettyException (..) 
+ , ModuleMap + , ghciCmd + , ghci + ) where + +import Control.Monad.Extra ( whenJust ) +import Control.Monad.State.Strict ( State, execState, get, modify ) +import Data.ByteString.Builder ( byteString ) import qualified Data.ByteString.Char8 as S8 import qualified Data.ByteString.Lazy as LBS -import Data.List -import qualified Data.List.NonEmpty as NE +import qualified Data.List as L +import Data.List.Extra ( (!?) ) +import qualified Data.Map as Map import qualified Data.Map.Strict as M import qualified Data.Set as S import qualified Data.Text as T -import qualified Data.Text.Lazy as TL -import qualified Data.Text.Lazy.Encoding as TLE import qualified Distribution.PackageDescription as C -import Path -import Path.Extra (toFilePathNoTrailingSep) -import Path.IO hiding (withSystemTempDir) -import qualified RIO -import RIO.PrettyPrint -import RIO.Process (HasProcessContext, exec, proc, readProcess_, withWorkingDir) -import Stack.Build -import Stack.Build.Installed -import Stack.Build.Source -import Stack.Build.Target +import Path ((), parent, parseRelFile ) +import Path.Extra ( forgivingResolveFile', toFilePathNoTrailingSep ) +import Path.IO + ( XdgDirectory (..), doesFileExist, ensureDir, getXdgDir ) +import RIO.NonEmpty ( nonEmpty ) +import RIO.Process ( exec, withWorkingDir ) +import Stack.Build ( buildLocalTargets ) +import Stack.Build.Installed ( getInstalled, toInstallMap ) +import Stack.Build.Source ( localDependencies, projectLocalPackages ) +import Stack.Build.Target ( NeedTargets (..), parseTargets ) import Stack.Constants -import Stack.Constants.Config + ( relDirGhciScript, relDirStackProgName, relFileCabalMacrosH + , relFileGhciScript, stackProgName' + ) +import Stack.Constants.Config ( ghciDirL, objectInterfaceDirL ) import Stack.Ghci.Script + ( GhciScript, ModuleName, cmdAdd, cmdModule + , scriptToLazyByteString + ) import Stack.Package -import Stack.Types.Build -import Stack.Types.Config + ( buildableExes, buildableForeignLibs, buildableSubLibs + , 
buildableTestSuites, buildableBenchmarks, getPackageOpts + , hasBuildableMainLibrary, listOfPackageDeps + , packageFromPackageDescription, readDotBuildinfo + , resolvePackageDescription, topSortPackageComponent + ) +import Stack.PackageFile ( getPackageFile ) +import Stack.Prelude +import Stack.Runners ( ShouldReexec (..), withConfig, withEnvConfig ) +import Stack.Types.Build.Exception + ( BuildPrettyException (..), pprintTargetParseErrors ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..), configFileL ) +import Stack.Types.BuildOpts ( BuildOpts (..) ) +import qualified Stack.Types.BuildOpts as BenchmarkOpts ( BenchmarkOpts (..) ) +import qualified Stack.Types.BuildOpts as TestOpts ( TestOpts (..) ) +import Stack.Types.BuildOptsCLI + ( ApplyCLIFlag (..), BuildOptsCLI (..), defaultBuildOptsCLI ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), HasCompiler (..) ) +import Stack.Types.Config ( Config (..), HasConfig (..), buildOptsL ) +import Stack.Types.Config.Exception ( ConfigPrettyException (..) ) +import Stack.Types.EnvConfig + ( EnvConfig (..), HasEnvConfig (..), actualCompilerVersionL + , shaPathForBytes + ) +import Stack.Types.EnvSettings ( defaultEnvSettings ) +import Stack.Types.GhciOpts ( GhciOpts (..) ) +import Stack.Types.Installed ( InstallMap, InstalledMap ) import Stack.Types.NamedComponent + ( NamedComponent (..), isCLib, isCSubLib, renderComponentTo + , renderPkgComponent + ) import Stack.Types.Package + ( BuildInfoOpts (..), LocalPackage (..), Package (..) + , PackageConfig (..), dotCabalCFilePath, dotCabalGetPath + , dotCabalMainPath + ) +import Stack.Types.PackageFile ( PackageComponentFile (..) ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.Runner ( HasRunner, Runner ) import Stack.Types.SourceMap -import System.IO (putStrLn) -import System.IO.Temp (getCanonicalTemporaryDirectory) -import System.Permissions (setScriptPerms) - --- | Command-line options for GHC. 
-data GhciOpts = GhciOpts - { ghciTargets :: ![Text] - , ghciArgs :: ![String] - , ghciGhcOptions :: ![String] - , ghciFlags :: !(Map ApplyCLIFlag (Map FlagName Bool)) - , ghciGhcCommand :: !(Maybe FilePath) - , ghciNoLoadModules :: !Bool - , ghciAdditionalPackages :: ![String] - , ghciMainIs :: !(Maybe Text) - , ghciLoadLocalDeps :: !Bool - , ghciSkipIntermediate :: !Bool - , ghciHidePackages :: !(Maybe Bool) - , ghciNoBuild :: !Bool - , ghciOnlyMain :: !Bool - } deriving Show - --- | Necessary information to load a package or its components. + ( CommonPackage (..), DepPackage (..), GlobalPackage + , PackageType (..), ProjectPackage (..), SMActual (..) + , SMTargets (..), SMWanted (..), SourceMap (..), Target (..) + ) +import System.IO ( putStrLn ) +import System.Permissions ( setScriptPerms ) + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Ghci" module. +data GhciPrettyException + = GhciTargetParseException ![StyleDoc] + | CandidatesIndexOutOfRangeBug + | InvalidPackageOption !String + | FileTargetIsInvalidAbsFile !String + | Can'tSpecifyFilesAndTargets + | Can'tSpecifyFilesAndMainIs + deriving Show + +instance Pretty GhciPrettyException where + pretty (GhciTargetParseException errs) = + "[S-6948]" + <> pprintTargetParseErrors errs + <> blankLine + <> fillSep + [ flow "Note that to specify options to be passed to GHCi, use the" + , style Shell "--ghci-options" + , "option." + ] + pretty CandidatesIndexOutOfRangeBug = bugPrettyReport "[S-1939]" $ + flow "figureOutMainFile: index out of range." + pretty (InvalidPackageOption name) = + "[S-6716]" + <> line + <> fillSep + [ flow "Failed to parse" + , style Shell "--package" + , "option" + , style Target (fromString name) <> "." + ] + pretty (FileTargetIsInvalidAbsFile name) = + "[S-3600]" + <> line + <> fillSep + [ flow "Cannot work out a valid path for file target" + , style File (fromString name) <> "." 
+ ] + pretty Can'tSpecifyFilesAndTargets = + "[S-9906]" + <> line + <> fillSep + [ flow "Cannot use" + , style Shell "stack ghci" + , flow "with both file targets and package targets." + ] + pretty Can'tSpecifyFilesAndMainIs = + "[S-5188]" + <> line + <> fillSep + [ flow "Cannot use" + , style Shell "stack ghci" + , flow "with both file targets and" + , style Shell "--main-is" + , "flag." + ] + +instance Exception GhciPrettyException + +-- | Type representing information required to load a package or its components. -- -- NOTE: GhciPkgInfo has paths as list instead of a Set to preserve files order -- as a workaround for bug https://ghc.haskell.org/trac/ghc/ticket/13786 data GhciPkgInfo = GhciPkgInfo - { ghciPkgName :: !PackageName - , ghciPkgOpts :: ![(NamedComponent, BuildInfoOpts)] - , ghciPkgDir :: !(Path Abs Dir) - , ghciPkgModules :: !ModuleMap - , ghciPkgCFiles :: ![Path Abs File] -- ^ C files. - , ghciPkgMainIs :: !(Map NamedComponent [Path Abs File]) - , ghciPkgTargetFiles :: !(Maybe [Path Abs File]) - , ghciPkgPackage :: !Package - } deriving Show - --- | Loaded package description and related info. + { name :: !PackageName + , opts :: ![(NamedComponent, BuildInfoOpts)] + , dir :: !(Path Abs Dir) + , modules :: !ModuleMap + , cFiles :: ![Path Abs File] -- ^ C files. + , mainIs :: !(Map NamedComponent [Path Abs File]) + , targetFiles :: !(Maybe [Path Abs File]) + , package :: !Package + } + deriving Show + +-- | Type representing loaded package description and related information. data GhciPkgDesc = GhciPkgDesc - { ghciDescPkg :: !Package - , ghciDescCabalFp :: !(Path Abs File) - , ghciDescTarget :: !Target - } + { package :: !Package + , cabalFP :: !(Path Abs File) + , target :: !Target + } --- Mapping from a module name to a map with all of the paths that use --- that name. Each of those paths is associated with a set of components --- that contain it. 
Purpose of this complex structure is for use in +-- | Type synonym representing maps from a module name to a map with all of the +-- paths that use that name. Each of those paths is associated with a set of +-- components that contain it. + +-- The purpose of this complex structure is for use in -- 'checkForDuplicateModules'. -type ModuleMap = Map ModuleName (Map (Path Abs File) (Set (PackageName, NamedComponent))) +type ModuleMap = + Map ModuleName (Map (Path Abs File) (Set (PackageName, NamedComponent))) unionModuleMaps :: [ModuleMap] -> ModuleMap unionModuleMaps = M.unionsWith (M.unionWith S.union) -data GhciException - = InvalidPackageOption String - | LoadingDuplicateModules - | MissingFileTarget String - | Can'tSpecifyFilesAndTargets - | Can'tSpecifyFilesAndMainIs - | GhciTargetParseException [Text] - deriving (Typeable) - -instance Exception GhciException - -instance Show GhciException where - show (InvalidPackageOption name) = - "Failed to parse --package option " ++ name - show LoadingDuplicateModules = unlines - [ "Not attempting to start ghci due to these duplicate modules." - , "Use --no-load to try to start it anyway, without loading any modules (but these are still likely to cause errors)" - ] - show (MissingFileTarget name) = - "Cannot find file target " ++ name - show Can'tSpecifyFilesAndTargets = - "Cannot use 'stack ghci' with both file targets and package targets" - show Can'tSpecifyFilesAndMainIs = - "Cannot use 'stack ghci' with both file targets and --main-is flag" - show (GhciTargetParseException xs) = - show (TargetParseException xs) ++ - "\nNote that to specify options to be passed to GHCi, use the --ghci-options flag" - --- | Launch a GHCi session for the given local package targets with the --- given options and configure it with the load paths and extensions --- of those targets. +-- | Function underlying the @stack ghci@ and @stack repl@ commands. Run GHCi in +-- the context of a project. 
+ghciCmd :: GhciOpts -> RIO Runner () +ghciCmd ghciOpts = + let boptsCLI = defaultBuildOptsCLI + -- using only additional packages, targets then get overridden in `ghci` + { targetsCLI = map T.pack ghciOpts.additionalPackages + , initialBuildSteps = True + , flags = ghciOpts.flags + , ghcOptions = map T.pack ghciOpts.ghcOptions + } + in withConfig YesReexec $ withEnvConfig AllowNoTargets boptsCLI $ do + bopts <- view buildOptsL + -- override env so running of tests and benchmarks is disabled + let boptsLocal = bopts + { testOpts = bopts.testOpts { TestOpts.runTests = False } + , benchmarkOpts = + bopts.benchmarkOpts { BenchmarkOpts.runBenchmarks = False } + } + local (set buildOptsL boptsLocal) (ghci ghciOpts) + +-- | Launch a GHCi session for the given project package targets with the given +-- options and configure it with the load paths and extensions of those targets. ghci :: HasEnvConfig env => GhciOpts -> RIO env () -ghci opts@GhciOpts{..} = do - let buildOptsCLI = defaultBuildOptsCLI - { boptsCLITargets = [] - , boptsCLIFlags = ghciFlags - } - sourceMap <- view $ envConfigL.to envConfigSourceMap - installMap <- toInstallMap sourceMap - locals <- projectLocalPackages - depLocals <- localDependencies - let localMap = - M.fromList [(packageName $ lpPackage lp, lp) | lp <- locals ++ depLocals] - -- FIXME:qrilka this looks wrong to go back to SMActual - sma = SMActual - { smaCompiler = smCompiler sourceMap - , smaProject = smProject sourceMap - , smaDeps = smDeps sourceMap - , smaGlobal = smGlobal sourceMap - } - -- Parse --main-is argument. 
- mainIsTargets <- parseMainIsTargets buildOptsCLI sma ghciMainIs - -- Parse to either file targets or build targets - etargets <- preprocessTargets buildOptsCLI sma ghciTargets - (inputTargets, mfileTargets) <- case etargets of - Right packageTargets -> return (packageTargets, Nothing) - Left rawFileTargets -> do - case mainIsTargets of - Nothing -> return () - Just _ -> throwM Can'tSpecifyFilesAndMainIs - -- Figure out targets based on filepath targets - (targetMap, fileInfo, extraFiles) <- findFileTargets locals rawFileTargets - return (targetMap, Just (fileInfo, extraFiles)) - -- Get a list of all the local target packages. - localTargets <- getAllLocalTargets opts inputTargets mainIsTargets localMap - -- Get a list of all the non-local target packages. - nonLocalTargets <- getAllNonLocalTargets inputTargets - -- Check if additional package arguments are sensible. - addPkgs <- checkAdditionalPackages ghciAdditionalPackages - -- Load package descriptions. - pkgDescs <- loadGhciPkgDescs buildOptsCLI localTargets - -- If necessary, ask user about which main module to load. - bopts <- view buildOptsL - mainFile <- - if ghciNoLoadModules - then return Nothing - else do - -- Figure out package files, in order to ask the user - -- about which main module to load. See the note below for - -- why this is done again after the build. This could - -- potentially be done more efficiently, because all we - -- need is the location of main modules, not the rest. - pkgs0 <- getGhciPkgInfos installMap addPkgs (fmap fst mfileTargets) pkgDescs - figureOutMainFile bopts mainIsTargets localTargets pkgs0 - let pkgTargets pn targets = - case targets of - TargetAll _ -> [T.pack (packageNameString pn)] - TargetComps comps -> [renderPkgComponent (pn, c) | c <- toList comps] - -- Build required dependencies and setup local packages. 
- buildDepsAndInitialSteps opts $ - concatMap (\(pn, (_, t)) -> pkgTargets pn t) localTargets - targetWarnings localTargets nonLocalTargets mfileTargets - -- Load the list of modules _after_ building, to catch changes in - -- unlisted dependencies (#1180) - pkgs <- getGhciPkgInfos installMap addPkgs (fmap fst mfileTargets) pkgDescs - checkForIssues pkgs - -- Finally, do the invocation of ghci - runGhci opts localTargets mainFile pkgs (maybe [] snd mfileTargets) (nonLocalTargets ++ addPkgs) - -preprocessTargets - :: HasEnvConfig env - => BuildOptsCLI - -> SMActual GlobalPackage - -> [Text] - -> RIO env (Either [Path Abs File] (Map PackageName Target)) +ghci opts = do + let buildOptsCLI = defaultBuildOptsCLI + { targetsCLI = [] + , flags = opts.flags + } + sourceMap <- view $ envConfigL . to (.sourceMap) + installMap <- toInstallMap sourceMap + locals <- projectLocalPackages + depLocals <- localDependencies + let localMap = + M.fromList [(lp.package.name, lp) | lp <- locals ++ depLocals] + -- FIXME:qrilka this looks wrong to go back to SMActual + sma = SMActual + { compiler = sourceMap.compiler + , project = sourceMap.project + , deps = sourceMap.deps + , globals = sourceMap.globalPkgs + } + -- Parse --main-is argument. + mainIsTargets <- parseMainIsTargets buildOptsCLI sma opts.mainIs + -- Parse to either file targets or build targets + (inputTargets, mfileTargets) <- + preprocessTargets buildOptsCLI sma opts.targets >>= \case + Right packageTargets -> pure (packageTargets, Nothing) + Left rawFileTargets -> do + whenJust mainIsTargets $ \_ -> prettyThrowM Can'tSpecifyFilesAndMainIs + -- Figure out targets based on filepath targets + (targetMap, fileInfo, extraFiles) <- + findFileTargets locals rawFileTargets + pure (targetMap, Just (fileInfo, extraFiles)) + -- Get a list of all the local target packages. + localTargets <- getAllLocalTargets opts inputTargets mainIsTargets localMap + -- Get a list of all the non-local target packages. 
+ nonLocalTargets <- getAllNonLocalTargets inputTargets + let getInternalDependencies target localPackage = + topSortPackageComponent localPackage.package target False + internalDependencies = + M.intersectionWith getInternalDependencies inputTargets localMap + relevantDependencies = M.filter (any isCSubLib) internalDependencies + -- Check if additional package arguments are sensible. + addPkgs <- checkAdditionalPackages opts.additionalPackages + -- Load package descriptions. + pkgDescs <- loadGhciPkgDescs buildOptsCLI localTargets + -- If necessary, ask user about which main module to load. + bopts <- view buildOptsL + mainFile <- if opts.noLoadModules + then pure Nothing + else do + -- Figure out package files, in order to ask the user about which main + -- module to load. See the note below for why this is done again after the + -- build. This could potentially be done more efficiently, because all we + -- need is the location of main modules, not the rest. + pkgs0 <- getGhciPkgInfos installMap addPkgs (fmap fst mfileTargets) pkgDescs + figureOutMainFile bopts mainIsTargets localTargets pkgs0 + let pkgTargets pn targets = + case targets of + TargetAll _ -> [T.pack (packageNameString pn)] + TargetComps comps -> [renderPkgComponent (pn, c) | c <- toList comps] + -- Build required dependencies and setup project packages. 
+ buildDepsAndInitialSteps opts $ + concatMap (\(pn, (_, t)) -> pkgTargets pn t) localTargets + targetWarnings localTargets nonLocalTargets mfileTargets + -- Load the list of modules _after_ building, to catch changes in + -- unlisted dependencies (#1180) + pkgs <- getGhciPkgInfos installMap addPkgs (fmap fst mfileTargets) pkgDescs + checkForIssues pkgs + -- Finally, do the invocation of ghci + runGhci + opts + localTargets + mainFile + pkgs + (maybe [] snd mfileTargets) + (nonLocalTargets ++ addPkgs) + relevantDependencies + +preprocessTargets :: + HasEnvConfig env + => BuildOptsCLI + -> SMActual GlobalPackage + -> [Text] + -> RIO env (Either [Path Abs File] (Map PackageName Target)) preprocessTargets buildOptsCLI sma rawTargets = do - let (fileTargetsRaw, normalTargetsRaw) = - partition (\t -> ".hs" `T.isSuffixOf` t || ".lhs" `T.isSuffixOf` t) - rawTargets - -- Only use file targets if we have no normal targets. - if not (null fileTargetsRaw) && null normalTargetsRaw - then do - fileTargets <- forM fileTargetsRaw $ \fp0 -> do - let fp = T.unpack fp0 - mpath <- liftIO $ forgivingAbsence (resolveFile' fp) - case mpath of - Nothing -> throwM (MissingFileTarget fp) - Just path -> return path - return (Left fileTargets) - else do - -- Try parsing targets before checking if both file and - -- module targets are specified (see issue#3342). 
- let boptsCLI = buildOptsCLI { boptsCLITargets = normalTargetsRaw } - normalTargets <- parseTargets AllowNoTargets False boptsCLI sma - `catch` \ex -> case ex of - TargetParseException xs -> throwM (GhciTargetParseException xs) - _ -> throwM ex - unless (null fileTargetsRaw) $ throwM Can'tSpecifyFilesAndTargets - return (Right $ smtTargets normalTargets) - -parseMainIsTargets - :: HasEnvConfig env - => BuildOptsCLI - -> SMActual GlobalPackage - -> Maybe Text - -> RIO env (Maybe (Map PackageName Target)) + let (fileTargetsRaw, normalTargetsRaw) = + L.partition + (\t -> ".hs" `T.isSuffixOf` t || ".lhs" `T.isSuffixOf` t) + rawTargets + -- Only use file targets if we have no normal targets. + if not (null fileTargetsRaw) && null normalTargetsRaw + then do + fileTargets <- forM fileTargetsRaw $ \fp0 -> do + let fp = T.unpack fp0 + forgivingResolveFile' fp >>= \case + Nothing -> prettyThrowM (FileTargetIsInvalidAbsFile fp) + Just path -> pure path + pure (Left fileTargets) + else do + -- Try parsing targets before checking if both file and + -- module targets are specified (see issue#3342). 
+ let boptsCLI = buildOptsCLI { targetsCLI = normalTargetsRaw } + normalTargets <- parseTargets AllowNoTargets False boptsCLI sma + `catch` \pex@(PrettyException ex) -> + case fromException $ toException ex of + Just (TargetParseException xs) -> + prettyThrowM $ GhciTargetParseException xs + _ -> throwM pex + unless (null fileTargetsRaw) $ prettyThrowM Can'tSpecifyFilesAndTargets + pure (Right normalTargets.targets) + +parseMainIsTargets :: + HasEnvConfig env + => BuildOptsCLI + -> SMActual GlobalPackage + -> Maybe Text + -> RIO env (Maybe (Map PackageName Target)) parseMainIsTargets buildOptsCLI sma mtarget = forM mtarget $ \target -> do - let boptsCLI = buildOptsCLI { boptsCLITargets = [target] } - targets <- parseTargets AllowNoTargets False boptsCLI sma - return $ smtTargets targets + let boptsCLI = buildOptsCLI { targetsCLI = [target] } + targets <- parseTargets AllowNoTargets False boptsCLI sma + pure targets.targets -- | Display PackageName + NamedComponent displayPkgComponent :: (PackageName, NamedComponent) -> StyleDoc -displayPkgComponent = style PkgComponent . fromString . T.unpack . renderPkgComponent +displayPkgComponent = + style PkgComponent . fromString . T.unpack . renderPkgComponent -findFileTargets - :: HasEnvConfig env - => [LocalPackage] - -> [Path Abs File] - -> RIO env (Map PackageName Target, Map PackageName [Path Abs File], [Path Abs File]) +findFileTargets :: + HasEnvConfig env + => [LocalPackage] + -> [Path Abs File] + -> RIO env (Map PackageName Target, Map PackageName [Path Abs File], [Path Abs File]) findFileTargets locals fileTargets = do - filePackages <- forM locals $ \lp -> do - (_,compFiles,_,_) <- getPackageFiles (packageFiles (lpPackage lp)) (lpCabalFile lp) - return (lp, M.map (map dotCabalGetPath) compFiles) - let foundFileTargetComponents :: [(Path Abs File, [(PackageName, NamedComponent)])] - foundFileTargetComponents = - map (\fp -> (fp, ) $ sort $ - concatMap (\(lp, files) -> map ((packageName (lpPackage lp), ) . 
fst) - (filter (elem fp . snd) (M.toList files)) - ) filePackages - ) fileTargets - results <- forM foundFileTargetComponents $ \(fp, xs) -> - case xs of - [] -> do - prettyWarn $ vsep - [ "Couldn't find a component for file target" <+> - pretty fp <> - ". This means that the correct ghc options might not be used." - , "Attempting to load the file anyway." - ] - return $ Left fp - [x] -> do - prettyInfo $ - "Using configuration for" <+> displayPkgComponent x <+> - "to load" <+> pretty fp - return $ Right (fp, x) - (x:_) -> do - prettyWarn $ - "Multiple components contain file target" <+> - pretty fp <> ":" <+> - mconcat (intersperse ", " (map displayPkgComponent xs)) <> line <> - "Guessing the first one," <+> displayPkgComponent x <> "." - return $ Right (fp, x) - let (extraFiles, associatedFiles) = partitionEithers results - targetMap = - foldl unionTargets M.empty $ - map (\(_, (name, comp)) -> M.singleton name (TargetComps (S.singleton comp))) - associatedFiles - infoMap = - foldl (M.unionWith (<>)) M.empty $ - map (\(fp, (name, _)) -> M.singleton name [fp]) - associatedFiles - return (targetMap, infoMap, extraFiles) - -getAllLocalTargets - :: HasEnvConfig env - => GhciOpts - -> Map PackageName Target - -> Maybe (Map PackageName Target) - -> Map PackageName LocalPackage - -> RIO env [(PackageName, (Path Abs File, Target))] -getAllLocalTargets GhciOpts{..} targets0 mainIsTargets localMap = do - -- Use the 'mainIsTargets' as normal targets, for CLI concision. See - -- #1845. This is a little subtle - we need to do the target parsing - -- independently in order to handle the case where no targets are - -- specified. - let targets = maybe targets0 (unionTargets targets0) mainIsTargets - packages <- view $ envConfigL.to envConfigSourceMap.to smProject - -- Find all of the packages that are directly demanded by the - -- targets. 
- let directlyWanted = flip mapMaybe (M.toList packages) $ - \(name, pp) -> - case M.lookup name targets of - Just simpleTargets -> Just (name, (ppCabalFP pp, simpleTargets)) - Nothing -> Nothing - -- Figure out - let extraLoadDeps = getExtraLoadDeps ghciLoadLocalDeps localMap directlyWanted - if (ghciSkipIntermediate && not ghciLoadLocalDeps) || null extraLoadDeps - then return directlyWanted - else do - let extraList = - mconcat $ intersperse ", " (map (fromString . packageNameString . fst) extraLoadDeps) - if ghciLoadLocalDeps - then logInfo $ - "The following libraries will also be loaded into GHCi because " <> - "they are local dependencies of your targets, and you specified --load-local-deps:\n " <> - extraList - else logInfo $ - "The following libraries will also be loaded into GHCi because " <> - "they are intermediate dependencies of your targets:\n " <> - extraList <> - "\n(Use --skip-intermediate-deps to omit these)" - return (directlyWanted ++ extraLoadDeps) - -getAllNonLocalTargets - :: Map PackageName Target - -> RIO env [PackageName] + filePackages <- forM locals $ \lp -> do + PackageComponentFile _ compFiles _ _ <- getPackageFile lp.package lp.cabalFP + pure (lp, M.map (map dotCabalGetPath) compFiles) + let foundFileTargetComponents :: [(Path Abs File, [(PackageName, NamedComponent)])] + foundFileTargetComponents = + map (\fp -> (fp, ) $ L.sort $ + concatMap (\(lp, files) -> map ((lp.package.name,) . fst) + (filter (elem fp . snd) (M.toList files)) + ) filePackages + ) fileTargets + results <- forM foundFileTargetComponents $ \(fp, xs) -> + case xs of + [] -> do + prettyWarnL + [ flow "Couldn't find a component for file target" + , pretty fp <> "." + , flow "This means that the correct GHC options might not be used. \ + \Attempting to load the file anyway." 
+ ] + pure $ Left fp + [x] -> do + prettyInfoL + [ flow "Using configuration for" + , displayPkgComponent x + , flow "to load" + , pretty fp + ] + pure $ Right (fp, x) + (x:_) -> do + prettyWarn $ + fillSep + [ flow "Multiple components contain file target" + , pretty fp <> ":" + , fillSep $ punctuate "," (map displayPkgComponent xs) + ] + <> line + <> fillSep + [ flow "Guessing the first one," + , displayPkgComponent x <> "." + ] + pure $ Right (fp, x) + let (extraFiles, associatedFiles) = partitionEithers results + targetMap = + foldl' unionTargets M.empty $ + map (\(_, (name, comp)) -> M.singleton name (TargetComps (S.singleton comp))) + associatedFiles + infoMap = + foldl' (M.unionWith (<>)) M.empty $ + map (\(fp, (name, _)) -> M.singleton name [fp]) + associatedFiles + pure (targetMap, infoMap, extraFiles) + +getAllLocalTargets :: + HasEnvConfig env + => GhciOpts + -> Map PackageName Target + -> Maybe (Map PackageName Target) + -> Map PackageName LocalPackage + -> RIO env [(PackageName, (Path Abs File, Target))] +getAllLocalTargets ghciOpts targets0 mainIsTargets localMap = do + -- Use the 'mainIsTargets' as normal targets, for CLI concision. See + -- #1845. This is a little subtle - we need to do the target parsing + -- independently in order to handle the case where no targets are + -- specified. + let targets = maybe targets0 (unionTargets targets0) mainIsTargets + packages <- view $ envConfigL . to (.sourceMap.project) + -- Find all of the packages that are directly demanded by the + -- targets. + let directlyWanted = flip mapMaybe (M.toList packages) $ + \(name, pp) -> + case M.lookup name targets of + Just simpleTargets -> Just (name, (pp.cabalFP, simpleTargets)) + Nothing -> Nothing + -- Figure out + let extraLoadDeps = + getExtraLoadDeps ghciOpts.loadLocalDeps localMap directlyWanted + if null extraLoadDeps + then pure directlyWanted + else do + let extraList' = + map (fromPackageName . 
fst) extraLoadDeps :: [StyleDoc] + extraList = mkNarrativeList (Just Current) False extraList' + if ghciOpts.loadLocalDeps + then prettyInfo $ + fillSep $ + [ flow "The following libraries will also be loaded into \ + \GHCi because they are local dependencies of your \ + \targets, and you specified" + , style Shell "--load-local-deps" <> ":" + ] + <> extraList + else prettyInfo $ + fillSep + ( flow "The following libraries will also be loaded into \ + \GHCi because they are intermediate dependencies of \ + \your targets:" + : extraList + ) + pure (directlyWanted ++ extraLoadDeps) + +getAllNonLocalTargets :: + Map PackageName Target + -> RIO env [PackageName] getAllNonLocalTargets targets = do let isNonLocal (TargetAll PTDependency) = True isNonLocal _ = False - return $ map fst $ filter (isNonLocal . snd) (M.toList targets) + pure $ map fst $ filter (isNonLocal . snd) (M.toList targets) buildDepsAndInitialSteps :: HasEnvConfig env => GhciOpts -> [Text] -> RIO env () -buildDepsAndInitialSteps GhciOpts{..} localTargets = do - let targets = localTargets ++ map T.pack ghciAdditionalPackages - -- If necessary, do the build, for local packagee targets, only do - -- 'initialBuildSteps'. - case NE.nonEmpty targets of - -- only new local targets could appear here - Just nonEmptyTargets | not ghciNoBuild -> do - eres <- buildLocalTargets nonEmptyTargets - case eres of - Right () -> return () - Left err -> do - prettyError $ fromString (show err) - prettyWarn "Build failed, but trying to launch GHCi anyway" - _ -> - return () +buildDepsAndInitialSteps ghciOpts localTargets = do + let targets = localTargets ++ map T.pack ghciOpts.additionalPackages + -- If necessary, do the build, for project packagee targets, only do + -- 'initialBuildSteps'. 
+ whenJust (nonEmpty targets) $ \nonEmptyTargets -> + unless ghciOpts.noBuild $ do + -- only new project package targets could appear here + buildLocalTargets nonEmptyTargets >>= \case + Right () -> pure () + Left err -> do + case fromException err of + Just (PrettyException prettyErr) -> prettyError $ pretty prettyErr + Nothing -> prettyError $ fromString (displayException err) + prettyWarn "Build failed, but trying to launch GHCi anyway" checkAdditionalPackages :: MonadThrow m => [String] -> m [PackageName] checkAdditionalPackages pkgs = forM pkgs $ \name -> do - let mres = (pkgName <$> parsePackageIdentifier name) - <|> parsePackageNameThrowing name - maybe (throwM $ InvalidPackageOption name) return mres + let mres = (pkgName <$> parsePackageIdentifier name) + <|> parsePackageNameThrowing name + maybe (prettyThrowM $ InvalidPackageOption name) pure mres +runGhci :: + HasEnvConfig env + => GhciOpts + -> [(PackageName, (Path Abs File, Target))] + -> Maybe (Path Abs File) + -> [GhciPkgInfo] + -> [Path Abs File] + -> [PackageName] + -> Map PackageName (Seq NamedComponent) + -> RIO env () runGhci - :: HasEnvConfig env - => GhciOpts - -> [(PackageName, (Path Abs File, Target))] - -> Maybe (Path Abs File) - -> [GhciPkgInfo] - -> [Path Abs File] - -> [PackageName] - -> RIO env () -runGhci GhciOpts{..} targets mainFile pkgs extraFiles exposePackages = do - config <- view configL - let pkgopts = hidePkgOpts ++ genOpts ++ ghcOpts - shouldHidePackages = - fromMaybe (not (null pkgs && null exposePackages)) ghciHidePackages - hidePkgOpts = - if shouldHidePackages - then - ["-hide-all-packages"] ++ - -- This is necessary, because current versions of ghci - -- will entirely fail to start if base isn't visible. This - -- is because it tries to use the interpreter to set - -- buffering options on standard IO. 
- (if null targets then ["-package", "base"] else []) ++ - concatMap (\n -> ["-package", packageNameString n]) exposePackages - else [] - oneWordOpts bio - | shouldHidePackages = bioOneWordOpts bio ++ bioPackageFlags bio - | otherwise = bioOneWordOpts bio - genOpts = nubOrd (concatMap (concatMap (oneWordOpts . snd) . ghciPkgOpts) pkgs) - (omittedOpts, ghcOpts) = partition badForGhci $ - concatMap (concatMap (bioOpts . snd) . ghciPkgOpts) pkgs ++ map T.unpack - ( fold (configGhcOptionsByCat config) -- include everything, locals, and targets - ++ concatMap (getUserOptions . ghciPkgName) pkgs - ) - getUserOptions pkg = M.findWithDefault [] pkg (configGhcOptionsByName config) - badForGhci x = - isPrefixOf "-O" x || elem x (words "-debug -threaded -ticky -static -Werror") - unless (null omittedOpts) $ - logWarn - ("The following GHC options are incompatible with GHCi and have not been passed to it: " <> - mconcat (intersperse " " (fromString <$> nubOrd omittedOpts))) - oiDir <- view objectInterfaceDirL - let odir = + ghciOpts + targets + mainFile + pkgs + extraFiles + exposePackages + exposeInternalDep + = do + config <- view configL + let subDepsPackageUnhide pName deps = + if null deps then [] else ["-package", fromPackageName pName] + pkgopts = hidePkgOpts ++ genOpts ++ ghcOpts + shouldHidePackages = fromMaybe + (not (null pkgs && null exposePackages)) + ghciOpts.hidePackages + hidePkgOpts = + if shouldHidePackages + then + ["-hide-all-packages"] + -- This is necessary, because current versions of ghci will + -- entirely fail to start if base isn't visible. This is because + -- it tries to use the interpreter to set buffering options on + -- standard IO. 
+ ++ (if null targets then ["-package", "base"] else []) + ++ concatMap + (\n -> ["-package", packageNameString n]) + exposePackages + ++ M.foldMapWithKey subDepsPackageUnhide exposeInternalDep + else [] + oneWordOpts bio + | shouldHidePackages = bio.oneWordOpts ++ bio.packageFlags + | otherwise = bio.oneWordOpts + genOpts = nubOrd + (concatMap (concatMap (oneWordOpts . snd) . (.opts)) pkgs) + (omittedOpts, ghcOpts) = L.partition badForGhci $ + concatMap (concatMap ((.opts) . snd) . (.opts)) pkgs + ++ map + T.unpack + ( fold config.ghcOptionsByCat + -- ^ include everything, locals, and targets + ++ concatMap (getUserOptions . (.name)) pkgs + ) + getUserOptions pkg = + M.findWithDefault [] pkg config.ghcOptionsByName + badForGhci x = + L.isPrefixOf "-O" x + || elem x (words "-debug -threaded -ticky -static -Werror") + unless (null omittedOpts) $ + prettyWarn $ + fillSep + ( flow "The following GHC options are incompatible with GHCi \ + \and have not been passed to it:" + : mkNarrativeList (Just Current) False + (map fromString (nubOrd omittedOpts) :: [StyleDoc]) + ) + <> line + oiDir <- view objectInterfaceDirL + let odir = [ "-odir=" <> toFilePathNoTrailingSep oiDir - , "-hidir=" <> toFilePathNoTrailingSep oiDir ] - logInfo $ - "Configuring GHCi with the following packages: " <> - mconcat (intersperse ", " (map (fromString . packageNameString . ghciPkgName) pkgs)) - compilerExeName <- view $ compilerPathsL.to cpCompiler.to toFilePath - let execGhci extras = do - menv <- liftIO $ configProcessContextSettings config defaultEnvSettings + , "-hidir=" <> toFilePathNoTrailingSep oiDir + ] + prettyInfoL + ( flow "Configuring GHCi with the following packages:" + : mkNarrativeList (Just Current) False + (map (fromPackageName . (.name)) pkgs :: [StyleDoc]) + ) + compilerExeName <- + view $ compilerPathsL . to (.compiler) . 
to toFilePath + let execGhci extras = do + menv <- + liftIO $ config.processContextSettings defaultEnvSettings withPackageWorkingDir $ withProcessContext menv $ exec - (fromMaybe compilerExeName ghciGhcCommand) - (("--interactive" : ) $ - -- This initial "-i" resets the include directories to - -- not include CWD. If there aren't any packages, CWD - -- is included. - (if null pkgs then id else ("-i" : )) $ - odir <> pkgopts <> extras <> ghciGhcOptions <> ghciArgs) - withPackageWorkingDir = + (fromMaybe compilerExeName ghciOpts.ghcCommand) + ( ("--interactive" : ) $ + -- This initial "-i" resets the include directories to not + -- include CWD. If there aren't any packages, CWD is included. + (if null pkgs then id else ("-i" : )) $ + odir + <> pkgopts + <> extras + <> ghciOpts.ghcOptions + <> ghciOpts.args + ) + withPackageWorkingDir = case pkgs of - [pkg] -> withWorkingDir (toFilePath $ ghciPkgDir pkg) + [pkg] -> withWorkingDir (toFilePath pkg.dir) _ -> id - -- TODO: Consider optimizing this check. Perhaps if no - -- "with-ghc" is specified, assume that it is not using intero. - checkIsIntero = - -- Optimization dependent on the behavior of renderScript - - -- it doesn't matter if it's intero or ghci when loading - -- multiple packages. - case pkgs of - [_] -> do - menv <- liftIO $ configProcessContextSettings config defaultEnvSettings - output <- withProcessContext menv - $ runGrabFirstLine (fromMaybe compilerExeName ghciGhcCommand) ["--version"] - return $ "Intero" `isPrefixOf` output - _ -> return False - -- Since usage of 'exec' does not return, we cannot do any cleanup - -- on ghci exit. So, instead leave the generated files. To make this - -- more efficient and avoid gratuitous generation of garbage, the - -- file names are determined by hashing. This also has the nice side - -- effect of making it possible to copy the ghci invocation out of - -- the log and have it still work. 
- tmpDirectory <- - ( relDirHaskellStackGhci) <$> - (parseAbsDir =<< liftIO getCanonicalTemporaryDirectory) - ghciDir <- view ghciDirL - ensureDir ghciDir - ensureDir tmpDirectory - macrosOptions <- writeMacrosFile ghciDir pkgs - if ghciNoLoadModules + -- Since usage of 'exec' does not pure, we cannot do any cleanup on ghci + -- exit. So, instead leave the generated files. To make this more + -- efficient and avoid gratuitous generation of garbage, the file names + -- are determined by hashing. This also has the nice side effect of making + -- it possible to copy the ghci invocation out of the log and have it + -- still work. + tmpDirectory <- getXdgDir XdgCache $ + Just (relDirStackProgName relDirGhciScript) + ghciDir <- view ghciDirL + ensureDir ghciDir + ensureDir tmpDirectory + macrosOptions <- writeMacrosFile ghciDir pkgs + if ghciOpts.noLoadModules then execGhci macrosOptions else do - checkForDuplicateModules pkgs - isIntero <- checkIsIntero - scriptOptions <- writeGhciScript tmpDirectory (renderScript isIntero pkgs mainFile ghciOnlyMain extraFiles) - execGhci (macrosOptions ++ scriptOptions) + checkForDuplicateModules pkgs + scriptOptions <- + writeGhciScript + tmpDirectory + (renderScript pkgs mainFile ghciOpts.onlyMain extraFiles) + execGhci (macrosOptions ++ scriptOptions) -writeMacrosFile :: HasTerm env => Path Abs Dir -> [GhciPkgInfo] -> RIO env [String] +writeMacrosFile :: + HasTerm env + => Path Abs Dir + -> [GhciPkgInfo] + -> RIO env [String] writeMacrosFile outputDirectory pkgs = do - fps <- fmap (nubOrd . catMaybes . concat) $ - forM pkgs $ \pkg -> forM (ghciPkgOpts pkg) $ \(_, bio) -> do - let cabalMacros = bioCabalMacros bio - exists <- liftIO $ doesFileExist cabalMacros - if exists - then return $ Just cabalMacros - else do - prettyWarnL ["Didn't find expected autogen file:", pretty cabalMacros] - return Nothing - files <- liftIO $ mapM (S8.readFile . 
toFilePath) fps - if null files then return [] else do - out <- liftIO $ writeHashedFile outputDirectory relFileCabalMacrosH $ - S8.concat $ map (<> "\n#undef CURRENT_PACKAGE_KEY\n#undef CURRENT_COMPONENT_ID\n") files - return ["-optP-include", "-optP" <> toFilePath out] + fps <- fmap (nubOrd . concatMap catMaybes) $ + forM pkgs $ \pkg -> forM pkg.opts $ \(_, bio) -> do + let cabalMacros = bio.cabalMacros + exists <- liftIO $ doesFileExist cabalMacros + if exists + then pure $ Just cabalMacros + else do + prettyWarnL ["Didn't find expected autogen file:", pretty cabalMacros] + pure Nothing + files <- liftIO $ mapM (S8.readFile . toFilePath) fps + if null files then pure [] else do + out <- liftIO $ writeHashedFile outputDirectory relFileCabalMacrosH $ + S8.concat $ map + (<> "\n#undef CURRENT_PACKAGE_KEY\n#undef CURRENT_COMPONENT_ID\n") + files + pure ["-optP-include", "-optP" <> toFilePath out] writeGhciScript :: (MonadIO m) => Path Abs Dir -> GhciScript -> m [String] writeGhciScript outputDirectory script = do - scriptPath <- liftIO $ writeHashedFile outputDirectory relFileGhciScript $ - LBS.toStrict $ scriptToLazyByteString script - let scriptFilePath = toFilePath scriptPath - setScriptPerms scriptFilePath - return ["-ghci-script=" <> scriptFilePath] + scriptPath <- liftIO $ writeHashedFile outputDirectory relFileGhciScript $ + LBS.toStrict $ scriptToLazyByteString script + let scriptFilePath = toFilePath scriptPath + setScriptPerms scriptFilePath + pure ["-ghci-script=" <> scriptFilePath] -writeHashedFile :: Path Abs Dir -> Path Rel File -> ByteString -> IO (Path Abs File) +writeHashedFile :: + Path Abs Dir + -> Path Rel File + -> ByteString + -> IO (Path Abs File) writeHashedFile outputDirectory relFile contents = do - relSha <- shaPathForBytes contents - let outDir = outputDirectory relSha - outFile = outDir relFile - alreadyExists <- doesFileExist outFile - unless alreadyExists $ do - ensureDir outDir - writeBinaryFileAtomic outFile $ byteString contents - 
return outFile - -renderScript :: Bool -> [GhciPkgInfo] -> Maybe (Path Abs File) -> Bool -> [Path Abs File] -> GhciScript -renderScript isIntero pkgs mainFile onlyMain extraFiles = do - let cdPhase = case (isIntero, pkgs) of - -- If only loading one package, set the cwd properly. - -- Otherwise don't try. See - -- https://github.com/commercialhaskell/stack/issues/3309 - (True, [pkg]) -> cmdCdGhc (ghciPkgDir pkg) - _ -> mempty - addPhase = cmdAdd $ S.fromList (map Left allModules ++ addMain) - addMain = case mainFile of - Just path -> [Right path] - _ -> [] - modulePhase = cmdModule $ S.fromList allModules - allModules = nubOrd $ concatMap (M.keys . ghciPkgModules) pkgs - case getFileTargets pkgs <> extraFiles of - [] -> - if onlyMain - then cdPhase <> if isJust mainFile then cmdAdd (S.fromList addMain) else mempty - else cdPhase <> addPhase <> modulePhase - fileTargets -> cmdAdd (S.fromList (map Right fileTargets)) + relSha <- shaPathForBytes contents + let outDir = outputDirectory relSha + outFile = outDir relFile + alreadyExists <- doesFileExist outFile + unless alreadyExists $ do + ensureDir outDir + writeBinaryFileAtomic outFile $ byteString contents + pure outFile + +renderScript :: + [GhciPkgInfo] + -> Maybe (Path Abs File) + -> Bool + -> [Path Abs File] + -> GhciScript +renderScript pkgs mainFile onlyMain extraFiles = do + let addPhase = cmdAdd $ S.fromList (map Left allModules ++ addMain) + addMain = maybe [] (L.singleton . Right) mainFile + modulePhase = cmdModule $ S.fromList allModules + allModules = nubOrd $ concatMap (M.keys . (.modules)) pkgs + case getFileTargets pkgs <> extraFiles of + [] -> + if onlyMain + then + if isJust mainFile + then cmdAdd (S.fromList addMain) + else mempty + else addPhase <> modulePhase + fileTargets -> cmdAdd (S.fromList (map Right fileTargets)) -- Hacky check if module / main phase should be omitted. This should be -- improved if / when we have a better per-component load. 
getFileTargets :: [GhciPkgInfo] -> [Path Abs File] -getFileTargets = concatMap (concat . maybeToList . ghciPkgTargetFiles) - --- | Figure out the main-is file to load based on the targets. Asks the --- user for input if there is more than one candidate main-is file. -figureOutMainFile - :: HasRunner env - => BuildOpts - -> Maybe (Map PackageName Target) - -> [(PackageName, (Path Abs File, Target))] - -> [GhciPkgInfo] - -> RIO env (Maybe (Path Abs File)) -figureOutMainFile bopts mainIsTargets targets0 packages = do - case candidates of - [] -> return Nothing - [c@(_,_,fp)] -> do logInfo ("Using main module: " <> RIO.display (renderCandidate c)) - return (Just fp) - candidate:_ -> do - borderedWarning $ do - logWarn "The main module to load is ambiguous. Candidates are: " - forM_ (map renderCandidate candidates) (logWarn . RIO.display) - logWarn - "You can specify which one to pick by: " - logWarn - (" * Specifying targets to stack ghci e.g. stack ghci " <> - RIO.display ( sampleTargetArg candidate)) - logWarn - (" * Specifying what the main is e.g. stack ghci " <> - RIO.display (sampleMainIsArg candidate)) - logWarn - (" * Choosing from the candidate above [1.." <> - RIO.display (length candidates) <> "]") - liftIO userOption - where - targets = fromMaybe (M.fromList $ map (\(k, (_, x)) -> (k, x)) targets0) - mainIsTargets - candidates = do - pkg <- packages - case M.lookup (ghciPkgName pkg) targets of - Nothing -> [] - Just target -> do - (component,mains) <- - M.toList $ - M.filterWithKey (\k _ -> k `S.member` wantedComponents) - (ghciPkgMainIs pkg) - main <- mains - return (ghciPkgName pkg, component, main) - where - wantedComponents = - wantedPackageComponents bopts target (ghciPkgPackage pkg) - renderCandidate c@(pkgName,namedComponent,mainIs) = - let candidateIndex = T.pack . show . (+1) . fromMaybe 0 . elemIndex c - pkgNameText = T.pack (packageNameString pkgName) - in candidateIndex candidates <> ". 
Package `" <> - pkgNameText <> - "' component " <> - -- This is the format that can be directly copy-pasted as - -- an argument to `stack ghci`. - pkgNameText <> ":" <> renderComp namedComponent <> - " with main-is file: " <> - T.pack (toFilePath mainIs) - candidateIndices = take (length candidates) [1 :: Int ..] - userOption = do - option <- prompt "Specify main module to use (press enter to load none): " - let selected = fromMaybe - ((+1) $ length candidateIndices) - (readMaybe (T.unpack option) :: Maybe Int) - case elemIndex selected candidateIndices of - Nothing -> do - putStrLn - "Not loading any main modules, as no valid module selected" - putStrLn "" - return Nothing - Just op -> do - let (_,_,fp) = candidates !! op - putStrLn - ("Loading main module from candidate " <> - show (op + 1) <> ", --main-is " <> - toFilePath fp) - putStrLn "" - return $ Just fp - renderComp c = - case c of - CLib -> "lib" - CInternalLib name -> "internal-lib:" <> name - CExe name -> "exe:" <> name - CTest name -> "test:" <> name - CBench name -> "bench:" <> name - sampleTargetArg (pkg,comp,_) = - T.pack (packageNameString pkg) <> ":" <> renderComp comp - sampleMainIsArg (pkg,comp,_) = - "--main-is " <> T.pack (packageNameString pkg) <> ":" <> renderComp comp - -loadGhciPkgDescs - :: HasEnvConfig env - => BuildOptsCLI - -> [(PackageName, (Path Abs File, Target))] - -> RIO env [GhciPkgDesc] +getFileTargets = concatMap (concat . maybeToList . (.targetFiles)) + +-- | Figure out the main-is file to load based on the targets. Asks the user for +-- input if there is more than one candidate main-is file. 
+figureOutMainFile :: + (HasRunner env, HasTerm env) + => BuildOpts + -> Maybe (Map PackageName Target) + -> [(PackageName, (Path Abs File, Target))] + -> [GhciPkgInfo] + -> RIO env (Maybe (Path Abs File)) +figureOutMainFile bopts mainIsTargets targets0 packages = + case candidates of + [] -> pure Nothing + [c@(_,_,fp)] -> do + prettyInfo $ + fillSep + [ "Using" + , style Current "main" + , "module:" + ] + <> line + <> renderCandidate c + <> line + pure (Just fp) + candidate:_ -> do + prettyWarn $ + fillSep + [ "The" + , style Current "main" + , flow "module to load is ambiguous. Candidates are:" + ] + <> line + <> mconcat (L.intersperse line (map renderCandidate candidates)) + <> blankLine + <> flow "You can specify which one to pick by:" + <> line + <> bulletedList + [ fillSep + [ flow "Specifying targets to" + , style Shell (flow "stack ghci") + , "e.g." + , style Shell ( fillSep + [ flow "stack ghci" + , sampleTargetArg candidate + ] + ) <> "." + ] + , fillSep + [ flow "Specifying what the" + , style Current "main" + , flow "is e.g." + , style Shell ( fillSep + [ flow "stack ghci" + , sampleMainIsArg candidate + ] + ) <> "." + ] + , flow + $ "Choosing from the candidate above [1.." + <> show (length candidates) + <> "]." + ] + <> line + liftIO userOption + where + targets = fromMaybe + (M.fromList $ map (\(k, (_, x)) -> (k, x)) targets0) + mainIsTargets + candidates = do + pkg <- packages + case M.lookup pkg.name targets of + Nothing -> [] + Just target -> do + (component,mains) <- + M.toList $ + M.filterWithKey (\k _ -> k `S.member` wantedComponents) + pkg.mainIs + main <- mains + pure (pkg.name, component, main) + where + wantedComponents = + wantedPackageComponents bopts target pkg.package + renderCandidate c@(pkgName, namedComponent, mainIs) = + let candidateIndex = + fromString . show . (+1) . fromMaybe 0 . 
L.elemIndex c + pkgNameText = fromPackageName pkgName + in hang 4 + $ fill 4 ( candidateIndex candidates <> ".") + <> fillSep + [ "Package" + , style Current pkgNameText <> "," + , "component" + -- This is the format that can be directly copy-pasted as an + -- argument to `stack ghci`. + , style + PkgComponent + ( pkgNameText + <> ":" + <> renderComponentTo namedComponent + ) + <> "," + , "with" + , style Shell "main-is" + , "file:" + , pretty mainIs <> "." + ] + candidateIndices = take (length candidates) [1 :: Int ..] + userOption = do + option <- prompt "Specify main module to use (press enter to load none): " + let selected = fromMaybe + ((+1) $ length candidateIndices) + (readMaybe (T.unpack option) :: Maybe Int) + case L.elemIndex selected candidateIndices of + Nothing -> do + putStrLn + "Not loading any main modules, as no valid module selected" + putStrLn "" + pure Nothing + Just op -> do + (_, _, fp) <- maybe + (prettyThrowIO CandidatesIndexOutOfRangeBug) + pure + (candidates !? op) + putStrLn + ("Loading main module from candidate " <> + show (op + 1) <> ", --main-is " <> + toFilePath fp) + putStrLn "" + pure $ Just fp + sampleTargetArg (pkg, comp, _) = + fromPackageName pkg + <> ":" + <> renderComponentTo comp + sampleMainIsArg (pkg, comp, _) = + fillSep + [ "--main-is" + , fromPackageName pkg <> ":" <> renderComponentTo comp + ] + +loadGhciPkgDescs :: + HasEnvConfig env + => BuildOptsCLI + -> [(PackageName, (Path Abs File, Target))] + -> RIO env [GhciPkgDesc] loadGhciPkgDescs buildOptsCLI localTargets = - forM localTargets $ \(name, (cabalfp, target)) -> - loadGhciPkgDesc buildOptsCLI name cabalfp target + forM localTargets $ \(name, (cabalFP, target)) -> + loadGhciPkgDesc buildOptsCLI name cabalFP target -- | Load package description information for a ghci target. 
-loadGhciPkgDesc - :: HasEnvConfig env - => BuildOptsCLI - -> PackageName - -> Path Abs File - -> Target - -> RIO env GhciPkgDesc -loadGhciPkgDesc buildOptsCLI name cabalfp target = do - econfig <- view envConfigL - compilerVersion <- view actualCompilerVersionL - let SourceMap{..} = envConfigSourceMap econfig - -- Currently this source map is being build with - -- the default targets - sourceMapGhcOptions = fromMaybe [] $ - (cpGhcOptions . ppCommon <$> M.lookup name smProject) - <|> - (cpGhcOptions . dpCommon <$> M.lookup name smDeps) - sourceMapCabalConfigOpts = fromMaybe [] $ - (cpCabalConfigOpts . ppCommon <$> M.lookup name smProject) - <|> - (cpCabalConfigOpts . dpCommon <$> M.lookup name smDeps) - config = - PackageConfig - { packageConfigEnableTests = True - , packageConfigEnableBenchmarks = True - , packageConfigFlags = getLocalFlags buildOptsCLI name - , packageConfigGhcOptions = sourceMapGhcOptions - , packageConfigCabalConfigOpts = sourceMapCabalConfigOpts - , packageConfigCompilerVersion = compilerVersion - , packageConfigPlatform = view platformL econfig - } - -- TODO we've already parsed this information, otherwise we - -- wouldn't have figured out the cabalfp already. In the future: - -- retain that GenericPackageDescription in the relevant data - -- structures to avoid reparsing. - (gpdio, _name, _cabalfp) <- loadCabalFilePath (parent cabalfp) - gpkgdesc <- liftIO $ gpdio YesPrintWarnings - - -- Source the package's *.buildinfo file created by configure if any. 
See - -- https://www.haskell.org/cabal/users-guide/developing-packages.html#system-dependent-parameters - buildinfofp <- parseRelFile (packageNameString name ++ ".buildinfo") - hasDotBuildinfo <- doesFileExist (parent cabalfp buildinfofp) - let mbuildinfofp - | hasDotBuildinfo = Just (parent cabalfp buildinfofp) - | otherwise = Nothing - mbuildinfo <- forM mbuildinfofp readDotBuildinfo - let pdp = resolvePackageDescription config gpkgdesc - pkg = - packageFromPackageDescription config (C.genPackageFlags gpkgdesc) $ - maybe - pdp - (\bi -> - let PackageDescriptionPair x y = pdp - in PackageDescriptionPair - (C.updatePackageDescription bi x) - (C.updatePackageDescription bi y)) - mbuildinfo - return GhciPkgDesc - { ghciDescPkg = pkg - , ghciDescCabalFp = cabalfp - , ghciDescTarget = target - } - -getGhciPkgInfos - :: HasEnvConfig env - => InstallMap - -> [PackageName] - -> Maybe (Map PackageName [Path Abs File]) - -> [GhciPkgDesc] - -> RIO env [GhciPkgInfo] +loadGhciPkgDesc :: + HasEnvConfig env + => BuildOptsCLI + -> PackageName + -> Path Abs File + -> Target + -> RIO env GhciPkgDesc +loadGhciPkgDesc buildOptsCLI name cabalFP target = do + econfig <- view envConfigL + compilerVersion <- view actualCompilerVersionL + let sm = econfig.sourceMap + -- Currently this source map is being build with + -- the default targets + sourceMapGhcOptions = fromMaybe [] $ + ((.projectCommon.ghcOptions) <$> M.lookup name sm.project) + <|> + ((.depCommon.ghcOptions) <$> M.lookup name sm.deps) + sourceMapCabalConfigOpts = fromMaybe [] $ + ( (.projectCommon.cabalConfigOpts) <$> M.lookup name sm.project) + <|> + ((.depCommon.cabalConfigOpts) <$> M.lookup name sm.deps) + sourceMapFlags = + maybe mempty (.projectCommon.flags) $ M.lookup name sm.project + config = PackageConfig + { enableTests = True + , enableBenchmarks = True + , flags = getCliFlags <> sourceMapFlags + , ghcOptions = sourceMapGhcOptions + , cabalConfigOpts = sourceMapCabalConfigOpts + , compilerVersion = compilerVersion + 
, platform = view platformL econfig + } + -- TODO we've already parsed this information, otherwise we wouldn't have + -- figured out the cabalFP already. In the future: retain that + -- GenericPackageDescription in the relevant data structures to avoid + -- reparsing. + (gpdio, _name, _cabalFP) <- + loadCabalFilePath (Just stackProgName') (parent cabalFP) + gpkgdesc <- liftIO $ gpdio YesPrintWarnings + + -- Source the package's *.buildinfo file created by configure if any. See + -- https://www.haskell.org/cabal/users-guide/developing-packages.html#system-dependent-parameters + buildinfofp <- parseRelFile (packageNameString name ++ ".buildinfo") + hasDotBuildinfo <- doesFileExist (parent cabalFP buildinfofp) + let mbuildinfofp + | hasDotBuildinfo = Just (parent cabalFP buildinfofp) + | otherwise = Nothing + mbuildinfo <- forM mbuildinfofp readDotBuildinfo + let pdp = resolvePackageDescription config gpkgdesc + package = + packageFromPackageDescription config (C.genPackageFlags gpkgdesc) $ + maybe pdp (`C.updatePackageDescription` pdp) mbuildinfo + pure GhciPkgDesc + { package + , cabalFP + , target + } + where + cliFlags = buildOptsCLI.flags + -- | All CLI Cabal flags for a package. 
+ getCliFlags :: Map FlagName Bool + getCliFlags = Map.unions + [ Map.findWithDefault Map.empty (ACFByName name) cliFlags + , Map.findWithDefault Map.empty ACFAllProjectPackages cliFlags + ] + +getGhciPkgInfos :: + HasEnvConfig env + => InstallMap + -> [PackageName] + -> Maybe (Map PackageName [Path Abs File]) + -> [GhciPkgDesc] + -> RIO env [GhciPkgInfo] getGhciPkgInfos installMap addPkgs mfileTargets localTargets = do - (installedMap, _, _, _) <- getInstalled installMap - let localLibs = - [ packageName (ghciDescPkg desc) - | desc <- localTargets - , hasLocalComp isCLib (ghciDescTarget desc) - ] - forM localTargets $ \pkgDesc -> - makeGhciPkgInfo installMap installedMap localLibs addPkgs mfileTargets pkgDesc + (installedMap, _, _, _) <- getInstalled installMap + let localLibs = + [ desc.package.name + | desc <- localTargets + , hasLocalComp isCLib desc.target + ] + forM localTargets $ \pkgDesc -> + makeGhciPkgInfo installMap installedMap localLibs addPkgs mfileTargets pkgDesc -- | Make information necessary to load the given package in GHCi. 
-makeGhciPkgInfo - :: HasEnvConfig env - => InstallMap - -> InstalledMap - -> [PackageName] - -> [PackageName] - -> Maybe (Map PackageName [Path Abs File]) - -> GhciPkgDesc - -> RIO env GhciPkgInfo +makeGhciPkgInfo :: + HasEnvConfig env + => InstallMap + -> InstalledMap + -> [PackageName] + -> [PackageName] + -> Maybe (Map PackageName [Path Abs File]) + -> GhciPkgDesc + -> RIO env GhciPkgInfo makeGhciPkgInfo installMap installedMap locals addPkgs mfileTargets pkgDesc = do - bopts <- view buildOptsL - let pkg = ghciDescPkg pkgDesc - cabalfp = ghciDescCabalFp pkgDesc - target = ghciDescTarget pkgDesc - name = packageName pkg - (mods,files,opts) <- getPackageOpts (packageOpts pkg) installMap installedMap locals addPkgs cabalfp - let filteredOpts = filterWanted opts - filterWanted = M.filterWithKey (\k _ -> k `S.member` allWanted) - allWanted = wantedPackageComponents bopts target pkg - return - GhciPkgInfo - { ghciPkgName = name - , ghciPkgOpts = M.toList filteredOpts - , ghciPkgDir = parent cabalfp - , ghciPkgModules = unionModuleMaps $ - map (\(comp, mp) -> M.map (\fp -> M.singleton fp (S.singleton (packageName pkg, comp))) mp) - (M.toList (filterWanted mods)) - , ghciPkgMainIs = M.map (mapMaybe dotCabalMainPath) files - , ghciPkgCFiles = mconcat (M.elems (filterWanted (M.map (mapMaybe dotCabalCFilePath) files))) - , ghciPkgTargetFiles = mfileTargets >>= M.lookup name - , ghciPkgPackage = pkg - } + bopts <- view buildOptsL + let package = pkgDesc.package + cabalFP = pkgDesc.cabalFP + target = pkgDesc.target + name = package.name + dir = parent cabalFP + targetFiles = mfileTargets >>= M.lookup name + (mods, files, allOpts) <- + getPackageOpts package installMap installedMap locals addPkgs cabalFP + let opts = M.toList $ filterWanted allOpts + filterWanted :: Map NamedComponent a -> Map NamedComponent a + filterWanted = M.filterWithKey (\k _ -> k `S.member` allWanted) + allWanted = wantedPackageComponents bopts target package + modules = unionModuleMaps $ + map + ( 
\(comp, mp) -> M.map + (\fp -> M.singleton fp (S.singleton (name, comp))) + mp + ) + (M.toList (filterWanted mods)) + mainIs = M.map (mapMaybe dotCabalMainPath) files + cFiles = mconcat + (M.elems (filterWanted (M.map (mapMaybe dotCabalCFilePath) files))) + pure GhciPkgInfo + { name + , opts + , dir + , modules + , mainIs + , cFiles + , targetFiles + , package + } -- NOTE: this should make the same choices as the components code in -- 'loadLocalPackage'. Unfortunately for now we reiterate this logic -- (differently). wantedPackageComponents :: BuildOpts -> Target -> Package -> Set NamedComponent wantedPackageComponents _ (TargetComps cs) _ = cs -wantedPackageComponents bopts (TargetAll PTProject) pkg = S.fromList $ - (case packageLibraries pkg of - NoLibraries -> [] - HasLibraries names -> CLib : map CInternalLib (S.toList names)) ++ - map CExe (S.toList (packageExes pkg)) <> - map CInternalLib (S.toList $ packageInternalLibraries pkg) <> - (if boptsTests bopts then map CTest (M.keys (packageTests pkg)) else []) <> - (if boptsBenchmarks bopts then map CBench (S.toList (packageBenchmarks pkg)) else []) +wantedPackageComponents bopts (TargetAll PTProject) pkg = + ( if hasBuildableMainLibrary pkg + then S.insert CLib (S.mapMonotonic CSubLib buildableForeignLibs') + else S.empty + ) + <> S.mapMonotonic CExe buildableExes' + <> S.mapMonotonic CSubLib buildableSubLibs' + <> ( if bopts.tests + then S.mapMonotonic CTest buildableTestSuites' + else S.empty + ) + <> ( if bopts.benchmarks + then S.mapMonotonic CBench buildableBenchmarks' + else S.empty + ) + where + buildableForeignLibs' = buildableForeignLibs pkg + buildableSubLibs' = buildableSubLibs pkg + buildableExes' = buildableExes pkg + buildableTestSuites' = buildableTestSuites pkg + buildableBenchmarks' = buildableBenchmarks pkg wantedPackageComponents _ _ _ = S.empty -checkForIssues :: HasLogFunc env => [GhciPkgInfo] -> RIO env () -checkForIssues pkgs = do - when (length pkgs > 1) $ borderedWarning $ do - -- Cabal 
flag issues could arise only when there are at least 2 packages - unless (null cabalFlagIssues) $ borderedWarning $ do - logWarn "Warning: There are cabal flags for this project which may prevent GHCi from loading your code properly." - logWarn "In some cases it can also load some projects which would otherwise fail to build." - logWarn "" - mapM_ (logWarn . RIO.display) $ intercalate [""] cabalFlagIssues - logWarn "" - logWarn "To resolve, remove the flag(s) from the cabal file(s) and instead put them at the top of the haskell files." - logWarn "" - logWarn "It isn't yet possible to load multiple packages into GHCi in all cases - see" - logWarn "https://ghc.haskell.org/trac/ghc/ticket/10827" - where - cabalFlagIssues = concatMap mixedFlag - [ ( "-XNoImplicitPrelude" - , [ "-XNoImplicitPrelude will be used, but GHCi will likely fail to build things which depend on the implicit prelude."] - ) - , ( "-XCPP" - , [ "-XCPP will be used, but it can cause issues with multiline strings." - , "See https://downloads.haskell.org/~ghc/7.10.2/docs/html/users_guide/options-phases.html#cpp-string-gaps" - ] - ) - , ( "-XNoTraditionalRecordSyntax" - , [ "-XNoTraditionalRecordSyntax will be used, but it break modules which use record syntax." ] - ) - , ( "-XTemplateHaskell" - , [ "-XTemplateHaskell will be used, but it may cause compilation issues due to different parsing of '$' when there's no space after it." ] - ) - , ( "-XQuasiQuotes" - , [ "-XQuasiQuotes will be used, but it may cause parse failures due to a different meaning for list comprehension syntax like [x| ... ]" ] - ) - , ( "-XSafe" - , [ "-XSafe will be used, but it will fail to compile unsafe modules." ] - ) - , ( "-XArrows" - , [ "-XArrows will be used, but it will cause non-arrow usages of proc, (-<), (-<<) to fail" ] - ) - , ( "-XOverloadedStrings" - , [ "-XOverloadedStrings will be used, but it can cause type ambiguity in code not usually compiled with it." 
] - ) - , ( "-XOverloadedLists" - , [ "-XOverloadedLists will be used, but it can cause type ambiguity in code not usually compiled with it." ] - ) - , ( "-XMonoLocalBinds" - , [ "-XMonoLocalBinds will be used, but it can cause type errors in code which expects generalized local bindings." ] - ) - , ( "-XTypeFamilies" - , [ "-XTypeFamilies will be used, but it implies -XMonoLocalBinds, and so can cause type errors in code which expects generalized local bindings." ] - ) - , ( "-XGADTs" - , [ "-XGADTs will be used, but it implies -XMonoLocalBinds, and so can cause type errors in code which expects generalized local bindings." ] - ) - , ( "-XNewQualifiedOperators" - , [ "-XNewQualifiedOperators will be used, but this will break usages of the old qualified operator syntax." ] - ) +checkForIssues :: HasTerm env => [GhciPkgInfo] -> RIO env () +checkForIssues pkgs = + when (length pkgs > 1) $ do + -- Cabal flag issues could arise only when there are at least 2 packages + unless (null cabalFlagIssues) $ do + prettyWarn $ + flow "There are Cabal flags for this project which may prevent \ + \GHCi from loading your code properly. In some cases it \ + \can also load some projects which would otherwise fail to \ + \build." + <> blankLine + <> mconcat (L.intersperse blankLine cabalFlagIssues) + <> blankLine + <> flow "To resolve, remove the flag(s) from the Cabal file(s) and \ + \instead put them at the top of the Haskell files." + <> blankLine + prettyWarnL + [ flow "It isn't yet possible to load multiple packages into GHCi in \ + \all cases. For further information, see" + , style Url "https://ghc.haskell.org/trac/ghc/ticket/10827" <> "." + ] + where + cabalFlagIssues = concatMap mixedFlag + [ ( "-XNoImplicitPrelude" + , [ flow "-XNoImplicitPrelude will be used, but GHCi will likely fail to \ + \build things which depend on the implicit prelude." 
] - mixedFlag (flag, msgs) = - let x = partitionComps (== flag) in - [ msgs ++ showWhich x | mixedSettings x ] - mixedSettings (xs, ys) = xs /= [] && ys /= [] - showWhich (haveIt, don'tHaveIt) = - [ "It is specified for:" - , " " <> renderPkgComponents haveIt - , "But not for: " - , " " <> renderPkgComponents don'tHaveIt + ) + , ( "-XCPP" + , [ flow "-XCPP will be used, but it can cause issues with multiline \ + \strings. For further information, see" + , style Url "https://downloads.haskell.org/~ghc/7.10.2/docs/html/users_guide/options-phases.html#cpp-string-gaps" <> "." ] - partitionComps f = (map fst xs, map fst ys) - where - (xs, ys) = partition (any f . snd) compsWithOpts - compsWithOpts = map (\(k, bio) -> (k, bioOneWordOpts bio ++ bioOpts bio)) compsWithBios - compsWithBios = - [ ((ghciPkgName pkg, c), bio) - | pkg <- pkgs - , (c, bio) <- ghciPkgOpts pkg + ) + , ( "-XNoTraditionalRecordSyntax" + , [ flow "-XNoTraditionalRecordSyntax will be used, but it break modules \ + \which use record syntax." ] - -borderedWarning :: HasLogFunc env => RIO env a -> RIO env a -borderedWarning f = do - logWarn "" - logWarn "* * * * * * * *" - x <- f - logWarn "* * * * * * * *" - logWarn "" - return x + ) + , ( "-XTemplateHaskell" + , [ flow "-XTemplateHaskell will be used, but it may cause compilation \ + \issues due to different parsing of '$' when there's no space \ + \after it." + ] + ) + , ( "-XQuasiQuotes" + , [ flow "-XQuasiQuotes will be used, but it may cause parse failures \ + \due to a different meaning for list comprehension syntax like \ + \[x| ... ]" + ] + ) + , ( "-XSafe" + , [ flow "-XSafe will be used, but it will fail to compile unsafe \ + \modules." + ] + ) + , ( "-XArrows" + , [ flow "-XArrows will be used, but it will cause non-arrow usages of \ + \proc, (-<), (-<<) to fail" + ] + ) + , ( "-XOverloadedStrings" + , [ flow "-XOverloadedStrings will be used, but it can cause type \ + \ambiguity in code not usually compiled with it." 
+ ] + ) + , ( "-XOverloadedLists" + , [ flow "-XOverloadedLists will be used, but it can cause type \ + \ambiguity in code not usually compiled with it." + ] + ) + , ( "-XMonoLocalBinds" + , [ flow "-XMonoLocalBinds will be used, but it can cause type errors in \ + \code which expects generalized local bindings." ] + ) + , ( "-XTypeFamilies" + , [ flow "-XTypeFamilies will be used, but it implies -XMonoLocalBinds, \ + \and so can cause type errors in code which expects generalized \ + \local bindings." ] + ) + , ( "-XGADTs" + , [ flow "-XGADTs will be used, but it implies -XMonoLocalBinds, and so \ + \can cause type errors in code which expects generalized local \ + \bindings." ] + ) + , ( "-XNewQualifiedOperators" + , [ flow "-XNewQualifiedOperators will be used, but this will break \ + \usages of the old qualified operator syntax." ] + ) + ] + mixedFlag (flag, msgs) = + let x = partitionComps (== flag) + in [ fillSep $ msgs ++ showWhich x | mixedSettings x ] + mixedSettings (xs, ys) = xs /= [] && ys /= [] + showWhich (haveIt, don'tHaveIt) = + [ flow "It is specified for:" ] + <> mkNarrativeList (Just PkgComponent) False + (map (fromString . T.unpack . renderPkgComponent) haveIt :: [StyleDoc]) + <> [ flow "But not for:" ] + <> mkNarrativeList (Just PkgComponent) False + (map (fromString . T.unpack . renderPkgComponent) don'tHaveIt :: [StyleDoc]) + partitionComps f = (map fst xs, map fst ys) + where + (xs, ys) = L.partition (any f . snd) compsWithOpts + compsWithOpts = map (\(k, bio) -> + (k, bio.oneWordOpts ++ bio.opts)) compsWithBios + compsWithBios = + [ ((pkg.name, c), bio) + | pkg <- pkgs + , (c, bio) <- pkg.opts + ] -- TODO: Should this also tell the user the filepaths, not just the -- module name? 
checkForDuplicateModules :: HasTerm env => [GhciPkgInfo] -> RIO env () -checkForDuplicateModules pkgs = do - unless (null duplicates) $ do - borderedWarning $ do - prettyWarn $ "Multiple files use the same module name:" <> - line <> bulletedList (map prettyDuplicate duplicates) - -- MSS 2020-10-13 Disabling, may remove entirely in the future - -- See: https://github.com/commercialhaskell/stack/issues/5407#issuecomment-707339928 - -- throwM LoadingDuplicateModules - where - duplicates :: [(ModuleName, Map (Path Abs File) (Set (PackageName, NamedComponent)))] - duplicates = - filter (\(_, mp) -> M.size mp > 1) $ - M.toList $ - unionModuleMaps (map ghciPkgModules pkgs) - prettyDuplicate :: (ModuleName, Map (Path Abs File) (Set (PackageName, NamedComponent))) -> StyleDoc - prettyDuplicate (mn, mp) = - style Error (pretty mn) <+> "found at the following paths" <> line <> - bulletedList (map fileDuplicate (M.toList mp)) - fileDuplicate :: (Path Abs File, Set (PackageName, NamedComponent)) -> StyleDoc - fileDuplicate (fp, comps) = - pretty fp <+> parens (fillSep (punctuate "," (map displayPkgComponent (S.toList comps)))) - -targetWarnings - :: HasBuildConfig env +checkForDuplicateModules pkgs = + unless (null duplicates) $ + -- Two or more files with the same module name are treated as a warning + -- rather than an error. 
See: + -- https://github.com/commercialhaskell/stack/issues/5407#issuecomment-707339928 + prettyWarn $ + flow "Multiple files use the same module name:" + <> line + <> bulletedList (map prettyDuplicate duplicates) + <> line + where + duplicates :: + [(ModuleName, Map (Path Abs File) (Set (PackageName, NamedComponent)))] + duplicates = + filter (\(_, mp) -> M.size mp > 1) $ + M.toList $ + unionModuleMaps (map (.modules) pkgs) + prettyDuplicate :: + (ModuleName, Map (Path Abs File) (Set (PackageName, NamedComponent))) + -> StyleDoc + prettyDuplicate (mn, mp) = + fillSep + [ style Error (pretty mn) + , flow "found at the following paths" + ] + <> line + <> bulletedList (map fileDuplicate (M.toList mp)) + fileDuplicate :: + (Path Abs File, Set (PackageName, NamedComponent)) -> StyleDoc + fileDuplicate (fp, comps) = + fillSep + [ pretty fp + , parens $ + fillSep $ punctuate "," (map displayPkgComponent (S.toList comps)) + ] + +targetWarnings :: + HasBuildConfig env => [(PackageName, (Path Abs File, Target))] -> [PackageName] -> Maybe (Map PackageName [Path Abs File], [Path Abs File]) @@ -874,105 +1138,108 @@ targetWarnings localTargets nonLocalTargets mfileTargets = do unless (null nonLocalTargets) $ prettyWarnL [ flow "Some targets" - , parens $ fillSep $ punctuate "," $ map (style Good . fromString . packageNameString) nonLocalTargets - , flow "are not local packages, and so cannot be directly loaded." - , flow "In future versions of stack, this might be supported - see" - , style Url "https://github.com/commercialhaskell/stack/issues/1441" - , "." - , flow "It can still be useful to specify these, as they will be passed to ghci via -package flags." + , parens $ fillSep $ punctuate "," $ map + (style Good . fromPackageName) + nonLocalTargets + , flow "are not project packages, and so cannot be directly loaded. In \ + \future versions of Stack, this might be supported - see" + , style Url "https://github.com/commercialhaskell/stack/issues/1441" <> "." 
+ , flow "It can still be useful to specify these, as they will be passed \ + \to ghci via" + , style Shell "-package" + , "flags." ] when (null localTargets && isNothing mfileTargets) $ do - smWanted <- view $ buildConfigL.to bcSMWanted - stackYaml <- view stackYamlL - prettyNote $ vsep - [ flow "No local targets specified, so a plain ghci will be started with no package hiding or package options." - , "" - , flow $ T.unpack $ utf8BuilderToText $ - "You are using snapshot: " <> - RIO.display (smwSnapshotLocation smWanted) - , "" - , flow "If you want to use package hiding and options, then you can try one of the following:" - , "" - , bulletedList - [ fillSep - [ flow "If you want to start a different project configuration than" <+> pretty stackYaml <> ", then you can use" - , style Shell "stack init" - , flow "to create a new stack.yaml for the packages in the current directory." - , line - ] - , flow "If you want to use the project configuration at" <+> pretty stackYaml <> ", then you can add to its 'packages' field." - ] - , "" - ] + smWanted <- view $ buildConfigL . to (.smWanted) + view configFileL >>= \case + -- A user-specific global configuration file + Left _ -> prettyThrowM ConfigFileNotProjectLevelBug + -- A project-level configuration file + Right projectConfigFile -> prettyNote $ vsep + [ flow "No project package targets specified, so a plain ghci will be \ + \started with no package hiding or package options." + , "" + , flow $ T.unpack $ utf8BuilderToText $ + "You are using snapshot: " <> + display smWanted.snapshotLocation + , "" + , flow "If you want to use package hiding and options, then you can try \ + \one of the following:" + , "" + , bulletedList + [ fillSep + [ flow "If you want to start a different project configuration \ + \than" + , pretty projectConfigFile <> "," + , flow "then you can use" + , style Shell "stack init" + , flow "to create a new" + , style File "stack.yaml" + , flow "for the packages in the current directory." 
+ , line + ] + , flow "If you want to use the project configuration at" + , pretty projectConfigFile <> "," + , flow "then you can add to the value of its" + , style Shell "packages" + , "key." + ] + , "" + ] --- Adds in intermediate dependencies between ghci targets. Note that it --- will return a Lib component for these intermediate dependencies even --- if they don't have a library (but that's fine for the usage within --- this module). +-- Adds in intermediate dependencies between ghci targets. Note that it will +-- return a Lib component for these intermediate dependencies even if they don't +-- have a library (but that's fine for the usage within this module). -- --- If 'True' is passed for loadAllDeps, this loads all local deps, even --- if they aren't intermediate. -getExtraLoadDeps - :: Bool - -> Map PackageName LocalPackage - -> [(PackageName, (Path Abs File, Target))] - -> [(PackageName, (Path Abs File, Target))] +-- If 'True' is passed for loadAllDeps, this loads all local deps, even if they +-- aren't intermediate. +getExtraLoadDeps :: + Bool + -> Map PackageName LocalPackage + -> [(PackageName, (Path Abs File, Target))] + -> [(PackageName, (Path Abs File, Target))] getExtraLoadDeps loadAllDeps localMap targets = - M.toList $ - (\mp -> foldl' (flip M.delete) mp (map fst targets)) $ - M.mapMaybe id $ - execState (mapM_ (mapM_ go . getDeps . fst) targets) - (M.fromList (map (second Just) targets)) - where - getDeps :: PackageName -> [PackageName] - getDeps name = - case M.lookup name localMap of - Just lp -> M.keys (packageDeps (lpPackage lp)) -- FIXME just Local? 
- _ -> [] - go :: PackageName -> State (Map PackageName (Maybe (Path Abs File, Target))) Bool - go name = do - cache <- get - case (M.lookup name cache, M.lookup name localMap) of - (Just (Just _), _) -> return True - (Just Nothing, _) | not loadAllDeps -> return False - (_, Just lp) -> do - let deps = M.keys (packageDeps (lpPackage lp)) - shouldLoad <- liftM or $ mapM go deps - if shouldLoad - then do - modify (M.insert name (Just (lpCabalFile lp, TargetComps (S.singleton CLib)))) - return True - else do - modify (M.insert name Nothing) - return False - (_, _) -> return False + M.toList $ + (\mp -> foldl' (flip M.delete) mp (map fst targets)) $ + M.mapMaybe id $ + execState (mapM_ (mapM_ go . getDeps . fst) targets) + (M.fromList (map (second Just) targets)) + where + getDeps :: PackageName -> [PackageName] + getDeps name = + case M.lookup name localMap of + Just lp -> listOfPackageDeps lp.package -- FIXME just Local? + _ -> [] + go :: + PackageName + -> State (Map PackageName (Maybe (Path Abs File, Target))) Bool + go name = do + cache <- get + case (M.lookup name cache, M.lookup name localMap) of + (Just (Just _), _) -> pure True + (Just Nothing, _) | not loadAllDeps -> pure False + (_, Just lp) -> do + let deps = listOfPackageDeps lp.package + shouldLoad <- or <$> mapM go deps + if shouldLoad + then do + modify (M.insert name (Just (lp.cabalFP, TargetComps (S.singleton CLib)))) + pure True + else do + modify (M.insert name Nothing) + pure False + (_, _) -> pure False unionTargets :: Ord k => Map k Target -> Map k Target -> Map k Target -unionTargets = M.unionWith $ \l r -> - case (l, r) of - (TargetAll PTDependency, _) -> r - (TargetComps sl, TargetComps sr) -> TargetComps (S.union sl sr) - (TargetComps _, TargetAll PTProject) -> TargetAll PTProject - (TargetComps _, _) -> l - (TargetAll PTProject, _) -> TargetAll PTProject +unionTargets = M.unionWith $ \l r -> case (l, r) of + (TargetAll PTDependency, _) -> r + (TargetComps sl, TargetComps sr) -> TargetComps 
(S.union sl sr) + (TargetComps _, TargetAll PTProject) -> TargetAll PTProject + (TargetComps _, _) -> l + (TargetAll PTProject, _) -> TargetAll PTProject hasLocalComp :: (NamedComponent -> Bool) -> Target -> Bool -hasLocalComp p t = - case t of - TargetComps s -> any p (S.toList s) - TargetAll PTProject -> True - _ -> False - --- | Run a command and grab the first line of stdout, dropping --- stderr's contexts completely. -runGrabFirstLine :: (HasProcessContext env, HasLogFunc env) => String -> [String] -> RIO env String -runGrabFirstLine cmd0 args = - proc cmd0 args $ \pc -> do - (out, _err) <- readProcess_ pc - return - $ TL.unpack - $ TL.filter (/= '\r') - $ TL.concat - $ take 1 - $ TL.lines - $ TLE.decodeUtf8With lenientDecode out +hasLocalComp p t = case t of + TargetComps s -> any p (S.toList s) + TargetAll PTProject -> True + _ -> False diff --git a/src/Stack/Ghci/Script.hs b/src/Stack/Ghci/Script.hs index 11945d6123..0379087e61 100644 --- a/src/Stack/Ghci/Script.hs +++ b/src/Stack/Ghci/Script.hs @@ -1,58 +1,56 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Ghci.Script +License : BSD-3-Clause +-} module Stack.Ghci.Script ( GhciScript , ModuleName - , cmdAdd - , cmdCdGhc , cmdModule - , scriptToLazyByteString , scriptToBuilder , scriptToFile ) where -import Data.ByteString.Builder (toLazyByteString) -import Data.List +import Data.ByteString.Builder ( toLazyByteString ) +import qualified Data.List as L import qualified Data.Set as S -import Path +import Distribution.ModuleName ( ModuleName, components ) import Stack.Prelude -import System.IO (hSetBinaryMode) - -import Distribution.ModuleName hiding (toFilePath) +import System.IO ( hSetBinaryMode ) -newtype GhciScript = GhciScript { unGhciScript :: [GhciCommand] } +newtype GhciScript = GhciScript { ghciScript :: [GhciCommand] } instance Semigroup 
GhciScript where GhciScript xs <> GhciScript ys = GhciScript (ys <> xs) + instance Monoid GhciScript where mempty = GhciScript [] mappend = (<>) data GhciCommand - = Add (Set (Either ModuleName (Path Abs File))) - | CdGhc (Path Abs Dir) - | Module (Set ModuleName) - deriving (Show) + = AddCmd (Set (Either ModuleName (Path Abs File))) + | ModuleCmd (Set ModuleName) + deriving Show cmdAdd :: Set (Either ModuleName (Path Abs File)) -> GhciScript -cmdAdd = GhciScript . (:[]) . Add - -cmdCdGhc :: Path Abs Dir -> GhciScript -cmdCdGhc = GhciScript . (:[]) . CdGhc +cmdAdd = GhciScript . (:[]) . AddCmd cmdModule :: Set ModuleName -> GhciScript -cmdModule = GhciScript . (:[]) . Module +cmdModule = GhciScript . (:[]) . ModuleCmd scriptToLazyByteString :: GhciScript -> LByteString scriptToLazyByteString = toLazyByteString . scriptToBuilder scriptToBuilder :: GhciScript -> Builder scriptToBuilder backwardScript = mconcat $ fmap commandToBuilder script - where - script = reverse $ unGhciScript backwardScript + where + script = reverse backwardScript.ghciScript scriptToFile :: Path Abs File -> GhciScript -> IO () scriptToFile path script = @@ -60,31 +58,40 @@ scriptToFile path script = $ \hdl -> do hSetBuffering hdl (BlockBuffering Nothing) hSetBinaryMode hdl True hPutBuilder hdl (scriptToBuilder script) - where - filepath = toFilePath path + where + filepath = toFilePath path -- Command conversion commandToBuilder :: GhciCommand -> Builder -commandToBuilder (Add modules) +commandToBuilder (AddCmd modules) | S.null modules = mempty | otherwise = ":add " - <> mconcat (intersperse " " $ - fmap (fromString . quoteFileName . either (mconcat . intersperse "." . components) toFilePath) - (S.toAscList modules)) + <> mconcat + ( L.intersperse " " + $ fmap + ( fromString + . quoteFileName + . either (mconcat . L.intersperse "." . 
components) toFilePath + ) + (S.toAscList modules) + ) <> "\n" -commandToBuilder (CdGhc path) = - ":cd-ghc " <> fromString (quoteFileName (toFilePath path)) <> "\n" - -commandToBuilder (Module modules) +commandToBuilder (ModuleCmd modules) | S.null modules = ":module +\n" | otherwise = ":module + " - <> mconcat (intersperse " " - $ fromString . quoteFileName . mconcat . intersperse "." . components <$> S.toAscList modules) + <> mconcat + ( L.intersperse " " + $ fromString + . quoteFileName + . mconcat + . L.intersperse "." + . components <$> S.toAscList modules + ) <> "\n" -- | Make sure that a filename with spaces in it gets the proper quotes. diff --git a/src/Stack/Hoogle.hs b/src/Stack/Hoogle.hs index 60b09fed46..3e88b845c7 100644 --- a/src/Stack/Hoogle.hs +++ b/src/Stack/Hoogle.hs @@ -1,206 +1,290 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Hoogle +Description : A wrapper around hoogle. +License : BSD-3-Clause + +A wrapper around hoogle. +-} --- | A wrapper around hoogle. 
module Stack.Hoogle - ( hoogleCmd - ) where + ( hoogleCmd + ) where -import Stack.Prelude import qualified Data.ByteString.Lazy.Char8 as BL8 -import Data.Char (isSpace) +import Data.Char ( isSpace ) +import Data.Either.Extra ( eitherToMaybe ) import qualified Data.Text as T -import Distribution.PackageDescription (packageDescription, package) -import Distribution.Types.PackageName (mkPackageName) -import Distribution.Version (mkVersion) -import Lens.Micro ((?~)) -import Path (parseAbsFile) -import Path.IO hiding (findExecutable) -import qualified Stack.Build -import Stack.Build.Target (NeedTargets(NeedTargets)) +import Distribution.PackageDescription ( packageDescription, package ) +import Distribution.Types.PackageName ( mkPackageName ) +import Distribution.Version ( mkVersion ) +import Lens.Micro ( (?~) ) +import Path ( parseAbsFile ) +import Path.IO ( createDirIfMissing, doesFileExist ) +import qualified RIO.Map as Map +import RIO.Process ( findExecutable, proc, readProcess_, runProcess_) +import qualified Stack.Build ( build ) +import Stack.Build.Target ( NeedTargets (..) ) +import Stack.Constants ( stackProgName' ) +import Stack.Prelude import Stack.Runners + ( ShouldReexec (..), withConfig, withDefaultEnvConfig + , withEnvConfig + ) +import Stack.Types.BuildOptsCLI + ( BuildOptsCLI (..), defaultBuildOptsCLI ) +import Stack.Types.BuildOptsMonoid ( buildOptsMonoidHaddockL ) import Stack.Types.Config -import Stack.Types.SourceMap -import qualified RIO.Map as Map -import RIO.Process + ( Config (..), HasConfig (..) ) +import Stack.Types.EnvConfig + ( EnvConfig, HasSourceMap (..), hoogleDatabasePath + , hoogleRoot + ) +import Stack.Types.EnvSettings ( EnvSettings (..) ) +import Stack.Types.GlobalOpts + ( GlobalOpts (..), globalOptsBuildOptsMonoidL ) +import Stack.Types.Runner ( Runner, globalOptsL ) +import Stack.Types.SourceMap ( DepPackage (..), SourceMap (..) ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Hoogle" module. 
+data HoogleException + = HoogleOnPathNotFoundBug + deriving Show + +instance Exception HoogleException where + displayException HoogleOnPathNotFoundBug = bugReport "[S-9669]" + "Cannot find Hoogle executable on PATH, after installing." + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Hoogle" module. +data HooglePrettyException + = HoogleNotFound StyleDoc + | HoogleDatabaseNotFound + deriving Show + +instance Pretty HooglePrettyException where + pretty (HoogleNotFound e) = + "[S-1329]" + <> line + <> e + <> line + <> fillSep + [ flow "Not installing Hoogle due to" + , style Shell "--no-setup" <> "." + ] + pretty HoogleDatabaseNotFound = + "[S-3025]" + <> line + <> fillSep + [ flow "No Hoogle database. Not building one due to" + , style Shell "--no-setup" <> "." + ] + +instance Exception HooglePrettyException -- | Helper type to duplicate log messages data Muted = Muted | NotMuted -- | Hoogle command. -hoogleCmd :: ([String],Bool,Bool,Bool) -> RIO Runner () -hoogleCmd (args,setup,rebuild,startServer) = +hoogleCmd :: ([String], Bool, Bool, Bool) -> RIO Runner () +hoogleCmd (args, setup, rebuild, startServer) = local (over globalOptsL modifyGO) $ - withConfig YesReexec $ - withDefaultEnvConfig $ do - hooglePath <- ensureHoogleInPath - generateDbIfNeeded hooglePath - runHoogle hooglePath args' - where - modifyGO :: GlobalOpts -> GlobalOpts - modifyGO = globalOptsBuildOptsMonoidL . buildOptsMonoidHaddockL ?~ True - - args' :: [String] - args' = if startServer - then ["server", "--local", "--port", "8080"] - else [] - ++ args - generateDbIfNeeded :: Path Abs File -> RIO EnvConfig () - generateDbIfNeeded hooglePath = do - databaseExists <- checkDatabaseExists - if databaseExists && not rebuild - then return () - else if setup || rebuild - then do - logWarn - (if rebuild - then "Rebuilding database ..." - else "No Hoogle database yet. 
Automatically building haddocks and hoogle database (use --no-setup to disable) ...") - buildHaddocks - logInfo "Built docs." - generateDb hooglePath - logInfo "Generated DB." - else do - logError - "No Hoogle database. Not building one due to --no-setup" - bail - generateDb :: Path Abs File -> RIO EnvConfig () - generateDb hooglePath = do - do dir <- hoogleRoot - createDirIfMissing True dir - runHoogle hooglePath ["generate", "--local"] - buildHaddocks :: RIO EnvConfig () - buildHaddocks = do - config <- view configL - runRIO config $ -- a bit weird that we have to drop down like this - catch (withDefaultEnvConfig $ Stack.Build.build Nothing) - (\(_ :: ExitCode) -> return ()) - hooglePackageName = mkPackageName "hoogle" - hoogleMinVersion = mkVersion [5, 0] - hoogleMinIdent = - PackageIdentifier hooglePackageName hoogleMinVersion - installHoogle :: RIO EnvConfig (Path Abs File) - installHoogle = requiringHoogle Muted $ do - Stack.Build.build Nothing - mhooglePath' <- findExecutable "hoogle" - case mhooglePath' of - Right hooglePath -> parseAbsFile hooglePath - Left _ -> do - logWarn "Couldn't find hoogle in path after installing. This shouldn't happen, may be a bug." - bail - requiringHoogle :: Muted -> RIO EnvConfig x -> RIO EnvConfig x - requiringHoogle muted f = do - hoogleTarget <- do - sourceMap <- view $ sourceMapL . to smDeps - case Map.lookup hooglePackageName sourceMap of - Just hoogleDep -> - case dpLocation hoogleDep of - PLImmutable pli -> - T.pack . packageIdentifierString <$> - restrictMinHoogleVersion muted (packageLocationIdent pli) - plm@(PLMutable _) -> do - T.pack . packageIdentifierString . package . 
packageDescription - <$> loadCabalFile plm - Nothing -> do - -- not muted because this should happen only once - logWarn "No hoogle version was found, trying to install the latest version" - mpir <- getLatestHackageVersion YesRequireHackageIndex hooglePackageName UsePreferredVersions - let hoogleIdent = case mpir of - Nothing -> hoogleMinIdent - Just (PackageIdentifierRevision _ ver _) -> - PackageIdentifier hooglePackageName ver + withConfig YesReexec $ + withDefaultEnvConfig $ do + hooglePath <- ensureHoogleInPath + generateDbIfNeeded hooglePath + runHoogle hooglePath args' + where + modifyGO :: GlobalOpts -> GlobalOpts + modifyGO = globalOptsBuildOptsMonoidL . buildOptsMonoidHaddockL ?~ True + + args' :: [String] + args' = if startServer + then ["server", "--local", "--port", "8080"] ++ args + else args + + generateDbIfNeeded :: Path Abs File -> RIO EnvConfig () + generateDbIfNeeded hooglePath = do + databaseExists <- checkDatabaseExists + unless (databaseExists && not rebuild) $ + if setup || rebuild + then do + prettyWarnL $ + if rebuild + then + [ flow "Rebuilding database ..." ] + else + [ flow "No Hoogle database yet. Automatically building \ + \Haddock documentation and Hoogle database (use" + , style Shell "--no-setup" + , flow "to disable) ..." + ] + buildHaddocks + prettyInfoS "Built Haddock documentation." + generateDb hooglePath + prettyInfoS "Generated Hoogle database." 
+ else prettyThrowIO HoogleDatabaseNotFound + + generateDb :: Path Abs File -> RIO EnvConfig () + generateDb hooglePath = do + dir <- hoogleRoot + createDirIfMissing True dir + runHoogle hooglePath ["generate", "--local"] + + buildHaddocks :: RIO EnvConfig () + buildHaddocks = do + config <- view configL + runRIO config $ -- a bit weird that we have to drop down like this + catch (withDefaultEnvConfig $ Stack.Build.build Nothing) + (\(_ :: ExitCode) -> pure ()) + + hooglePackageName = mkPackageName "hoogle" + hoogleMinVersion = mkVersion [5, 0] + hoogleMinIdent = + PackageIdentifier hooglePackageName hoogleMinVersion + + installHoogle :: RIO EnvConfig (Path Abs File) + installHoogle = requiringHoogle Muted $ do + Stack.Build.build Nothing + mhooglePath' <- findExecutable "hoogle" + case mhooglePath' of + Right hooglePath -> parseAbsFile hooglePath + Left _ -> throwIO HoogleOnPathNotFoundBug + + requiringHoogle :: Muted -> RIO EnvConfig x -> RIO EnvConfig x + requiringHoogle muted f = do + hoogleTarget <- do + sourceMap <- view $ sourceMapL . to (.deps) + case Map.lookup hooglePackageName sourceMap of + Just hoogleDep -> + case hoogleDep.location of + PLImmutable pli -> T.pack . packageIdentifierString <$> - restrictMinHoogleVersion muted hoogleIdent - config <- view configL - let boptsCLI = defaultBuildOptsCLI - { boptsCLITargets = [hoogleTarget] - } - runRIO config $ withEnvConfig NeedTargets boptsCLI f - restrictMinHoogleVersion - :: HasLogFunc env - => Muted -> PackageIdentifier -> RIO env PackageIdentifier - restrictMinHoogleVersion muted ident = do - if ident < hoogleMinIdent + restrictMinHoogleVersion muted (packageLocationIdent pli) + plm@(PLMutable _) -> + T.pack . packageIdentifierString . package . 
packageDescription + <$> loadCabalFile (Just stackProgName') plm + Nothing -> do + -- not muted because this should happen only once + prettyWarnS + "No hoogle version was found, trying to install the latest version" + mpir <- getLatestHackageVersion YesRequireHackageIndex hooglePackageName UsePreferredVersions + let hoogleIdent = case mpir of + Nothing -> hoogleMinIdent + Just (PackageIdentifierRevision _ ver _) -> + PackageIdentifier hooglePackageName ver + T.pack . packageIdentifierString <$> + restrictMinHoogleVersion muted hoogleIdent + config <- view configL + let boptsCLI = defaultBuildOptsCLI + { targetsCLI = [hoogleTarget] } + runRIO config $ withEnvConfig NeedTargets boptsCLI f + + restrictMinHoogleVersion :: + HasLogFunc env + => Muted + -> PackageIdentifier + -> RIO env PackageIdentifier + restrictMinHoogleVersion muted ident = + if ident < hoogleMinIdent then do - muteableLog LevelWarn muted $ - "Minimum " <> - fromString (packageIdentifierString hoogleMinIdent) <> - " is not in your index. Installing the minimum version." - pure hoogleMinIdent + muteableLog LevelWarn muted $ + "Minimum " <> + fromString (packageIdentifierString hoogleMinIdent) <> + " is not in your index. Installing the minimum version." + pure hoogleMinIdent else do - muteableLog LevelInfo muted $ - "Minimum version is " <> - fromString (packageIdentifierString hoogleMinIdent) <> - ". Found acceptable " <> - fromString (packageIdentifierString ident) <> - " in your index, requiring its installation." 
- pure ident - muteableLog :: HasLogFunc env => LogLevel -> Muted -> Utf8Builder -> RIO env () - muteableLog logLevel muted msg = - case muted of - Muted -> pure () - NotMuted -> logGeneric "" logLevel msg - runHoogle :: Path Abs File -> [String] -> RIO EnvConfig () - runHoogle hooglePath hoogleArgs = do - config <- view configL - menv <- liftIO $ configProcessContextSettings config envSettings - dbpath <- hoogleDatabasePath - let databaseArg = ["--database=" ++ toFilePath dbpath] - withProcessContext menv $ proc - (toFilePath hooglePath) - (hoogleArgs ++ databaseArg) - runProcess_ - bail :: RIO EnvConfig a - bail = exitWith (ExitFailure (-1)) - checkDatabaseExists = do - path <- hoogleDatabasePath - liftIO (doesFileExist path) - ensureHoogleInPath :: RIO EnvConfig (Path Abs File) - ensureHoogleInPath = do - config <- view configL - menv <- liftIO $ configProcessContextSettings config envSettings - mhooglePath <- runRIO menv (findExecutable "hoogle") <> - requiringHoogle NotMuted (findExecutable "hoogle") - eres <- case mhooglePath of - Left _ -> return $ Left "Hoogle isn't installed." - Right hooglePath -> do - result <- withProcessContext menv - $ proc hooglePath ["--numeric-version"] - $ tryAny . fmap fst . readProcess_ - let unexpectedResult got = Left $ T.concat - [ "'" - , T.pack hooglePath - , " --numeric-version' did not respond with expected value. Got: " - , got - ] - return $ case result of - Left err -> unexpectedResult $ T.pack (show err) - Right bs -> case parseVersion (takeWhile (not . isSpace) (BL8.unpack bs)) of - Nothing -> unexpectedResult $ T.pack (BL8.unpack bs) - Just ver - | ver >= hoogleMinVersion -> Right hooglePath - | otherwise -> Left $ T.concat - [ "Installed Hoogle is too old, " - , T.pack hooglePath - , " is version " - , T.pack $ versionString ver - , " but >= 5.0 is required." 
- ] - case eres of - Right hooglePath -> parseAbsFile hooglePath - Left err - | setup -> do - logWarn $ display err <> " Automatically installing (use --no-setup to disable) ..." - installHoogle - | otherwise -> do - logWarn $ display err <> " Not installing it due to --no-setup." - bail - envSettings = - EnvSettings - { esIncludeLocals = True - , esIncludeGhcPackagePath = True - , esStackExe = True - , esLocaleUtf8 = False - , esKeepGhcRts = False - } + muteableLog LevelInfo muted $ + "Minimum version is " <> + fromString (packageIdentifierString hoogleMinIdent) <> + ". Found acceptable " <> + fromString (packageIdentifierString ident) <> + " in your index, requiring its installation." + pure ident + muteableLog :: + HasLogFunc env + => LogLevel + -> Muted + -> Utf8Builder + -> RIO env () + muteableLog logLevel muted msg = + case muted of + Muted -> pure () + NotMuted -> logGeneric "" logLevel msg + + runHoogle :: Path Abs File -> [String] -> RIO EnvConfig () + runHoogle hooglePath hoogleArgs = do + config <- view configL + menv <- liftIO $ config.processContextSettings envSettings + dbpath <- hoogleDatabasePath + let databaseArg = ["--database=" ++ toFilePath dbpath] + withProcessContext menv $ proc + (toFilePath hooglePath) + (hoogleArgs ++ databaseArg) + runProcess_ + + checkDatabaseExists = do + path <- hoogleDatabasePath + liftIO (doesFileExist path) + + ensureHoogleInPath :: RIO EnvConfig (Path Abs File) + ensureHoogleInPath = do + config <- view configL + menv <- liftIO $ config.processContextSettings envSettings + mHooglePath <- eitherToMaybe <$> runRIO menv (findExecutable "hoogle") + let mHooglePath' = + eitherToMaybe <$> requiringHoogle NotMuted (findExecutable "hoogle") + eres <- maybe mHooglePath' (pure . Just) mHooglePath >>= \case + Nothing -> pure $ Left (flow "Hoogle isn't installed.") + Just hooglePath -> do + result <- withProcessContext menv + $ proc hooglePath ["--numeric-version"] + $ tryAny . fmap fst . 
readProcess_ + let unexpectedResult got = Left $ + fillSep + [ style Shell (fromString hooglePath) + , style Shell "--numeric-version" + , flow "did not respond with expected value. Got:" + ] + <> blankLine + <> got + pure $ case result of + Left err -> unexpectedResult $ string (displayException err) + Right bs -> + case parseVersion (takeWhile (not . isSpace) (BL8.unpack bs)) of + Nothing -> unexpectedResult $ fromString (BL8.unpack bs) + Just ver + | ver >= hoogleMinVersion -> Right hooglePath + | otherwise -> Left $ + fillSep + [ flow "Installed Hoogle is too old, " + , style Shell (fromString hooglePath) + , flow "is version" + , fromString (versionString ver) + , flow "but >= 5.0 is required." + ] + case eres of + Right hooglePath -> parseAbsFile hooglePath + Left err + | setup -> do + prettyWarnL + [ err + , flow "Automatically installing (use" + , style Shell "--no-setup" + , flow "to disable) ..." + ] + installHoogle + | otherwise -> prettyThrowIO $ HoogleNotFound err + + envSettings = EnvSettings + { includeLocals = True + , includeGhcPackagePath = True + , stackExe = True + , localeUtf8 = False + , keepGhcRts = False + } diff --git a/src/Stack/IDE.hs b/src/Stack/IDE.hs index bdad682572..9de8fea9f6 100644 --- a/src/Stack/IDE.hs +++ b/src/Stack/IDE.hs @@ -1,59 +1,95 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE ScopedTypeVariables #-} - --- | Functions for IDEs. +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.IDE +Description : Types and functions related to Stack's @ide@ command. +License : BSD-3-Clause + +Types and functions related to Stack's @ide@ command. +-} + module Stack.IDE - ( OutputStream(..) - , ListPackagesCmd(..) - , listPackages - , listTargets - ) where + ( OutputStream (..) + , ListPackagesCmd (..) 
+ , idePackagesCmd + , ideTargetsCmd + , listPackages + , listTargets + ) where import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Text as T +import Data.Tuple ( swap ) import Stack.Prelude -import Stack.Types.Config +import Stack.Runners + ( ShouldReexec (..), withBuildConfig, withConfig ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..) ) +import Stack.Types.IdeOpts ( ListPackagesCmd (..), OutputStream (..) ) import Stack.Types.NamedComponent + ( NamedComponent, isCBench, isCExe, isCTest + , renderPkgComponent + ) +import Stack.Types.Runner ( Runner ) import Stack.Types.SourceMap -import System.IO (putStrLn) + ( ProjectPackage (..), SMWanted (..), ppComponentsMaybe ) +import System.IO ( putStrLn ) + +-- | Function underlying the @stack ide packages@ command. List packages in the +-- project. +idePackagesCmd :: (OutputStream, ListPackagesCmd) -> RIO Runner () +idePackagesCmd = + withConfig NoReexec . withBuildConfig . uncurry listPackages -data OutputStream = OutputLogInfo - | OutputStdout +compTypes :: (Bool, Bool, Bool) -> NamedComponent -> Bool +compTypes (False, False, False) = const True +compTypes (exe, test, bench) = + \x -> (exe && isCExe x) || (test && isCTest x) || (bench && isCBench x) -data ListPackagesCmd = ListPackageNames - | ListPackageCabalFiles +-- | Function underlying the @stack ide targets@ command. List targets in the +-- project. +ideTargetsCmd :: ((Bool, Bool, Bool), OutputStream) -> RIO Runner () +ideTargetsCmd = withConfig NoReexec . + withBuildConfig . uncurry listTargets . fmap compTypes . swap -outputFunc :: HasLogFunc env => OutputStream -> String -> RIO env () -outputFunc OutputLogInfo = logInfo . fromString +outputFunc :: HasTerm env => OutputStream -> String -> RIO env () +outputFunc OutputLogInfo = prettyInfo . fromString outputFunc OutputStdout = liftIO . putStrLn -- | List the packages inside the current project. 
-listPackages :: HasBuildConfig env => OutputStream -> ListPackagesCmd -> RIO env () +listPackages :: + HasBuildConfig env + => OutputStream + -> ListPackagesCmd + -> RIO env () listPackages stream flag = do - packages <- view $ buildConfigL.to (smwProject . bcSMWanted) + packages <- view $ buildConfigL . to (.smWanted.project) let strs = case flag of ListPackageNames -> map packageNameString (Map.keys packages) ListPackageCabalFiles -> - map (toFilePath . ppCabalFP) (Map.elems packages) + map (toFilePath . (.cabalFP)) (Map.elems packages) mapM_ (outputFunc stream) strs -- | List the targets in the current project. -listTargets :: forall env. HasBuildConfig env => OutputStream -> RIO env () -listTargets stream = do - packages <- view $ buildConfigL.to (smwProject . bcSMWanted) +listTargets :: + forall env. HasBuildConfig env + => OutputStream + -> (NamedComponent -> Bool) + -> RIO env () +listTargets stream isCompType = do + packages <- view $ buildConfigL . to (.smWanted.project) pairs <- concat <$> Map.traverseWithKey toNameAndComponent packages outputFunc stream $ T.unpack $ T.intercalate "\n" $ map renderPkgComponent pairs - where - toNameAndComponent - :: PackageName - -> ProjectPackage - -> RIO env [(PackageName, NamedComponent)] - toNameAndComponent pkgName' = - fmap (map (pkgName', ) . Set.toList) . ppComponents + where + toNameAndComponent :: + PackageName + -> ProjectPackage + -> RIO env [(PackageName, NamedComponent)] + toNameAndComponent pkgName' = + fmap (map (pkgName',) . Set.toList) . 
ppComponentsMaybe (\x -> + if isCompType x then Just x else Nothing) diff --git a/src/Stack/Init.hs b/src/Stack/Init.hs index e483fd04af..40b26659a6 100644 --- a/src/Stack/Init.hs +++ b/src/Stack/Init.hs @@ -1,582 +1,753 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeFamilies #-} -module Stack.Init - ( initProject - , InitOpts (..) - ) where -import Stack.Prelude -import qualified Data.ByteString.Builder as B -import qualified Data.ByteString.Char8 as BC -import qualified Data.Foldable as F -import qualified Data.HashMap.Strict as HM -import qualified Data.IntMap as IntMap -import Data.List.Extra (groupSortOn) -import qualified Data.List.NonEmpty as NonEmpty -import qualified Data.Map.Strict as Map -import qualified Data.Set as Set -import qualified Data.Text as T -import qualified Data.Text.Normalize as T (normalize , NormalizationMode(NFC)) -import qualified Data.Yaml as Yaml +{-| +Module : Stack.Init +Description : Types and functions related to Stack's @init@ command. +License : BSD-3-Clause + +Types and functions related to Stack's @init@ command. +-} + +module Stack.Init + ( InitOpts (..) 
+ , initCmd + , initProject + ) where + +import qualified Data.Aeson.KeyMap as KeyMap +import qualified Data.ByteString.Builder as B +import qualified Data.ByteString.Char8 as BC +import qualified Data.Foldable as F +import qualified Data.IntMap as IntMap +import Data.List.Extra ( groupSortOn ) +import Data.List.NonEmpty.Extra ( minimumBy1 ) +import qualified Data.Map.Strict as Map +import qualified Data.Set as Set +import qualified Data.Text as T +import qualified Data.Yaml as Yaml import qualified Distribution.PackageDescription as C -import qualified Distribution.Text as C -import qualified Distribution.Version as C +import qualified Distribution.Text as C +import qualified Distribution.Version as C import Path -import Path.Extra (toFilePathNoTrailingSep) -import Path.Find (findFiles) -import Path.IO hiding (findFiles) -import qualified Paths_stack as Meta -import qualified RIO.FilePath as FP -import RIO.List ((\\), intercalate, intersperse, - isSuffixOf, isPrefixOf) -import RIO.List.Partial (minimumBy) + ( PathException, (), dirname, filename, parent + , stripProperPrefix + ) +import Path.Extra ( toFilePathNoTrailingSep ) +import Path.Find ( findFiles ) +import Path.IO + ( AnyPath, RelPath, doesFileExist, getCurrentDir + , makeRelativeToCurrentDir, resolveDir' + ) +import qualified RIO.FilePath as FP +import RIO.List ( (\\), intercalate, isSuffixOf, isPrefixOf ) +import RIO.NonEmpty ( nonEmpty ) +import qualified RIO.NonEmpty as NE import Stack.BuildPlan -import Stack.Config (getSnapshots, - makeConcreteResolver) -import Stack.Constants + ( BuildPlanCheck (..), DepError (..), checkSnapBuildPlan + , removeSrcPkgDefaultFlags, selectBestSnapshot + ) +import Stack.Config ( getSnapshots, makeConcreteSnapshot ) +import Stack.Constants ( stackDotYaml, stackProgName' ) +import Stack.Prelude +import Stack.Runners + ( ShouldReexec (..), withConfig, withGlobalProject ) import Stack.SourceMap -import Stack.Types.Config -import Stack.Types.Resolver -import 
Stack.Types.Version - --- | Generate stack.yaml -initProject - :: (HasConfig env, HasGHCVariant env) - => Path Abs Dir - -> InitOpts - -> Maybe AbstractResolver - -> RIO env () -initProject currDir initOpts mresolver = do - let dest = currDir stackDotYaml - - reldest <- toFilePath `liftM` makeRelativeToCurrentDir dest - - exists <- doesFileExist dest - when (not (forceOverwrite initOpts) && exists) $ - throwString - ("Error: Stack configuration file " <> reldest <> - " exists, use '--force' to overwrite it.") - - dirs <- mapM (resolveDir' . T.unpack) (searchDirs initOpts) - let find = findCabalDirs (includeSubDirs initOpts) - dirs' = if null dirs then [currDir] else dirs - logInfo "Looking for .cabal or package.yaml files to use to init the project." - cabaldirs <- Set.toList . Set.unions <$> mapM find dirs' - (bundle, dupPkgs) <- cabalPackagesCheck cabaldirs Nothing - let makeRelDir dir = - case stripProperPrefix currDir dir of - Nothing - | currDir == dir -> "." - | otherwise -> assert False $ toFilePathNoTrailingSep dir - Just rel -> toFilePathNoTrailingSep rel - fpToPkgDir fp = - let absDir = parent fp - in ResolvedPath (RelFilePath $ T.pack $ makeRelDir absDir) absDir - pkgDirs = Map.map (fpToPkgDir . fst) bundle - (snapshotLoc, flags, extraDeps, rbundle) <- getDefaultResolver initOpts mresolver pkgDirs - - let ignored = Map.difference bundle rbundle - dupPkgMsg - | dupPkgs /= [] = - "Warning (added by new or init): Some packages were found to \ - \have names conflicting with others and have been commented \ - \out in the packages section.\n" - | otherwise = "" - - missingPkgMsg - | Map.size ignored > 0 = - "Warning (added by new or init): Some packages were found to \ - \be incompatible with the resolver and have been left commented \ - \out in the packages section.\n" - | otherwise = "" - - extraDepMsg - | Map.size extraDeps > 0 = - "Warning (added by new or init): Specified resolver could not \ - \satisfy all dependencies. 
Some external packages have been \ - \added as dependencies.\n" - | otherwise = "" - makeUserMsg msgs = - let msg = concat msgs - in if msg /= "" then - msg <> "You can omit this message by removing it from \ - \stack.yaml\n" - else "" - - userMsg = makeUserMsg [dupPkgMsg, missingPkgMsg, extraDepMsg] - - gpdByDir = Map.fromList [ (parent fp, gpd) | (fp, gpd) <- Map.elems bundle] - gpds = Map.elems $ - Map.mapMaybe (flip Map.lookup gpdByDir . resolvedAbsolute) rbundle - - deps <- for (Map.toList extraDeps) $ \(n, v) -> - PLImmutable . cplComplete <$> - completePackageLocation (RPLIHackage (PackageIdentifierRevision n v CFILatest) Nothing) - - let p = Project - { projectUserMsg = if userMsg == "" then Nothing else Just userMsg - , projectPackages = resolvedRelative <$> Map.elems rbundle - , projectDependencies = map toRawPL deps - , projectFlags = removeSrcPkgDefaultFlags gpds flags - , projectResolver = snapshotLoc - , projectCompiler = Nothing - , projectExtraPackageDBs = [] - , projectCurator = Nothing - , projectDropPackages = mempty - } - - makeRel = fmap toFilePath . makeRelativeToCurrentDir - - indent t = T.unlines $ fmap (" " <>) (T.lines t) - - logInfo $ "Initialising configuration using resolver: " <> display snapshotLoc - logInfo $ "Total number of user packages considered: " - <> display (Map.size bundle + length dupPkgs) - - when (dupPkgs /= []) $ do - logWarn $ "Warning! Ignoring " - <> displayShow (length dupPkgs) - <> " duplicate packages:" - rels <- mapM makeRel dupPkgs - logWarn $ display $ indent $ showItems rels - - when (Map.size ignored > 0) $ do - logWarn $ "Warning! Ignoring " - <> displayShow (Map.size ignored) - <> " packages due to dependency conflicts:" - rels <- mapM makeRel (Map.elems (fmap fst ignored)) - logWarn $ display $ indent $ showItems rels - - when (Map.size extraDeps > 0) $ do - logWarn $ "Warning! " <> displayShow (Map.size extraDeps) - <> " external dependencies were added." 
- logInfo $ - (if exists then "Overwriting existing configuration file: " - else "Writing configuration to file: ") - <> fromString reldest - writeBinaryFileAtomic dest - $ renderStackYaml p - (Map.elems $ fmap (makeRelDir . parent . fst) ignored) - (map (makeRelDir . parent) dupPkgs) - logInfo "All done." + ( SnapshotCandidate, loadProjectSnapshotCandidate ) +import Stack.Types.Config ( HasConfig, Config (..), configL ) +import Stack.Types.GHCVariant ( HasGHCVariant ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.Project ( Project (..) ) +import Stack.Types.Runner (Runner, globalOptsL ) +import Stack.Types.Snapshot ( AbstractSnapshot, Snapshots (..) ) +import Stack.Types.Version ( stackMajorVersion ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Init" module. +data InitException + = NoPackagesToIgnoreBug + deriving Show + +instance Exception InitException where + displayException NoPackagesToIgnoreBug = bugReport "[S-2747]" + "No packages to ignore." + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Init" module. +data InitPrettyException + = SnapshotDownloadFailure SomeException + | ConfigFileAlreadyExists FilePath + | PackageNameInvalid [(Path Abs File, PackageName)] + | NoMatchingSnapshot !(NonEmpty SnapName) + | SnapshotMismatch !RawSnapshotLocation String + | SnapshotPartial !RawSnapshotLocation !String + deriving Show + +instance Pretty InitPrettyException where + pretty (ConfigFileAlreadyExists reldest) = + "[S-8009]" + <> line + <> flow "Stack declined to create a project-level configuration file." + <> blankLine + <> fillSep + [ flow "The file" + , style File (fromString reldest) + , "already exists. To overwrite it, pass the flag" + , style Shell "--force" <> "." 
+ ] + pretty (PackageNameInvalid rels) = + "[S-5267]" + <> line + <> flow "Stack did not create project-level configuration, as (like \ + \Hackage) it requires a Cabal file name to match the package it \ + \defines." + <> blankLine + <> flow "Please rename the following Cabal files:" + <> line + <> bulletedList + ( map + ( \(fp, name) -> fillSep + [ pretty fp + , "as" + , style + File + (fromPackageName name <> ".cabal") + ] + ) + rels + ) + pretty (SnapshotDownloadFailure e) = + "[S-8332]" + <> line + <> flow "Stack failed to create project-level configuration file, as it \ + \was unable to download the index of available snapshots." + <> blankLine + <> fillSep + [ flow "This sometimes happens because Certificate Authorities are \ + \missing on your system. You can try the Stack command again \ + \or manually create the configuration file. For help about the \ + \content of Stack's configuration files, see (for the most \ + \recent release of Stack)" + , style + Url + "http://docs.haskellstack.org/en/stable/configure/yaml/" + <> "." + ] + <> blankLine + <> flow "While downloading the snapshot index, Stack encountered the \ + \following error:" + <> blankLine + <> string (displayException e) + pretty (NoMatchingSnapshot names) = + "[S-1833]" + <> line + <> flow "None of the following snapshots provides a compiler matching \ + \your package(s):" + <> line + <> bulletedList (map (fromString . show) (NE.toList names)) + <> blankLine + <> resolveOptions + pretty (SnapshotMismatch snapshot errDesc) = + "[S-6395]" + <> line + <> fillSep + [ "Snapshot" + , style Url (pretty $ PrettyRawSnapshotLocation snapshot) + , flow "does not have a matching compiler to build some or all of \ + \your package(s)." 
+ ] + <> blankLine + <> indent 4 (string errDesc) + <> line + <> resolveOptions + pretty (SnapshotPartial snapshot errDesc) = + "[S-2422]" + <> line + <> fillSep + [ "Snapshot" + , style Url (pretty $ PrettyRawSnapshotLocation snapshot) + , flow "does not have all the packages to match your requirements." + ] + <> blankLine + <> indent 4 (string errDesc) + <> line + <> resolveOptions + +resolveOptions :: StyleDoc +resolveOptions = + flow "This may be resolved by:" + <> line + <> bulletedList + [ fillSep + [ "Using" + , style Shell "--omit-packages" + , "to exclude mismatching package(s)." + ] + , fillSep + [ "Using" + , style Shell "--snapshot" + , "to specify a matching snapshot." + ] + ] + +instance Exception InitPrettyException + +-- | Type representing command line options for the @stack init@ command. +data InitOpts = InitOpts + { searchDirs :: ![T.Text] + -- ^ List of sub directories to search for .cabal files + , omitPackages :: Bool + -- ^ Exclude conflicting or incompatible user packages + , forceOverwrite :: Bool + -- ^ Overwrite existing stack.yaml + , includeSubDirs :: Bool + -- ^ If True, include all .cabal files found in any sub directories + } + +-- | Function underlying the @stack init@ command. Project initialization. +initCmd :: InitOpts -> RIO Runner () +initCmd initOpts = do + pwd <- getCurrentDir + go <- view globalOptsL + withGlobalProject $ + withConfig YesReexec (initProject pwd initOpts go.snapshot) + +-- | Generate a @stack.yaml@ file. +initProject :: + (HasConfig env, HasGHCVariant env) + => Path Abs Dir + -> InitOpts + -> Maybe AbstractSnapshot + -> RIO env () +initProject currDir initOpts mASnapshot = do + let dest = currDir stackDotYaml + reldest <- toFilePath <$> makeRelativeToCurrentDir dest + exists <- doesFileExist dest + when (not initOpts.forceOverwrite && exists) $ + prettyThrowIO $ ConfigFileAlreadyExists reldest + dirs <- mapM (resolveDir' . 
T.unpack) initOpts.searchDirs + let find = findCabalDirs initOpts.includeSubDirs + dirs' = if null dirs then [currDir] else dirs + prettyInfo $ + fillSep + [ flow "Looking for Cabal or" + , style File "package.yaml" + , flow "files to use to initialise Stack's project-level YAML \ + \configuration file." + ] + <> line + cabaldirs <- Set.toList . Set.unions <$> mapM find dirs' + (bundle, dupPkgs) <- cabalPackagesCheck cabaldirs + let makeRelDir dir = + case stripProperPrefix currDir dir of + Nothing + | currDir == dir -> "." + | otherwise -> assert False $ toFilePathNoTrailingSep dir + Just rel -> toFilePathNoTrailingSep rel + fpToPkgDir fp = + let absDir = parent fp + in ResolvedPath (RelFilePath $ T.pack $ makeRelDir absDir) absDir + pkgDirs = Map.map (fpToPkgDir . fst) bundle + defaultInitSnapshot <- view $ configL . to (.defaultInitSnapshot) + let mASnapshot' = getFirst $ First mASnapshot <> defaultInitSnapshot + (snapshot, flags, extraDeps, rbundle) <- + getDefaultSnapshot initOpts mASnapshot' pkgDirs + let ignored = Map.difference bundle rbundle + dupPkgMsg + | dupPkgs /= [] = Just + "Warning (added by new or init): Some packages were found to have \ + \names\n\ + \conflicting with others and have been commented out in the \ + \packages section." + | otherwise = Nothing + missingPkgMsg + | Map.size ignored > 0 = Just + "Warning (added by new or init): Some packages were found to be \ + \incompatible\n\ + \with the snapshot and have been left commented out in the \ + \packages section." + | otherwise = Nothing + extraDepMsg + | Map.size extraDeps > 0 = Just + "Warning (added by new or init): Specified snapshot could not \ + \satisfy all\n\ + \dependencies. Some external packages have been added as \ + \dependencies." + | otherwise = Nothing + removalMsg = + "You can omit this message by removing it from the project-level \ + \configuration\n\ + \file." 
+ makeUserMsg mMsgs = + let msgs = catMaybes mMsgs + in if null msgs + then Nothing + else Just $ intercalate "\n\n" (msgs <> [removalMsg]) <> "\n" + userMsg = makeUserMsg [dupPkgMsg, missingPkgMsg, extraDepMsg] + gpdByDir = + Map.fromList [ (parent fp, gpd) | (fp, gpd) <- Map.elems bundle] + gpds = Map.elems $ + Map.mapMaybe (flip Map.lookup gpdByDir . resolvedAbsolute) rbundle + deps <- for (Map.toList extraDeps) $ \(n, v) -> + PLImmutable . cplComplete <$> + completePackageLocation + (RPLIHackage (PackageIdentifierRevision n v CFILatest) Nothing) + let project = Project + { userMsg + , packages = resolvedRelative <$> Map.elems rbundle + , extraDeps = map toRawPL deps + , flagsByPkg = removeSrcPkgDefaultFlags gpds flags + , snapshot + , compiler = Nothing + , extraPackageDBs = [] + , curator = Nothing + , dropPackages = mempty + } + makeRel = fmap toFilePath . makeRelativeToCurrentDir + prettyInfoL + [ flow "Initialising Stack's project-level configuration file using \ + \snapshot" + , pretty (PrettyRawSnapshotLocation snapshot) <> "." + ] + prettyInfoL $ + let n = Map.size bundle + length dupPkgs + in [ "Considered" + , fromString $ show n + , "user" + , if n == 1 then "package." else "packages." + ] + when (dupPkgs /= []) $ do + rels <- mapM makeRel dupPkgs + prettyWarn $ + fillSep + [ flow "Ignoring these" + , fromString $ show (length dupPkgs) + , flow "duplicate packages:" + ] + <> line + <> bulletedList (map (style File . fromString) rels) + when (Map.size ignored > 0) $ do + rels <- mapM makeRel (Map.elems (fmap fst ignored)) + prettyWarn $ + fillSep + [ flow "Ignoring these" + , fromString $ show (Map.size ignored) + , flow "packages due to dependency conflicts:" + ] + <> line + <> bulletedList (map (style File . fromString) rels) + when (Map.size extraDeps > 0) $ + prettyWarnL + [ fromString $ show (Map.size extraDeps) + , flow "external dependencies were added." 
+ ] + prettyInfoL + [ flow $ if exists + then "Overwriting existing configuration file" + else "Writing configuration to" + , style File (fromString reldest) <> "." + ] + writeBinaryFileAtomic dest $ renderStackYaml project + (Map.elems $ fmap (makeRelDir . parent . fst) ignored) + (map (makeRelDir . parent) dupPkgs) + prettyInfoS + "Stack's project-level configuration file has been initialised." -- | Render a stack.yaml file with comments, see: -- https://github.com/commercialhaskell/stack/issues/226 renderStackYaml :: Project -> [FilePath] -> [FilePath] -> B.Builder renderStackYaml p ignoredPackages dupPackages = - case Yaml.toJSON p of - Yaml.Object o -> renderObject o - _ -> assert False $ B.byteString $ Yaml.encode p - where - renderObject o = - B.byteString headerHelp - <> B.byteString "\n\n" - <> F.foldMap (goComment o) comments - <> goOthers (o `HM.difference` HM.fromList comments) - <> B.byteString footerHelp - <> "\n" - - goComment o (name, comment) = - case (convert <$> HM.lookup name o) <|> nonPresentValue name of - Nothing -> assert (name == "user-message") mempty - Just v -> - B.byteString comment <> - B.byteString "\n" <> - v <> - if name == "packages" then commentedPackages else "" <> - B.byteString "\n" - where - convert v = B.byteString (Yaml.encode $ Yaml.object [(name, v)]) - - -- Some fields in stack.yaml are optional and may not be - -- generated. For these, we provided commented out dummy - -- values to go along with the comments. - nonPresentValue "extra-deps" = Just "# extra-deps: []\n" - nonPresentValue "flags" = Just "# flags: {}\n" - nonPresentValue "extra-package-dbs" = Just "# extra-package-dbs: []\n" - nonPresentValue _ = Nothing - - commentLine l | null l = "#" - | otherwise = "# " ++ l - commentHelp = BC.pack . intercalate "\n" . 
map commentLine - commentedPackages = - let ignoredComment = commentHelp - [ "The following packages have been ignored due to incompatibility with the" - , "resolver compiler, dependency conflicts with other packages" - , "or unsatisfied dependencies." - ] - dupComment = commentHelp - [ "The following packages have been ignored due to package name conflict " - , "with other packages." - ] - in commentPackages ignoredComment ignoredPackages - <> commentPackages dupComment dupPackages - - commentPackages comment pkgs - | pkgs /= [] = - B.byteString comment - <> B.byteString "\n" - <> B.byteString (BC.pack $ concat - $ map (\x -> "#- " ++ x ++ "\n") pkgs ++ ["\n"]) - | otherwise = "" - - goOthers o - | HM.null o = mempty - | otherwise = assert False $ B.byteString $ Yaml.encode o - - -- Per Section Help - comments = - [ ("user-message" , userMsgHelp) - , ("resolver" , resolverHelp) - , ("packages" , packageHelp) - , ("extra-deps" , extraDepsHelp) - , ("flags" , "# Override default flag values for local packages and extra-deps") - , ("extra-package-dbs", "# Extra package databases containing global packages") - ] - - -- Help strings - headerHelp = commentHelp - [ "This file was automatically generated by 'stack init'" - , "" - , "Some commonly used options have been documented as comments in this file." - , "For advanced use and comprehensive documentation of the format, please see:" - , "https://docs.haskellstack.org/en/stable/yaml_configuration/" - ] - - resolverHelp = commentHelp - [ "Resolver to choose a 'specific' stackage snapshot or a compiler version." - , "A snapshot resolver dictates the compiler version and the set of packages" - , "to be used for project dependencies. For example:" - , "" - , "resolver: lts-3.5" - , "resolver: nightly-2015-09-21" - , "resolver: ghc-7.10.2" - , "" - , "The location of a snapshot can be provided as a file or url. Stack assumes" - , "a snapshot provided as a file might change, whereas a url resource does not." 
- , "" - , "resolver: ./custom-snapshot.yaml" - , "resolver: https://example.com/snapshots/2018-01-01.yaml" - ] - - userMsgHelp = commentHelp - [ "A warning or info to be displayed to the user on config load." ] - - packageHelp = commentHelp - [ "User packages to be built." - , "Various formats can be used as shown in the example below." - , "" - , "packages:" - , "- some-directory" - , "- https://example.com/foo/bar/baz-0.0.2.tar.gz" - , " subdirs:" - , " - auto-update" - , " - wai" - ] - - extraDepsHelp = commentHelp - [ "Dependency packages to be pulled from upstream that are not in the resolver." - , "These entries can reference officially published versions as well as" - , "forks / in-progress versions pinned to a git hash. For example:" - , "" - , "extra-deps:" - , "- acme-missiles-0.3" - , "- git: https://github.com/commercialhaskell/stack.git" - , " commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a" - , "" - ] - - footerHelp = - let major = toMajorVersion $ C.mkVersion' Meta.version - in commentHelp - [ "Control whether we use the GHC we find on the path" - , "system-ghc: true" - , "" - , "Require a specific version of stack, using version ranges" - , "require-stack-version: -any # Default" - , "require-stack-version: \"" - ++ C.display (C.orLaterVersion major) ++ "\"" - , "" - , "Override the architecture used by stack, especially useful on Windows" - , "arch: i386" - , "arch: x86_64" - , "" - , "Extra directories used by stack for building" - , "extra-include-dirs: [/path/to/dir]" - , "extra-lib-dirs: [/path/to/dir]" - , "" - , "Allow a newer minor version of GHC than the snapshot specifies" - , "compiler-check: newer-minor" - ] + case Yaml.toJSON p of + Yaml.Object o -> renderObject o + _ -> assert False $ B.byteString $ Yaml.encode p + where + renderObject o = + B.byteString headerHelp + <> B.byteString "\n\n" + <> F.foldMap (goComment o) comments + <> goOthers (o `KeyMap.difference` KeyMap.fromList comments) + <> B.byteString footerHelp + <> "\n" + 
goComment o (name, comment) = + case (convert <$> KeyMap.lookup name o) <|> nonPresentValue name of + Nothing -> assert (name == "user-message") mempty + Just v -> + B.byteString comment <> + B.byteString "\n" <> + v <> + if name == "packages" then commentedPackages else "" <> + B.byteString "\n" + where + convert v = B.byteString (Yaml.encode $ Yaml.object [(name, v)]) + + -- Some keys in stack.yaml are optional and may not be generated. For these, + -- we provided commented out dummy values to go along with the comments. + nonPresentValue "extra-deps" = Just "# extra-deps: []\n" + nonPresentValue "flags" = Just "# flags: {}\n" + nonPresentValue "extra-package-dbs" = Just "# extra-package-dbs: []\n" + nonPresentValue _ = Nothing + + commentLine l | null l = "#" + | otherwise = "# " ++ l + commentHelp = BC.pack . intercalate "\n" . map commentLine + commentedPackages = + let ignoredComment = commentHelp + [ "The following packages have been ignored due to incompatibility with the" + , "snapshot compiler, dependency conflicts with other packages" + , "or unsatisfied dependencies." + ] + dupComment = commentHelp + [ "The following packages have been ignored due to package name conflict " + , "with other packages." 
+ ] + in commentPackages ignoredComment ignoredPackages + <> commentPackages dupComment dupPackages + commentPackages comment pkgs + | pkgs /= [] = + B.byteString comment + <> B.byteString "\n" + <> B.byteString (BC.pack $ concat + $ map (\x -> "#- " ++ x ++ "\n") pkgs ++ ["\n"]) + | otherwise = "" + goOthers o + | KeyMap.null o = mempty + | otherwise = assert False $ B.byteString $ Yaml.encode o + -- Per Section Help + comments = + [ ("user-message" , userMsgHelp) + , ("snapshot" , snapshotHelp) + , ("packages" , packageHelp) + , ("extra-deps" , extraDepsHelp) + , ("flags" , "# Override default flag values for project packages and extra-deps") + , ("extra-package-dbs", "# Extra package databases containing global packages") + ] + -- Help strings + headerHelp = commentHelp + [ "This file was automatically generated by 'stack init'" + , "" + , "Some commonly used options have been documented as comments in this file." + , "For advanced use and comprehensive documentation of the format, please see:" + , "https://docs.haskellstack.org/en/stable/configure/yaml/" + ] + snapshotHelp = commentHelp + [ "A 'specific' Stackage snapshot or a compiler version." + , "A snapshot dictates the compiler version and the set of packages" + , "to be used for project dependencies. For example:" + , "" + , "snapshot: lts-24.37" + , "snapshot: nightly-2026-04-18" + , "snapshot: ghc-9.10.3" + , "" + , "The location of a snapshot can be provided as a file or url. Stack assumes" + , "a snapshot provided as a file might change, whereas a url resource does not." + , "" + , "snapshot: ./custom-snapshot.yaml" + , "snapshot: https://example.com/snapshots/2024-01-01.yaml" + ] + userMsgHelp = commentHelp + [ "A warning or info to be displayed to the user on config load." ] + packageHelp = commentHelp + [ "User packages to be built." + , "Various formats can be used as shown in the example below." 
+ , "" + , "packages:" + , "- some-directory" + , "- https://example.com/foo/bar/baz-0.0.2.tar.gz" + , " subdirs:" + , " - auto-update" + , " - wai" + ] + extraDepsHelp = commentHelp + [ "Dependency packages to be pulled from upstream that are not in the snapshot." + , "These entries can reference officially published versions as well as" + , "forks / in-progress versions pinned to a git hash. For example:" + , "" + , "extra-deps:" + , "- acme-missiles-0.3" + , "- git: https://github.com/commercialhaskell/stack.git" + , " commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a" + , "" + ] + footerHelp = commentHelp + [ "Control whether we use the GHC we find on the path" + , "system-ghc: true" + , "" + , "Require a specific version of Stack, using version ranges" + , "require-stack-version: -any # Default" + , "require-stack-version: \"" + ++ C.display (C.orLaterVersion stackMajorVersion) ++ "\"" + , "" + , "Override the architecture used by Stack, especially useful on Windows" + , "arch: i386" + , "arch: x86_64" + , "" + , "Extra directories used by Stack for building" + , "extra-include-dirs: [/path/to/dir]" + , "extra-lib-dirs: [/path/to/dir]" + , "" + , "Allow a newer minor version of GHC than the snapshot specifies" + , "compiler-check: newer-minor" + ] getSnapshots' :: HasConfig env => RIO env Snapshots -getSnapshots' = do - getSnapshots `catchAny` \e -> do - logError $ - "Unable to download snapshot list, and therefore could " <> - "not generate a stack.yaml file automatically" - logError $ - "This sometimes happens due to missing Certificate Authorities " <> - "on your system. For more information, see:" - logError "" - logError " https://github.com/commercialhaskell/stack/issues/234" - logError "" - logError "You can try again, or create your stack.yaml file by hand. 
See:" - logError "" - logError " http://docs.haskellstack.org/en/stable/yaml_configuration/" - logError "" - logError $ "Exception was: " <> displayShow e - throwString "" - --- | Get the default resolver value -getDefaultResolver - :: (HasConfig env, HasGHCVariant env) - => InitOpts - -> Maybe AbstractResolver - -> Map PackageName (ResolvedPath Dir) - -- ^ Src package name: cabal dir - -> RIO env - ( RawSnapshotLocation - , Map PackageName (Map FlagName Bool) - , Map PackageName Version - , Map PackageName (ResolvedPath Dir)) - -- ^ ( Resolver - -- , Flags for src packages and extra deps - -- , Extra dependencies - -- , Src packages actually considered) -getDefaultResolver initOpts mresolver pkgDirs = do - (candidate, loc) <- case mresolver of - Nothing -> selectSnapResolver - Just ar -> do - sl <- makeConcreteResolver ar - c <- loadProjectSnapshotCandidate sl NoPrintWarnings False - return (c, sl) - getWorkingResolverPlan initOpts pkgDirs candidate loc - where - -- TODO support selecting best across regular and custom snapshots - selectSnapResolver = do - snaps <- fmap getRecommendedSnapshots getSnapshots' - (c, l, r) <- selectBestSnapshot (Map.elems pkgDirs) snaps - case r of - BuildPlanCheckFail {} | not (omitPackages initOpts) - -> throwM (NoMatchingSnapshot snaps) - _ -> return (c, l) - -getWorkingResolverPlan - :: (HasConfig env, HasGHCVariant env) - => InitOpts - -> Map PackageName (ResolvedPath Dir) - -- ^ Src packages: cabal dir - -> SnapshotCandidate env - -> RawSnapshotLocation - -> RIO env - ( RawSnapshotLocation - , Map PackageName (Map FlagName Bool) - , Map PackageName Version - , Map PackageName (ResolvedPath Dir)) - -- ^ ( SnapshotDef - -- , Flags for src packages and extra deps - -- , Extra dependencies - -- , Src packages actually considered) -getWorkingResolverPlan initOpts pkgDirs0 snapCandidate snapLoc = do - logInfo $ "Selected resolver: " <> display snapLoc - go pkgDirs0 - where - go pkgDirs = do - eres <- checkBundleResolver initOpts 
snapLoc snapCandidate (Map.elems pkgDirs) - -- if some packages failed try again using the rest - case eres of - Right (f, edeps)-> return (snapLoc, f, edeps, pkgDirs) - Left ignored - | Map.null available -> do - logWarn "*** Could not find a working plan for any of \ - \the user packages.\nProceeding to create a \ - \config anyway." - return (snapLoc, Map.empty, Map.empty, Map.empty) - | otherwise -> do - when (Map.size available == Map.size pkgDirs) $ - error "Bug: No packages to ignore" - - if length ignored > 1 then do - logWarn "*** Ignoring packages:" - logWarn $ display $ indent $ showItems $ map packageNameString ignored - else - logWarn $ "*** Ignoring package: " - <> fromString - (case ignored of - [] -> error "getWorkingResolverPlan.head" - x:_ -> packageNameString x) - - go available - where - indent t = T.unlines $ fmap (" " <>) (T.lines t) - isAvailable k _ = k `notElem` ignored - available = Map.filterWithKey isAvailable pkgDirs - -checkBundleResolver - :: (HasConfig env, HasGHCVariant env) - => InitOpts - -> RawSnapshotLocation - -> SnapshotCandidate env - -> [ResolvedPath Dir] - -- ^ Src package dirs - -> RIO env - (Either [PackageName] ( Map PackageName (Map FlagName Bool) - , Map PackageName Version)) -checkBundleResolver initOpts snapshotLoc snapCandidate pkgDirs = do - result <- checkSnapBuildPlan pkgDirs Nothing snapCandidate - case result of - BuildPlanCheckOk f -> return $ Right (f, Map.empty) - BuildPlanCheckPartial _f e -> do -- FIXME:qrilka unused f - if omitPackages initOpts - then do - warnPartial result - logWarn "*** Omitting packages with unsatisfied dependencies" - return $ Left $ failedUserPkgs e - else throwM $ ResolverPartial snapshotLoc (show result) - BuildPlanCheckFail _ e _ - | omitPackages initOpts -> do - logWarn $ "*** Resolver compiler mismatch: " - <> display snapshotLoc - logWarn $ display $ indent $ T.pack $ show result - return $ Left $ failedUserPkgs e - | otherwise -> throwM $ ResolverMismatch snapshotLoc (show 
result) - where - indent t = T.unlines $ fmap (" " <>) (T.lines t) - warnPartial res = do - logWarn $ "*** Resolver " <> display snapshotLoc - <> " will need external packages: " - logWarn $ display $ indent $ T.pack $ show res - - failedUserPkgs e = Map.keys $ Map.unions (Map.elems (fmap deNeededBy e)) +getSnapshots' = catchAny + getSnapshots + (prettyThrowIO . SnapshotDownloadFailure) + +-- | Get the default snapshot value +getDefaultSnapshot :: + (HasConfig env, HasGHCVariant env) + => InitOpts + -> Maybe AbstractSnapshot + -> Map PackageName (ResolvedPath Dir) + -- ^ Src package name: cabal dir + -> RIO env + ( RawSnapshotLocation + , Map PackageName (Map FlagName Bool) + , Map PackageName Version + , Map PackageName (ResolvedPath Dir)) + -- ^ ( Snapshot + -- , Flags for src packages and extra deps + -- , Extra dependencies + -- , Src packages actually considered) +getDefaultSnapshot initOpts mASnapshot pkgDirs = do + (candidate, loc) <- case mASnapshot of + Nothing -> selectSnapshot + Just as -> do + sl <- makeConcreteSnapshot as + c <- loadProjectSnapshotCandidate sl NoPrintWarnings False + pure (c, sl) + getWorkingSnapshotPlan initOpts pkgDirs candidate loc + where + -- TODO support selecting best across regular and custom snapshots + selectSnapshot = do + snaps <- fmap getRecommendedSnapshots getSnapshots' + (c, l, r) <- selectBestSnapshot (Map.elems pkgDirs) snaps + case r of + BuildPlanCheckFail {} | not initOpts.omitPackages + -> prettyThrowM $ NoMatchingSnapshot snaps + _ -> pure (c, l) + +getWorkingSnapshotPlan :: + (HasConfig env, HasGHCVariant env) + => InitOpts + -> Map PackageName (ResolvedPath Dir) + -- ^ Src packages: cabal dir + -> SnapshotCandidate env + -> RawSnapshotLocation + -> RIO env + ( RawSnapshotLocation + , Map PackageName (Map FlagName Bool) + , Map PackageName Version + , Map PackageName (ResolvedPath Dir)) + -- ^ ( Raw snapshot location + -- , Flags for src packages and extra deps + -- , Extra dependencies + -- , Src packages 
actually considered) +getWorkingSnapshotPlan initOpts pkgDirs0 snapCandidate snapLoc = do + prettyInfoL + [ flow "Selected the snapshot" + , pretty (PrettyRawSnapshotLocation snapLoc) <> "." + ] + go pkgDirs0 + where + go pkgDirs = do + eres <- checkBundleSnapshot initOpts snapLoc snapCandidate (Map.elems pkgDirs) + -- if some packages failed try again using the rest + case eres of + Right (f, edeps)-> pure (snapLoc, f, edeps, pkgDirs) + Left ignored + | Map.null available -> do + prettyWarnS + "Could not find a working plan for any of the user packages. \ + \Proceeding to create a project-level configuration file anyway." + pure (snapLoc, Map.empty, Map.empty, Map.empty) + | otherwise -> do + when (Map.size available == Map.size pkgDirs) $ + throwM NoPackagesToIgnoreBug + if length ignored > 1 + then + prettyWarn + ( flow "Ignoring the following packages:" + <> line + <> bulletedList (map fromPackageName ignored) + ) + else + prettyWarnL + [ flow "Ignoring package:" + , fromString + ( case ignored of + [] -> throwM NoPackagesToIgnoreBug + x:_ -> packageNameString x + ) + ] + go available + where + isAvailable k _ = k `notElem` ignored + available = Map.filterWithKey isAvailable pkgDirs + +checkBundleSnapshot :: + (HasConfig env, HasGHCVariant env) + => InitOpts + -> RawSnapshotLocation + -> SnapshotCandidate env + -> [ResolvedPath Dir] + -- ^ Src package dirs + -> RIO env + (Either [PackageName] ( Map PackageName (Map FlagName Bool) + , Map PackageName Version)) +checkBundleSnapshot initOpts snapshotLoc snapCandidate pkgDirs = do + result <- checkSnapBuildPlan pkgDirs Nothing snapCandidate + case result of + BuildPlanCheckOk f -> pure $ Right (f, Map.empty) + BuildPlanCheckPartial _f e -> do -- FIXME:qrilka unused f + if initOpts.omitPackages + then do + warnPartial result + prettyWarnS "Omitting packages with unsatisfied dependencies" + pure $ Left $ failedUserPkgs e + else + prettyThrowM $ SnapshotPartial snapshotLoc (show result) + BuildPlanCheckFail _ e _ + | 
initOpts.omitPackages -> do + prettyWarn $ + fillSep + [ "Snapshot compiler mismatch:" + , style Current (fromString . T.unpack $ textDisplay snapshotLoc) + ] + <> line + <> indent 4 (string $ show result) + pure $ Left $ failedUserPkgs e + | otherwise -> prettyThrowM $ SnapshotMismatch snapshotLoc (show result) + where + warnPartial res = do + prettyWarn $ + fillSep + [ "Snapshot" + , style Current (fromString . T.unpack $ textDisplay snapshotLoc) + , flow "will need external packages:" + ] + <> line + <> indent 4 (string $ show res) + + failedUserPkgs e = Map.keys $ Map.unions (Map.elems (fmap (.neededBy) e)) getRecommendedSnapshots :: Snapshots -> NonEmpty SnapName getRecommendedSnapshots snapshots = - -- in order - Latest LTS, Latest Nightly, all LTS most recent first - case NonEmpty.nonEmpty ltss of - Just (mostRecent :| older) - -> mostRecent :| (nightly : older) - Nothing - -> nightly :| [] - where - ltss = map (uncurry LTS) (IntMap.toDescList $ snapshotsLts snapshots) - nightly = Nightly (snapshotsNightly snapshots) - -data InitOpts = InitOpts - { searchDirs :: ![T.Text] - -- ^ List of sub directories to search for .cabal files - , omitPackages :: Bool - -- ^ Exclude conflicting or incompatible user packages - , forceOverwrite :: Bool - -- ^ Overwrite existing stack.yaml - , includeSubDirs :: Bool - -- ^ If True, include all .cabal files found in any sub directories - } - -findCabalDirs - :: HasConfig env + -- in order - Latest LTS, Latest Nightly, all LTS most recent first + case nonEmpty supportedLtss of + Just (mostRecent :| older) -> mostRecent :| (nightly : older) + Nothing -> nightly :| [] + where + ltss = map (uncurry LTS) (IntMap.toDescList snapshots.lts ) + supportedLtss = filter (>= minSupportedLts) ltss + nightly = Nightly snapshots.nightly + +-- |Yields the minimum LTS supported by Stack. +minSupportedLts :: SnapName +-- See https://github.com/commercialhaskell/stack/blob/master/ChangeLog.md +-- under Stack version 3.1.1. 
+minSupportedLts = LTS 12 0 + +findCabalDirs :: + HasConfig env => Bool -> Path Abs Dir -> RIO env (Set (Path Abs Dir)) findCabalDirs recurse dir = - Set.fromList . map parent - <$> liftIO (findFiles dir isHpackOrCabal subdirFilter) - where - subdirFilter subdir = recurse && not (isIgnored subdir) - isHpack = (== "package.yaml") . toFilePath . filename - isCabal = (".cabal" `isSuffixOf`) . toFilePath - isHpackOrCabal x = isHpack x || isCabal x - - isIgnored path = "." `isPrefixOf` dirName || dirName `Set.member` ignoredDirs - where - dirName = FP.dropTrailingPathSeparator (toFilePath (dirname path)) + Set.fromList . map parent + <$> liftIO (findFiles dir isHpackOrCabal subdirFilter) + where + subdirFilter subdir = recurse && not (isIgnored subdir) + isHpack = (== "package.yaml") . toFilePath . filename + isCabal = (".cabal" `isSuffixOf`) . toFilePath + isHpackOrCabal x = isHpack x || isCabal x + isIgnored path = "." `isPrefixOf` dirName || dirName `Set.member` ignoredDirs + where + dirName = FP.dropTrailingPathSeparator (toFilePath (dirname path)) -- | Special directories that we don't want to traverse for .cabal files ignoredDirs :: Set FilePath -ignoredDirs = Set.fromList - [ "dist" - ] - -cabalPackagesCheck - :: (HasConfig env, HasGHCVariant env) - => [Path Abs Dir] - -> Maybe String - -> RIO env - ( Map PackageName (Path Abs File, C.GenericPackageDescription) - , [Path Abs File]) -cabalPackagesCheck cabaldirs dupErrMsg = do - when (null cabaldirs) $ do - logWarn "We didn't find any local package directories" - logWarn "You may want to create a package with \"stack new\" instead" - logWarn "Create an empty project for now" - logWarn "If this isn't what you want, please delete the generated \"stack.yaml\"" - - relpaths <- mapM prettyPath cabaldirs - logInfo "Using cabal packages:" - logInfo $ formatGroup relpaths - - packages <- for cabaldirs $ \dir -> do - (gpdio, _name, cabalfp) <- loadCabalFilePath dir - gpd <- liftIO $ gpdio YesPrintWarnings - pure (cabalfp, 
gpd) - - -- package name cannot be empty or missing otherwise - -- it will result in cabal solver failure. - -- stack requires packages name to match the cabal file name - -- Just the latter check is enough to cover both the cases - - let normalizeString = T.unpack . T.normalize T.NFC . T.pack - getNameMismatchPkg (fp, gpd) - | (normalizeString . packageNameString . gpdPackageName) gpd /= (normalizeString . FP.takeBaseName . toFilePath) fp - = Just fp - | otherwise = Nothing - nameMismatchPkgs = mapMaybe getNameMismatchPkg packages - - when (nameMismatchPkgs /= []) $ do - rels <- mapM prettyPath nameMismatchPkgs - error $ "Package name as defined in the .cabal file must match the \ - \.cabal file name.\n\ - \Please fix the following packages and try again:\n" - <> T.unpack (utf8BuilderToText (formatGroup rels)) - - let dupGroups = filter ((> 1) . length) - . groupSortOn (gpdPackageName . snd) - dupAll = concat $ dupGroups packages - - -- Among duplicates prefer to include the ones in upper level dirs - pathlen = length . FP.splitPath . toFilePath . fst - getmin = minimumBy (compare `on` pathlen) - dupSelected = map getmin (dupGroups packages) - dupIgnored = dupAll \\ dupSelected - unique = packages \\ dupIgnored - - when (dupIgnored /= []) $ do - dups <- mapM (mapM (prettyPath. 
fst)) (dupGroups packages) - logWarn $ - "Following packages have duplicate package names:\n" <> - mconcat (intersperse "\n" (map formatGroup dups)) - case dupErrMsg of - Nothing -> logWarn $ - "Packages with duplicate names will be ignored.\n" - <> "Packages in upper level directories will be preferred.\n" - Just msg -> error msg - - return (Map.fromList - $ map (\(file, gpd) -> (gpdPackageName gpd,(file, gpd))) unique - , map fst dupIgnored) - -formatGroup :: [String] -> Utf8Builder -formatGroup = foldMap (\path -> "- " <> fromString path <> "\n") +ignoredDirs = Set.fromList ["dist"] + +cabalPackagesCheck :: + (HasConfig env, HasGHCVariant env) + => [Path Abs Dir] + -> RIO env + ( Map PackageName (Path Abs File, C.GenericPackageDescription) + , [Path Abs File] + ) +cabalPackagesCheck cabaldirs = do + when (null cabaldirs) $ + prettyWarn $ + fillSep + [ flow "Stack did not find any local directories containing a \ + \package description. You may want to create a package with" + , style Shell (flow "stack new") + , flow "instead." + ] + <> blankLine + <> fillSep + [ flow "Stack will create an empty project. If this is not what \ + \you want, please delete the generated" + , style File "stack.yaml" + , "file." + ] + relpaths <- mapM prettyPath cabaldirs + unless (null relpaths) $ + prettyInfo $ + flow "Using the Cabal packages:" + <> line + <> bulletedList (map (style File . fromString) relpaths) + <> line + -- A package name cannot be empty or missing otherwise it will result in + -- Cabal solver failure. Stack requires packages name to match the Cabal + -- file name. Just the latter check is enough to cover both the cases. + ePackages <- for cabaldirs $ \dir -> do + -- Pantry's 'loadCabalFilePath' throws 'MismatchedCabalName' (error + -- [S-910]) if the Cabal file name does not match the package it + -- defines. 
+ (gpdio, _name, cabalFP) <- loadCabalFilePath (Just stackProgName') dir + eres <- liftIO $ try (gpdio YesPrintWarnings) + case eres :: Either PantryException C.GenericPackageDescription of + Right gpd -> pure $ Right (cabalFP, gpd) + Left (MismatchedCabalName fp name) -> pure $ Left (fp, name) + Left e -> throwIO e + let (nameMismatchPkgs, packages) = partitionEithers ePackages + when (nameMismatchPkgs /= []) $ + prettyThrowIO $ PackageNameInvalid nameMismatchPkgs + let dupGroups = mapMaybe nonEmpty . groupSortOn (gpdPackageName . snd) + dupAll = concatMap NE.toList $ dupGroups packages + -- Among duplicates prefer to include the ones in upper level dirs + pathlen = length . FP.splitPath . toFilePath . fst + getmin = minimumBy1 (compare `on` pathlen) + dupSelected = map getmin (dupGroups packages) + dupIgnored = dupAll \\ dupSelected + unique = packages \\ dupIgnored + when (dupIgnored /= []) $ do + dups <- mapM (mapM (prettyPath . fst)) (dupGroups packages) + prettyWarn $ + flow "The following packages have duplicate package names:" + <> line + <> foldMap + ( \dup -> bulletedList (map fromString (NE.toList dup)) + <> line + ) + dups + <> line + <> flow "Packages with duplicate names will be ignored. Packages \ + \in upper level directories will be preferred." 
+ <> line + pure (Map.fromList + $ map (\(file, gpd) -> (gpdPackageName gpd,(file, gpd))) unique + , map fst dupIgnored) prettyPath :: - (MonadIO m, RelPath (Path r t) ~ Path Rel t, AnyPath (Path r t)) - => Path r t - -> m FilePath -prettyPath path = do - eres <- liftIO $ try $ makeRelativeToCurrentDir path - return $ case eres of - Left (_ :: PathException) -> toFilePath path - Right res -> toFilePath res + (MonadIO m, RelPath (Path r t) ~ Path Rel t, AnyPath (Path r t)) + => Path r t + -> m FilePath +prettyPath path = + liftIO (try $ makeRelativeToCurrentDir path) <&> \case + Left (_ :: PathException) -> toFilePath path + Right res -> toFilePath res diff --git a/src/Stack/List.hs b/src/Stack/List.hs index 2ba9ef775c..5a70bf2dc6 100644 --- a/src/Stack/List.hs +++ b/src/Stack/List.hs @@ -1,76 +1,132 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.List +Description : Types and functions related to Stack's @list@ command. +License : BSD-3-Clause + +Types and functions related to Stack's @list@ command. +-} + module Stack.List - ( listPackages + ( listCmd + , listPackages ) where -import Stack.Prelude +import qualified RIO.ByteString.Lazy as Lazy import qualified RIO.Map as Map -import RIO.List (intercalate) -import RIO.Process (HasProcessContext) +import RIO.Process ( HasProcessContext ) +import Stack.Config ( getRawSnapshot ) +import Stack.Prelude +import Stack.Runners ( ShouldReexec (..), withConfig ) +import Stack.SourceMap ( globalsFromHints ) +import Stack.Types.Runner ( Runner ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.List" module. 
+newtype ListPrettyException + = CouldNotParsePackageSelectors [StyleDoc] + deriving Show -newtype ListException - = CouldNotParsePackageSelectors [String] - deriving Typeable -instance Exception ListException -instance Show ListException where - show (CouldNotParsePackageSelectors strs) = unlines $ map ("- " ++) strs +instance Pretty ListPrettyException where + pretty (CouldNotParsePackageSelectors errs) = + "[S-4926]" + <> line + <> bulletedList errs + +instance Exception ListPrettyException + +-- | Function underlying the @stack list@ command. List packages. +listCmd :: [String] -> RIO Runner () +listCmd names = withConfig NoReexec $ do + mSnapshot <- getRawSnapshot + let mWc = rsCompiler <$> mSnapshot + mGlobals <- mapM globalsFromHints mWc + listPackages mSnapshot mGlobals names -- | Intended to work for the command line command. -listPackages - :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) - => Maybe RawSnapshot -- ^ when looking up by name, take from this build plan - -> [String] -- ^ names or identifiers +listPackages :: + forall env. (HasPantryConfig env, HasProcessContext env, HasTerm env) + => Maybe RawSnapshot + -- ^ When looking up by name, take from this build plan. + -> Maybe (Map PackageName Version) + -- ^ Global hints for snapshot wanted compiler. + -> [String] + -- ^ Names or identifiers. -> RIO env () -listPackages mSnapshot input = do - let (errs1, names) = case mSnapshot of - Just snapshot | null input -> - ([], Map.keys (rsPackages snapshot)) - _ -> partitionEithers $ map parse input - (errs2, locs) <- partitionEithers <$> traverse toLoc names - case errs1 ++ errs2 of - [] -> pure () - errs -> throwM $ CouldNotParsePackageSelectors errs - mapM_ (logInfo . fromString . 
packageIdentifierString) locs - where - toLoc | Just snapshot <- mSnapshot = toLocSnapshot snapshot - | otherwise = toLocNoSnapshot +listPackages mSnapshot mGlobals input = do + let (errs1, names) = case mSnapshot of + Just snapshot | null input -> ([], Map.keys (rsPackages snapshot)) + _ -> partitionEithers $ map parse input + (errs2, locs) <- partitionEithers <$> traverse toLoc names + case errs1 ++ errs2 of + [] -> pure () + errs -> prettyThrowM $ CouldNotParsePackageSelectors errs + mapM_ (Lazy.putStrLn . fromPackageId) locs + where + toLoc | Just snapshot <- mSnapshot = toLocSnapshot snapshot + | otherwise = toLocNoSnapshot - toLocNoSnapshot :: PackageName -> RIO env (Either String PackageIdentifier) - toLocNoSnapshot name = do - mloc1 <- getLatestHackageLocation YesRequireHackageIndex name UsePreferredVersions - mloc <- - case mloc1 of - Just _ -> pure mloc1 - Nothing -> do - updated <- updateHackageIndex $ Just $ "Could not find package " <> fromString (packageNameString name) <> ", updating" - case updated of - UpdateOccurred -> getLatestHackageLocation YesRequireHackageIndex name UsePreferredVersions - NoUpdateOccurred -> pure Nothing - case mloc of + toLocNoSnapshot :: PackageName -> RIO env (Either StyleDoc PackageIdentifier) + toLocNoSnapshot name = do + mloc1 <- + getLatestHackageLocation YesRequireHackageIndex name UsePreferredVersions + mloc <- + case mloc1 of + Just _ -> pure mloc1 Nothing -> do - candidates <- getHackageTypoCorrections name - pure $ Left $ concat - [ "Could not find package " - , packageNameString name - , " on Hackage" - , if null candidates - then "" - else ". 
Perhaps you meant: " ++ intercalate ", " (map packageNameString candidates) - ] - Just loc -> pure $ Right (packageLocationIdent loc) + updated <- + updateHackageIndex $ Just $ + "Could not find package " + <> fromPackageName name + <> ", updating" + case updated of + UpdateOccurred -> + getLatestHackageLocation + YesRequireHackageIndex + name + UsePreferredVersions + NoUpdateOccurred -> pure Nothing + case mloc of + Nothing -> do + candidates <- getHackageTypoCorrections name + pure $ Left $ fillSep + [ flow "Could not find package" + , style Current (fromPackageName name) + , flow "on Hackage." + , if null candidates + then mempty + else fillSep $ + flow "Perhaps you meant one of:" + : mkNarrativeList (Just Good) False + (map fromPackageName candidates :: [StyleDoc]) + ] + Just loc -> pure $ Right (packageLocationIdent loc) - toLocSnapshot :: RawSnapshot -> PackageName -> RIO env (Either String PackageIdentifier) - toLocSnapshot snapshot name = - case Map.lookup name (rsPackages snapshot) of - Nothing -> - pure $ Left $ "Package does not appear in snapshot: " ++ packageNameString name - Just sp -> do - loc <- cplComplete <$> completePackageLocation (rspLocation sp) - pure $ Right (packageLocationIdent loc) + toLocSnapshot :: + RawSnapshot + -> PackageName + -> RIO env (Either StyleDoc PackageIdentifier) + toLocSnapshot snapshot name = + case Map.lookup name (rsPackages snapshot) of + Nothing -> case Map.lookup name =<< mGlobals of + Nothing -> + pure $ Left $ fillSep + [ flow "Package does not appear in snapshot (directly or \ + \indirectly):" + , style Current (fromPackageName name) <> "." 
+ ] + Just version -> + pure $ Right $ PackageIdentifier name version + Just sp -> do + loc <- cplComplete <$> completePackageLocation (rspLocation sp) + pure $ Right (packageLocationIdent loc) - parse s = - case parsePackageName s of - Just x -> Right x - Nothing -> Left $ "Could not parse as package name or identifier: " ++ s + parse s = + case parsePackageName s of + Just x -> Right x + Nothing -> Left $ fillSep + [ flow "Could not parse as package name or identifier:" + , style Current (fromString s) <> "." + ] diff --git a/src/Stack/Lock.hs b/src/Stack/Lock.hs index 642fb804f8..7ef17d028f 100644 --- a/src/Stack/Lock.hs +++ b/src/Stack/Lock.hs @@ -1,154 +1,200 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Lock +License : BSD-3-Clause +-} module Stack.Lock - ( lockCachedWanted - , LockedLocation(..) - , Locked(..) - ) where - -import Pantry.Internal.AesonExtended -import Data.ByteString.Builder (byteString) -import qualified Data.List.NonEmpty as NE + ( lockCachedWanted + , LockedLocation (..) + , Locked (..) 
+ ) where + +import Data.Aeson.Types ( FromJSON (..), ToJSON, Value, (.=), object ) +import Data.Aeson.WarningParser + ( WithJSONWarnings (..), (..:), jsonSubWarnings + , jsonSubWarningsT, logJSONWarnings, withObjectWarnings + ) +import Data.ByteString.Builder ( byteString ) import qualified Data.Map as Map +import qualified Data.Text as T import qualified Data.Yaml as Yaml -import Pantry -import Path (parent) -import Path.Extended (addExtension) -import Path.IO (doesFileExist) -import Stack.Prelude -import Stack.SourceMap -import Stack.Types.Config -import Stack.Types.SourceMap +import qualified RIO.NonEmpty as NE +import Path ( addExtension, parent ) +import Path.IO ( doesFileExist ) +import Stack.Prelude +import Stack.SourceMap ( snapToDepPackage ) +import Stack.Types.Config.Exception ( ConfigPrettyException (..) ) +import Stack.Types.LockFileBehavior ( LockFileBehavior (..) ) +import Stack.Types.Runner ( HasRunner, lockFileBehaviorL, rslInLogL ) +import Stack.Types.SourceMap ( DepPackage, SMWanted ) + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Lock" module. +data LockPrettyException + = WritingLockFileError (Path Abs File) Locked + deriving Show +instance Pretty LockPrettyException where + pretty (WritingLockFileError lockFile newLocked) = + "[S-1353]" + <> line + <> flow "Stack is configured to report an error on writing a lock file." + <> blankLine + <> fillSep + [ flow "Stack just tried to write the following lock file content to" + , pretty lockFile <> ":" + ] + <> blankLine + <> string newLocked' + where + newLocked' = T.unpack . decodeUtf8With lenientDecode $ Yaml.encode newLocked + +instance Exception LockPrettyException + +-- | Type representing locked locations. 
data LockedLocation a b = LockedLocation - { llOriginal :: a - , llCompleted :: b - } deriving (Eq, Show) + { original :: a + , completed :: b + } + deriving (Eq, Show) instance (ToJSON a, ToJSON b) => ToJSON (LockedLocation a b) where - toJSON ll = - object [ "original" .= llOriginal ll, "completed" .= llCompleted ll ] + toJSON ll = + object [ "original" .= ll.original, "completed" .= ll.completed ] instance ( FromJSON (WithJSONWarnings (Unresolved a)) , FromJSON (WithJSONWarnings (Unresolved b)) ) => FromJSON (WithJSONWarnings (Unresolved (LockedLocation a b))) where - parseJSON = - withObjectWarnings "LockedLocation" $ \o -> do - original <- jsonSubWarnings $ o ..: "original" - completed <- jsonSubWarnings $ o ..: "completed" - pure $ LockedLocation <$> original <*> completed + parseJSON = + withObjectWarnings "LockedLocation" $ \o -> do + original <- jsonSubWarnings $ o ..: "original" + completed <- jsonSubWarnings $ o ..: "completed" + pure $ LockedLocation <$> original <*> completed -- Special wrapper extracting only 1 RawPackageLocationImmutable -- serialization should not produce locations with multiple subdirs -- so we should be OK using just a head element -newtype SingleRPLI = SingleRPLI { unSingleRPLI :: RawPackageLocationImmutable} +newtype SingleRPLI + = SingleRPLI { singleRPLI :: RawPackageLocationImmutable} instance FromJSON (WithJSONWarnings (Unresolved SingleRPLI)) where - parseJSON v = - do - WithJSONWarnings unresolvedRPLIs ws <- parseJSON v - let withWarnings x = WithJSONWarnings x ws - pure $ withWarnings $ SingleRPLI . NE.head <$> unresolvedRPLIs + parseJSON v = + do + WithJSONWarnings unresolvedRPLIs ws <- parseJSON v + let withWarnings x = WithJSONWarnings x ws + pure $ withWarnings $ SingleRPLI . NE.head <$> unresolvedRPLIs +-- | Type representing the contents of lock files. 
data Locked = Locked - { lckSnapshotLocations :: [LockedLocation RawSnapshotLocation SnapshotLocation] - , lckPkgImmutableLocations :: [LockedLocation RawPackageLocationImmutable PackageLocationImmutable] - } deriving (Eq, Show) + { snapshotLocations :: [LockedLocation RawSnapshotLocation SnapshotLocation] + , pkgImmutableLocations :: [LockedLocation RawPackageLocationImmutable PackageLocationImmutable] + } + deriving (Eq, Show) instance ToJSON Locked where - toJSON Locked {..} = - object - [ "snapshots" .= lckSnapshotLocations - , "packages" .= lckPkgImmutableLocations - ] + toJSON lck = + object + [ "snapshots" .= lck.snapshotLocations + , "packages" .= lck.pkgImmutableLocations + ] instance FromJSON (WithJSONWarnings (Unresolved Locked)) where - parseJSON = withObjectWarnings "Locked" $ \o -> do - snapshots <- jsonSubWarningsT $ o ..: "snapshots" - packages <- jsonSubWarningsT $ o ..: "packages" - let unwrap ll = ll { llOriginal = unSingleRPLI (llOriginal ll) } - pure $ Locked <$> sequenceA snapshots <*> (map unwrap <$> sequenceA packages) - -loadYamlThrow - :: HasLogFunc env - => (Value -> Yaml.Parser (WithJSONWarnings a)) -> Path Abs File -> RIO env a -loadYamlThrow parser path = do - val <- liftIO $ Yaml.decodeFileThrow (toFilePath path) - case Yaml.parseEither parser val of - Left err -> throwIO $ Yaml.AesonException err - Right (WithJSONWarnings res warnings) -> do - logJSONWarnings (toFilePath path) warnings - return res + parseJSON = withObjectWarnings "Locked" $ \o -> do + snapshots <- jsonSubWarningsT $ o ..: "snapshots" + packages <- jsonSubWarningsT $ o ..: "packages" + let unwrap :: LockedLocation SingleRPLI b -> LockedLocation RawPackageLocationImmutable b + unwrap ll = ll { original = ll.original.singleRPLI } + pure $ Locked <$> sequenceA snapshots <*> (map unwrap <$> sequenceA packages) + +loadYamlThrow :: + HasLogFunc env + => (Value -> Yaml.Parser (WithJSONWarnings a)) + -> Path Abs File + -> RIO env a +loadYamlThrow parser path = + liftIO 
(Yaml.decodeFileEither (toFilePath path)) >>= \case + Left parseException -> throwIO $ + ParseConfigFileException path parseException + Right val -> case Yaml.parseEither parser val of + Left err -> throwIO $ Yaml.AesonException err + Right (WithJSONWarnings res warnings) -> do + logJSONWarnings (toFilePath path) warnings + pure res +-- | Yields a t'Stack.Types.SourceMap.SMWanted' taking into account the relevant +-- lock file, if one is applicable and it exists. lockCachedWanted :: - (HasPantryConfig env, HasRunner env) - => Path Abs File - -> RawSnapshotLocation - -> (Map RawPackageLocationImmutable PackageLocationImmutable - -> WantedCompiler - -> Map PackageName (Bool -> RIO env DepPackage) - -> RIO env ( SMWanted, [CompletedPLI])) - -> RIO env SMWanted -lockCachedWanted stackFile resolver fillWanted = do - lockFile <- liftIO $ addExtension ".lock" stackFile - let getLockExists = doesFileExist lockFile - lfb <- view lockFileBehaviorL - readLockFile <- - case lfb of - LFBIgnore -> pure False - LFBReadWrite -> getLockExists - LFBReadOnly -> getLockExists - LFBErrorOnWrite -> getLockExists - locked <- - if readLockFile - then do - logDebug "Using package location completions from a lock file" - unresolvedLocked <- loadYamlThrow parseJSON lockFile - resolvePaths (Just $ parent stackFile) unresolvedLocked - else do - logDebug "Not reading lock file" - pure $ Locked [] [] - let toMap :: Ord a => [LockedLocation a b] -> Map a b - toMap = Map.fromList . 
map (\ll -> (llOriginal ll, llCompleted ll)) - slocCache = toMap $ lckSnapshotLocations locked - pkgLocCache = toMap $ lckPkgImmutableLocations locked - (snap, slocCompleted, pliCompleted) <- - loadAndCompleteSnapshotRaw resolver slocCache pkgLocCache - let compiler = snapshotCompiler snap - snPkgs = Map.mapWithKey (\n p h -> snapToDepPackage h n p) (snapshotPackages snap) - (wanted, prjCompleted) <- fillWanted pkgLocCache compiler snPkgs - let lockLocations = map (\(CompletedPLI r c) -> LockedLocation r c) - differentSnapLocs (CompletedSL raw complete) - | raw == toRawSL complete = Nothing - | otherwise = Just $ LockedLocation raw complete - newLocked = Locked { lckSnapshotLocations = mapMaybe differentSnapLocs slocCompleted - , lckPkgImmutableLocations = - lockLocations $ pliCompleted <> prjCompleted - } - when (newLocked /= locked) $ do - case lfb of - LFBReadWrite -> - writeBinaryFileAtomic lockFile $ - header <> - byteString (Yaml.encode newLocked) - LFBErrorOnWrite -> do - logError "You indicated that Stack should error out on writing a lock file" - logError $ - "I just tried to write the following lock file contents to " <> - fromString (toFilePath lockFile) - logError $ display $ decodeUtf8With lenientDecode $ Yaml.encode newLocked - exitFailure - LFBIgnore -> pure () - LFBReadOnly -> pure () - pure wanted - where - header = - "# This file was autogenerated by Stack.\n\ - \# You should not edit this file by hand.\n\ - \# For more information, please see the documentation at:\n\ - \# https://docs.haskellstack.org/en/stable/lock_files\n\n" + (HasPantryConfig env, HasRunner env) + => Path Abs File + -- ^ The relevant Stack project-level configuration file. + -> RawSnapshotLocation + -- ^ The relevant snapshot. 
+ -> ( Map RawPackageLocationImmutable PackageLocationImmutable + -> WantedCompiler + -> Map PackageName (Bool -> RIO env DepPackage) + -> RIO env ( SMWanted, [CompletedPLI]) + ) + -> RIO env SMWanted +lockCachedWanted stackFile snapshot fillWanted = do + lockFile <- liftIO $ addExtension ".lock" stackFile + let getLockExists = doesFileExist lockFile + lfb <- view lockFileBehaviorL + readLockFile <- + case lfb of + LFBIgnore -> pure False + LFBReadWrite -> getLockExists + LFBReadOnly -> getLockExists + LFBErrorOnWrite -> getLockExists + locked <- + if readLockFile + then do + logDebug "Using package location completions from a lock file" + unresolvedLocked <- loadYamlThrow parseJSON lockFile + resolvePaths (Just $ parent stackFile) unresolvedLocked + else do + logDebug "Not reading lock file" + pure $ Locked [] [] + let toMap :: Ord a => [LockedLocation a b] -> Map a b + toMap = Map.fromList . map ((.original) &&& (.completed)) + slocCache = toMap locked.snapshotLocations + pkgLocCache = toMap locked.pkgImmutableLocations + debugRSL <- view rslInLogL + (snap, slocCompleted, pliCompleted) <- + loadAndCompleteSnapshotRaw' debugRSL snapshot slocCache pkgLocCache + let compiler = snapshotCompiler snap + snPkgs = Map.mapWithKey + (\n p h -> snapToDepPackage h n p) + (snapshotPackages snap) + (wanted, prjCompleted) <- fillWanted pkgLocCache compiler snPkgs + let lockLocations = map (\(CompletedPLI r c) -> LockedLocation r c) + differentSnapLocs (CompletedSL raw complete) + | raw == toRawSL complete = Nothing + | otherwise = Just $ LockedLocation raw complete + newLocked = Locked + { snapshotLocations = mapMaybe differentSnapLocs slocCompleted + , pkgImmutableLocations = + lockLocations $ pliCompleted <> prjCompleted + } + when (newLocked /= locked) $ + case lfb of + LFBReadWrite -> + writeBinaryFileAtomic lockFile $ + header <> + byteString (Yaml.encode newLocked) + LFBErrorOnWrite -> + prettyThrowIO $ WritingLockFileError lockFile newLocked + LFBIgnore -> pure () + 
LFBReadOnly -> pure () + pure wanted + where + header = + "# This file was autogenerated by Stack.\n\ + \# You should not edit this file by hand.\n\ + \# For more information, please see the documentation at:\n\ + \# https://docs.haskellstack.org/en/stable/topics/lock_files\n\n" diff --git a/src/Stack/Ls.hs b/src/Stack/Ls.hs index 553e7a70ca..1e978441ad 100644 --- a/src/Stack/Ls.hs +++ b/src/Stack/Ls.hs @@ -1,194 +1,164 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Ls +Description : Types and functions related to Stack's @ls@ command. +License : BSD-3-Clause + +Types and functions related to Stack's @ls@ command. +-} module Stack.Ls ( lsCmd - , lsParser - , listDependenciesCmd ) where -import Control.Exception (throw) -import Data.Aeson -import Data.Array.IArray ((//), elems) -import Stack.Prelude hiding (Snapshot (..), SnapName (..)) +import Control.Monad.Extra ( whenJust ) +import Data.Aeson ( FromJSON, Value (..), (.:), encode ) +import Data.Array.IArray ( (//), elems ) +import qualified Data.ByteString.Lazy.Char8 as LBC8 +import Distribution.Package ( mkPackageName ) import qualified Data.Aeson.Types as A +import qualified Data.Foldable as F import qualified Data.List as L -import Data.Text hiding (pack, intercalate) +import qualified Data.Map as Map +import qualified Data.Sequence as Seq +import qualified Data.Set as Set +import Data.Text ( isPrefixOf ) import qualified Data.Text as T import qualified Data.Text.IO as T import qualified Data.Vector as V -import Network.HTTP.StackClient (httpJSON, addRequestHeader, getResponseBody, parseRequest, hAccept) -import qualified Options.Applicative as OA -import Options.Applicative (idm) -import Options.Applicative.Builder.Extra (boolFlags) -import Path 
-import RIO.PrettyPrint (useColorL) -import RIO.PrettyPrint.DefaultStyles (defaultStyles) -import RIO.PrettyPrint.Types (StyleSpec) -import RIO.PrettyPrint.StylesUpdate (StylesUpdate (..), stylesUpdateL) -import Stack.Dot -import Stack.Runners -import Stack.Options.DotParser (listDepsOptsParser) -import Stack.Types.Config -import System.Console.ANSI.Codes (SGR (Reset), setSGRCode, sgrToCode) -import System.Process.Pager (pageText) -import System.Directory (listDirectory) - -data LsView - = Local - | Remote - deriving (Show, Eq, Ord) - +import Network.HTTP.StackClient + ( HttpException (..), addRequestHeader, getResponseBody + , hAccept, httpJSON, parseUrlThrow + ) +import Path ( parent ) +import RIO.List ( sort ) +import Stack.Build.Installed ( getInstalled, toInstallMap ) +import Stack.Config ( withBuildConfig ) +import Stack.Constants ( osIsWindows ) +import Stack.DependencyGraph ( createPrunedDependencyGraph ) +import Stack.Prelude hiding ( Nightly, Snapshot ) +import Stack.Runners + ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) +import Stack.Setup.Installed + ( Tool (..), filterTools, listInstalled, toolString ) +import Stack.SourceMap ( globalsFromHints ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..) ) +import Stack.Types.Config + ( Config (..), HasConfig (..), askRecentSnapshotsUrl ) +import Stack.Types.DependencyTree + ( DependencyGraph, DependencyTree (..), DotPayload (..) + , licenseText, versionText + ) +import Stack.Types.DotOpts ( DotOpts (..) ) +import Stack.Types.DumpPackage ( DumpPackage (..) ) +import Stack.Types.EnvConfig ( EnvConfig (..), installationRootDeps ) +import Stack.Types.LsOpts + ( LsCmdOpts (..), LsCmds (..), ListDepsFormat (..) + , ListDepsFormatOpts (..), ListDepsOpts (..) + , ListDepsTextFilter (..), ListGlobalsOpts (..) + , ListStylesOpts (..), ListToolsOpts (..), LsView (..) + , SnapshotOpts (..) + ) +import Stack.Types.Runner ( Runner, terminalL ) +import Stack.Types.SourceMap ( SMWanted (..) 
) +import System.Console.ANSI.Codes + ( SGR (Reset), setSGRCode, sgrToCode ) +import System.Process.Pager ( pageText ) +import System.Directory ( listDirectory ) +import System.IO ( putStrLn ) + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Ls" module. +data LsPrettyException + = ParseFailure ![Value] + | ParseRecentSnapshotsUrlFailed !HttpException + +deriving instance Show LsPrettyException + +instance Pretty LsPrettyException where + pretty (ParseFailure val) = + "[S-3421]" + <> line + <> fillSep + [ flow "Failure to parse values as a snapshot:" + , string (show val) + ] + pretty (ParseRecentSnapshotsUrlFailed err) = + "[S-9131]" + <> line + <> fillSep + [ flow "While trying to parse the" + , style Shell "recent-snapshots" + , flow "URL, Stack encountered the following error:" + ] + <> blankLine + <> fromString (displayException err) + +instance Exception LsPrettyException + +-- | Type representing Stackage snapshot types. data SnapshotType - = Lts - | Nightly - deriving (Show, Eq, Ord) - -data LsCmds - = LsSnapshot SnapshotOpts - | LsDependencies ListDepsOpts - | LsStyles ListStylesOpts - -data SnapshotOpts = SnapshotOpts - { soptViewType :: LsView - , soptLtsSnapView :: Bool - , soptNightlySnapView :: Bool - } deriving (Eq, Show, Ord) - -data ListStylesOpts = ListStylesOpts - { coptBasic :: Bool - , coptSGR :: Bool - , coptExample :: Bool - } deriving (Eq, Ord, Show) - -newtype LsCmdOpts = LsCmdOpts - { lsView :: LsCmds - } - -lsParser :: OA.Parser LsCmdOpts -lsParser = LsCmdOpts <$> OA.hsubparser (lsSnapCmd <> lsDepsCmd <> lsStylesCmd) - -lsCmdOptsParser :: OA.Parser LsCmds -lsCmdOptsParser = LsSnapshot <$> lsViewSnapCmd - -lsDepOptsParser :: OA.Parser LsCmds -lsDepOptsParser = LsDependencies <$> listDepsOptsParser - -lsStylesOptsParser :: OA.Parser LsCmds -lsStylesOptsParser = LsStyles <$> listStylesOptsParser - -listStylesOptsParser :: OA.Parser ListStylesOpts -listStylesOptsParser = ListStylesOpts - <$> 
boolFlags False - "basic" - "a basic report of the styles used. The default is a fuller \ - \one" - idm - <*> boolFlags True - "sgr" - "the provision of the equivalent SGR instructions (provided \ - \by default). Flag ignored for a basic report" - idm - <*> boolFlags True - "example" - "the provision of an example of the applied style (provided \ - \by default for colored output). Flag ignored for a basic \ - \report" - idm - -lsViewSnapCmd :: OA.Parser SnapshotOpts -lsViewSnapCmd = - SnapshotOpts <$> - (OA.hsubparser (lsViewRemoteCmd <> lsViewLocalCmd) <|> pure Local) <*> - OA.switch - (OA.long "lts" <> OA.short 'l' <> OA.help "Only show lts snapshots") <*> - OA.switch - (OA.long "nightly" <> OA.short 'n' <> - OA.help "Only show nightly snapshots") - -lsSnapCmd :: OA.Mod OA.CommandFields LsCmds -lsSnapCmd = - OA.command - "snapshots" - (OA.info - lsCmdOptsParser - (OA.progDesc "View local snapshot (default option)")) - -lsDepsCmd :: OA.Mod OA.CommandFields LsCmds -lsDepsCmd = - OA.command - "dependencies" - (OA.info lsDepOptsParser (OA.progDesc "View the dependencies")) - -lsStylesCmd :: OA.Mod OA.CommandFields LsCmds -lsStylesCmd = - OA.command - "stack-colors" - (OA.info lsStylesOptsParser - (OA.progDesc "View stack's output styles")) - <> - OA.command - "stack-colours" - (OA.info lsStylesOptsParser - (OA.progDesc "View stack's output styles (alias for \ - \'stack-colors')")) + = Lts + -- ^ Stackage LTS Haskell + | Nightly + -- ^ Stackage Nightly + deriving (Eq, Ord, Show) data Snapshot = Snapshot - { snapId :: Text - , snapTitle :: Text - , snapTime :: Text - } deriving (Show, Eq, Ord) - -data SnapshotData = SnapshotData - { _snapTotalCounts :: Integer - , snaps :: [[Snapshot]] - } deriving (Show, Eq, Ord) + { snapId :: Text + , title :: Text + , time :: Text + } + deriving (Eq, Ord, Show) instance FromJSON Snapshot where - parseJSON o@(Array _) = parseSnapshot o - parseJSON _ = mempty + parseJSON o@(Array _) = parseSnapshot o + parseJSON _ = mempty + +data 
SnapshotData = SnapshotData + { _snapTotalCounts :: Integer + , snaps :: [[Snapshot]] + } + deriving (Eq, Ord, Show) instance FromJSON SnapshotData where - parseJSON (Object s) = - SnapshotData <$> s .: "totalCount" <*> s .: "snapshots" - parseJSON _ = mempty + parseJSON (Object s) = + SnapshotData <$> s .: "totalCount" <*> s .: "snapshots" + parseJSON _ = mempty toSnapshot :: [Value] -> Snapshot -toSnapshot [String sid, String stitle, String stime] = - Snapshot - { snapId = sid - , snapTitle = stitle - , snapTime = stime +toSnapshot [String snapId, String title, String time] = + Snapshot + { snapId + , title + , time } -toSnapshot val = throw $ ParseFailure val - -newtype LsException = - ParseFailure [Value] - deriving (Show, Typeable) - -instance Exception LsException +toSnapshot val = prettyImpureThrow $ ParseFailure val parseSnapshot :: Value -> A.Parser Snapshot -parseSnapshot = A.withArray "array of snapshot" (return . toSnapshot . V.toList) +parseSnapshot = A.withArray "array of snapshot" (pure . toSnapshot . 
V.toList) displayTime :: Snapshot -> [Text] -displayTime Snapshot {..} = [snapTime] +displayTime snap = [snap.time] displaySnap :: Snapshot -> [Text] -displaySnap Snapshot {..} = - ["Resolver name: " <> snapId, "\n" <> snapTitle <> "\n\n"] +displaySnap snap = + ["Snapshot name: " <> snap.snapId, "\n" <> snap.title <> "\n\n"] displaySingleSnap :: [Snapshot] -> Text displaySingleSnap snapshots = - case snapshots of - [] -> mempty - (x:xs) -> - let snaps = - displayTime x <> ["\n\n"] <> displaySnap x <> - L.concatMap displaySnap xs - in T.concat snaps + case snapshots of + [] -> mempty + (x:xs) -> + let snaps = + displayTime x <> ["\n\n"] <> displaySnap x <> + L.concatMap displaySnap xs + in T.concat snaps renderData :: Bool -> Text -> IO () renderData True content = pageText content @@ -196,24 +166,22 @@ renderData False content = T.putStr content displaySnapshotData :: Bool -> SnapshotData -> IO () displaySnapshotData term sdata = - case L.reverse $ snaps sdata of - [] -> return () - xs -> - let snaps = T.concat $ L.map displaySingleSnap xs - in renderData term snaps + case L.reverse sdata.snaps of + [] -> pure () + xs -> + let snaps = T.concat $ L.map displaySingleSnap xs + in renderData term snaps filterSnapshotData :: SnapshotData -> SnapshotType -> SnapshotData filterSnapshotData sdata stype = - sdata - { snaps = filterSnapData - } - where - snapdata = snaps sdata - filterSnapData = - case stype of - Lts -> L.map (L.filter (\x -> "lts" `isPrefixOf` snapId x)) snapdata - Nightly -> - L.map (L.filter (\x -> "nightly" `isPrefixOf` snapId x)) snapdata + sdata { snaps = filterSnapData } + where + snapdata = sdata.snaps + filterSnapData = + case stype of + Lts -> L.map (L.filter (\x -> "lts" `isPrefixOf` x.snapId)) snapdata + Nightly -> + L.map (L.filter (\x -> "nightly" `isPrefixOf` x.snapId)) snapdata displayLocalSnapshot :: Bool -> [String] -> IO () displayLocalSnapshot term xs = renderData term (localSnaptoText xs) @@ -223,105 +191,238 @@ localSnaptoText xs = 
T.intercalate "\n" $ L.map T.pack xs handleLocal :: LsCmdOpts -> RIO Runner () handleLocal lsOpts = do - (instRoot :: Path Abs Dir) <- withConfig YesReexec $ withDefaultEnvConfig installationRootDeps - isStdoutTerminal <- view terminalL - let snapRootDir = parent $ parent instRoot - snapData' <- liftIO $ listDirectory $ toFilePath snapRootDir - let snapData = L.sort snapData' - case lsView lsOpts of - LsSnapshot SnapshotOpts {..} -> - case (soptLtsSnapView, soptNightlySnapView) of - (True, False) -> - liftIO $ - displayLocalSnapshot isStdoutTerminal $ - L.filter (L.isPrefixOf "lts") snapData - (False, True) -> - liftIO $ - displayLocalSnapshot isStdoutTerminal $ - L.filter (L.isPrefixOf "night") snapData - _ -> liftIO $ displayLocalSnapshot isStdoutTerminal snapData - LsDependencies _ -> return () - LsStyles _ -> return () - -handleRemote - :: HasRunner env - => LsCmdOpts -> RIO env () + (instRoot :: Path Abs Dir) <- + withConfig YesReexec $ withDefaultEnvConfig installationRootDeps + isStdoutTerminal <- view terminalL + let parentInstRoot = parent instRoot + snapRootDir + | osIsWindows = parentInstRoot + | otherwise = parent parentInstRoot + snapData' <- liftIO $ listDirectory $ toFilePath snapRootDir + let snapData = L.sort snapData' + case lsOpts.lsCmds of + LsSnapshot sopt -> + case (sopt.ltsSnapView, sopt.nightlySnapView) of + (True, False) -> + liftIO $ + displayLocalSnapshot isStdoutTerminal $ + L.filter (L.isPrefixOf "lts") snapData + (False, True) -> + liftIO $ + displayLocalSnapshot isStdoutTerminal $ + L.filter (L.isPrefixOf "night") snapData + _ -> liftIO $ displayLocalSnapshot isStdoutTerminal snapData + LsGlobals _ -> pure () + LsDependencies _ -> pure () + LsStyles _ -> pure () + LsTools _ -> pure () + +handleRemote :: HasConfig env => LsCmdOpts -> RIO env () handleRemote lsOpts = do - req <- liftIO $ parseRequest urlInfo - isStdoutTerminal <- view terminalL - let req' = addRequestHeader hAccept "application/json" req - result <- httpJSON req' - let 
snapData = getResponseBody result - case lsView lsOpts of - LsSnapshot SnapshotOpts {..} -> - case (soptLtsSnapView, soptNightlySnapView) of - (True, False) -> - liftIO $ - displaySnapshotData isStdoutTerminal $ - filterSnapshotData snapData Lts - (False, True) -> - liftIO $ - displaySnapshotData isStdoutTerminal $ - filterSnapshotData snapData Nightly - _ -> liftIO $ displaySnapshotData isStdoutTerminal snapData - LsDependencies _ -> return () - LsStyles _ -> return () - where - urlInfo = "https://www.stackage.org/snapshots" - + urlInfoText <- askRecentSnapshotsUrl + req <- catch + (parseUrlThrow $ T.unpack urlInfoText) + (prettyThrowM . ParseRecentSnapshotsUrlFailed) + isStdoutTerminal <- view terminalL + let req' = addRequestHeader hAccept "application/json" req + result <- httpJSON req' + let snapData = getResponseBody result + case lsOpts.lsCmds of + LsSnapshot sopt -> + case (sopt.ltsSnapView, sopt.nightlySnapView) of + (True, False) -> + liftIO $ + displaySnapshotData isStdoutTerminal $ + filterSnapshotData snapData Lts + (False, True) -> + liftIO $ + displaySnapshotData isStdoutTerminal $ + filterSnapshotData snapData Nightly + _ -> liftIO $ displaySnapshotData isStdoutTerminal snapData + LsGlobals _ -> pure () + LsDependencies _ -> pure () + LsStyles _ -> pure () + LsTools _ -> pure () + +-- | Function underlying the @stack ls@ command. lsCmd :: LsCmdOpts -> RIO Runner () lsCmd lsOpts = - case lsView lsOpts of - LsSnapshot SnapshotOpts {..} -> - case soptViewType of - Local -> handleLocal lsOpts - Remote -> handleRemote lsOpts - LsDependencies depOpts -> listDependenciesCmd False depOpts - LsStyles stylesOpts -> withConfig NoReexec $ listStylesCmd stylesOpts - --- | List the dependencies -listDependenciesCmd :: Bool -> ListDepsOpts -> RIO Runner () -listDependenciesCmd deprecated opts = do - when - deprecated - (logWarn - "DEPRECATED: Use ls dependencies instead. 
Will be removed in next major version.") - listDependencies opts - -lsViewLocalCmd :: OA.Mod OA.CommandFields LsView -lsViewLocalCmd = - OA.command - "local" - (OA.info (pure Local) (OA.progDesc "View local snapshot")) - -lsViewRemoteCmd :: OA.Mod OA.CommandFields LsView -lsViewRemoteCmd = - OA.command - "remote" - (OA.info (pure Remote) (OA.progDesc "View remote snapshot")) - --- | List stack's output styles + case lsOpts.lsCmds of + LsSnapshot sopt -> + case sopt.viewType of + Local -> handleLocal lsOpts + Remote -> withConfig NoReexec $ handleRemote lsOpts + LsGlobals globalsOpts -> withConfig NoReexec $ listGlobalsCmd globalsOpts + LsDependencies depOpts -> listDependencies depOpts + LsStyles stylesOpts -> withConfig NoReexec $ listStylesCmd stylesOpts + LsTools toolsOpts -> withConfig NoReexec $ listToolsCmd toolsOpts + +-- | List Stack's output styles listStylesCmd :: ListStylesOpts -> RIO Config () listStylesCmd opts = do - lc <- ask - -- This is the same test as is used in Stack.Types.Runner.withRunner - let useColor = view useColorL lc - styles = elems $ defaultStyles // stylesUpdate (view stylesUpdateL lc) - isComplex = not (coptBasic opts) - showSGR = isComplex && coptSGR opts - showExample = isComplex && coptExample opts && useColor - styleReports = L.map (styleReport showSGR showExample) styles - liftIO $ T.putStrLn $ T.intercalate (if isComplex then "\n" else ":") styleReports - where - styleReport :: Bool -> Bool -> StyleSpec -> Text - styleReport showSGR showExample (k, sgrs) = k <> "=" <> codes - <> (if showSGR then sgrsList else mempty) - <> (if showExample then example else mempty) - where - codes = T.intercalate ";" (L.map (fromString . show) $ - L.concatMap sgrToCode sgrs) - sgrsList = " [" <> T.intercalate ", " (L.map (fromString . 
show) sgrs) - <> "]" - example = " " <> ansi <> "Example" <> reset - ansi = fromString $ setSGRCode sgrs - reset = fromString $ setSGRCode [Reset] + lc <- ask + -- This is the same test as is used in Stack.Types.Runner.withRunner + let useColor = view useColorL lc + styles = elems $ defaultStyles // stylesUpdate (view stylesUpdateL lc) + isComplex = not opts.basic + showSGR = isComplex && opts.sgr + showExample = isComplex && opts.example && useColor + styleReports = L.map (styleReport showSGR showExample) styles + liftIO $ + T.putStrLn $ T.intercalate (if isComplex then "\n" else ":") styleReports + where + styleReport :: Bool -> Bool -> StyleSpec -> Text + styleReport showSGR showExample (k, sgrs) = k <> "=" <> codes + <> (if showSGR then sgrsList else mempty) + <> (if showExample then example else mempty) + where + codes = T.intercalate ";" (L.map (fromString . show) $ + L.concatMap sgrToCode sgrs) + sgrsList = " [" <> T.intercalate ", " (L.map (fromString . show) sgrs) + <> "]" + example = " " <> ansi <> "Example" <> reset + ansi = fromString $ setSGRCode sgrs + reset = fromString $ setSGRCode [Reset] + +-- | List Stack's installed tools, sorted (see instance of 'Ord' for t'Tool'). +listToolsCmd :: ListToolsOpts -> RIO Config () +listToolsCmd opts = do + localPrograms <- view $ configL . to (.localPrograms) + installed <- sort <$> listInstalled localPrograms + let wanted = case opts.filter of + [] -> installed + "ghc-git" -> [t | t@(ToolGhcGit _ _) <- installed] + pkgName -> filtered pkgName installed + liftIO $ mapM_ (putStrLn . 
toolString) wanted + where + filtered pkgName installed = Tool <$> + filterTools (mkPackageName pkgName) (const True) installed + +listGlobalsCmd :: ListGlobalsOpts -> RIO Config () +listGlobalsCmd opts = do + idents <- if opts.globalHints + then + withBuildConfig $ do + buildConfig <- view buildConfigL + globals <- globalsFromHints buildConfig.smWanted.compiler + pure $ map (uncurry PackageIdentifier) (Map.toList globals) + else + withDefaultEnvConfig $ do + envConfig <- ask + installMap <- toInstallMap envConfig.sourceMap + (_, globalDump, _, _) <- getInstalled installMap + pure $ L.sort $ map (.packageIdent) globalDump + forM_ idents $ \ident -> + prettyInfo $ fromString $ packageIdentifierString ident + +listDependencies :: ListDepsOpts -> RIO Runner () +listDependencies opts = do + let dotOpts = opts.dotOpts + (_, pkgs, resultGraph) <- createPrunedDependencyGraph dotOpts + liftIO $ case opts.format of + ListDepsTree treeOpts -> + T.putStrLn "Packages" + >> printTree treeOpts dotOpts 0 [] (treeRoots opts pkgs) resultGraph + ListDepsJSON -> printJSON pkgs resultGraph + ListDepsText textOpts listDepsTextFilters -> do + let resultGraph' = Map.filterWithKey p resultGraph + p k _ = + Set.notMember k (exclude (Set.toList pkgs) listDepsTextFilters) + void $ Map.traverseWithKey (go "" textOpts) (snd <$> resultGraph') + where + exclude :: [PackageName] -> [ListDepsTextFilter] -> Set PackageName + exclude locals = Set.fromList . 
exclude' locals + + exclude' :: [PackageName] -> [ListDepsTextFilter] -> [PackageName] + exclude' _ [] = [] + exclude' locals (f:fs) = case f of + FilterPackage pkgName -> pkgName : exclude' locals fs + FilterLocals -> locals <> exclude' locals fs + ListDepsConstraints -> do + let constraintOpts = ListDepsFormatOpts + { sep = " ==" + , license = False + } + T.putStrLn "constraints:" + void $ Map.traverseWithKey (go " , " constraintOpts) + (snd <$> resultGraph) + where + go prefix lineOpts name payload = + T.putStrLn $ prefix <> listDepsLine lineOpts name payload + +treeRoots :: ListDepsOpts -> Set PackageName -> Set PackageName +treeRoots opts projectPackages' = + let targets = opts.dotOpts.dotTargets + in if null targets + then projectPackages' + else Set.fromList $ map (mkPackageName . T.unpack) targets + +printTree :: + ListDepsFormatOpts + -> DotOpts + -> Int + -> [Int] + -> Set PackageName + -> DependencyGraph + -> IO () +printTree opts dotOpts depth remainingDepsCounts packages dependencyMap = + F.sequence_ $ Seq.mapWithIndex go (toSeq packages) + where + toSeq = Seq.fromList . Set.toList + go index name = + let newDepsCounts = remainingDepsCounts ++ [Set.size packages - index - 1] + -- TODO: Define the 'Nothing' behaviour, maybe pure an error? 
+ in whenJust (Map.lookup name dependencyMap) $ \(deps, payload) -> do + printTreeNode opts dotOpts depth newDepsCounts deps payload name + unless (Just depth == dotOpts.dependencyDepth) $ + printTree + opts + dotOpts + (depth + 1) + newDepsCounts + deps + dependencyMap + +printTreeNode :: + ListDepsFormatOpts + -> DotOpts + -> Int + -> [Int] + -> Set PackageName + -> DotPayload + -> PackageName + -> IO () +printTreeNode opts dotOpts depth remainingDepsCounts deps payload name = + let remainingDepth = fromMaybe 999 dotOpts.dependencyDepth - depth + hasDeps = not $ null deps + in T.putStrLn $ + treeNodePrefix "" remainingDepsCounts hasDeps remainingDepth <> " " <> + listDepsLine opts name payload + +treeNodePrefix :: Text -> [Int] -> Bool -> Int -> Text +treeNodePrefix t [] _ _ = t +treeNodePrefix t [0] True 0 = t <> "└──" +treeNodePrefix t [_] True 0 = t <> "├──" +treeNodePrefix t [0] True _ = t <> "└─┬" +treeNodePrefix t [_] True _ = t <> "├─┬" +treeNodePrefix t [0] False _ = t <> "└──" +treeNodePrefix t [_] False _ = t <> "├──" +treeNodePrefix t (0:ns) d remainingDepth = + treeNodePrefix (t <> " ") ns d remainingDepth +treeNodePrefix t (_:ns) d remainingDepth = + treeNodePrefix (t <> "│ ") ns d remainingDepth + +listDepsLine :: ListDepsFormatOpts -> PackageName -> DotPayload -> Text +listDepsLine opts name payload = + T.pack (packageNameString name) <> opts.sep <> + payloadText opts payload + +payloadText :: ListDepsFormatOpts -> DotPayload -> Text +payloadText opts payload = + if opts.license + then licenseText payload + else versionText payload + +printJSON :: Set PackageName -> DependencyGraph -> IO () +printJSON pkgs dependencyMap = + LBC8.putStrLn $ encode $ DependencyTree pkgs dependencyMap diff --git a/src/Stack/New.hs b/src/Stack/New.hs index c640b4b0bc..01cf2b888e 100644 --- a/src/Stack/New.hs +++ b/src/Stack/New.hs @@ -1,194 +1,422 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# 
LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} --- | Create new a new project directory populated with a basic working --- project. +{-| +Module : Stack.New +Description : Types and functions related to Stack's @new@ command. +License : BSD-3-Clause -module Stack.New - ( new - , NewOpts(..) - , TemplateName - , templatesHelp - ) where +Types and functions related to Stack's @new@ command. +-} -import Stack.Prelude -import Control.Monad.Trans.Writer.Strict +module Stack.New + ( NewOpts (..) + , TemplateName + , newCmd + , new + ) where + +import Control.Monad.Extra ( whenJust ) +import Control.Monad.Trans.Writer.Strict ( execWriterT ) import Data.Aeson as A +import qualified Data.Aeson.KeyMap as KeyMap import qualified Data.ByteString.Base64 as B64 -import Data.ByteString.Builder (lazyByteString) +import Data.ByteString.Builder ( lazyByteString ) import qualified Data.ByteString.Lazy as LB -import Data.Conduit -import Data.List +import Data.Conduit ( yield ) +import qualified Data.List as L import qualified Data.Map.Strict as M import qualified Data.Set as S import qualified Data.Text as T -import qualified Data.Text.Lazy as TL import qualified Data.Text.Encoding as T +import qualified Data.Text.Lazy as TL import qualified Data.Text.Lazy.Encoding as TLE -import Data.Time.Calendar -import Data.Time.Clock -import Network.HTTP.StackClient (VerifiedDownloadException (..), Request, HttpException, - getResponseBody, httpLbs, mkDownloadRequest, parseRequest, parseUrlThrow, - setForceDownload, setGithubHeaders, setRequestCheckStatus, verifiedDownloadWithProgress) -import Path +import Data.Time.Calendar ( toGregorian ) +import Data.Time.Clock ( getCurrentTime, utctDay ) +import Network.HTTP.Client ( applyBasicAuth ) +import Network.HTTP.StackClient + ( HttpException (..), 
HttpExceptionContent (..) + , Response (..), VerifiedDownloadException (..) + , mkDownloadRequest, notFound404, parseRequest + , setForceDownload, setRequestCheckStatus + , verifiedDownloadWithProgress + ) +import Path ( (), dirname, parent, parseRelDir, parseRelFile ) import Path.IO + ( doesDirExist, doesFileExist, ensureDir, getCurrentDir ) +import RIO.Process ( proc, runProcess_, withWorkingDir ) import Stack.Constants -import Stack.Constants.Config -import Stack.Types.Config + ( allWiredInPackages, altGitHubTokenEnvVar, backupUrlRelPath + , gitHubBasicAuthType, gitHubTokenEnvVar, stackDotYaml + ) +import Stack.Constants.Config ( templatesDir ) +import Stack.Init ( InitOpts (..), initProject ) +import Stack.Prelude +import Stack.Runners + ( ShouldReexec (..), withConfig, withGlobalProject ) +import Stack.Types.Config ( Config (..), HasConfig (..) ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.Runner ( Runner, globalOptsL ) +import Stack.Types.SCM ( SCM (..) ) import Stack.Types.TemplateName -import qualified RIO.HashMap as HM -import RIO.Process + ( RepoService (..), RepoTemplatePath (..), TemplateName + , TemplatePath (..), defaultTemplateName + , parseRepoPathWithService, templateName, templatePath + ) +import System.Environment ( lookupEnv ) import qualified Text.Mustache as Mustache import qualified Text.Mustache.Render as Mustache import Text.ProjectTemplate + ( ProjectTemplateException, receiveMem, unpackTemplate ) + +-------------------------------------------------------------------------------- +-- Exceptions + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.New" module. +data NewPrettyException + = ProjectDirAlreadyExists !String !(Path Abs Dir) + | DownloadTemplateFailed !Text !String !VerifiedDownloadException + | forall b. LoadTemplateFailed !TemplateName !(Path b File) + | forall b. 
ExtractTemplateFailed !TemplateName !(Path b File) !String + | TemplateInvalid !TemplateName !StyleDoc + | MagicPackageNameInvalid !String + | AttemptedOverwrites !Text ![Path Abs File] + +deriving instance Show NewPrettyException + +instance Pretty NewPrettyException where + pretty (ProjectDirAlreadyExists name path) = + "[S-2135]" + <> line + <> fillSep + [ flow "Stack failed to create a new directory for project" + , style Current (fromString name) <> "," + , flow "as the directory" + , pretty path + , flow "already exists." + ] + pretty (DownloadTemplateFailed name url err) = + "[S-1688]" + <> line + <> fillSep + [ flow "Stack failed to download the template" + , style Current (fromString . T.unpack $ name) + , "from" + , style Url (fromString url) <> "." + ] + <> blankLine + <> ( if isNotFound + then flow "Please check that the template exists at that \ + \location." + <> blankLine + else mempty + ) + <> fillSep + [ flow "While downloading, Stack encountered" + , msg + ] + where + (msg, isNotFound) = case err of + DownloadHttpError (HttpExceptionRequest req content) -> + let msg' = flow "an HTTP error. Stack made the request:" + <> blankLine + <> string (show req) + <> blankLine + <> flow "and the content of the error was:" + <> blankLine + <> string (show content) + isNotFound404 = case content of + StatusCodeException res _ -> + responseStatus res == notFound404 + _ -> False + in (msg', isNotFound404) + DownloadHttpError (InvalidUrlException url' reason) -> + let msg' = fillSep + [ flow "an HTTP error. The URL" + , style Url (fromString url') + , flow "was considered invalid because" + , fromString reason <> "." 
+ ] + in (msg', False) + _ -> let msg' = flow "the following error:" + <> blankLine + <> fromString (displayException err) + in (msg', False) + pretty (LoadTemplateFailed name path) = + "[S-3650]" + <> line + <> fillSep + [ flow "Stack failed to load the downloaded template" + , style Current (fromString $ T.unpack $ templateName name) + , "from" + , pretty path <> "." + ] + pretty (ExtractTemplateFailed name path err) = + "[S-9582]" + <> line + <> fillSep + [ flow "Stack failed to extract the loaded template" + , style Current (fromString $ T.unpack $ templateName name) + , "at" + , pretty path <> "." + ] + <> blankLine + <> flow "While extracting, Stack encountered the following error:" + <> blankLine + <> string err + pretty (TemplateInvalid name why) = + "[S-9490]" + <> line + <> fillSep + [ flow "Stack failed to use the template" + , style Current (fromString $ T.unpack $ templateName name) <> "," + , "as" + , why + ] + pretty (MagicPackageNameInvalid name) = + "[S-5682]" + <> line + <> fillSep + [ flow "Stack declined to create a new directory for project" + , style Current (fromString name) <> "," + , flow "as package" + , fromString name + , flow "is 'wired-in' to a version of GHC. That can cause build \ + \errors." + ] + <> blankLine + <> fillSep + ( flow "The names blocked by Stack are:" + : mkNarrativeList Nothing False + (map fromPackageName sortedWiredInPackages :: [StyleDoc]) + ) + where + sortedWiredInPackages = L.sort $ S.toList allWiredInPackages + pretty (AttemptedOverwrites name fps) = + "[S-3113]" + <> line + <> fillSep + [ flow "Stack declined to apply the template" + , style Current (fromString . T.unpack $ name) <> "," + , flow "as it would create files that already exist." + ] + <> blankLine + <> flow "The template would create the following existing files:" + <> line + <> bulletedList (map (style File . pretty) fps) + <> blankLine + <> fillSep + [ "Use the" + , style Shell "--force" + , "flag to ignore this and overwrite those files." 
+ ] + +instance Exception NewPrettyException -------------------------------------------------------------------------------- -- Main project creation --- | Options for creating a new project. +-- | Type representing command line options for the @stack new@ command (other +-- than those applicable also to the @stack init@ command). data NewOpts = NewOpts - { newOptsProjectName :: PackageName + { projectName :: PackageName -- ^ Name of the project to create. - , newOptsCreateBare :: Bool + , createBare :: Bool -- ^ Whether to create the project without a directory. - , newOptsTemplate :: Maybe TemplateName + , init :: Bool + -- ^ Whether to initialise the project for use with Stack. + , template :: Maybe TemplateName -- ^ Name of the template to use. - , newOptsNonceParams :: Map Text Text + , nonceParams :: Map Text Text -- ^ Nonce parameters specified just for this invocation. - } + } + +-- | Function underlying the @stack new@ command. Create a project directory +-- structure and initialize the Stack config. +newCmd :: (NewOpts, InitOpts) -> RIO Runner () +newCmd (newOpts, initOpts) = + withGlobalProject $ withConfig YesReexec $ do + dir <- new newOpts initOpts.forceOverwrite + exists <- doesFileExist $ dir stackDotYaml + when (newOpts.init && (initOpts.forceOverwrite || not exists)) $ do + go <- view globalOptsL + initProject dir initOpts go.snapshot -- | Create a new project with the given options. 
new :: HasConfig env => NewOpts -> Bool -> RIO env (Path Abs Dir) new opts forceOverwrite = do - when (newOptsProjectName opts `elem` wiredInPackages) $ - throwM $ Can'tUseWiredInName (newOptsProjectName opts) - pwd <- getCurrentDir - absDir <- if bare then return pwd - else do relDir <- parseRelDir (packageNameString project) - liftM (pwd ) (return relDir) - exists <- doesDirExist absDir - configTemplate <- view $ configL.to configDefaultTemplate - let template = fromMaybe defaultTemplateName $ asum [ cliOptionTemplate - , configTemplate - ] - if exists && not bare - then throwM (AlreadyExists absDir) - else do - templateText <- loadTemplate template (logUsing absDir template) - files <- - applyTemplate - project - template - (newOptsNonceParams opts) - absDir - templateText - when (not forceOverwrite && bare) $ checkForOverwrite (M.keys files) - writeTemplateFiles files - runTemplateInits absDir - return absDir - where - cliOptionTemplate = newOptsTemplate opts - project = newOptsProjectName opts - bare = newOptsCreateBare opts - logUsing absDir template templateFrom = - let loading = case templateFrom of - LocalTemp -> "Loading local" - RemoteTemp -> "Downloading" - in - logInfo - (loading <> " template \"" <> display (templateName template) <> - "\" to create project \"" <> - fromString (packageNameString project) <> - "\" in " <> - if bare then "the current directory" - else fromString (toFilePath (dirname absDir)) <> - " ...") + when (project `elem` allWiredInPackages) $ + prettyThrowM $ MagicPackageNameInvalid projectName + pwd <- getCurrentDir + absDir <- if bare + then pure pwd + else do relDir <- parseRelDir (packageNameString project) + pure (pwd relDir) + exists <- doesDirExist absDir + configTemplate <- view $ configL . 
to (.defaultTemplate) + let template = fromMaybe defaultTemplateName $ asum [ cliOptionTemplate + , configTemplate + ] + if exists && not bare + then prettyThrowM $ ProjectDirAlreadyExists projectName absDir + else do + templateText <- loadTemplate template (logUsing absDir template) + files <- + applyTemplate + project + template + opts.nonceParams + absDir + templateText + when (not forceOverwrite && bare) $ + checkForOverwrite (templateName template) (M.keys files) + writeTemplateFiles files + runTemplateInits absDir + pure absDir + where + cliOptionTemplate = opts.template + project = opts.projectName + projectName = packageNameString project + bare = opts.createBare + logUsing absDir template templateFrom = + let loading = case templateFrom of + LocalTemp -> flow "Loading local" + RemoteTemp -> "Downloading" + in prettyInfo + ( fillSep + [ loading + , "template" + , style + Current + (fromString $ T.unpack $ templateName template) + , flow "to create project" + , style Current (fromString projectName) + , "in" + , ( if bare + then flow "the current directory" + else fillSep + [ "directory" + , pretty $ dirname absDir + ] + ) + <> "..." + ] + ) data TemplateFrom = LocalTemp | RemoteTemp -- | Download and read in a template's text content. -loadTemplate - :: forall env. HasConfig env - => TemplateName - -> (TemplateFrom -> RIO env ()) - -> RIO env Text +loadTemplate :: + forall env. 
HasConfig env + => TemplateName + -> (TemplateFrom -> RIO env ()) + -> RIO env Text loadTemplate name logIt = do - templateDir <- view $ configL.to templatesDir - case templatePath name of - AbsPath absFile -> logIt LocalTemp >> loadLocalFile absFile eitherByteStringToText - UrlPath s -> do - let settings = asIsFromUrl s - downloadFromUrl settings templateDir - RelPath rawParam relFile -> - catch - (do f <- loadLocalFile relFile eitherByteStringToText - logIt LocalTemp - return f) - (\(e :: NewException) -> do - case relSettings rawParam of - Just settings -> do - req <- parseRequest (tplDownloadUrl settings) - let extract = tplExtract settings - downloadTemplate req extract (templateDir relFile) - Nothing -> throwM e - ) - RepoPath rtp -> do - let settings = settingsFromRepoTemplatePath rtp - downloadFromUrl settings templateDir - - where - loadLocalFile :: Path b File -> (ByteString -> Either String Text) -> RIO env Text - loadLocalFile path extract = do - logDebug ("Opening local template: \"" <> fromString (toFilePath path) - <> "\"") - exists <- doesFileExist path - if exists - then do - bs <- readFileBinary (toFilePath path) --readFileUtf8 (toFilePath path) - case extract bs of - Left err -> do - logWarn $ "Template extraction error: " <> display (T.pack err) - throwM (FailedToLoadTemplate name (toFilePath path)) - Right template -> - pure template - else throwM (FailedToLoadTemplate name (toFilePath path)) - relSettings :: String -> Maybe TemplateDownloadSettings - relSettings req = do - rtp <- parseRepoPathWithService defaultRepoService (T.pack req) - pure (settingsFromRepoTemplatePath rtp) - downloadFromUrl :: TemplateDownloadSettings -> Path Abs Dir -> RIO env Text - downloadFromUrl settings templateDir = do - let url = tplDownloadUrl settings - req <- parseRequest url - let rel = fromMaybe backupUrlRelPath (parseRelFile url) - downloadTemplate req (tplExtract settings) (templateDir rel) - downloadTemplate :: Request -> (ByteString -> Either String Text) 
-> Path Abs File -> RIO env Text - downloadTemplate req extract path = do - let dReq = setForceDownload True $ mkDownloadRequest (setRequestCheckStatus req) - logIt RemoteTemp - catch - (void $ do - verifiedDownloadWithProgress dReq path (T.pack $ toFilePath path) Nothing + templateDir <- view $ configL . to templatesDir + case templatePath name of + AbsPath absFile -> + logIt LocalTemp >> loadLocalFile absFile eitherByteStringToText + UrlPath s -> do + let settings = asIsFromUrl s + downloadFromUrl settings templateDir + RelPath rawParam relFile -> + catch + (do f <- loadLocalFile relFile eitherByteStringToText + logIt LocalTemp + pure f) + ( \(e :: PrettyException) -> do + settings <- fromMaybe (throwM e) (relSettings rawParam) + let url = settings.downloadUrl + mBasicAuth = settings.basicAuth + extract = settings.extract + downloadTemplate url mBasicAuth extract (templateDir relFile) + ) + RepoPath rtp -> do + settings <- settingsFromRepoTemplatePath rtp + downloadFromUrl settings templateDir + + where + loadLocalFile :: + Path b File + -> (ByteString -> Either String Text) + -> RIO env Text + loadLocalFile path extract = do + logDebug $ + "Opening local template: \"" + <> fromString (toFilePath path) + <> "\"" + exists <- doesFileExist path + if exists + then do + bs <- readFileBinary (toFilePath path) --readFileUtf8 (toFilePath path) + case extract bs of + Left err -> prettyThrowM $ ExtractTemplateFailed name path err + Right template -> + pure template + else prettyThrowM $ LoadTemplateFailed name path + + relSettings :: String -> Maybe (RIO env TemplateDownloadSettings) + relSettings req = do + rtp <- parseRepoPathWithService defaultRepoService (T.pack req) + pure (settingsFromRepoTemplatePath rtp) + + downloadFromUrl :: TemplateDownloadSettings -> Path Abs Dir -> RIO env Text + downloadFromUrl settings templateDir = do + let url = settings.downloadUrl + mBasicAuth = settings.basicAuth + rel = fromMaybe backupUrlRelPath (parseRelFile url) + downloadTemplate 
url mBasicAuth settings.extract (templateDir rel) + + downloadTemplate :: + String + -> Maybe (ByteString, ByteString) + -- ^ Optional HTTP \'Basic\' authentication (type, credentials) + -> (ByteString -> Either String Text) + -> Path Abs File + -> RIO env Text + downloadTemplate url mBasicAuth extract path = do + req <- parseRequest url + let authReq = maybe id (uncurry applyBasicAuth) mBasicAuth req + dReq = setForceDownload True $ + mkDownloadRequest (setRequestCheckStatus authReq) + logIt RemoteTemp + catch + ( do let label = T.pack $ toFilePath path + res <- verifiedDownloadWithProgress dReq path label Nothing + if res + then logStickyDone ("Downloaded " <> display label <> ".") + else logStickyDone "Already downloaded." + ) + (useCachedVersionOrThrow url path) + loadLocalFile path extract + + useCachedVersionOrThrow :: + String + -> Path Abs File + -> VerifiedDownloadException + -> RIO env () + useCachedVersionOrThrow url path exception = do + exists <- doesFileExist path + + if exists + then + prettyWarn + ( flow "Tried to download the template but an error was \ + \found. Using cached local version. It may not be the \ + \most recent version though." ) - (useCachedVersionOrThrow path) - - loadLocalFile path extract - useCachedVersionOrThrow :: Path Abs File -> VerifiedDownloadException -> RIO env () - useCachedVersionOrThrow path exception = do - exists <- doesFileExist path - - if exists - then do logWarn "Tried to download the template but an error was found." - logWarn "Using cached local version. It may not be the most recent version though." - else throwM (FailedToDownloadTemplate name exception) + else + prettyThrowM $ DownloadTemplateFailed (templateName name) url exception +-- | Type representing settings for the download of Stack project templates. 
data TemplateDownloadSettings = TemplateDownloadSettings - { tplDownloadUrl :: String - , tplExtract :: ByteString -> Either String Text + { downloadUrl :: String + , basicAuth :: Maybe (ByteString, ByteString) + -- ^ Optional HTTP \'Basic\' authentication (type, credentials) + , extract :: ByteString -> Either String Text } eitherByteStringToText :: ByteString -> Either String Text @@ -196,235 +424,271 @@ eitherByteStringToText = mapLeft show . decodeUtf8' asIsFromUrl :: String -> TemplateDownloadSettings asIsFromUrl url = TemplateDownloadSettings - { tplDownloadUrl = url - , tplExtract = eitherByteStringToText + { downloadUrl = url + , basicAuth = Nothing + , extract = eitherByteStringToText } --- | Construct a URL for downloading from a repo. -settingsFromRepoTemplatePath :: RepoTemplatePath -> TemplateDownloadSettings -settingsFromRepoTemplatePath (RepoTemplatePath Github user name) = - -- T.concat ["https://raw.githubusercontent.com", "/", user, "/stack-templates/master/", name] - TemplateDownloadSettings - { tplDownloadUrl = concat ["https://api.github.com/repos/", T.unpack user, "/stack-templates/contents/", T.unpack name] - , tplExtract = \bs -> do - decodedJson <- eitherDecode (LB.fromStrict bs) - case decodedJson of - Object o | Just (String content) <- HM.lookup "content" o -> do - let noNewlines = T.filter (/= '\n') - bsContent <- B64.decode $ T.encodeUtf8 (noNewlines content) - mapLeft show $ decodeUtf8' bsContent +-- | Construct settings for downloading a Stack project template from a +-- repository. +settingsFromRepoTemplatePath :: + HasTerm env + => RepoTemplatePath + -> RIO env TemplateDownloadSettings +settingsFromRepoTemplatePath (RepoTemplatePath GitHub user name) = do + let basicAuthMsg token = prettyInfoL + [ flow "Using content of" + , fromString token + , flow " environment variable to authenticate GitHub REST API." 
+ ] + mBasicAuth <- do + wantGitHubToken <- liftIO $ fromMaybe "" <$> lookupEnv gitHubTokenEnvVar + if not (L.null wantGitHubToken) + then do + basicAuthMsg gitHubTokenEnvVar + pure $ Just (gitHubBasicAuthType, fromString wantGitHubToken) + else do + wantAltGitHubToken <- + liftIO $ fromMaybe "" <$> lookupEnv altGitHubTokenEnvVar + if not (L.null wantAltGitHubToken) + then do + basicAuthMsg altGitHubTokenEnvVar + pure $ Just (gitHubBasicAuthType, fromString wantAltGitHubToken) + else pure Nothing + pure TemplateDownloadSettings + { downloadUrl = concat + [ "https://api.github.com/repos/" + , T.unpack user + , "/stack-templates/contents/" + , T.unpack name + ] + , basicAuth = mBasicAuth + , extract = \bs -> + eitherDecode (LB.fromStrict bs) >>= \case + Object o | Just (String content) <- KeyMap.lookup "content" o -> do + let noNewlines = T.filter (/= '\n') + bsContent <- B64.decode $ T.encodeUtf8 (noNewlines content) + mapLeft show $ decodeUtf8' bsContent _ -> - fail "Couldn't parse GitHub response as a JSON object with a \"content\" field" + Left "Couldn't parse GitHub response as a JSON object with a \ + \\"content\" field" } - -settingsFromRepoTemplatePath (RepoTemplatePath Gitlab user name) = - asIsFromUrl $ concat ["https://gitlab.com", "/", T.unpack user, "/stack-templates/raw/master/", T.unpack name] -settingsFromRepoTemplatePath (RepoTemplatePath Bitbucket user name) = - asIsFromUrl $ concat ["https://bitbucket.org", "/", T.unpack user, "/stack-templates/raw/master/", T.unpack name] +settingsFromRepoTemplatePath (RepoTemplatePath GitLab user name) = pure $ + asIsFromUrl $ concat + [ "https://gitlab.com" + , "/" + , T.unpack user + , "/stack-templates/raw/master/" + , T.unpack name + ] +settingsFromRepoTemplatePath (RepoTemplatePath Bitbucket user name) = pure $ + asIsFromUrl $ concat + [ "https://bitbucket.org" + , "/" + , T.unpack user + , "/stack-templates/raw/master/" + , T.unpack name + ] +settingsFromRepoTemplatePath (RepoTemplatePath Codeberg user 
name) = pure $ + asIsFromUrl $ concat + [ "https://codeberg.org" + , "/" + , T.unpack user + , "/stack-templates/raw/" + , T.unpack name + ] -- | Apply and unpack a template into a directory. -applyTemplate - :: HasConfig env - => PackageName - -> TemplateName - -> Map Text Text - -> Path Abs Dir - -> Text - -> RIO env (Map (Path Abs File) LB.ByteString) +applyTemplate :: + HasConfig env + => PackageName + -> TemplateName + -> Map Text Text + -> Path Abs Dir + -> Text + -> RIO env (Map (Path Abs File) LB.ByteString) applyTemplate project template nonceParams dir templateText = do - config <- view configL - currentYear <- do - now <- liftIO getCurrentTime - let (year, _, _) = toGregorian (utctDay now) - return $ T.pack . show $ year - let context = M.unions [nonceParams, nameParams, configParams, yearParam] - where - nameAsVarId = T.replace "-" "_" $ T.pack $ packageNameString project - nameAsModule = T.filter (/= ' ') $ T.toTitle $ T.replace "-" " " $ T.pack $ packageNameString project - nameParams = M.fromList [ ("name", T.pack $ packageNameString project) - , ("name-as-varid", nameAsVarId) - , ("name-as-module", nameAsModule) ] - configParams = configTemplateParams config - yearParam = M.singleton "year" currentYear - files :: Map FilePath LB.ByteString <- - catch (execWriterT $ runConduit $ - yield (T.encodeUtf8 templateText) .| - unpackTemplate receiveMem id - ) - (\(e :: ProjectTemplateException) -> - throwM (InvalidTemplate template (show e))) - when (M.null files) $ - throwM (InvalidTemplate template "Template does not contain any files") - - let isPkgSpec f = ".cabal" `isSuffixOf` f || f == "package.yaml" - unless (any isPkgSpec . M.keys $ files) $ - throwM (InvalidTemplate template "Template does not contain a .cabal \ - \or package.yaml file") - - -- Apply Mustache templating to a single file within the project - -- template. 
- let applyMustache bytes - -- Workaround for performance problems with mustache and - -- large files, applies to Yesod templates with large - -- bootstrap CSS files. See - -- https://github.com/commercialhaskell/stack/issues/4133. - | LB.length bytes < 50000 - , Right text <- TLE.decodeUtf8' bytes = do - let etemplateCompiled = Mustache.compileTemplate (T.unpack (templateName template)) $ TL.toStrict text - templateCompiled <- case etemplateCompiled of - Left e -> throwM $ InvalidTemplate template (show e) - Right t -> return t - let (substitutionErrors, applied) = Mustache.checkedSubstitute templateCompiled context - missingKeys = S.fromList $ concatMap onlyMissingKeys substitutionErrors - unless (S.null missingKeys) - (logInfo ("\n" <> displayShow (MissingParameters project template missingKeys (configUserConfigPath config)) <> "\n")) - pure $ LB.fromStrict $ encodeUtf8 applied - - -- Too large or too binary - | otherwise = pure bytes - - liftM - M.fromList - (mapM - (\(fpOrig,bytes) -> - do -- Apply the mustache template to the filenames - -- as well, so that we can have file names - -- depend on the project name. - fp <- applyMustache $ TLE.encodeUtf8 $ TL.pack fpOrig - path <- parseRelFile $ TL.unpack $ TLE.decodeUtf8 fp - bytes' <- applyMustache bytes - return (dir path, bytes')) - (M.toList files)) - where - onlyMissingKeys (Mustache.VariableNotFound ks) = map T.unpack ks - onlyMissingKeys _ = [] + config <- view configL + currentYear <- do + now <- liftIO getCurrentTime + let (year, _, _) = toGregorian (utctDay now) + pure $ T.pack . 
show $ year + let context = M.unions [nonceParams, nameParams, configParams, yearParam] + where + nameAsVarId = T.replace "-" "_" $ T.pack $ packageNameString project + nameAsModule = T.filter (/= ' ') $ T.toTitle $ T.replace "-" " " $ + T.pack $ packageNameString project + nameParams = M.fromList [ ("name", T.pack $ packageNameString project) + , ("name-as-varid", nameAsVarId) + , ("name-as-module", nameAsModule) ] + configParams = config.templateParams + yearParam = M.singleton "year" currentYear + files :: Map FilePath LB.ByteString <- + catch + ( execWriterT $ runConduit $ + yield (T.encodeUtf8 templateText) .| + unpackTemplate receiveMem id + ) + ( \(e :: ProjectTemplateException) -> + prettyThrowM $ TemplateInvalid template (string $ displayException e) + ) + when (M.null files) $ + prettyThrowM $ TemplateInvalid + template + (flow "the template does not contain any files.") + + let isPkgSpec f = ".cabal" `L.isSuffixOf` f || "package.yaml" `L.isSuffixOf` f + unless (any isPkgSpec . M.keys $ files) $ + prettyThrowM $ TemplateInvalid + template + (flow "the template does not contain a Cabal or package.yaml file.") + + -- Apply Mustache templating to a single file within the project template. + let applyMustache bytes + -- Workaround for performance problems with mustache and + -- large files, applies to Yesod templates with large + -- bootstrap CSS files. See + -- https://github.com/commercialhaskell/stack/issues/4133. + | LB.length bytes < 50000 + , Right text <- TLE.decodeUtf8' bytes = do + let etemplateCompiled = + Mustache.compileTemplate (T.unpack (templateName template)) $ TL.toStrict text + templateCompiled <- case etemplateCompiled of + Left e -> prettyThrowM $ TemplateInvalid + template + ( flow "Stack encountered the following error:" + <> blankLine + -- Text.Parsec.Error.ParseError is not an instance + -- of Control.Exception. 
+ <> string (show e) + ) + Right t -> pure t + let (substitutionErrors, applied) = + Mustache.checkedSubstitute templateCompiled context + missingKeys = + S.fromList $ concatMap onlyMissingKeys substitutionErrors + pure (LB.fromStrict $ encodeUtf8 applied, missingKeys) + + -- Too large or too binary + | otherwise = pure (bytes, S.empty) + + -- Accumulate any missing keys as the file is processed + processFile mks (fpOrig, bytes) = do + -- Apply the mustache template to the filenames as well, so that we + -- can have file names depend on the project name. + (fp, mks1) <- applyMustache $ TLE.encodeUtf8 $ TL.pack fpOrig + path <- parseRelFile $ TL.unpack $ TLE.decodeUtf8 fp + (bytes', mks2) <- applyMustache bytes + pure (mks <> mks1 <> mks2, (dir path, bytes')) + + (missingKeys, results) <- mapAccumLM processFile S.empty (M.toList files) + unless (S.null missingKeys) $ + prettyNote $ + missingParameters + missingKeys + config.userGlobalConfigFile + pure $ M.fromList results + where + onlyMissingKeys (Mustache.VariableNotFound ks) = map T.unpack ks + onlyMissingKeys _ = [] + + mapAccumLM :: Monad m => (a -> b -> m(a, c)) -> a -> [b] -> m(a, [c]) + mapAccumLM _ a [] = pure (a, []) + mapAccumLM f a (x:xs) = do + (a', c) <- f a x + (a'', cs) <- mapAccumLM f a' xs + pure (a'', c:cs) + + missingParameters :: Set String -> Path Abs File -> StyleDoc + missingParameters missingKeys userConfigPath = + fillSep + ( flow "The following parameters were needed by the template but \ + \not provided:" + : mkNarrativeList Nothing False + (map toStyleDoc (S.toList missingKeys)) + ) + <> blankLine + <> fillSep + [ flow "You can provide them in Stack's global configuration file" + , "(" <> pretty userConfigPath <> ")" + , "like this:" + ] + <> blankLine + <> "templates:" + <> line + <> " params:" + <> line + <> vsep + ( map + (\key -> " " <> fromString key <> ": value") + (S.toList missingKeys) + ) + <> blankLine + <> flow "Or you can pass each one on the command line as parameters \ + 
\like this:" + <> blankLine + <> style Shell + ( fillSep + [ flow "stack new" + , fromPackageName project + , fromString $ T.unpack (templateName template) + , hsep $ + map + ( \key -> + fillSep [ "-p" + , "\"" <> fromString key <> ":value\"" + ] + ) + (S.toList missingKeys) + ] + ) + <> line + where + toStyleDoc :: String -> StyleDoc + toStyleDoc = fromString -- | Check if we're going to overwrite any existing files. -checkForOverwrite :: (MonadIO m, MonadThrow m) => [Path Abs File] -> m () -checkForOverwrite files = do - overwrites <- filterM doesFileExist files - unless (null overwrites) $ throwM (AttemptedOverwrites overwrites) +checkForOverwrite :: + (MonadIO m, MonadThrow m) + => Text + -> [Path Abs File] + -> m () +checkForOverwrite name files = do + overwrites <- filterM doesFileExist files + unless (null overwrites) $ + prettyThrowM $ AttemptedOverwrites name overwrites -- | Write files to the new project directory. -writeTemplateFiles - :: MonadIO m - => Map (Path Abs File) LB.ByteString -> m () +writeTemplateFiles :: + MonadIO m + => Map (Path Abs File) LB.ByteString + -> m () writeTemplateFiles files = - liftIO $ - forM_ - (M.toList files) - (\(fp,bytes) -> - do ensureDir (parent fp) - writeBinaryFileAtomic fp $ lazyByteString bytes) + liftIO $ + forM_ + (M.toList files) + (\(fp,bytes) -> + do ensureDir (parent fp) + writeBinaryFileAtomic fp $ lazyByteString bytes) -- | Run any initialization functions, such as Git. -runTemplateInits - :: HasConfig env - => Path Abs Dir - -> RIO env () +runTemplateInits :: HasConfig env => Path Abs Dir -> RIO env () runTemplateInits dir = do - config <- view configL - case configScmInit config of - Nothing -> return () - Just Git -> - withWorkingDir (toFilePath dir) $ - catchAny (proc "git" ["init"] runProcess_) - (\_ -> logInfo "git init failed to run, ignoring ...") - --- | Display help for the templates command. 
-templatesHelp :: HasLogFunc env => RIO env () -templatesHelp = do - let url = defaultTemplatesHelpUrl - req <- liftM setGithubHeaders (parseUrlThrow url) - resp <- httpLbs req `catch` (throwM . FailedToDownloadTemplatesHelp) - case decodeUtf8' $ LB.toStrict $ getResponseBody resp of - Left err -> throwM $ BadTemplatesHelpEncoding url err - Right txt -> logInfo $ display txt + config <- view configL + whenJust config.scmInit $ \Git -> + withWorkingDir (toFilePath dir) $ + catchAny + (proc "git" ["init"] runProcess_) + ( \_ -> prettyWarnL + [ flow "Stack failed to run a" + , style Shell (flow "git init") + , flow "command. Ignoring..." + ] + ) -------------------------------------------------------------------------------- -- Defaults -- | The default service to use to download templates. defaultRepoService :: RepoService -defaultRepoService = Github - --- | Default web URL to get the `stack templates` help output. -defaultTemplatesHelpUrl :: String -defaultTemplatesHelpUrl = - "https://raw.githubusercontent.com/commercialhaskell/stack-templates/master/STACK_HELP.md" - --------------------------------------------------------------------------------- --- Exceptions - --- | Exception that might occur when making a new project. 
-data NewException - = FailedToLoadTemplate !TemplateName - !FilePath - | FailedToDownloadTemplate !TemplateName - !VerifiedDownloadException - | AlreadyExists !(Path Abs Dir) - | MissingParameters !PackageName !TemplateName !(Set String) !(Path Abs File) - | InvalidTemplate !TemplateName !String - | AttemptedOverwrites [Path Abs File] - | FailedToDownloadTemplatesHelp !HttpException - | BadTemplatesHelpEncoding - !String -- URL it's downloaded from - !UnicodeException - | Can'tUseWiredInName !PackageName - deriving (Typeable) - -instance Exception NewException - -instance Show NewException where - show (FailedToLoadTemplate name path) = - "Failed to load download template " <> T.unpack (templateName name) <> - " from " <> - path - show (FailedToDownloadTemplate name (DownloadHttpError httpError)) = - "There was an unexpected HTTP error while downloading template " <> - T.unpack (templateName name) <> ": " <> show httpError - show (FailedToDownloadTemplate name _) = - "Failed to download template " <> T.unpack (templateName name) <> - ": unknown reason" - - show (AlreadyExists path) = - "Directory " <> toFilePath path <> " already exists. Aborting." - show (MissingParameters name template missingKeys userConfigPath) = - intercalate - "\n" - [ "The following parameters were needed by the template but not provided: " <> - intercalate ", " (S.toList missingKeys) - , "You can provide them in " <> - toFilePath userConfigPath <> - ", like this:" - , "templates:" - , " params:" - , intercalate - "\n" - (map - (\key -> - " " <> key <> ": value") - (S.toList missingKeys)) - , "Or you can pass each one as parameters like this:" - , "stack new " <> packageNameString name <> " " <> - T.unpack (templateName template) <> - " " <> - unwords - (map - (\key -> - "-p \"" <> key <> ":value\"") - (S.toList missingKeys))] - show (InvalidTemplate name why) = - "The template \"" <> T.unpack (templateName name) <> - "\" is invalid and could not be used. 
" <> - "The error was: " <> why - show (AttemptedOverwrites fps) = - "The template would create the following files, but they already exist:\n" <> - unlines (map ((" " ++) . toFilePath) fps) <> - "Use --force to ignore this, and overwite these files." - show (FailedToDownloadTemplatesHelp ex) = - "Failed to download `stack templates` help. The HTTP error was: " <> show ex - show (BadTemplatesHelpEncoding url err) = - "UTF-8 decoding error on template info from\n " <> url <> "\n\n" <> show err - show (Can'tUseWiredInName name) = - "The name \"" <> packageNameString name <> "\" is used by GHC wired-in packages, and so shouldn't be used as a package name" +defaultRepoService = GitHub diff --git a/src/Stack/Nix.hs b/src/Stack/Nix.hs index 81e383d7c1..666280052e 100644 --- a/src/Stack/Nix.hs +++ b/src/Stack/Nix.hs @@ -1,135 +1,176 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Nix +Description : Run commands in a nix-shell. +License : BSD-3-Clause + +Run commands in a nix-shell. 
+-} --- | Run commands in a nix-shell module Stack.Nix - (nixCmdName - ,nixHelpOptName - ,runShellAndExit + ( nixCmdName + , nixHelpOptName + , runShellAndExit ) where -import Stack.Prelude import qualified Data.Text as T -import Data.Version (showVersion) -import Path.IO -import qualified Paths_stack as Meta -import Stack.Config (getInContainer, withBuildConfig) -import Stack.Config.Nix (nixCompiler, nixCompilerVersion) -import Stack.Constants (platformVariantEnvVar,inNixShellEnvVar,inContainerEnvVar) +import Path.IO ( resolveFile ) +import RIO.Process ( exec, processContextL ) +import Stack.Config ( getInContainer, withBuildConfig ) +import Stack.Config.Nix ( nixCompiler, nixCompilerVersion ) +import Stack.Constants + ( inContainerEnvVar, inNixShellEnvVar + , platformVariantEnvVar + ) +import Stack.Prelude +import Stack.Types.BuildConfig ( wantedCompilerVersionL ) import Stack.Types.Config -import Stack.Types.Docker -import Stack.Types.Nix -import System.Environment (getArgs,getExecutablePath,lookupEnv) -import qualified System.FilePath as F -import RIO.Process (processContextL, exec) + ( Config (..), HasConfig (..), configProjectRoot ) +import Stack.Types.Docker ( reExecArgName ) +import Stack.Types.Runner ( viewExecutablePath ) +import Stack.Types.Nix ( NixOpts (..) ) +import Stack.Types.Version ( showStackVersion ) +import System.Environment ( getArgs, lookupEnv ) +import qualified System.FilePath as F + +-- | Type representing \'pretty\' exceptions thrown by functions exported by +-- the "Stack.Nix" module. +data NixPrettyException + = CannotDetermineProjectRoot + -- ^ Can't determine the project root (location of the shell file if any). + deriving Show + +instance Pretty NixPrettyException where + pretty CannotDetermineProjectRoot = + "[S-7384]" + <> line + <> flow "Cannot determine project root directory." +instance Exception NixPrettyException + +-- | Execute @nix-shell@, replacing the current process. 
runShellAndExit :: RIO Config void runShellAndExit = do - inContainer <- getInContainer -- TODO we can probably assert that this is False based on Stack.Runners now - origArgs <- liftIO getArgs - let args | inContainer = origArgs -- internal-re-exec version already passed - -- first stack when restarting in the container - | otherwise = - ("--" ++ reExecArgName ++ "=" ++ showVersion Meta.version) : origArgs - exePath <- liftIO getExecutablePath - config <- view configL - envOverride <- view processContextL - local (set processContextL envOverride) $ do - let cmnd = escape exePath - args' = map escape args + inContainer <- getInContainer -- TODO we can probably assert that this is False based on Stack.Runners now + origArgs <- liftIO getArgs + let args | inContainer = origArgs -- internal-re-exec version already passed + -- first stack when restarting in the container + | otherwise = + ("--" ++ reExecArgName ++ "=" ++ showStackVersion) : origArgs + exePath <- toFilePath <$> viewExecutablePath + config <- view configL + envOverride <- view processContextL + local (set processContextL envOverride) $ do + let cmnd = escape exePath + args' = map escape args + + mshellFile <- case configProjectRoot config of + Just projectRoot -> + traverse (resolveFile projectRoot) config.nix.initFile + Nothing -> pure Nothing - mshellFile <- case configProjectRoot config of - Just projectRoot -> - traverse (resolveFile projectRoot) $ nixInitFile (configNix config) - Nothing -> pure Nothing + -- This will never result in double loading the build config, since: + -- + -- 1. This function explicitly takes a Config, not a HasConfig + -- + -- 2. This function ends up exiting before running other code + -- (thus the void return type) + compilerVersion <- withBuildConfig $ view wantedCompilerVersionL - -- This will never result in double loading the build config, since: - -- - -- 1. This function explicitly takes a Config, not a HasConfig - -- - -- 2. 
This function ends up exiting before running other code - -- (thus the void return type) - compilerVersion <- withBuildConfig $ view wantedCompilerVersionL + ghc <- either prettyThrowIO pure $ nixCompiler compilerVersion + ghcVersion <- either prettyThrowIO pure $ nixCompilerVersion compilerVersion + let pkgsInConfig = config.nix.packages + -- It appears that cacert needs to be specified in order for + -- crypton-x509-system >= 1.6.8 to work with Stack's Nix integration: + pkgs = pkgsInConfig ++ [ghc, "git", "gcc", "gmp", "cacert"] + pkgsStr = "[" <> T.intercalate " " pkgs <> "]" + pureShell = config.nix.pureShell + addGCRoots = config.nix.addGCRoots + nixopts = case mshellFile of + Just fp -> + [ toFilePath fp + , "--arg" + , "ghc" + , "with (import {}); " ++ T.unpack ghc + , "--argstr", "ghcVersion", T.unpack ghcVersion + ] + Nothing -> + [ "-E" + , T.unpack $ T.concat + [ "with (import {}); " + , "let inputs = ",pkgsStr,"; " + , "libPath = lib.makeLibraryPath inputs; " + , "stackExtraArgs = lib.concatMap (pkg: " + , "[ ''--extra-lib-dirs=${lib.getLib pkg}/lib'' " + , " ''--extra-include-dirs=${lib.getDev pkg}/include'' ]" + , ") inputs; in " + , "runCommand ''myEnv'' { " + , "buildInputs = lib.optional stdenv.isLinux glibcLocales ++ inputs; " + -- glibcLocales is necessary on Linux to avoid warnings about + -- GHC being incapable to set the locale. 
+ , T.pack platformVariantEnvVar <> "=''nix''; " + , T.pack inNixShellEnvVar <> "=1; " + , if inContainer + -- If shell is pure, this env var would not + -- be seen by stack inside nix + then T.pack inContainerEnvVar <> "=1; " + else "" + , "LD_LIBRARY_PATH = libPath;" + -- LD_LIBRARY_PATH is set because for now it's needed by + -- builds using Template Haskell + , "STACK_IN_NIX_EXTRA_ARGS = stackExtraArgs; " + -- overriding default locale so Unicode output using base + -- won't be broken + , "LANG=\"en_US.UTF-8\";" + , "} \"\"" + ] + ] - ghc <- either throwIO return $ nixCompiler compilerVersion - ghcVersion <- either throwIO return $ nixCompilerVersion compilerVersion - let pkgsInConfig = nixPackages (configNix config) - pkgs = pkgsInConfig ++ [ghc, "git", "gcc", "gmp"] - pkgsStr = "[" <> T.intercalate " " pkgs <> "]" - pureShell = nixPureShell (configNix config) - addGCRoots = nixAddGCRoots (configNix config) - nixopts = case mshellFile of - Just fp -> [toFilePath fp - ,"--arg", "ghc", "with (import {}); " ++ T.unpack ghc - ,"--argstr", "ghcVersion", T.unpack ghcVersion] - Nothing -> ["-E", T.unpack $ T.concat - ["with (import {}); " - ,"let inputs = ",pkgsStr,"; " - , "libPath = lib.makeLibraryPath inputs; " - , "stackExtraArgs = lib.concatMap (pkg: " - , "[ ''--extra-lib-dirs=${lib.getLib pkg}/lib'' " - , " ''--extra-include-dirs=${lib.getDev pkg}/include'' ]" - , ") inputs; in " - ,"runCommand ''myEnv'' { " - ,"buildInputs = lib.optional stdenv.isLinux glibcLocales ++ inputs; " - ,T.pack platformVariantEnvVar <> "=''nix''; " - ,T.pack inNixShellEnvVar <> "=1; " - ,if inContainer - -- If shell is pure, this env var would not - -- be seen by stack inside nix - then T.pack inContainerEnvVar <> "=1; " - else "" - ,"LD_LIBRARY_PATH = libPath;" -- LD_LIBRARY_PATH is set because for now it's - -- needed by builds using Template Haskell - ,"STACK_IN_NIX_EXTRA_ARGS = stackExtraArgs; " - -- overriding default locale so Unicode output using base won't be broken - 
,"LANG=\"en_US.UTF-8\";" - ,"} \"\""]] - -- glibcLocales is necessary on Linux to avoid warnings about GHC being incapable to set the locale. - fullArgs = concat [if pureShell then ["--pure"] else [] - ,if addGCRoots then ["--indirect", "--add-root" - ,toFilePath (configWorkDir config) - F. "nix-gc-symlinks" F. "gc-root"] else [] - ,map T.unpack (nixShellOptions (configNix config)) - ,nixopts - ,["--run", unwords (cmnd:"$STACK_IN_NIX_EXTRA_ARGS":args')] - ] - -- Using --run instead of --command so we cannot - -- end up in the nix-shell if stack build is Ctrl-C'd - pathVar <- liftIO $ lookupEnv "PATH" - logDebug $ "PATH is: " <> displayShow pathVar - logDebug $ - "Using a nix-shell environment " <> (case mshellFile of - Just path -> "from file: " <> fromString (toFilePath path) - Nothing -> "with nix packages: " <> display (T.intercalate ", " pkgs)) - exec "nix-shell" fullArgs + fullArgs = concat + [ [ "--pure" | pureShell ] + , if addGCRoots + then [ "--indirect" + , "--add-root" + , toFilePath + config.workDir + F. "nix-gc-symlinks" + F. "gc-root" + ] + else [] + , map T.unpack config.nix.shellOptions + , nixopts + , ["--run", unwords (cmnd:"$STACK_IN_NIX_EXTRA_ARGS":args')] + -- Using --run instead of --command so we cannot end up in the + -- nix-shell if stack build is Ctrl-C'd + ] + pathVar <- liftIO $ lookupEnv "PATH" + logDebug $ "PATH is: " <> displayShow pathVar + logDebug $ + "Using a nix-shell environment " + <> ( case mshellFile of + Just path -> + "from file: " + <> fromString (toFilePath path) + Nothing -> + "with nix packages: " + <> display (T.intercalate ", " pkgs) + ) + exec "nix-shell" fullArgs -- | Shell-escape quotes inside the string and enclose it in quotes. 
escape :: String -> String -escape str = "'" ++ foldr (\c -> if c == '\'' then - ("'\"'\"'"++) - else (c:)) "" str - ++ "'" +escape str = + "'" + ++ foldr (\c -> if c == '\'' then ("'\"'\"'"++) else (c:)) "" str + ++ "'" --- | Command-line argument for "nix" +-- | Command-line argument for "nix". nixCmdName :: String nixCmdName = "nix" +-- | Command-line option to show only @--nix-*@ options. nixHelpOptName :: String nixHelpOptName = nixCmdName ++ "-help" - --- | Exceptions thrown by "Stack.Nix". -data StackNixException - = CannotDetermineProjectRoot - -- ^ Can't determine the project root (location of the shell file if any). - deriving (Typeable) - -instance Exception StackNixException - -instance Show StackNixException where - show CannotDetermineProjectRoot = - "Cannot determine project root directory." diff --git a/src/Stack/Options/BenchParser.hs b/src/Stack/Options/BenchParser.hs index d5b751c4a2..81629b5193 100644 --- a/src/Stack/Options/BenchParser.hs +++ b/src/Stack/Options/BenchParser.hs @@ -1,25 +1,42 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -module Stack.Options.BenchParser where +{-| +Module : Stack.Options.BenchParser +Description : Parser for benchmark arguments. +License : BSD-3-Clause + +Parser for benchmark arguments. +-} + +module Stack.Options.BenchParser + ( benchOptsParser + ) where import Options.Applicative + ( Mod, Parser, help, long, metavar, strOption ) import Options.Applicative.Builder.Extra + ( firstBoolFlagsTrue, optionalFirst ) import Stack.Prelude -import Stack.Options.Utils -import Stack.Types.Config +import Stack.Options.Utils ( hideMods ) +import Stack.Types.BuildOptsMonoid ( BenchmarkOptsMonoid (..) ) -- | Parser for bench arguments. 
-- FIXME hiding options benchOptsParser :: Bool -> Parser BenchmarkOptsMonoid benchOptsParser hide0 = BenchmarkOptsMonoid - <$> optionalFirst (strOption (long "benchmark-arguments" <> - long "ba" <> - metavar "BENCH_ARGS" <> - help ("Forward BENCH_ARGS to the benchmark suite. " <> - "Supports templates from `cabal bench`") <> - hide)) - <*> optionalFirst (flag' True (long "no-run-benchmarks" <> - help "Disable running of benchmarks. (Benchmarks will still be built.)" <> - hide)) - where hide = hideMods hide0 + <$> optionalFirst (strOption + ( long "benchmark-arguments" + <> long "ba" + <> metavar "BENCH_ARGS" + <> help "Arguments passed to the benchmarks. Supports path variables \ + \provided by the Cabal build system." + <> hide + )) + <*> firstBoolFlagsTrue + "run-benchmarks" + "running of targeted benchmarks." + hide + where + hide :: Mod f a + hide = hideMods hide0 diff --git a/src/Stack/Options/BuildMonoidParser.hs b/src/Stack/Options/BuildMonoidParser.hs index e4c4ef4cd1..7de4eeaa78 100644 --- a/src/Stack/Options/BuildMonoidParser.hs +++ b/src/Stack/Options/BuildMonoidParser.hs @@ -1,184 +1,277 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.BuildMonoidParser where -import qualified Data.Text as T +{-| +Module : Stack.Options.BuildMonoidParser +Description : Parse arguments for Stack's build configuration. +License : BSD-3-Clause + +Parse arguments for Stack's build configuration. 
+-} + +module Stack.Options.BuildMonoidParser + ( buildOptsMonoidParser + ) where + +import Distribution.Parsec ( eitherParsec ) import Options.Applicative + ( FlagFields, Mod, Parser, eitherReader, flag, help, long + , metavar, option, strOption + ) import Options.Applicative.Builder.Extra -import Stack.Build (splitObjsWarning) + ( firstBoolFlagsFalse, firstBoolFlagsNoDefault + , firstBoolFlagsTrue, optionalFirst + ) +import Stack.Build ( splitObjsWarning ) import Stack.Prelude -import Stack.Options.BenchParser -import Stack.Options.TestParser -import Stack.Options.HaddockParser -import Stack.Options.Utils -import Stack.Types.Config.Build +import Stack.Options.BenchParser ( benchOptsParser ) +import Stack.Options.TestParser ( testOptsParser ) +import Stack.Options.HaddockParser ( haddockOptsParser ) +import Stack.Options.Utils ( GlobalOptsContext (..), hideMods ) +import Stack.Types.BuildOptsMonoid + ( BuildOptsMonoid (..), CabalVerbosity, readProgressBarFormat + , toFirstCabalVerbosity + ) +import Stack.Types.ComponentUtils ( unqualCompFromString ) +-- | Parse command line arguments for build configuration. 
buildOptsMonoidParser :: GlobalOptsContext -> Parser BuildOptsMonoid -buildOptsMonoidParser hide0 = - BuildOptsMonoid <$> trace' <*> profile <*> noStrip <*> - libProfiling <*> exeProfiling <*> libStripping <*> - exeStripping <*> haddock <*> haddockOptsParser hideBool <*> - openHaddocks <*> haddockDeps <*> haddockInternal <*> - haddockHyperlinkSource <*> copyBins <*> copyCompilerTool <*> - preFetch <*> keepGoing <*> keepTmpFiles <*> forceDirty <*> - tests <*> testOptsParser hideBool <*> benches <*> - benchOptsParser hideBool <*> reconfigure <*> cabalVerbose <*> splitObjs <*> skipComponents <*> - interleavedOutput <*> ddumpDir - where - hideBool = hide0 /= BuildCmdGlobalOpts - hide = - hideMods hideBool - hideExceptGhci = - hideMods (hide0 `notElem` [BuildCmdGlobalOpts, GhciCmdGlobalOpts]) +buildOptsMonoidParser hide0 = BuildOptsMonoid + <$> semaphore + <*> trace' + <*> profile + <*> noStrip + <*> libProfiling + <*> exeProfiling + <*> libStripping + <*> exeStripping + <*> haddock + <*> haddockOptsParser hideBool + <*> openHaddocks + <*> haddockDeps + <*> haddockExecutables + <*> haddockTests + <*> haddockBenchmarks + <*> haddockInternal + <*> haddockHyperlinkSource + <*> haddockForHackage + <*> copyBins + <*> copyCompilerTool + <*> preFetch + <*> keepGoing + <*> keepTmpFiles + <*> forceDirty + <*> tests + <*> testOptsParser hideBool + <*> benches + <*> benchOptsParser hideBool + <*> reconfigure + <*> cabalVerbose + <*> splitObjs + <*> skipComponents + <*> interleavedOutput + <*> progressBar + <*> ddumpDir + where + hideBool = hide0 /= BuildCmdGlobalOpts + hide :: Mod f a + hide = hideMods hideBool + hideExceptGhci :: Mod FlagFields a + hideExceptGhci = + hideMods (hide0 `notElem` [BuildCmdGlobalOpts, GhciCmdGlobalOpts]) + + -- These use 'Any' because they are not settable in stack.yaml, so + -- there is no need for options like --no-profile. + trace' = Any <$> + flag + False + True + ( long "trace" + <> help + "Enable profiling in libraries, executables, etc. 
for all \ + \expressions and generate a backtrace on exception." + <> hideExceptGhci + ) + profile = Any <$> + flag + False + True + ( long "profile" + <> help + "Enable profiling in libraries, executables, etc. for all \ + \expressions and generate a profiling report in tests or \ + \benchmarks." + <> hideExceptGhci + ) + noStrip = Any <$> + flag + False + True + ( long "no-strip" + <> help + "Disable DWARF debugging symbol stripping in libraries, \ + \executables, etc. for all expressions, producing larger \ + \executables but allowing the use of standard \ + \debuggers/profiling tools/other utilities that use \ + \debugging symbols." + <> hideExceptGhci + ) + libProfiling = firstBoolFlagsFalse + "library-profiling" + "library profiling for TARGETs and all its dependencies." + hide + exeProfiling = firstBoolFlagsFalse + "executable-profiling" + "executable profiling for TARGETs and all its dependencies." + hide + libStripping = firstBoolFlagsTrue + "library-stripping" + "library stripping for TARGETs and all its dependencies." + hide + exeStripping = firstBoolFlagsTrue + "executable-stripping" + "executable stripping for TARGETs and all its dependencies." + hide + haddock = firstBoolFlagsFalse + "haddock" + "generating Haddock documentation for the package(s) in this \ + \directory/configuration." + hide + openHaddocks = firstBoolFlagsFalse + "open" + "opening the local Haddock documentation in the browser." + hide + haddockDeps = firstBoolFlagsNoDefault + "haddock-deps" + "building Haddock documentation for dependencies. (default: if building \ + \Haddock documentation, true; otherwise, false)" + hide + haddockExecutables = firstBoolFlagsFalse + "haddock-executables" + "also building Haddock documentation for all executables (like \ + \'cabal haddock --executables')." + hide + haddockTests = firstBoolFlagsFalse + "haddock-tests" + "also building Haddock documentation for all test suites (like \ + \'cabal haddock --tests')." 
+ hide + haddockBenchmarks = firstBoolFlagsFalse + "haddock-benchmarks" + "also building Haddock documentation for all benchmarks (like \ + \'cabal haddock --benchmarks')." + hide + haddockInternal = firstBoolFlagsFalse + "haddock-internal" + "building Haddock documentation for internal modules (like \ + \'cabal haddock --internal')." + hide + haddockHyperlinkSource = firstBoolFlagsTrue + "haddock-hyperlink-source" + "building hyperlinked source for Haddock documentation (like \ + \'haddock --hyperlinked-source')." + hide + haddockForHackage = firstBoolFlagsFalse + "haddock-for-hackage" + "building with flags to generate Haddock documentation suitable for upload \ + \to Hackage." + hide + copyBins = firstBoolFlagsFalse + "copy-bins" + "copying binaries to local-bin (see 'stack path')." + hide + copyCompilerTool = firstBoolFlagsFalse + "copy-compiler-tool" + "copying binaries of targets to compiler-tools-bin (see 'stack path')." + hide + keepGoing = firstBoolFlagsNoDefault + "keep-going" + "continue running after a step fails. (default: for 'build', false; for \ + \'test' or 'bench', true)" + hide + keepTmpFiles = firstBoolFlagsFalse + "keep-tmp-files" + "keep intermediate files and build directories." + hide + preFetch = firstBoolFlagsFalse + "prefetch" + "fetching packages necessary for the build immediately. Useful with \ + \--dry-run." + hide + forceDirty = firstBoolFlagsFalse + "force-dirty" + "forcing the treatment of all project packages and local extra-deps as \ + \having dirty files. Useful for cases where Stack can't detect a file \ + \change." + hide + tests = firstBoolFlagsFalse + "test" + "testing the package(s) in this directory/configuration." + hideExceptGhci + benches = firstBoolFlagsFalse + "bench" + "benchmarking the package(s) in this directory/configuration." + hideExceptGhci + reconfigure = firstBoolFlagsFalse + "reconfigure" + "performing the configure step, even if unnecessary. Useful in some \ + \corner cases with custom Setup.hs files." 
+ hide + cabalVerbose = cabalVerbosityOptsParser hideBool + splitObjs = firstBoolFlagsFalse + "split-objs" + ( "split-objs, to reduce output size (at the cost of build time). " + ++ splitObjsWarning + ) + hide + skipComponents = many (fmap unqualCompFromString (strOption + ( long "skip" + <> help "Skip given component (can be specified multiple times)." + <> hide + ))) + interleavedOutput = firstBoolFlagsTrue + "interleaved-output" + "printing concurrent GHC output to the console with a prefix for the \ + \package name." + hide + progressBar = First <$> optional (option (eitherReader readProgressBarFormat) + ( long "progress-bar" + <> metavar "FORMAT" + <> help "Progress bar format (accepts none, count-only, capped and full). \ + \(default: capped)" + <> hide + )) + ddumpDir = optionalFirst (strOption + ( long "ddump-dir" + <> help "Specify output directory for ddump-files." + <> hide + )) + semaphore = firstBoolFlagsFalse + "semaphore" + "the use of a system semaphore to perform compilation in parallel when \ + \possible. Supported, by default, by GHC 9.10.1 or later." + hide + +-- | Parser for Cabal verbosity options +cabalVerbosityOptsParser :: Bool -> Parser (First CabalVerbosity) +cabalVerbosityOptsParser hide = + cabalVerbosityParser hide <|> cabalVerboseParser hide + +-- | Parser for Cabal verbosity option +cabalVerbosityParser :: Bool -> Parser (First CabalVerbosity) +cabalVerbosityParser hide = + let pCabalVerbosity = option (eitherReader eitherParsec) + ( long "cabal-verbosity" + <> metavar "VERBOSITY" + <> help "Cabal verbosity (accepts Cabal's numerical and extended \ + \syntax)." + <> hideMods hide) + in First . Just <$> pCabalVerbosity - -- These use 'Any' because they are not settable in stack.yaml, so - -- there is no need for options like --no-profile. - trace' = Any <$> - flag - False - True - (long "trace" <> - help - "Enable profiling in libraries, executables, etc. 
\ - \for all expressions and generate a backtrace on \ - \exception" <> - hideExceptGhci) - profile = Any <$> - flag - False - True - (long "profile" <> - help - "profiling in libraries, executables, etc. \ - \for all expressions and generate a profiling report\ - \ in tests or benchmarks" <> - hideExceptGhci) - noStrip = Any <$> - flag - False - True - (long "no-strip" <> - help - "Disable DWARF debugging symbol stripping in libraries, \ - \executables, etc. for all expressions, producing \ - \larger executables but allowing the use of standard \ - \debuggers/profiling tools/other utilities that use \ - \debugging symbols." <> - hideExceptGhci) - libProfiling = - firstBoolFlagsFalse - "library-profiling" - "library profiling for TARGETs and all its dependencies" - hide - exeProfiling = - firstBoolFlagsFalse - "executable-profiling" - "executable profiling for TARGETs and all its dependencies" - hide - libStripping = - firstBoolFlagsTrue - "library-stripping" - "library stripping for TARGETs and all its dependencies" - hide - exeStripping = - firstBoolFlagsTrue - "executable-stripping" - "executable stripping for TARGETs and all its dependencies" - hide - haddock = - firstBoolFlagsFalse - "haddock" - "generating Haddocks the package(s) in this directory/configuration" - hide - openHaddocks = - firstBoolFlagsFalse - "open" - "opening the local Haddock documentation in the browser" - hide - haddockDeps = - firstBoolFlagsNoDefault - "haddock-deps" - "building Haddocks for dependencies (default: true if building Haddocks, false otherwise)" - hide - haddockInternal = - firstBoolFlagsFalse - "haddock-internal" - "building Haddocks for internal modules (like cabal haddock --internal)" - hide - haddockHyperlinkSource = - firstBoolFlagsTrue - "haddock-hyperlink-source" - "building hyperlinked source for Haddock (like haddock --hyperlinked-source)" - hide - copyBins = - firstBoolFlagsFalse - "copy-bins" - "copying binaries to local-bin (see 'stack path')" - hide - 
copyCompilerTool = - firstBoolFlagsFalse - "copy-compiler-tool" - "copying binaries of targets to compiler-tools-bin (see 'stack path')" - hide - keepGoing = - firstBoolFlagsNoDefault - "keep-going" - "continue running after a step fails (default: false for build, true for test/bench)" - hide - keepTmpFiles = - firstBoolFlagsFalse - "keep-tmp-files" - "keep intermediate files and build directories" - hide - preFetch = - firstBoolFlagsFalse - "prefetch" - "Fetch packages necessary for the build immediately, useful with --dry-run" - hide - forceDirty = - firstBoolFlagsFalse - "force-dirty" - "Force treating all local packages as having dirty files (useful for cases where stack can't detect a file change" - hide - tests = - firstBoolFlagsFalse - "test" - "testing the package(s) in this directory/configuration" - hideExceptGhci - benches = - firstBoolFlagsFalse - "bench" - "benchmarking the package(s) in this directory/configuration" - hideExceptGhci - reconfigure = - firstBoolFlagsFalse - "reconfigure" - "Perform the configure step even if unnecessary. Useful in some corner cases with custom Setup.hs files" - hide - cabalVerbose = - firstBoolFlagsFalse - "cabal-verbose" - "Ask Cabal to be verbose in its output" - hide - splitObjs = - firstBoolFlagsFalse - "split-objs" - ("Enable split-objs, to reduce output size (at the cost of build time). 
" ++ splitObjsWarning) - hide - skipComponents = many - (fmap - T.pack - (strOption - (long "skip" <> - help "Skip given component, can be specified multiple times" <> - hide))) - interleavedOutput = - firstBoolFlagsTrue - "interleaved-output" - "Print concurrent GHC output to the console with a prefix for the package name" - hide - ddumpDir = - optionalFirst - (strOption - (long "ddump-dir" <> - help "Specify output ddump-files" <> - hide)) +-- | Parser for the Cabal verbose flag, retained for backward compatibility +cabalVerboseParser :: Bool -> Parser (First CabalVerbosity) +cabalVerboseParser hide = + let pVerboseFlag = firstBoolFlagsFalse + "cabal-verbose" + "asking Cabal to be verbose in its output." + (hideMods hide) + in toFirstCabalVerbosity <$> pVerboseFlag diff --git a/src/Stack/Options/BuildParser.hs b/src/Stack/Options/BuildParser.hs index 548552a447..df0f8d961a 100644 --- a/src/Stack/Options/BuildParser.hs +++ b/src/Stack/Options/BuildParser.hs @@ -1,111 +1,183 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -module Stack.Options.BuildParser where +{-| +Module : Stack.Options.BuildParser +License : BSD-3-Clause +-} -import qualified Data.Map as Map +module Stack.Options.BuildParser + ( buildOptsParser + ) where + +import qualified Data.List as L +import qualified Data.Text as T import Options.Applicative -import Options.Applicative.Args + ( Parser, completer, flag, flag', help, internal, long + , metavar, strOption, switch, value + ) +import Options.Applicative.Args ( cmdOption ) import Options.Applicative.Builder.Extra -import Stack.Options.Completion -import Stack.Options.PackageParser (readFlag) + ( firstBoolFlagsNoDefault, textArgument, textOption ) +import Stack.Options.Completion ( ghcOptsCompleter, targetCompleter ) +import Stack.Options.FlagsParser ( flagsParser ) +import Stack.Options.Utils ( hideMods ) import Stack.Prelude -import Stack.Types.Config +import Stack.Types.BuildOptsCLI + ( BuildCommand, BuildOptsCLI 
(..), BuildSubset (..) + , FileWatchOpts (..) + ) -- | Parser for CLI-only build arguments -buildOptsParser :: BuildCommand - -> Parser BuildOptsCLI -buildOptsParser cmd = - BuildOptsCLI <$> - targetsParser <*> - switch - (long "dry-run" <> - help "Don't build anything, just prepare to") <*> - ((\x y z -> - concat [x, y, z]) <$> - flag - [] - ["-Wall", "-Werror"] - (long "pedantic" <> - help "Turn on -Wall and -Werror") <*> - flag - [] - ["-O0"] - (long "fast" <> - help "Turn off optimizations (-O0)") <*> - many - (textOption - (long "ghc-options" <> - metavar "OPTIONS" <> - completer ghcOptsCompleter <> - help "Additional options passed to GHC"))) <*> - flagsParser <*> - (flag' - BSOnlyDependencies - (long "dependencies-only" <> - help "A synonym for --only-dependencies") <|> - flag' - BSOnlySnapshot - (long "only-snapshot" <> - help - "Only build packages for the snapshot database, not the local database") <|> - flag' - BSOnlyDependencies - (long "only-dependencies" <> - help - "Only build packages that are dependencies of targets on the command line") <|> - flag' - BSOnlyLocals - (long "only-locals" <> - help - "Only build packages in the local database, fail if the build plan includes the snapshot database") <|> - pure BSAll) <*> - (flag' - FileWatch - (long "file-watch" <> - help - "Watch for changes in local files and automatically rebuild. Ignores files in VCS boring/ignore file") <|> - flag' - FileWatchPoll - (long "file-watch-poll" <> - help - "Like --file-watch, but polling the filesystem instead of using events") <|> - pure NoFileWatch) <*> - switch - (long "watch-all" <> - help "Watch all local files not taking targets into account") <*> - many (cmdOption - (long "exec" <> - metavar "COMMAND [ARGUMENT(S)]" <> - help "Command and argument(s) to run after a successful build")) <*> - switch - (long "only-configure" <> - help - "Only perform the configure step, not any builds. 
Intended for tool usage, may break when used on multiple packages at once!") <*> - pure cmd <*> - switch - (long "initial-build-steps" <> - help "For target packages, only run initial build steps needed for GHCi" <> - internal) +buildOptsParser :: BuildCommand -> Parser BuildOptsCLI +buildOptsParser cmd = BuildOptsCLI + <$> targetsParser + <*> switch + ( long "dry-run" + <> help "Don't build anything, just prepare to." + ) + <*> ( (\x y z -> concat [x, y, z]) + <$> flag + [] + ["-Wall", "-Werror"] + ( long "pedantic" + <> help "Pass the -Wall and -Werror flags to GHC, turning on all \ + \warnings that indicate potentially suspicious code and \ + \making all warnings into fatal errors. Can be overridden \ + \using Stack's --ghc-options option." + ) + <*> flag + [] + ["-O0"] + ( long "fast" + <> help "Pass a -O0 flag to GHC, turning off any GHC \ + \optimsations that have been set. Can be overridden using \ + \Stack's --ghc-options option." + ) + <*> many (textOption + ( long "ghc-options" + <> metavar "OPTIONS" + <> completer ghcOptsCompleter + <> help "Additional options to be passed to GHC (can be specified \ + \multiple times)." + )) + ) + <*> progsOptionsParser + <*> flagsParser + <*> firstBoolFlagsNoDefault + "allow-newer" + "ignoring of lower and upper version bounds in Cabal files." + (hideMods False) + <*> ( flag' BSOnlyDependencies + ( long "dependencies-only" + <> help "A synonym for --only-dependencies." + ) + <|> flag' BSOnlySnapshot + ( long "only-snapshot" + <> help "Only build packages for the snapshot database, not the \ + \local database." + ) + <|> flag' BSOnlyDependencies + ( long "only-dependencies" + <> help "Only build packages that are dependencies of targets on \ + \the command line." + ) + <|> flag' BSOnlyLocals + ( long "only-locals" + <> help "Only build packages in the local database. Fail if the \ + \build plan includes the snapshot database." 
+ ) + <|> pure BSAll + ) + <*> ( flag' FileWatch + ( long "file-watch" + <> help "Watch for changes in local files and automatically \ + \rebuild." + ) + <|> flag' FileWatchPoll + ( long "file-watch-poll" + <> help "Like --file-watch, but polling the filesystem instead of \ + \using events." + ) + <|> pure NoFileWatch + ) + <*> switch + ( long "watch-all" + <> help "Watch all local files not taking targets into account." + ) + <*> many (cmdOption + ( long "exec" + <> metavar "COMMAND [ARGUMENT(S)]" + <> help "Command and argument(s) to run after a successful build." + )) + <*> switch + ( long "only-configure" + <> help "Only perform the configure step, not any builds. Intended for \ + \tool usage. May break when used on multiple packages at once!" + ) + <*> pure cmd + <*> switch + ( long "initial-build-steps" + <> help "For target packages, only run initial build steps needed for \ + \GHCi." + <> internal + ) +-- | Parser for build targets. targetsParser :: Parser [Text] targetsParser = - many - (textArgument - (metavar "TARGET" <> - completer targetCompleter <> - help ("If none specified, use all local packages. " <> - "See https://docs.haskellstack.org/en/stable/build_command/#target-syntax for details."))) + many (textArgument + ( metavar "TARGET" + <> completer targetCompleter + <> help "Can be specified multiple times. If none specified, use all \ + \project packages. See \ + \https://docs.haskellstack.org/en/stable/commands/build_command/#target-syntax \ + \for details." + )) -flagsParser :: Parser (Map.Map ApplyCLIFlag (Map.Map FlagName Bool)) -flagsParser = - Map.unionsWith Map.union <$> - many - (option - readFlag - (long "flag" <> - completer flagCompleter <> - metavar "PACKAGE:[-]FLAG" <> - help - ("Override flags set in stack.yaml " <> - "(applies to local packages and extra-deps)"))) +progsOptionsParser :: Parser [(Text, [Text])] +progsOptionsParser = + dummyProgOptionsParser + *> (filter (not . L.null . 
snd) <$> progsOptionsParser') + where + -- The purpose of this parser is only to generate the desired help text. The + -- actual --PROG-options parsers are all internal. + dummyProgOptionsParser :: Parser String + dummyProgOptionsParser = strOption + ( long "PROG-option" + <> help + ( "Pass an argument to PROG (can be specified multiple times). PROG \ + \must be a program recognised by the Cabal library and one of " + <> T.unpack (T.intercalate " " progs) <> "." + ) + <> metavar "ARG" + <> value "" + ) + progs :: [Text] + progs = L.sort + [ + -- configuration + "pkg-config" + -- preprocessors + , "alex" + , "c2hs" + , "cpphs" + , "doctest" + , "greencard" + , "happy" + , "hsc2hs" + , "hscolour" + -- platform toolchain (GNU) + , "ar" -- create, modify, and extract from archives + , "gcc" -- C/C++ compiler + , "ld" -- linker + , "strip" -- discards symbols and other data from object files + , "tar" + ] + progsOptionsParser' :: Parser [(Text, [Text])] + progsOptionsParser' = traverse mkProgOptionsParser progs + mkProgOptionsParser :: Text -> Parser (Text, [Text]) + mkProgOptionsParser prog = fmap (prog,) $ many $ textOption + ( long (T.unpack prog <> "-option") + <> internal + ) diff --git a/src/Stack/Options/CleanParser.hs b/src/Stack/Options/CleanParser.hs index b90845ff05..82a5e9110a 100644 --- a/src/Stack/Options/CleanParser.hs +++ b/src/Stack/Options/CleanParser.hs @@ -1,24 +1,59 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.CleanParser where +{-# LANGUAGE ApplicativeDo #-} -import Options.Applicative -import Stack.Clean (CleanCommand(..), CleanOpts (..)) +{-| +Module : Stack.Options.CleanParser +License : BSD-3-Clause +-} + +module Stack.Options.CleanParser + ( cleanOptsParser + ) where + +import Options.Applicative ( Parser, flag', help, idm, long, metavar ) +import Options.Applicative.Builder.Extra ( boolFlags ) +import Stack.Clean + ( CleanCommand (..), CleanDepth (..), CleanOpts (..) 
) import Stack.Prelude -import Stack.Types.PackageName +import Stack.Types.PackageName ( packageNameArgument ) -- | Command-line parser for the clean command. cleanOptsParser :: CleanCommand -> Parser CleanOpts -cleanOptsParser Clean = CleanShallow <$> packages <|> doFullClean - where - packages = - many - (packageNameArgument - (metavar "PACKAGE" <> - help "If none specified, clean all project packages")) - doFullClean = - flag' - CleanFull - (long "full" <> - help "Delete the project’s stack working directories (.stack-work by default).") +cleanOptsParser Clean = shallowParser <|> fullParser + +cleanOptsParser Purge = pure $ CleanOpts + { depth = CleanFull + , omitThis = False + } + +shallowParser :: Parser CleanOpts +shallowParser = do + packages <- parsePackages + omitThis <- parseOmitThis + pure $ CleanOpts + { depth = CleanShallow packages + , omitThis + } + where + parsePackages = many (packageNameArgument + ( metavar "PACKAGE" + <> help "If none specified, clean all project packages." + )) + parseOmitThis = boolFlags False + "omit-this" + "the omission of directories currently in use" + idm -cleanOptsParser Purge = pure CleanFull +fullParser :: Parser CleanOpts +fullParser = do + depth <- doFullClean + pure $ CleanOpts + { depth + , omitThis = False + } + where + doFullClean = flag' CleanFull + ( long "full" + <> help "Delete the project's Stack work directories (.stack-work by \ + \default)." + ) diff --git a/src/Stack/Options/Completion.hs b/src/Stack/Options/Completion.hs index b4c760d908..b78effbba5 100644 --- a/src/Stack/Options/Completion.hs +++ b/src/Stack/Options/Completion.hs @@ -1,112 +1,130 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TupleSections #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Options.Completion +Description : Completers for command line arguments. 
+License : BSD-3-Clause + +Completers for command line arguments or arguments of command line options. +-} module Stack.Options.Completion - ( ghcOptsCompleter - , targetCompleter - , flagCompleter - , projectExeCompleter - ) where + ( ghcOptsCompleter + , targetCompleter + , flagCompleter + , projectExeCompleter + ) where -import Data.Char (isSpace) -import Data.List (isPrefixOf) +import Data.Char ( isSpace ) +import Data.List ( isPrefixOf ) import qualified Data.Map as Map -import Data.Maybe import qualified Data.Set as Set import qualified Data.Text as T import qualified Distribution.PackageDescription as C -import qualified Distribution.Types.UnqualComponentName as C -import Options.Applicative -import Options.Applicative.Builder.Extra -import Stack.Constants (ghcShowOptionsOutput) -import Stack.Options.GlobalParser (globalOptsFromMonoid) +import Options.Applicative ( Completer, mkCompleter ) +import Options.Applicative.Builder.Extra ( unescapeBashArg ) +import Stack.Constants ( ghcShowOptionsOutput ) +import Stack.Options.GlobalParser ( globalOptsFromMonoid ) import Stack.Runners + ( ShouldReexec (..), withConfig, withDefaultEnvConfig + , withRunnerGlobal + ) import Stack.Prelude -import Stack.Types.Config -import Stack.Types.NamedComponent -import Stack.Types.SourceMap +import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..) ) +import Stack.Types.Config ( Config (..) ) +import Stack.Types.EnvConfig ( EnvConfig ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.Project ( Project (..) ) +import Stack.Types.ProjectConfig ( ProjectConfig (..) ) +import Stack.Types.NamedComponent ( renderPkgComponent ) +import Stack.Types.SourceMap ( SMWanted (..), ppComponents, ppGPD ) +-- | A completer for @--ghc-options@ or @--ghci-options@. 
ghcOptsCompleter :: Completer -ghcOptsCompleter = mkCompleter $ \inputRaw -> return $ - let input = unescapeBashArg inputRaw - (curArgReversed, otherArgsReversed) = break isSpace (reverse input) - curArg = reverse curArgReversed - otherArgs = reverse otherArgsReversed - in if null curArg then [] else - map (otherArgs ++) $ - filter (curArg `isPrefixOf`) ghcShowOptionsOutput +ghcOptsCompleter = mkCompleter $ \inputRaw -> pure $ + let input = unescapeBashArg inputRaw + (curArgReversed, otherArgsReversed) = break isSpace (reverse input) + curArg = reverse curArgReversed + otherArgs = reverse otherArgsReversed + in if null curArg + then [] + else + map (otherArgs ++) $ + filter (curArg `isPrefixOf`) ghcShowOptionsOutput -- TODO: Ideally this would pay attention to --stack-yaml, may require -- changes to optparse-applicative. -buildConfigCompleter - :: (String -> RIO EnvConfig [String]) - -> Completer +buildConfigCompleter :: (String -> RIO EnvConfig [String]) -> Completer buildConfigCompleter inner = mkCompleter $ \inputRaw -> do - let input = unescapeBashArg inputRaw - case input of - -- If it looks like a flag, skip this more costly completion. - ('-': _) -> return [] - _ -> do - go' <- globalOptsFromMonoid False mempty - let go = go' { globalLogLevel = LevelOther "silent" } - withRunnerGlobal go $ withConfig NoReexec $ withDefaultEnvConfig $ inner input + let input = unescapeBashArg inputRaw + case input of + -- If it looks like a flag, skip this more costly completion. + ('-': _) -> pure [] + _ -> do + -- We do not need to specify the name of the current Stack executable, as + -- it was invoked, or the path to the current Stack executable, as + -- withDefaultEnvConfig does not need either. + go' <- globalOptsFromMonoid "" Nothing False mempty + let go = go' { logLevel = LevelOther "silent" } + withRunnerGlobal go $ withConfig NoReexec $ withDefaultEnvConfig $ inner input +-- | A completer for components of project packages. 
targetCompleter :: Completer targetCompleter = buildConfigCompleter $ \input -> do - packages <- view $ buildConfigL.to (smwProject . bcSMWanted) + packages <- view $ buildConfigL . to (.smWanted.project) comps <- for packages ppComponents - pure - $ filter (input `isPrefixOf`) - $ concatMap allComponentNames - $ Map.toList comps - where - allComponentNames (name, comps) = - map (T.unpack . renderPkgComponent . (name,)) (Set.toList comps) + pure $ + concatMap + (filter (input `isPrefixOf`) . allComponentNames) + (Map.toList comps) + where + allComponentNames (name, comps) = + map (T.unpack . renderPkgComponent . (name,)) (Set.toList comps) +-- | A completer for Cabal flags of project packages. flagCompleter :: Completer flagCompleter = buildConfigCompleter $ \input -> do - bconfig <- view buildConfigL - gpds <- for (smwProject $ bcSMWanted bconfig) ppGPD - let wildcardFlags - = nubOrd - $ concatMap (\(name, gpd) -> - map (\fl -> "*:" ++ flagString name fl) (C.genPackageFlags gpd)) - $ Map.toList gpds - normalFlags - = concatMap (\(name, gpd) -> - map (\fl -> packageNameString name ++ ":" ++ flagString name fl) - (C.genPackageFlags gpd)) - $ Map.toList gpds - flagString name fl = - let flname = C.unFlagName $ C.flagName fl - in (if flagEnabled name fl then "-" else "") ++ flname - prjFlags = - case configProject (bcConfig bconfig) of - PCProject (p, _) -> projectFlags p - PCGlobalProject -> mempty - PCNoProject _ -> mempty - flagEnabled name fl = - fromMaybe (C.flagDefault fl) $ - Map.lookup (C.flagName fl) $ - Map.findWithDefault Map.empty name prjFlags - return $ filter (input `isPrefixOf`) $ - case input of - ('*' : ':' : _) -> wildcardFlags - ('*' : _) -> wildcardFlags - _ -> normalFlags + bconfig <- view buildConfigL + gpds <- for bconfig.smWanted.project ppGPD + let wildcardFlags + = nubOrd + $ concatMap (\(name, gpd) -> + map (\fl -> "*:" ++ flagString name fl) (C.genPackageFlags gpd)) + $ Map.toList gpds + normalFlags + = concatMap (\(name, gpd) -> + map 
(\fl -> packageNameString name ++ ":" ++ flagString name fl) + (C.genPackageFlags gpd)) + $ Map.toList gpds + flagString name fl = + let flname = C.unFlagName $ C.flagName fl + in (if flagEnabled name fl then "-" else "") ++ flname + prjFlags = + case bconfig.config.project of + PCProject (p, _) -> p.flagsByPkg + PCGlobalProject -> mempty + PCNoProject _ -> mempty + flagEnabled name fl = + fromMaybe (C.flagDefault fl) $ + Map.lookup (C.flagName fl) $ + Map.findWithDefault Map.empty name prjFlags + pure $ filter (input `isPrefixOf`) $ + case input of + ('*' : ':' : _) -> wildcardFlags + ('*' : _) -> wildcardFlags + _ -> normalFlags +-- | A completer for executable components of project packages. projectExeCompleter :: Completer projectExeCompleter = buildConfigCompleter $ \input -> do - packages <- view $ buildConfigL.to (smwProject . bcSMWanted) + packages <- view $ buildConfigL . to (.smWanted.project) gpds <- Map.traverseWithKey (const ppGPD) packages pure $ filter (input `isPrefixOf`) $ nubOrd $ concatMap - (\gpd -> map - (C.unUnqualComponentName . fst) - (C.condExecutables gpd) - ) + (map (C.unUnqualComponentName . fst) . C.condExecutables) gpds diff --git a/src/Stack/Options/ConfigEnvParser.hs b/src/Stack/Options/ConfigEnvParser.hs new file mode 100644 index 0000000000..9a03506e3e --- /dev/null +++ b/src/Stack/Options/ConfigEnvParser.hs @@ -0,0 +1,29 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Options.ConfigEnvParser +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @config env@ command. +-} + +module Stack.Options.ConfigEnvParser + ( configCmdEnvParser + ) where + +import qualified Options.Applicative as OA +import Options.Applicative.Builder.Extra ( boolFlags ) +import Stack.Prelude +import Stack.Types.EnvSettings ( EnvSettings (..) ) + +-- | Parse command line arguments for Stack's @config env@ command. 
+configCmdEnvParser :: OA.Parser EnvSettings +configCmdEnvParser = EnvSettings + <$> boolFlags True "locals" "include information about local packages" mempty + <*> boolFlags True + "ghc-package-path" "set GHC_PACKAGE_PATH environment variable" mempty + <*> boolFlags True "stack-exe" "set STACK_EXE environment variable" mempty + <*> boolFlags False + "locale-utf8" "set the GHC_CHARENC environment variable to UTF-8" mempty + <*> boolFlags False + "keep-ghc-rts" "keep any GHCRTS environment variable" mempty diff --git a/src/Stack/Options/ConfigParser.hs b/src/Stack/Options/ConfigParser.hs index 4a4f0094c9..2787173727 100644 --- a/src/Stack/Options/ConfigParser.hs +++ b/src/Stack/Options/ConfigParser.hs @@ -1,177 +1,243 @@ -{-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.ConfigParser where +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} -import Data.Char +{-| +Module : Stack.Options.ConfigParser +License : BSD-3-Clause +-} + +module Stack.Options.ConfigParser + ( configOptsParser + ) where + +import Data.Char ( toUpper ) import Options.Applicative + ( Mod, Parser, auto, completer, completeWith, eitherReader + , help, long, metavar, option, short, strOption + ) import Options.Applicative.Builder.Extra -import Path -import Stack.Constants -import Stack.Options.BuildMonoidParser -import Stack.Options.DockerParser -import Stack.Options.GhcBuildParser -import Stack.Options.GhcVariantParser -import Stack.Options.NixParser -import Stack.Options.Utils -import Stack.Prelude -import Stack.Types.Config + ( PathCompleterOpts (..), absDirOption, absFileOption + , defaultPathCompleterOpts, dirCompleter, firstBoolFlagsFalse + , firstBoolFlagsNoDefault, firstBoolFlagsTrue, optionalFirst + , pathCompleterWith + ) +import Path ( PathException (..), parseRelDir ) +import Stack.Constants ( stackRootOptionName ) +import Stack.Options.BuildMonoidParser ( buildOptsMonoidParser ) +import Stack.Options.DockerParser ( dockerOptsParser ) +import 
Stack.Options.GhcBuildParser ( ghcBuildParser ) +import Stack.Options.GhcVariantParser ( ghcVariantParser ) +import Stack.Options.NixParser ( nixOptsParser ) +import Stack.Options.Utils ( GlobalOptsContext (..), hideMods ) +import Stack.Prelude hiding ( snapshotLocation ) +import Stack.Types.ColorWhen ( readColorWhen ) +import Stack.Types.ConfigMonoid ( ConfigMonoid (..) ) +import Stack.Types.DumpLogs ( DumpLogs (..) ) import qualified System.FilePath as FilePath -- | Command-line arguments parser for configuration. configOptsParser :: FilePath -> GlobalOptsContext -> Parser ConfigMonoid configOptsParser currentDir hide0 = - (\stackRoot workDir buildOpts dockerOpts nixOpts systemGHC installGHC arch - ghcVariant ghcBuild jobs includes libs overrideGccPath overrideHpack - skipGHCCheck skipMsys localBin setupInfoLocations modifyCodePage - allowDifferentUser dumpLogs colorWhen snapLoc -> mempty - { configMonoidStackRoot = stackRoot - , configMonoidWorkDir = workDir - , configMonoidBuildOpts = buildOpts - , configMonoidDockerOpts = dockerOpts - , configMonoidNixOpts = nixOpts - , configMonoidSystemGHC = systemGHC - , configMonoidInstallGHC = installGHC - , configMonoidSkipGHCCheck = skipGHCCheck - , configMonoidArch = arch - , configMonoidGHCVariant = ghcVariant - , configMonoidGHCBuild = ghcBuild - , configMonoidJobs = jobs - , configMonoidExtraIncludeDirs = includes - , configMonoidExtraLibDirs = libs - , configMonoidOverrideGccPath = overrideGccPath - , configMonoidOverrideHpack = overrideHpack - , configMonoidSkipMsys = skipMsys - , configMonoidLocalBinPath = localBin - , configMonoidSetupInfoLocations = setupInfoLocations - , configMonoidModifyCodePage = modifyCodePage - , configMonoidAllowDifferentUser = allowDifferentUser - , configMonoidDumpLogs = dumpLogs - , configMonoidColorWhen = colorWhen - , configMonoidSnapshotLocation = snapLoc - }) - <$> optionalFirst (absDirOption - ( long stackRootOptionName - <> metavar (map toUpper stackRootOptionName) - <> help 
("Absolute path to the global stack root directory " ++ - "(Overrides any STACK_ROOT environment variable)") - <> hide - )) - <*> optionalFirst (option (eitherReader (mapLeft showWorkDirError . parseRelDir)) - ( long "work-dir" - <> metavar "WORK-DIR" - <> completer (pathCompleterWith (defaultPathCompleterOpts { pcoAbsolute = False, pcoFileFilter = const False })) - <> help ("Relative path of work directory " ++ - "(Overrides any STACK_WORK environment variable, default is '.stack-work')") - <> hide - )) - <*> buildOptsMonoidParser hide0 - <*> dockerOptsParser True - <*> nixOptsParser True - <*> firstBoolFlagsNoDefault - "system-ghc" - "using the system installed GHC (on the PATH) if it is available and its version matches. Disabled by default." - hide - <*> firstBoolFlagsTrue - "install-ghc" - "downloading and installing GHC if necessary (can be done manually with stack setup)" - hide - <*> optionalFirst (strOption - ( long "arch" - <> metavar "ARCH" - <> help "System architecture, e.g. i386, x86_64" - <> hide - )) - <*> optionalFirst (ghcVariantParser (hide0 /= OuterGlobalOpts)) - <*> optionalFirst (ghcBuildParser (hide0 /= OuterGlobalOpts)) - <*> optionalFirst (option auto - ( long "jobs" - <> short 'j' - <> metavar "JOBS" - <> help "Number of concurrent jobs to run" - <> hide - )) - <*> many ((currentDir FilePath.) <$> strOption - ( long "extra-include-dirs" - <> metavar "DIR" - <> completer dirCompleter - <> help "Extra directories to check for C header files" - <> hide - )) - <*> many ((currentDir FilePath.) 
<$> strOption - ( long "extra-lib-dirs" - <> metavar "DIR" - <> completer dirCompleter - <> help "Extra directories to check for libraries" - <> hide - )) - <*> optionalFirst (absFileOption - ( long "with-gcc" - <> metavar "PATH-TO-GCC" - <> help "Use gcc found at PATH-TO-GCC" - <> hide - )) - <*> optionalFirst (strOption - ( long "with-hpack" - <> metavar "HPACK" - <> help "Use HPACK executable (overrides bundled Hpack)" - <> hide - )) - <*> firstBoolFlagsFalse - "skip-ghc-check" - "skipping the GHC version and architecture check" - hide - <*> firstBoolFlagsFalse - "skip-msys" - "skipping the local MSYS installation (Windows only)" - hide - <*> optionalFirst ((currentDir FilePath.) <$> strOption - ( long "local-bin-path" - <> metavar "DIR" - <> completer dirCompleter - <> help "Install binaries to DIR" - <> hide - )) - <*> many ( - strOption - ( long "setup-info-yaml" - <> help "Alternate URL or relative / absolute path for stack dependencies" - <> metavar "URL" )) - <*> firstBoolFlagsTrue - "modify-code-page" - "setting the codepage to support UTF-8 (Windows only)" - hide - <*> firstBoolFlagsNoDefault - "allow-different-user" - ("permission for users other than the owner of the stack root " ++ - "directory to use a stack installation (POSIX only) " ++ - "(default: true inside Docker, otherwise false)") - hide - <*> fmap toDumpLogs - (firstBoolFlagsNoDefault - "dump-logs" - "dump the build output logs for local packages to the console (default: dump warning logs)" - hide) - <*> optionalFirst (option readColorWhen - ( long "color" - <> long "colour" - <> metavar "WHEN" - <> completeWith ["always", "never", "auto"] - <> help "Specify when to use color in output; WHEN is 'always', \ - \'never', or 'auto'. 
On Windows versions before Windows \ - \10, for terminals that do not support color codes, the \ - \default is 'never'; color may work on terminals that \ - \support color codes" - <> hide - )) - <*> optionalFirst (strOption - ( long "snapshot-location-base" - <> help "The base location of LTS/Nightly snapshots" - <> metavar "URL" - )) - where - hide = hideMods (hide0 /= OuterGlobalOpts) - toDumpLogs (First (Just True)) = First (Just DumpAllLogs) - toDumpLogs (First (Just False)) = First (Just DumpNoLogs) - toDumpLogs (First Nothing) = First Nothing - showWorkDirError err = show err ++ - "\nNote that --work-dir must be a relative child directory, because work-dirs outside of the package are not supported by Cabal." ++ - "\nSee https://github.com/commercialhaskell/stack/issues/2954" + ( \stackRoot workDir buildOpts dockerOpts nixOpts systemGHC installGHC installMsys arch + ghcVariant ghcBuild jobs extraIncludeDirs extraLibDirs + customPreprocessorExts overrideGccPath overrideHpack hpackForce skipGHCCheck skipMsys + localBinPath setupInfoLocations modifyCodePage allowDifferentUser dumpLogs + colorWhen snapshotLocation noRunCompile -> mempty + { stackRoot + , workDir + , buildOpts + , dockerOpts + , nixOpts + , systemGHC + , installGHC + , installMsys + , skipGHCCheck + , arch + , ghcVariant + , ghcBuild + , jobs + , extraIncludeDirs + , extraLibDirs + , customPreprocessorExts + , overrideGccPath + , overrideHpack + , hpackForce + , skipMsys + , localBinPath + , setupInfoLocations + , modifyCodePage + , allowDifferentUser + , dumpLogs + , colorWhen + , snapshotLocation + , noRunCompile + } + ) + <$> optionalFirst (absDirOption + ( long stackRootOptionName + <> metavar (map toUpper stackRootOptionName) + <> help "Absolute path to the global Stack root directory. Overrides \ + \any STACK_ROOT environment variable." + <> hide + )) + <*> optionalFirst (option (eitherReader (mapLeft showWorkDirError . 
parseRelDir)) + ( long "work-dir" + <> metavar "WORK-DIR" + <> completer + ( pathCompleterWith + ( defaultPathCompleterOpts + { absolute = False, fileFilter = const False } + ) + ) + <> help "Relative path to Stack's work directory. Overrides any \ + \STACK_WORK environment variable. (default: '.stack-work')" + <> hide + )) + <*> buildOptsMonoidParser hide0 + <*> dockerOptsParser True + <*> nixOptsParser True + <*> firstBoolFlagsNoDefault + "system-ghc" + "using the system installed GHC (on the PATH) if it is available and \ + \its version matches. (default: disabled)" + hide + <*> firstBoolFlagsTrue + "install-ghc" + "downloading and installing GHC if necessary. (Can be done manually \ + \with 'stack setup'.)" + hide + <*> firstBoolFlagsNoDefault + "install-msys" + "downloading and installing MSYS2 if necessary (Windows only). (Can be \ + \done manually with 'stack setup'.) (default: same as 'install-ghc')" + hide + <*> optionalFirst (strOption + ( long "arch" + <> metavar "ARCH" + <> help "System architecture, e.g. i386, x86_64, aarch64." + <> hide + )) + <*> optionalFirst (ghcVariantParser (hide0 /= OuterGlobalOpts)) + <*> optionalFirst (ghcBuildParser (hide0 /= OuterGlobalOpts)) + <*> optionalFirst (option auto + ( long "jobs" + <> short 'j' + <> metavar "JOBS" + <> help "Number of concurrent jobs to run." + <> hide + )) + <*> many ((currentDir FilePath.) <$> strOption + ( long "extra-include-dirs" + <> metavar "DIR" + <> completer dirCompleter + <> help "Extra directories to check for C header files." + <> hide + )) + <*> many ((currentDir FilePath.) <$> strOption + ( long "extra-lib-dirs" + <> metavar "DIR" + <> completer dirCompleter + <> help "Extra directories to check for libraries." + <> hide + )) + <*> many (strOption + ( long "custom-preprocessor-extensions" + <> metavar "EXT" + <> help "Extensions used for custom preprocessors." 
+ <> hide + )) + <*> optionalFirst (absFileOption + ( long "with-gcc" + <> metavar "PATH-TO-GCC" + <> help "Use gcc found at PATH-TO-GCC." + <> hide + )) + <*> optionalFirst (strOption + ( long "with-hpack" + <> metavar "HPACK" + <> help "Use HPACK executable (overrides bundled Hpack)." + <> hide + )) + <*> firstBoolFlagsFalse + "hpack-force" + "overwriting of Cabal files that were created by a more recent \ + \version of Hpack or have been modified manually." + hide + <*> firstBoolFlagsFalse + "skip-ghc-check" + "skipping the GHC version and architecture check." + hide + <*> firstBoolFlagsFalse + "skip-msys" + "skipping the local MSYS installation (Windows only)." + hide + <*> optionalFirst ((currentDir FilePath.) <$> strOption + ( long "local-bin-path" + <> metavar "DIR" + <> completer dirCompleter + <> help "Override the target directory for 'stack build --copy-bins' \ + \and 'stack install'. DIR can be an absolute path or one \ + \relative to the current directory." + <> hide + )) + <*> many (strOption + ( long "setup-info-yaml" + <> help "Alternate URL or path (relative or absolute) for Stack \ + \dependencies." + <> metavar "URL" + )) + <*> firstBoolFlagsTrue + "modify-code-page" + "setting the codepage to support UTF-8 (Windows only)." + hide + <*> firstBoolFlagsNoDefault + "allow-different-user" + "permission for users other than the owner of the Stack root directory \ + \to use a Stack installation (POSIX only). (default: inside Docker, \ + \ true; otherwise, false)" + hide + <*> fmap toDumpLogs (firstBoolFlagsNoDefault + "dump-logs" + "dump the build output logs for project packages to the console. \ + \(default: dump warning logs)" + hide) + <*> optionalFirst (option readColorWhen + ( long "color" + <> long "colour" + <> metavar "WHEN" + <> completeWith ["always", "never", "auto"] + <> help "Specify when to use color in output; WHEN is 'always', \ + \'never', or 'auto'. 
On Windows versions before Windows \ + \10, for terminals that do not support color codes, the \ + \default is 'never'; color may work on terminals that \ + \support color codes." + <> hide + )) + <*> optionalFirst (strOption + ( long "snapshot-location-base" + <> help "The base location of LTS/Nightly snapshots." + <> metavar "URL" + )) + <*> firstBoolFlagsFalse + "script-no-run-compile" + "the use of options `--no-run --compile` with `stack script`." + hide + where + hide :: Mod f a + hide = hideMods (hide0 /= OuterGlobalOpts) + toDumpLogs (First (Just True)) = First (Just DumpAllLogs) + toDumpLogs (First (Just False)) = First (Just DumpNoLogs) + toDumpLogs (First Nothing) = First Nothing + showWorkDirError err = case fromException err of + Just (InvalidRelDir x) -> + "Stack failed to interpret the value of the option as a valid\n\ + \relative path to a directory. Stack will not accept an absolute path. A \ + \path\n\ + \containing a .. (parent directory) component is not valid.\n\n\ + \If set, Stack expects the value to identify the location of Stack's \ + \work\n\ + \directory, relative to the root directory of the project or package. \ + \Stack\n\ + \encountered the value:\n" + ++ x + _ -> displayException err diff --git a/src/Stack/Options/ConfigSetParser.hs b/src/Stack/Options/ConfigSetParser.hs new file mode 100644 index 0000000000..6c730b862d --- /dev/null +++ b/src/Stack/Options/ConfigSetParser.hs @@ -0,0 +1,143 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Options.ConfigSetParser +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @config set@ command. 
+-} + +module Stack.Options.ConfigSetParser + ( configCmdSetParser + ) where + +import qualified Data.Text as T +import qualified Options.Applicative as OA +import qualified Options.Applicative.Types as OA +import Stack.Prelude +import Stack.Types.ConfigMonoid + ( configMonoidInstallGHCName + , configMonoidInstallMsysName + , configMonoidRecommendStackUpgradeName + , configMonoidSystemGHCName + ) +import Stack.Types.ConfigSetOpts + ( CommandScope (..), ConfigCmdSet (..) ) +import Stack.Types.Snapshot ( readAbstractSnapshot ) + +-- | Parse command line arguments for Stack's @config set@ command. +configCmdSetParser :: OA.Parser ConfigCmdSet +configCmdSetParser = + OA.hsubparser $ + mconcat + [ OA.command "snapshot" + ( OA.info + ( ConfigCmdSetSnapshot + <$> OA.argument + readAbstractSnapshot + ( OA.metavar "SNAPSHOT" + <> OA.help "E.g. \"nightly\" or \"lts-24.37\"" )) + ( OA.progDesc + "Change the snapshot of the current project." )) + , OA.command "resolver" + ( OA.info + ( ConfigCmdSetResolver + <$> OA.argument + readAbstractSnapshot + ( OA.metavar "SNAPSHOT" + <> OA.help "E.g. \"nightly\" or \"lts-24.37\"" )) + ( OA.progDesc + "Change the snapshot of the current project, using the \ + \(deprecated) resolver key." )) + , OA.command (T.unpack configMonoidSystemGHCName) + ( OA.info + ( ConfigCmdSetSystemGhc + <$> globalScopeFlag + <*> boolArgument ) + ( OA.progDesc + "Configure whether or not Stack should use a system GHC \ + \installation." )) + , OA.command (T.unpack configMonoidInstallGHCName) + ( OA.info + ( ConfigCmdSetInstallGhc + <$> globalScopeFlag + <*> boolArgument ) + ( OA.progDesc + "Configure whether or not Stack should automatically install \ + \GHC when necessary." )) + , OA.command (T.unpack configMonoidInstallMsysName) + ( OA.info + ( ConfigCmdSetInstallMsys + <$> globalScopeFlag + <*> boolArgument ) + ( OA.progDesc + "Configure whether or not Stack should automatically install \ + \MSYS2 when necessary." 
)) + , OA.command (T.unpack configMonoidRecommendStackUpgradeName) + ( OA.info + ( ConfigCmdSetRecommendStackUpgrade + <$> projectScopeFlag + <*> boolArgument ) + ( OA.progDesc + "Configure whether or not Stack should notify the user if it \ + \identifies a new version of Stack is available." )) + , OA.command "package-index" + ( OA.info + ( OA.hsubparser $ + OA.command "download-prefix" + ( OA.info + ( ConfigCmdSetDownloadPrefix + <$> globalScopeFlag + <*> urlArgument ) + ( OA.progDesc + "Configure download prefix for Stack's package \ + \index." ))) + ( OA.progDesc + "Configure Stack's package index." )) + ] + +globalScopeFlag :: OA.Parser CommandScope +globalScopeFlag = OA.flag + CommandScopeProject + CommandScopeGlobal + ( OA.long "global" + <> OA.help + "Modify the user-specific global configuration file ('config.yaml') \ + \instead of the project-level configuration file ('stack.yaml')." + ) + +projectScopeFlag :: OA.Parser CommandScope +projectScopeFlag = OA.flag + CommandScopeGlobal + CommandScopeProject + ( OA.long "project" + <> OA.help + "Modify the project-level configuration file ('stack.yaml') instead of \ + \the user-specific global configuration file ('config.yaml')." + ) + +boolArgument :: OA.Parser Bool +boolArgument = OA.argument + readBool + ( OA.metavar "true|false" + <> OA.completeWith ["true", "false"] + ) + +readBool :: OA.ReadM Bool +readBool = do + s <- OA.readerAsk + case s of + "true" -> pure True + "false" -> pure False + _ -> OA.readerError ("Invalid value " ++ show s ++ + ": Expected \"true\" or \"false\"") + +urlArgument :: OA.Parser Text +urlArgument = OA.strArgument + ( OA.metavar "URL" + <> OA.value defaultDownloadPrefix + <> OA.showDefault + <> OA.help + "Location of package index. It is highly recommended to use only the \ + \official Hackage server or a mirror."
+ ) diff --git a/src/Stack/Options/DockerParser.hs b/src/Stack/Options/DockerParser.hs index 5e0514707d..c913cca274 100644 --- a/src/Stack/Options/DockerParser.hs +++ b/src/Stack/Options/DockerParser.hs @@ -1,107 +1,158 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.DockerParser where -import Data.List (intercalate) -import qualified Data.Text as T -import Distribution.Version (anyVersion) +{-| +Module : Stack.Options.DockerParser +License : BSD-3-Clause +-} + +module Stack.Options.DockerParser + ( dockerOptsParser + ) where + +import Data.List ( intercalate ) +import qualified Data.Text as T +import Distribution.Version ( anyVersion ) import Options.Applicative -import Options.Applicative.Args + ( Mod, Parser, auto, completer, help, listCompleter, long + , metavar, option, str, value + ) +import Options.Applicative.Args ( argsOption ) import Options.Applicative.Builder.Extra -import Stack.Docker + ( dirCompleter, eitherReader', fileCompleter + , firstBoolFlagsFalse, firstBoolFlagsNoDefault + , firstBoolFlagsTrue, optionalFirst + ) +import Stack.Constants ( stackProgName ) +import Stack.Docker ( dockerCmdName ) import Stack.Prelude -import Stack.Options.Utils -import Stack.Types.Version +import Stack.Options.Utils ( hideMods ) +import Stack.Types.Version ( IntersectingVersionRange (..) ) import Stack.Types.Docker + ( DockerMonoidRepoOrImage (..), DockerOptsMonoid (..) + , dockerAutoPullArgName, dockerImageArgName + , dockerContainerNameArgName, dockerDetachArgName + , dockerEnvArgName, dockerPersistArgName + , dockerRegistryLoginArgName, dockerRegistryPasswordArgName + , dockerRegistryUsernameArgName, dockerRepoArgName + , dockerRunArgsArgName, dockerMountArgName + , dockerMountModeArgName, dockerNetworkArgName + , dockerSetUserArgName, dockerStackExeArgName + , dockerStackExeDownloadVal, dockerStackExeHostVal + , dockerStackExeImageVal, parseDockerStackExe + ) -- | Options parser configuration for Docker. 
dockerOptsParser :: Bool -> Parser DockerOptsMonoid -dockerOptsParser hide0 = - DockerOptsMonoid (Any False) - <$> firstBoolFlagsNoDefault - dockerCmdName - "using a Docker container. --docker implies 'system-ghc: true'" - hide - <*> fmap First - (Just . DockerMonoidRepo <$> option str (long (dockerOptName dockerRepoArgName) <> - hide <> - metavar "NAME" <> - help "Docker repository name") <|> - Just . DockerMonoidImage <$> option str (long (dockerOptName dockerImageArgName) <> - hide <> - metavar "IMAGE" <> - help "Exact Docker image ID (overrides docker-repo)") <|> - pure Nothing) - <*> firstBoolFlagsNoDefault - (dockerOptName dockerRegistryLoginArgName) - "registry requires login" - hide - <*> firstStrOption (long (dockerOptName dockerRegistryUsernameArgName) <> - hide <> - metavar "USERNAME" <> - help "Docker registry username") - <*> firstStrOption (long (dockerOptName dockerRegistryPasswordArgName) <> - hide <> - metavar "PASSWORD" <> - help "Docker registry password") - <*> firstBoolFlagsTrue - (dockerOptName dockerAutoPullArgName) - "automatic pulling latest version of image" - hide - <*> firstBoolFlagsFalse - (dockerOptName dockerDetachArgName) - "running a detached Docker container" - hide - <*> firstBoolFlagsFalse - (dockerOptName dockerPersistArgName) - "not deleting container after it exits" - hide - <*> firstStrOption (long (dockerOptName dockerContainerNameArgName) <> - hide <> - metavar "NAME" <> - help "Docker container name") - <*> firstStrOption (long (dockerOptName dockerNetworkArgName) <> - hide <> - metavar "NETWORK" <> - help "Docker network") - <*> argsOption (long (dockerOptName dockerRunArgsArgName) <> - hide <> - value [] <> - metavar "'ARG1 [ARG2 ...]'" <> - help "Additional options to pass to 'docker run'") - <*> many (option auto (long (dockerOptName dockerMountArgName) <> - hide <> - metavar "(PATH | HOST-PATH:CONTAINER-PATH)" <> - completer dirCompleter <> - help ("Mount volumes from host in container " ++ - "(may specify multiple 
times)"))) - <*> firstStrOption (long (dockerOptName dockerMountModeArgName) <> - hide <> - metavar "SUFFIX" <> - help "Volume mount mode suffix") - <*> many (option str (long (dockerOptName dockerEnvArgName) <> - hide <> - metavar "NAME=VALUE" <> - help ("Set environment variable in container " ++ - "(may specify multiple times)"))) - <*> optionalFirst (option (eitherReader' parseDockerStackExe) - (let specialOpts = - [ dockerStackExeDownloadVal - , dockerStackExeHostVal - , dockerStackExeImageVal - ] in - long(dockerOptName dockerStackExeArgName) <> - hide <> - metavar (intercalate "|" (specialOpts ++ ["PATH"])) <> - completer (listCompleter specialOpts <> fileCompleter) <> - help (concat [ "Location of " +dockerOptsParser hide0 = DockerOptsMonoid (Any False) + <$> firstBoolFlagsNoDefault + dockerCmdName + "using a Docker container. --docker implies 'system-ghc: true'." + hide + <*> fmap First + ( Just . DockerMonoidRepo <$> option str + ( long (dockerOptName dockerRepoArgName) + <> hide + <> metavar "NAME" + <> help "Docker repository name." + ) + <|> Just . DockerMonoidImage <$> option str + ( long (dockerOptName dockerImageArgName) + <> hide + <> metavar "IMAGE" + <> help "Exact Docker image ID (overrides docker-repo)." + ) + <|> pure Nothing + ) + <*> firstBoolFlagsNoDefault + (dockerOptName dockerRegistryLoginArgName) + "registry requires login." + hide + <*> firstStrOption + ( long (dockerOptName dockerRegistryUsernameArgName) + <> hide + <> metavar "USERNAME" + <> help "Docker registry username." + ) + <*> firstStrOption + ( long (dockerOptName dockerRegistryPasswordArgName) + <> hide + <> metavar "PASSWORD" + <> help "Docker registry password." + ) + <*> firstBoolFlagsTrue + (dockerOptName dockerAutoPullArgName) + "automatic pulling latest version of image." + hide + <*> firstBoolFlagsFalse + (dockerOptName dockerDetachArgName) + "running a detached Docker container." 
+ hide + <*> firstBoolFlagsFalse + (dockerOptName dockerPersistArgName) + "not deleting container after it exits." + hide + <*> firstStrOption + ( long (dockerOptName dockerContainerNameArgName) + <> hide + <> metavar "NAME" + <> help "Docker container name." + ) + <*> firstStrOption + ( long (dockerOptName dockerNetworkArgName) + <> hide + <> metavar "NETWORK" + <> help "Docker network." + ) + <*> argsOption + ( long (dockerOptName dockerRunArgsArgName) + <> hide + <> value [] + <> metavar "'ARG1 [ARG2 ...]'" + <> help "Additional options to pass to 'docker run'.") + <*> many (option auto + ( long (dockerOptName dockerMountArgName) + <> hide + <> metavar "(PATH | HOST-PATH:CONTAINER-PATH)" + <> completer dirCompleter + <> help "Mount volumes from host in container (can be specified \ + \multiple times)." + )) + <*> firstStrOption + ( long (dockerOptName dockerMountModeArgName) + <> hide + <> metavar "SUFFIX" + <> help "Volume mount mode suffix." + ) + <*> many (option str + ( long (dockerOptName dockerEnvArgName) + <> hide + <> metavar "NAME=VALUE" + <> help "Set environment variable in container (can be specified \ + \multiple times)." + )) + <*> optionalFirst (option (eitherReader' parseDockerStackExe) + ( let specialOpts = [ dockerStackExeDownloadVal + , dockerStackExeHostVal + , dockerStackExeImageVal + ] + in long (dockerOptName dockerStackExeArgName) + <> hide + <> metavar (intercalate "|" (specialOpts ++ ["PATH"])) + <> completer (listCompleter specialOpts <> fileCompleter) + <> help ( concat + [ "Location of " , stackProgName - , " executable used in container" ]))) - <*> firstBoolFlagsNoDefault - (dockerOptName dockerSetUserArgName) - "setting user in container to match host" - hide - <*> pure (IntersectingVersionRange anyVersion) - where - dockerOptName optName = dockerCmdName ++ "-" ++ T.unpack optName - firstStrOption = optionalFirst . option str - hide = hideMods hide0 + , " executable used in container." 
+ ] + ) + )) + <*> firstBoolFlagsNoDefault + (dockerOptName dockerSetUserArgName) + "setting user in container to match host." + hide + <*> pure (IntersectingVersionRange anyVersion) + where + dockerOptName optName = dockerCmdName ++ "-" ++ T.unpack optName + firstStrOption = optionalFirst . option str + hide :: Mod f a + hide = hideMods hide0 diff --git a/src/Stack/Options/DotParser.hs b/src/Stack/Options/DotParser.hs index 3068e7cb50..3572c50464 100644 --- a/src/Stack/Options/DotParser.hs +++ b/src/Stack/Options/DotParser.hs @@ -1,105 +1,102 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -module Stack.Options.DotParser where +{-| +Module : Stack.Options.DotParser +License : BSD-3-Clause -import Data.Char (isSpace) -import Data.List.Split (splitOn) +Function to parse command line arguments for Stack's @dot@ command and certain +command line arguments for Stack's @ls dependencies@ command. +-} + +module Stack.Options.DotParser + ( dotOptsParser + ) where + +import Data.Char ( isSpace ) +import Data.List.Split ( splitOn ) import qualified Data.Set as Set -import qualified Data.Text as T -import Distribution.Types.PackageName(mkPackageName) +import Distribution.Types.PackageName ( mkPackageName ) import Options.Applicative -import Options.Applicative.Builder.Extra -import Stack.Dot -import Stack.Options.BuildParser + ( Parser, auto, completer, help, idm, long, metavar, option + , strOption, switch + ) +import Options.Applicative.Builder.Extra ( boolFlags, textArgument ) +import Stack.Options.Completion ( targetCompleter ) +import Stack.Options.FlagsParser ( flagsParser ) import Stack.Prelude +import Stack.Types.DotOpts ( DotOpts (..) ) -- | Parser for arguments to `stack dot` dotOptsParser :: Bool -> Parser DotOpts -dotOptsParser externalDefault = - DotOpts <$> includeExternal - <*> includeBase - <*> depthLimit - <*> fmap (maybe Set.empty $ Set.fromList . 
splitNames) prunedPkgs - <*> targetsParser - <*> flagsParser - <*> testTargets - <*> benchTargets - <*> globalHints - where includeExternal = boolFlags externalDefault - "external" - "inclusion of external dependencies" - idm - includeBase = boolFlags True - "include-base" - "inclusion of dependencies on base" - idm - depthLimit = - optional (option auto - (long "depth" <> - metavar "DEPTH" <> - help ("Limit the depth of dependency resolution " <> - "(Default: No limit)"))) - prunedPkgs = optional (strOption - (long "prune" <> - metavar "PACKAGES" <> - help ("Prune each package name " <> - "from the comma separated list " <> - "of package names PACKAGES"))) - testTargets = switch (long "test" <> - help "Consider dependencies of test components") - benchTargets = switch (long "bench" <> - help "Consider dependencies of benchmark components") - - splitNames :: String -> [PackageName] - splitNames = map (mkPackageName . takeWhile (not . isSpace) . dropWhile isSpace) . splitOn "," - - globalHints = switch (long "global-hints" <> - help "Do not require an install GHC; instead, use a hints file for global packages") - -separatorParser :: Parser Text -separatorParser = - fmap escapeSep - (textOption (long "separator" <> - metavar "SEP" <> - help ("Separator between package name " <> - "and package version.") <> - value " " <> - showDefault)) - where escapeSep sep = T.replace "\\t" "\t" (T.replace "\\n" "\n" sep) - -licenseParser :: Parser Bool -licenseParser = boolFlags False - "license" - "printing of dependency licenses instead of versions" - idm - -listDepsFormatOptsParser :: Parser ListDepsFormatOpts -listDepsFormatOptsParser = ListDepsFormatOpts <$> separatorParser <*> licenseParser - -listDepsTreeParser :: Parser ListDepsFormat -listDepsTreeParser = ListDepsTree <$> listDepsFormatOptsParser - -listDepsTextParser :: Parser ListDepsFormat -listDepsTextParser = ListDepsText <$> listDepsFormatOptsParser +dotOptsParser externalDefault = DotOpts + <$> includeExternal + 
<*> includeBase + <*> depthLimit + <*> fmap (maybe Set.empty $ Set.fromList . splitNames) prunedPkgs + <*> fmap (maybe Set.empty $ Set.fromList . splitNames) reachPkgs + <*> targetsParser + <*> flagsParser + <*> testTargets + <*> benchTargets + <*> globalHints + where + includeExternal = boolFlags externalDefault + "external" + "inclusion of external dependencies." + idm + includeBase = boolFlags True + "include-base" + "inclusion of dependencies on base." + idm + depthLimit = optional (option auto + ( long "depth" + <> metavar "DEPTH" + <> help "Limit the depth of dependency resolution. (default: no limit)" + )) + prunedPkgs = optional (strOption + ( long "prune" + <> metavar "PACKAGES" + <> help "Prune specified package(s). PACKAGES is a comma-separated list of \ + \package names." + )) + reachPkgs = optional (strOption + ( long "reach" + <> metavar "PACKAGES" + <> help "Prune packages that cannot reach any of the specified package(s) \ + \in the dependency graph. PACKAGES is a comma-separated list of \ + \package names." + )) -listDepsJsonParser :: Parser ListDepsFormat -listDepsJsonParser = pure ListDepsJSON + targetsParser :: Parser [Text] + targetsParser = + many (textArgument + ( metavar "TARGET" + <> completer targetCompleter + <> help "Can be specified multiple times. If none specified, use all \ + \project packages. Ignores project package components and \ + \non-project packages." + )) -toListDepsOptsParser :: Parser ListDepsFormat -> Parser ListDepsOpts -toListDepsOptsParser formatParser = ListDepsOpts - <$> formatParser - <*> dotOptsParser True + testTargets = switch + ( long "test" + <> help "Consider dependencies of test components." + ) + benchTargets = switch + ( long "bench" + <> help "Consider dependencies of benchmark components." 
+ ) -formatSubCommand :: String -> String -> Parser ListDepsFormat -> Mod CommandFields ListDepsOpts -formatSubCommand cmd desc formatParser = - command cmd (info (toListDepsOptsParser formatParser) - (progDesc desc)) + splitNames :: String -> [PackageName] + splitNames = map + ( mkPackageName + . takeWhile (not . isSpace) + . dropWhile isSpace + ) + . splitOn "," --- | Parser for arguments to `stack ls dependencies`. -listDepsOptsParser :: Parser ListDepsOpts -listDepsOptsParser = subparser - ( formatSubCommand "text" "Print dependencies as text (default)" listDepsTextParser - <> formatSubCommand "tree" "Print dependencies as tree" listDepsTreeParser - <> formatSubCommand "json" "Print dependencies as JSON" listDepsJsonParser - ) <|> toListDepsOptsParser listDepsTextParser + globalHints = switch + ( long "global-hints" + <> help "Do not require an installed GHC; instead, use a hints file for \ + \global packages." + ) diff --git a/src/Stack/Options/EvalParser.hs b/src/Stack/Options/EvalParser.hs new file mode 100644 index 0000000000..79e7278953 --- /dev/null +++ b/src/Stack/Options/EvalParser.hs @@ -0,0 +1,28 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Options.EvalParser +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @eval@ command. +-} + +module Stack.Options.EvalParser + ( evalOptsParser + ) where + +import Options.Applicative ( Parser, metavar, strArgument ) +import Stack.Eval ( EvalOpts (..) ) +import Stack.Options.ExecParser ( execOptsExtraParser ) +import Stack.Prelude + +-- | Parse command line arguments for Stack's @eval@ command. 
+evalOptsParser :: + String -- ^ metavar + -> Parser EvalOpts +evalOptsParser meta = EvalOpts + <$> eoArgsParser + <*> execOptsExtraParser + where + eoArgsParser :: Parser String + eoArgsParser = strArgument (metavar meta) diff --git a/src/Stack/Options/ExecParser.hs b/src/Stack/Options/ExecParser.hs index 039c9ae147..227b7b7419 100644 --- a/src/Stack/Options/ExecParser.hs +++ b/src/Stack/Options/ExecParser.hs @@ -1,79 +1,95 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.ExecParser where + +{-| +Module : Stack.Options.ExecParser +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @exec@, @ghc@, @run@, +@runghc@ and @runhaskell@ commands. +-} + +module Stack.Options.ExecParser + ( execOptsParser + , execOptsExtraParser + ) where import Options.Applicative -import Options.Applicative.Builder.Extra -import Options.Applicative.Args -import Stack.Options.Completion + ( Parser, completer, help, idm, long, metavar, strArgument + , strOption + ) +import Options.Applicative.Builder.Extra ( boolFlags, dirCompleter ) +import Options.Applicative.Args ( argsOption ) +import Stack.Exec + ( ExecOpts (..), ExecOptsExtra (..), SpecialExecCmd (..) ) +import Stack.Options.Completion ( projectExeCompleter ) import Stack.Prelude -import Stack.Types.Config +import Stack.Types.EnvSettings ( EnvSettings (..) ) --- | Parser for exec command +-- | Parse command line arguments for Stack's @exec@, @ghc@, @run@, +-- @runghc@ and @runhaskell@ commands. execOptsParser :: Maybe SpecialExecCmd -> Parser ExecOpts -execOptsParser mcmd = - ExecOpts - <$> maybe eoCmdParser pure mcmd - <*> eoArgsParser - <*> execOptsExtraParser - where - eoCmdParser = ExecCmd <$> strArgument (metavar "COMMAND" <> completer projectExeCompleter) - eoArgsParser = many (strArgument (metavar txt)) - where - txt = case mcmd of - Nothing -> normalTxt - Just ExecCmd{} -> normalTxt - Just ExecRun -> "-- ARGUMENT(S) (e.g. stack run -- file.txt)" - Just ExecGhc -> "-- ARGUMENT(S) (e.g. 
stack runghc -- X.hs -o x)" - Just ExecRunGhc -> "-- ARGUMENT(S) (e.g. stack runghc -- X.hs)" - normalTxt = "-- ARGUMENT(S) (e.g. stack exec ghc-pkg -- describe base)" - -evalOptsParser :: String -- ^ metavar - -> Parser EvalOpts -evalOptsParser meta = - EvalOpts - <$> eoArgsParser - <*> execOptsExtraParser - where - eoArgsParser :: Parser String - eoArgsParser = strArgument (metavar meta) +execOptsParser mcmd = ExecOpts + <$> maybe eoCmdParser pure mcmd + <*> eoArgsParser + <*> execOptsExtraParser + where + eoCmdParser = ExecCmd + <$> strArgument + ( metavar "COMMAND" + <> completer projectExeCompleter + ) + eoArgsParser = many (strArgument (metavar txt)) + where + txt = case mcmd of + Nothing -> normalTxt + Just ExecCmd{} -> normalTxt + Just ExecRun -> "-- ARGUMENT(S) (e.g. stack run -- file.txt)" + Just ExecGhc -> "-- ARGUMENT(S) (e.g. stack ghc -- X.hs -o x)" + Just ExecRunGhc -> "-- ARGUMENT(S) (e.g. stack runghc -- X.hs)" + normalTxt = "-- ARGUMENT(S) (e.g. stack exec ghc-pkg -- describe base)" -- | Parser for extra options to exec command execOptsExtraParser :: Parser ExecOptsExtra execOptsExtraParser = ExecOptsExtra - <$> eoEnvSettingsParser - <*> eoPackagesParser - <*> eoRtsOptionsParser - <*> eoCwdParser - where - eoEnvSettingsParser :: Parser EnvSettings - eoEnvSettingsParser = EnvSettings True - <$> boolFlags True - "ghc-package-path" - "setting the GHC_PACKAGE_PATH variable for the subprocess" - idm - <*> boolFlags True - "stack-exe" - "setting the STACK_EXE environment variable to the path for the stack executable" - idm - <*> pure False - <*> pure True + <$> eoEnvSettingsParser + <*> eoPackagesParser + <*> eoRtsOptionsParser + <*> eoCwdParser + where + eoEnvSettingsParser :: Parser EnvSettings + eoEnvSettingsParser = EnvSettings True + <$> boolFlags True + "ghc-package-path" + "setting the GHC_PACKAGE_PATH variable for the subprocess." 
+ idm + <*> boolFlags True + "stack-exe" + "setting the STACK_EXE environment variable to the path for the \ + \stack executable." + idm + <*> pure False + <*> pure True - eoPackagesParser :: Parser [String] - eoPackagesParser = many - (strOption (long "package" - <> help "Additional package(s) that must be installed" - <> metavar "PACKAGE(S)")) + eoPackagesParser :: Parser [String] + eoPackagesParser = many (strOption + ( long "package" + <> metavar "PACKAGE(S)" + <> help "Add package(s) as a list of names or identifiers separated by \ + \spaces (can be specified multiple times)." + )) - eoRtsOptionsParser :: Parser [String] - eoRtsOptionsParser = concat <$> many (argsOption - ( long "rts-options" - <> help "Explicit RTS options to pass to application" - <> metavar "RTSFLAG")) + eoRtsOptionsParser :: Parser [String] + eoRtsOptionsParser = concat <$> many (argsOption + ( long "rts-options" + <> help "Explicit RTS options to pass to application (can be specified \ + \multiple times)." + <> metavar "RTSFLAG" + )) - eoCwdParser :: Parser (Maybe FilePath) - eoCwdParser = optional - (strOption (long "cwd" - <> help "Sets the working directory before executing" - <> metavar "DIR" - <> completer dirCompleter) - ) + eoCwdParser :: Parser (Maybe FilePath) + eoCwdParser = optional (strOption + ( long "cwd" + <> help "Sets the working directory before executing." + <> metavar "DIR" + <> completer dirCompleter + )) diff --git a/src/Stack/Options/FlagsParser.hs b/src/Stack/Options/FlagsParser.hs new file mode 100644 index 0000000000..2a85b06045 --- /dev/null +++ b/src/Stack/Options/FlagsParser.hs @@ -0,0 +1,37 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Options.FlagsParser +Description : Parser for one or more Cabal flags. +License : BSD-3-Clause + +Parser for one or more Cabal flags. 
+-} + +module Stack.Options.FlagsParser + ( flagsParser + ) where + +import qualified Data.Map as Map +import Options.Applicative + ( Parser, completer, help, long, metavar, option ) +import Stack.Options.Completion ( flagCompleter ) +import Stack.Options.PackageParser ( readFlag ) +import Stack.Prelude +import Stack.Types.BuildOptsCLI ( ApplyCLIFlag ) + +-- | Parser for one or more @--flag@ options, each for a Cabal flag. +flagsParser :: Parser (Map.Map ApplyCLIFlag (Map.Map FlagName Bool)) +flagsParser = Map.unionsWith Map.union + <$> many (option readFlag + ( long "flag" + <> completer flagCompleter + <> metavar "PACKAGE:[-]FLAG" + <> help "Set (or unset) the Cabal flag for the package (or use '*' for \ + \all packages) (can be specified multiple times). Applies to \ + \project packages, packages included directly in the snapshot, \ + \and extra-deps. Takes precedence over any Cabal flags \ + \specified for the package in the snapshot or in the \ + \project-level configuration file (stack.yaml)." 
+ )) diff --git a/src/Stack/Options/GhcBuildParser.hs b/src/Stack/Options/GhcBuildParser.hs index 21a3fd3c20..2ee0094ed8 100644 --- a/src/Stack/Options/GhcBuildParser.hs +++ b/src/Stack/Options/GhcBuildParser.hs @@ -1,26 +1,44 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.GhcBuildParser where + +{-| +Module : Stack.Options.GhcBuildParser +License : BSD-3-Clause +-} + +module Stack.Options.GhcBuildParser +( ghcBuildParser +) where import Options.Applicative -import Options.Applicative.Types -import Stack.Options.Utils + ( Parser, completeWith, help, long, metavar, option ) +import Options.Applicative.Types ( readerAsk, readerError ) +import Stack.Options.Utils ( hideMods ) import Stack.Prelude -import Stack.Types.CompilerBuild +import Stack.Types.CompilerBuild ( CompilerBuild, parseCompilerBuild ) -- | GHC build parser ghcBuildParser :: Bool -> Parser CompilerBuild -ghcBuildParser hide = - option - readGHCBuild - (long "ghc-build" <> metavar "BUILD" <> - completeWith ["standard", "gmp4", "nopie", "tinfo6", "tinfo6-nopie", "ncurses6", "integersimple"] <> - help - "Specialized GHC build, e.g. 'gmp4' or 'standard' (usually auto-detected)" <> - hideMods hide - ) - where - readGHCBuild = do - s <- readerAsk - case parseCompilerBuild s of - Left e -> readerError (show e) - Right v -> return v +ghcBuildParser hide = option readGHCBuild + ( long "ghc-build" + <> metavar "BUILD" + <> completeWith + [ "standard" + , "gmp4" + , "nopie" + , "tinfo6" + , "tinfo6-libc6-pre232" + , "tinfo6-nopie" + , "ncurses6" + , "int-native" + , "integersimple" + ] + <> help "Specialized GHC build, e.g. 'gmp4' or 'standard' (usually \ + \auto-detected)." 
+ <> hideMods hide + ) + where + readGHCBuild = do + s <- readerAsk + case parseCompilerBuild s of + Left e -> readerError (displayException e) + Right v -> pure v diff --git a/src/Stack/Options/GhcVariantParser.hs b/src/Stack/Options/GhcVariantParser.hs index a366853bce..bd6729d5ee 100644 --- a/src/Stack/Options/GhcVariantParser.hs +++ b/src/Stack/Options/GhcVariantParser.hs @@ -1,25 +1,33 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.GhcVariantParser where + +{-| +Module : Stack.Options.GhcVariantParser +License : BSD-3-Clause +-} + +module Stack.Options.GhcVariantParser + ( ghcVariantParser + ) where import Options.Applicative -import Options.Applicative.Types (readerAsk) + ( Parser, help, long, metavar, option, readerError ) +import Options.Applicative.Types ( readerAsk ) import Stack.Prelude -import Stack.Options.Utils -import Stack.Types.Config +import Stack.Options.Utils ( hideMods ) +import Stack.Types.GHCVariant ( GHCVariant, parseGHCVariant ) -- | GHC variant parser ghcVariantParser :: Bool -> Parser GHCVariant -ghcVariantParser hide = - option - readGHCVariant - (long "ghc-variant" <> metavar "VARIANT" <> - help - "Specialized GHC variant, e.g. integersimple (incompatible with --system-ghc)" <> - hideMods hide - ) - where - readGHCVariant = do - s <- readerAsk - case parseGHCVariant s of - Left e -> readerError (show e) - Right v -> return v +ghcVariantParser hide = option readGHCVariant + ( long "ghc-variant" + <> metavar "VARIANT" + <> help "Specialized GHC variant, e.g. int-native or integersimple \ + \(incompatible with --system-ghc)." 
+ <> hideMods hide + ) + where + readGHCVariant = do + s <- readerAsk + case parseGHCVariant s of + Left e -> readerError (displayException e) + Right v -> pure v diff --git a/src/Stack/Options/GhciParser.hs b/src/Stack/Options/GhciParser.hs index 4380e0d7bf..c2e6bf0319 100644 --- a/src/Stack/Options/GhciParser.hs +++ b/src/Stack/Options/GhciParser.hs @@ -1,60 +1,103 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -module Stack.Options.GhciParser where + +{-| +Module : Stack.Options.GhciParser +Description : Parse arguments for Stack's @ghci@ and @repl@ commands. +License : BSD-3-Clause + +Function to parse arguments for Stack's @ghci@ and @repl@ commands. +-} + +module Stack.Options.GhciParser + ( ghciOptsParser + ) where import Options.Applicative -import Options.Applicative.Args + ( Parser, completer, flag, help, idm, internal, long, metavar + , strOption, switch + ) +import Options.Applicative.Args ( argsOption ) import Options.Applicative.Builder.Extra -import Stack.Config (packagesParser) -import Stack.Ghci (GhciOpts (..)) -import Stack.Options.BuildParser (flagsParser) -import Stack.Options.Completion + ( boolFlags, boolFlagsNoDefault, fileExtCompleter + , textArgument, textOption + ) +import Stack.Options.Completion ( ghcOptsCompleter, targetCompleter ) +import Stack.Options.FlagsParser ( flagsParser ) +import Stack.Options.PackagesParser ( packagesParser ) import Stack.Prelude +import Stack.Types.GhciOpts ( GhciOpts (..) ) --- | Parser for GHCI options +-- | Parse command line arguments for Stack's @ghci@ and @repl@ commands. ghciOptsParser :: Parser GhciOpts ghciOptsParser = GhciOpts - <$> many - (textArgument - (metavar "TARGET/FILE" <> - completer (targetCompleter <> fileExtCompleter [".hs", ".lhs"]) <> - help ("If none specified, use all local packages. " <> - "See https://docs.haskellstack.org/en/stable/build_command/#target-syntax for details. 
" <> - "If a path to a .hs or .lhs file is specified, it will be loaded."))) - <*> ((\x y -> x ++ concat y) - <$> flag - [] - ["-Wall", "-Werror"] - (long "pedantic" <> help "Turn on -Wall and -Werror") - <*> many (argsOption (long "ghci-options" <> - metavar "OPTIONS" <> - completer ghcOptsCompleter <> - help "Additional options passed to GHCi")) - ) - <*> (concat <$> many - (argsOption - (long "ghc-options" <> - metavar "OPTIONS" <> - completer ghcOptsCompleter <> - help "Additional options passed to both GHC and GHCi"))) - <*> flagsParser - <*> optional - (strOption (long "with-ghc" <> - metavar "GHC" <> - help "Use this GHC to run GHCi")) - <*> (not <$> boolFlags True "load" "load modules on start-up" idm) - <*> packagesParser - <*> optional - (textOption - (long "main-is" <> - metavar "TARGET" <> - completer targetCompleter <> - help "Specify which target should contain the main \ - \module to load, such as for an executable for \ - \test suite or benchmark.")) - <*> switch (long "load-local-deps" <> help "Load all local dependencies of your targets") - -- TODO: deprecate this? probably useless. - <*> switch (long "skip-intermediate-deps" <> help "Skip loading intermediate target dependencies" <> internal) - <*> optional (boolFlagsNoDefault "package-hiding" "package hiding" idm) - <*> switch (long "no-build" <> help "Don't build before launching GHCi" <> internal) - <*> switch (long "only-main" <> help "Only load and import the main module. If no main module, no modules will be loaded.") + <$> many (textArgument + ( metavar "TARGET/FILE" + <> completer (targetCompleter <> fileExtCompleter [".hs", ".lhs"]) + <> help "If none specified, use all project packages. See \ + \https://docs.haskellstack.org/en/stable/commands/build_command/#target-syntax \ + \for details. If a path to a .hs or .lhs file is specified, it \ + \will be loaded." 
+ )) + <*> ( (\x y -> x ++ concat y) + <$> flag + [] + ["-Wall", "-Werror"] + ( long "pedantic" + <> help "Turn on -Wall and -Werror." + ) + <*> many (argsOption + ( long "ghci-options" + <> metavar "OPTIONS" + <> completer ghcOptsCompleter + <> help "Additional options passed to GHCi (can be specified \ + \multiple times)." + )) + ) + <*> ( concat + <$> many (argsOption + ( long "ghc-options" + <> metavar "OPTIONS" + <> completer ghcOptsCompleter + <> help "Additional options passed to both GHC and GHCi (can be \ + \specified multiple times)." + )) + ) + <*> flagsParser + <*> optional (strOption + ( long "with-ghc" + <> metavar "GHC" + <> help "Use this GHC to run GHCi." + )) + <*> ( not + <$> boolFlags True + "load" + "load modules on start-up." + idm + ) + <*> packagesParser + <*> optional (textOption + ( long "main-is" + <> metavar "TARGET" + <> completer targetCompleter + <> help "Specify which target should contain the main module to load, \ + \such as for an executable for test suite or benchmark." + )) + <*> switch + ( long "load-local-deps" + <> help "Load all local dependencies of your targets." + ) + <*> optional (boolFlagsNoDefault + "package-hiding" + "package hiding" + idm) + <*> switch + ( long "no-build" + <> help "Don't build before launching GHCi." + <> internal + ) + <*> switch + ( long "only-main" + <> help "Only load and import the main module. If no main module, no \ + \modules will be loaded." 
+ ) diff --git a/src/Stack/Options/GlobalParser.hs b/src/Stack/Options/GlobalParser.hs index 0ca2eadb6b..9cc6201e8e 100644 --- a/src/Stack/Options/GlobalParser.hs +++ b/src/Stack/Options/GlobalParser.hs @@ -1,121 +1,176 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} -module Stack.Options.GlobalParser where +{-| +Module : Stack.Options.GlobalParser +Description : Functions to parse Stack's \'global\' command line arguments. +License : BSD-3-Clause + +Functions to parse Stack's \'global\' command line arguments. +-} + +module Stack.Options.GlobalParser + ( globalOptsFromMonoid + , globalOptsParser + ) where import Options.Applicative + ( Mod, Parser, ReadM, auto, completer, help, hidden, internal + , long, metavar, option, short, strOption, value + ) import Options.Applicative.Builder.Extra -import Path.IO (getCurrentDir, resolveDir', resolveFile') -import qualified Stack.Docker as Docker -import Stack.Init + ( fileExtCompleter, firstBoolFlagsFalse + , firstBoolFlagsNoDefault, firstBoolFlagsTrue, optionalFirst + ) +import Options.Applicative.Types ( readerAsk ) +import Path.IO ( getCurrentDir, resolveDir', resolveFile' ) +import qualified Stack.Docker as Docker import Stack.Prelude -import Stack.Options.ConfigParser -import Stack.Options.LogLevelParser -import Stack.Options.ResolverParser -import Stack.Options.Utils -import Stack.Types.Config -import Stack.Types.Docker +import Stack.Options.ConfigParser ( configOptsParser ) +import Stack.Options.LogLevelParser ( logLevelOptsParser ) +import Stack.Options.SnapshotParser + ( abstractSnapshotOptsParser, compilerOptsParser ) +import Stack.Options.Utils ( GlobalOptsContext (..), hideMods ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.GlobalOptsMonoid ( GlobalOptsMonoid (..) 
) +import Stack.Types.LockFileBehavior + ( LockFileBehavior (..), readLockFileBehavior ) +import Stack.Types.StackYamlLoc ( StackYamlLoc (..) ) +import Stack.Types.Docker ( dockerEntrypointArgName ) -- | Parser for global command-line options. -globalOptsParser :: FilePath -> GlobalOptsContext -> Maybe LogLevel -> Parser GlobalOptsMonoid -globalOptsParser currentDir kind defLogLevel = - GlobalOptsMonoid <$> - optionalFirst (strOption (long Docker.reExecArgName <> hidden <> internal)) <*> - optionalFirst (option auto (long dockerEntrypointArgName <> hidden <> internal)) <*> - (First <$> logLevelOptsParser hide0 defLogLevel) <*> - firstBoolFlagsTrue +globalOptsParser :: + FilePath + -> GlobalOptsContext + -> Parser GlobalOptsMonoid +globalOptsParser currentDir kind = GlobalOptsMonoid + <$> optionalFirst (strOption + ( long Docker.reExecArgName + <> hidden + <> internal + )) + <*> optionalFirst (option auto + ( long dockerEntrypointArgName + <> hidden + <> internal + )) + <*> (First <$> logLevelOptsParser hide0) + <*> firstBoolFlagsTrue "time-in-log" - "inclusion of timings in logs, for the purposes of using diff with logs" - hide <*> - configOptsParser currentDir kind <*> - optionalFirst (abstractResolverOptsParser hide0) <*> - pure (First Nothing) <*> -- resolver root is only set via the script command - optionalFirst (compilerOptsParser hide0) <*> - firstBoolFlagsNoDefault + "inclusion of timings in logs, for the purposes of using diff with \ + \logs." + hide + <*> firstBoolFlagsFalse + "rsl-in-log" + "inclusion of raw snapshot layer (rsl) in logs." + hide + <*> firstBoolFlagsFalse + "plan-in-log" + "inclusion of information about build plan construction in logs." 
+ hide + <*> configOptsParser currentDir kind + <*> optionalFirst (abstractSnapshotOptsParser hide0) + <*> pure (First Nothing) + <*> optionalFirst (compilerOptsParser hide0) + -- snapshot root is only set via the script command + <*> firstBoolFlagsNoDefault "terminal" - "overriding terminal detection in the case of running in a false terminal" - hide <*> - option readStyles - (long "stack-colors" <> - long "stack-colours" <> - metavar "STYLES" <> - value mempty <> - help "Specify stack's output styles; STYLES is a colon-delimited \ - \sequence of key=value, where 'key' is a style name and 'value' \ - \is a semicolon-delimited list of 'ANSI' SGR (Select Graphic \ - \Rendition) control codes (in decimal). Use 'stack ls \ - \stack-colors --basic' to see the current sequence. In shells \ - \where a semicolon is a command separator, enclose STYLES in \ - \quotes." <> - hide) <*> - optionalFirst (option auto - (long "terminal-width" <> - metavar "INT" <> - help "Specify the width of the terminal, used for pretty-print messages" <> - hide)) <*> - optionalFirst - (strOption - (long "stack-yaml" <> - metavar "STACK-YAML" <> - completer (fileExtCompleter [".yaml"]) <> - help ("Override project stack.yaml file " <> - "(overrides any STACK_YAML environment variable)") <> - hide)) <*> - optionalFirst (option readLockFileBehavior - (long "lock-file" <> - help "Specify how to interact with lock files. Default: read/write. If resolver is overridden: read-only" <> - hide)) - where - hide = hideMods hide0 - hide0 = kind /= OuterGlobalOpts + "overriding terminal detection in the case of running in a false \ + \terminal." + hide + <*> option readStyles + ( long "stack-colors" + <> long "stack-colours" + <> metavar "STYLES" + <> value mempty + <> help "Specify Stack's output styles; STYLES is a colon-delimited \ + \sequence of key=value, where 'key' is a style name and 'value' \ + \is a semicolon-delimited list of 'ANSI' SGR (Select Graphic \ + \Rendition) control codes (in decimal). 
Use 'stack ls \ + \stack-colors --basic' to see the current sequence. In shells \ + \where a semicolon is a command separator, enclose STYLES in \ + \quotes." + <> hide + ) + <*> optionalFirst (option auto + ( long "terminal-width" + <> metavar "INT" + <> help "Specify the width of the terminal, used for pretty-print \ + \messages." + <> hide + )) + <*> optionalFirst (strOption + ( long "stack-yaml" + <> short 'w' + <> metavar "STACK-YAML" + <> completer (fileExtCompleter [".yaml"]) + <> help "Override project stack.yaml file (overrides any STACK_YAML \ + \environment variable)." + <> hide + )) + <*> optionalFirst (option readLockFileBehavior + ( long "lock-file" + <> help "Specify how to interact with lock files. (default: if \ + \snapshot is overridden: read-only; otherwise: read/write)" + <> hide + )) + where + hide :: Mod f a + hide = hideMods hide0 + hide0 = kind /= OuterGlobalOpts -- | Create GlobalOpts from GlobalOptsMonoid. -globalOptsFromMonoid :: MonadIO m => Bool -> GlobalOptsMonoid -> m GlobalOpts -globalOptsFromMonoid defaultTerminal GlobalOptsMonoid{..} = do - resolver <- for (getFirst globalMonoidResolver) $ \ur -> do +globalOptsFromMonoid :: + MonadIO m + => String + -- ^ The name of the current Stack executable, as it was invoked. + -> Maybe (Path Abs File) + -- ^ The path to the current Stack executable, if the operating system + -- provides a reliable way to determine it and where a result was + -- available. 
+ -> Bool + -> GlobalOptsMonoid + -> m GlobalOpts +globalOptsFromMonoid progName mExecutablePath defaultTerminal globalMonoid = do + snapshot <- for (getFirst globalMonoid.snapshot) $ \us -> do root <- - case globalMonoidResolverRoot of + case globalMonoid.snapshotRoot of First Nothing -> getCurrentDir First (Just dir) -> resolveDir' dir - resolvePaths (Just root) ur + resolvePaths (Just root) us stackYaml <- - case getFirst globalMonoidStackYaml of + case getFirst globalMonoid.stackYaml of Nothing -> pure SYLDefault Just fp -> SYLOverride <$> resolveFile' fp - pure GlobalOpts - { globalReExecVersion = getFirst globalMonoidReExecVersion - , globalDockerEntrypoint = getFirst globalMonoidDockerEntrypoint - , globalLogLevel = fromFirst defaultLogLevel globalMonoidLogLevel - , globalTimeInLog = fromFirstTrue globalMonoidTimeInLog - , globalConfigMonoid = globalMonoidConfigMonoid - , globalResolver = resolver - , globalCompiler = getFirst globalMonoidCompiler - , globalTerminal = fromFirst defaultTerminal globalMonoidTerminal - , globalStylesUpdate = globalMonoidStyles - , globalTermWidth = getFirst globalMonoidTermWidth - , globalStackYaml = stackYaml - , globalLockFileBehavior = + let lockFileBehavior = let defLFB = - case getFirst globalMonoidResolver of + case getFirst globalMonoid.snapshot of Nothing -> LFBReadWrite _ -> LFBReadOnly - in fromFirst defLFB globalMonoidLockFileBehavior + in fromFirst defLFB globalMonoid.lockFileBehavior + pure GlobalOpts + { reExecVersion = getFirst globalMonoid.reExecVersion + , dockerEntrypoint = getFirst globalMonoid.dockerEntrypoint + , logLevel = fromFirst defaultLogLevel globalMonoid.logLevel + , timeInLog = fromFirstTrue globalMonoid.timeInLog + , rslInLog = fromFirstFalse globalMonoid.rslInLog + , planInLog = fromFirstFalse globalMonoid.planInLog + , configMonoid = globalMonoid.configMonoid + , snapshot + , compiler = getFirst globalMonoid.compiler + , terminal = fromFirst defaultTerminal globalMonoid.terminal + , stylesUpdate 
= globalMonoid.styles + , termWidthOpt = getFirst globalMonoid.termWidthOpt + , stackYaml + , lockFileBehavior + , progName + , mExecutablePath } -initOptsParser :: Parser InitOpts -initOptsParser = - InitOpts <$> searchDirs - <*> omitPackages - <*> overwrite <*> fmap not ignoreSubDirs - where - searchDirs = - many (textArgument - (metavar "DIR" <> - completer dirCompleter <> - help "Directories to include, default is current directory.")) - ignoreSubDirs = switch (long "ignore-subdirs" <> - help "Do not search for .cabal files in sub directories") - overwrite = switch (long "force" <> - help "Force overwriting an existing stack.yaml") - omitPackages = switch (long "omit-packages" <> - help "Exclude conflicting or incompatible user packages") +-- | Default logging level should be something useful but not crazy. +defaultLogLevel :: LogLevel +defaultLogLevel = LevelInfo + +readStyles :: ReadM StylesUpdate +readStyles = parseStylesUpdateFromString <$> readerAsk diff --git a/src/Stack/Options/HaddockParser.hs b/src/Stack/Options/HaddockParser.hs index 55d43c24bd..28a508d13f 100644 --- a/src/Stack/Options/HaddockParser.hs +++ b/src/Stack/Options/HaddockParser.hs @@ -1,20 +1,31 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.HaddockParser where -import Options.Applicative -import Options.Applicative.Args -import Stack.Options.Utils +{-| +Module : Stack.Options.HaddockParser +License : BSD-3-Clause +-} + +module Stack.Options.HaddockParser + ( haddockOptsParser + ) where + +import Options.Applicative ( Parser, help, long, metavar ) +import Options.Applicative.Args ( argsOption ) +import Stack.Options.Utils ( hideMods ) import Stack.Prelude -import Stack.Types.Config +import Stack.Types.BuildOptsMonoid ( HaddockOptsMonoid (..) ) -- | Parser for haddock arguments. 
haddockOptsParser :: Bool -> Parser HaddockOptsMonoid -haddockOptsParser hide0 = - HaddockOptsMonoid <$> fmap (fromMaybe []) - (optional - (argsOption - (long "haddock-arguments" <> - metavar "HADDOCK_ARGS" <> - help "Arguments passed to the haddock program" <> - hide))) - where hide = hideMods hide0 +haddockOptsParser hide0 = HaddockOptsMonoid + <$> fmap + (fromMaybe []) + ( optional (argsOption + ( long "haddock-arguments" + <> metavar "HADDOCK_ARGS" + <> help "Arguments passed to the Haddock program." + <> hide + )) + ) + where + hide = hideMods hide0 diff --git a/src/Stack/Options/HpcReportParser.hs b/src/Stack/Options/HpcReportParser.hs index 9201d0d862..efae794dc4 100644 --- a/src/Stack/Options/HpcReportParser.hs +++ b/src/Stack/Options/HpcReportParser.hs @@ -1,41 +1,44 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.HpcReportParser where -import qualified Data.Text as T +{-| +Module : Stack.Options.HpcReportParser +Description : Parser for @stack hpc report@. +License : BSD-3-Clause + +Parser for @stack hpc report@. +-} + +module Stack.Options.HpcReportParser + ( hpcReportOptsParser + ) where + import Options.Applicative + ( Parser, completer, help, long, metavar, strOption, switch ) import Options.Applicative.Builder.Extra -import Options.Applicative.Types (readerAsk) -import Stack.Coverage (HpcReportOpts (..)) -import Stack.Options.Completion (targetCompleter) + ( dirCompleter, fileExtCompleter, textArgument ) +import Stack.Options.Completion ( targetCompleter ) import Stack.Prelude -import Stack.Types.Config +import Stack.Types.HpcReportOpts ( HpcReportOpts (..) ) -- | Parser for @stack hpc report@. 
hpcReportOptsParser :: Parser HpcReportOpts hpcReportOptsParser = HpcReportOpts - <$> many (textArgument $ metavar "TARGET_OR_TIX" <> - completer (targetCompleter <> fileExtCompleter [".tix"])) - <*> switch (long "all" <> help "Use results from all packages and components involved in previous --coverage run") - <*> optional (strOption (long "destdir" <> - metavar "DIR" <> - completer dirCompleter <> - help "Output directory for HTML report")) - <*> switch (long "open" <> help "Open the report in the browser") - -pvpBoundsOption :: Parser PvpBounds -pvpBoundsOption = - option - readPvpBounds - (long "pvp-bounds" <> - metavar "PVP-BOUNDS" <> - completeWith ["none", "lower", "upper", "both"] <> - help - "How PVP version bounds should be added to .cabal file: none, lower, upper, both") - where - readPvpBounds = do - s <- readerAsk - case parsePvpBounds $ T.pack s of - Left e -> - readerError e - Right v -> - return v + <$> many (textArgument + ( metavar "TARGET_OR_TIX" + <> completer (targetCompleter <> fileExtCompleter [".tix"]) + )) + <*> switch + ( long "all" + <> help "Use results from all packages and components involved in \ + \previous --coverage run." + ) + <*> optional (strOption + ( long "destdir" + <> metavar "DIR" + <> completer dirCompleter + <> help "Output directory for HTML report." + )) + <*> switch + ( long "open" + <> help "Open the report in the browser." + ) diff --git a/src/Stack/Options/IdeParser.hs b/src/Stack/Options/IdeParser.hs new file mode 100644 index 0000000000..14fb2a081e --- /dev/null +++ b/src/Stack/Options/IdeParser.hs @@ -0,0 +1,64 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Options.IdeParser +Description : Parse arguments for Stack's @ide@ commands. +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @ide@ commands. 
+-} + +module Stack.Options.IdeParser + ( idePackagesParser + , ideTargetsParser + ) where + +import Options.Applicative ( Parser, flag, help, long, switch ) +import Stack.Prelude +import Stack.Types.IdeOpts ( ListPackagesCmd (..), OutputStream (..) ) + +-- | Parse command line arguments for Stack's @ide packages@ command. +idePackagesParser :: Parser (OutputStream, ListPackagesCmd) +idePackagesParser = (,) <$> outputFlag <*> cabalFileFlag + +-- | Parse command line arguments for Stack's @ide targets@ command. +ideTargetsParser :: Parser ((Bool, Bool, Bool), OutputStream) +ideTargetsParser = + (,) <$> ((,,) <$> exeFlag <*> testFlag <*> benchFlag) <*> outputFlag + +outputFlag :: Parser OutputStream +outputFlag = flag + OutputLogInfo + OutputStdout + ( long "stdout" + <> help "Send output to the standard output stream instead of the \ + \default, the standard error stream." + ) + +cabalFileFlag :: Parser ListPackagesCmd +cabalFileFlag = flag + ListPackageNames + ListPackageCabalFiles + ( long "cabal-files" + <> help "Print paths to package Cabal files instead of package \ + \names." + ) + +exeFlag :: Parser Bool +exeFlag = switch + ( long "exes" + <> help "Include executables." + ) + +testFlag :: Parser Bool +testFlag = switch + ( long "tests" + <> help "Include test suites." + ) + +benchFlag :: Parser Bool +benchFlag = switch + ( long "benchmarks" + <> help "Include benchmarks." + ) diff --git a/src/Stack/Options/InitParser.hs b/src/Stack/Options/InitParser.hs new file mode 100644 index 0000000000..976e828189 --- /dev/null +++ b/src/Stack/Options/InitParser.hs @@ -0,0 +1,50 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Options.InitParser +Description : Parse arguments for Stack's @init@ and @new@ commands. +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @init@ and @new@ commands. 
+-} + +module Stack.Options.InitParser + ( initOptsParser + ) where + +import Options.Applicative + ( Parser, completer, help, long, metavar, switch ) +import Options.Applicative.Builder.Extra + ( dirCompleter, textArgument ) +import Stack.Init ( InitOpts (..) ) +import Stack.Prelude + +-- | Parse command line arguments for Stack's @init@ and @new@ commands. +initOptsParser :: Parser InitOpts +initOptsParser = InitOpts + <$> searchDirs + <*> omitPackages + <*> overwrite + <*> fmap not ignoreSubDirs + where + searchDirs = many (textArgument + ( metavar "DIR(S)" + <> completer dirCompleter + <> help "Directory, or directories, to include in the search for Cabal \ + \files, when initialising. The default is the current directory." + )) + ignoreSubDirs = switch + ( long "ignore-subdirs" + <> help "Do not search for Cabal files in subdirectories, when \ + \initialising." + ) + overwrite = switch + ( long "force" + <> help "Force an initialisation that overwrites any existing stack.yaml \ + \file." + ) + omitPackages = switch + ( long "omit-packages" + <> help "Exclude conflicting or incompatible user packages, when \ + \initialising." + ) diff --git a/src/Stack/Options/LogLevelParser.hs b/src/Stack/Options/LogLevelParser.hs index 65bcddc654..fabf129431 100644 --- a/src/Stack/Options/LogLevelParser.hs +++ b/src/Stack/Options/LogLevelParser.hs @@ -1,44 +1,64 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -module Stack.Options.LogLevelParser where +{-| +Module : Stack.Options.LogLevelParser +License : BSD-3-Clause +-} -import qualified Data.Text as T +module Stack.Options.LogLevelParser + ( logLevelOptsParser + ) where + +import qualified Data.Text as T import Options.Applicative -import Stack.Options.Utils + ( Parser, completeWith, flag', help, long, metavar, short + , strOption + ) +import Stack.Options.Utils ( hideMods ) import Stack.Prelude -- | Parser for a logging level. 
-logLevelOptsParser :: Bool -> Maybe LogLevel -> Parser (Maybe LogLevel) -logLevelOptsParser hide defLogLevel = - fmap (Just . parse) - (strOption (long "verbosity" <> - metavar "VERBOSITY" <> - completeWith ["silent", "error", "warn", "info", "debug"] <> - help "Verbosity: silent, error, warn, info, debug" <> - hideMods hide)) <|> - flag' (Just verboseLevel) - (short 'v' <> long "verbose" <> - help ("Enable verbose mode: verbosity level \"" <> showLevel verboseLevel <> "\"") <> - hideMods hide) <|> - flag' (Just silentLevel) - (long "silent" <> - help ("Enable silent mode: verbosity level \"" <> showLevel silentLevel <> "\"") <> - hideMods hide) <|> - pure defLogLevel - where verboseLevel = LevelDebug - silentLevel = LevelOther "silent" - showLevel l = - case l of - LevelDebug -> "debug" - LevelInfo -> "info" - LevelWarn -> "warn" - LevelError -> "error" - LevelOther x -> T.unpack x - parse s = - case s of - "debug" -> LevelDebug - "info" -> LevelInfo - "warn" -> LevelWarn - "error" -> LevelError - _ -> LevelOther (T.pack s) +logLevelOptsParser :: Bool -> Parser (Maybe LogLevel) +logLevelOptsParser hide = fmap (Just . parse) + (strOption + ( long "verbosity" + <> metavar "VERBOSITY" + <> completeWith ["silent", "error", "warn", "info", "debug"] + <> help "Set verbosity level: silent, error, warn, info or debug." + <> hideMods hide + )) + <|> flag' (Just verboseLevel) + ( short 'v' + <> long "verbose" + <> help + ( "Enable verbose mode: verbosity level \"" + <> showLevel verboseLevel + <> "\"." + ) + <> hideMods hide + ) + <|> flag' (Just silentLevel) + ( long "silent" + <> help ( "Enable silent mode: verbosity level \"" + <> showLevel silentLevel + <> "\"." 
+ ) + <> hideMods hide + ) + <|> pure Nothing + where + verboseLevel = LevelDebug + silentLevel = LevelOther "silent" + showLevel l = case l of + LevelDebug -> "debug" + LevelInfo -> "info" + LevelWarn -> "warn" + LevelError -> "error" + LevelOther x -> T.unpack x + parse s = case s of + "debug" -> LevelDebug + "info" -> LevelInfo + "warn" -> LevelWarn + "error" -> LevelError + _ -> LevelOther (T.pack s) diff --git a/src/Stack/Options/LsParser.hs b/src/Stack/Options/LsParser.hs new file mode 100644 index 0000000000..97dadf0029 --- /dev/null +++ b/src/Stack/Options/LsParser.hs @@ -0,0 +1,262 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE ApplicativeDo #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Options.LsParser +Description : Parse arguments for Stack's @ls@ command. +License : BSD-3-Clause + +Function to parse command line arguments for Stack's @ls@ command. +-} + +module Stack.Options.LsParser + ( lsOptsParser + ) where + +import qualified Data.Text as T +import qualified Options.Applicative as OA +import Options.Applicative ( idm ) +import Options.Applicative.Builder.Extra ( boolFlags, textOption ) +import Stack.Constants ( globalFooter ) +import Stack.Options.DotParser ( dotOptsParser ) +import Stack.Prelude hiding ( sep ) +import Stack.Types.LsOpts + ( ListDepsFormat (..), ListDepsFormatOpts (..) + , ListDepsOpts (..), ListDepsTextFilter (..) + , ListGlobalsOpts (..), ListStylesOpts (..) + , ListToolsOpts (..), LsCmdOpts (..), LsCmds (..) + , LsView (..), SnapshotOpts (..), ListGlobalsOpts + ) + +-- | Parse command line arguments for Stack's @ls@ command. +lsOptsParser :: OA.Parser LsCmdOpts +lsOptsParser = LsCmdOpts + <$> OA.hsubparser + ( lsSnapCmd + <> lsGlobalsCmd + <> lsDepsCmd + <> lsStylesCmd + <> lsToolsCmd + ) + +lsSnapCmd :: OA.Mod OA.CommandFields LsCmds +lsSnapCmd = OA.command "snapshots" $ + OA.info lsCmdOptsParser $ + OA.progDesc "View snapshots. 
(default: local)" + <> OA.footer localSnapshotMsg + +lsGlobalsCmd :: OA.Mod OA.CommandFields LsCmds +lsGlobalsCmd = OA.command "globals" $ + OA.info lsGlobalsOptsParser $ + OA.progDesc "View global packages." + <> OA.footer globalFooter + +lsDepsCmd :: OA.Mod OA.CommandFields LsCmds +lsDepsCmd = OA.command "dependencies" $ + OA.info lsDepOptsParser $ + OA.progDesc + "View the packages versions used for a project. Use a command if the \ + \first target specified has the name of a command. Targets other than \ + \project packages are ignored." + <> OA.footer globalFooter + +lsStylesCmd :: OA.Mod OA.CommandFields LsCmds +lsStylesCmd = + OA.command + "stack-colors" + (OA.info lsStylesOptsParser + (OA.progDesc "View Stack's output styles.")) + <> OA.command + "stack-colours" + (OA.info lsStylesOptsParser + (OA.progDesc "View Stack's output styles (alias for \ + \'stack-colors').")) + +lsToolsCmd :: OA.Mod OA.CommandFields LsCmds +lsToolsCmd = + OA.command + "tools" + (OA.info lsToolsOptsParser + (OA.progDesc "View Stack's installed tools.")) + +lsCmdOptsParser :: OA.Parser LsCmds +lsCmdOptsParser = LsSnapshot <$> lsViewSnapCmd + +lsGlobalsOptsParser :: OA.Parser LsCmds +lsGlobalsOptsParser = LsGlobals <$> listGlobalsOptsParser + +lsDepOptsParser :: OA.Parser LsCmds +lsDepOptsParser = LsDependencies <$> listDepsOptsParser + +lsStylesOptsParser :: OA.Parser LsCmds +lsStylesOptsParser = LsStyles <$> listStylesOptsParser + +lsToolsOptsParser :: OA.Parser LsCmds +lsToolsOptsParser = LsTools <$> listToolsOptsParser + +lsViewSnapCmd :: OA.Parser SnapshotOpts +lsViewSnapCmd = SnapshotOpts + <$> ( OA.hsubparser (lsViewRemoteCmd <> lsViewLocalCmd) <|> pure Local) + <*> OA.switch + ( OA.long "lts" + <> OA.short 'l' + <> OA.help "Only show LTS Haskell snapshots." + ) + <*> OA.switch + ( OA.long "nightly" + <> OA.short 'n' + <> OA.help "Only show Nightly snapshots." 
+ ) + +lsViewRemoteCmd :: OA.Mod OA.CommandFields LsView +lsViewRemoteCmd = OA.command "remote" $ + OA.info (pure Remote) $ + OA.progDesc "View remote snapshots." + <> OA.footer pagerMsg + +pagerMsg :: String +pagerMsg = + "On a terminal, uses a pager, if one is available. Respects the PAGER \ + \environment variable (subject to that, prefers pager 'less' to 'more')." + +lsViewLocalCmd :: OA.Mod OA.CommandFields LsView +lsViewLocalCmd = OA.command "local" $ + OA.info (pure Local) $ + OA.progDesc "View local snapshots." + <> OA.footer localSnapshotMsg + +localSnapshotMsg :: String +localSnapshotMsg = + "A local snapshot is identified by a hash code. " <> pagerMsg + +-- | Parser for arguments to `stack ls globals`. +listGlobalsOptsParser :: OA.Parser ListGlobalsOpts +listGlobalsOptsParser = ListGlobalsOpts <$> globalHints + where + globalHints = boolFlags True + "global-hints" + "use of a hints file for global packages, rather than an installed GHC" + idm + +-- | Parser for arguments to `stack ls dependencies`. +listDepsOptsParser :: OA.Parser ListDepsOpts +listDepsOptsParser = OA.subparser + ( formatSubCommand + "text" + "Print dependencies as text (default)." + listDepsTextParser + <> formatSubCommand + "cabal" + "Print dependencies as exact Cabal constraints." + listDepsConstraintsParser + <> formatSubCommand + "tree" + "Print dependencies as tree." + listDepsTreeParser + <> formatSubCommand + "json" + "Print dependencies as JSON." 
+ listDepsJsonParser + ) + <|> toListDepsOptsParser listDepsTextParser + +formatSubCommand :: + String + -> String + -> OA.Parser ListDepsFormat + -> OA.Mod OA.CommandFields ListDepsOpts +formatSubCommand cmd desc formatParser = + OA.command + cmd (OA.info (toListDepsOptsParser formatParser) (OA.progDesc desc)) + +listDepsTextParser :: OA.Parser ListDepsFormat +listDepsTextParser = + ListDepsText <$> listDepsFormatOptsParser <*> textFilterParser + +textFilterParser :: OA.Parser [ListDepsTextFilter] +textFilterParser = many (OA.option parseListDepsTextFilter + ( OA.long "filter" + <> OA.metavar "ITEM" + <> OA.help "Item to be filtered out of the results, if present, being either \ + \$locals (for all project packages) or a package name (can be \ + \specified multiple times)." + )) + +parseListDepsTextFilter :: OA.ReadM ListDepsTextFilter +parseListDepsTextFilter = OA.eitherReader $ \s -> + if s == "$locals" + then Right FilterLocals + else case parsePackageName s of + Just pkgName -> Right $ FilterPackage pkgName + Nothing -> Left $ s <> " is not a valid package name." + +listDepsConstraintsParser :: OA.Parser ListDepsFormat +listDepsConstraintsParser = pure ListDepsConstraints + +listDepsTreeParser :: OA.Parser ListDepsFormat +listDepsTreeParser = ListDepsTree <$> listDepsFormatOptsParser + +listDepsJsonParser :: OA.Parser ListDepsFormat +listDepsJsonParser = pure ListDepsJSON + +listDepsFormatOptsParser :: OA.Parser ListDepsFormatOpts +listDepsFormatOptsParser = do + license <- licenseParser + sep <- separatorParser + pure ListDepsFormatOpts + { sep + , license + } + +separatorParser :: OA.Parser Text +separatorParser = fmap + escapeSep + ( textOption + ( OA.long "separator" + <> OA.metavar "SEP" + <> OA.help "Separator between package name and what follows." 
+ <> OA.value " " + <> OA.showDefault + ) + ) + where + escapeSep s = T.replace "\\t" "\t" (T.replace "\\n" "\n" s) + +licenseParser :: OA.Parser Bool +licenseParser = boolFlags False + "license" + "printing of dependency licenses instead of versions." + idm + +toListDepsOptsParser :: OA.Parser ListDepsFormat -> OA.Parser ListDepsOpts +toListDepsOptsParser formatParser = ListDepsOpts + <$> formatParser + <*> dotOptsParser True + +listStylesOptsParser :: OA.Parser ListStylesOpts +listStylesOptsParser = ListStylesOpts + <$> boolFlags False + "basic" + "a basic report of the styles used. The default is a fuller one." + idm + <*> boolFlags True + "sgr" + "the provision of the equivalent SGR instructions (provided by \ + \default). Flag ignored for a basic report." + idm + <*> boolFlags True + "example" + "the provision of an example of the applied style (provided by default \ + \for colored output). Flag ignored for a basic report." + idm + +listToolsOptsParser :: OA.Parser ListToolsOpts +listToolsOptsParser = ListToolsOpts + <$> OA.strOption + ( OA.long "filter" + <> OA.metavar "TOOL_NAME" + <> OA.value "" + <> OA.help "Filter by a tool name (eg 'ghc', 'ghc-git' or 'msys2') \ + \- case sensitive. (default: no filter)" + ) diff --git a/src/Stack/Options/NewParser.hs b/src/Stack/Options/NewParser.hs index 34a9c06fdd..de4ba3141c 100644 --- a/src/Stack/Options/NewParser.hs +++ b/src/Stack/Options/NewParser.hs @@ -1,38 +1,55 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.NewParser where -import qualified Data.Map.Strict as M +{-| +Module : Stack.Options.NewParser +License : BSD-3-Clause +-} + +module Stack.Options.NewParser + ( newOptsParser + ) where + +import qualified Data.Map.Strict as M import Options.Applicative -import Stack.Init -import Stack.New -import Stack.Options.GlobalParser + ( Parser, help, idm, long, metavar, short, switch ) +import Options.Applicative.Builder.Extra ( boolFlags ) +import Stack.Init ( InitOpts ) +import Stack.New ( NewOpts (..) 
) +import Stack.Options.InitParser ( initOptsParser ) import Stack.Prelude -import Stack.Types.PackageName +import Stack.Types.PackageName ( packageNameArgument ) import Stack.Types.TemplateName + ( templateNameArgument, templateParamArgument ) -- | Parser for @stack new@. -newOptsParser :: Parser (NewOpts,InitOpts) +newOptsParser :: Parser (NewOpts, InitOpts) newOptsParser = (,) <$> newOpts <*> initOptsParser - where - newOpts = - NewOpts <$> - packageNameArgument - (metavar "PACKAGE_NAME" <> help "A valid package name.") <*> - switch - (long "bare" <> - help "Do not create a subdirectory for the project") <*> - optional (templateNameArgument - (metavar "TEMPLATE_NAME" <> - help "Name of a template - can take the form\ - \ [[service:]username/]template with optional service name\ - \ (github, gitlab, or bitbucket) \ - \ and username for the service; or, a local filename such as\ - \ foo.hsfiles or ~/foo; or, a full URL such as\ - \ https://example.com/foo.hsfiles.")) <*> - fmap - M.fromList - (many - (templateParamArgument - (short 'p' <> long "param" <> metavar "KEY:VALUE" <> - help - "Parameter for the template in the format key:value"))) + where + newOpts = NewOpts + <$> packageNameArgument + ( metavar "PACKAGE_NAME" + <> help "A valid package name." + ) + <*> switch + ( long "bare" + <> help "Do not create a subdirectory for the project." + ) + <*> boolFlags True + "init" + "the initialisation of the project for use with Stack." + idm + <*> optional (templateNameArgument + ( metavar "TEMPLATE_NAME" + <> help "Name of a template - can take the form\ + \ [[service:]username/]template with optional service name\ + \ (github, gitlab, bitbucket, or codeberg) and username for\ + \ the service; or, a local filename such as foo.hsfiles or\ + \ ~/foo; or, a full URL such as\ + \ https://example.com/foo.hsfiles." 
+ )) + <*> fmap M.fromList (many (templateParamArgument + ( short 'p' + <> long "param" + <> metavar "KEY:VALUE" + <> help "Parameter for the template in the format key:value." + ))) diff --git a/src/Stack/Options/NixParser.hs b/src/Stack/Options/NixParser.hs index 614fccbd4c..8dc990cf0a 100644 --- a/src/Stack/Options/NixParser.hs +++ b/src/Stack/Options/NixParser.hs @@ -1,61 +1,82 @@ -{-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.NixParser where +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} -import qualified Data.Text as T +{-| +Module : Stack.Options.NixParser +Description : Parser for options for Nix integration. +License : BSD-3-Clause + +Parser for Stack's command line options for Nix integration. +-} + +module Stack.Options.NixParser + ( nixOptsParser + ) where + +import qualified Data.Text as T import Options.Applicative -import Options.Applicative.Args + ( Mod, Parser, completer, help, long, metavar, option, str ) +import Options.Applicative.Args ( argsOption ) import Options.Applicative.Builder.Extra -import Stack.Nix -import Stack.Options.Utils + ( fileExtCompleter, firstBoolFlagsFalse + , firstBoolFlagsNoDefault, optionalFirst + ) +import Stack.Nix ( nixCmdName ) +import Stack.Options.Utils ( hideMods ) import Stack.Prelude -import Stack.Types.Nix +import Stack.Types.Nix ( NixOptsMonoid (..) ) +-- | Parser for Stack's command line options for integration with Nix. nixOptsParser :: Bool -> Parser NixOptsMonoid nixOptsParser hide0 = overrideActivation <$> - (NixOptsMonoid + ( NixOptsMonoid <$> firstBoolFlagsNoDefault - nixCmdName - "use of a Nix-shell. Implies 'system-ghc: true'" - hide + nixCmdName + "use of a Nix-shell. Implies 'system-ghc: true'." + hide <*> firstBoolFlagsNoDefault - "nix-pure" - "use of a pure Nix-shell. 
Implies '--nix' and 'system-ghc: true'" - hide - <*> optionalFirst - (textArgsOption - (long "nix-packages" <> - metavar "NAMES" <> - help "List of packages that should be available in the nix-shell (space separated)" <> - hide)) - <*> optionalFirst - (option - str - (long "nix-shell-file" <> - metavar "FILE" <> - completer (fileExtCompleter [".nix"]) <> - help "Nix file to be used to launch a nix-shell (for regular Nix users)" <> - hide)) - <*> optionalFirst - (textArgsOption - (long "nix-shell-options" <> - metavar "OPTIONS" <> - help "Additional options passed to nix-shell" <> - hide)) - <*> optionalFirst - (textArgsOption - (long "nix-path" <> - metavar "PATH_OPTIONS" <> - help "Additional options to override NIX_PATH parts (notably 'nixpkgs')" <> - hide)) + "nix-pure" + "use of a pure Nix-shell. Implies '--nix' and 'system-ghc: true'." + hide + <*> optionalFirst (textArgsOption + ( long "nix-packages" + <> metavar "NAMES" + <> help "List of packages that should be available in the nix-shell \ + \(space separated)." + <> hide + )) + <*> optionalFirst (option str + ( long "nix-shell-file" + <> metavar "FILE" + <> completer (fileExtCompleter [".nix"]) + <> help "Nix file to be used to launch a nix-shell (for regular Nix \ + \users)." + <> hide + )) + <*> optionalFirst (textArgsOption + ( long "nix-shell-options" + <> metavar "OPTIONS" + <> help "Additional options passed to nix-shell." + <> hide + )) + <*> optionalFirst (textArgsOption + ( long "nix-path" + <> metavar "PATH_OPTIONS" + <> help "Additional options to override NIX_PATH parts (notably \ + \'nixpkgs')." + <> hide + )) <*> firstBoolFlagsFalse - "nix-add-gc-roots" - "addition of packages to the nix GC roots so nix-collect-garbage doesn't remove them" - hide + "nix-add-gc-roots" + "addition of packages to the nix GC roots so nix-collect-garbage does \ + \not remove them." 
+ hide ) - where - hide = hideMods hide0 - overrideActivation m = - if fromFirst False (nixMonoidPureShell m) - then m { nixMonoidEnable = (First . Just . fromFirst True) (nixMonoidEnable m) } - else m - textArgsOption = fmap (map T.pack) . argsOption + where + hide :: Mod f a + hide = hideMods hide0 + overrideActivation m = + if fromFirst False m.pureShell + then m { enable = (First . Just . fromFirst True) m.enable } + else m + textArgsOption = fmap (map T.pack) . argsOption diff --git a/src/Stack/Options/PackageParser.hs b/src/Stack/Options/PackageParser.hs index 2ec1121c9e..09d92a5abe 100644 --- a/src/Stack/Options/PackageParser.hs +++ b/src/Stack/Options/PackageParser.hs @@ -1,31 +1,36 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.PackageParser where -import qualified Data.Map as Map -import Options.Applicative -import Options.Applicative.Types (readerAsk) +{-| +Module : Stack.Options.PackageParser +License : BSD-3-Clause +-} + +module Stack.Options.PackageParser + ( readFlag + ) where + +import qualified Data.Map as Map +import Options.Applicative ( ReadM, readerError ) +import Options.Applicative.Types ( readerAsk ) import Stack.Prelude -import Stack.Types.Config.Build (ApplyCLIFlag (..)) +import Stack.Types.BuildOptsCLI ( ApplyCLIFlag (..) 
) -- | Parser for package:[-]flag readFlag :: ReadM (Map ApplyCLIFlag (Map FlagName Bool)) readFlag = do - s <- readerAsk - case break (== ':') s of - (pn, ':':mflag) -> do - pn' <- - case parsePackageName pn of - Nothing - | pn == "*" -> return ACFAllProjectPackages - | otherwise -> readerError $ "Invalid package name: " ++ pn - Just x -> return $ ACFByName x - let (b, flagS) = - case mflag of - '-':x -> (False, x) - _ -> (True, mflag) - flagN <- - case parseFlagName flagS of - Nothing -> readerError $ "Invalid flag name: " ++ flagS - Just x -> return x - return $ Map.singleton pn' $ Map.singleton flagN b - _ -> readerError "Must have a colon" + s <- readerAsk + case break (== ':') s of + (pn, ':':mflag) -> do + pn' <- case parsePackageName pn of + Nothing + | pn == "*" -> pure ACFAllProjectPackages + | otherwise -> readerError $ "Invalid package name: " ++ pn + Just x -> pure $ ACFByName x + let (b, flagS) = case mflag of + '-':x -> (False, x) + _ -> (True, mflag) + flagN <- case parseFlagName flagS of + Nothing -> readerError $ "Invalid flag name: " ++ flagS + Just x -> pure x + pure $ Map.singleton pn' $ Map.singleton flagN b + _ -> readerError "Must have a colon." diff --git a/src/Stack/Options/PackagesParser.hs b/src/Stack/Options/PackagesParser.hs new file mode 100644 index 0000000000..001d2c533f --- /dev/null +++ b/src/Stack/Options/PackagesParser.hs @@ -0,0 +1,27 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Options.PackagesParser +Description : Parser for one or more package names. +License : BSD-3-Clause + +Parser for one or more package names. +-} + +module Stack.Options.PackagesParser + ( packagesParser + ) where + +import Options.Applicative ( Parser, help, long, metavar, strOption ) +import Stack.Prelude + +-- | Parser for one or more package names. 
+packagesParser :: Parser [String] +packagesParser = many + ( strOption + ( long "package" + <> metavar "PACKAGE" + <> help "Add a package (can be specified multiple times)" + ) + ) diff --git a/src/Stack/Options/PathParser.hs b/src/Stack/Options/PathParser.hs new file mode 100644 index 0000000000..79ed382bb3 --- /dev/null +++ b/src/Stack/Options/PathParser.hs @@ -0,0 +1,35 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Options.PathParser +Description : Parse arguments for Stack's @path@ command. +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @path@ command. +-} + +module Stack.Options.PathParser + ( pathParser + ) where + +import qualified Data.Text as T +import Options.Applicative ( Parser, flag, help, long ) +import Stack.Path + ( pathsFromConfig, pathsFromEnvConfig, pathsFromRunner ) +import Stack.Prelude + +-- | Parse command line arguments for Stack's @path@ command. +pathParser :: Parser [Text] +pathParser = mapMaybeA + ( \(desc, name) -> flag Nothing (Just name) + ( long (T.unpack name) + <> help desc + ) + ) + paths + where + toDescName (desc, name, _) = (desc, name) + paths = + pathsFromRunner + : map toDescName pathsFromConfig + <> map toDescName pathsFromEnvConfig diff --git a/src/Stack/Options/PvpBoundsParser.hs b/src/Stack/Options/PvpBoundsParser.hs new file mode 100644 index 0000000000..68c56c0690 --- /dev/null +++ b/src/Stack/Options/PvpBoundsParser.hs @@ -0,0 +1,46 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Options.PvpBoundsParser +Description : Parser for PVP bounds. +License : BSD-3-Clause + +Parser for PVP bounds. 
+-} + +module Stack.Options.PvpBoundsParser + ( pvpBoundsParser + ) where + +import qualified Data.Text as T +import Options.Applicative + ( Parser, completeWith, help, long, metavar, option + , readerError + ) +import Options.Applicative.Types ( readerAsk ) +import Stack.Prelude +import Stack.Types.PvpBounds ( PvpBounds (..), parsePvpBounds ) + +-- | Parser for PVP bounds. +pvpBoundsParser :: + Maybe Text + -- ^ Optional context for the option's help message. + -> Parser PvpBounds +pvpBoundsParser context = option readPvpBounds + ( long "pvp-bounds" + <> metavar "PVP-BOUNDS" + <> completeWith ["none", "lower", "upper", "both"] + <> help (T.unpack helpMsg) + ) + where + readPvpBounds = do + s <- readerAsk + case parsePvpBounds $ T.pack s of + Left e -> readerError e + Right v -> pure v + helpMsg = + helpMsgPrefix + <> " PVP version bounds should be added to Cabal file: none, lower, upper, \ + \both." + helpMsgPrefix = maybe "How" (<> ", how") context diff --git a/src/Stack/Options/ResolverParser.hs b/src/Stack/Options/ResolverParser.hs deleted file mode 100644 index c80475e64f..0000000000 --- a/src/Stack/Options/ResolverParser.hs +++ /dev/null @@ -1,34 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DataKinds #-} -module Stack.Options.ResolverParser where - -import qualified Data.Text as T -import Options.Applicative -import Options.Applicative.Types (readerAsk) -import Stack.Options.Utils -import Stack.Prelude -import Stack.Types.Resolver - --- | Parser for the resolver -abstractResolverOptsParser :: Bool -> Parser (Unresolved AbstractResolver) -abstractResolverOptsParser hide = - option readAbstractResolver - (long "resolver" <> - metavar "RESOLVER" <> - help "Override resolver in project file" <> - hideMods hide) - -compilerOptsParser :: Bool -> Parser WantedCompiler -compilerOptsParser hide = - option readCompilerVersion - (long "compiler" <> - metavar "COMPILER" <> - help "Use the specified compiler" <> - hideMods hide) - -readCompilerVersion :: 
ReadM WantedCompiler -readCompilerVersion = do - s <- readerAsk - case parseWantedCompiler (T.pack s) of - Left{} -> readerError $ "Failed to parse compiler: " ++ s - Right x -> return x diff --git a/src/Stack/Options/SDistParser.hs b/src/Stack/Options/SDistParser.hs index 71a222cac9..df864ec3d3 100644 --- a/src/Stack/Options/SDistParser.hs +++ b/src/Stack/Options/SDistParser.hs @@ -1,23 +1,48 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.SDistParser where + +{-| +Module : Stack.Options.SDistParser +Description : Parse arguments for Stack's @sdist@ command. +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @sdist@ command. +-} + +module Stack.Options.SDistParser + ( sdistOptsParser + ) where import Options.Applicative -import Options.Applicative.Builder.Extra + ( Parser, completer, help, idm, long, metavar, strArgument + , strOption, switch + ) +import Options.Applicative.Builder.Extra ( boolFlags, dirCompleter ) import Stack.Prelude -import Stack.SDist -import Stack.Options.HpcReportParser (pvpBoundsOption) +import Stack.Types.SDistOpts ( SDistOpts (..) ) +import Stack.Options.PvpBoundsParser ( pvpBoundsParser ) --- | Parser for arguments to `stack sdist` and `stack upload` +-- | Parse command line arguments for Stack's @sdist@ command. sdistOptsParser :: Parser SDistOpts -sdistOptsParser = SDistOpts <$> - many (strArgument $ metavar "DIR" <> completer dirCompleter) <*> - optional pvpBoundsOption <*> - ignoreCheckSwitch <*> - buildPackageOption <*> - optional (strOption (long "tar-dir" <> help "If specified, copy all the tar to this dir")) - where - ignoreCheckSwitch = - switch (long "ignore-check" - <> help "Do not check package for common mistakes") - buildPackageOption = - boolFlags False "test-tarball" "building of the resulting tarball" idm +sdistOptsParser = SDistOpts + <$> many (strArgument + ( metavar "DIR" + <> completer dirCompleter + <> help "A relative path to a package directory. 
Can be specified \ + \multiple times. If none specified, use all project packages." + )) + <*> optional (pvpBoundsParser Nothing) + <*> ignoreCheckSwitch + <*> buildPackageOption + <*> optional (strOption + ( long "tar-dir" + <> help "If specified, copy all the generated files to this directory." + )) + where + ignoreCheckSwitch = switch + ( long "ignore-check" + <> help "Do not check packages for common mistakes." + ) + buildPackageOption = boolFlags False + "test-tarball" + "building of the resulting generated files." + idm diff --git a/src/Stack/Options/ScriptParser.hs b/src/Stack/Options/ScriptParser.hs index 49e9ac2f10..9c228035cc 100644 --- a/src/Stack/Options/ScriptParser.hs +++ b/src/Stack/Options/ScriptParser.hs @@ -1,52 +1,77 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.ScriptParser where + +{-| +Module : Stack.Options.ScriptParser +Description : Parse arguments for Stack's @script@ command. +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @script@ command. +-} + +module Stack.Options.ScriptParser + ( scriptOptsParser + ) where import Options.Applicative + ( Parser, completer, eitherReader, flag', help, long, metavar + , option, strArgument, strOption + ) import Options.Applicative.Builder.Extra -import Stack.Options.Completion + ( boolFlags, fileExtCompleter ) +import Stack.Options.Completion ( ghcOptsCompleter ) +import Stack.Options.PackagesParser ( packagesParser ) import Stack.Prelude +import Stack.Script + ( ScriptExecute (..), ScriptOpts (..), ShouldRun (..) ) -data ScriptOpts = ScriptOpts - { soPackages :: ![String] - , soFile :: !FilePath - , soArgs :: ![String] - , soCompile :: !ScriptExecute - , soGhcOptions :: ![String] - , soScriptExtraDeps :: ![PackageIdentifierRevision] - } - deriving Show - -data ScriptExecute - = SEInterpret - | SECompile - | SEOptimize - deriving Show - +-- | Parse command line arguments for Stack's @script@ command. 
scriptOptsParser :: Parser ScriptOpts scriptOptsParser = ScriptOpts - <$> many (strOption - (long "package" <> - metavar "PACKAGE(S)" <> - help "Additional package(s) that must be installed")) - <*> strArgument (metavar "FILE" <> completer (fileExtCompleter [".hs", ".lhs"])) - <*> many (strArgument (metavar "-- ARGUMENT(S) (e.g. stack script X.hs -- argument(s) to program)")) - <*> (flag' SECompile - ( long "compile" - <> help "Compile the script without optimization and run the executable" - ) <|> - flag' SEOptimize - ( long "optimize" - <> help "Compile the script with optimization and run the executable" - ) <|> - pure SEInterpret) - <*> many (strOption - (long "ghc-options" <> - metavar "OPTIONS" <> - completer ghcOptsCompleter <> - help "Additional options passed to GHC")) - <*> many (option extraDepRead - (long "extra-dep" <> - metavar "PACKAGE-VERSION" <> - help "Extra dependencies to be added to the snapshot")) - where - extraDepRead = eitherReader $ mapLeft show . parsePackageIdentifierRevision . fromString + <$> packagesParser + <*> strArgument + ( metavar "FILE" + <> completer (fileExtCompleter [".hs", ".lhs"]) + ) + <*> many (strArgument + ( metavar "-- ARGUMENT(S) (e.g. stack script X.hs -- argument(s) to \ + \program)." + )) + <*> ( flag' SECompile + ( long "compile" + <> help "Compile the script without optimization and run the \ + \executable." + ) + <|> flag' SEOptimize + ( long "optimize" + <> help "Compile the script with optimization and run the \ + \executable." + ) + <|> pure SEInterpret + ) + <*> boolFlags False + "use-root" + "writing of all compilation outputs to a script-specific location in \ + \the scripts directory of the Stack root." + mempty + <*> many (strOption + ( long "ghc-options" + <> metavar "OPTIONS" + <> completer ghcOptsCompleter + <> help "Additional options passed to GHC (can be specified multiple \ + \times)." 
+ )) + <*> many (option extraDepRead + ( long "extra-dep" + <> metavar "EXTRA-DEP" + <> help "An immutable extra dependency to be added to the snapshot \ + \(can be specified multiple times)." + )) + <*> ( flag' NoRun + ( long "no-run" + <> help "Do not run, just compile." + ) + <|> pure YesRun + ) + where + extraDepRead = eitherReader $ + mapLeft show . parseRawPackageLocationImmutables . fromString diff --git a/src/Stack/Options/SetupParser.hs b/src/Stack/Options/SetupParser.hs new file mode 100644 index 0000000000..946d9aba5c --- /dev/null +++ b/src/Stack/Options/SetupParser.hs @@ -0,0 +1,58 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Options.SetupParser +Description : Parse arguments for Stack's @setup@ command. +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @setup@ command. +-} + +module Stack.Options.SetupParser + ( setupOptsParser + ) where + +import qualified Data.Text as T +import qualified Options.Applicative as OA +import qualified Options.Applicative.Builder.Extra as OA +import qualified Options.Applicative.Types as OA +import Stack.Prelude +import Stack.Types.SetupOpts ( SetupCmdOpts (..) ) + +-- | Parse command line arguments for Stack's @setup@ command. +setupOptsParser :: OA.Parser SetupCmdOpts +setupOptsParser = SetupCmdOpts + <$> OA.optional (OA.argument readVersion + ( OA.metavar "GHC_VERSION" + <> OA.help "Version of GHC to install, e.g. 9.10.3. (default: install \ + \the version implied by the snapshot)" + )) + <*> OA.boolFlags False + "reinstall" + "reinstalling GHC, even if available (incompatible with --system-ghc)." + OA.idm + <*> OA.optional (OA.strOption + ( OA.long "ghc-bindist" + <> OA.metavar "URL" + <> OA.help "Alternate GHC binary distribution (requires custom \ + \--ghc-variant)." + )) + <*> OA.many (OA.strOption + ( OA.long "ghcjs-boot-options" + <> OA.metavar "GHCJS_BOOT" + <> OA.help "Additional ghcjs-boot options." 
+ )) + <*> OA.boolFlags True + "ghcjs-boot-clean" + "Control if ghcjs-boot should have --clean option present." + OA.idm + where + readVersion = do + s <- OA.readerAsk + case parseWantedCompiler ("ghc-" <> T.pack s) of + Left _ -> + case parseWantedCompiler (T.pack s) of + Left _ -> OA.readerError $ "Invalid version: " ++ s + Right x -> pure x + Right x -> pure x diff --git a/src/Stack/Options/SnapshotParser.hs b/src/Stack/Options/SnapshotParser.hs new file mode 100644 index 0000000000..caf0e3e372 --- /dev/null +++ b/src/Stack/Options/SnapshotParser.hs @@ -0,0 +1,45 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Options.SnapshotParser +License : BSD-3-Clause +-} + +module Stack.Options.SnapshotParser + ( abstractSnapshotOptsParser + , compilerOptsParser + , readCompilerVersion + ) where + +import qualified Data.Text as T +import Options.Applicative + ( Parser, ReadM, help, long, metavar, option, readerError ) +import Options.Applicative.Types ( readerAsk ) +import Stack.Options.Utils ( hideMods ) +import Stack.Prelude +import Stack.Types.Snapshot ( AbstractSnapshot, readAbstractSnapshot ) + +-- | Parser for the snapshot +abstractSnapshotOptsParser :: Bool -> Parser (Unresolved AbstractSnapshot) +abstractSnapshotOptsParser hide = option readAbstractSnapshot + ( long "snapshot" + <> long "resolver" + <> metavar "SNAPSHOT" + <> help "Override snapshot in the project configuration file." + <> hideMods hide + ) + +compilerOptsParser :: Bool -> Parser WantedCompiler +compilerOptsParser hide = option readCompilerVersion + ( long "compiler" + <> metavar "COMPILER" + <> help "Use the specified compiler." 
+ <> hideMods hide + ) + +readCompilerVersion :: ReadM WantedCompiler +readCompilerVersion = do + s <- readerAsk + case parseWantedCompiler (T.pack s) of + Left{} -> readerError $ "Failed to parse compiler: " ++ s + Right x -> pure x diff --git a/src/Stack/Options/TestParser.hs b/src/Stack/Options/TestParser.hs index 144c7b9db0..4571d55c6c 100644 --- a/src/Stack/Options/TestParser.hs +++ b/src/Stack/Options/TestParser.hs @@ -1,44 +1,64 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.TestParser where + +{-| +Module : Stack.Options.TestParser +License : BSD-3-Clause +-} + +module Stack.Options.TestParser + ( testOptsParser + ) where import Options.Applicative -import Options.Applicative.Args + ( Mod, Parser, auto, flag', help, long, metavar, option ) +import Options.Applicative.Args ( argsOption ) import Options.Applicative.Builder.Extra -import Stack.Options.Utils + ( firstBoolFlagsTrue, optionalFirst, optionalFirstFalse ) +import Stack.Options.Utils ( hideMods ) import Stack.Prelude -import Stack.Types.Config +import Stack.Types.BuildOptsMonoid ( TestOptsMonoid (..) ) -- | Parser for test arguments. -- FIXME hide args testOptsParser :: Bool -> Parser TestOptsMonoid -testOptsParser hide0 = - TestOptsMonoid - <$> firstBoolFlagsTrue - "rerun-tests" - "running already successful tests" - hide - <*> fmap - concat - (many - (argsOption - (long "test-arguments" <> - long "ta" <> - metavar "TEST_ARGS" <> - help "Arguments passed in to the test suite program" <> - hide))) - <*> optionalFirstFalse - (flag' True - (long "coverage" <> - help "Generate a code coverage report" <> - hide)) - <*> optionalFirstFalse - (flag' True - (long "no-run-tests" <> - help "Disable running of tests. (Tests will still be built.)" <> - hide)) - <*> optionalFirst - (option (fmap Just auto) - (long "test-suite-timeout" <> - help "Maximum test suite run time in seconds." 
<> - hide)) - where hide = hideMods hide0 +testOptsParser hide0 = TestOptsMonoid + <$> firstBoolFlagsTrue + "rerun-tests" + "running already successful test suites." + hide + <*> fmap concat (many (argsOption + ( long "test-arguments" + <> long "ta" + <> metavar "TEST_ARGS" + <> help "Arguments passed to the test suites." + <> hide + ))) + <*> optionalFirstFalse (flag' True + ( long "coverage" + <> help "Generate a code coverage report." + <> hide + )) + <*> firstBoolFlagsTrue + "run-tests" + "running of targeted test suites." + hide + <*> optionalFirst (option (fmap Just auto) + ( long "test-suite-timeout" + <> metavar "SECONDS" + <> help "For each test suite, maximum run time before it fails." + <> hide + )) + <*> optionalFirst (option (fmap Just auto) + ( long "test-suite-timeout-grace" + <> metavar "SECONDS" + <> help "Request termination of timed-out test suite process. Grace \ + \period after timeout before termination is forced." + <> hide + )) + <*> firstBoolFlagsTrue + "tests-allow-stdin" + "allow standard input in test suites." + hide + where + hide :: Mod f a + hide = hideMods hide0 diff --git a/src/Stack/Options/UnpackParser.hs b/src/Stack/Options/UnpackParser.hs new file mode 100644 index 0000000000..a73d12e046 --- /dev/null +++ b/src/Stack/Options/UnpackParser.hs @@ -0,0 +1,70 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Options.UnpackParser +Description : Parse arguments for Stack's @unpack@ command. +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @unpack@ command. +-} + +module Stack.Options.UnpackParser + ( unpackOptsParser + ) where + +import qualified Data.Text as T +import Options.Applicative + ( Parser, ReadM, argument, eitherReader, help, long, metavar + , option, switch + ) +import Path ( SomeBase (..), parseSomeDir ) +import Stack.Prelude +import Stack.Unpack ( UnpackOpts (..), UnpackTarget) + +-- | Parse command line arguments for Stack's @unpack@ command. 
+unpackOptsParser :: Parser UnpackOpts +unpackOptsParser = UnpackOpts + <$> some unpackTargetParser + <*> areCandidatesParser + <*> optional dirParser + +unpackTargetParser :: Parser UnpackTarget +unpackTargetParser = argument unpackTargetReader + ( metavar "TARGET" + <> help "A package or package candidate (can be specified multiple times). A \ + \package can be referred to by name only or by identifier \ + \(including, optionally, a revision as '@rev:' or \ + \'@sha256:'). A package candidate is referred to by its \ + \identifier." + ) + +unpackTargetReader :: ReadM UnpackTarget +unpackTargetReader = eitherReader $ \s -> + case parsePackageIdentifierRevision $ T.pack s of + Right pir -> Right (Right pir) + Left _ -> case parsePackageName s of + Just pn -> Right (Left pn) + Nothing -> + Left $ s <> " is an invalid way to refer to a package or package \ + \candidate to be unpacked." + +areCandidatesParser :: Parser Bool +areCandidatesParser = switch + ( long "candidate" + <> help "Each target is a package candidate." + ) + +dirParser :: Parser (SomeBase Dir) +dirParser = option dirReader + ( long "to" + <> metavar "DIR" + <> help "Optionally, a directory to unpack into. A target will be unpacked \ + \ into a subdirectory." + ) + +dirReader :: ReadM (SomeBase Dir) +dirReader = eitherReader $ \s -> + case parseSomeDir s of + Just dir -> Right dir + Nothing -> + Left $ s <> " is an invalid way to refer to a directory." diff --git a/src/Stack/Options/UpgradeParser.hs b/src/Stack/Options/UpgradeParser.hs new file mode 100644 index 0000000000..71710117f0 --- /dev/null +++ b/src/Stack/Options/UpgradeParser.hs @@ -0,0 +1,99 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Options.UpgradeParser +Description : Parse arguments for Stack's @upgrade@ command. +License : BSD-3-Clause + +Function to parse command line arguments for Stack's @upgrade@ command. 
+-} + +module Stack.Options.UpgradeParser + ( upgradeOptsParser + ) where + +import Options.Applicative + ( Parser, flag', help, idm, long, metavar, showDefault + , strOption, switch, value + ) +import Options.Applicative.Builder.Extra ( boolFlags ) +import Stack.Prelude +import Stack.Types.UpgradeOpts + ( BinaryOpts (..), SourceOpts (..), UpgradeOpts (..) ) + +-- | Parse command line arguments for Stack's @upgrade@ command. +upgradeOptsParser :: + Bool + -- ^ The default for --[no]-only-local-bin + -> Parser UpgradeOpts +upgradeOptsParser onlyLocalBin = UpgradeOpts + <$> (sourceOnly <|> optional binaryOpts) + <*> (binaryOnly <|> optional sourceOpts) + where + binaryOnly = flag' Nothing + ( long "binary-only" + <> help "Do not use a source upgrade path." + ) + sourceOnly = flag' Nothing + ( long "source-only" + <> help "Do not use a binary upgrade path." + ) + + binaryOpts = BinaryOpts + <$> optional (strOption + ( long "binary-platform" + <> help "Platform type for archive to download." + <> metavar "PLATFORM" + )) + <*> switch + ( long "force-download" + <> help "Download the latest available Stack executable, even if not \ + \newer." + ) + <*> boolFlags onlyLocalBin + "only-local-bin" + "downloading only to Stack's local binary directory" + idm + <*> optional (strOption + ( long "binary-version" + <> help "Download a specific Stack version, even if already \ + \installed." + <> metavar "VERSION" + )) + <*> optional (strOption + ( long "github-org" + <> help "GitHub organization name." + <> metavar "USER" + )) + <*> optional (strOption + ( long "github-repo" + <> help "GitHub repository name." + <> metavar "REPO" + )) + + sourceOpts = SourceOpts + <$> ( ( \fromGit repo branch -> + if fromGit + then Just (repo, branch) + else Nothing + ) + <$> switch + ( long "git" + <> help "Clone from Git instead of downloading from Hackage \ + \(more dangerous)." + ) + <*> strOption + ( long "git-repo" + <> help "Clone from specified Git repository." 
+ <> metavar "URL" + <> value "https://github.com/commercialhaskell/stack" + <> showDefault + ) + <*> strOption + ( long "git-branch" + <> help "Clone from specified Git branch." + <> metavar "BRANCH" + <> value "master" + <> showDefault + ) + ) diff --git a/src/Stack/Options/UploadParser.hs b/src/Stack/Options/UploadParser.hs new file mode 100644 index 0000000000..932438ea76 --- /dev/null +++ b/src/Stack/Options/UploadParser.hs @@ -0,0 +1,69 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Options.UploadParser +Description : Parse arguments for Stack's @upload@ command. +License : BSD-3-Clause + +Functions to parse command line arguments for Stack's @upload@ command. +-} + +module Stack.Options.UploadParser + ( uploadOptsParser + ) where + +import Options.Applicative + ( Parser, completer, flag, help, idm, long, metavar, short + , strArgument, strOption, switch + ) +import Options.Applicative.Builder.Extra + ( boolFlags, dirCompleter, firstBoolFlagsTrue ) +import Stack.Options.PvpBoundsParser ( pvpBoundsParser ) +import Stack.Prelude +import Stack.Upload ( UploadOpts (..), UploadVariant (..) ) + +-- | Parse command line arguments for Stack's @upload@ command. +uploadOptsParser :: Parser UploadOpts +uploadOptsParser = UploadOpts + <$> itemsToWorkWithParser + <*> documentationParser + <*> optional (pvpBoundsParser (Just "For package upload")) + <*> ignoreCheckSwitch + <*> buildPackageOption + <*> tarDirParser + <*> uploadVariantParser + <*> saveHackageCredsOption + where + itemsToWorkWithParser = many (strArgument + ( metavar "ITEM" + <> completer dirCompleter + <> help "A relative path to a package directory or, for package upload \ + \only, an sdist tarball. Can be specified multiple times." + )) + documentationParser = flag False True + ( long "documentation" + <> short 'd' + <> help "Upload documentation for packages (not packages)." 
+ ) + ignoreCheckSwitch = switch + ( long "ignore-check" + <> help "Do not check packages, for upload, for common mistakes." + ) + buildPackageOption = boolFlags False + "test-tarball" + "building of the resulting generated files, for package upload." + idm + tarDirParser = optional (strOption + ( long "tar-dir" + <> help "If specified, copy all the generated files, for package upload, \ + \to this directory." + )) + uploadVariantParser = flag Publishing Candidate + ( long "candidate" + <> help "Upload as, or for, a package candidate." + ) + saveHackageCredsOption = firstBoolFlagsTrue + "save-hackage-creds" + "saving user's Hackage username and password in a local file." + idm diff --git a/src/Stack/Options/Utils.hs b/src/Stack/Options/Utils.hs index 9094e4c27c..ba61cb00d1 100644 --- a/src/Stack/Options/Utils.hs +++ b/src/Stack/Options/Utils.hs @@ -1,7 +1,16 @@ {-# LANGUAGE NoImplicitPrelude #-} -module Stack.Options.Utils where -import Options.Applicative +{-| +Module : Stack.Options.Utils +License : BSD-3-Clause +-} + +module Stack.Options.Utils + ( GlobalOptsContext (..) + , hideMods + ) where + +import Options.Applicative ( Mod, hidden, idm, internal ) import Stack.Prelude -- | If argument is True, hides the option from usage and help @@ -9,13 +18,13 @@ hideMods :: Bool -> Mod f a hideMods hide = if hide then internal <> hidden else idm -- | Allows adjust global options depending on their context --- Note: This was being used to remove ambibuity between the local and global --- implementation of stack init --resolver option. Now that stack init has no --- local --resolver this is not being used anymore but the code is kept for any +-- Note: This was being used to remove ambiguity between the local and global +-- implementation of stack init --snapshot option. Now that stack init has no +-- local --snapshot this is not being used anymore but the code is kept for any -- similar future use cases. 
data GlobalOptsContext - = OuterGlobalOpts -- ^ Global options before subcommand name - | OtherCmdGlobalOpts -- ^ Global options following any other subcommand - | BuildCmdGlobalOpts - | GhciCmdGlobalOpts - deriving (Show, Eq) + = OuterGlobalOpts -- ^ Global options before subcommand name + | OtherCmdGlobalOpts -- ^ Global options following any other subcommand + | BuildCmdGlobalOpts + | GhciCmdGlobalOpts + deriving (Eq, Show) diff --git a/src/Stack/Package.hs b/src/Stack/Package.hs index 9d01f72f4a..6ae52f7dd1 100644 --- a/src/Stack/Package.hs +++ b/src/Stack/Package.hs @@ -1,436 +1,383 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} --- | Dealing with Cabal. +{-| +Module : Stack.Package +Description : Dealing with Cabal. +License : BSD-3-Clause + +Dealing with Cabal. +-} module Stack.Package - (readDotBuildinfo - ,resolvePackage - ,packageFromPackageDescription - ,Package(..) - ,PackageDescriptionPair(..) - ,GetPackageFiles(..) - ,GetPackageOpts(..) - ,PackageConfig(..) - ,buildLogPath - ,PackageException (..) - ,resolvePackageDescription - ,packageDependencies - ,applyForceCustomBuild + ( readDotBuildinfo + , resolvePackage + , packageFromPackageDescription + , Package (..) + , PackageConfig (..) + , buildLogPath + , PackageException (..) 
+ , resolvePackageDescription + , packageDependencies + , applyForceCustomBuild + , hasBuildableMainLibrary + , packageUnknownTools + , buildableForeignLibs + , buildableSubLibs + , buildableExes + , buildableTestSuites + , buildableBenchmarks + , getPackageOpts + , processPackageDepsEither + , listOfPackageDeps + , setOfPackageDeps + , topSortPackageComponent ) where -import Data.List (find, isPrefixOf, unzip) import qualified Data.Map.Strict as M import qualified Data.Set as S import qualified Data.Text as T +import Distribution.CabalSpecVersion ( cabalSpecToVersionDigits ) import Distribution.Compiler -import Distribution.ModuleName (ModuleName) -import qualified Distribution.ModuleName as Cabal -import qualified Distribution.Package as D -import Distribution.Package hiding (Package,PackageName,packageName,packageVersion,PackageIdentifier) -import qualified Distribution.PackageDescription as D -import Distribution.PackageDescription hiding (FlagName) -import Distribution.PackageDescription.Parsec -import Distribution.Pretty (prettyShow) -import Distribution.Simple.Glob (matchDirFileGlob) -import Distribution.System (OS (..), Arch, Platform (..)) -import qualified Distribution.Text as D + ( CompilerFlavor (..), PerCompilerFlavor (..) ) +import Distribution.ModuleName ( ModuleName ) +import Distribution.Package ( mkPackageName ) +import Distribution.PackageDescription + ( Benchmark (..), BuildInfo (..), BuildType (..) + , CondTree (..), Condition (..), ConfVar (..) + , Dependency (..), Executable (..), ForeignLib (..) + , GenericPackageDescription (..), HookedBuildInfo + , Library (..), PackageDescription (..), PackageFlag (..) + , SetupBuildInfo (..), TestSuite (..), allLibraries + , buildType, depPkgName, depVerRange + , unqualComponentNameToPackageName + ) +import qualified Distribution.PackageDescription as Executable + ( Executable (..) 
) +import Distribution.Simple.PackageDescription ( readHookedBuildInfo ) +import Distribution.System ( OS (..), Arch, Platform (..) ) +import Distribution.Text ( display ) import qualified Distribution.Types.CondTree as Cabal -import qualified Distribution.Types.ExeDependency as Cabal -import Distribution.Types.ForeignLib -import qualified Distribution.Types.LegacyExeDependency as Cabal -import Distribution.Types.LibraryName (libraryNameString, maybeToLibraryName) -import Distribution.Types.MungedPackageName -import qualified Distribution.Types.UnqualComponentName as Cabal -import qualified Distribution.Verbosity as D -import Distribution.Version (mkVersion, orLaterVersion, anyVersion) -import qualified HiFileParser as Iface -#if MIN_VERSION_path(0,7,0) -import Path as FL hiding (replaceExtension) -#else -import Path as FL -#endif -import Path.Extra -import Path.IO hiding (findFiles) -import Stack.Build.Installed -import Stack.Constants -import Stack.Constants.Config -import Stack.Prelude hiding (Display (..)) -import Stack.Types.Compiler -import Stack.Types.Config -import Stack.Types.GhcPkgId +import Distribution.Utils.Path ( makeSymbolicPath, getSymbolicPath ) +import Distribution.Verbosity ( silent ) +import Distribution.Version + ( anyVersion, mkVersion, orLaterVersion ) +import Path + ( (), parent, parseAbsDir, parseRelDir, parseRelFile + , stripProperPrefix + ) +import Path.Extra ( concatAndCollapseAbsDir, toFilePathNoTrailingSep ) +import Stack.Component + ( componentDependencyMap, foldOnNameAndBuildInfo + , isComponentBuildable, stackBenchmarkFromCabal + , stackExecutableFromCabal, stackForeignLibraryFromCabal + , stackLibraryFromCabal, stackTestFromCabal + ) +import Stack.ComponentFile + ( buildDir, componentAutogenDir, componentBuildDir + , componentOutputDir, packageAutogenDir + ) +import Stack.Constants ( relFileCabalMacrosH, relDirLogs ) +import Stack.Constants.Config ( distDirFromDir ) +import Stack.PackageFile ( getPackageFile, 
stackPackageFileFromCabal ) +import Stack.Prelude hiding ( Display (..) ) +import Stack.Types.BuildConfig ( HasBuildConfig (..), getWorkDir ) +import Stack.Types.CompCollection + ( CompCollection, collectionLookup, foldAndMakeCollection + , foldComponentToAnotherCollection, getBuildableSet + , getBuildableSetText + ) +import Stack.Types.Compiler ( ActualCompiler (..) ) +import Stack.Types.CompilerPaths ( cabalVersionL ) +import Stack.Types.Component + ( HasBuildInfo, HasComponentInfo, HasQualiName, HasName + , StackUnqualCompName (..) + ) +import qualified Stack.Types.Component as Component +import Stack.Types.ComponentUtils ( emptyCompName, toCabalName ) +import Stack.Types.Config ( Config (..), HasConfig (..) ) +import Stack.Types.Dependency + ( DepLibrary (..), DepType (..), DepValue (..) + , cabalSetupDepsToStackDep, libraryDepFromVersionRange + ) +import Stack.Types.EnvConfig ( HasEnvConfig ) +import Stack.Types.Installed + ( InstallMap, Installed (..), InstalledMap + , installedToPackageIdOpt + ) import Stack.Types.NamedComponent + ( NamedComponent (..), isPotentialDependency + , subLibComponents + ) import Stack.Types.Package + ( BioInput(..), BuildInfoOpts (..), Package (..) + , PackageConfig (..), PackageException (..) + , dotCabalCFilePath, packageIdentifier + ) +import Stack.Types.PackageFile + ( DotCabalPath, PackageComponentFile (..) ) +import Stack.Types.SourceMap ( Target(..), PackageType (..) 
) import Stack.Types.Version -import qualified System.Directory as D -import System.FilePath (replaceExtension) -import qualified System.FilePath as FilePath -import System.IO.Error -import RIO.Process -import RIO.PrettyPrint -import qualified RIO.PrettyPrint as PP (Style (Module)) - -data Ctx = Ctx { ctxFile :: !(Path Abs File) - , ctxDistDir :: !(Path Abs Dir) - , ctxBuildConfig :: !BuildConfig - , ctxCabalVer :: !Version - } - -instance HasPlatform Ctx -instance HasGHCVariant Ctx -instance HasLogFunc Ctx where - logFuncL = configL.logFuncL -instance HasRunner Ctx where - runnerL = configL.runnerL -instance HasStylesUpdate Ctx where - stylesUpdateL = runnerL.stylesUpdateL -instance HasTerm Ctx where - useColorL = runnerL.useColorL - termWidthL = runnerL.termWidthL -instance HasConfig Ctx -instance HasPantryConfig Ctx where - pantryConfigL = configL.pantryConfigL -instance HasProcessContext Ctx where - processContextL = configL.processContextL -instance HasBuildConfig Ctx where - buildConfigL = lens ctxBuildConfig (\x y -> x { ctxBuildConfig = y }) + ( VersionRange, intersectVersionRanges, withinRange ) +import System.FilePath ( replaceExtension ) +import RIO.Seq ((|>)) -- | Read @.buildinfo@ ancillary files produced by some Setup.hs hooks. -- The file includes Cabal file syntax to be merged into the package description --- derived from the package's .cabal file. +-- derived from the package's Cabal file. -- -- NOTE: not to be confused with BuildInfo, an Stack-internal datatype. -readDotBuildinfo :: MonadIO m - => Path Abs File - -> m HookedBuildInfo -readDotBuildinfo buildinfofp = - liftIO $ readHookedBuildInfo D.silent (toFilePath buildinfofp) +readDotBuildinfo :: MonadIO m => Path Abs File -> m HookedBuildInfo +readDotBuildinfo = + liftIO . readHookedBuildInfo silent Nothing . makeSymbolicPath . 
toFilePath --- | Resolve a parsed cabal file into a 'Package', which contains all of --- the info needed for stack to build the 'Package' given the current +-- | Resolve a parsed Cabal file into a t'Package', which contains all of the +-- info needed for Stack to build the t'Package' given the current -- configuration. -resolvePackage :: PackageConfig - -> GenericPackageDescription - -> Package +resolvePackage :: PackageConfig -> GenericPackageDescription -> Package resolvePackage packageConfig gpkg = - packageFromPackageDescription - packageConfig - (genPackageFlags gpkg) - (resolvePackageDescription packageConfig gpkg) - -packageFromPackageDescription :: PackageConfig - -> [D.Flag] - -> PackageDescriptionPair - -> Package -packageFromPackageDescription packageConfig pkgFlags (PackageDescriptionPair pkgNoMod pkg) = - Package - { packageName = name - , packageVersion = pkgVersion pkgId - , packageLicense = licenseRaw pkg - , packageDeps = deps - , packageFiles = pkgFiles - , packageUnknownTools = unknownTools - , packageGhcOptions = packageConfigGhcOptions packageConfig - , packageCabalConfigOpts = packageConfigCabalConfigOpts packageConfig - , packageFlags = packageConfigFlags packageConfig - , packageDefaultFlags = M.fromList - [(flagName flag, flagDefault flag) | flag <- pkgFlags] - , packageAllDeps = S.fromList (M.keys deps) - , packageLibraries = - let mlib = do - lib <- library pkg - guard $ buildable $ libBuildInfo lib - Just lib - in - case mlib of - Nothing -> NoLibraries - Just _ -> HasLibraries foreignLibNames - , packageInternalLibraries = subLibNames - , packageTests = M.fromList - [(T.pack (Cabal.unUnqualComponentName $ testName t), testInterface t) - | t <- testSuites pkgNoMod - , buildable (testBuildInfo t) - ] - , packageBenchmarks = S.fromList - [T.pack (Cabal.unUnqualComponentName $ benchmarkName b) - | b <- benchmarks pkgNoMod - , buildable (benchmarkBuildInfo b) - ] - -- Same comment about buildable applies here too. 
- , packageExes = S.fromList - [T.pack (Cabal.unUnqualComponentName $ exeName biBuildInfo) - | biBuildInfo <- executables pkg - , buildable (buildInfo biBuildInfo)] - -- This is an action used to collect info needed for "stack ghci". - -- This info isn't usually needed, so computation of it is deferred. - , packageOpts = GetPackageOpts $ - \installMap installedMap omitPkgs addPkgs cabalfp -> - do (componentsModules,componentFiles,_,_) <- getPackageFiles pkgFiles cabalfp - let internals = S.toList $ internalLibComponents $ M.keysSet componentsModules - excludedInternals <- mapM (parsePackageNameThrowing . T.unpack) internals - mungedInternals <- mapM (parsePackageNameThrowing . T.unpack . - toInternalPackageMungedName) internals - componentsOpts <- - generatePkgDescOpts installMap installedMap - (excludedInternals ++ omitPkgs) (mungedInternals ++ addPkgs) - cabalfp pkg componentFiles - return (componentsModules,componentFiles,componentsOpts) - , packageHasExposedModules = maybe - False - (not . null . exposedModules) - (library pkg) - , packageBuildType = buildType pkg - , packageSetupDeps = msetupDeps - , packageCabalSpec = either orLaterVersion id $ specVersionRaw pkg - } - where - extraLibNames = S.union subLibNames foreignLibNames - - subLibNames - = S.fromList - $ map (T.pack . Cabal.unUnqualComponentName) - $ mapMaybe (libraryNameString . libName) -- this is a design bug in the Cabal API: this should statically be known to exist - $ filter (buildable . libBuildInfo) - $ subLibraries pkg - - foreignLibNames - = S.fromList - $ map (T.pack . Cabal.unUnqualComponentName . foreignLibName) - $ filter (buildable . foreignLibBuildInfo) - $ foreignLibs pkg - - toInternalPackageMungedName - = T.pack . prettyShow . MungedPackageName (pkgName pkgId) - . maybeToLibraryName . Just . Cabal.mkUnqualComponentName . T.unpack - - -- Gets all of the modules, files, build files, and data files that - -- constitute the package. 
This is primarily used for dirtiness - -- checking during build, as well as use by "stack ghci" - pkgFiles = GetPackageFiles $ - \cabalfp -> debugBracket ("getPackageFiles" <+> pretty cabalfp) $ do - let pkgDir = parent cabalfp - distDir <- distDirFromDir pkgDir - bc <- view buildConfigL - cabalVer <- view cabalVersionL - (componentModules,componentFiles,dataFiles',warnings) <- - runRIO - (Ctx cabalfp distDir bc cabalVer) - (packageDescModulesAndFiles pkg) - setupFiles <- - if buildType pkg == Custom - then do - let setupHsPath = pkgDir relFileSetupHs - setupLhsPath = pkgDir relFileSetupLhs - setupHsExists <- doesFileExist setupHsPath - if setupHsExists then return (S.singleton setupHsPath) else do - setupLhsExists <- doesFileExist setupLhsPath - if setupLhsExists then return (S.singleton setupLhsPath) else return S.empty - else return S.empty - buildFiles <- liftM (S.insert cabalfp . S.union setupFiles) $ do - let hpackPath = pkgDir relFileHpackPackageConfig - hpackExists <- doesFileExist hpackPath - return $ if hpackExists then S.singleton hpackPath else S.empty - return (componentModules, componentFiles, buildFiles <> dataFiles', warnings) - pkgId = package pkg - name = pkgName pkgId - - (unknownTools, knownTools) = packageDescTools pkg - - deps = M.filterWithKey (const . not . isMe) (M.unionsWith (<>) - [ asLibrary <$> packageDependencies packageConfig pkg - -- We include all custom-setup deps - if present - in the - -- package deps themselves. Stack always works with the - -- invariant that there will be a single installed package - -- relating to a package name, and this applies at the setup - -- dependency level as well. - , asLibrary <$> fromMaybe M.empty msetupDeps - , knownTools - ]) - msetupDeps = fmap - (M.fromList . map (depPkgName &&& depVerRange) . 
setupDepends) - (setupBuildInfo pkg) - - asLibrary range = DepValue - { dvVersionRange = range - , dvType = AsLibrary + packageFromPackageDescription + packageConfig + (genPackageFlags gpkg) + (resolvePackageDescription packageConfig gpkg) + +packageFromPackageDescription :: + PackageConfig + -> [PackageFlag] + -> PackageDescription + -> Package +packageFromPackageDescription + packageConfig + pkgFlags + pkg + = Package + { name = name + , version = pkgVersion pkgId + , license = licenseRaw pkg + , ghcOptions = packageConfig.ghcOptions + , cabalConfigOpts = packageConfig.cabalConfigOpts + , flags = packageConfig.flags + , defaultFlags = M.fromList + [(flagName flag, flagDefault flag) | flag <- pkgFlags] + , library = stackLibraryFromCabal <$> library pkg + , subLibraries = + foldAndMakeCollection stackLibraryFromCabal $ subLibraries pkg + , foreignLibraries = + foldAndMakeCollection stackForeignLibraryFromCabal $ foreignLibs pkg + , testSuites = + foldAndMakeCollection stackTestFromCabal $ testSuites pkg + , benchmarks = + foldAndMakeCollection stackBenchmarkFromCabal $ benchmarks pkg + , executables = + foldAndMakeCollection stackExecutableFromCabal $ executables pkg + , buildType = buildType pkg + , setupDeps = fmap cabalSetupDepsToStackDep (setupBuildInfo pkg) + , cabalSpec = specVersion pkg + , file = stackPackageFileFromCabal pkg + , testEnabled = packageConfig.enableTests + , benchmarkEnabled = packageConfig.enableBenchmarks } - - -- Is the package dependency mentioned here me: either the package - -- name itself, or the name of one of the sub libraries - isMe name' = name' == name || fromString (packageNameString name') `S.member` extraLibNames - --- | Generate GHC options for the package's components, and a list of --- options which apply generally to the package, not one specific --- component. + where + -- Gets all of the modules, files, build files, and data files that constitute + -- the package. 
This is primarily used for dirtiness checking during build, as + -- well as use by "stack ghci" + pkgId = package pkg + name = pkgName pkgId + +-- | This is an action used to collect info needed for "stack ghci". This info +-- isn't usually needed, so computation of it is deferred. +getPackageOpts :: + (HasEnvConfig env, MonadReader env m, MonadThrow m, MonadUnliftIO m ) + => Package + -> InstallMap + -> InstalledMap + -> [PackageName] + -> [PackageName] + -> Path Abs File + -> m ( Map NamedComponent (Map ModuleName (Path Abs File)) + , Map NamedComponent [DotCabalPath] + , Map NamedComponent BuildInfoOpts + ) +getPackageOpts + stackPackage + installMap + installedMap + omitPkgs + addPkgs + cabalFP + = do + PackageComponentFile !componentsModules componentFiles _ _ <- + getPackageFile stackPackage cabalFP + let subLibs = + S.toList $ subLibComponents $ M.keysSet componentsModules + excludedSubLibs = + map (unqualComponentNameToPackageName . toCabalName) subLibs + componentsOpts <- generatePkgDescOpts + installMap + installedMap + (excludedSubLibs ++ omitPkgs) + addPkgs + cabalFP + stackPackage + componentFiles + pure (componentsModules, componentFiles, componentsOpts) + +-- | Generate GHC options for the package's components, and a list of options +-- which apply generally to the package, not one specific component. 
+generatePkgDescOpts :: + (HasEnvConfig env, MonadThrow m, MonadReader env m, MonadIO m) + => InstallMap + -> InstalledMap + -> [PackageName] + -- ^ Packages to omit from the "-package" / "-package-id" flags + -> [PackageName] + -- ^ Packages to add to the "-package" flags + -> Path Abs File + -> Package + -> Map NamedComponent [DotCabalPath] + -> m (Map NamedComponent BuildInfoOpts) generatePkgDescOpts - :: (HasEnvConfig env, MonadThrow m, MonadReader env m, MonadIO m) - => InstallMap - -> InstalledMap - -> [PackageName] -- ^ Packages to omit from the "-package" / "-package-id" flags - -> [PackageName] -- ^ Packages to add to the "-package" flags - -> Path Abs File - -> PackageDescription - -> Map NamedComponent [DotCabalPath] - -> m (Map NamedComponent BuildInfoOpts) -generatePkgDescOpts installMap installedMap omitPkgs addPkgs cabalfp pkg componentPaths = do - config <- view configL - cabalVer <- view cabalVersionL - distDir <- distDirFromDir cabalDir - let generate namedComponent binfo = - ( namedComponent - , generateBuildInfoOpts BioInput - { biInstallMap = installMap - , biInstalledMap = installedMap - , biCabalDir = cabalDir - , biDistDir = distDir - , biOmitPackages = omitPkgs - , biAddPackages = addPkgs - , biBuildInfo = binfo - , biDotCabalPaths = fromMaybe [] (M.lookup namedComponent componentPaths) - , biConfigLibDirs = configExtraLibDirs config - , biConfigIncludeDirs = configExtraIncludeDirs config - , biComponentName = namedComponent - , biCabalVersion = cabalVer - } - ) - return - ( M.fromList - (concat - [ maybe - [] - (return . generate CLib . libBuildInfo) - (library pkg) - , mapMaybe - (\sublib -> do - let maybeLib = CInternalLib . T.pack . Cabal.unUnqualComponentName <$> (libraryNameString . 
libName) sublib - flip generate (libBuildInfo sublib) <$> maybeLib - ) - (subLibraries pkg) - , fmap - (\exe -> - generate - (CExe (T.pack (Cabal.unUnqualComponentName (exeName exe)))) - (buildInfo exe)) - (executables pkg) - , fmap - (\bench -> - generate - (CBench (T.pack (Cabal.unUnqualComponentName (benchmarkName bench)))) - (benchmarkBuildInfo bench)) - (benchmarks pkg) - , fmap - (\test -> - generate - (CTest (T.pack (Cabal.unUnqualComponentName (testName test)))) - (testBuildInfo test)) - (testSuites pkg)])) - where - cabalDir = parent cabalfp - --- | Input to 'generateBuildInfoOpts' -data BioInput = BioInput - { biInstallMap :: !InstallMap - , biInstalledMap :: !InstalledMap - , biCabalDir :: !(Path Abs Dir) - , biDistDir :: !(Path Abs Dir) - , biOmitPackages :: ![PackageName] - , biAddPackages :: ![PackageName] - , biBuildInfo :: !BuildInfo - , biDotCabalPaths :: ![DotCabalPath] - , biConfigLibDirs :: ![FilePath] - , biConfigIncludeDirs :: ![FilePath] - , biComponentName :: !NamedComponent - , biCabalVersion :: !Version - } - --- | Generate GHC options for the target. Since Cabal also figures out --- these options, currently this is only used for invoking GHCI (via --- stack ghci). 
+ installMap + installedMap + omitPackages + addPackages + cabalFP + pkg + componentPaths + = do + config <- view configL + cabalVersion <- view cabalVersionL + distDir <- distDirFromDir cabalDir + let generate componentName buildInfo = generateBuildInfoOpts BioInput + { installMap + , installedMap + , cabalDir + , distDir + , omitPackages + , addPackages + , buildInfo + , dotCabalPaths = + fromMaybe [] (M.lookup componentName componentPaths) + , configLibDirs = config.extraLibDirs + , configIncludeDirs = config.extraIncludeDirs + , componentName + , cabalVersion + } + insertInMap name compVal = M.insert name (generate name compVal) + translatedInsertInMap constructor name = + insertInMap (constructor name) + makeBuildInfoOpts :: + (Foldable t, HasBuildInfo component, HasName component) + => (Package -> t component) + -> (StackUnqualCompName -> NamedComponent) + -> Map NamedComponent BuildInfoOpts + -> Map NamedComponent BuildInfoOpts + makeBuildInfoOpts selector constructor = + foldOnNameAndBuildInfo + (selector pkg) + (translatedInsertInMap constructor) + aggregateAllBuildInfoOpts = + makeBuildInfoOpts (.library) (const CLib) + . makeBuildInfoOpts (.subLibraries) CSubLib + . makeBuildInfoOpts (.executables) CExe + . makeBuildInfoOpts (.benchmarks) CBench + . makeBuildInfoOpts (.testSuites) CTest + pure $ aggregateAllBuildInfoOpts mempty + where + cabalDir = parent cabalFP + +-- | Generate GHC options for the target. Since Cabal also figures out these +-- options, currently this is only used for invoking GHCI (via stack ghci). generateBuildInfoOpts :: BioInput -> BuildInfoOpts -generateBuildInfoOpts BioInput {..} = - BuildInfoOpts - { bioOpts = ghcOpts ++ cppOptions biBuildInfo - -- NOTE for future changes: Due to this use of nubOrd (and other uses - -- downstream), these generated options must not rely on multiple - -- argument sequences. 
For example, ["--main-is", "Foo.hs", "--main- - -- is", "Bar.hs"] would potentially break due to the duplicate - -- "--main-is" being removed. - -- - -- See https://github.com/commercialhaskell/stack/issues/1255 - , bioOneWordOpts = nubOrd $ concat - [extOpts, srcOpts, includeOpts, libOpts, fworks, cObjectFiles] - , bioPackageFlags = deps - , bioCabalMacros = componentAutogen relFileCabalMacrosH - } - where - cObjectFiles = - mapMaybe (fmap toFilePath . - makeObjectFilePathFromC biCabalDir biComponentName biDistDir) - cfiles - cfiles = mapMaybe dotCabalCFilePath biDotCabalPaths - installVersion = snd - -- Generates: -package=base -package=base16-bytestring-0.1.1.6 ... - deps = - concat - [ case M.lookup name biInstalledMap of - Just (_, Stack.Types.Package.Library _ident ipid _) -> ["-package-id=" <> ghcPkgIdString ipid] - _ -> ["-package=" <> packageNameString name <> - maybe "" -- This empty case applies to e.g. base. - ((("-" <>) . versionString) . installVersion) - (M.lookup name biInstallMap)] - | name <- pkgs] - pkgs = - biAddPackages ++ - [ name - | Dependency name _ _ <- targetBuildDepends biBuildInfo -- TODO: cabal 3 introduced multiple public libraries in a single dependency - , name `notElem` biOmitPackages] - PerCompilerFlavor ghcOpts _ = options biBuildInfo - extOpts = map (("-X" ++) . D.display) (usedExtensions biBuildInfo) - srcOpts = - map (("-i" <>) . toFilePathNoTrailingSep) - (concat - [ [ componentBuildDir biCabalVersion biComponentName biDistDir ] - , [ biCabalDir - | null (hsSourceDirs biBuildInfo) - ] - , mapMaybe toIncludeDir (hsSourceDirs biBuildInfo) - , [ componentAutogen ] - , maybeToList (packageAutogenDir biCabalVersion biDistDir) - , [ componentOutputDir biComponentName biDistDir ] - ]) ++ - [ "-stubdir=" ++ toFilePathNoTrailingSep (buildDir biDistDir) ] - componentAutogen = componentAutogenDir biCabalVersion biComponentName biDistDir - toIncludeDir "." 
= Just biCabalDir - toIncludeDir relDir = concatAndColapseAbsDir biCabalDir relDir - includeOpts = - map ("-I" <>) (biConfigIncludeDirs <> pkgIncludeOpts) - pkgIncludeOpts = - [ toFilePathNoTrailingSep absDir - | dir <- includeDirs biBuildInfo - , absDir <- handleDir dir - ] - libOpts = - map ("-l" <>) (extraLibs biBuildInfo) <> - map ("-L" <>) (biConfigLibDirs <> pkgLibDirs) - pkgLibDirs = - [ toFilePathNoTrailingSep absDir - | dir <- extraLibDirs biBuildInfo - , absDir <- handleDir dir - ] - handleDir dir = case (parseAbsDir dir, parseRelDir dir) of - (Just ab, _ ) -> [ab] - (_ , Just rel) -> [biCabalDir rel] - (Nothing, Nothing ) -> [] - fworks = map (\fwk -> "-framework=" <> fwk) (frameworks biBuildInfo) +generateBuildInfoOpts bi = + BuildInfoOpts + { opts = + ghcOpts + ++ fmap ("-optP" <>) bi.buildInfo.cppOptions + -- NOTE for future changes: Due to this use of nubOrd (and other uses + -- downstream), these generated options must not rely on multiple + -- argument sequences. For example, ["--main-is", "Foo.hs", "--main- + -- is", "Bar.hs"] would potentially break due to the duplicate + -- "--main-is" being removed. + -- + -- See https://github.com/commercialhaskell/stack/issues/1255 + , oneWordOpts = nubOrd $ concat + [extOpts, srcOpts, includeOpts, libOpts, fworks, cObjectFiles] + , packageFlags = deps + , cabalMacros = componentAutogen relFileCabalMacrosH + } + where + cObjectFiles = mapMaybe + ( fmap toFilePath + . makeObjectFilePathFromC bi.cabalDir bi.componentName bi.distDir + ) + cfiles + cfiles = mapMaybe dotCabalCFilePath bi.dotCabalPaths + installVersion = snd + -- Generates: -package=base -package=base16-bytestring-0.1.1.6 ... + deps = + concat + [ case M.lookup name bi.installedMap of + Just (_, Stack.Types.Installed.Library _ident installedInfo) -> + installedToPackageIdOpt installedInfo + _ -> ["-package=" <> packageNameString name <> + maybe "" -- This empty case applies to e.g. base. + ((("-" <>) . versionString) . 
installVersion) + (M.lookup name bi.installMap)] + | name <- pkgs + ] + pkgs = + bi.addPackages ++ + [ name + | Dependency name _ _ <- bi.buildInfo.targetBuildDepends + -- TODO: Cabal 3.0 introduced multiple public libraries in a single + -- dependency + , name `notElem` bi.omitPackages + ] + PerCompilerFlavor ghcOpts _ = bi.buildInfo.options + extOpts = + map (("-X" ++) . display) bi.buildInfo.allLanguages + <> map (("-X" ++) . display) bi.buildInfo.usedExtensions + srcOpts = + map (("-i" <>) . toFilePathNoTrailingSep) + (concat + [ [ componentBuildDir bi.componentName bi.distDir ] + , [ bi.cabalDir + | null bi.buildInfo.hsSourceDirs + ] + , mapMaybe + (toIncludeDir . getSymbolicPath) + bi.buildInfo.hsSourceDirs + , [ componentAutogen ] + , [ packageAutogenDir bi.distDir ] + , [ componentOutputDir bi.componentName bi.distDir ] + ]) ++ + [ "-stubdir=" ++ toFilePathNoTrailingSep (buildDir bi.distDir) ] + componentAutogen = componentAutogenDir bi.componentName bi.distDir + toIncludeDir "." = Just bi.cabalDir + toIncludeDir relDir = concatAndCollapseAbsDir bi.cabalDir relDir + includeOpts = + map ("-I" <>) (bi.configIncludeDirs <> pkgIncludeOpts) + pkgIncludeOpts = + [ toFilePathNoTrailingSep absDir + | dir <- bi.buildInfo.includeDirs + , absDir <- handleDir dir + ] + libOpts = + map ("-l" <>) bi.buildInfo.extraLibs <> + map ("-L" <>) (bi.configLibDirs <> pkgLibDirs) + pkgLibDirs = + [ toFilePathNoTrailingSep absDir + | dir <- bi.buildInfo.extraLibDirs + , absDir <- handleDir dir + ] + handleDir dir = case (parseAbsDir dir, parseRelDir dir) of + (Just ab, _ ) -> [ab] + (_ , Just rel) -> [bi.cabalDir rel] + (Nothing, Nothing ) -> [] + fworks = map ("-framework=" <>) bi.buildInfo.frameworks -- | Make the .o path from the .c file path for a component. 
Example: -- @@ -458,173 +405,31 @@ generateBuildInfoOpts BioInput {..} = -- $(mkAbsFile "/Users/chris/Repos/hoogle/cbits/text_search.c") -- Just "/Users/chris/Repos/hoogle/.stack-work/Cabal-x.x.x/dist/build/hoogle/hoogle-tmp/cbits/text_search.o" -- λ> -makeObjectFilePathFromC - :: MonadThrow m - => Path Abs Dir -- ^ The cabal directory. - -> NamedComponent -- ^ The name of the component. - -> Path Abs Dir -- ^ Dist directory. - -> Path Abs File -- ^ The path to the .c file. - -> m (Path Abs File) -- ^ The path to the .o file for the component. +makeObjectFilePathFromC :: + MonadThrow m + => Path Abs Dir -- ^ The cabal directory. + -> NamedComponent -- ^ The name of the component. + -> Path Abs Dir -- ^ Dist directory. + -> Path Abs File -- ^ The path to the .c file. + -> m (Path Abs File) -- ^ The path to the .o file for the component. makeObjectFilePathFromC cabalDir namedComponent distDir cFilePath = do - relCFilePath <- stripProperPrefix cabalDir cFilePath - relOFilePath <- - parseRelFile (replaceExtension (toFilePath relCFilePath) "o") - return (componentOutputDir namedComponent distDir relOFilePath) - --- | Make the global autogen dir if Cabal version is new enough. -packageAutogenDir :: Version -> Path Abs Dir -> Maybe (Path Abs Dir) -packageAutogenDir cabalVer distDir - | cabalVer < mkVersion [2, 0] = Nothing - | otherwise = Just $ buildDir distDir relDirGlobalAutogen - --- | Make the autogen dir. 
-componentAutogenDir :: Version -> NamedComponent -> Path Abs Dir -> Path Abs Dir -componentAutogenDir cabalVer component distDir = - componentBuildDir cabalVer component distDir relDirAutogen - --- | See 'Distribution.Simple.LocalBuildInfo.componentBuildDir' -componentBuildDir :: Version -> NamedComponent -> Path Abs Dir -> Path Abs Dir -componentBuildDir cabalVer component distDir - | cabalVer < mkVersion [2, 0] = buildDir distDir - | otherwise = - case component of - CLib -> buildDir distDir - CInternalLib name -> buildDir distDir componentNameToDir name - CExe name -> buildDir distDir componentNameToDir name - CTest name -> buildDir distDir componentNameToDir name - CBench name -> buildDir distDir componentNameToDir name - --- | The directory where generated files are put like .o or .hs (from .x files). -componentOutputDir :: NamedComponent -> Path Abs Dir -> Path Abs Dir -componentOutputDir namedComponent distDir = - case namedComponent of - CLib -> buildDir distDir - CInternalLib name -> makeTmp name - CExe name -> makeTmp name - CTest name -> makeTmp name - CBench name -> makeTmp name - where - makeTmp name = - buildDir distDir componentNameToDir (name <> "/" <> name <> "-tmp") - --- | Make the build dir. Note that Cabal >= 2.0 uses the --- 'componentBuildDir' above for some things. -buildDir :: Path Abs Dir -> Path Abs Dir -buildDir distDir = distDir relDirBuild - --- NOTE: don't export this, only use it for valid paths based on --- component names. -componentNameToDir :: Text -> Path Rel Dir -componentNameToDir name = - fromMaybe (error "Invariant violated: component names should always parse as directory names") - (parseRelDir (T.unpack name)) + relCFilePath <- stripProperPrefix cabalDir cFilePath + relOFilePath <- + parseRelFile (replaceExtension (toFilePath relCFilePath) "o") + pure (componentOutputDir namedComponent distDir relOFilePath) -- | Get all dependencies of the package (buildable targets only). 
--- --- Note that for Cabal versions 1.22 and earlier, there is a bug where --- Cabal requires dependencies for non-buildable components to be --- present. We're going to use GHC version as a proxy for Cabal --- library version in this case for simplicity, so we'll check for GHC --- being 7.10 or earlier. This obviously makes our function a lot more --- fun to write... -packageDependencies - :: PackageConfig - -> PackageDescription +packageDependencies :: + PackageDescription -> Map PackageName VersionRange -packageDependencies pkgConfig pkg' = +packageDependencies pkg = M.fromListWith intersectVersionRanges $ - map (depPkgName &&& depVerRange) $ - concatMap targetBuildDepends (allBuildInfo' pkg) ++ - maybe [] setupDepends (setupBuildInfo pkg) - where - pkg - | getGhcVersion (packageConfigCompilerVersion pkgConfig) >= mkVersion [8, 0] = pkg' - -- Set all components to buildable. Only need to worry about - -- library, exe, test, and bench, since others didn't exist in - -- older Cabal versions - | otherwise = pkg' - { library = (\c -> c { libBuildInfo = go (libBuildInfo c) }) <$> library pkg' - , executables = (\c -> c { buildInfo = go (buildInfo c) }) <$> executables pkg' - , testSuites = - if packageConfigEnableTests pkgConfig - then (\c -> c { testBuildInfo = go (testBuildInfo c) }) <$> testSuites pkg' - else testSuites pkg' - , benchmarks = - if packageConfigEnableBenchmarks pkgConfig - then (\c -> c { benchmarkBuildInfo = go (benchmarkBuildInfo c) }) <$> benchmarks pkg' - else benchmarks pkg' - } - - go bi = bi { buildable = True } - --- | Get all dependencies of the package (buildable targets only). --- --- This uses both the new 'buildToolDepends' and old 'buildTools' --- information. 
-packageDescTools - :: PackageDescription - -> (Set ExeName, Map PackageName DepValue) -packageDescTools pd = - (S.fromList $ concat unknowns, M.fromListWith (<>) $ concat knowns) - where - (unknowns, knowns) = unzip $ map perBI $ allBuildInfo' pd - - perBI :: BuildInfo -> ([ExeName], [(PackageName, DepValue)]) - perBI bi = - (unknownTools, tools) - where - (unknownTools, knownTools) = partitionEithers $ map go1 (buildTools bi) - - tools = mapMaybe go2 (knownTools ++ buildToolDepends bi) - - -- This is similar to desugarBuildTool from Cabal, however it - -- uses our own hard-coded map which drops tools shipped with - -- GHC (like hsc2hs), and includes some tools from Stackage. - go1 :: Cabal.LegacyExeDependency -> Either ExeName Cabal.ExeDependency - go1 (Cabal.LegacyExeDependency name range) = - case M.lookup name hardCodedMap of - Just pkgName -> Right $ Cabal.ExeDependency pkgName (Cabal.mkUnqualComponentName name) range - Nothing -> Left $ ExeName $ T.pack name - - go2 :: Cabal.ExeDependency -> Maybe (PackageName, DepValue) - go2 (Cabal.ExeDependency pkg _name range) - | pkg `S.member` preInstalledPackages = Nothing - | otherwise = Just - ( pkg - , DepValue - { dvVersionRange = range - , dvType = AsBuildTool - } - ) + map (depPkgName &&& depVerRange) $ + concatMap targetBuildDepends (allBuildInfo' pkg) + <> maybe [] setupDepends (setupBuildInfo pkg) --- | A hard-coded map for tool dependencies -hardCodedMap :: Map String D.PackageName -hardCodedMap = M.fromList - [ ("alex", Distribution.Package.mkPackageName "alex") - , ("happy", Distribution.Package.mkPackageName "happy") - , ("cpphs", Distribution.Package.mkPackageName "cpphs") - , ("greencard", Distribution.Package.mkPackageName "greencard") - , ("c2hs", Distribution.Package.mkPackageName "c2hs") - , ("hscolour", Distribution.Package.mkPackageName "hscolour") - , ("hspec-discover", Distribution.Package.mkPackageName "hspec-discover") - , ("hsx2hs", Distribution.Package.mkPackageName "hsx2hs") - , 
("gtk2hsC2hs", Distribution.Package.mkPackageName "gtk2hs-buildtools") - , ("gtk2hsHookGenerator", Distribution.Package.mkPackageName "gtk2hs-buildtools") - , ("gtk2hsTypeGen", Distribution.Package.mkPackageName "gtk2hs-buildtools") - ] - --- | Executable-only packages which come pre-installed with GHC and do --- not need to be built. Without this exception, we would either end --- up unnecessarily rebuilding these packages, or failing because the --- packages do not appear in the Stackage snapshot. -preInstalledPackages :: Set D.PackageName -preInstalledPackages = S.fromList - [ D.mkPackageName "hsc2hs" - , D.mkPackageName "haddock" - ] - --- | Variant of 'allBuildInfo' from Cabal that, like versions before --- 2.2, only includes buildable components. +-- | Variant of 'Distribution.Types.PackageDescription.allBuildInfo' from Cabal +-- that, like versions before Cabal 2.2 only includes buildable components. allBuildInfo' :: PackageDescription -> [BuildInfo] allBuildInfo' pkg_descr = [ bi | lib <- allLibraries pkg_descr , let bi = libBuildInfo lib @@ -642,696 +447,141 @@ allBuildInfo' pkg_descr = [ bi | lib <- allLibraries pkg_descr , let bi = benchmarkBuildInfo tst , buildable bi ] --- | Get all files referenced by the package. 
-packageDescModulesAndFiles - :: PackageDescription - -> RIO Ctx (Map NamedComponent (Map ModuleName (Path Abs File)), Map NamedComponent [DotCabalPath], Set (Path Abs File), [PackageWarning]) -packageDescModulesAndFiles pkg = do - (libraryMods,libDotCabalFiles,libWarnings) <- - maybe - (return (M.empty, M.empty, [])) - (asModuleAndFileMap libComponent libraryFiles) - (library pkg) - (subLibrariesMods,subLibDotCabalFiles,subLibWarnings) <- - liftM - foldTuples - (mapM - (asModuleAndFileMap internalLibComponent libraryFiles) - (subLibraries pkg)) - (executableMods,exeDotCabalFiles,exeWarnings) <- - liftM - foldTuples - (mapM - (asModuleAndFileMap exeComponent executableFiles) - (executables pkg)) - (testMods,testDotCabalFiles,testWarnings) <- - liftM - foldTuples - (mapM (asModuleAndFileMap testComponent testFiles) (testSuites pkg)) - (benchModules,benchDotCabalPaths,benchWarnings) <- - liftM - foldTuples - (mapM - (asModuleAndFileMap benchComponent benchmarkFiles) - (benchmarks pkg)) - dfiles <- resolveGlobFiles (specVersion pkg) - (extraSrcFiles pkg - ++ map (dataDir pkg FilePath.) (dataFiles pkg)) - let modules = libraryMods <> subLibrariesMods <> executableMods <> testMods <> benchModules - files = - libDotCabalFiles <> subLibDotCabalFiles <> exeDotCabalFiles <> testDotCabalFiles <> - benchDotCabalPaths - warnings = libWarnings <> subLibWarnings <> exeWarnings <> testWarnings <> benchWarnings - return (modules, files, dfiles, warnings) - where - libComponent = const CLib - internalLibComponent = CInternalLib . T.pack . maybe "" Cabal.unUnqualComponentName . libraryNameString . libName - exeComponent = CExe . T.pack . Cabal.unUnqualComponentName . exeName - testComponent = CTest . T.pack . Cabal.unUnqualComponentName . testName - benchComponent = CBench . T.pack . Cabal.unUnqualComponentName . 
benchmarkName - asModuleAndFileMap label f lib = do - (a,b,c) <- f (label lib) lib - return (M.singleton (label lib) a, M.singleton (label lib) b, c) - foldTuples = foldl' (<>) (M.empty, M.empty, []) - --- | Resolve globbing of files (e.g. data files) to absolute paths. -resolveGlobFiles - :: Version -- ^ cabal file version - -> [String] - -> RIO Ctx (Set (Path Abs File)) -resolveGlobFiles cabalFileVersion = - liftM (S.fromList . catMaybes . concat) . - mapM resolve - where - resolve name = - if '*' `elem` name - then explode name - else liftM return (resolveFileOrWarn name) - explode name = do - dir <- asks (parent . ctxFile) - names <- - matchDirFileGlob' - (FL.toFilePath dir) - name - mapM resolveFileOrWarn names - matchDirFileGlob' dir glob = - catch - (liftIO (matchDirFileGlob minBound cabalFileVersion dir glob)) - (\(e :: IOException) -> - if isUserError e - then do - prettyWarnL - [ flow "Wildcard does not match any files:" - , style File $ fromString glob - , line <> flow "in directory:" - , style Dir $ fromString dir - ] - return [] - else throwIO e) - --- | Get all files referenced by the benchmark. -benchmarkFiles - :: NamedComponent - -> Benchmark - -> RIO Ctx (Map ModuleName (Path Abs File), [DotCabalPath], [PackageWarning]) -benchmarkFiles component bench = do - resolveComponentFiles component build names - where - names = bnames <> exposed - exposed = - case benchmarkInterface bench of - BenchmarkExeV10 _ fp -> [DotCabalMain fp] - BenchmarkUnsupported _ -> [] - bnames = map DotCabalModule (otherModules build) - build = benchmarkBuildInfo bench - --- | Get all files referenced by the test. 
-testFiles - :: NamedComponent - -> TestSuite - -> RIO Ctx (Map ModuleName (Path Abs File), [DotCabalPath], [PackageWarning]) -testFiles component test = do - resolveComponentFiles component build names - where - names = bnames <> exposed - exposed = - case testInterface test of - TestSuiteExeV10 _ fp -> [DotCabalMain fp] - TestSuiteLibV09 _ mn -> [DotCabalModule mn] - TestSuiteUnsupported _ -> [] - bnames = map DotCabalModule (otherModules build) - build = testBuildInfo test - --- | Get all files referenced by the executable. -executableFiles - :: NamedComponent - -> Executable - -> RIO Ctx (Map ModuleName (Path Abs File), [DotCabalPath], [PackageWarning]) -executableFiles component exe = do - resolveComponentFiles component build names - where - build = buildInfo exe - names = - map DotCabalModule (otherModules build) ++ - [DotCabalMain (modulePath exe)] - --- | Get all files referenced by the library. -libraryFiles - :: NamedComponent - -> Library - -> RIO Ctx (Map ModuleName (Path Abs File), [DotCabalPath], [PackageWarning]) -libraryFiles component lib = do - resolveComponentFiles component build names - where - build = libBuildInfo lib - names = bnames ++ exposed - exposed = map DotCabalModule (exposedModules lib) - bnames = map DotCabalModule (otherModules build) - --- | Get all files referenced by the component. -resolveComponentFiles - :: NamedComponent - -> BuildInfo - -> [DotCabalDescriptor] - -> RIO Ctx (Map ModuleName (Path Abs File), [DotCabalPath], [PackageWarning]) -resolveComponentFiles component build names = do - dirs <- mapMaybeM resolveDirOrWarn (hsSourceDirs build) - dir <- asks (parent . 
ctxFile) - agdirs <- autogenDirs - (modules,files,warnings) <- - resolveFilesAndDeps - component - ((if null dirs then [dir] else dirs) ++ agdirs) - names - cfiles <- buildOtherSources build - return (modules, files <> cfiles, warnings) - where - autogenDirs = do - cabalVer <- asks ctxCabalVer - distDir <- asks ctxDistDir - let compDir = componentAutogenDir cabalVer component distDir - pkgDir = maybeToList $ packageAutogenDir cabalVer distDir - filterM doesDirExist $ compDir : pkgDir - --- | Get all C sources and extra source files in a build. -buildOtherSources :: BuildInfo -> RIO Ctx [DotCabalPath] -buildOtherSources build = do - cwd <- liftIO getCurrentDir - dir <- asks (parent . ctxFile) - file <- asks ctxFile - let resolveDirFiles files toCabalPath = - forMaybeM files $ \fp -> do - result <- resolveDirFile dir fp - case result of - Nothing -> do - warnMissingFile "File" cwd fp file - return Nothing - Just p -> return $ Just (toCabalPath p) - csources <- resolveDirFiles (cSources build) DotCabalCFilePath - jsources <- resolveDirFiles (targetJsSources build) DotCabalFilePath - return (csources <> jsources) - --- | Get the target's JS sources. -targetJsSources :: BuildInfo -> [FilePath] -targetJsSources = jsSources - --- | A pair of package descriptions: one which modified the buildable --- values of test suites and benchmarks depending on whether they are --- enabled, and one which does not. --- --- Fields are intentionally lazy, we may only need one or the other --- value. --- --- MSS 2017-08-29: The very presence of this data type is terribly --- ugly, it represents the fact that the Cabal 2.0 upgrade did _not_ --- go well. Specifically, we used to have a field to indicate whether --- a component was enabled in addition to buildable, but that's gone --- now, and this is an ugly proxy. 
We should at some point clean up --- the mess of Package, LocalPackage, etc, and probably pull in the --- definition of PackageDescription from Cabal with our additionally --- needed metadata. But this is a good enough hack for the --- moment. Odds are, you're reading this in the year 2024 and thinking --- "wtf?" -data PackageDescriptionPair = PackageDescriptionPair - { pdpOrigBuildable :: PackageDescription - , pdpModifiedBuildable :: PackageDescription - } - --- | Evaluates the conditions of a 'GenericPackageDescription', yielding --- a resolved 'PackageDescription'. -resolvePackageDescription :: PackageConfig - -> GenericPackageDescription - -> PackageDescriptionPair -resolvePackageDescription packageConfig (GenericPackageDescription desc defaultFlags mlib subLibs foreignLibs' exes tests benches) = - PackageDescriptionPair - { pdpOrigBuildable = go False - , pdpModifiedBuildable = go True +-- | Evaluates the conditions of a t'GenericPackageDescription', yielding +-- a resolved t'PackageDescription'. 
+resolvePackageDescription :: + PackageConfig + -> GenericPackageDescription + -> PackageDescription +resolvePackageDescription + packageConfig + ( GenericPackageDescription + desc _ defaultFlags mlib subLibs foreignLibs' exes tests benches + ) + = desc + { library = fmap (resolveConditions rc updateLibDeps) mlib + , subLibraries = map + ( \(n, v) -> + (resolveConditions rc updateLibDeps v){libName = LSubLibName n} + ) + subLibs + , foreignLibs = map + ( \(n, v) -> + (resolveConditions rc updateForeignLibDeps v){foreignLibName = n} + ) + foreignLibs' + , executables = map + ( \(n, v) -> (resolveConditions rc updateExeDeps v){exeName = n} ) + exes + , testSuites = map + ( \(n, v) -> + (resolveConditions rc updateTestDeps v){testName = n} + ) + tests + , benchmarks = map + ( \(n, v) -> + (resolveConditions rc updateBenchmarkDeps v){benchmarkName = n} + ) + benches } - where - go modBuildable = - desc {library = - fmap (resolveConditions rc updateLibDeps) mlib - ,subLibraries = - map (\(n, v) -> (resolveConditions rc updateLibDeps v){libName=LSubLibName n}) - subLibs - ,foreignLibs = - map (\(n, v) -> (resolveConditions rc updateForeignLibDeps v){foreignLibName=n}) - foreignLibs' - ,executables = - map (\(n, v) -> (resolveConditions rc updateExeDeps v){exeName=n}) - exes - ,testSuites = - map (\(n,v) -> (resolveConditions rc (updateTestDeps modBuildable) v){testName=n}) - tests - ,benchmarks = - map (\(n,v) -> (resolveConditions rc (updateBenchmarkDeps modBuildable) v){benchmarkName=n}) - benches} - - flags = - M.union (packageConfigFlags packageConfig) - (flagMap defaultFlags) - - rc = mkResolveConditions - (packageConfigCompilerVersion packageConfig) - (packageConfigPlatform packageConfig) - flags - - updateLibDeps lib deps = - lib {libBuildInfo = - (libBuildInfo lib) {targetBuildDepends = deps}} - updateForeignLibDeps lib deps = - lib {foreignLibBuildInfo = - (foreignLibBuildInfo lib) {targetBuildDepends = deps}} - updateExeDeps exe deps = - exe {buildInfo = - 
(buildInfo exe) {targetBuildDepends = deps}} - - -- Note that, prior to moving to Cabal 2.0, we would set - -- testEnabled/benchmarkEnabled here. These fields no longer - -- exist, so we modify buildable instead here. The only - -- wrinkle in the Cabal 2.0 story is - -- https://github.com/haskell/cabal/issues/1725, where older - -- versions of Cabal (which may be used for actually building - -- code) don't properly exclude build-depends for - -- non-buildable components. Testing indicates that everything - -- is working fine, and that this comment can be completely - -- ignored. I'm leaving the comment anyway in case something - -- breaks and you, poor reader, are investigating. - updateTestDeps modBuildable test deps = - let bi = testBuildInfo test - bi' = bi - { targetBuildDepends = deps - , buildable = buildable bi && (if modBuildable then packageConfigEnableTests packageConfig else True) - } - in test { testBuildInfo = bi' } - updateBenchmarkDeps modBuildable benchmark deps = - let bi = benchmarkBuildInfo benchmark - bi' = bi - { targetBuildDepends = deps - , buildable = buildable bi && (if modBuildable then packageConfigEnableBenchmarks packageConfig else True) - } - in benchmark { benchmarkBuildInfo = bi' } + where + flags = M.union packageConfig.flags (flagMap defaultFlags) + rc = mkResolveConditions + packageConfig.compilerVersion + packageConfig.platform + flags + updateLibDeps lib deps = lib + { libBuildInfo = (libBuildInfo lib) {targetBuildDepends = deps} } + updateForeignLibDeps lib deps = lib + { foreignLibBuildInfo = + (foreignLibBuildInfo lib) {targetBuildDepends = deps} + } + updateExeDeps exe deps = exe + { Executable.buildInfo = (buildInfo exe) {targetBuildDepends = deps} } + updateTestDeps test deps = test + { testBuildInfo = (testBuildInfo test) {targetBuildDepends = deps} } + updateBenchmarkDeps bench deps = bench + { benchmarkBuildInfo = + (benchmarkBuildInfo bench) {targetBuildDepends = deps} + } -- | Make a map from a list of flag 
specifications. -- -- What is @flagManual@ for? -flagMap :: [Flag] -> Map FlagName Bool +flagMap :: [PackageFlag] -> Map FlagName Bool flagMap = M.fromList . map pair - where pair :: Flag -> (FlagName, Bool) - pair = flagName &&& flagDefault + where + pair :: PackageFlag -> (FlagName, Bool) + pair = flagName &&& flagDefault data ResolveConditions = ResolveConditions - { rcFlags :: Map FlagName Bool - , rcCompilerVersion :: ActualCompiler - , rcOS :: OS - , rcArch :: Arch - } + { flags :: Map FlagName Bool + , compilerVersion :: ActualCompiler + , os :: OS + , arch :: Arch + } -- | Generic a @ResolveConditions@ using sensible defaults. -mkResolveConditions :: ActualCompiler -- ^ Compiler version - -> Platform -- ^ installation target platform - -> Map FlagName Bool -- ^ enabled flags - -> ResolveConditions +mkResolveConditions :: + ActualCompiler -- ^ Compiler version + -> Platform -- ^ installation target platform + -> Map FlagName Bool -- ^ enabled flags + -> ResolveConditions mkResolveConditions compilerVersion (Platform arch os) flags = ResolveConditions - { rcFlags = flags - , rcCompilerVersion = compilerVersion - , rcOS = os - , rcArch = arch - } + { flags + , compilerVersion + , os + , arch + } -- | Resolve the condition tree for the library. 
-resolveConditions :: (Semigroup target,Monoid target,Show target) - => ResolveConditions - -> (target -> cs -> target) - -> CondTree ConfVar cs target - -> target +resolveConditions :: + (Semigroup target, Monoid target, Show target) + => ResolveConditions + -> (target -> cs -> target) + -> CondTree ConfVar cs target + -> target resolveConditions rc addDeps (CondNode lib deps cs) = basic <> children - where basic = addDeps lib deps - children = mconcat (map apply cs) - where apply (Cabal.CondBranch cond node mcs) = - if condSatisfied cond - then resolveConditions rc addDeps node - else maybe mempty (resolveConditions rc addDeps) mcs - condSatisfied c = - case c of - Var v -> varSatisifed v - Lit b -> b - CNot c' -> - not (condSatisfied c') - COr cx cy -> - condSatisfied cx || condSatisfied cy - CAnd cx cy -> - condSatisfied cx && condSatisfied cy - varSatisifed v = - case v of - OS os -> os == rcOS rc - Arch arch -> arch == rcArch rc - Flag flag -> - fromMaybe False $ M.lookup flag (rcFlags rc) - -- NOTE: ^^^^^ This should never happen, as all flags - -- which are used must be declared. Defaulting to - -- False. - Impl flavor range -> - case (flavor, rcCompilerVersion rc) of - (GHC, ACGhc vghc) -> vghc `withinRange` range - _ -> False - --- | Try to resolve the list of base names in the given directory by --- looking for unique instances of base names applied with the given --- extensions, plus find any of their module and TemplateHaskell --- dependencies. -resolveFilesAndDeps - :: NamedComponent -- ^ Package component name - -> [Path Abs Dir] -- ^ Directories to look in. - -> [DotCabalDescriptor] -- ^ Base names. 
- -> RIO Ctx (Map ModuleName (Path Abs File),[DotCabalPath],[PackageWarning]) -resolveFilesAndDeps component dirs names0 = do - (dotCabalPaths, foundModules, missingModules) <- loop names0 S.empty - warnings <- liftM2 (++) (warnUnlisted foundModules) (warnMissing missingModules) - return (foundModules, dotCabalPaths, warnings) - where - loop [] _ = return ([], M.empty, []) - loop names doneModules0 = do - resolved <- resolveFiles dirs names - let foundFiles = mapMaybe snd resolved - foundModules = mapMaybe toResolvedModule resolved - missingModules = mapMaybe toMissingModule resolved - pairs <- mapM (getDependencies component dirs) foundFiles - let doneModules = - S.union - doneModules0 - (S.fromList (mapMaybe dotCabalModule names)) - moduleDeps = S.unions (map fst pairs) - thDepFiles = concatMap snd pairs - modulesRemaining = S.difference moduleDeps doneModules - -- Ignore missing modules discovered as dependencies - they may - -- have been deleted. - (resolvedFiles, resolvedModules, _) <- - loop (map DotCabalModule (S.toList modulesRemaining)) doneModules - return - ( nubOrd $ foundFiles <> map DotCabalFilePath thDepFiles <> resolvedFiles - , M.union - (M.fromList foundModules) - resolvedModules - , missingModules) - warnUnlisted foundModules = do - let unlistedModules = - foundModules `M.difference` - M.fromList (mapMaybe (fmap (, ()) . dotCabalModule) names0) - return $ - if M.null unlistedModules - then [] - else [ UnlistedModulesWarning - component - (map fst (M.toList unlistedModules))] - warnMissing _missingModules = do - return [] - -- TODO: bring this back - see - -- https://github.com/commercialhaskell/stack/issues/2649 - {- - cabalfp <- asks ctxFile - return $ - if null missingModules - then [] - else [ MissingModulesWarning - cabalfp - component - missingModules] - -} - -- TODO: In usages of toResolvedModule / toMissingModule, some sort - -- of map + partition would probably be better. 
- toResolvedModule - :: (DotCabalDescriptor, Maybe DotCabalPath) - -> Maybe (ModuleName, Path Abs File) - toResolvedModule (DotCabalModule mn, Just (DotCabalModulePath fp)) = - Just (mn, fp) - toResolvedModule _ = - Nothing - toMissingModule - :: (DotCabalDescriptor, Maybe DotCabalPath) - -> Maybe ModuleName - toMissingModule (DotCabalModule mn, Nothing) = - Just mn - toMissingModule _ = - Nothing - --- | Get the dependencies of a Haskell module file. -getDependencies - :: NamedComponent -> [Path Abs Dir] -> DotCabalPath -> RIO Ctx (Set ModuleName, [Path Abs File]) -getDependencies component dirs dotCabalPath = - case dotCabalPath of - DotCabalModulePath resolvedFile -> readResolvedHi resolvedFile - DotCabalMainPath resolvedFile -> readResolvedHi resolvedFile - DotCabalFilePath{} -> return (S.empty, []) - DotCabalCFilePath{} -> return (S.empty, []) - where - readResolvedHi resolvedFile = do - dumpHIDir <- componentOutputDir component <$> asks ctxDistDir - dir <- asks (parent . ctxFile) - let sourceDir = fromMaybe dir $ find (`isProperPrefixOf` resolvedFile) dirs - stripSourceDir d = stripProperPrefix d resolvedFile - case stripSourceDir sourceDir of - Nothing -> return (S.empty, []) - Just fileRel -> do - let hiPath = - FilePath.replaceExtension - (toFilePath (dumpHIDir fileRel)) - ".hi" - dumpHIExists <- liftIO $ D.doesFileExist hiPath - if dumpHIExists - then parseHI hiPath - else return (S.empty, []) - --- | Parse a .hi file into a set of modules and files. -parseHI - :: FilePath -> RIO Ctx (Set ModuleName, [Path Abs File]) -parseHI hiPath = do - dir <- asks (parent . ctxFile) - result <- liftIO $ Iface.fromFile hiPath - case result of - Left msg -> do - prettyStackDevL - [ flow "Failed to decode module interface:" - , style File $ fromString hiPath - , flow "Decoding failure:" - , style Error $ fromString msg - ] - pure (S.empty, []) - Right iface -> do - let moduleNames = fmap (fromString . T.unpack . decodeUtf8Lenient . fst) . - Iface.unList . Iface.dmods . 
Iface.deps - resolveFileDependency file = do - resolved <- liftIO (forgivingAbsence (resolveFile dir file)) >>= rejectMissingFile - when (isNothing resolved) $ - prettyWarnL - [ flow "Dependent file listed in:" - , style File $ fromString hiPath - , flow "does not exist:" - , style File $ fromString file - ] - pure resolved - resolveUsages = traverse (resolveFileDependency . Iface.unUsage) . Iface.unList . Iface.usage - resolvedUsages <- catMaybes <$> resolveUsages iface - pure (S.fromList $ moduleNames iface, resolvedUsages) - --- | Try to resolve the list of base names in the given directory by --- looking for unique instances of base names applied with the given --- extensions. -resolveFiles - :: [Path Abs Dir] -- ^ Directories to look in. - -> [DotCabalDescriptor] -- ^ Base names. - -> RIO Ctx [(DotCabalDescriptor, Maybe DotCabalPath)] -resolveFiles dirs names = - forM names (\name -> liftM (name, ) (findCandidate dirs name)) - -data CabalFileNameParseFail - = CabalFileNameParseFail FilePath - | CabalFileNameInvalidPackageName FilePath - deriving (Typeable) - -instance Exception CabalFileNameParseFail -instance Show CabalFileNameParseFail where - show (CabalFileNameParseFail fp) = "Invalid file path for cabal file, must have a .cabal extension: " ++ fp - show (CabalFileNameInvalidPackageName fp) = "cabal file names must use valid package names followed by a .cabal extension, the following is invalid: " ++ fp - --- | Parse a package name from a file path. -parsePackageNameFromFilePath :: MonadThrow m => Path a File -> m PackageName -parsePackageNameFromFilePath fp = do - base <- clean $ toFilePath $ filename fp - case parsePackageName base of - Nothing -> throwM $ CabalFileNameInvalidPackageName $ toFilePath fp - Just x -> return x - where clean = liftM reverse . strip . 
reverse - strip ('l':'a':'b':'a':'c':'.':xs) = return xs - strip _ = throwM (CabalFileNameParseFail (toFilePath fp)) - --- | Find a candidate for the given module-or-filename from the list --- of directories and given extensions. -findCandidate - :: [Path Abs Dir] - -> DotCabalDescriptor - -> RIO Ctx (Maybe DotCabalPath) -findCandidate dirs name = do - pkg <- asks ctxFile >>= parsePackageNameFromFilePath - candidates <- liftIO makeNameCandidates - case candidates of - [candidate] -> return (Just (cons candidate)) - [] -> do - case name of - DotCabalModule mn - | D.display mn /= paths_pkg pkg -> logPossibilities dirs mn - _ -> return () - return Nothing - (candidate:rest) -> do - warnMultiple name candidate rest - return (Just (cons candidate)) - where - cons = - case name of - DotCabalModule{} -> DotCabalModulePath - DotCabalMain{} -> DotCabalMainPath - DotCabalFile{} -> DotCabalFilePath - DotCabalCFile{} -> DotCabalCFilePath - paths_pkg pkg = "Paths_" ++ packageNameString pkg - makeNameCandidates = - liftM (nubOrd . concat) (mapM makeDirCandidates dirs) - makeDirCandidates :: Path Abs Dir - -> IO [Path Abs File] - makeDirCandidates dir = - case name of - DotCabalMain fp -> resolveCandidate dir fp - DotCabalFile fp -> resolveCandidate dir fp - DotCabalCFile fp -> resolveCandidate dir fp - DotCabalModule mn -> do - let perExt ext = - resolveCandidate dir (Cabal.toFilePath mn ++ "." ++ T.unpack ext) - withHaskellExts <- mapM perExt haskellFileExts - withPPExts <- mapM perExt haskellPreprocessorExts - pure $ - case (concat withHaskellExts, concat withPPExts) of - -- If we have exactly 1 Haskell extension and exactly - -- 1 preprocessor extension, assume the former file is - -- generated from the latter - -- - -- See https://github.com/commercialhaskell/stack/issues/4076 - ([_], [y]) -> [y] - - -- Otherwise, return everything - (xs, ys) -> xs ++ ys - resolveCandidate dir = fmap maybeToList . 
resolveDirFile dir - --- | Resolve file as a child of a specified directory, symlinks --- don't get followed. -resolveDirFile - :: (MonadIO m, MonadThrow m) - => Path Abs Dir -> FilePath.FilePath -> m (Maybe (Path Abs File)) -resolveDirFile x y = do - -- The standard canonicalizePath does not work for this case - p <- parseCollapsedAbsFile (toFilePath x FilePath. y) - exists <- doesFileExist p - return $ if exists then Just p else Nothing - --- | Warn the user that multiple candidates are available for an --- entry, but that we picked one anyway and continued. -warnMultiple - :: DotCabalDescriptor -> Path b t -> [Path b t] -> RIO Ctx () -warnMultiple name candidate rest = - -- TODO: figure out how to style 'name' and the dispOne stuff - prettyWarnL - [ flow "There were multiple candidates for the Cabal entry" - , fromString . showName $ name - , line <> bulletedList (map dispOne (candidate:rest)) - , line <> flow "picking:" - , dispOne candidate - ] - where showName (DotCabalModule name') = D.display name' - showName (DotCabalMain fp) = fp - showName (DotCabalFile fp) = fp - showName (DotCabalCFile fp) = fp - dispOne = fromString . toFilePath - -- TODO: figure out why dispOne can't be just `display` - -- (remove the .hlint.yaml exception if it can be) - --- | Log that we couldn't find a candidate, but there are --- possibilities for custom preprocessor extensions. --- --- For example: .erb for a Ruby file might exist in one of the --- directories. -logPossibilities - :: HasTerm env - => [Path Abs Dir] -> ModuleName -> RIO env () -logPossibilities dirs mn = do - possibilities <- liftM concat (makePossibilities mn) - unless (null possibilities) $ prettyWarnL - [ flow "Unable to find a known candidate for the Cabal entry" - , (style PP.Module . 
fromString $ D.display mn) <> "," - , flow "but did find:" - , line <> bulletedList (map pretty possibilities) - , flow "If you are using a custom preprocessor for this module" - , flow "with its own file extension, consider adding the file(s)" - , flow "to your .cabal under extra-source-files." - ] - where - makePossibilities name = - mapM - (\dir -> - do (_,files) <- listDir dir - return - (map - filename - (filter - (isPrefixOf (D.display name) . - toFilePath . filename) - files))) - dirs + where + basic = addDeps lib deps + children = mconcat (map apply cs) + where + apply (Cabal.CondBranch cond node mcs) = + if condSatisfied cond + then resolveConditions rc addDeps node + else maybe mempty (resolveConditions rc addDeps) mcs + condSatisfied c = + case c of + Var v -> varSatisfied v + Lit b -> b + CNot c' -> not (condSatisfied c') + COr cx cy -> condSatisfied cx || condSatisfied cy + CAnd cx cy -> condSatisfied cx && condSatisfied cy + varSatisfied v = + case v of + OS os -> os == rc.os + Arch arch -> arch == rc.arch + PackageFlag flag -> fromMaybe False $ M.lookup flag rc.flags + -- NOTE: ^^^^^ This should never happen, as all flags which are used + -- must be declared. Defaulting to False. + Impl flavor range -> + case (flavor, rc.compilerVersion) of + (GHC, ACGhc vghc) -> vghc `withinRange` range + _ -> False -- | Path for the package's build log. -buildLogPath :: (MonadReader env m, HasBuildConfig env, MonadThrow m) - => Package -> Maybe String -> m (Path Abs File) +buildLogPath :: + (MonadReader env m, HasBuildConfig env, MonadThrow m) + => Package + -> Maybe String + -> m (Path Abs File) buildLogPath package' msuffix = do env <- ask - let stack = getProjectWorkDir env + let workDir = getWorkDir env fp <- parseRelFile $ concat $ packageIdentifierString (packageIdentifier package') : maybe id (\suffix -> ("-" :) . 
(suffix :)) msuffix [".log"] - return $ stack relDirLogs fp - --- Internal helper to define resolveFileOrWarn and resolveDirOrWarn -resolveOrWarn :: Text - -> (Path Abs Dir -> String -> RIO Ctx (Maybe a)) - -> FilePath.FilePath - -> RIO Ctx (Maybe a) -resolveOrWarn subject resolver path = - do cwd <- liftIO getCurrentDir - file <- asks ctxFile - dir <- asks (parent . ctxFile) - result <- resolver dir path - when (isNothing result) $ warnMissingFile subject cwd path file - return result - -warnMissingFile :: Text -> Path Abs Dir -> FilePath -> Path Abs File -> RIO Ctx () -warnMissingFile subject cwd path fromFile = - prettyWarnL - [ fromString . T.unpack $ subject -- TODO: needs style? - , flow "listed in" - , maybe (pretty fromFile) pretty (stripProperPrefix cwd fromFile) - , flow "file does not exist:" - , style Dir . fromString $ path - ] - --- | Resolve the file, if it can't be resolved, warn for the user --- (purely to be helpful). -resolveFileOrWarn :: FilePath.FilePath - -> RIO Ctx (Maybe (Path Abs File)) -resolveFileOrWarn = resolveOrWarn "File" f - where f p x = liftIO (forgivingAbsence (resolveFile p x)) >>= rejectMissingFile - --- | Resolve the directory, if it can't be resolved, warn for the user --- (purely to be helpful). -resolveDirOrWarn :: FilePath.FilePath - -> RIO Ctx (Maybe (Path Abs Dir)) -resolveDirOrWarn = resolveOrWarn "Directory" f - where f p x = liftIO (forgivingAbsence (resolveDir p x)) >>= rejectMissingDir + pure $ workDir relDirLogs fp {- FIXME -- | Create a 'ProjectPackage' from a directory containing a package. 
@@ -1342,7 +592,7 @@ mkProjectPackage -> RIO env ProjectPackage mkProjectPackage printWarnings dir = do (gpd, name, cabalfp) <- loadCabalFilePath (resolvedAbsolute dir) - return ProjectPackage + pure ProjectPackage { ppCabalFP = cabalfp , ppGPD' = gpd printWarnings , ppResolvedDir = dir @@ -1364,7 +614,7 @@ mkDepPackage pl = do PackageIdentifier name _ <- getPackageLocationIdent pli run <- askRunInIO pure (name, run $ loadCabalFileImmutable pli) - return DepPackage + pure DepPackage { dpGPD' = gpdio , dpLocation = pl , dpName = name @@ -1374,24 +624,292 @@ mkDepPackage pl = do -- | Force a package to be treated as a custom build type, see -- -applyForceCustomBuild - :: Version -- ^ global Cabal version +applyForceCustomBuild :: + Version + -- ^ Global Cabal version. -> Package -> Package applyForceCustomBuild cabalVersion package - | forceCustomBuild = - package - { packageBuildType = Custom - , packageDeps = M.insertWith (<>) "Cabal" (DepValue cabalVersionRange AsLibrary) - $ packageDeps package - , packageSetupDeps = Just $ M.fromList - [ ("Cabal", cabalVersionRange) - , ("base", anyVersion) - ] - } - | otherwise = package - where - cabalVersionRange = packageCabalSpec package - forceCustomBuild = - packageBuildType package == Simple && - not (cabalVersion `withinRange` cabalVersionRange) + | forceCustomBuild = + package + { buildType = Custom + , setupDeps = Just $ M.fromList + [ ("Cabal", libraryDepFromVersionRange cabalVersionRange) + , ("base", libraryDepFromVersionRange anyVersion) + ] + } + | otherwise = package + where + cabalVersionRange = + orLaterVersion $ mkVersion $ cabalSpecToVersionDigits + package.cabalSpec + forceCustomBuild = package.buildType == Simple + && not (cabalVersion `withinRange` cabalVersionRange) + +-- | Check if the package has a main library that is buildable. 
+hasBuildableMainLibrary :: Package -> Bool +hasBuildableMainLibrary package = + maybe False isComponentBuildable package.library + +-- | Aggregate all unknown tools from all components. Mostly meant for +-- build tools specified in the legacy manner (build-tools:) that failed the +-- hard-coded lookup. See 'Stack.Types.Component.unknownTools' for more +-- information. +packageUnknownTools :: Package -> Set Text +packageUnknownTools pkg = lib (bench <> tests <> flib <> sublib <> exe) + where + lib setT = case pkg.library of + Just libV -> addUnknownTools libV setT + Nothing -> setT + bench = gatherUnknownTools pkg.benchmarks + tests = gatherUnknownTools pkg.testSuites + flib = gatherUnknownTools pkg.foreignLibraries + sublib = gatherUnknownTools pkg.subLibraries + exe = gatherUnknownTools pkg.executables + addUnknownTools :: HasBuildInfo x => x -> Set Text -> Set Text + addUnknownTools = (<>) . (.buildInfo.unknownTools) + gatherUnknownTools :: HasBuildInfo x => CompCollection x -> Set Text + gatherUnknownTools = foldr' addUnknownTools mempty + +buildableForeignLibs :: Package -> Set StackUnqualCompName +buildableForeignLibs pkg = getBuildableSet pkg.foreignLibraries + +buildableSubLibs :: Package -> Set StackUnqualCompName +buildableSubLibs pkg = getBuildableSet pkg.subLibraries + +buildableExes :: Package -> Set StackUnqualCompName +buildableExes pkg = getBuildableSet pkg.executables + +buildableTestSuites :: Package -> Set StackUnqualCompName +buildableTestSuites pkg = getBuildableSet pkg.testSuites + +buildableBenchmarks :: Package -> Set StackUnqualCompName +buildableBenchmarks pkg = getBuildableSet pkg.benchmarks + +-- | Apply a generic processing function in a Monad over all of the Package's +-- components. +processPackageComponent :: + forall m a. (Monad m) + => Package + -> (forall component. HasComponentInfo component => component -> m a -> m a) + -- ^ Processing function with all the component's info. + -> m a + -- ^ Initial value. 
+ -> m a +processPackageComponent pkg componentFn = do + let componentKindProcessor :: + forall component. HasComponentInfo component + => (Package -> CompCollection component) + -> m a + -> m a + componentKindProcessor target = + foldComponentToAnotherCollection + (target pkg) + componentFn + processMainLib = maybe id componentFn pkg.library + processAllComp = + ( if pkg.benchmarkEnabled + then componentKindProcessor (.benchmarks) + else id + ) + . ( if pkg.testEnabled + then componentKindProcessor (.testSuites) + else id + ) + . componentKindProcessor (.foreignLibraries) + . componentKindProcessor (.executables) + . componentKindProcessor (.subLibraries) + . processMainLib + processAllComp + +-- | This is a function to iterate in a monad over all of a package's +-- dependencies, and yield a collection of results (used with list and set). +processPackageMapDeps :: + (Monad m) + => Package + -> (Map PackageName DepValue -> m a -> m a) + -> m a + -> m a +processPackageMapDeps pkg fn = do + let packageSetupDepsProcessor resAction = case pkg.setupDeps of + Nothing -> resAction + Just v -> fn v resAction + processAllComp = processPackageComponent pkg (fn . componentDependencyMap) + . packageSetupDepsProcessor + processAllComp + +-- | This is a function to iterate in a monad over all of a package component's +-- dependencies, and yield a collection of results. +processPackageDeps :: + (Monad m) + => Package + -> (smallResT -> resT -> resT) + -> (PackageName -> DepValue -> m smallResT) + -> m resT + -> m resT +processPackageDeps pkg combineResults fn = do + let + asPackageNameSet :: + (Package -> CompCollection component) + -> Set PackageName + asPackageNameSet accessor = + S.map (mkPackageName . 
T.unpack) $ getBuildableSetText $ accessor pkg + (!subLibNames, !foreignLibNames) = + ( asPackageNameSet (.subLibraries) + , asPackageNameSet (.foreignLibraries) + ) + shouldIgnoreDep (packageNameV :: PackageName) + | packageNameV == pkg.name = True + | packageNameV `S.member` subLibNames = True + | packageNameV `S.member` foreignLibNames = True + | otherwise = False + innerIterator packageName depValue resListInMonad + | shouldIgnoreDep packageName = resListInMonad + | otherwise = do + resList <- resListInMonad + newResElement <- fn packageName depValue + pure $ combineResults newResElement resList + processPackageMapDeps pkg (flip (M.foldrWithKey' innerIterator)) + +-- | Iterate/fold on all the package dependencies, components, setup deps and +-- all. +processPackageDepsToList :: + Monad m + => Package + -> (PackageName -> DepValue -> m resT) + -> m [resT] +processPackageDepsToList pkg fn = processPackageDeps pkg (:) fn (pure []) + +-- | Iterate/fold on all the package dependencies, components, setup deps and +-- all. +processPackageDepsEither :: + (Monad m, Monoid a, Monoid b) + => Package + -> (PackageName -> DepValue -> m (Either a b)) + -> m (Either a b) +processPackageDepsEither pkg fn = + processPackageDeps pkg combineRes fn (pure (Right mempty)) + where + combineRes (Left err) (Left errs) = Left (errs <> err) + combineRes _ (Left b) = Left b + combineRes (Left err) _ = Left err + combineRes (Right a) (Right b) = Right $ a <> b + +-- | List all package's dependencies in a "free" context through the identity +-- monad. +listOfPackageDeps :: Package -> [PackageName] +listOfPackageDeps pkg = + runIdentity $ processPackageDepsToList pkg (\pn _ -> pure pn) + +-- | The set of package's dependencies. +setOfPackageDeps :: Package -> Set PackageName +setOfPackageDeps pkg = + runIdentity $ processPackageDeps pkg S.insert (\pn _ -> pure pn) (pure mempty) + +-- | This implements a topological sort on all targeted components for the build +-- and their dependencies. 
It's only targeting internal dependencies, so it's +-- doing a topological sort on a subset of a package's components. +-- +-- Note that in Cabal they use the Data.Graph struct to pursue the same goal. +-- But dong this here would require a large number intermediate data structure. +-- This is needed because we need to get the right GhcPkgId of the relevant +-- internal dependencies of a component before building it as a component. +topSortPackageComponent :: + Package + -> Target + -> Bool + -- ^ Include directTarget or not. False here means we won't include the + -- actual targets in the result, only their deps. Using it with False here + -- only in GHCi + -> Seq NamedComponent +topSortPackageComponent package target includeDirectTarget = + topProcessPackageComponent package target processor mempty + where + processor :: + HasQualiName component + => PackageType + -> component + -> Seq NamedComponent + -> Seq NamedComponent + processor packageType component + | not includeDirectTarget && packageType == PTProject = id + | otherwise = \v -> v |> component.qualifiedName + +-- | Process a package's internal components in the order of their topological +-- sort. The first iteration will effect the component depending on no other +-- component etc, iterating by increasing amount of required dependencies. +-- 'PackageType' with value 'PTProject' here means the component is a direct +-- target and 'PTDependency' means it's a dependency of a direct target. +topProcessPackageComponent :: + forall b. + Package + -> Target + -> ( forall component. 
(HasComponentInfo component) + => PackageType + -> component + -> b + -> b + ) + -> b + -> b +topProcessPackageComponent package target fn res = do + let + initialState :: (Set NamedComponent, b) + initialState = (mempty, res) + processInitialComponents :: + HasComponentInfo component + => component + -> (Set NamedComponent, b) + -> (Set NamedComponent, b) + processInitialComponents c = case target of + TargetAll{} -> processComponent PTProject c + TargetComps targetSet -> if S.member c.qualifiedName targetSet + then processComponent PTProject c + else id + snd $ processPackageComponent package processInitialComponents initialState + where + processComponent :: + HasComponentInfo component + => PackageType + -- ^ Finally add this component in the seq + -> component + -> (Set NamedComponent, b) + -> (Set NamedComponent, b) + processComponent packageType component currentRes@(_a, !_b) = do + let depMap = componentDependencyMap component + internalDep = M.lookup package.name depMap + qualName = component.qualifiedName + alreadyProcessed = fst currentRes + !appendToResult = fn packageType component + -- This is an optimization, the only components we are likely to process + -- multiple times are the ones we can find in dependencies, otherwise we + -- only fold on a single version of each component by design. 
+ processedDeps = processOneDep internalDep currentRes + if isPotentialDependency qualName + then + if S.member qualName alreadyProcessed + then currentRes + else bimap (S.insert qualName) appendToResult processedDeps + else second appendToResult processedDeps + lookupLibName isMain name = if isMain + then package.library + else collectionLookup name package.subLibraries + processOneDep :: + Maybe DepValue + -> (Set NamedComponent, b) + -> (Set NamedComponent, b) + processOneDep mDependency res' = + case (.depType) <$> mDependency of + Just (AsLibrary (DepLibrary mainLibDep subLibDeps)) -> do + let processMainLibDep = + case (mainLibDep, lookupLibName True emptyCompName) of + (True, Just mainLib) -> + processComponent PTDependency mainLib + _ -> id + processSingleSubLib name = + case lookupLibName False name of + Just lib -> processComponent PTDependency lib + Nothing -> id + processSubLibDep r = foldr' processSingleSubLib r subLibDeps + processSubLibDep (processMainLibDep res') + _ -> res' diff --git a/src/Stack/PackageDump.hs b/src/Stack/PackageDump.hs index 8092a3b2f6..bfbecce336 100644 --- a/src/Stack/PackageDump.hs +++ b/src/Stack/PackageDump.hs @@ -1,324 +1,386 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.PackageDump +License : BSD-3-Clause +-} + module Stack.PackageDump - ( Line - , eachSection - , eachPair - , DumpPackage (..) - , conduitDumpPackage - , ghcPkgDump - , ghcPkgDescribe - , sinkMatching - , pruneDeps - ) where + ( Line + , eachSection + , eachPair + , DumpPackage (..) 
+ , conduitDumpPackage + , ghcPkgDump + , ghcPkgDescribe + , ghcPkgField + , sinkMatching + , pruneDeps + ) where -import Stack.Prelude -import Data.Attoparsec.Args +import Control.Monad.Extra ( whenJust ) +import Data.Attoparsec.Args ( EscapingMode (..), argsParser ) import Data.Attoparsec.Text as P -import Data.Conduit +import Data.Conduit ( await, leftover, toConsumer, yield ) import qualified Data.Conduit.List as CL import qualified Data.Conduit.Text as CT import qualified Data.Map as Map import qualified Data.Set as Set -import qualified RIO.Text as T +import qualified Distribution.Pretty as C import qualified Distribution.Text as C -import Path.Extra (toFilePathNoTrailingSep) -import Stack.GhcPkg -import Stack.Types.Config (HasCompiler (..), GhcPkgExe (..), DumpPackage (..)) -import Stack.Types.GhcPkgId -import RIO.Process hiding (readProcess) +import Distribution.Types.MungedPackageName + ( decodeCompatPackageName ) +import Path.Extra ( toFilePathNoTrailingSep ) +import RIO.Process ( HasProcessContext ) +import qualified RIO.Text as T +import Stack.Component ( fromCabalName ) +import Stack.GhcPkg ( createDatabase ) +import Stack.Prelude +import Stack.Types.CompilerPaths ( GhcPkgExe (..), HasCompiler (..) ) +import Stack.Types.ComponentUtils ( unqualCompFromText ) +import Stack.Types.DumpPackage ( DumpPackage (..), SublibDump (..) ) +import Stack.Types.GhcPkgId ( GhcPkgId, parseGhcPkgId ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.PackageDump" module. 
+data PackageDumpException + = MissingSingleField Text (Map Text [Line]) + | Couldn'tParseField Text [Line] + deriving Show + +instance Exception PackageDumpException where + displayException (MissingSingleField name values) = unlines $ + concat + [ "Error: [S-4257]\n" + , "Expected single value for field name " + , show name + , " when parsing ghc-pkg dump output:" + ] + : map (\(k, v) -> " " ++ show (k, v)) (Map.toList values) + displayException (Couldn'tParseField name ls) = concat + [ "Error: [S-2016]\n" + , "Couldn't parse the field " + , show name + , " from lines: " + , show ls + , "." + ] --- | Call ghc-pkg dump with appropriate flags and stream to the given @Sink@, for a single database -ghcPkgDump - :: (HasProcessContext env, HasLogFunc env) - => GhcPkgExe - -> [Path Abs Dir] -- ^ if empty, use global - -> ConduitM Text Void (RIO env) a - -> RIO env a +-- | Call @ghc-pkg dump@ with appropriate flags and stream to the given sink, +-- using either the global package database or the given package databases. +ghcPkgDump :: + (HasProcessContext env, HasTerm env) + => GhcPkgExe + -> [Path Abs Dir] + -- ^ A list of package databases. If empty, use the global package + -- database. + -> ConduitM Text Void (RIO env) a + -- ^ Sink. + -> RIO env a ghcPkgDump pkgexe = ghcPkgCmdArgs pkgexe ["dump"] --- | Call ghc-pkg describe with appropriate flags and stream to the given @Sink@, for a single database -ghcPkgDescribe - :: (HasProcessContext env, HasLogFunc env, HasCompiler env) - => GhcPkgExe - -> PackageName - -> [Path Abs Dir] -- ^ if empty, use global - -> ConduitM Text Void (RIO env) a - -> RIO env a -ghcPkgDescribe pkgexe pkgName' = ghcPkgCmdArgs pkgexe ["describe", "--simple-output", packageNameString pkgName'] +-- | Call @ghc-pkg describe@ with appropriate flags and stream to the given +-- sink, using either the global package database or the given package +-- databases. 
+ghcPkgDescribe :: + (HasCompiler env, HasProcessContext env, HasTerm env) + => GhcPkgExe + -> PackageName + -> [Path Abs Dir] + -- ^ A list of package databases. If empty, use the global package + -- database. + -> ConduitM Text Void (RIO env) a + -- ^ Sink. + -> RIO env a +ghcPkgDescribe pkgexe pkgName' = ghcPkgCmdArgs + pkgexe + ["describe", "--simple-output", packageNameString pkgName'] --- | Call ghc-pkg and stream to the given @Sink@, for a single database -ghcPkgCmdArgs - :: (HasProcessContext env, HasLogFunc env) - => GhcPkgExe - -> [String] - -> [Path Abs Dir] -- ^ if empty, use global - -> ConduitM Text Void (RIO env) a - -> RIO env a +-- | Call @ghc-pkg field@ with appropriate flags and stream to the given sink, +-- using the given package database. Throws 'RIO.Process.ExitCodeException' if +-- the process fails (for example, if the package is not found in the package +-- database or the field is not found in the package's *.conf file). +ghcPkgField :: + (HasCompiler env, HasProcessContext env, HasTerm env) + => GhcPkgExe + -> Path Abs Dir + -- ^ A package database. + -> MungedPackageId + -- ^ A munged package identifier. + -> String + -- ^ A field name. + -> ConduitM Text Void (RIO env) a + -- ^ Sink. + -> RIO env a +ghcPkgField pkgexe pkgDb mungedPkgId fieldName = ghcPkgCmdArgs + pkgexe + ["field", C.prettyShow mungedPkgId, fieldName, "--simple-output" ] + [pkgDb] + +-- | Call @ghc-pkg@ and stream to the given sink, using the either the global +-- package database or the given package databases. +ghcPkgCmdArgs :: + (HasProcessContext env, HasTerm env) + => GhcPkgExe + -> [String] + -- ^ A list of commands. + -> [Path Abs Dir] + -- ^ A list of package databases. If empty, use the global package + -- database. + -> ConduitM Text Void (RIO env) a + -- ^ Sink. + -> RIO env a ghcPkgCmdArgs pkgexe@(GhcPkgExe pkgPath) cmd mpkgDbs sink = do - case reverse mpkgDbs of - (pkgDb:_) -> createDatabase pkgexe pkgDb -- TODO maybe use some retry logic instead? 
- _ -> return () - sinkProcessStdout (toFilePath pkgPath) args sink' - where - args = concat - [ case mpkgDbs of - [] -> ["--global", "--no-user-package-db"] - _ -> ["--user", "--no-user-package-db"] ++ - concatMap (\pkgDb -> ["--package-db", toFilePathNoTrailingSep pkgDb]) mpkgDbs - , cmd - , ["--expand-pkgroot"] - ] - sink' = CT.decodeUtf8 .| sink + case reverse mpkgDbs of + (pkgDb:_) -> createDatabase pkgexe pkgDb -- TODO maybe use some retry logic instead? + _ -> pure () + -- https://github.com/haskell/process/issues/251 + snd <$> sinkProcessStderrStdout (toFilePath pkgPath) args CL.sinkNull sink' + where + args = concat + [ case mpkgDbs of + [] -> ["--global", "--no-user-package-db"] + _ -> "--user" + : "--no-user-package-db" + : concatMap + (\pkgDb -> ["--package-db", toFilePathNoTrailingSep pkgDb]) + mpkgDbs + , cmd + , ["--expand-pkgroot"] + ] + sink' = CT.decodeUtf8 .| sink -- | Prune a list of possible packages down to those whose dependencies are met. -- -- * id uniquely identifies an item -- -- * There can be multiple items per name -pruneDeps - :: (Ord name, Ord id) - => (id -> name) -- ^ extract the name from an id - -> (item -> id) -- ^ the id of an item - -> (item -> [id]) -- ^ get the dependencies of an item - -> (item -> item -> item) -- ^ choose the desired of two possible items - -> [item] -- ^ input items - -> Map name item +pruneDeps :: + (Ord name, Ord id) + => (id -> name) -- ^ extract the name from an id + -> (item -> id) -- ^ the id of an item + -> (item -> [id]) -- ^ get the dependencies of an item + -> (item -> item -> item) -- ^ choose the desired of two possible items + -> [item] -- ^ input items + -> Map name item pruneDeps getName getId getDepends chooseBest = - Map.fromList - . fmap (getName . getId &&& id) - . 
loop Set.empty Set.empty [] - where - loop foundIds usedNames foundItems dps = - case partitionEithers $ map depsMet dps of - ([], _) -> foundItems - (s', dps') -> - let foundIds' = Map.fromListWith chooseBest s' - foundIds'' = Set.fromList $ map getId $ Map.elems foundIds' - usedNames' = Map.keysSet foundIds' - foundItems' = Map.elems foundIds' - in loop - (Set.union foundIds foundIds'') - (Set.union usedNames usedNames') - (foundItems ++ foundItems') - (catMaybes dps') - where - depsMet dp - | name `Set.member` usedNames = Right Nothing - | all (`Set.member` foundIds) (getDepends dp) = Left (name, dp) - | otherwise = Right $ Just dp - where - id' = getId dp - name = getName id' + Map.fromList + . fmap (getName . getId &&& id) + . loop Set.empty Set.empty [] + where + loop foundIds usedNames foundItems dps = + case partitionEithers $ map depsMet dps of + ([], _) -> foundItems + (s', dps') -> + let foundIds' = Map.fromListWith chooseBest s' + foundIds'' = Set.fromList $ map getId $ Map.elems foundIds' + usedNames' = Map.keysSet foundIds' + foundItems' = Map.elems foundIds' + in loop + (Set.union foundIds foundIds'') + (Set.union usedNames usedNames') + (foundItems ++ foundItems') + (catMaybes dps') + where + depsMet dp + | name `Set.member` usedNames = Right Nothing + | all (`Set.member` foundIds) (getDepends dp) = Left (name, dp) + | otherwise = Right $ Just dp + where + id' = getId dp + name = getName id' -- | Find the package IDs matching the given constraints with all dependencies installed. -- Packages not mentioned in the provided @Map@ are allowed to be present too. -sinkMatching :: Monad m - => Map PackageName Version -- ^ allowed versions - -> ConduitM DumpPackage o m (Map PackageName DumpPackage) +sinkMatching :: + Monad m + => Map PackageName Version -- ^ allowed versions + -> ConduitM DumpPackage o m (Map PackageName DumpPackage) sinkMatching allowed = - Map.fromList - . map (pkgName . dpPackageIdent &&& id) - . Map.elems - . 
pruneDeps - id - dpGhcPkgId - dpDepends - const -- Could consider a better comparison in the future - <$> (CL.filter (isAllowed . dpPackageIdent) .| CL.consume) - where - isAllowed (PackageIdentifier name version) = - case Map.lookup name allowed of - Just version' | version /= version' -> False - _ -> True - -data PackageDumpException - = MissingSingleField Text (Map Text [Line]) - | Couldn'tParseField Text [Line] - deriving Typeable -instance Exception PackageDumpException -instance Show PackageDumpException where - show (MissingSingleField name values) = unlines $ - return (concat - [ "Expected single value for field name " - , show name - , " when parsing ghc-pkg dump output:" - ]) ++ map (\(k, v) -> " " ++ show (k, v)) (Map.toList values) - show (Couldn'tParseField name ls) = - "Couldn't parse the field " ++ show name ++ " from lines: " ++ show ls + Map.fromList + . map (pkgName . (.packageIdent) &&& id) + . Map.elems + . pruneDeps + id + (.ghcPkgId) + (.depends) + const -- Could consider a better comparison in the future + <$> (CL.filter (isAllowed . (.packageIdent)) .| CL.consume) + where + isAllowed (PackageIdentifier name version) = + case Map.lookup name allowed of + Just version' | version /= version' -> False + _ -> True -- | Convert a stream of bytes into a stream of @DumpPackage@s -conduitDumpPackage :: MonadThrow m - => ConduitM Text DumpPackage m () +conduitDumpPackage :: + MonadThrow m + => ConduitM Text DumpPackage m () conduitDumpPackage = (.| CL.catMaybes) $ eachSection $ do - pairs <- eachPair (\k -> (k, ) <$> CL.consume) .| CL.consume - let m = Map.fromList pairs - let parseS k = - case Map.lookup k m of - Just [v] -> return v - _ -> throwM $ MissingSingleField k m - -- Can't fail: if not found, same as an empty list. 
See: - -- https://github.com/fpco/stack/issues/182 - parseM k = Map.findWithDefault [] k m - - parseDepend :: MonadThrow m => Text -> m (Maybe GhcPkgId) - parseDepend "builtin_rts" = return Nothing - parseDepend bs = - liftM Just $ parseGhcPkgId bs' - where - (bs', _builtinRts) = - case stripSuffixText " builtin_rts" bs of - Nothing -> - case stripPrefixText "builtin_rts " bs of - Nothing -> (bs, False) - Just x -> (x, True) - Just x -> (x, True) - case Map.lookup "id" m of - Just ["builtin_rts"] -> return Nothing - _ -> do - name <- parseS "name" >>= parsePackageNameThrowing . T.unpack - version <- parseS "version" >>= parseVersionThrowing . T.unpack - ghcPkgId <- parseS "id" >>= parseGhcPkgId - - -- if a package has no modules, these won't exist - let libDirKey = "library-dirs" - libraries = parseM "hs-libraries" - exposedModules = parseM "exposed-modules" - exposed = parseM "exposed" - license = - case parseM "license" of - [licenseText] -> C.simpleParse (T.unpack licenseText) - _ -> Nothing - depends <- mapMaybeM parseDepend $ concatMap T.words $ parseM "depends" + pairs <- eachPair (\k -> (k, ) <$> CL.consume) .| CL.consume + let m = Map.fromList pairs - -- Handle sublibs by recording the name of the parent library - -- If name of parent library is missing, this is not a sublib. - let mkParentLib n = PackageIdentifier n version - parentLib = mkParentLib <$> (parseS "package-name" >>= - parsePackageNameThrowing . T.unpack) + parseS :: MonadThrow m => Text -> m Line + parseS k = + case Map.lookup k m of + Just [v] -> pure v + _ -> throwM $ MissingSingleField k m + -- Can't fail: if not found, same as an empty list. 
See: + -- https://github.com/commercialhaskell/stack/issues/182 + parseM k = Map.findWithDefault [] k m - let parseQuoted key = - case mapM (P.parseOnly (argsParser NoEscaping)) val of - Left{} -> throwM (Couldn'tParseField key val) - Right dirs -> return (concat dirs) - where - val = parseM key - libDirPaths <- parseQuoted libDirKey - haddockInterfaces <- parseQuoted "haddock-interfaces" - haddockHtml <- parseQuoted "haddock-html" + parseDepend :: MonadThrow m => Text -> m (Maybe GhcPkgId) + parseDepend "builtin_rts" = pure Nothing + parseDepend bs = Just <$> parseGhcPkgId bs' + where + (bs', _builtinRts) = + case stripSuffixText " builtin_rts" bs of + Nothing -> + case stripPrefixText "builtin_rts " bs of + Nothing -> (bs, False) + Just x -> (x, True) + Just x -> (x, True) + case Map.lookup "id" m of + Just ["builtin_rts"] -> pure Nothing + _ -> do + name <- parseS "name" >>= parsePackageNameThrowing . T.unpack + version <- parseS "version" >>= parseVersionThrowing . T.unpack + ghcPkgId <- parseS "id" >>= parseGhcPkgId - return $ Just DumpPackage - { dpGhcPkgId = ghcPkgId - , dpPackageIdent = PackageIdentifier name version - , dpParentLibIdent = parentLib - , dpLicense = license - , dpLibDirs = libDirPaths - , dpLibraries = T.words $ T.unwords libraries - , dpHasExposedModules = not (null libraries || null exposedModules) + -- if a package has no modules, these won't exist + let libDirKey = "library-dirs" + libraries = parseM "hs-libraries" + exposedModules = parseM "exposed-modules" + exposed = parseM "exposed" + license = + case parseM "license" of + [licenseText] -> C.simpleParse (T.unpack licenseText) + _ -> Nothing + depends <- mapMaybeM parseDepend $ concatMap T.words $ parseM "depends" - -- Strip trailing commas from ghc package exposed-modules (looks buggy to me...). - -- Then try to parse the module names. - , dpExposedModules = - Set.fromList - $ mapMaybe (C.simpleParse . T.unpack . 
T.dropSuffix ",") - $ T.words - $ T.unwords exposedModules - - , dpDepends = depends - , dpHaddockInterfaces = haddockInterfaces - , dpHaddockHtml = listToMaybe haddockHtml - , dpIsExposed = exposed == ["True"] - } + -- Handle sub-libraries by recording the name of the parent library + -- If name of parent library is missing, this is not a sub-library. + let maybePackageName :: Maybe PackageName = + parseS "package-name" >>= parsePackageNameThrowing . T.unpack + maybeLibName = parseS "lib-name" + getLibNameFromLegacyName = case decodeCompatPackageName name of + MungedPackageName _parentPackageName (LSubLibName libName') -> + fromCabalName libName' + MungedPackageName _parentPackageName _ -> "" + libName = + maybe getLibNameFromLegacyName unqualCompFromText maybeLibName + sublib = flip SublibDump libName <$> maybePackageName + parseQuoted key = + case mapM (P.parseOnly (argsParser NoEscaping)) val of + Left{} -> throwM (Couldn'tParseField key val) + Right dirs -> pure (concat dirs) + where + val = parseM key + libDirs <- parseQuoted libDirKey + haddockInterfaces <- parseQuoted "haddock-interfaces" + haddockHtml <- listToMaybe <$> parseQuoted "haddock-html" + pure $ Just DumpPackage + { ghcPkgId + , packageIdent = PackageIdentifier name version + , sublib + , license + , libDirs + , libraries = T.words $ T.unwords libraries + , hasExposedModules = not (null libraries || null exposedModules) + -- Strip trailing commas from ghc package exposed-modules (looks buggy + -- to me...). Then try to parse the module names. + , exposedModules = + Set.fromList + $ mapMaybe (C.simpleParse . T.unpack . 
T.dropSuffix ",") + $ T.words + $ T.unwords exposedModules + , depends + , haddockInterfaces + , haddockHtml + , isExposed = exposed == ["True"] + } stripPrefixText :: Text -> Text -> Maybe Text stripPrefixText x y - | x `T.isPrefixOf` y = Just $ T.drop (T.length x) y - | otherwise = Nothing + | x `T.isPrefixOf` y = Just $ T.drop (T.length x) y + | otherwise = Nothing stripSuffixText :: Text -> Text -> Maybe Text stripSuffixText x y - | x `T.isSuffixOf` y = Just $ T.take (T.length y - T.length x) y - | otherwise = Nothing + | x `T.isSuffixOf` y = Just $ T.take (T.length y - T.length x) y + | otherwise = Nothing -- | A single line of input, not including line endings type Line = Text -- | Apply the given Sink to each section of output, broken by a single line containing --- -eachSection :: Monad m - => ConduitM Line Void m a - -> ConduitM Text a m () -eachSection inner = - CL.map (T.filter (/= '\r')) .| CT.lines .| start - where - - peekText = await >>= maybe (return Nothing) (\bs -> - if T.null bs - then peekText - else leftover bs >> return (Just bs)) +eachSection :: + Monad m + => ConduitM Line Void m a + -> ConduitM Text a m () +eachSection inner = CL.map (T.filter (/= '\r')) .| CT.lines .| start + where + peekText = await >>= maybe (pure Nothing) (\bs -> + if T.null bs + then peekText + else leftover bs >> pure (Just bs)) - start = peekText >>= maybe (return ()) (const go) + start = peekText >>= maybe (pure ()) (const go) - go = do - x <- toConsumer $ takeWhileC (/= "---") .| inner - yield x - CL.drop 1 - start + go = do + x <- toConsumer $ takeWhileC (/= "---") .| inner + yield x + CL.drop 1 + start -- | Grab each key/value pair -eachPair :: Monad m - => (Text -> ConduitM Line Void m a) - -> ConduitM Line a m () -eachPair inner = - start - where - start = await >>= maybe (return ()) start' +eachPair :: + Monad m + => (Text -> ConduitM Line Void m a) + -> ConduitM Line a m () +eachPair inner = start + where + start = await >>= maybe (pure ()) start' - start' 
bs1 = - toConsumer (valSrc .| inner key) >>= yield >> start - where - (key, bs2) = T.break (== ':') bs1 - (spaces, bs3) = T.span (== ' ') $ T.drop 1 bs2 - indent = T.length key + 1 + T.length spaces + start' bs1 = toConsumer (valSrc .| inner key) >>= yield >> start + where + (key, bs2) = T.break (== ':') bs1 + (spaces, bs3) = T.span (== ' ') $ T.drop 1 bs2 + ind = T.length key + 1 + T.length spaces - valSrc - | T.null bs3 = noIndent - | otherwise = yield bs3 >> loopIndent indent + valSrc + | T.null bs3 = noIndent + | otherwise = yield bs3 >> loopIndent ind - noIndent = do - mx <- await - case mx of - Nothing -> return () - Just bs -> do - let (spaces, val) = T.span (== ' ') bs - if T.length spaces == 0 - then leftover val - else do - yield val - loopIndent (T.length spaces) + noIndent = do + mx <- await + whenJust mx $ \bs -> do + let (spaces, val) = T.span (== ' ') bs + if T.length spaces == 0 + then leftover val + else do + yield val + loopIndent (T.length spaces) - loopIndent i = - loop - where - loop = await >>= maybe (return ()) go + loopIndent i = loop + where + loop = await >>= maybe (pure ()) go - go bs - | T.length spaces == i && T.all (== ' ') spaces = - yield val >> loop - | otherwise = leftover bs - where - (spaces, val) = T.splitAt i bs + go bs + | T.length spaces == i && T.all (== ' ') spaces = + yield val >> loop + | otherwise = leftover bs + where + (spaces, val) = T.splitAt i bs -- | General purpose utility takeWhileC :: Monad m => (a -> Bool) -> ConduitM a a m () -takeWhileC f = - loop - where - loop = await >>= maybe (return ()) go +takeWhileC f = loop + where + loop = await >>= maybe (pure ()) go - go x - | f x = yield x >> loop - | otherwise = leftover x + go x + | f x = yield x >> loop + | otherwise = leftover x diff --git a/src/Stack/PackageFile.hs b/src/Stack/PackageFile.hs new file mode 100644 index 0000000000..c35f7f3c8a --- /dev/null +++ b/src/Stack/PackageFile.hs @@ -0,0 +1,203 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE 
OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.PackageFile +Description : All package-level file-gathering logic. +License : BSD-3-Clause + +A module which exports all package-level file-gathering logic. +-} + +module Stack.PackageFile + ( getPackageFile + , stackPackageFileFromCabal + ) where + +import qualified Data.Map.Strict as M +import qualified Data.Set as S +import Distribution.CabalSpecVersion ( CabalSpecVersion ) +import qualified Distribution.PackageDescription as Cabal +import Distribution.Simple.Glob ( matchDirFileGlob ) +import Distribution.Utils.Path ( makeSymbolicPath, getSymbolicPath ) +import Path ( parent, () ) +import Path.Extra ( forgivingResolveFile, rejectMissingFile ) +import Path.IO ( doesFileExist ) +import Stack.ComponentFile + ( ComponentFile (..), resolveOrWarn, stackBenchmarkFiles + , stackExecutableFiles, stackLibraryFiles + , stackTestSuiteFiles + ) +import Stack.Constants + ( relFileHpackPackageConfig, relFileSetupHs, relFileSetupLhs + ) +import Stack.Constants.Config ( distDirFromDir ) +import Stack.Prelude +import Stack.Types.BuildConfig ( HasBuildConfig (..) ) +import Stack.Types.CompilerPaths ( cabalVersionL ) +import Stack.Types.EnvConfig ( HasEnvConfig (..) ) +import Stack.Types.NamedComponent ( NamedComponent (..) ) +import Stack.Types.Package ( Package(..) ) +import Stack.Types.PackageFile + ( GetPackageFileContext (..), PackageComponentFile (..) + , StackPackageFile (..) + ) +import qualified System.FilePath as FilePath +import System.IO.Error ( isUserError ) + +-- | Resolve the file, if it can't be resolved, warn for the user +-- (purely to be helpful). +resolveFileOrWarn :: + FilePath.FilePath + -> RIO GetPackageFileContext (Maybe (Path Abs File)) +resolveFileOrWarn = resolveOrWarn "File" f + where + f p x = forgivingResolveFile p x >>= rejectMissingFile + +-- | Get all files referenced by the package. 
+packageDescModulesAndFiles :: + Package + -> RIO + GetPackageFileContext + PackageComponentFile +packageDescModulesAndFiles pkg = do + packageExtraFile <- resolveGlobFilesFromStackPackageFile + pkg.cabalSpec pkg.file + let initialValue = mempty{packageExtraFile=packageExtraFile} + accumulator :: + Applicative f + => (t -> f (NamedComponent, ComponentFile)) + -> t + -> f PackageComponentFile + -> f PackageComponentFile + accumulator f comp st = (insertComponentFile <$> st) <*> f comp + gatherCompFileCollection :: + (Applicative f, Foldable t) + => (a -> f (NamedComponent, ComponentFile)) + -> (Package -> t a) + -> f PackageComponentFile + -> f PackageComponentFile + gatherCompFileCollection createCompFileFn getCompFn res = + foldr' (accumulator createCompFileFn) res (getCompFn pkg) + gatherCompFileCollection stackLibraryFiles (.library) + . gatherCompFileCollection stackLibraryFiles (.subLibraries) + . gatherCompFileCollection stackExecutableFiles (.executables) + . gatherCompFileCollection stackTestSuiteFiles (.testSuites) + . gatherCompFileCollection stackBenchmarkFiles (.benchmarks) + $ pure initialValue + +resolveGlobFilesFromStackPackageFile :: + CabalSpecVersion + -> StackPackageFile + -> RIO GetPackageFileContext (Set (Path Abs File)) +resolveGlobFilesFromStackPackageFile + csvV + (StackPackageFile extraSrcFilesV dataDirV dataFilesV) + = resolveGlobFiles + csvV + (extraSrcFilesV ++ map (dataDirV FilePath.) dataFilesV) + +-- | Resolve globbing of files (e.g. data files) to absolute paths. +resolveGlobFiles :: + CabalSpecVersion -- ^ Cabal file version + -> [String] + -> RIO GetPackageFileContext (Set (Path Abs File)) +resolveGlobFiles cabalFileVersion = + fmap (S.fromList . concatMap catMaybes) . 
mapM resolve + where + resolve :: FilePath -> RIO GetPackageFileContext [Maybe (Path Abs File)] + resolve name = + if '*' `elem` name + then explode name + else fmap pure (resolveFileOrWarn name) + + explode :: FilePath -> RIO GetPackageFileContext [Maybe (Path Abs File)] + explode name = do + dir <- asks (parent . (.file)) + names <- matchDirFileGlob' (toFilePath dir) name + mapM resolveFileOrWarn names + + matchDirFileGlob' :: + FilePath + -> FilePath + -> RIO GetPackageFileContext [FilePath] + matchDirFileGlob' dir glob = map getSymbolicPath <$> do + catch + (liftIO $ matchDirFileGlob minBound cabalFileVersion (Just $ makeSymbolicPath dir) (makeSymbolicPath glob)) + ( \(e :: IOException) -> + if isUserError e + then do + prettyWarnL + [ flow "Wildcard does not match any files:" + , style File $ fromString glob + , line <> flow "in directory:" + , style Dir $ fromString dir + ] + pure [] + else throwIO e + ) + +-- | Gets all of the modules, files, build files, and data files that constitute +-- the package. 
This is primarily used for dirtiness checking during build, as +-- well as use by "stack ghci" +getPackageFile :: + ( HasEnvConfig s, MonadReader s m, MonadThrow m, MonadUnliftIO m ) + => Package + -> Path Abs File + -> m PackageComponentFile +getPackageFile pkg cabalFP = + debugBracket ("getPackageFiles" <+> pretty cabalFP) $ do + let pkgDir = parent cabalFP + distDir <- distDirFromDir pkgDir + bc <- view buildConfigL + cabalVer <- view cabalVersionL + packageComponentFile <- + runRIO + (GetPackageFileContext cabalFP distDir bc cabalVer) + (packageDescModulesAndFiles pkg) + setupFiles <- + if pkg.buildType == Cabal.Custom + then do + let setupHsPath = pkgDir relFileSetupHs + setupLhsPath = pkgDir relFileSetupLhs + setupHsExists <- doesFileExist setupHsPath + if setupHsExists + then pure (S.singleton setupHsPath) + else do + setupLhsExists <- doesFileExist setupLhsPath + if setupLhsExists + then pure (S.singleton setupLhsPath) + else pure S.empty + else pure S.empty + moreBuildFiles <- fmap (S.insert cabalFP . S.union setupFiles) $ do + let hpackPath = pkgDir relFileHpackPackageConfig + hpackExists <- doesFileExist hpackPath + pure $ if hpackExists then S.singleton hpackPath else S.empty + pure packageComponentFile + { packageExtraFile = + moreBuildFiles <> packageComponentFile.packageExtraFile + } + +-- | For the given contents of a Cabal file, yields the information from it that +-- Stack needs to track files. 
+stackPackageFileFromCabal :: Cabal.PackageDescription -> StackPackageFile +stackPackageFileFromCabal cabalPkg = + StackPackageFile + (map getSymbolicPath $ Cabal.extraSrcFiles cabalPkg) + (getSymbolicPath $ Cabal.dataDir cabalPkg) + (map getSymbolicPath $ Cabal.dataFiles cabalPkg) + +insertComponentFile :: + PackageComponentFile + -> (NamedComponent, ComponentFile) + -> PackageComponentFile +insertComponentFile packageCompFile (name, compFile) = + PackageComponentFile nCompFile nDotCollec packageExtraFile nWarnings + where + (ComponentFile moduleFileMap dotCabalFileList warningsCollec) = compFile + (PackageComponentFile modules files packageExtraFile warnings) = + packageCompFile + nCompFile = M.insert name moduleFileMap modules + nDotCollec = M.insert name dotCabalFileList files + nWarnings = warningsCollec ++ warnings diff --git a/src/Stack/Path.hs b/src/Stack/Path.hs index df517ffbc6..eef69b6aa7 100644 --- a/src/Stack/Path.hs +++ b/src/Stack/Path.hs @@ -1,228 +1,381 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Path +Description : Types and functions related to Stack's @path@ command. +License : BSD-3-Clause + +Types and functions related to Stack's @path@ command. +-} --- | Handy path information. 
module Stack.Path - ( path - , pathParser - ) where + ( EnvConfigPathInfo + , UseHaddocks + , path + , pathsFromRunner + , pathsFromConfig + , pathsFromEnvConfig + ) where -import Stack.Prelude -import Data.List (intercalate) +import Control.Exception ( throw ) +import Data.List ( intercalate ) import qualified Data.Text as T import qualified Data.Text.IO as T -import qualified Options.Applicative as OA -import Path -import Path.Extra +import Path ( (), parent ) +import Path.Extra ( toFilePathNoTrailingSep ) +import RIO.Process ( HasProcessContext (..), exeSearchPathL ) +import Stack.Config ( determineStackRootAndOwnership ) import Stack.Constants -import Stack.Constants.Config + ( docDirSuffix, stackGlobalConfigOptionName + , stackRootOptionName + ) +import Stack.Constants.Config ( distRelativeDir ) import Stack.GhcPkg as GhcPkg +import Stack.Prelude hiding ( pi ) import Stack.Runners + ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..), configFileL ) +import Stack.Types.BuildOptsMonoid ( buildOptsMonoidHaddockL ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), HasCompiler (..), getCompilerPath ) import Stack.Types.Config + ( Config (..), HasConfig (..), userGlobalConfigFileL ) +import Stack.Types.Config.Exception ( ConfigPrettyException (..) ) +import Stack.Types.EnvConfig + ( EnvConfig, HasEnvConfig (..), bindirCompilerTools + , hpcReportDir, installationRootDeps, installationRootLocal + , packageDatabaseDeps, packageDatabaseExtra + , packageDatabaseLocal + ) +import qualified Stack.Types.EnvConfig as EnvConfig +import Stack.Types.GHCVariant ( HasGHCVariant (..) ) +import Stack.Types.GlobalOpts + ( GlobalOpts (..), globalOptsBuildOptsMonoidL ) +import Stack.Types.Platform ( HasPlatform (..) 
) +import Stack.Types.Runner ( HasRunner (..), Runner, globalOptsL ) import qualified System.FilePath as FP -import RIO.PrettyPrint -import RIO.Process (HasProcessContext (..), exeSearchPathL) --- | Print out useful path information in a human-readable format (and --- support others later). +-- | Print out useful path information in a human-readable format (and support +-- others later). path :: [Text] -> RIO Runner () -path keys = - do let deprecated = filter ((`elem` keys) . fst) deprecatedPathKeys - forM_ deprecated $ \(oldOption, newOption) -> logWarn $ - "\n" <> - "'--" <> display oldOption <> "' will be removed in a future release.\n" <> - "Please use '--" <> display newOption <> "' instead.\n" <> - "\n" - let -- filter the chosen paths in flags (keys), - -- or show all of them if no specific paths chosen. - goodPaths = filter - (\(_,key,_) -> - (null keys && key /= T.pack deprecatedStackRootOptionName) || elem key keys) - paths - singlePath = length goodPaths == 1 - toEither (_, k, UseHaddocks p) = Left (k, p) - toEither (_, k, WithoutHaddocks p) = Right (k, p) - (with, without) = partitionEithers $ map toEither goodPaths - printKeys extractors single = do - pathInfo <- fillPathInfo - liftIO $ forM_ extractors $ \(key, extractPath) -> do - let prefix = if single then "" else key <> ": " - T.putStrLn $ prefix <> extractPath pathInfo - runHaddock x = local - (set (globalOptsL.globalOptsBuildOptsMonoidL.buildOptsMonoidHaddockL) (Just x)) . - withConfig YesReexec . 
-- FIXME this matches previous behavior, but doesn't make a lot of sense - withDefaultEnvConfig - -- MSS 2019-03-17 Not a huge fan of rerunning withConfig and - -- withDefaultEnvConfig each time, need to figure out what - -- purpose is served and whether we can achieve it without two - -- completely separate Config setups - runHaddock True $ printKeys with singlePath - runHaddock False $ printKeys without singlePath - -fillPathInfo :: HasEnvConfig env => RIO env PathInfo -fillPathInfo = do - -- We must use a BuildConfig from an EnvConfig to ensure that it contains the - -- full environment info including GHC paths etc. - piBuildConfig <- view $ envConfigL.buildConfigL - -- This is the modified 'bin-path', - -- including the local GHC or MSYS if not configured to operate on - -- global GHC. - -- It was set up in 'withBuildConfigAndLock -> withBuildConfigExt -> setupEnv'. - -- So it's not the *minimal* override path. - piSnapDb <- packageDatabaseDeps - piLocalDb <- packageDatabaseLocal - piExtraDbs <- packageDatabaseExtra - piGlobalDb <- view $ compilerPathsL.to cpGlobalDB - piSnapRoot <- installationRootDeps - piLocalRoot <- installationRootLocal - piToolsDir <- bindirCompilerTools - piHoogleRoot <- hoogleRoot - piDistDir <- distRelativeDir - piHpcDir <- hpcReportDir - piCompiler <- getCompilerPath - return PathInfo {..} - -pathParser :: OA.Parser [Text] -pathParser = - mapMaybeA - (\(desc,name,_) -> - OA.flag Nothing - (Just name) - (OA.long (T.unpack name) <> - OA.help desc)) - paths - --- | Passed to all the path printers as a source of info. 
-data PathInfo = PathInfo - { piBuildConfig :: !BuildConfig - , piSnapDb :: !(Path Abs Dir) - , piLocalDb :: !(Path Abs Dir) - , piGlobalDb :: !(Path Abs Dir) - , piSnapRoot :: !(Path Abs Dir) - , piLocalRoot :: !(Path Abs Dir) - , piToolsDir :: !(Path Abs Dir) - , piHoogleRoot :: !(Path Abs Dir) - , piDistDir :: Path Rel Dir - , piHpcDir :: !(Path Abs Dir) - , piExtraDbs :: ![Path Abs Dir] - , piCompiler :: !(Path Abs File) +path keys = do + let -- filter the chosen paths in flags (keys), or show all of them if no + -- specific paths chosen. + filterKeys :: (String, Text, a) -> Bool + filterKeys (_, key, _) = null keys || elem key keys + goodPathsFromRunner = null keys || elem stackRootOptionName' keys + goodPathsFromConfig = filter filterKeys pathsFromConfig + goodPathsFromEnvConfig = filter filterKeys pathsFromEnvConfig + toKeyPath (_, key, p) = (key, p) + goodPathsFromConfig' = map toKeyPath goodPathsFromConfig + singlePath = (if goodPathsFromRunner then 1 else 0) + + length goodPathsFromConfig + length goodPathsFromEnvConfig == 1 + toEither (_, k, UseHaddocks a) = Left (k, a) + toEither (_, k, WithoutHaddocks a) = Right (k, a) + (with, without) = partitionEithers $ map toEither goodPathsFromEnvConfig + when goodPathsFromRunner $ printKeysWithRunner singlePath + unless (null goodPathsFromConfig') $ + runHaddockWithConfig $ printKeysWithConfig goodPathsFromConfig' singlePath + unless (null without) $ + runHaddockWithEnvConfig False $ printKeysWithEnvConfig without singlePath + unless (null with) $ + runHaddockWithEnvConfig True $ printKeysWithEnvConfig with singlePath + +printKeysWithRunner :: + Bool + -> RIO Runner () +printKeysWithRunner single = do + clArgs <- view $ globalOptsL . 
to (.configMonoid) + liftIO $ do + (_, stackRoot, _) <- determineStackRootAndOwnership clArgs + let prefix = if single then "" else stackRootOptionName' <> ": " + T.putStrLn $ prefix <> T.pack (toFilePathNoTrailingSep stackRoot) + +printKeysWithConfig :: + HasConfig env + => [(Text, Config -> Text)] + -> Bool + -> RIO env () +printKeysWithConfig extractors single = + view configL >>= printKeys extractors single + +printKeysWithEnvConfig :: + HasEnvConfig env + => [(Text, EnvConfigPathInfo -> Text)] + -> Bool + -> RIO env () +printKeysWithEnvConfig extractors single = + fillEnvConfigPathInfo >>= printKeys extractors single + +printKeys :: + [(Text, info -> Text)] + -> Bool + -> info + -> RIO env () +printKeys extractors single info = do + liftIO $ forM_ extractors $ \(key, extractPath) -> do + let prefix = if single then "" else key <> ": " + T.putStrLn $ prefix <> extractPath info + +runHaddockWithEnvConfig :: Bool -> RIO EnvConfig () -> RIO Runner () +runHaddockWithEnvConfig x action = runHaddock x (withDefaultEnvConfig action) + +runHaddockWithConfig :: RIO Config () -> RIO Runner () +runHaddockWithConfig = runHaddock False + +runHaddock :: Bool -> RIO Config () -> RIO Runner () +runHaddock x action = local modifyConfig $ withConfig YesReexec action + where + modifyConfig = set + (globalOptsL . globalOptsBuildOptsMonoidL . buildOptsMonoidHaddockL) + (Just x) + +fillEnvConfigPathInfo :: HasEnvConfig env => RIO env EnvConfigPathInfo +fillEnvConfigPathInfo = do + -- We must use a BuildConfig from an EnvConfig to ensure that it contains the + -- full environment info including GHC paths etc. + buildConfig <- view $ envConfigL . buildConfigL + -- This is the modified 'bin-path', + -- including the local GHC or MSYS if not configured to operate on + -- global GHC. + -- It was set up in 'withBuildConfigAndLock -> withBuildConfigExt -> setupEnv'. + -- So it's not the *minimal* override path. 
+ snapDb <- packageDatabaseDeps + localDb <- packageDatabaseLocal + extraDbs <- packageDatabaseExtra + globalDb <- view $ compilerPathsL . to (.globalDB) + snapRoot <- installationRootDeps + localRoot <- installationRootLocal + toolsDir <- bindirCompilerTools + hoogleRoot <- EnvConfig.hoogleRoot + distDir <- distRelativeDir + hpcDir <- hpcReportDir + compiler <- getCompilerPath + pure EnvConfigPathInfo + { buildConfig + , snapDb + , localDb + , globalDb + , snapRoot + , localRoot + , toolsDir + , hoogleRoot + , distDir + , hpcDir + , extraDbs + , compiler } -instance HasPlatform PathInfo -instance HasLogFunc PathInfo where - logFuncL = configL.logFuncL -instance HasRunner PathInfo where - runnerL = configL.runnerL -instance HasStylesUpdate PathInfo where - stylesUpdateL = runnerL.stylesUpdateL -instance HasTerm PathInfo where - useColorL = runnerL.useColorL - termWidthL = runnerL.termWidthL -instance HasGHCVariant PathInfo -instance HasConfig PathInfo -instance HasPantryConfig PathInfo where - pantryConfigL = configL.pantryConfigL -instance HasProcessContext PathInfo where - processContextL = configL.processContextL -instance HasBuildConfig PathInfo where - buildConfigL = lens piBuildConfig (\x y -> x { piBuildConfig = y }) - . buildConfigL - -data UseHaddocks a = UseHaddocks a | WithoutHaddocks a - --- | The paths of interest to a user. The first tuple string is used --- for a description that the optparse flag uses, and the second --- string as a machine-readable key and also for @--foo@ flags. The user --- can choose a specific path to list like @--stack-root@. But --- really it's mainly for the documentation aspect. +-- | Type representing information needed to generate an appropriate string for +-- paths of interest to a user which require an 'EnvConfig'. 
+data EnvConfigPathInfo = EnvConfigPathInfo + { buildConfig :: !BuildConfig + , snapDb :: !(Path Abs Dir) + , localDb :: !(Path Abs Dir) + , globalDb :: !(Path Abs Dir) + , snapRoot :: !(Path Abs Dir) + , localRoot :: !(Path Abs Dir) + , toolsDir :: !(Path Abs Dir) + , hoogleRoot :: !(Path Abs Dir) + , distDir :: Path Rel Dir + , hpcDir :: !(Path Abs Dir) + , extraDbs :: ![Path Abs Dir] + , compiler :: !(Path Abs File) + } + +instance HasPlatform EnvConfigPathInfo where + platformL = configL . platformL + {-# INLINE platformL #-} + platformVariantL = configL . platformVariantL + {-# INLINE platformVariantL #-} + +instance HasLogFunc EnvConfigPathInfo where + logFuncL = configL . logFuncL + +instance HasRunner EnvConfigPathInfo where + runnerL = configL . runnerL + +instance HasStylesUpdate EnvConfigPathInfo where + stylesUpdateL = runnerL . stylesUpdateL + +instance HasTerm EnvConfigPathInfo where + useColorL = runnerL . useColorL + termWidthL = runnerL . termWidthL + +instance HasGHCVariant EnvConfigPathInfo where + ghcVariantL = configL . ghcVariantL + {-# INLINE ghcVariantL #-} + +instance HasConfig EnvConfigPathInfo where + configL = buildConfigL . lens (.config) (\x y -> x { config = y }) + {-# INLINE configL #-} + +instance HasPantryConfig EnvConfigPathInfo where + pantryConfigL = configL . pantryConfigL + +instance HasProcessContext EnvConfigPathInfo where + processContextL = configL . processContextL + +instance HasBuildConfig EnvConfigPathInfo where + buildConfigL = + lens (.buildConfig) (\x y -> x { buildConfig = y }) . buildConfigL + +-- | Type representing whether or not building Haddocks is required. +data UseHaddocks a + = UseHaddocks a + -- ^ Building Haddocks is required. + | WithoutHaddocks a + -- ^ Building Haddocks is not required. + +-- | The paths of interest to a user which do require a t'Config' or +-- 'EnvConfig'. 
The first tuple string is used for a description that the +-- optparse flag uses, and the second string as a machine-readable key and also +-- for @--foo@ flags. The user can choose a specific path to list like +-- @--stack-root@. But really it's mainly for the documentation aspect. +pathsFromRunner :: (String, Text) +pathsFromRunner = ("Global Stack root directory", stackRootOptionName') + +-- | The paths of interest to a user which do require an 'EnvConfig'. The first +-- tuple string is used for a description that the optparse flag uses, and the +-- second string as a machine-readable key and also for @--foo@ flags. The user +-- can choose a specific path to list like @--stack-root@. But really it's +-- mainly for the documentation aspect. -- --- When printing output we generate @PathInfo@ and pass it to the --- function to generate an appropriate string. Trailing slashes are --- removed, see #506 -paths :: [(String, Text, UseHaddocks (PathInfo -> Text))] -paths = - [ ( "Global stack root directory" - , T.pack stackRootOptionName - , WithoutHaddocks $ view (stackRootL.to toFilePathNoTrailingSep.to T.pack)) - , ( "Project root (derived from stack.yaml file)" - , "project-root" - , WithoutHaddocks $ view (projectRootL.to toFilePathNoTrailingSep.to T.pack)) - , ( "Configuration location (where the stack.yaml file is)" - , "config-location" - , WithoutHaddocks $ view (stackYamlL.to toFilePath.to T.pack)) - , ( "PATH environment variable" - , "bin-path" - , WithoutHaddocks $ T.pack . intercalate [FP.searchPathSeparator] . view exeSearchPathL) - , ( "Install location for GHC and other core tools" - , "programs" - , WithoutHaddocks $ view (configL.to configLocalPrograms.to toFilePathNoTrailingSep.to T.pack)) - , ( "Compiler binary (e.g. ghc)" - , "compiler-exe" - , WithoutHaddocks $ T.pack . toFilePath . piCompiler ) - , ( "Directory containing the compiler binary (e.g. ghc)" - , "compiler-bin" - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . parent . 
piCompiler ) - , ( "Directory containing binaries specific to a particular compiler (e.g. intero)" - , "compiler-tools-bin" - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piToolsDir ) - , ( "Local bin dir where stack installs executables (e.g. ~/.local/bin (Unix-like OSs) or %APPDATA%\\local\\bin (Windows))" - , "local-bin" - , WithoutHaddocks $ view $ configL.to configLocalBin.to toFilePathNoTrailingSep.to T.pack) - , ( "Extra include directories" - , "extra-include-dirs" - , WithoutHaddocks $ T.intercalate ", " . map T.pack . configExtraIncludeDirs . view configL ) - , ( "Extra library directories" - , "extra-library-dirs" - , WithoutHaddocks $ T.intercalate ", " . map T.pack . configExtraLibDirs . view configL ) - , ( "Snapshot package database" - , "snapshot-pkg-db" - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piSnapDb ) - , ( "Local project package database" - , "local-pkg-db" - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piLocalDb ) - , ( "Global package database" - , "global-pkg-db" - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piGlobalDb ) - , ( "GHC_PACKAGE_PATH environment variable" - , "ghc-package-path" - , WithoutHaddocks $ \pi' -> mkGhcPackagePath True (piLocalDb pi') (piSnapDb pi') (piExtraDbs pi') (piGlobalDb pi')) - , ( "Snapshot installation root" - , "snapshot-install-root" - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piSnapRoot ) - , ( "Local project installation root" - , "local-install-root" - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piLocalRoot ) - , ( "Snapshot documentation root" - , "snapshot-doc-root" - , UseHaddocks $ \pi' -> T.pack (toFilePathNoTrailingSep (piSnapRoot pi' docDirSuffix))) - , ( "Local project documentation root" - , "local-doc-root" - , UseHaddocks $ \pi' -> T.pack (toFilePathNoTrailingSep (piLocalRoot pi' docDirSuffix))) - , ( "Local project documentation root" - , "local-hoogle-root" - , UseHaddocks $ T.pack . toFilePathNoTrailingSep . 
piHoogleRoot) - , ( "Dist work directory, relative to package directory" - , "dist-dir" - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piDistDir ) - , ( "Where HPC reports and tix files are stored" - , "local-hpc-root" - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piHpcDir ) - , ( "DEPRECATED: Use '--local-bin' instead" - , "local-bin-path" - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . configLocalBin . view configL ) - , ( "DEPRECATED: Use '--programs' instead" - , "ghc-paths" - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . configLocalPrograms . view configL ) - , ( "DEPRECATED: Use '--" <> stackRootOptionName <> "' instead" - , T.pack deprecatedStackRootOptionName - , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . view stackRootL ) - ] - -deprecatedPathKeys :: [(Text, Text)] -deprecatedPathKeys = - [ (T.pack deprecatedStackRootOptionName, T.pack stackRootOptionName) - , ("ghc-paths", "programs") - , ("local-bin-path", "local-bin") - ] +-- When printing output we generate t'Config' and pass it to the function +-- to generate an appropriate string. Trailing slashes are removed, see #506. +pathsFromConfig :: [(String, Text, Config -> Text)] +pathsFromConfig = + [ ( "User-specific global configuration file" + , T.pack stackGlobalConfigOptionName + , view (userGlobalConfigFileL . to toFilePath . to T.pack) + ) + , ( "Install location for GHC and other core tools (see 'stack ls tools' command)" + , "programs" + , view (configL . to (.localPrograms) . to toFilePathNoTrailingSep . to T.pack) + ) + , ( "Directory where Stack installs executables (e.g. ~/.local/bin (Unix-like OSs) or %APPDATA%\\local\\bin (Windows))" + , "local-bin" + , view $ configL . to (.localBin) . to toFilePathNoTrailingSep . to T.pack + ) + ] + +-- | The paths of interest to a user which require a 'EnvConfig'. 
The first +-- tuple string is used for a description that the optparse flag uses, and the +-- second string as a machine-readable key and also for @--foo@ flags. The user +-- can choose a specific path to list like @--project-root@. But really it's +-- mainly for the documentation aspect. +-- +-- When printing output we generate t'EnvConfigPathInfo' and pass it to the +-- function to generate an appropriate string. Trailing slashes are removed, see +-- #506. +pathsFromEnvConfig :: [(String, Text, UseHaddocks (EnvConfigPathInfo -> Text))] +pathsFromEnvConfig = + [ ( "Project root (derived from the project-level configuration file; \ + \stack.yaml, by default)" + , "project-root" + , WithoutHaddocks $ view (configFileL . to toProjectConfigFileRootPath) + ) + , ( "Project-level configuration file (stack.yaml, by default)" + , "config-location" + , WithoutHaddocks $ view (configFileL . to toProjectConfigFilePath) + ) + , ( "PATH environment variable" + , "bin-path" + , WithoutHaddocks $ + T.pack . intercalate [FP.searchPathSeparator] . view exeSearchPathL + ) + , ( "Compiler binary (e.g. ghc)" + , "compiler-exe" + , WithoutHaddocks $ T.pack . toFilePath . (.compiler) + ) + , ( "Directory containing the compiler binary (e.g. ghc)" + , "compiler-bin" + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . parent . (.compiler) + ) + , ( "Directory containing binaries specific to a particular compiler" + , "compiler-tools-bin" + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.toolsDir) + ) + , ( "Extra include directories" + , "extra-include-dirs" + , WithoutHaddocks $ + T.intercalate ", " . map T.pack . (.extraIncludeDirs) . view configL + ) + , ( "Extra library directories" + , "extra-library-dirs" + , WithoutHaddocks $ + T.intercalate ", " . map T.pack . (.extraLibDirs) . view configL + ) + , ( "Snapshot package database" + , "snapshot-pkg-db" + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . 
(.snapDb) + ) + , ( "Local project package database" + , "local-pkg-db" + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.localDb) + ) + , ( "Global package database" + , "global-pkg-db" + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.globalDb) + ) + , ( "GHC_PACKAGE_PATH environment variable" + , "ghc-package-path" + , WithoutHaddocks $ + \pi -> mkGhcPackagePath + True + pi.localDb + pi.snapDb + pi.extraDbs + pi.globalDb + ) + , ( "Snapshot installation root" + , "snapshot-install-root" + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.snapRoot) + ) + , ( "Local project installation root" + , "local-install-root" + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.localRoot) + ) + , ( "Snapshot documentation root" + , "snapshot-doc-root" + , UseHaddocks $ + \pi -> T.pack (toFilePathNoTrailingSep (pi.snapRoot docDirSuffix)) + ) + , ( "Local project documentation root" + , "local-doc-root" + , UseHaddocks $ + \pi -> T.pack (toFilePathNoTrailingSep (pi.localRoot docDirSuffix)) + ) + , ( "Local project documentation root" + , "local-hoogle-root" + , UseHaddocks $ T.pack . toFilePathNoTrailingSep . (.hoogleRoot) + ) + , ( "Dist work directory, relative to package directory" + , "dist-dir" + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.distDir) + ) + , ( "Where HPC reports and tix files are stored" + , "local-hpc-root" + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . 
(.hpcDir) + ) + ] + where + toProjectConfigFileRootPath :: Either (Path Abs File) (Path Abs File) -> Text + toProjectConfigFileRootPath (Left _) = + throw $ PrettyException ConfigFileNotProjectLevelBug + toProjectConfigFileRootPath (Right projectConfigFile) = + T.pack $ toFilePathNoTrailingSep $ parent projectConfigFile + toProjectConfigFilePath :: Either (Path Abs File) (Path Abs File) -> Text + toProjectConfigFilePath (Left _) = + throw $ PrettyException ConfigFileNotProjectLevelBug + toProjectConfigFilePath (Right projectConfigFile) = + T.pack $ toFilePath projectConfigFile + +-- | 'Text' equivalent of 'stackRootOptionName'. +stackRootOptionName' :: Text +stackRootOptionName' = T.pack stackRootOptionName diff --git a/src/Stack/Prelude.hs b/src/Stack/Prelude.hs index 18483f8ba5..e7c5856b26 100644 --- a/src/Stack/Prelude.hs +++ b/src/Stack/Prelude.hs @@ -1,6 +1,16 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Prelude +Description : Common types and functions. +License : BSD-3-Clause + +Common types and functions. +-} + module Stack.Prelude ( withSystemTempDir , withKeepSystemTempDir @@ -13,7 +23,6 @@ module Stack.Prelude , prompt , promptPassword , promptBool - , stackProgName , FirstTrue (..) , fromFirstTrue , defaultFirstTrue @@ -21,46 +30,148 @@ module Stack.Prelude , fromFirstFalse , defaultFirstFalse , writeBinaryFileAtomic + , bugReport + , bugPrettyReport + , blankLine + , putUtf8Builder + , putBuilder + , ppException + , prettyThrowIO + , prettyThrowM + , prettyImpureThrow + , mcons + , MungedPackageId (..) + , MungedPackageName (..) + , LibraryName (..) , module X + -- * Re-exports from the rio-pretty print package + , HasStylesUpdate (..) + , HasTerm (..) + , Pretty (..) + , PrettyException (..) 
+ , PrettyRawSnapshotLocation (..) + , StyleDoc + , Style (..) + , StyleSpec + , StylesUpdate (..) + , (<+>) + , align + , bulletedList + , debugBracket + , defaultStyles + , displayWithColor + , encloseSep + , fill + , fillSep + , foldr' + , fromPackageId + , fromPackageName + , flow + , hang + , hcat + , hsep + , indent + , line + , logLevelToStyle + , mkNarrativeList + , parens + , parseStylesUpdateFromString + , prettyDebug + , prettyDebugL + , prettyError + , prettyErrorL + , prettyGeneric + , prettyInfo + , prettyInfoL + , prettyInfoS + , prettyNote + , prettyNoteL + , prettyNoteS + , prettyWarn + , prettyWarnL + , prettyWarnNoIndent + , prettyWarnS + , punctuate + , sep + , softbreak + , softline + , spacedBulletedList + , string + , style + , vsep ) where -import RIO as X -import RIO.File as X hiding (writeBinaryFileAtomic) -import Data.Conduit as X (ConduitM, runConduit, (.|)) -import Path as X (Abs, Dir, File, Path, Rel, - toFilePath) -import Pantry as X hiding (Package (..), loadSnapshot) - -import Data.Monoid as X (First (..), Any (..), Sum (..), Endo (..)) - -import qualified Path.IO - -import System.IO.Echo (withoutInputEcho) - +import Data.Monoid as X + ( Any (..), Endo (..), First (..), Sum (..) ) +import Data.Conduit as X ( ConduitM, runConduit, (.|) ) import qualified Data.Conduit.Binary as CB import qualified Data.Conduit.List as CL -import Data.Conduit.Process.Typed (withLoggedProcess_, createSource, byteStringInput) -import RIO.Process (HasProcessContext (..), ProcessContext, setStdin, closed, getStderr, getStdout, proc, withProcessWait_, setStdout, setStderr, ProcessConfig, readProcess_, workingDirL, waitExitCode) - +import Data.Conduit.Process.Typed + ( byteStringInput, createSource, withLoggedProcess_ ) +import Data.Foldable ( Foldable(foldr') ) import qualified Data.Text.IO as T +import Distribution.Types.LibraryName ( LibraryName (..) ) +import Distribution.Types.MungedPackageId ( MungedPackageId (..) 
) +import Distribution.Types.MungedPackageName ( MungedPackageName (..) ) +import Pantry as X hiding ( Package (..), loadSnapshot ) +import Path as X + ( Abs, Dir, File, Path, Rel, toFilePath ) +import qualified Path.IO +import RIO as X +import RIO.File as X hiding ( writeBinaryFileAtomic ) +import RIO.PrettyPrint + ( HasStylesUpdate (..), HasTerm (..), Pretty (..), Style (..) + , StyleDoc, (<+>), align, blankLine, bulletedList + , debugBracket, displayWithColor, encloseSep, fill, fillSep + , flow, hang, hcat, hsep, indent, line, logLevelToStyle + , mkNarrativeList, parens, prettyDebug, prettyDebugL + , prettyError, prettyErrorL, prettyGeneric, prettyInfo + , prettyInfoL, prettyInfoS, prettyNote, prettyNoteL + , prettyNoteS, prettyWarn, prettyWarnL, prettyWarnNoIndent + , prettyWarnS, punctuate, sep, softbreak, softline + , spacedBulletedList, string, style, stylesUpdateL, useColorL + , vsep + ) +import RIO.PrettyPrint.DefaultStyles (defaultStyles) +import RIO.PrettyPrint.PrettyException + ( PrettyException (..), ppException, prettyImpureThrow + , prettyThrowIO, prettyThrowM + ) +import RIO.PrettyPrint.StylesUpdate + ( StylesUpdate (..), parseStylesUpdateFromString ) +import RIO.PrettyPrint.Types ( StyleSpec ) +import RIO.Process + ( HasProcessContext (..), ProcessConfig, ProcessContext + , closed, getStderr, getStdout, proc, readProcess_, setStderr + , setStdin, setStdout, waitExitCode, withProcessWait_ + , workingDirL + ) import qualified RIO.Text as T +import System.IO.Echo ( withoutInputEcho ) -- | Path version withSystemTempDir :: MonadUnliftIO m => String -> (Path Abs Dir -> m a) -> m a -withSystemTempDir str inner = withRunInIO $ \run -> Path.IO.withSystemTempDir str $ run . inner +withSystemTempDir str inner = withRunInIO $ \run -> + Path.IO.withSystemTempDir str $ run . inner -- | Like `withSystemTempDir`, but the temporary directory is not deleted. 
-withKeepSystemTempDir :: MonadUnliftIO m => String -> (Path Abs Dir -> m a) -> m a +withKeepSystemTempDir :: + MonadUnliftIO m + => String + -> (Path Abs Dir -> m a) + -> m a withKeepSystemTempDir str inner = withRunInIO $ \run -> do path <- Path.IO.getTempDir dir <- Path.IO.createTempDir path str run $ inner dir --- | Consume the stdout and stderr of a process feeding strict 'ByteString's to the consumers. +-- | Consume the stdout and stderr of a process feeding strict 'ByteString's to +-- the consumers. -- --- Throws a 'ReadProcessException' if unsuccessful in launching, or 'ExitCodeException' if the process itself fails. -sinkProcessStderrStdout - :: forall e o env. (HasProcessContext env, HasLogFunc env, HasCallStack) +-- Throws a 'Rio.Process.ReadProcessException' if unsuccessful in launching, or +-- 'Rio.Process.ExitCodeException' if the process itself fails. +sinkProcessStderrStdout :: + forall e o env. (HasProcessContext env, HasLogFunc env, HasCallStack) => String -- ^ Command -> [String] -- ^ Command line arguments -> ConduitM ByteString Void (RIO env) e -- ^ Sink for stderr @@ -83,49 +194,54 @@ sinkProcessStderrStdout name args sinkStderr sinkStdout = -- level. Should not be used for long-running processes or ones with -- lots of output; for that use 'sinkProcessStderrStdout'. -- --- Throws a 'ReadProcessException' if unsuccessful. -sinkProcessStdout - :: (HasProcessContext env, HasLogFunc env, HasCallStack) - => String -- ^ Command - -> [String] -- ^ Command line arguments - -> ConduitM ByteString Void (RIO env) a -- ^ Sink for stdout - -> RIO env a +-- Throws a 'Rio.Process.ReadProcessException' if unsuccessful. 
+sinkProcessStdout :: + (HasProcessContext env, HasLogFunc env, HasCallStack) + => String -- ^ Command + -> [String] -- ^ Command line arguments + -> ConduitM ByteString Void (RIO env) a -- ^ Sink for stdout + -> RIO env a sinkProcessStdout name args sinkStdout = proc name args $ \pc -> withLoggedProcess_ (setStdin closed pc) $ \p -> runConcurrently $ Concurrently (runConduit $ getStderr p .| CL.sinkNull) *> Concurrently (runConduit $ getStdout p .| sinkStdout) -logProcessStderrStdout - :: (HasCallStack, HasProcessContext env, HasLogFunc env) - => ProcessConfig stdin stdoutIgnored stderrIgnored - -> RIO env () +logProcessStderrStdout :: + (HasCallStack, HasProcessContext env, HasLogFunc env) + => ProcessConfig stdin stdoutIgnored stderrIgnored + -> RIO env () logProcessStderrStdout pc = withLoggedProcess_ pc $ \p -> - let logLines = CB.lines .| CL.mapM_ (logInfo . displayBytesUtf8) - in runConcurrently - $ Concurrently (runConduit $ getStdout p .| logLines) - *> Concurrently (runConduit $ getStderr p .| logLines) + let logLines = CB.lines .| CL.mapM_ (logInfo . displayBytesUtf8) + in runConcurrently + $ Concurrently (runConduit $ getStdout p .| logLines) + *> Concurrently (runConduit $ getStderr p .| logLines) -- | Read from the process, ignoring any output. -- --- Throws a 'ReadProcessException' exception if the process fails. -readProcessNull :: (HasProcessContext env, HasLogFunc env, HasCallStack) - => String -- ^ Command - -> [String] -- ^ Command line arguments - -> RIO env () +-- Throws a 'Rio.Process.ReadProcessException' exception if the process fails. +readProcessNull :: + (HasProcessContext env, HasLogFunc env, HasCallStack) + => String -- ^ Command + -> [String] -- ^ Command line arguments + -> RIO env () readProcessNull name args = -- We want the output to appear in any exceptions, so we capture and drop it void $ proc name args readProcess_ -- | Use the new 'ProcessContext', but retain the working directory -- from the parent environment. 
-withProcessContext :: HasProcessContext env => ProcessContext -> RIO env a -> RIO env a +withProcessContext :: + HasProcessContext env + => ProcessContext + -> RIO env a + -> RIO env a withProcessContext pcNew inner = do pcOld <- view processContextL let pcNew' = set workingDirL (view workingDirL pcOld) pcNew local (set processContextL pcNew') inner --- | Remove a trailing carriage return if present stripCR :: Text -> Text stripCR = T.dropSuffix "\r" @@ -151,67 +267,123 @@ promptPassword txt = liftIO $ do password <- withoutInputEcho T.getLine -- Since the user's newline is not echoed, one needs to be inserted. T.putStrLn "" - return password + pure password -- | Prompt the user by sending text to stdout, and collecting a line of -- input from stdin. If something other than "y" or "n" is entered, then -- print a message indicating that "y" or "n" is expected, and ask -- again. promptBool :: MonadIO m => Text -> m Bool -promptBool txt = liftIO $ do - input <- prompt txt - case input of - "y" -> return True - "n" -> return False +promptBool txt = liftIO $ + prompt txt >>= \case + "y" -> pure True + "n" -> pure False _ -> do T.putStrLn "Please press either 'y' or 'n', and then enter." promptBool txt --- | Name of the 'stack' program. --- --- NOTE: Should be defined in "Stack.Constants", but not doing so due to the --- GHC stage restrictions. -stackProgName :: String -stackProgName = "stack" -- | Like @First Bool@, but the default is @True@. -newtype FirstTrue = FirstTrue { getFirstTrue :: Maybe Bool } - deriving (Show, Eq, Ord) +newtype FirstTrue + = FirstTrue { firstTrue :: Maybe Bool } + deriving (Eq, Ord, Show) + instance Semigroup FirstTrue where FirstTrue (Just x) <> _ = FirstTrue (Just x) FirstTrue Nothing <> x = x + instance Monoid FirstTrue where mempty = FirstTrue Nothing mappend = (<>) -- | Get the 'Bool', defaulting to 'True' fromFirstTrue :: FirstTrue -> Bool -fromFirstTrue = fromMaybe True . 
getFirstTrue +fromFirstTrue = fromMaybe True . (.firstTrue) -- | Helper for filling in default values -defaultFirstTrue :: (a -> FirstTrue) -> Bool +defaultFirstTrue :: FirstTrue -> Bool defaultFirstTrue _ = True -- | Like @First Bool@, but the default is @False@. -newtype FirstFalse = FirstFalse { getFirstFalse :: Maybe Bool } - deriving (Show, Eq, Ord) +newtype FirstFalse + = FirstFalse { firstFalse :: Maybe Bool } + deriving (Eq, Ord, Show) + instance Semigroup FirstFalse where FirstFalse (Just x) <> _ = FirstFalse (Just x) FirstFalse Nothing <> x = x + instance Monoid FirstFalse where mempty = FirstFalse Nothing mappend = (<>) -- | Get the 'Bool', defaulting to 'False' fromFirstFalse :: FirstFalse -> Bool -fromFirstFalse = fromMaybe False . getFirstFalse +fromFirstFalse = fromMaybe False . (.firstFalse) -- | Helper for filling in default values -defaultFirstFalse :: (a -> FirstFalse) -> Bool +defaultFirstFalse :: FirstFalse -> Bool defaultFirstFalse _ = False -- | Write a @Builder@ to a file and atomically rename. 
writeBinaryFileAtomic :: MonadIO m => Path absrel File -> Builder -> m () writeBinaryFileAtomic fp builder = - liftIO $ - withBinaryFileAtomic (toFilePath fp) WriteMode (`hPutBuilder` builder) + liftIO $ + withBinaryFileAtomic (toFilePath fp) WriteMode (`hPutBuilder` builder) + +newtype PrettyRawSnapshotLocation + = PrettyRawSnapshotLocation RawSnapshotLocation + +instance Pretty PrettyRawSnapshotLocation where + pretty (PrettyRawSnapshotLocation (RSLCompiler compiler)) = + fromString $ T.unpack $ utf8BuilderToText $ display compiler + pretty (PrettyRawSnapshotLocation (RSLUrl url Nothing)) = + style Url (fromString $ T.unpack url) + pretty (PrettyRawSnapshotLocation (RSLUrl url (Just blob))) = + fillSep + [ style Url (fromString $ T.unpack url) + , parens $ fromString $ T.unpack $ utf8BuilderToText $ display blob + ] + pretty (PrettyRawSnapshotLocation (RSLFilePath resolved)) = + style File (fromString $ show $ resolvedRelative resolved) + pretty (PrettyRawSnapshotLocation (RSLSynonym syn)) = fromString $ show syn + +-- | Report a bug in Stack. +bugReport :: String -> String -> String +bugReport code msg = + "Error: " ++ code ++ "\n" ++ + bugDeclaration ++ " " ++ msg ++ " " ++ bugRequest + +-- | Report a pretty bug in Stack. +bugPrettyReport :: String -> StyleDoc -> StyleDoc +bugPrettyReport code msg = + "Error:" <+> fromString code + <> line + <> flow bugDeclaration <+> msg <+> flow bugRequest + +-- | Bug declaration message. +bugDeclaration :: String +bugDeclaration = "The impossible happened!" + +-- | Bug report message. +bugRequest :: String +bugRequest = "Please report this bug at Stack's repository." + +-- | Maybe cons. +mcons :: Maybe a -> [a] -> [a] +mcons ma as = maybe as (:as) ma + +-- | Write a t'Utf8Builder' to the standard output stream. +putUtf8Builder :: MonadIO m => Utf8Builder -> m () +putUtf8Builder = putBuilder . getUtf8Builder + +-- | Write a 'Builder' to the standard output stream. 
+putBuilder :: MonadIO m => Builder -> m () +putBuilder = hPutBuilder stdout + +-- | Convert a package identifier to a value of a string-like type. +fromPackageId :: IsString a => PackageIdentifier -> a +fromPackageId = fromString . packageIdentifierString + +-- | Convert a package name to a value of a string-like type. +fromPackageName :: IsString a => PackageName -> a +fromPackageName = fromString . packageNameString diff --git a/src/Stack/Query.hs b/src/Stack/Query.hs new file mode 100644 index 0000000000..a86adbc7d1 --- /dev/null +++ b/src/Stack/Query.hs @@ -0,0 +1,136 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Query +Description : Types and functions related to Stack's @query@ command. +License : BSD-3-Clause + +Types and functions related to Stack's @query@ command. +-} + +module Stack.Query + ( queryCmd + , queryBuildInfo + ) where + +import Data.Aeson ( Value (Object, Array), (.=), object ) +import qualified Data.Aeson.Key as Key +import qualified Data.Aeson.KeyMap as KeyMap +import Data.List ( isPrefixOf ) +import qualified Data.Text as T +import Data.Text.Encoding ( decodeUtf8 ) +import qualified Data.Text.IO as TIO +import Data.Text.Read ( decimal ) +import qualified Data.Vector as V +import qualified Data.Yaml as Yaml +import Path ( parent ) +import Stack.Build.Source ( projectLocalPackages ) +import Stack.Prelude +import Stack.Runners + ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) +import Stack.Types.BuildConfig ( wantedCompilerVersionL ) +import Stack.Types.Compiler ( compilerVersionText ) +import Stack.Types.EnvConfig ( HasEnvConfig, actualCompilerVersionL ) +import Stack.Types.Runner ( Runner ) +import Stack.Types.Package ( LocalPackage (..), Package (..) ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Query" module. 
+data QueryException + = SelectorNotFound ![Text] + | IndexOutOfRange ![Text] + | NoNumericSelector ![Text] + | CannotApplySelector !Value ![Text] + deriving Show + +instance Exception QueryException where + displayException (SelectorNotFound sels) = + err "[S-4419]" "Selector not found" sels + displayException (IndexOutOfRange sels) = + err "[S-8422]" "Index out of range" sels + displayException (NoNumericSelector sels) = + err "[S-4360]" "Encountered array and needed numeric selector" sels + displayException (CannotApplySelector value sels) = + err "[S-1711]" ("Cannot apply selector to " ++ show value) sels + +-- | Helper function for 'QueryException' instance of 'Show' +err :: String -> String -> [Text] -> String +err msg code sels = "Error: " ++ code ++ "\n" ++ msg ++ ": " ++ show sels + +-- | Function underlying the @stack query@ command. +queryCmd :: + [String] + -- ^ Selectors. + -> RIO Runner () +queryCmd selectors = withConfig YesReexec $ + withDefaultEnvConfig $ queryBuildInfo $ map T.pack selectors + +-- | Query information about the build and print the result to stdout in YAML +-- format. +queryBuildInfo :: + HasEnvConfig env + => [Text] -- ^ Selectors. + -> RIO env () +queryBuildInfo selectors0 = + rawBuildInfo + >>= select id selectors0 + >>= liftIO . TIO.putStrLn . addGlobalHintsComment . decodeUtf8 . Yaml.encode + where + select _ [] value = pure value + select front (sel:sels) value = + case value of + Object o -> + case KeyMap.lookup (Key.fromText sel) o of + Nothing -> throwIO $ SelectorNotFound sels' + Just value' -> cont value' + Array v -> + case decimal sel of + Right (i, "") + | i >= 0 && i < V.length v -> cont $ v V.! i + | otherwise -> throwIO $ IndexOutOfRange sels' + _ -> throwIO $ NoNumericSelector sels' + _ -> throwIO $ CannotApplySelector value sels' + where + cont = select (front . (sel:)) sels + sels' = front [sel] + -- Include comments to indicate that this portion of the "stack + -- query" API is not necessarily stable. 
+ addGlobalHintsComment + | null selectors0 = T.replace globalHintsLine ("\n" <> globalHintsComment <> globalHintsLine) + -- Append comment instead of pre-pending. The reasoning here is + -- that something *could* expect that the result of 'stack query + -- global-hints ghc-boot' is just a string literal. Seems easier + -- for one to expect the first line of the output to be the literal. + | ["global-hints"] `isPrefixOf` selectors0 = (<> ("\n" <> globalHintsComment)) + | otherwise = id + globalHintsLine = "\nglobal-hints:\n" + globalHintsComment = T.concat + [ "# Note: global-hints is experimental and may be renamed / removed in the future.\n" + , "# See https://github.com/commercialhaskell/stack/issues/3796" + ] + +-- | Get the raw build information object +rawBuildInfo :: HasEnvConfig env => RIO env Value +rawBuildInfo = do + locals <- projectLocalPackages + wantedCompiler <- + view $ wantedCompilerVersionL . to (utf8BuilderToText . display) + actualCompiler <- view $ actualCompilerVersionL . to compilerVersionText + pure $ object + [ "locals" .= Object (KeyMap.fromList $ map localToPair locals) + , "compiler" .= object + [ "wanted" .= wantedCompiler + , "actual" .= actualCompiler + ] + ] + where + localToPair lp = + (Key.fromText $ T.pack $ packageNameString p.name, value) + where + p = lp.package + value = object + [ "version" .= CabalString p.version + , "path" .= toFilePath (parent lp.cabalFP) + ] diff --git a/src/Stack/Runners.hs b/src/Stack/Runners.hs index 4ce79de2d2..e038564a1b 100644 --- a/src/Stack/Runners.hs +++ b/src/Stack/Runners.hs @@ -1,72 +1,117 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Runners +Description : Utilities for running stack commands. 
+License : BSD-3-Clause + +Utilities for running stack commands. + +Instead of using Has-style classes below, the type signatures use concrete +environments to try and avoid accidentally rerunning configuration parsing. For +example, we want @withConfig $ withConfig $ ...@ to fail. +-} --- | Utilities for running stack commands. --- --- Instead of using Has-style classes below, the type signatures use --- concrete environments to try and avoid accidentally rerunning --- configuration parsing. For example, we want @withConfig $ --- withConfig $ ...@ to fail. module Stack.Runners - ( withBuildConfig - , withEnvConfig - , withDefaultEnvConfig - , withConfig - , withGlobalProject - , withRunnerGlobal - , ShouldReexec (..) - ) where + ( withBuildConfig + , withEnvConfig + , withDefaultEnvConfig + , withConfig + , withGlobalProject + , withRunnerGlobal + , ShouldReexec (..) + ) where -import Stack.Prelude -import RIO.Process (mkDefaultProcessContext) -import RIO.Time (addUTCTime, getCurrentTime) -import Stack.Build.Target(NeedTargets(..)) +import qualified Data.ByteString.Lazy.Char8 as L8 +import RIO.Process + ( findExecutable, mkDefaultProcessContext, proc + , readProcess + ) +import RIO.Time ( addUTCTime, getCurrentTime ) +import Stack.Build.Target ( NeedTargets (..) 
) import Stack.Config + ( getInContainer, getInNixShell, loadConfig, withBuildConfig + , withNewLogFunc + ) import Stack.Constants -import Stack.DefaultColorWhen (defaultColorWhen) + ( defaultTerminalWidth, maxTerminalWidth, minTerminalWidth + , nixProgName + ) +import Stack.DefaultColorWhen ( defaultColorWhen ) import qualified Stack.Docker as Docker import qualified Stack.Nix as Nix -import Stack.Setup -import Stack.Storage.User (upgradeChecksSince, logUpgradeCheck) -import Stack.Types.Config -import Stack.Types.Docker (dockerEnable) -import Stack.Types.Nix (nixEnable) -import Stack.Types.Version (stackMinorVersion, minorVersion) -import System.Console.ANSI (hSupportsANSIWithoutEmulation) -import System.Terminal (getTerminalWidth) +import Stack.Prelude +import Stack.Setup ( setupEnv ) +import Stack.Storage.User ( logUpgradeCheck, upgradeChecksSince ) +import Stack.Types.BuildOptsCLI + ( BuildOptsCLI, defaultBuildOptsCLI ) +import Stack.Types.ColorWhen ( ColorWhen (..) ) +import Stack.Types.Config ( Config (..) ) +import Stack.Types.ConfigMonoid ( ConfigMonoid (..) ) +import Stack.Types.Docker ( DockerOpts (..) ) +import Stack.Types.EnvConfig ( EnvConfig ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.Nix ( NixOpts (..) ) +import Stack.Types.Runner + ( Runner (..), globalOptsL, reExecL, stackYamlLocL ) +import Stack.Types.StackYamlLoc ( StackYamlLoc (..) ) +import Stack.Types.Version + ( minorVersion, stackMinorVersion, stackVersion ) +import System.Console.ANSI ( hNowSupportsANSI ) +import System.Terminal ( getTerminalWidth ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Runners" module. 
+data RunnersException + = CommandInvalid + | DockerAndNixInvalid + | NixWithinDockerInvalid + | DockerWithinNixInvalid + deriving Show + +instance Exception RunnersException where + displayException CommandInvalid = + "Error: [S-7144]\n" + ++ "Cannot use this command with options which override the stack.yaml \ + \location." + displayException DockerAndNixInvalid = + "Error: [S-8314]\n" + ++ "Cannot use both Docker and Nix at the same time." + displayException NixWithinDockerInvalid = + "Error: [S-8641]\n" + ++ "Cannot use Nix from within a Docker container." + displayException DockerWithinNixInvalid = + "Error: [S-5107]\n" + ++ "Cannot use Docker from within a Nix shell." -- | Ensure that no project settings are used when running 'withConfig'. withGlobalProject :: RIO Runner a -> RIO Runner a -withGlobalProject inner = do - oldSYL <- view stackYamlLocL - case oldSYL of - SYLDefault -> local (set stackYamlLocL SYLGlobalProject) inner - _ -> throwString "Cannot use this command with options which override the stack.yaml location" +withGlobalProject inner = view stackYamlLocL >>= \case + SYLDefault -> local (set stackYamlLocL SYLGlobalProject) inner + _ -> throwIO CommandInvalid -- | Helper for 'withEnvConfig' which passes in some default arguments: -- -- * No targets are requested -- -- * Default command line build options are assumed -withDefaultEnvConfig - :: RIO EnvConfig a - -> RIO Config a +withDefaultEnvConfig :: RIO EnvConfig a -> RIO Config a withDefaultEnvConfig = withEnvConfig AllowNoTargets defaultBuildOptsCLI --- | Upgrade a 'Config' environment to an 'EnvConfig' environment by --- performing further parsing of project-specific configuration (like --- 'withBuildConfig') and then setting up a build environment --- toolchain. This is intended to be run inside a call to --- 'withConfig'. -withEnvConfig - :: NeedTargets - -> BuildOptsCLI - -> RIO EnvConfig a - -- ^ Action that uses the build config. 
If Docker is enabled for builds, - -- this will be run in a Docker container. - -> RIO Config a +-- | Upgrade a t'Config' environment to an 'EnvConfig' environment by performing +-- further parsing of project-specific configuration (like 'withBuildConfig') +-- and then setting up a build environment toolchain. This is intended to be run +-- inside a call to 'withConfig'. +withEnvConfig :: + NeedTargets + -> BuildOptsCLI + -> RIO EnvConfig a + -- ^ Action that uses the build config. If Docker is enabled for builds, + -- this will be run in a Docker container. + -> RIO Config a withEnvConfig needTargets boptsCLI inner = withBuildConfig $ do envConfig <- setupEnv needTargets boptsCLI Nothing @@ -74,55 +119,94 @@ withEnvConfig needTargets boptsCLI inner = runRIO envConfig inner -- | If the settings justify it, should we reexec inside Docker or Nix? -data ShouldReexec = YesReexec | NoReexec +data ShouldReexec + = YesReexec + | NoReexec -- | Load the configuration. Convenience function used -- throughout this module. -withConfig - :: ShouldReexec - -> RIO Config a - -> RIO Runner a +withConfig :: ShouldReexec -> RIO Config a -> RIO Runner a withConfig shouldReexec inner = - loadConfig $ \config -> do - -- If we have been relaunched in a Docker container, perform in-container initialization - -- (switch UID, etc.). We do this after first loading the configuration since it must - -- happen ASAP but needs a configuration. - view (globalOptsL.to globalDockerEntrypoint) >>= - traverse_ (Docker.entrypoint config) - runRIO config $ do - -- Catching all exceptions here, since we don't want this - -- check to ever cause Stack to stop working - shouldUpgradeCheck `catchAny` \e -> - logError ("Error when running shouldUpgradeCheck: " <> displayShow e) - case shouldReexec of - YesReexec -> reexec inner - NoReexec -> inner - --- | Perform a Docker or Nix reexec, if warranted. Otherwise run the --- inner action. 
+ loadConfig $ \config -> do + -- If we have been relaunched in a Docker container, perform in-container + -- initialization (switch UID, etc.). We do this after first loading the + -- configuration since it must happen ASAP but needs a configuration. + view (globalOptsL . to (.dockerEntrypoint)) >>= + traverse_ (Docker.entrypoint config) + runRIO config $ do + -- Catching all exceptions here, since we don't want this + -- check to ever cause Stack to stop working + shouldUpgradeCheck `catchAny` \e -> + logError $ + "Error: [S-7353]\n" <> + "Error when running shouldUpgradeCheck: " <> + displayShow e + case shouldReexec of + YesReexec -> reexec inner + NoReexec -> inner + +-- | Perform a Docker or Nix reexec, if warranted. Otherwise run the inner +-- action. reexec :: RIO Config a -> RIO Config a reexec inner = do - nixEnable' <- asks $ nixEnable . configNix - dockerEnable' <- asks $ dockerEnable . configDocker + nixEnable' <- asks $ (.nix.enable) + notifyIfNixOnPath <- asks (.notifyIfNixOnPath) + when (not nixEnable' && notifyIfNixOnPath) $ + findExecutable nixProgName >>= \case + Left _ -> pure () + Right nix -> proc nix ["--version"] $ \pc -> do + let nixProgName' = style Shell (fromString nixProgName) + muteMsg = fillSep + [ flow "To mute this message in future, set" + , style Shell (flow "notify-if-nix-on-path: false") + , flow "in Stack's configuration." 
+ ] + reportErr errMsg = prettyWarn $ + fillSep + [ nixProgName' + , flow "is on the PATH" + , parens (fillSep ["at", style File (fromString nix)]) + , flow "but Stack encountered the following error with" + , nixProgName' + , style Shell "--version" <> ":" + ] + <> blankLine + <> errMsg + <> blankLine + <> muteMsg + <> line + tryAny (readProcess pc) >>= \case + Left e -> reportErr (ppException e) + Right (ec, out, err) -> case ec of + ExitFailure _ -> reportErr $ string (L8.unpack err) + ExitSuccess -> do + let trimFinalNewline str = case reverse str of + '\n' : rest -> reverse rest + _ -> str + prettyWarn $ fillSep + [ fromString (trimFinalNewline $ L8.unpack out) + , flow "is on the PATH" + , parens (fillSep ["at", style File (fromString nix)]) + , flow "but Stack's Nix integration is disabled." + , muteMsg + ] + <> line + dockerEnable' <- asks (.docker.enable) case (nixEnable', dockerEnable') of - (True, True) -> throwString "Cannot use both Docker and Nix at the same time" + (True, True) -> throwIO DockerAndNixInvalid (False, False) -> inner -- Want to use Nix (True, False) -> do - whenM getInContainer $ throwString "Cannot use Nix from within a Docker container" - inShell <- getInNixShell - if inShell - then do - isReexec <- view reExecL - if isReexec - then inner - else throwString "In Nix shell but reExecL is False" - else Nix.runShellAndExit + whenM getInContainer $ throwIO NixWithinDockerInvalid + isReexec <- view reExecL + if isReexec + then inner + else Nix.runShellAndExit -- Want to use Docker (False, True) -> do - whenM getInNixShell $ throwString "Cannot use Docker from within a Nix shell" + whenM getInNixShell $ throwIO DockerWithinNixInvalid inContainer <- getInContainer if inContainer then do @@ -132,62 +216,73 @@ reexec inner = do else throwIO Docker.OnlyOnHostException else Docker.runContainerAndExit --- | Use the 'GlobalOpts' to create a 'Runner' and run the provided +-- | Use the t'GlobalOpts' to create a t'Runner' and run the provided -- 
action. withRunnerGlobal :: GlobalOpts -> RIO Runner a -> IO a withRunnerGlobal go inner = do - colorWhen <- - maybe defaultColorWhen pure $ - getFirst $ configMonoidColorWhen $ globalConfigMonoid go - useColor <- case colorWhen of - ColorNever -> return False - ColorAlways -> return True - ColorAuto -> fromMaybe True <$> - hSupportsANSIWithoutEmulation stderr - termWidth <- clipWidth <$> maybe (fromMaybe defaultTerminalWidth - <$> getTerminalWidth) - pure (globalTermWidth go) + useColor <- + maybe defaultColorWhen pure (getFirst go.configMonoid.colorWhen) >>= \case + ColorNever -> pure False + ColorAlways -> pure True + ColorAuto -> hNowSupportsANSI stderr + termWidth <- clipWidth <$> maybe + (fromMaybe defaultTerminalWidth <$> getTerminalWidth) + pure + go.termWidthOpt menv <- mkDefaultProcessContext - let update = globalStylesUpdate go - withNewLogFunc go useColor update $ \logFunc -> runRIO Runner - { runnerGlobalOpts = go - , runnerUseColor = useColor - , runnerLogFunc = logFunc - , runnerTermWidth = termWidth - , runnerProcessContext = menv - } inner - where clipWidth w - | w < minTerminalWidth = minTerminalWidth - | w > maxTerminalWidth = maxTerminalWidth - | otherwise = w + -- MVar used to ensure the Docker entrypoint is performed exactly once. + dockerEntrypointMVar <- newMVar False + let update = go.stylesUpdate + withNewLogFunc go useColor update $ \logFunc -> + runRIO Runner + { globalOpts = go + , useColor = useColor + , logFunc = logFunc + , termWidth = termWidth + , processContext = menv + , dockerEntrypointMVar = dockerEntrypointMVar + } inner + where + clipWidth w + | w < minTerminalWidth = minTerminalWidth + | w > maxTerminalWidth = maxTerminalWidth + | otherwise = w -- | Check if we should recommend upgrading Stack and, if so, recommend it. 
shouldUpgradeCheck :: RIO Config () shouldUpgradeCheck = do config <- ask - when (configRecommendUpgrade config) $ do + when config.recommendStackUpgrade $ do now <- getCurrentTime - let yesterday = addUTCTime (-24 * 60 * 60) now + let yesterday = addUTCTime (-(24 * 60 * 60)) now checks <- upgradeChecksSince yesterday when (checks == 0) $ do - mversion <- getLatestHackageVersion NoRequireHackageIndex "stack" UsePreferredVersions + mversion <- + getLatestHackageVersion NoRequireHackageIndex "stack" UsePreferredVersions case mversion of -- Compare the minor version so we avoid patch-level, Hackage-only releases. -- See: https://github.com/commercialhaskell/stack/pull/4729#pullrequestreview-227176315 Just (PackageIdentifierRevision _ version _) | minorVersion version > stackMinorVersion -> do - logWarn "<<<<<<<<<<<<<<<<<<" - logWarn $ - "You are currently using Stack version " <> - fromString (versionString stackVersion) <> - ", but version " <> - fromString (versionString version) <> - " is available" - logWarn "You can try to upgrade by running 'stack upgrade'" - logWarn $ - "Tired of seeing this? Add 'recommend-stack-upgrade: false' to " <> - fromString (toFilePath (configUserConfigPath config)) - logWarn ">>>>>>>>>>>>>>>>>>" - logWarn "" - logWarn "" + prettyWarn $ + fillSep + [ flow "You are currently using Stack version" + , fromString (versionString stackVersion) + , flow "but version" + , fromString (versionString version) + , flow "is available." + ] + <> blankLine + <> fillSep + [ "You can try to upgrade by running" + , style Shell (flow "stack upgrade") + ] + <> blankLine + <> fillSep + [ flow "Tired of seeing this? Add" + , style Shell (flow "recommend-stack-upgrade: false") + , "to" + , pretty config.userGlobalConfigFile <> "." 
+ ] + <> blankLine _ -> pure () logUpgradeCheck now diff --git a/src/Stack/SDist.hs b/src/Stack/SDist.hs index 375a1ac608..a27f117ad6 100644 --- a/src/Stack/SDist.hs +++ b/src/Stack/SDist.hs @@ -1,31 +1,40 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE TypeFamilies #-} --- Create a source distribution tarball + +{-| +Module : Stack.SDist +Description : Types and functions related to Stack's @sdist@ command. +License : BSD-3-Clause + +Types and functions related to Stack's @sdist@ command. +-} + module Stack.SDist - ( getSDistTarball - , checkSDistTarball - , checkSDistTarball' - , SDistOpts (..) - ) where + ( SDistOpts (..) + , sdistCmd + , getSDistTarball + , checkSDistTarball + , checkSDistTarball' + , readLocalPackage + ) where import qualified Codec.Archive.Tar as Tar import qualified Codec.Archive.Tar.Entry as Tar import qualified Codec.Compression.GZip as GZip -import Control.Applicative -import Control.Concurrent.Execute (ActionContext(..), Concurrency(..)) -import Stack.Prelude hiding (Display (..)) +import Conduit ( runConduitRes, sourceLazy, sinkFileCautious ) +import Control.Concurrent.Execute + ( ActionContext (..), Concurrency (..) 
) +import Control.Monad.Extra ( whenJust ) import qualified Data.ByteString as S -import qualified Data.ByteString.Char8 as S8 import qualified Data.ByteString.Lazy as L -import Data.Char (toLower) -import Data.Data (cast) -import Data.List -import qualified Data.List.NonEmpty as NE +import Data.Char ( toLower ) +import Data.Data ( cast ) +import qualified Data.Either.Extra as EE +import qualified Data.List as List import qualified Data.Map.Strict as Map import qualified Data.Set as Set import qualified Data.Text as T @@ -33,261 +42,402 @@ import qualified Data.Text.Encoding as T import qualified Data.Text.Encoding.Error as T import qualified Data.Text.Lazy as TL import qualified Data.Text.Lazy.Encoding as TLE -import Data.Time.Clock.POSIX -import Distribution.Package (Dependency (..)) +import Data.Time.Clock.POSIX ( getPOSIXTime, utcTimeToPOSIXSeconds ) +import Distribution.Package ( Dependency (..) ) import qualified Distribution.PackageDescription as Cabal import qualified Distribution.PackageDescription.Check as Check import qualified Distribution.PackageDescription.Parsec as Cabal -import Distribution.PackageDescription.PrettyPrint (showGenericPackageDescription) -import qualified Distribution.Types.UnqualComponentName as Cabal -import Distribution.Version (simplifyVersionRange, orLaterVersion, earlierVersion, hasUpperBound, hasLowerBound) -import Path -import Path.IO hiding (getModificationTime, getPermissions, withSystemTempDir) -import RIO.PrettyPrint -import Stack.Build (mkBaseConfigOpts, build, buildLocalTargets) +import Distribution.PackageDescription.PrettyPrint + ( showGenericPackageDescription ) +import Distribution.Simple.Utils ( cabalVersion ) +import Distribution.Version + ( earlierVersion, hasLowerBound, hasUpperBound, isAnyVersion + , orLaterVersion, simplifyVersionRange + ) +import Path ( (), parent, parseRelDir, parseRelFile ) +import Path.IO ( ensureDir, resolveDir' ) +import RIO.NonEmpty ( nonEmpty ) +import qualified RIO.NonEmpty as NE 
+import Stack.Build ( mkBaseConfigOpts, build, buildLocalTargets ) import Stack.Build.Execute -import Stack.Build.Installed -import Stack.Build.Source (projectLocalPackages) -import Stack.Types.GhcPkgId -import Stack.Package -import Stack.SourceMap -import Stack.Types.Build -import Stack.Types.Config + ( ExcludeTHLoading (..), KeepOutputOpen (..) ) +import Stack.Build.ExecuteEnv ( withExecuteEnv, withSingleContext ) +import Stack.Build.Installed ( getInstalled, toInstallMap ) +import Stack.Build.Source ( projectLocalPackages ) +import Stack.BuildOpts ( defaultBuildOpts ) +import Stack.Constants ( stackProgName, stackProgName' ) +import Stack.Constants.Config ( distDirFromDir ) +import Stack.Package ( resolvePackage, resolvePackageDescription ) +import Stack.Prelude +import Stack.Runners + ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) +import Stack.SourceMap ( mkProjectPackage ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..), configFileL ) +import Stack.Types.BuildOpts ( BuildOpts (..) ) +import Stack.Types.BuildOptsCLI ( defaultBuildOptsCLI ) +import Stack.Types.Config ( Config (..), HasConfig (..) ) +import Stack.Types.EnvConfig + ( EnvConfig (..), HasEnvConfig (..), actualCompilerVersionL ) +import Stack.Types.GhcPkgId ( GhcPkgId ) +import Stack.Types.Installed + ( InstallMap, Installed (..), InstalledMap + , InstalledLibraryInfo (..), installedVersion + ) import Stack.Types.Package + ( LocalPackage (..), Package (..), PackageConfig (..) + , packageIdentifier + ) +import Stack.Types.Plan ( TaskType (..) ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.PvpBounds ( PvpBounds (..), PvpBoundsType (..) ) +import Stack.Types.Runner ( HasRunner, Runner ) +import Stack.Types.SDistOpts ( SDistOpts (..) ) import Stack.Types.SourceMap + ( CommonPackage (..), ProjectPackage (..), SMWanted (..) + , SourceMap (..), ppRoot + ) +import qualified Stack.Types.SourceMap as SourceMap ( SourceMap (..) 
) import Stack.Types.Version -import System.Directory (getModificationTime, getPermissions) + ( intersectVersionRanges, nextMajorVersion ) +import System.Directory + ( copyFile, createDirectoryIfMissing, executable + , getModificationTime, getPermissions + ) import qualified System.FilePath as FP --- | Special exception to throw when you want to fail because of bad results --- of package check. - -data SDistOpts = SDistOpts - { sdoptsDirsToWorkWith :: [String] - -- ^ Directories to package - , sdoptsPvpBounds :: Maybe PvpBounds - -- ^ PVP Bounds overrides - , sdoptsIgnoreCheck :: Bool - -- ^ Whether to ignore check of the package for common errors - , sdoptsBuildTarball :: Bool - -- ^ Whether to build the tarball - , sdoptsTarPath :: Maybe FilePath - -- ^ Where to copy the tarball - } - -newtype CheckException +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.SDist" module. +data SDistPrettyException = CheckException (NonEmpty Check.PackageCheck) - deriving (Typeable) + | CabalFilePathsInconsistentBug (Path Abs File) (Path Abs File) + | ToTarPathException String + deriving Show -instance Exception CheckException +instance Pretty SDistPrettyException where + pretty (CheckException xs) = + "[S-6439]" + <> line + <> flow "Package check reported the following errors:" + <> line + <> bulletedList (map (string . show) (NE.toList xs) :: [StyleDoc]) + pretty (CabalFilePathsInconsistentBug cabalFP cabalFP') = + "[S-9595]" + <> line + <> fillSep + [ flow "The impossible happened! Two Cabal file paths are \ + \inconsistent:" + , pretty cabalFP + , "and" + , pretty cabalFP' <> "." + ] + pretty (ToTarPathException e) = + "[S-7875]" + <> line + <> string e -instance Show CheckException where - show (CheckException xs) = - "Package check reported the following errors:\n" ++ - (intercalate "\n" . fmap show . 
NE.toList $ xs) +instance Exception SDistPrettyException --- | Given the path to a local package, creates its source --- distribution tarball. +-- | Function underlying the @stack sdist@ command. +sdistCmd :: SDistOpts -> RIO Runner () +sdistCmd sdistOpts = + withConfig YesReexec $ withDefaultEnvConfig $ do + -- If no directories are specified, build all sdist tarballs. + dirs' <- if null sdistOpts.dirsToWorkWith + then do + dirs <- view $ + buildConfigL . to (map ppRoot . Map.elems . (.smWanted.project)) + when (null dirs) $ do + configFile <- view configFileL + -- We are indifferent as to whether the configuration file is a + -- user-specific global or a project-level one. + let eitherConfigFile = EE.fromEither configFile + prettyErrorL + [ style Shell "stack sdist" + , flow "expects a list of targets, and otherwise defaults to all \ + \of the project's packages. However, the configuration at" + , pretty eitherConfigFile + , flow "contains no packages, so no sdist tarballs will be \ + \generated." + ] + exitFailure + pure dirs + else mapM resolveDir' sdistOpts.dirsToWorkWith + forM_ dirs' $ \dir -> do + (tarName, tarBytes, _mcabalRevision) <- + getSDistTarball sdistOpts.pvpBounds dir + distDir <- distDirFromDir dir + tarPath <- (distDir ) <$> parseRelFile tarName + ensureDir (parent tarPath) + runConduitRes $ + sourceLazy tarBytes .| + sinkFileCautious (toFilePath tarPath) + prettyInfoL + [flow "Wrote sdist-format compressed archive to" + , pretty tarPath <> "." + ] + checkSDistTarball sdistOpts tarPath + forM_ sdistOpts.tarPath $ copyTarToTarPath tarPath tarName + where + copyTarToTarPath tarPath tarName targetDir = liftIO $ do + let targetTarPath = targetDir FP. tarName + createDirectoryIfMissing True $ FP.takeDirectory targetTarPath + copyFile (toFilePath tarPath) targetTarPath + +-- | Given the path to a package directory, creates a source distribution +-- tarball for the package. 
-- --- While this yields a 'FilePath', the name of the tarball, this --- tarball is not written to the disk and instead yielded as a lazy --- bytestring. -getSDistTarball - :: HasEnvConfig env - => Maybe PvpBounds -- ^ Override Config value - -> Path Abs Dir -- ^ Path to local package - -> RIO env (FilePath, L.ByteString, Maybe (PackageIdentifier, L.ByteString)) - -- ^ Filename, tarball contents, and option cabal file revision to upload +-- While this yields a 'FilePath', the name of the tarball, this tarball is not +-- written to the disk and instead yielded as a lazy bytestring. +getSDistTarball :: + HasEnvConfig env + => Maybe PvpBounds + -- ^ Override Config value + -> Path Abs Dir + -- ^ Path to package directory + -> RIO + env + ( FilePath + , L.ByteString + , Maybe (PackageIdentifier, L.ByteString) + ) + -- ^ Filename, tarball contents, and option Cabal file revision to upload getSDistTarball mpvpBounds pkgDir = do - config <- view configL - let PvpBounds pvpBounds asRevision = fromMaybe (configPvpBounds config) mpvpBounds - tweakCabal = pvpBounds /= PvpBoundsNone - pkgFp = toFilePath pkgDir - lp <- readLocalPackage pkgDir - forM_ (packageSetupDeps (lpPackage lp)) $ \customSetupDeps -> - case NE.nonEmpty (map (T.pack . 
packageNameString) (Map.keys customSetupDeps)) of - Just nonEmptyDepTargets -> do - eres <- buildLocalTargets nonEmptyDepTargets - case eres of - Left err -> - logError $ "Error building custom-setup dependencies: " <> displayShow err - Right _ -> - return () - Nothing -> - logWarn "unexpected empty custom-setup dependencies" - sourceMap <- view $ envConfigL.to envConfigSourceMap - - installMap <- toInstallMap sourceMap - (installedMap, _globalDumpPkgs, _snapshotDumpPkgs, _localDumpPkgs) <- - getInstalled installMap - let deps = Map.fromList [ (pid, ghcPkgId) - | (_, Library pid ghcPkgId _) <- Map.elems installedMap] - - logInfo $ "Getting file list for " <> fromString pkgFp - (fileList, cabalfp) <- getSDistFileList lp deps - logInfo $ "Building sdist tarball for " <> fromString pkgFp - files <- normalizeTarballPaths (map (T.unpack . stripCR . T.pack) (lines fileList)) - - -- We're going to loop below and eventually find the cabal - -- file. When we do, we'll upload this reference, if the - -- mpvpBounds value indicates that we should be uploading a cabal - -- file revision. - cabalFileRevisionRef <- liftIO (newIORef Nothing) + config <- view configL + let PvpBounds pvpBounds asRevision = + fromMaybe config.pvpBounds mpvpBounds + tweakCabal = pvpBounds /= PvpBoundsNone + pkgFp = toFilePath pkgDir + lp <- readLocalPackage pkgDir + forM_ lp.package.setupDeps $ \customSetupDeps -> + case nonEmpty (map (T.pack . packageNameString) (Map.keys customSetupDeps)) of + Just nonEmptyDepTargets -> + buildLocalTargets nonEmptyDepTargets >>= \case + Left err -> + logError $ + "Error: [S-8399]\n" <> + "Error building custom-setup dependencies: " <> + displayShow err + Right _ -> + pure () + Nothing -> + prettyWarnS "unexpected empty custom-setup dependencies." + sourceMap <- view $ envConfigL . 
to (.sourceMap) + installMap <- toInstallMap sourceMap + (installedMap, _globalDumpPkgs, _snapshotDumpPkgs, _localDumpPkgs) <- + getInstalled installMap + let deps = Map.fromList + [ (pid, libInfo.ghcPkgId) + | (_, Library pid libInfo) <- Map.elems installedMap] + prettyInfoL + [ flow "Getting the file list for" + , style File (fromString pkgFp) <> "." + ] + (fileList, cabalFP) <- getSDistFileList lp deps + prettyInfoL + [ flow "Building a compressed archive file in the sdist format for" + , style File (fromString pkgFp) <> "." + ] + files <- + normalizeTarballPaths (map (T.unpack . stripCR . T.pack) (lines fileList)) + -- We're going to loop below and eventually find the Cabal file. When we do, + -- we'll upload this reference, if the mpvpBounds value indicates that we + -- should be uploading a Cabal file revision. + cabalFileRevisionRef <- liftIO (newIORef Nothing) + -- NOTE: Could make this use lazy I/O to only read files as needed for upload + -- (both GZip.compress and Tar.write are lazy). However, it seems less error + -- prone and more predictable to read everything in at once, so that's what + -- we're doing for now: + let tarPath isDir fp = + case Tar.toTarPath isDir (pkgIdName FP. fp) of + Left e -> prettyThrowIO $ ToTarPathException e + Right tp -> pure tp + packWith f isDir fp = liftIO $ f (pkgFp FP. fp) =<< tarPath isDir fp + packDir = packWith Tar.packDirectoryEntry True + packFile fp + -- This is a Cabal file, we're going to tweak it, but only tweak it as a + -- revision. + | tweakCabal && isCabalFp fp && asRevision = do + lbsIdent <- getCabalLbs pvpBounds (Just 1) cabalFP sourceMap + liftIO (writeIORef cabalFileRevisionRef (Just lbsIdent)) + packWith packFileEntry False fp + -- Same, except we'll include the Cabal file in the original tarball + -- upload. 
+ | tweakCabal && isCabalFp fp = do + (_ident, lbs) <- getCabalLbs pvpBounds Nothing cabalFP sourceMap + currTime <- liftIO getPOSIXTime -- Seconds from UNIX epoch + tp <- liftIO $ tarPath False fp + pure $ (Tar.fileEntry tp lbs) { Tar.entryTime = floor currTime } + | otherwise = packWith packFileEntry False fp + isCabalFp fp = toFilePath pkgDir FP. fp == toFilePath cabalFP + tarName = pkgIdName FP.<.> "tar.gz" + pkgIdName = packageIdentifierString pkgId + pkgId = packageIdentifier lp.package + dirEntries <- mapM packDir (dirsFromFiles files) + fileEntries <- mapM packFile files + mcabalFileRevision <- liftIO (readIORef cabalFileRevisionRef) + pure + ( tarName + , GZip.compress (Tar.write (dirEntries ++ fileEntries)) + , mcabalFileRevision + ) - -- NOTE: Could make this use lazy I/O to only read files as needed - -- for upload (both GZip.compress and Tar.write are lazy). - -- However, it seems less error prone and more predictable to read - -- everything in at once, so that's what we're doing for now: - let tarPath isDir fp = either throwString return - (Tar.toTarPath isDir (forceUtf8Enc (pkgId FP. fp))) - -- convert a String of proper characters to a String of bytes - -- in UTF8 encoding masquerading as characters. This is - -- necessary for tricking the tar package into proper - -- character encoding. - forceUtf8Enc = S8.unpack . T.encodeUtf8 . T.pack - packWith f isDir fp = liftIO $ f (pkgFp FP. fp) =<< tarPath isDir fp - packDir = packWith Tar.packDirectoryEntry True - packFile fp - -- This is a cabal file, we're going to tweak it, but only - -- tweak it as a revision. - | tweakCabal && isCabalFp fp && asRevision = do - lbsIdent <- getCabalLbs pvpBounds (Just 1) cabalfp sourceMap - liftIO (writeIORef cabalFileRevisionRef (Just lbsIdent)) - packWith packFileEntry False fp - -- Same, except we'll include the cabal file in the - -- original tarball upload. 
- | tweakCabal && isCabalFp fp = do - (_ident, lbs) <- getCabalLbs pvpBounds Nothing cabalfp sourceMap - currTime <- liftIO getPOSIXTime -- Seconds from UNIX epoch - tp <- liftIO $ tarPath False fp - return $ (Tar.fileEntry tp lbs) { Tar.entryTime = floor currTime } - | otherwise = packWith packFileEntry False fp - isCabalFp fp = toFilePath pkgDir FP. fp == toFilePath cabalfp - tarName = pkgId FP.<.> "tar.gz" - pkgId = packageIdentifierString (packageIdentifier (lpPackage lp)) - dirEntries <- mapM packDir (dirsFromFiles files) - fileEntries <- mapM packFile files - mcabalFileRevision <- liftIO (readIORef cabalFileRevisionRef) - return (tarName, GZip.compress (Tar.write (dirEntries ++ fileEntries)), mcabalFileRevision) - --- | Get the PVP bounds-enabled version of the given cabal file -getCabalLbs :: HasEnvConfig env - => PvpBoundsType - -> Maybe Int -- ^ optional revision - -> Path Abs File -- ^ cabal file - -> SourceMap - -> RIO env (PackageIdentifier, L.ByteString) -getCabalLbs pvpBounds mrev cabalfp sourceMap = do - (gpdio, _name, cabalfp') <- loadCabalFilePath (parent cabalfp) - gpd <- liftIO $ gpdio NoPrintWarnings - unless (cabalfp == cabalfp') - $ error $ "getCabalLbs: cabalfp /= cabalfp': " ++ show (cabalfp, cabalfp') - installMap <- toInstallMap sourceMap - (installedMap, _, _, _) <- getInstalled installMap - let internalPackages = Set.fromList $ - gpdPackageName gpd : - map (Cabal.unqualComponentNameToPackageName . 
fst) (Cabal.condSubLibraries gpd) - gpd' = gtraverseT (addBounds internalPackages installMap installedMap) gpd - gpd'' = - case mrev of - Nothing -> gpd' - Just rev -> gpd' - { Cabal.packageDescription - = (Cabal.packageDescription gpd') - { Cabal.customFieldsPD - = (("x-revision", show rev):) - $ filter (\(x, _) -> map toLower x /= "x-revision") - $ Cabal.customFieldsPD - $ Cabal.packageDescription gpd' - } - } - ident = Cabal.package $ Cabal.packageDescription gpd'' - -- Sanity rendering and reparsing the input, to ensure there are no - -- cabal bugs, since there have been bugs here before, and currently - -- are at the time of writing: - -- - -- https://github.com/haskell/cabal/issues/1202 - -- https://github.com/haskell/cabal/issues/2353 - -- https://github.com/haskell/cabal/issues/4863 (current issue) - let roundtripErrs = - [ flow "Bug detected in Cabal library. ((parse . render . parse) === id) does not hold for the cabal file at" - <+> pretty cabalfp - , "" - ] - (_warnings, eres) = Cabal.runParseResult - $ Cabal.parseGenericPackageDescription - $ T.encodeUtf8 - $ T.pack - $ showGenericPackageDescription gpd - case eres of - Right roundtripped - | roundtripped == gpd -> return () - | otherwise -> do - prettyWarn $ vsep $ roundtripErrs ++ - [ "This seems to be fixed in development versions of Cabal, but at time of writing, the fix is not in any released versions." - , "" - , "Please see this GitHub issue for status:" <+> style Url "https://github.com/commercialhaskell/stack/issues/3549" - , "" - , fillSep - [ flow "If the issue is closed as resolved, then you may be able to fix this by upgrading to a newer version of stack via" - , style Shell "stack upgrade" - , flow "for latest stable version or" - , style Shell "stack upgrade --git" - , flow "for the latest development version." 
- ] - , "" - , fillSep - [ flow "If the issue is fixed, but updating doesn't solve the problem, please check if there are similar open issues, and if not, report a new issue to the stack issue tracker, at" - , style Url "https://github.com/commercialhaskell/stack/issues/new" - ] - , "" - , flow "If the issue is not fixed, feel free to leave a comment on it indicating that you would like it to be fixed." - , "" - ] - Left (_version, errs) -> do - prettyWarn $ vsep $ roundtripErrs ++ - [ flow "In particular, parsing the rendered cabal file is yielding a parse error. Please check if there are already issues tracking this, and if not, please report new issues to the stack and cabal issue trackers, via" - , bulletedList - [ style Url "https://github.com/commercialhaskell/stack/issues/new" - , style Url "https://github.com/haskell/cabal/issues/new" - ] - , flow $ "The parse error is: " ++ unlines (map show (toList errs)) - , "" - ] - return - ( ident - , TLE.encodeUtf8 $ TL.pack $ showGenericPackageDescription gpd'' - ) - where - addBounds :: Set PackageName -> InstallMap -> InstalledMap -> Dependency -> Dependency - addBounds internalPackages installMap installedMap dep@(Dependency name range s) = - if name `Set.member` internalPackages - then dep - else case foundVersion of - Nothing -> dep - Just version -> Dependency name (simplifyVersionRange - $ (if toAddUpper && not (hasUpperBound range) then addUpper version else id) - $ (if toAddLower && not (hasLowerBound range) then addLower version else id) - range) s - where - foundVersion = - case Map.lookup name installMap of - Just (_, version) -> Just version - Nothing -> - case Map.lookup name installedMap of - Just (_, installed) -> Just (installedVersion installed) - Nothing -> Nothing +-- | Get the PVP bounds-enabled version of the given Cabal file +getCabalLbs :: + HasEnvConfig env + => PvpBoundsType + -> Maybe Int -- ^ optional revision + -> Path Abs File -- ^ Cabal file + -> SourceMap + -> RIO env 
(PackageIdentifier, L.ByteString) +getCabalLbs pvpBounds mrev cabalFP sourceMap = do + (gpdio, _name, cabalFP') <- + loadCabalFilePath (Just stackProgName') (parent cabalFP) + gpd <- liftIO $ gpdio NoPrintWarnings + unless (cabalFP == cabalFP') $ + prettyThrowIO $ CabalFilePathsInconsistentBug cabalFP cabalFP' + installMap <- toInstallMap sourceMap + (installedMap, _, _, _) <- getInstalled installMap + let subLibPackages = Set.fromList $ + gpdPackageName gpd + : map + (Cabal.unqualComponentNameToPackageName . fst) + (Cabal.condSubLibraries gpd) + gpd' = gtraverseT (addBounds subLibPackages installMap installedMap) gpd + gpd'' = + case mrev of + Nothing -> gpd' + Just rev -> gpd' + { Cabal.packageDescription + = (Cabal.packageDescription gpd') + { Cabal.customFieldsPD + = (("x-revision", show rev):) + $ filter (\(x, _) -> map toLower x /= "x-revision") + $ Cabal.customFieldsPD + $ Cabal.packageDescription gpd' + } + } + ident = Cabal.package $ Cabal.packageDescription gpd'' + -- Sanity rendering and reparsing the input, to ensure there are no Cabal + -- bugs, since there have been bugs here before, and currently are at the time + -- of writing: + -- + -- https://github.com/haskell/cabal/issues/1202 + -- https://github.com/haskell/cabal/issues/2353 + -- https://github.com/haskell/cabal/issues/4863 (current issue) + let roundtripErrs = + fillSep + [ flow "Bug detected in Cabal library. ((parse . render . parse) \ + \=== id) does not hold for the Cabal file at" + , pretty cabalFP + ] + <> blankLine + (_warnings, eres) = Cabal.runParseResult + $ Cabal.parseGenericPackageDescription + $ T.encodeUtf8 + $ T.pack + $ showGenericPackageDescription gpd + case eres of + Right roundtripped + | roundtripped == gpd -> pure () + | otherwise -> prettyWarn $ + roundtripErrs + <> flow "This seems to be fixed in development versions of Cabal, \ + \but at time of writing, the fix is not in any released \ + \versions." 
+ <> blankLine + <> fillSep + [ flow "Please see this GitHub issue for status:" + , style Url "https://github.com/commercialhaskell/stack/issues/3549" + ] + <> blankLine + <> fillSep + [ flow "If the issue is closed as resolved, then you may be \ + \able to fix this by upgrading to a newer version of \ + \Stack via" + , style Shell "stack upgrade" + , flow "for latest stable version or" + , style Shell "stack upgrade --git" + , flow "for the latest development version." + ] + <> blankLine + <> fillSep + [ flow "If the issue is fixed, but updating doesn't solve the \ + \problem, please check if there are similar open \ + \issues, and if not, report a new issue to the Stack \ + \issue tracker, at" + , style Url "https://github.com/commercialhaskell/stack/issues/new" + ] + <> blankLine + <> flow "If the issue is not fixed, feel free to leave a comment \ + \on it indicating that you would like it to be fixed." + <> blankLine + Left (_version, errs) -> prettyWarn $ + roundtripErrs + <> flow "In particular, parsing the rendered Cabal file is yielding a \ + \parse error. 
Please check if there are already issues \ + \tracking this, and if not, please report new issues to the \ + \Stack and Cabal issue trackers, via" + <> line + <> bulletedList + [ style Url "https://github.com/commercialhaskell/stack/issues/new" + , style Url "https://github.com/haskell/cabal/issues/new" + ] + <> line + <> flow ("The parse error is: " <> unlines (map show (toList errs))) + <> blankLine + pure + ( ident + , TLE.encodeUtf8 $ TL.pack $ showGenericPackageDescription gpd'' + ) + where + addBounds :: + Set PackageName + -> InstallMap + -> InstalledMap + -> Dependency + -> Dependency + addBounds subLibPackages installMap installedMap dep = + if name `Set.member` subLibPackages + then dep + else case foundVersion of + Nothing -> dep + Just version -> Dependency + name + ( simplifyVersionRange + $ ( if toAddUpper && not (hasUpperBound range) + then addUpper version + else id + ) + -- From Cabal-3.4.0.0, 'hasLowerBound isAnyVersion' is 'True'. + $ ( if toAddLower + && (isAnyVersion range || not (hasLowerBound range)) + then addLower version + else id + ) + range + ) + s + where + Dependency name range s = dep + foundVersion = + case Map.lookup name installMap of + Just (_, version) -> Just version + Nothing -> + case Map.lookup name installedMap of + Just (_, installed) -> Just (installedVersion installed) + Nothing -> Nothing - addUpper version = intersectVersionRanges - (earlierVersion $ nextMajorVersion version) - addLower version = intersectVersionRanges (orLaterVersion version) + addUpper version = intersectVersionRanges + (earlierVersion $ nextMajorVersion version) + addLower version = intersectVersionRanges (orLaterVersion version) - (toAddLower, toAddUpper) = - case pvpBounds of - PvpBoundsNone -> (False, False) - PvpBoundsUpper -> (False, True) - PvpBoundsLower -> (True, False) - PvpBoundsBoth -> (True, True) + (toAddLower, toAddUpper) = + case pvpBounds of + PvpBoundsNone -> (False, False) + PvpBoundsUpper -> (False, True) + PvpBoundsLower -> (True, 
False) + PvpBoundsBoth -> (True, True) -- | Traverse a data type. gtraverseT :: (Data a,Typeable b) => (Typeable b => b -> b) -> a -> a @@ -296,153 +446,147 @@ gtraverseT f = Nothing -> gtraverseT f x Just b -> fromMaybe x (cast (f b))) --- | Read in a 'LocalPackage' config. This makes some default decisions --- about 'LocalPackage' fields that might not be appropriate for other +-- | Read in a t'LocalPackage' config. This makes some default decisions +-- about v'LocalPackage' fields that might not be appropriate for other -- use-cases. readLocalPackage :: HasEnvConfig env => Path Abs Dir -> RIO env LocalPackage readLocalPackage pkgDir = do - config <- getDefaultPackageConfig - (gpdio, _, cabalfp) <- loadCabalFilePath pkgDir - gpd <- liftIO $ gpdio YesPrintWarnings - let package = resolvePackage config gpd - return LocalPackage - { lpPackage = package - , lpWanted = False -- HACK: makes it so that sdist output goes to a log instead of a file. - , lpCabalFile = cabalfp - -- NOTE: these aren't the 'correct values, but aren't used in - -- the usage of this function in this module. - , lpTestDeps = Map.empty - , lpBenchDeps = Map.empty - , lpTestBench = Nothing - , lpBuildHaddocks = False - , lpForceDirty = False - , lpDirtyFiles = pure Nothing - , lpNewBuildCaches = pure Map.empty - , lpComponentFiles = pure Map.empty - , lpComponents = Set.empty - , lpUnbuildable = Set.empty - } + config <- getDefaultPackageConfig + (gpdio, _, cabalFP) <- loadCabalFilePath (Just stackProgName') pkgDir + gpd <- liftIO $ gpdio YesPrintWarnings + let package = resolvePackage config gpd + pure LocalPackage + { package + , wanted = False -- HACK: makes it so that sdist output goes to a log + -- instead of a file. + , cabalFP + -- NOTE: these aren't the 'correct' values, but aren't used in the usage of + -- this function in this module. 
+ , testBench = Nothing + , buildHaddocks = False + , forceDirty = False + , dirtyFiles = pure Nothing + , newBuildCaches = pure Map.empty + , componentFiles = pure Map.empty + , components = Set.empty + , unbuildable = Set.empty + } --- | Returns a newline-separate list of paths, and the absolute path to the .cabal file. -getSDistFileList :: HasEnvConfig env => LocalPackage -> Map PackageIdentifier GhcPkgId -> RIO env (String, Path Abs File) +-- | Returns a newline-separate list of paths, and the absolute path to the +-- Cabal file. +getSDistFileList :: + HasEnvConfig env + => LocalPackage + -> Map PackageIdentifier GhcPkgId + -> RIO env (String, Path Abs File) getSDistFileList lp deps = - withSystemTempDir (stackProgName <> "-sdist") $ \tmpdir -> do - let bopts = defaultBuildOpts - let boptsCli = defaultBuildOptsCLI - baseConfigOpts <- mkBaseConfigOpts boptsCli - locals <- projectLocalPackages - withExecuteEnv bopts boptsCli baseConfigOpts locals - [] [] [] Nothing -- provide empty list of globals. This is a hack around custom Setup.hs files - $ \ee -> - withSingleContext ac ee task deps (Just "sdist") $ \_package cabalfp _pkgDir cabal _announce _outputType -> do - let outFile = toFilePath tmpdir FP. 
"source-files-list" - cabal CloseOnException KeepTHLoading ["sdist", "--list-sources", outFile] - contents <- liftIO (S.readFile outFile) - return (T.unpack $ T.decodeUtf8With T.lenientDecode contents, cabalfp) - where - package = lpPackage lp - ac = ActionContext Set.empty [] ConcurrencyAllowed - task = Task - { taskProvides = PackageIdentifier (packageName package) (packageVersion package) - , taskType = TTLocalMutable lp - , taskConfigOpts = TaskConfigOpts - { tcoMissing = Set.empty - , tcoOpts = \_ -> ConfigureOpts [] [] - } - , taskBuildHaddock = False - , taskPresent = Map.empty - , taskAllInOne = True - , taskCachePkgSrc = CacheSrcLocal (toFilePath (parent $ lpCabalFile lp)) - , taskAnyMissing = True - , taskBuildTypeConfig = False - } + withSystemTempDir (stackProgName <> "-sdist") $ \tmpdir -> do + let bopts = defaultBuildOpts + let boptsCli = defaultBuildOptsCLI + baseConfigOpts <- mkBaseConfigOpts boptsCli + locals <- projectLocalPackages + withExecuteEnv bopts boptsCli baseConfigOpts locals + [] [] [] Nothing -- provide empty list of globals. This is a hack around + -- custom Setup.hs files + $ \ee -> + withSingleContext ac ee taskType deps (Just "sdist") $ + \_package cabalFP _pkgDir cabal _announce _outputType -> do + let outFile = toFilePath tmpdir FP. "source-files-list" + cabal + CloseOnException + KeepTHLoading + ["sdist", "--list-sources", outFile] + contents <- liftIO (S.readFile outFile) + pure (T.unpack $ T.decodeUtf8With T.lenientDecode contents, cabalFP) + where + ac = ActionContext Set.empty [] ConcurrencyAllowed + taskType = TTLocalMutable lp -normalizeTarballPaths :: HasRunner env => [FilePath] -> RIO env [FilePath] +normalizeTarballPaths :: + (HasRunner env, HasTerm env) + => [FilePath] + -> RIO env [FilePath] normalizeTarballPaths fps = do - -- TODO: consider whether erroring out is better - otherwise the - -- user might upload an incomplete tar? 
- unless (null outsideDir) $ - logWarn $ - "Warning: These files are outside of the package directory, and will be omitted from the tarball: " <> - displayShow outsideDir - return (nubOrd files) - where - (outsideDir, files) = partitionEithers (map pathToEither fps) - pathToEither fp = maybe (Left fp) Right (normalizePath fp) + -- TODO: consider whether erroring out is better - otherwise the user might + -- upload an incomplete tar? + unless (null outsideDir) $ + prettyWarn $ + flow "These files are outside of the package directory, and will be \ + \omitted from the tarball:" + <> line + <> bulletedList (map (style File . fromString) outsideDir) + pure (nubOrd files) + where + (outsideDir, files) = partitionEithers (map pathToEither fps) + pathToEither fp = maybe (Left fp) Right (normalizePath fp) normalizePath :: FilePath -> Maybe FilePath normalizePath = fmap FP.joinPath . go . FP.splitDirectories . FP.normalise - where - go [] = Just [] - go ("..":_) = Nothing - go (_:"..":xs) = go xs - go (x:xs) = (x :) <$> go xs + where + go [] = Just [] + go ("..":_) = Nothing + go (_:"..":xs) = go xs + go (x:xs) = (x :) <$> go xs dirsFromFiles :: [FilePath] -> [FilePath] dirsFromFiles dirs = Set.toAscList (Set.delete "." results) - where - results = foldl' (\s -> go s . FP.takeDirectory) Set.empty dirs - go s x - | Set.member x s = s - | otherwise = go (Set.insert x s) (FP.takeDirectory x) + where + results = foldl' (\s -> go s . FP.takeDirectory) Set.empty dirs + go s x + | Set.member x s = s + | otherwise = go (Set.insert x s) (FP.takeDirectory x) --- | Check package in given tarball. This will log all warnings --- and will throw an exception in case of critical errors. +-- | Check package in given tarball. This will log all warnings and will throw +-- an exception in case of critical errors. -- -- Note that we temporarily decompress the archive to analyze it. 
-checkSDistTarball - :: HasEnvConfig env +checkSDistTarball :: + HasEnvConfig env => SDistOpts -- ^ The configuration of what to check -> Path Abs File -- ^ Absolute path to tarball -> RIO env () checkSDistTarball opts tarball = withTempTarGzContents tarball $ \pkgDir' -> do - pkgDir <- (pkgDir' ) `liftM` - (parseRelDir . FP.takeBaseName . FP.takeBaseName . toFilePath $ tarball) - -- ^ drop ".tar" ^ drop ".gz" - when (sdoptsBuildTarball opts) (buildExtractedTarball ResolvedPath - { resolvedRelative = RelFilePath "this-is-not-used" -- ugly hack - , resolvedAbsolute = pkgDir - }) - unless (sdoptsIgnoreCheck opts) (checkPackageInExtractedTarball pkgDir) + pkgDir <- (pkgDir' ) <$> + (parseRelDir . FP.takeBaseName . FP.takeBaseName . toFilePath $ tarball) + -- ^ drop ".tar" ^ drop ".gz" + when opts.buildTarball + ( buildExtractedTarball ResolvedPath + { resolvedRelative = RelFilePath "this-is-not-used" -- ugly hack + , resolvedAbsolute = pkgDir + } + ) + unless opts.ignoreCheck (checkPackageInExtractedTarball pkgDir) -checkPackageInExtractedTarball - :: HasEnvConfig env +checkPackageInExtractedTarball :: + HasEnvConfig env => Path Abs Dir -- ^ Absolute path to tarball -> RIO env () checkPackageInExtractedTarball pkgDir = do - (gpdio, name, _cabalfp) <- loadCabalFilePath pkgDir - gpd <- liftIO $ gpdio YesPrintWarnings - config <- getDefaultPackageConfig - let PackageDescriptionPair pkgDesc _ = resolvePackageDescription config gpd - logInfo $ - "Checking package '" <> fromString (packageNameString name) <> "' for common mistakes" - let pkgChecks = - -- MSS 2017-12-12: Try out a few different variants of - -- pkgDesc to try and provoke an error or warning. I don't - -- know why, but when using `Just pkgDesc`, it appears that - -- Cabal does not detect that `^>=` is used with - -- `cabal-version: 1.24` or earlier. It seems like pkgDesc - -- (the one we create) does not populate the `buildDepends` - -- field, whereas flattenPackageDescription from Cabal - -- does. 
In any event, using `Nothing` seems more logical - -- for this check anyway, and the fallback to `Just pkgDesc` - -- is just a crazy sanity check. - case Check.checkPackage gpd Nothing of - [] -> Check.checkPackage gpd (Just pkgDesc) - x -> x - fileChecks <- liftIO $ Check.checkPackageFiles minBound pkgDesc (toFilePath pkgDir) - let checks = pkgChecks ++ fileChecks - (errors, warnings) = - let criticalIssue (Check.PackageBuildImpossible _) = True - criticalIssue (Check.PackageDistInexcusable _) = True - criticalIssue _ = False - in partition criticalIssue checks - unless (null warnings) $ - logWarn $ "Package check reported the following warnings:\n" <> - mconcat (intersperse "\n" . fmap displayShow $ warnings) - case NE.nonEmpty errors of - Nothing -> return () - Just ne -> throwM $ CheckException ne + (gpdio, name, _cabalfp) <- loadCabalFilePath (Just stackProgName') pkgDir + gpd <- liftIO $ gpdio YesPrintWarnings + config <- getDefaultPackageConfig + let pkgDesc = resolvePackageDescription config gpd + prettyInfoL + [ flow "Checking package" + , style Current (fromPackageName name) + , flow "for common mistakes using Cabal version" + , fromString $ versionString cabalVersion <> "." + ] + let pkgChecks = Check.checkPackage gpd + fileChecks <- + liftIO $ Check.checkPackageFiles minBound pkgDesc (toFilePath pkgDir) + let checks = pkgChecks ++ fileChecks + (errors, warnings) = + let criticalIssue (Check.PackageBuildImpossible _) = True + criticalIssue (Check.PackageDistInexcusable _) = True + criticalIssue _ = False + in List.partition criticalIssue checks + unless (null warnings) $ + prettyWarn $ + flow "Package check reported the following warnings:" + <> line + <> bulletedList (map (fromString . 
show) warnings) + whenJust (nonEmpty errors) $ \ne -> prettyThrowM $ CheckException ne buildExtractedTarball :: HasEnvConfig env => ResolvedPath Dir -> RIO env () buildExtractedTarball pkgDir = do @@ -451,83 +595,90 @@ buildExtractedTarball pkgDir = do -- We remove the path based on the name of the package let isPathToRemove path = do localPackage <- readLocalPackage path - return $ packageName (lpPackage localPackage) == packageName (lpPackage localPackageToBuild) - pathsToKeep - <- fmap Map.fromList - $ flip filterM (Map.toList (smwProject (bcSMWanted (envConfigBuildConfig envConfig)))) - $ fmap not . isPathToRemove . resolvedAbsolute . ppResolvedDir . snd + pure + $ localPackage.package.name + == localPackageToBuild.package.name + pathsToKeep <- Map.fromList <$> filterM + (fmap not . isPathToRemove . resolvedAbsolute . (.resolvedDir) . snd) + (Map.toList envConfig.buildConfig.smWanted.project) pp <- mkProjectPackage YesPrintWarnings pkgDir False let adjustEnvForBuild env = let updatedEnvConfig = envConfig - { envConfigSourceMap = updatePackagesInSourceMap (envConfigSourceMap envConfig) - , envConfigBuildConfig = updateBuildConfig (envConfigBuildConfig envConfig) + { sourceMap = updatePackagesInSourceMap envConfig.sourceMap + , buildConfig = updateBuildConfig envConfig.buildConfig } updateBuildConfig bc = bc - { bcConfig = (bcConfig bc) - { configBuild = defaultBuildOpts { boptsTests = True } } + { config = bc.config { build = defaultBuildOpts { tests = True } } } - in set envConfigL updatedEnvConfig env + in set envConfigL updatedEnvConfig env updatePackagesInSourceMap sm = - sm {smProject = Map.insert (cpName $ ppCommon pp) pp pathsToKeep} + sm { SourceMap.project = Map.insert pp.projectCommon.name pp pathsToKeep } local adjustEnvForBuild $ build Nothing -- | Version of 'checkSDistTarball' that first saves lazy bytestring to -- temporary directory and then calls 'checkSDistTarball' on it. 
-checkSDistTarball' - :: HasEnvConfig env +checkSDistTarball' :: + HasEnvConfig env => SDistOpts -> String -- ^ Tarball name -> L.ByteString -- ^ Tarball contents as a byte string -> RIO env () checkSDistTarball' opts name bytes = withSystemTempDir "stack" $ \tpath -> do - npath <- (tpath ) `liftM` parseRelFile name - liftIO $ L.writeFile (toFilePath npath) bytes - checkSDistTarball opts npath + npath <- (tpath ) <$> parseRelFile name + liftIO $ L.writeFile (toFilePath npath) bytes + checkSDistTarball opts npath -withTempTarGzContents - :: Path Abs File -- ^ Location of tarball - -> (Path Abs Dir -> RIO env a) -- ^ Perform actions given dir with tarball contents +withTempTarGzContents :: + Path Abs File + -- ^ Location of tarball + -> (Path Abs Dir -> RIO env a) + -- ^ Perform actions given dir with tarball contents -> RIO env a withTempTarGzContents apath f = withSystemTempDir "stack" $ \tpath -> do - archive <- liftIO $ L.readFile (toFilePath apath) - liftIO . Tar.unpack (toFilePath tpath) . Tar.read . GZip.decompress $ archive - f tpath + archive <- liftIO $ L.readFile (toFilePath apath) + liftIO . Tar.unpack (toFilePath tpath) . Tar.read . 
GZip.decompress $ archive + f tpath -------------------------------------------------------------------------------- -- Copy+modified from the tar package to avoid issues with lazy IO ( see -- https://github.com/commercialhaskell/stack/issues/1344 ) -packFileEntry :: FilePath -- ^ Full path to find the file on the local disk - -> Tar.TarPath -- ^ Path to use for the tar Entry in the archive - -> IO Tar.Entry +packFileEntry :: + FilePath -- ^ Full path to find the file on the local disk + -> Tar.TarPath -- ^ Path to use for the tar Entry in the archive + -> IO Tar.Entry packFileEntry filepath tarpath = do - mtime <- getModTime filepath - perms <- getPermissions filepath + mtime <- getModTime filepath + perms <- getPermissions filepath content <- S.readFile filepath let size = fromIntegral (S.length content) - return (Tar.simpleEntry tarpath (Tar.NormalFile (L.fromStrict content) size)) { - Tar.entryPermissions = if executable perms then Tar.executableFilePermissions - else Tar.ordinaryFilePermissions, - Tar.entryTime = mtime - } + entryContent = Tar.NormalFile (L.fromStrict content) size + entry = Tar.simpleEntry tarpath entryContent + pure entry + { Tar.entryPermissions = if executable perms + then Tar.executableFilePermissions + else Tar.ordinaryFilePermissions + , Tar.entryTime = mtime + } getModTime :: FilePath -> IO Tar.EpochTime getModTime path = do - t <- getModificationTime path - return . floor . utcTimeToPOSIXSeconds $ t + t <- getModificationTime path + pure $ floor . 
utcTimeToPOSIXSeconds $ t -getDefaultPackageConfig :: (MonadIO m, MonadReader env m, HasEnvConfig env) +getDefaultPackageConfig :: + (MonadIO m, MonadReader env m, HasEnvConfig env) => m PackageConfig getDefaultPackageConfig = do platform <- view platformL compilerVersion <- view actualCompilerVersionL - return PackageConfig - { packageConfigEnableTests = False - , packageConfigEnableBenchmarks = False - , packageConfigFlags = mempty - , packageConfigGhcOptions = [] - , packageConfigCabalConfigOpts = [] - , packageConfigCompilerVersion = compilerVersion - , packageConfigPlatform = platform + pure PackageConfig + { enableTests = False + , enableBenchmarks = False + , flags = mempty + , ghcOptions = [] + , cabalConfigOpts = [] + , compilerVersion + , platform } diff --git a/src/Stack/Script.hs b/src/Stack/Script.hs index a0196fac0e..2434ae6857 100644 --- a/src/Stack/Script.hs +++ b/src/Stack/Script.hs @@ -1,254 +1,422 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Script +Description : Types and functions related to Stack's @script@ command. +License : BSD-3-Clause + +Types and functions related to Stack's @script@ command. +-} + module Stack.Script - ( scriptCmd - ) where + ( ScriptOpts (..) + , ScriptExecute (..) + , ShouldRun (..) 
+ , scriptCmd + ) where -import Stack.Prelude -import Data.ByteString.Builder (toLazyByteString) -import qualified Data.ByteString.Char8 as S8 -import qualified Data.Conduit.List as CL -import Data.List.Split (splitWhen) -import qualified Data.Map.Strict as Map -import qualified Data.Set as Set -import Distribution.Compiler (CompilerFlavor (..)) -import Distribution.ModuleName (ModuleName) +import Data.ByteString.Builder ( toLazyByteString ) +import qualified Data.ByteString.Char8 as S8 +import qualified Data.Conduit.List as CL +import qualified Data.List.NonEmpty as NE +import Data.List.Split ( splitWhen ) +import qualified Data.Map.Strict as Map +import qualified Data.Set as Set +import Distribution.Compiler ( CompilerFlavor (..) ) +import Distribution.ModuleName ( ModuleName ) import qualified Distribution.PackageDescription as PD import qualified Distribution.Types.CondTree as C -import Distribution.Types.PackageName (mkPackageName) -import Distribution.Types.VersionRange (withinRange) -import Distribution.System (Platform (..)) +import Distribution.Types.ModuleReexport ( moduleReexportName ) +import Distribution.Types.PackageName ( mkPackageName ) +import Distribution.Types.VersionRange ( withinRange ) +import Distribution.System ( Platform (..) 
) import qualified Pantry.SHA256 as SHA256 -#if MIN_VERSION_path(0,7,0) -import Path hiding (replaceExtension) -#else import Path -#endif -import Path.IO -import qualified Stack.Build -import Stack.Build.Installed -import Stack.Constants (osIsWindows) -import Stack.PackageDump -import Stack.Options.ScriptParser -import Stack.Runners -import Stack.Setup (withNewLocalBuildTargets) -import Stack.SourceMap (getCompilerInfo, immutableLocSha) -import Stack.Types.Compiler -import Stack.Types.Config -import Stack.Types.SourceMap -import System.FilePath (dropExtension, replaceExtension) + ( (), filename, fromAbsDir, fromAbsFile, fromRelFile + , parent, parseRelDir, replaceExtension, splitExtension + ) +import Path.IO ( getModificationTime, resolveFile' ) import qualified RIO.Directory as Dir import RIO.Process + ( HasProcessContext, exec, proc, readProcessStdout_ + , withWorkingDir + ) import qualified RIO.Text as T +import Stack.Build ( build ) +import Stack.Build.Installed ( getInstalled, toInstallMap ) +import Stack.Constants ( osIsWindows, relDirScripts ) +import Stack.Prelude +import Stack.Runners + ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) +import Stack.Setup ( withNewLocalBuildTargets ) +import Stack.SourceMap ( getCompilerInfo, immutableLocSha ) +import Stack.Types.Compiler ( ActualCompiler (..) ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), GhcPkgExe (..), HasCompiler (..) ) +import Stack.Types.Config ( Config (..), HasConfig (..), stackRootL ) +import Stack.Types.ConfigMonoid ( ConfigMonoid (..) ) +import qualified Stack.Types.ConfigMonoid as ConfigMonoid ( ConfigMonoid (..) ) +import Stack.Types.DumpPackage ( DumpPackage (..) ) +import Stack.Types.EnvConfig + ( EnvConfig (..), HasEnvConfig (..), actualCompilerVersionL + , appropriateGhcColorFlag + ) +import Stack.Types.EnvSettings ( defaultEnvSettings ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.Platform ( HasPlatform (..) 
) +import Stack.Types.Runner ( Runner, globalOptsL ) +import Stack.Types.SourceMap + ( CommonPackage (..), DepPackage (..), SourceMap (..) ) +import Stack.Types.StackYamlLoc ( StackYamlLoc (..) ) +import System.FilePath ( splitDrive ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Script" module. +data ScriptException + = MutableDependenciesForScript [PackageName] + | AmbiguousModuleName ModuleName [PackageName] + | ArgumentsWithNoRunInvalid + | NoRunWithoutCompilationInvalid + | FailedToParseScriptFileAsDirBug (Path Rel File) + | FailedToParseFileAsDirBug (Path Abs Dir) + deriving Show -data StackScriptException - = MutableDependenciesForScript [PackageName] - | AmbiguousModuleName ModuleName [PackageName] - deriving Typeable +instance Exception ScriptException where + displayException (MutableDependenciesForScript names) = unlines + $ "Error: [S-4994]" + : "No mutable packages are allowed in the 'script' command. Mutable \ + \packages found:" + : map (\name -> "- " ++ packageNameString name) names + displayException (AmbiguousModuleName mname pkgs) = unlines + $ "Error: [S-1691]" + : ( "Module " + ++ moduleNameString mname + ++ " appears in multiple packages: " + ) + : [ unwords $ map packageNameString pkgs ] + displayException ArgumentsWithNoRunInvalid = + "Error: [S-5067]\n" + ++ "'--no-run' incompatible with arguments." + displayException NoRunWithoutCompilationInvalid = + "Error: [S-9469]\n" + ++ "'--no-run' requires either '--compile' or '--optimize'." 
+ displayException (FailedToParseScriptFileAsDirBug fp) = bugReport "[S-5055]" $ + "Failed to parse script file name as directory:\n" + <> fromRelFile fp <> "\n" + displayException (FailedToParseFileAsDirBug p) = bugReport "[S-9464]" $ + "Failed to parse path to script file as directory:\n" + <> fromAbsDir p <> "\n" -instance Exception StackScriptException +-- | Type representing choices of interpreting, compiling (without optimisation) +-- and compiling (with optimisation). +data ScriptExecute + = SEInterpret + | SECompile + -- ^ Without optimisation. + | SEOptimize + -- ^ Compile with optimisation. + deriving Show -instance Show StackScriptException where - show (MutableDependenciesForScript names) = unlines - $ "No mutable packages are allowed in the `script` command. Mutable packages found:" - : map (\name -> "- " ++ packageNameString name) names - show (AmbiguousModuleName mname pkgs) = unlines - $ ("Module " ++ moduleNameString mname ++ " appears in multiple packages: ") - : [unwords $ map packageNameString pkgs ] +-- | Type representing choices of whether to run or not. +data ShouldRun + = YesRun + -- ^ Run. + | NoRun + -- ^ Do not run. + deriving Show + +-- | Type representing command line options for the @stack script@ command. +data ScriptOpts = ScriptOpts + { packages :: ![String] + , file :: !FilePath + , args :: ![String] + , compile :: !ScriptExecute + , useRoot :: !Bool + , ghcOptions :: ![String] + , scriptExtraDeps :: ![Unresolved (NonEmpty RawPackageLocationImmutable)] + , shouldRun :: !ShouldRun + } -- | Run a Stack Script scriptCmd :: ScriptOpts -> RIO Runner () scriptCmd opts = do - -- Some warnings in case the user somehow tries to set a - -- stack.yaml location. Note that in this functions we use - -- logError instead of logWarn because, when using the - -- interpreter mode, only error messages are shown. 
See: - -- https://github.com/commercialhaskell/stack/issues/3007 - view (globalOptsL.to globalStackYaml) >>= \case - SYLOverride fp -> logError $ - "Ignoring override stack.yaml file for script command: " <> - fromString (toFilePath fp) - SYLGlobalProject -> logError "Ignoring SYLGlobalProject for script command" - SYLDefault -> return () - SYLNoProject _ -> assert False (return ()) - - file <- resolveFile' $ soFile opts - let scriptDir = parent file - modifyGO go = go - { globalConfigMonoid = (globalConfigMonoid go) - { configMonoidInstallGHC = FirstTrue $ Just True - } - , globalStackYaml = SYLNoProject $ soScriptExtraDeps opts + -- Some warnings in case the user somehow tries to set a stack.yaml location. + -- Note that in this functions we use logError instead of logWarn because, + -- when using the interpreter mode, only error messages are shown. See: + -- https://github.com/commercialhaskell/stack/issues/3007 + view (globalOptsL . to (.stackYaml)) >>= \case + SYLOverride fp -> logError $ + "Ignoring override stack.yaml file for script command: " + <> fromString (toFilePath fp) + SYLGlobalProject -> logError "Ignoring SYLGlobalProject for script command" + SYLDefault -> pure () + SYLNoProject _ -> assert False (pure ()) + + file <- resolveFile' opts.file + let scriptFile = filename file + scriptRoot = parent file + + isNoRunCompile <- fromFirstFalse . (.noRunCompile) <$> + view (globalOptsL . 
to (.configMonoid)) + + resolvedExtraDeps <- + mapM (resolvePaths (Just scriptRoot)) opts.scriptExtraDeps + let scriptDir = parent file + extraDeps = concatMap NE.toList resolvedExtraDeps + modifyGO go = go + { configMonoid = go.configMonoid + { ConfigMonoid.installGHC = FirstTrue $ Just True } + , stackYaml = SYLNoProject extraDeps + } + (shouldRun, shouldCompile) = if isNoRunCompile + then (NoRun, SECompile) + else (opts.shouldRun, opts.compile) + + outputDir <- if opts.useRoot + then do + root <- local (over globalOptsL modifyGO) $ + withConfig NoReexec $ view stackRootL + scriptFileAsDir <- maybe + (throwIO $ FailedToParseScriptFileAsDirBug scriptFile) + pure + (parseRelDir $ fromRelFile scriptFile) + let fileAsDir = scriptDir scriptFileAsDir + -- We drop the information about the drive. On Windows, in principle, + -- the drive could distinguish between two otherwise identical + -- fileAsDir (eg C:\MyScript.hs\ D:\MyScript.hs\). In pactice, we + -- tolerate that possibility as being unlikely. + (_, escaped) = splitDrive (fromAbsDir fileAsDir) + escapedRelDir <- maybe + (throwIO $ FailedToParseFileAsDirBug fileAsDir) + pure + (parseRelDir escaped) + pure $ root relDirScripts escapedRelDir + else pure scriptDir + + -- path does not necessarily end with an extension. + let dropExtension path = pure $ maybe path fst $ splitExtension path + + exe <- if osIsWindows + then replaceExtension ".exe" (outputDir scriptFile) + else dropExtension (outputDir scriptFile) + + case shouldRun of + YesRun -> pure () + NoRun -> do + unless (null opts.args) $ throwIO ArgumentsWithNoRunInvalid + case shouldCompile of + SEInterpret -> throwIO NoRunWithoutCompilationInvalid + SECompile -> pure () + SEOptimize -> pure () + + -- Optimization: if we're compiling, and the executable is newer than the + -- source file, run it immediately. 
+ local (over globalOptsL modifyGO) $ + case shouldCompile of + SEInterpret -> longWay shouldRun shouldCompile file exe + SECompile -> shortCut shouldRun shouldCompile file exe + SEOptimize -> shortCut shouldRun shouldCompile file exe + + where + runCompiled :: + (HasProcessContext env, HasTerm env) + => ShouldRun + -> Path Abs File + -> RIO env () + runCompiled shouldRun exe = do + case shouldRun of + YesRun -> exec (fromAbsFile exe) opts.args + NoRun -> prettyInfoL + [ flow "Compilation finished, executable available at" + , style File (fromString (fromAbsFile exe)) <> "." + ] - -- Optimization: if we're compiling, and the executable is newer - -- than the source file, run it immediately. - local (over globalOptsL modifyGO) $ - case soCompile opts of - SEInterpret -> longWay file scriptDir - SECompile -> shortCut file scriptDir - SEOptimize -> shortCut file scriptDir - - where - shortCut file scriptDir = handleIO (const $ longWay file scriptDir) $ do - srcMod <- getModificationTime file - exeMod <- Dir.getModificationTime $ toExeName $ toFilePath file - if srcMod < exeMod - then exec (toExeName $ toFilePath file) (soArgs opts) - else longWay file scriptDir - - longWay file scriptDir = + shortCut shouldRun shouldCompile file exe = + handleIO (const $ longWay shouldRun shouldCompile file exe) $ do + srcMod <- getModificationTime file + exeMod <- Dir.getModificationTime (fromAbsFile exe) + if srcMod < exeMod + then runCompiled shouldRun exe + else longWay shouldRun shouldCompile file exe + + longWay shouldRun shouldCompile file exe = withConfig YesReexec $ withDefaultEnvConfig $ do config <- view configL - menv <- liftIO $ configProcessContextSettings config defaultEnvSettings + menv <- liftIO $ config.processContextSettings defaultEnvSettings withProcessContext menv $ do colorFlag <- appropriateGhcColorFlag targetsSet <- - case soPackages opts of - [] -> do - -- Using the import parser - getPackagesFromImports (soFile opts) - packages -> do - let targets = 
concatMap wordsComma packages - targets' <- mapM parsePackageNameThrowing targets - return $ Set.fromList targets' + case opts.packages of + [] -> getPackagesFromImports opts.file -- Using the import parser + packages -> do + let targets = concatMap wordsComma packages + targets' <- mapM parsePackageNameThrowing targets + pure $ Set.fromList targets' + GhcPkgExe pkg <- view $ compilerPathsL . to (.pkg) + let ghcPkgPath = toFilePath pkg unless (Set.null targetsSet) $ do - -- Optimization: use the relatively cheap ghc-pkg list - -- --simple-output to check which packages are installed - -- already. If all needed packages are available, we can - -- skip the (rather expensive) build call below. - GhcPkgExe pkg <- view $ compilerPathsL.to cpPkg - bss <- sinkProcessStdout (toFilePath pkg) - ["list", "--simple-output"] CL.consume -- FIXME use the package info from envConfigPackages, or is that crazy? - let installed = Set.fromList - $ map toPackageName - $ words - $ S8.unpack - $ S8.concat bss - if Set.null $ Set.difference (Set.map packageNameString targetsSet) installed - then logDebug "All packages already installed" - else do - logDebug "Missing packages, performing installation" - let targets = map (T.pack . packageNameString) $ Set.toList targetsSet - withNewLocalBuildTargets targets $ Stack.Build.build Nothing - - let ghcArgs = concat - [ ["-i", "-i" ++ toFilePath scriptDir] - , ["-hide-all-packages"] - , maybeToList colorFlag - , map (\x -> "-package" ++ x) - $ Set.toList - $ Set.insert "base" - $ Set.map packageNameString targetsSet - , case soCompile opts of - SEInterpret -> [] - SECompile -> [] - SEOptimize -> ["-O2"] - , soGhcOptions opts - ] - case soCompile opts of + -- Optimization: use the relatively cheap ghc-pkg list --simple-output + -- to check which packages are installed already. If all needed + -- packages are available, we can skip the (rather expensive) build + -- call below. 
+ -- https://github.com/haskell/process/issues/251 + bss <- snd <$> sinkProcessStderrStdout + ghcPkgPath + ["list", "--simple-output"] + CL.sinkNull + CL.consume + -- ^ FIXME use the package info from envConfigPackages, or is that crazy? + let installed = Set.fromList + $ map toPackageName + $ words + $ S8.unpack + $ S8.concat bss + if Set.null $ Set.difference (Set.map packageNameString targetsSet) installed + then logDebug "All packages already installed" + else do + logDebug "Missing packages, performing installation" + let targets = + map (T.pack . packageNameString) $ Set.toList targetsSet + withNewLocalBuildTargets targets $ build Nothing + + let packagesSet = Set.insert (mkPackageName "base") targetsSet + -- Yields 'raw' strings with trailing whitespace. Assumes that the + -- ghc-pkg application will find a package of the given name. + getRawPackageId :: PackageName -> RIO EnvConfig [ByteString] + getRawPackageId target = snd <$> sinkProcessStderrStdout + ghcPkgPath + ["field", packageNameString target, "id", "--simple-output"] + CL.sinkNull + CL.consume + rawPackageIds <- mapM getRawPackageId $ Set.toList packagesSet + let packageIds = words $ S8.unpack $ S8.concat $ concat rawPackageIds + -- ^ The use of words will eliminate whitespace between 'raw' items + ghcArgs = concat + [ ["-i", "-i" ++ fromAbsDir (parent file)] + , ["-hide-all-packages"] + , maybeToList colorFlag + -- We use GHC's -package-id option rather than -package because + -- there is a bug in the latter. For packages with a public + -- sublibrary, -package can expose an installed package + -- that is not listed by ghc-pkg list . 
See: + -- https://gitlab.haskell.org/ghc/ghc/-/issues/25025 + , map ("-package-id=" ++) packageIds + , case shouldCompile of + SEInterpret -> [] + SECompile -> [] + SEOptimize -> ["-O2"] + , opts.ghcOptions + , if opts.useRoot + then + [ "-outputdir=" ++ fromAbsDir (parent exe) + , "-o", fromAbsFile exe + ] + else [] + ] + case shouldCompile of SEInterpret -> do - interpret <- view $ compilerPathsL.to cpInterpreter + interpret <- view $ compilerPathsL . to (.interpreter) exec (toFilePath interpret) - (ghcArgs ++ toFilePath file : soArgs opts) + (ghcArgs ++ toFilePath file : opts.args) _ -> do -- Use readProcessStdout_ so that (1) if GHC does send any output -- to stdout, we capture it and stop it from being sent to our -- stdout, which could break scripts, and (2) if there's an -- exception, the standard output we did capture will be reported -- to the user. - compilerExeName <- view $ compilerPathsL.to cpCompiler.to toFilePath - withWorkingDir (toFilePath scriptDir) $ proc + liftIO $ Dir.createDirectoryIfMissing True (fromAbsDir (parent exe)) + compilerExeName <- + view $ compilerPathsL . to (.compiler) . to toFilePath + withWorkingDir (fromAbsDir (parent file)) $ proc compilerExeName (ghcArgs ++ [toFilePath file]) (void . readProcessStdout_) - exec (toExeName $ toFilePath file) (soArgs opts) + runCompiled shouldRun exe toPackageName = reverse . drop 1 . dropWhile (/= '-') . 
reverse -- Like words, but splits on both commas and spaces wordsComma = splitWhen (\c -> c == ' ' || c == ',') - toExeName fp = - if osIsWindows - then replaceExtension fp "exe" - else dropExtension fp - -getPackagesFromImports - :: FilePath -- ^ script filename +getPackagesFromImports :: + FilePath -- ^ script filename -> RIO EnvConfig (Set PackageName) getPackagesFromImports scriptFP = do - (pns, mns) <- liftIO $ parseImports <$> S8.readFile scriptFP - if Set.null mns - then return pns - else Set.union pns <$> getPackagesFromModuleNames mns + (pns, mns) <- liftIO $ parseImports <$> S8.readFile scriptFP + if Set.null mns + then pure pns + else Set.union pns <$> getPackagesFromModuleNames mns -getPackagesFromModuleNames - :: Set ModuleName +getPackagesFromModuleNames :: + Set ModuleName -> RIO EnvConfig (Set PackageName) getPackagesFromModuleNames mns = do - hash <- hashSnapshot - withSnapshotCache hash mapSnapshotPackageModules $ \getModulePackages -> do - pns <- forM (Set.toList mns) $ \mn -> do - pkgs <- getModulePackages mn - case pkgs of - [] -> return Set.empty - [pn] -> return $ Set.singleton pn - _ -> throwM $ AmbiguousModuleName mn pkgs - return $ Set.unions pns `Set.difference` blacklist + hash <- hashSnapshot + withSnapshotCache hash mapSnapshotPackageModules $ \getModulePackages -> do + pns <- forM (Set.toList mns) $ \mn -> do + pkgs <- getModulePackages mn + case pkgs of + [] -> pure Set.empty + [pn] -> pure $ Set.singleton pn + _ -> throwM $ AmbiguousModuleName mn pkgs + pure $ Set.unions pns `Set.difference` blacklist hashSnapshot :: RIO EnvConfig SnapshotCacheHash hashSnapshot = do - sourceMap <- view $ envConfigL . 
to envConfigSourceMap - compilerInfo <- getCompilerInfo - let eitherPliHash (pn, dep) | PLImmutable pli <- dpLocation dep = - Right $ immutableLocSha pli - | otherwise = - Left pn - deps = Map.toList (smDeps sourceMap) - case partitionEithers (map eitherPliHash deps) of - ([], pliHashes) -> do - let hashedContent = mconcat $ compilerInfo : pliHashes - pure $ SnapshotCacheHash (SHA256.hashLazyBytes $ toLazyByteString hashedContent) - (mutables, _) -> - throwM $ MutableDependenciesForScript mutables + sourceMap <- view $ envConfigL . to (.sourceMap) + compilerInfo <- getCompilerInfo + let eitherPliHash (pn, dep) + | PLImmutable pli <- dep.location = Right $ immutableLocSha pli + | otherwise = Left pn + deps = Map.toList sourceMap.deps + case partitionEithers (map eitherPliHash deps) of + ([], pliHashes) -> do + let hashedContent = mconcat $ compilerInfo : pliHashes + pure + $ SnapshotCacheHash (SHA256.hashLazyBytes + $ toLazyByteString hashedContent) + (mutables, _) -> throwM $ MutableDependenciesForScript mutables mapSnapshotPackageModules :: RIO EnvConfig (Map PackageName (Set ModuleName)) mapSnapshotPackageModules = do - sourceMap <- view $ envConfigL . to envConfigSourceMap - installMap <- toInstallMap sourceMap - (_installedMap, globalDumpPkgs, snapshotDumpPkgs, _localDumpPkgs) <- - getInstalled installMap - let globals = dumpedPackageModules (smGlobal sourceMap) globalDumpPkgs - notHidden = Map.filter (not . dpHidden) - notHiddenDeps = notHidden $ smDeps sourceMap - installedDeps = dumpedPackageModules notHiddenDeps snapshotDumpPkgs - dumpPkgs = Set.fromList $ map (pkgName . 
dpPackageIdent) snapshotDumpPkgs - notInstalledDeps = Map.withoutKeys notHiddenDeps dumpPkgs - otherDeps <- for notInstalledDeps $ \dep -> do - gpd <- liftIO $ cpGPD (dpCommon dep) - Set.fromList <$> allExposedModules gpd - -- source map construction process should guarantee unique package names - -- in these maps - return $ globals <> installedDeps <> otherDeps - -dumpedPackageModules :: Map PackageName a - -> [DumpPackage] - -> Map PackageName (Set ModuleName) + sourceMap <- view $ envConfigL . to (.sourceMap) + installMap <- toInstallMap sourceMap + (_installedMap, globalDumpPkgs, snapshotDumpPkgs, _localDumpPkgs) <- + getInstalled installMap + let globals = dumpedPackageModules sourceMap.globalPkgs globalDumpPkgs + notHidden = Map.filter (not . (.hidden)) + notHiddenDeps = notHidden sourceMap.deps + installedDeps = dumpedPackageModules notHiddenDeps snapshotDumpPkgs + dumpPkgs = + Set.fromList $ map (pkgName . (.packageIdent)) snapshotDumpPkgs + notInstalledDeps = Map.withoutKeys notHiddenDeps dumpPkgs + otherDeps <- for notInstalledDeps $ \dep -> do + gpd <- liftIO dep.depCommon.gpd + Set.fromList <$> allExposedModules gpd + -- source map construction process should guarantee unique package names in + -- these maps + pure $ globals <> installedDeps <> otherDeps + +dumpedPackageModules :: + Map PackageName a + -> [DumpPackage] + -> Map PackageName (Set ModuleName) dumpedPackageModules pkgs dumpPkgs = - let pnames = Map.keysSet pkgs `Set.difference` blacklist - in Map.fromList - [ (pn, dpExposedModules) - | DumpPackage {..} <- dumpPkgs - , let PackageIdentifier pn _ = dpPackageIdent - , pn `Set.member` pnames - ] + let pnames = Map.keysSet pkgs `Set.difference` blacklist + in Map.fromList + [ (pn, dp.exposedModules) + | dp <- dumpPkgs + , let PackageIdentifier pn _ = dp.packageIdent + , pn `Set.member` pnames + ] allExposedModules :: PD.GenericPackageDescription -> RIO EnvConfig [ModuleName] allExposedModules gpd = do @@ -264,91 +432,91 @@ allExposedModules gpd 
= do -- currently we don't do flag checking here checkCond other = Left other mlibrary = snd . C.simplifyCondTree checkCond <$> PD.condLibrary gpd - pure $ case mlibrary of + pure $ case mlibrary of Just lib -> PD.exposedModules lib ++ - map PD.moduleReexportName (PD.reexportedModules lib) + map moduleReexportName (PD.reexportedModules lib) Nothing -> mempty --- | The Stackage project introduced the concept of hidden packages, --- to deal with conflicting module names. However, this is a --- relatively recent addition (at time of writing). See: --- http://www.snoyman.com/blog/2017/01/conflicting-module-names. To --- kick this thing off a bit better, we're included a blacklist of --- packages that should never be auto-parsed in. +-- | The Stackage project introduced the concept of hidden packages, to deal +-- with conflicting module names. However, this is a relatively recent addition +-- (at time of writing). See: +-- http://www.snoyman.com/blog/2017/01/conflicting-module-names. To kick this +-- thing off a bit better, we're included a blacklist of packages that should +-- never be auto-parsed in. 
blacklist :: Set PackageName blacklist = Set.fromList - [ mkPackageName "async-dejafu" - , mkPackageName "monads-tf" - , mkPackageName "crypto-api" - , mkPackageName "fay-base" - , mkPackageName "hashmap" - , mkPackageName "hxt-unicode" - , mkPackageName "hledger-web" - , mkPackageName "plot-gtk3" - , mkPackageName "gtk3" - , mkPackageName "regex-pcre-builtin" - , mkPackageName "regex-compat-tdfa" - , mkPackageName "log" - , mkPackageName "zip" - , mkPackageName "monad-extras" - , mkPackageName "control-monad-free" - , mkPackageName "prompt" - , mkPackageName "kawhi" - , mkPackageName "language-c" - , mkPackageName "gl" - , mkPackageName "svg-tree" - , mkPackageName "Glob" - , mkPackageName "nanospec" - , mkPackageName "HTF" - , mkPackageName "courier" - , mkPackageName "newtype-generics" - , mkPackageName "objective" - , mkPackageName "binary-ieee754" - , mkPackageName "rerebase" - , mkPackageName "cipher-aes" - , mkPackageName "cipher-blowfish" - , mkPackageName "cipher-camellia" - , mkPackageName "cipher-des" - , mkPackageName "cipher-rc4" - , mkPackageName "crypto-cipher-types" - , mkPackageName "crypto-numbers" - , mkPackageName "crypto-pubkey" - , mkPackageName "crypto-random" - , mkPackageName "cryptohash" - , mkPackageName "cryptohash-conduit" - , mkPackageName "cryptohash-md5" - , mkPackageName "cryptohash-sha1" - , mkPackageName "cryptohash-sha256" - ] + [ mkPackageName "Glob" + , mkPackageName "HTF" + , mkPackageName "async-dejafu" + , mkPackageName "binary-ieee754" + , mkPackageName "cipher-aes" + , mkPackageName "cipher-blowfish" + , mkPackageName "cipher-camellia" + , mkPackageName "cipher-des" + , mkPackageName "cipher-rc4" + , mkPackageName "control-monad-free" + , mkPackageName "courier" + , mkPackageName "crypto-api" + , mkPackageName "crypto-cipher-types" + , mkPackageName "crypto-numbers" + , mkPackageName "crypto-pubkey" + , mkPackageName "crypto-random" + , mkPackageName "cryptohash" + , mkPackageName "cryptohash-conduit" + , mkPackageName 
"cryptohash-md5" + , mkPackageName "cryptohash-sha1" + , mkPackageName "cryptohash-sha256" + , mkPackageName "fay-base" + , mkPackageName "gl" + , mkPackageName "gtk3" + , mkPackageName "hashmap" + , mkPackageName "hledger-web" + , mkPackageName "hxt-unicode" + , mkPackageName "kawhi" + , mkPackageName "language-c" + , mkPackageName "log" + , mkPackageName "monad-extras" + , mkPackageName "monads-tf" + , mkPackageName "nanospec" + , mkPackageName "newtype-generics" + , mkPackageName "objective" + , mkPackageName "plot-gtk3" + , mkPackageName "prompt" + , mkPackageName "regex-compat-tdfa" + , mkPackageName "regex-pcre-builtin" + , mkPackageName "rerebase" + , mkPackageName "svg-tree" + , mkPackageName "zip" + ] parseImports :: ByteString -> (Set PackageName, Set ModuleName) parseImports = - fold . mapMaybe (parseLine . stripCR') . S8.lines - where - -- Remove any carriage return character present at the end, to - -- support Windows-style line endings (CRLF) - stripCR' bs - | S8.null bs = bs - | S8.last bs == '\r' = S8.init bs - | otherwise = bs - - stripPrefix x y - | x `S8.isPrefixOf` y = Just $ S8.drop (S8.length x) y - | otherwise = Nothing - - parseLine bs0 = do - bs1 <- stripPrefix "import " bs0 - let bs2 = S8.dropWhile (== ' ') bs1 - bs3 = fromMaybe bs2 $ stripPrefix "qualified " bs2 - case stripPrefix "\"" bs3 of - Just bs4 -> do - pn <- parsePackageNameThrowing $ S8.unpack $ S8.takeWhile (/= '"') bs4 - Just (Set.singleton pn, Set.empty) - Nothing -> Just - ( Set.empty - , Set.singleton - $ fromString - $ T.unpack - $ decodeUtf8With lenientDecode - $ S8.takeWhile (\c -> c /= ' ' && c /= '(') bs3 - ) + fold . mapMaybe (parseLine . stripCR') . 
S8.lines + where + -- Remove any carriage pure character present at the end, to support + -- Windows-style line endings (CRLF) + stripCR' bs + | S8.null bs = bs + | S8.last bs == '\r' = S8.init bs + | otherwise = bs + + stripPrefix x y + | x `S8.isPrefixOf` y = Just $ S8.drop (S8.length x) y + | otherwise = Nothing + + parseLine bs0 = do + bs1 <- stripPrefix "import " bs0 + let bs2 = S8.dropWhile (== ' ') bs1 + bs3 = fromMaybe bs2 $ stripPrefix "qualified " bs2 + case stripPrefix "\"" bs3 of + Just bs4 -> do + pn <- parsePackageNameThrowing $ S8.unpack $ S8.takeWhile (/= '"') bs4 + Just (Set.singleton pn, Set.empty) + Nothing -> Just + ( Set.empty + , Set.singleton + $ fromString + $ T.unpack + $ decodeUtf8With lenientDecode + $ S8.takeWhile (\c -> c /= ' ' && c /= '(') bs3 + ) diff --git a/src/Stack/Setup.hs b/src/Stack/Setup.hs index 9391752dd0..54ce267092 100644 --- a/src/Stack/Setup.hs +++ b/src/Stack/Setup.hs @@ -1,15 +1,16 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PackageImports #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE ViewPatterns #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} + +{-| +Module : Stack.Setup +License : BSD-3-Clause +-} module Stack.Setup ( setupEnv @@ -28,320 +29,813 @@ module Stack.Setup , downloadStackExe ) where -import qualified Codec.Archive.Tar as Tar -import Conduit -import Control.Applicative (empty) -import "cryptonite" Crypto.Hash (SHA1(..), SHA256(..)) -import Pantry.Internal.AesonExtended -import qualified Data.ByteString as S 
-import qualified Data.ByteString.Lazy as LBS -import qualified Data.Conduit.Binary as CB -import Data.Conduit.Lazy (lazyConsume) -import qualified Data.Conduit.List as CL -import Data.Conduit.Process.Typed (createSource) -import Data.Conduit.Zlib (ungzip) -import Data.Foldable (maximumBy) -import qualified Data.HashMap.Strict as HashMap -import Data.List hiding (concat, elem, maximumBy, any) -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Data.Text as T -import qualified Data.Text.Encoding as T -import qualified Data.Text.Encoding.Error as T -import qualified Data.Yaml as Yaml -import Distribution.System (OS, Arch (..), Platform (..)) -import qualified Distribution.System as Cabal -import Distribution.Text (simpleParse) -import Distribution.Types.PackageName (mkPackageName) -import Distribution.Version (mkVersion) -import Network.HTTP.StackClient (CheckHexDigest (..), HashCheck (..), - getResponseBody, getResponseStatusCode, httpLbs, httpJSON, - mkDownloadRequest, parseRequest, parseUrlThrow, setGithubHeaders, - setHashChecks, setLengthCheck, verifiedDownloadWithProgress, withResponse) -import Path hiding (fileExtension) -import Path.CheckInstall (warnInstallSearchPathIssues) -import Path.Extended (fileExtension) -import Path.Extra (toFilePathNoTrailingSep) -import Path.IO hiding (findExecutable, withSystemTempDir) -import qualified Pantry -import qualified RIO -import RIO.List -import RIO.PrettyPrint -import RIO.Process -import Stack.Build.Haddock (shouldHaddockDeps) -import Stack.Build.Source (loadSourceMap, hashSourceMapData) -import Stack.Build.Target (NeedTargets(..), parseTargets) -import Stack.Constants -import Stack.Constants.Config (distRelativeDir) -import Stack.GhcPkg (createDatabase, getGlobalDB, mkGhcPackagePath, ghcPkgPathEnvVar) -import Stack.Prelude hiding (Display (..)) -import Stack.SourceMap -import Stack.Setup.Installed -import Stack.Storage.User (loadCompilerPaths, saveCompilerPaths) -import 
Stack.Types.Build -import Stack.Types.Compiler -import Stack.Types.CompilerBuild -import Stack.Types.Config -import Stack.Types.Docker -import Stack.Types.SourceMap -import Stack.Types.Version -import qualified System.Directory as D -import System.Environment (getExecutablePath, lookupEnv) -import System.IO.Error (isPermissionError) -import System.FilePath (searchPathSeparator) -import qualified System.FilePath as FP -import System.Permissions (setFileExecutable) -import System.Uname (getRelease) -import Data.List.Split (splitOn) +import qualified Codec.Archive.Tar as Tar +import Conduit + ( ConduitT, await, concatMapMC, filterCE, foldMC, yield ) +import Control.Applicative ( empty ) +import Crypto.Hash ( SHA1 (..), SHA256 (..) ) +import qualified Data.Aeson.KeyMap as KeyMap +import Data.Aeson.Types ( Value (..) ) +import Data.Aeson.WarningParser + ( WithJSONWarnings (..), logJSONWarnings ) +import qualified Data.Attoparsec.Text as P +import qualified Data.ByteString as S +import qualified Data.ByteString.Lazy as LBS +import Data.Char ( isDigit ) +import qualified Data.Conduit.Binary as CB +import Data.Conduit.Lazy ( lazyConsume ) +import qualified Data.Conduit.List as CL +import Data.Conduit.Process.Typed ( createSource ) +import Data.Conduit.Zlib ( ungzip ) +import qualified Data.Either.Extra as EE +import Data.List.Split ( splitOn ) +import qualified Data.Map as Map +import qualified Data.Set as Set +import qualified Data.Text as T +import qualified Data.Text.Lazy as TL +import qualified Data.Text.Encoding as T +import qualified Data.Text.Lazy.Encoding as TL +import qualified Data.Text.Encoding.Error as T +import qualified Data.Yaml as Yaml +import Distribution.System ( Arch (..), OS, Platform (..) 
) +import qualified Distribution.System as Cabal +import Distribution.Text ( simpleParse ) +import Distribution.Types.PackageName ( mkPackageName ) +import Distribution.Version ( mkVersion ) +import Network.HTTP.Client ( redirectCount ) +import Network.HTTP.StackClient + ( CheckHexDigest (..), HashCheck (..), getResponseBody + , getResponseStatusCode, httpLbs, httpJSON, mkDownloadRequest + , parseRequest, parseUrlThrow, setGitHubHeaders + , setHashChecks, setLengthCheck, setRequestMethod + , verifiedDownloadWithProgress, withResponse + ) +import Network.HTTP.Simple ( getResponseHeader ) +import Path + ( (), addExtension, fileExtension, filename, parent + , parseAbsDir, parseAbsFile, parseRelDir, parseRelFile + , takeDrive, toFilePath + ) +import Path.CheckInstall ( warnInstallSearchPathIssues ) +import Path.Extra ( toFilePathNoTrailingSep ) +import Path.IO + ( canonicalizePath, doesFileExist, ensureDir, executable + , getPermissions, getTempDir, ignoringAbsence, listDir + , removeDirRecur, removeFile, renameDir, renameFile + , resolveFile', withTempDir + ) +import RIO.List + ( headMaybe, intercalate, intersperse, isPrefixOf + , maximumByMaybe, sort, sortOn, stripPrefix ) +import RIO.Process + ( EnvVars, HasProcessContext (..), ProcessContext + , augmentPath, augmentPathMap, doesExecutableExist, envVarsL + , exeSearchPathL, getStdout, mkProcessContext, modifyEnvVars + , proc, readProcess_, readProcessStdout, runProcess + , runProcess_, setStdout, waitExitCode, withModifyEnvVars + , withProcessWait, withWorkingDir, workingDirL + ) +import Stack.Build.Haddock ( shouldHaddockDeps ) +import Stack.Build.Source ( hashSourceMapData, loadSourceMap ) +import Stack.Build.Target ( NeedTargets (..), parseTargets ) +import Stack.Config.ConfigureScript ( ensureConfigureScript ) +import Stack.Constants + ( cabalPackageName, ghcBootScript,ghcConfigureMacOS + , ghcConfigurePosix, ghcConfigureWindows, hadrianScriptsPosix + , hadrianScriptsWindows, libDirs, osIsMacOS, osIsWindows + , 
relDirBin, relDirUsr, relFile7zdll, relFile7zexe + , relFileConfigure, relFileHadrianStackDotYaml + , relFileLibcMuslx86_64So1, relFileLibgmpSo10 + , relFileLibgmpSo3, relFileLibncurseswSo6, relFileLibtinfoSo5 + , relFileLibtinfoSo6, relFileMainHs, relFileStack + , relFileStackDotExe, relFileStackDotTmp + , relFileStackDotTmpDotExe, stackProgName, usrLibDirs + ) +import Stack.Constants.Config ( distRelativeDir ) +import Stack.GhcPkg + ( createDatabase, getGlobalDB, ghcPkgPathEnvVar + , mkGhcPackagePath ) +import Stack.Prelude +import Stack.Setup.Installed + ( Tool (..), filterTools, getCompilerVersion, installDir + , listInstalled, markInstalled, tempInstallDir,toolExtraDirs + , toolString, unmarkInstalled + ) +import Stack.SourceMap + ( actualFromGhc, globalsFromDump, pruneGlobals ) +import Stack.Storage.User ( loadCompilerPaths, saveCompilerPaths ) +import Stack.Types.Build.Exception ( BuildPrettyException (..) ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..), configFileRootL + , wantedCompilerVersionL + ) +import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..) ) +import Stack.Types.Compiler + ( ActualCompiler (..), CompilerBindistPath (..) + , CompilerException (..), CompilerRepository (..) + , CompilerTarget (..), WhichCompiler (..) + , compilerVersionText, getGhcVersion, isWantedCompiler + , wantedToActual, whichCompiler, whichCompilerL + ) +import Stack.Types.CompilerBuild + ( CompilerBuild (..), compilerBuildName, compilerBuildSuffix + ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), GhcPkgExe (..), HasCompiler (..) ) +import Stack.Types.Config + ( Config (..), HasConfig (..), envOverrideSettingsL + , ghcInstallHook + ) +import Stack.Types.DownloadInfo ( DownloadInfo (..) ) +import Stack.Types.DumpPackage ( DumpPackage (..) 
) +import Stack.Types.EnvConfig + ( EnvConfig (..), HasEnvConfig (..), extraBinDirs + , packageDatabaseDeps, packageDatabaseExtra + , packageDatabaseLocal + ) +import Stack.Types.EnvSettings + ( EnvSettings (..), minimalEnvSettings ) +import Stack.Types.ExtraDirs ( ExtraDirs (..) ) +import Stack.Types.FileDigestCache ( newFileDigestCache ) +import Stack.Types.GHCDownloadInfo ( GHCDownloadInfo (..) ) +import Stack.Types.GHCVariant + ( GHCVariant (..), HasGHCVariant (..), ghcVariantName + , ghcVariantSuffix + ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.Platform + ( HasPlatform (..), PlatformVariant (..) + , platformOnlyRelDir ) +import Stack.Types.Runner + ( HasRunner (..), Runner (..), mExecutablePathL + , viewExecutablePath + ) +import Stack.Types.SetupInfo ( SetupInfo (..) ) +import Stack.Types.SourceMap + ( SMActual (..), SMWanted (..), SourceMap (..) ) +import Stack.Types.Version + ( VersionCheck, stackMinorVersion, stackVersion ) +import Stack.Types.VersionedDownloadInfo + ( VersionedDownloadInfo (..) ) +import Stack.Types.WantedCompilerSetter ( WantedCompilerSetter (..) ) +import qualified System.Directory as D +import System.Environment ( lookupEnv ) +import System.IO.Error ( isPermissionError ) +import System.FilePath ( searchPathSeparator ) +import qualified System.FilePath as FP +import System.Permissions ( setFileExecutable ) +import System.Uname ( getRelease ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Setup" module +data SetupException + = WorkingDirectoryInvalidBug + | StackBinaryArchiveZipUnsupportedBug + deriving Show + +instance Exception SetupException where + displayException WorkingDirectoryInvalidBug = bugReport "[S-2076]" + "Invalid working directory." + displayException StackBinaryArchiveZipUnsupportedBug = bugReport "[S-3967]" + "FIXME: Handle zip files." 
+ +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Setup" module +data SetupPrettyException + = GHCInstallFailed + !SomeException + !String + !String + ![String] + !(Path Abs Dir) + !(Path Abs Dir) + !(Path Abs Dir) + | InvalidGhcAt !(Path Abs File) !SomeException + | ExecutableNotFound ![Path Abs File] + | SandboxedCompilerNotFound ![String] ![Path Abs Dir] + | UnsupportedSetupCombo !OS !Arch !StyleDoc !StyleDoc !(Path Abs Dir) + | MissingDependencies ![String] + | UnknownCompilerVersion + !(Set.Set Text) + !WantedCompiler + !(Set.Set ActualCompiler) + | UnknownOSKey !Text + | GHCSanityCheckCompileFailed !SomeException !(Path Abs File) + | RequireCustomGHCVariant + | ProblemWhileDecompressing !(Path Abs File) + | SetupInfoMissingSevenz + | UnsupportedSetupConfiguration + | MSYS2NotFound !Text + | UnwantedCompilerVersion + | UnwantedArchitecture + | GHCInfoNotValidUTF8 !UnicodeException + | GHCInfoNotListOfPairs + | GHCInfoMissingGlobalPackageDB + | GHCInfoMissingTargetPlatform + | GHCInfoTargetPlatformInvalid !String + | CabalNotFound !(Path Abs File) + | GhcBootScriptNotFound + | HadrianScriptNotFound + | URLInvalid !String + | UnknownArchiveExtension !String + | Unsupported7z + | TarballInvalid !String + | TarballFileInvalid !String !(Path Abs File) + | UnknownArchiveStructure !(Path Abs File) + | StackReleaseInfoNotFound !String + | StackBinaryArchiveNotFound ![String] + | HadrianBindistNotFound + | DownloadAndInstallCompilerError + | StackBinaryArchiveUnsupported !Text + | StackBinaryNotInArchive !String !Text + | FileTypeInArchiveInvalid !Tar.Entry !Text + | BinaryUpgradeOnOSUnsupported !Cabal.OS + | BinaryUpgradeOnArchUnsupported !Cabal.Arch + | ExistingMSYS2NotDeleted !(Path Abs Dir) !IOException + deriving Show + +instance Pretty SetupPrettyException where + pretty (GHCInstallFailed ex step cmd args wd tempDir destDir) = + "[S-7441]" + <> line + <> string (displayException ex) + <> line + <> hang 2 ( fillSep + [ 
flow "Error encountered while" + , fromString step + , flow "GHC with" + ] + <> line + <> style Shell (fromString (unwords (cmd : args))) + <> line + -- TODO: Figure out how to insert \ in the appropriate spots + -- hang 2 (shellColor (fillSep (fromString cmd : map fromString args))) <> line <> + <> fillSep + [ flow "run in" + , pretty wd + ] + ) + <> blankLine + <> flow "The following directories may now contain files, but won't be \ + \used by Stack:" + <> line + <> bulletedList [pretty tempDir, pretty destDir] + <> blankLine + <> fillSep + [ flow "For more information consider rerunning with" + , style Shell "--verbose" + , "flag." + ] + <> line + pretty (InvalidGhcAt compiler e) = + "[S-2476]" + <> line + <> fillSep + [ flow "Stack considers the compiler at" + , pretty compiler + , flow "to be invalid." + ] + <> blankLine + <> flow "While assessing that compiler, Stack encountered the error:" + <> blankLine + <> ppException e + pretty (ExecutableNotFound toTry) = + "[S-4764]" + <> line + <> flow "Stack could not find any of the following executables:" + <> line + <> bulletedList (map pretty toTry) + pretty (SandboxedCompilerNotFound names fps) = + "[S-9953]" + <> line + <> fillSep + ( ( flow "Stack could not find the sandboxed compiler. It looked for \ + \one named one of:" + : mkNarrativeList Nothing False + ( map fromString names :: [StyleDoc] ) + ) + <> ( flow "However, it could not find any on one of the paths:" + : mkNarrativeList Nothing False fps + ) + ) + <> blankLine + <> fillSep + [ flow "Perhaps a previously-installed compiler was not completely \ + \uninstalled. For further information about uninstalling \ + \tools, see the output of" + , style Shell (flow "stack uninstall") <> "." 
+ ] + pretty (UnsupportedSetupCombo os arch tool toolDirAdvice programsDir) = + "[S-1852]" + <> line + <> fillSep + [ flow "Stack does not know how to install" + , tool + , flow "for the combination of operating system" + , style Shell (pretty os) + , "and architecture" + , style Shell (pretty arch) <> "." + , flow "Please install manually." + ] + <> blankLine + <> fillSep + [ flow "To install manually the version of" + , tool <> "," + , flow "its root directory should be named" + , toolDirAdvice + , flow "and the directory should be accompanied by a file with the \ + \same name and extension" + , style File ".installed" + , flow "(which marks the" + , tool + , flow "version as installed). Both items should be located in the \ + \subdirectory for the specified platform in Stack's directory \ + \for local tools" + , parens (pretty programsDir) <> "." + ] + pretty (MissingDependencies tools) = + "[S-2126]" + <> line + <> fillSep + ( flow "The following executables are missing and must be installed:" + : mkNarrativeList Nothing False (map fromString tools :: [StyleDoc]) + ) + pretty (UnknownCompilerVersion oskeys wanted known) = + "[S-9443]" + <> line + <> fillSep + ( ( flow "No setup information found for" + : style Current wanted' + : flow "on your platform. This probably means a GHC binary \ + \distribution has not yet been added for OS key" + : mkNarrativeList (Just Shell) False + (map (fromString . T.unpack) (sort $ Set.toList oskeys) :: [StyleDoc]) + ) + <> ( flow "Supported versions:" + : mkNarrativeList Nothing False + ( map + (fromString . T.unpack . compilerVersionText) + (sort $ Set.toList known) + :: [StyleDoc] + ) + ) + ) + where + wanted' = fromString . T.unpack . utf8BuilderToText $ display wanted + pretty (UnknownOSKey oskey) = + "[S-6810]" + <> line + <> fillSep + [ flow "Unable to find installation URLs for OS key:" + , fromString $ T.unpack oskey <> "." 
+ ] + pretty (GHCSanityCheckCompileFailed e ghc) = + "[S-5159]" + <> line + <> fillSep + [ flow "The GHC located at" + , pretty ghc + , flow "failed to compile a sanity check. Please see:" + , style Url "http://docs.haskellstack.org/en/stable/install_and_upgrade/" + , flow "for more information. Stack encountered the following \ + \error:" + ] + <> blankLine + <> string (displayException e) + pretty RequireCustomGHCVariant = + "[S-8948]" + <> line + <> fillSep + [ flow "A custom" + , style Shell "--ghc-variant" + , flow "must be specified to use" + , style Shell "--ghc-bindist" <> "." + ] + pretty (ProblemWhileDecompressing archive) = + "[S-2905]" + <> line + <> fillSep + [ flow "Problem while decompressing" + , pretty archive <> "." + ] + pretty SetupInfoMissingSevenz = + "[S-9561]" + <> line + <> flow "SetupInfo missing Sevenz EXE/DLL." + pretty UnsupportedSetupConfiguration = + "[S-7748]" + <> line + <> flow "Stack does not know how to install GHC on your system \ + \configuration. Please install manually." + pretty (MSYS2NotFound osKey) = + "[S-5308]" + <> line + <> fillSep + [ flow "MSYS2 not found for" + , fromString $ T.unpack osKey <> "." + ] + pretty UnwantedCompilerVersion = + "[S-5127]" + <> line + <> flow "Not the compiler version we want." + pretty UnwantedArchitecture = + "[S-1540]" + <> line + <> flow "Not the architecture we want." + pretty (GHCInfoNotValidUTF8 e) = + "[S-8668]" + <> line + <> flow "GHC info is not valid UTF-8. Stack encountered the following \ + \error:" + <> blankLine + <> string (displayException e) + pretty GHCInfoNotListOfPairs = + "[S-4878]" + <> line + <> flow "GHC info does not parse as a list of pairs." + pretty GHCInfoMissingGlobalPackageDB = + "[S-2965]" + <> line + <> flow "Key 'Global Package DB' not found in GHC info." + pretty GHCInfoMissingTargetPlatform = + "[S-5219]" + <> line + <> flow "Key 'Target platform' not found in GHC info." 
+ pretty (GHCInfoTargetPlatformInvalid targetPlatform) = + "[S-8299]" + <> line + <> fillSep + [ flow "Invalid target platform in GHC info:" + , fromString targetPlatform <> "." + ] + pretty (CabalNotFound compiler) = + "[S-2574]" + <> line + <> fillSep + [ flow "Cabal library not found in global package database for" + , pretty compiler <> "." + ] + pretty GhcBootScriptNotFound = + "[S-8488]" + <> line + <> flow "No GHC boot script found." + pretty HadrianScriptNotFound = + "[S-1128]" + <> line + <> flow "No Hadrian build script found." + pretty (URLInvalid url) = + "[S-1906]" + <> line + <> fillSep + [ flow "`url` must be either an HTTP URL or a file path:" + , fromString url <> "." + ] + pretty (UnknownArchiveExtension url) = + "[S-1648]" + <> line + <> fillSep + [ flow "Unknown extension for url:" + , style Url (fromString url) <> "." + ] + pretty Unsupported7z = + "[S-4509]" + <> line + <> fillSep + [ flow "Stack does not know how to deal with" + , style File ".7z" + , flow "files on non-Windows operating systems." + ] + pretty (TarballInvalid name) = + "[S-3158]" + <> line + <> fillSep + [ style File (fromString name) + , flow "must be a tarball file." + ] + pretty (TarballFileInvalid name archiveFile) = + "[S-5252]" + <> line + <> fillSep + [ "Invalid" + , style File (fromString name) + , "filename:" + , pretty archiveFile <> "." + ] + pretty (UnknownArchiveStructure archiveFile) = + "[S-1827]" + <> line + <> fillSep + [ flow "Expected a single directory within unpacked" + , pretty archiveFile <> "." + ] + pretty (StackReleaseInfoNotFound url) = + "[S-9476]" + <> line + <> fillSep + [ flow "Could not get release information for Stack from:" + , style Url (fromString url) <> "." 
+ ] + pretty (StackBinaryArchiveNotFound platforms) = + "[S-4461]" + <> line + <> fillSep + ( flow "Unable to find binary Stack archive for platforms:" + : mkNarrativeList Nothing False + (map fromString platforms :: [StyleDoc]) + ) + pretty HadrianBindistNotFound = + "[S-6617]" + <> line + <> flow "Can't find Hadrian-generated binary distribution." + pretty DownloadAndInstallCompilerError = + "[S-7227]" + <> line + <> flow "'downloadAndInstallCompiler' should not be reached with ghc-git." + pretty (StackBinaryArchiveUnsupported archiveURL) = + "[S-6636]" + <> line + <> fillSep + [ flow "Unknown archive format for Stack archive:" + , style Url (fromString $ T.unpack archiveURL) <> "." + ] + pretty (StackBinaryNotInArchive exeName url) = + "[S-7871]" + <> line + <> fillSep + [ flow "Stack executable" + , style File (fromString exeName) + , flow "not found in archive from" + , style Url (fromString $ T.unpack url) <> "." + ] + pretty (FileTypeInArchiveInvalid e url) = + "[S-5046]" + <> line + <> fillSep + [ flow "Invalid file type for tar entry named" + , fromString (Tar.entryPath e) + , flow "downloaded from" + , style Url (fromString $ T.unpack url) <> "." + ] + pretty (BinaryUpgradeOnOSUnsupported os) = + "[S-4132]" + <> line + <> fillSep + [ flow "Binary upgrade not yet supported on OS:" + , pretty os <> "." + ] + pretty (BinaryUpgradeOnArchUnsupported arch) = + "[S-3249]" + <> line + <> fillSep + [ flow "Binary upgrade not yet supported on architecture:" + , pretty arch <> "." + ] + pretty (ExistingMSYS2NotDeleted destDir e) = + "[S-4230]" + <> line + <> fillSep + [ flow "Could not delete existing MSYS2 directory:" + , pretty destDir <> "." 
+ , flow "Stack encountered the following error:" + ] + <> blankLine + <> string (displayException e) + +instance Exception SetupPrettyException + +-- | Type representing exceptions thrown by 'performPathChecking' +data PerformPathCheckingException + = ProcessExited ExitCode String [String] + deriving Show + +instance Exception PerformPathCheckingException where + displayException (ProcessExited ec cmd args) = concat + [ "Error: [S-1991]\n" + , "Process exited with " + , displayException ec + , ": " + , unwords (cmd:args) + ] -- | Default location of the stack-setup.yaml file defaultSetupInfoYaml :: String defaultSetupInfoYaml = - "https://raw.githubusercontent.com/fpco/stackage-content/master/stack/stack-setup-2.yaml" + "https://raw.githubusercontent.com/commercialhaskell/stackage-content/master/stack/stack-setup-2.yaml" +-- | Type representing setup configurations. data SetupOpts = SetupOpts - { soptsInstallIfMissing :: !Bool - , soptsUseSystem :: !Bool + { installGhcIfMissing :: !Bool + , installMsysIfMissing :: !Bool + , useSystem :: !Bool -- ^ Should we use a system compiler installation, if available? - , soptsWantedCompiler :: !WantedCompiler - , soptsCompilerCheck :: !VersionCheck - , soptsStackYaml :: !(Maybe (Path Abs File)) - -- ^ If we got the desired GHC version from that file - , soptsForceReinstall :: !Bool - , soptsSanityCheck :: !Bool + , wantedCompiler :: !WantedCompiler + , compilerCheck :: !VersionCheck + , configFile :: !(Maybe (Path Abs File)) + -- ^ If we got the desired GHC version from that configuration file, which + -- may be either a user-specific global or a project-level one. 
+ , forceReinstall :: !Bool + , sanityCheck :: !Bool -- ^ Run a sanity check on the selected GHC - , soptsSkipGhcCheck :: !Bool + , skipGhcCheck :: !Bool -- ^ Don't check for a compatible GHC version/architecture - , soptsSkipMsys :: !Bool + , skipMsys :: !Bool -- ^ Do not use a custom msys installation on Windows - , soptsResolveMissingGHC :: !(Maybe Text) + , resolveMissingGHC :: !(Maybe StyleDoc) -- ^ Message shown to user for how to resolve the missing GHC - , soptsGHCBindistURL :: !(Maybe String) + , ghcBindistURL :: !(Maybe String) -- ^ Alternate GHC binary distribution (requires custom GHCVariant) - } - deriving Show -data SetupException = UnsupportedSetupCombo OS Arch - | MissingDependencies [String] - | UnknownCompilerVersion (Set.Set Text) WantedCompiler (Set.Set ActualCompiler) - | UnknownOSKey Text - | GHCSanityCheckCompileFailed SomeException (Path Abs File) - | WantedMustBeGHC - | RequireCustomGHCVariant - | ProblemWhileDecompressing (Path Abs File) - | SetupInfoMissingSevenz - | DockerStackExeNotFound Version Text - | UnsupportedSetupConfiguration - | InvalidGhcAt (Path Abs File) SomeException - deriving Typeable -instance Exception SetupException -instance Show SetupException where - show (UnsupportedSetupCombo os arch) = concat - [ "I don't know how to install GHC for " - , show (os, arch) - , ", please install manually" - ] - show (MissingDependencies tools) = - "The following executables are missing and must be installed: " ++ - intercalate ", " tools - show (UnknownCompilerVersion oskeys wanted known) = concat - [ "No setup information found for " - , T.unpack $ utf8BuilderToText $ RIO.display wanted - , " on your platform.\nThis probably means a GHC bindist has not yet been added for OS key '" - , T.unpack (T.intercalate "', '" (sort $ Set.toList oskeys)) - , "'.\nSupported versions: " - , T.unpack (T.intercalate ", " (map compilerVersionText (sort $ Set.toList known))) - ] - show (UnknownOSKey oskey) = - "Unable to find installation URLs for 
OS key: " ++ - T.unpack oskey - show (GHCSanityCheckCompileFailed e ghc) = concat - [ "The GHC located at " - , toFilePath ghc - , " failed to compile a sanity check. Please see:\n\n" - , " http://docs.haskellstack.org/en/stable/install_and_upgrade/\n\n" - , "for more information. Exception was:\n" - , show e - ] - show WantedMustBeGHC = - "The wanted compiler must be GHC" - show RequireCustomGHCVariant = - "A custom --ghc-variant must be specified to use --ghc-bindist" - show (ProblemWhileDecompressing archive) = - "Problem while decompressing " ++ toFilePath archive - show SetupInfoMissingSevenz = - "SetupInfo missing Sevenz EXE/DLL" - show (DockerStackExeNotFound stackVersion' osKey) = concat - [ stackProgName - , "-" - , versionString stackVersion' - , " executable not found for " - , T.unpack osKey - , "\nUse the '" - , T.unpack dockerStackExeArgName - , "' option to specify a location"] - show UnsupportedSetupConfiguration = - "I don't know how to install GHC on your system configuration, please install manually" - show (InvalidGhcAt compiler e) = - "Found an invalid compiler at " ++ show (toFilePath compiler) ++ ": " ++ displayException e - --- | Modify the environment variables (like PATH) appropriately, possibly doing installation too -setupEnv :: NeedTargets - -> BuildOptsCLI - -> Maybe Text -- ^ Message to give user when necessary GHC is not available - -> RIO BuildConfig EnvConfig -setupEnv needTargets boptsCLI mResolveMissingGHC = do - config <- view configL - bc <- view buildConfigL - let stackYaml = bcStackYaml bc - platform <- view platformL - wcVersion <- view wantedCompilerVersionL - wanted <- view wantedCompilerVersionL - actual <- either throwIO pure $ wantedToActual wanted - let wc = actual^.whichCompilerL - let sopts = SetupOpts - { soptsInstallIfMissing = configInstallGHC config - , soptsUseSystem = configSystemGHC config - , soptsWantedCompiler = wcVersion - , soptsCompilerCheck = configCompilerCheck config - , soptsStackYaml = Just 
stackYaml - , soptsForceReinstall = False - , soptsSanityCheck = False - , soptsSkipGhcCheck = configSkipGHCCheck config - , soptsSkipMsys = configSkipMsys config - , soptsResolveMissingGHC = mResolveMissingGHC - , soptsGHCBindistURL = Nothing - } - - (compilerPaths, ghcBin) <- ensureCompilerAndMsys sopts - let compilerVer = cpCompilerVersion compilerPaths + } + deriving Show + +-- | Modify the environment variables (like PATH) appropriately, possibly doing +-- installation too +setupEnv :: + NeedTargets + -> BuildOptsCLI + -> Maybe StyleDoc + -- ^ Message to give user when necessary GHC is not available. + -> RIO BuildConfig EnvConfig +setupEnv needTargets buildOptsCLI mResolveMissingGHC = do + config <- view configL + bc <- view buildConfigL + -- We are indifferent as to whether the configuration file is a + -- user-specific global or a project-level one. + let eitherConfigFile = EE.fromEither bc.configFile + platform <- view platformL + wcVersion <- view wantedCompilerVersionL + actual <- either throwIO pure $ wantedToActual wcVersion + let wc = actual^.whichCompilerL + sopts = SetupOpts + { installGhcIfMissing = config.installGHC + , installMsysIfMissing = config.installMsys + , useSystem = config.systemGHC + , wantedCompiler = wcVersion + , compilerCheck = config.compilerCheck + , configFile = Just eitherConfigFile + , forceReinstall = False + , sanityCheck = False + , skipGhcCheck = config.skipGHCCheck + , skipMsys = config.skipMsys + , resolveMissingGHC = mResolveMissingGHC + , ghcBindistURL = Nothing + } - -- Modify the initial environment to include the GHC path, if a local GHC - -- is being used - menv0 <- view processContextL - env <- either throwM (return . 
removeHaskellEnvVars) - $ augmentPathMap - (map toFilePath $ edBins ghcBin) - (view envVarsL menv0) - menv <- mkProcessContext env - - logDebug "Resolving package entries" - - (sourceMap, sourceMapHash) <- runWithGHC menv compilerPaths $ do - smActual <- actualFromGhc (bcSMWanted bc) compilerVer - let actualPkgs = Map.keysSet (smaDeps smActual) <> - Map.keysSet (smaProject smActual) - prunedActual = smActual { smaGlobal = pruneGlobals (smaGlobal smActual) actualPkgs } - haddockDeps = shouldHaddockDeps (configBuild config) - targets <- parseTargets needTargets haddockDeps boptsCLI prunedActual - sourceMap <- loadSourceMap targets boptsCLI smActual - sourceMapHash <- hashSourceMapData boptsCLI sourceMap - pure (sourceMap, sourceMapHash) - - let envConfig0 = EnvConfig - { envConfigBuildConfig = bc - , envConfigBuildOptsCLI = boptsCLI - , envConfigSourceMap = sourceMap - , envConfigSourceMapHash = sourceMapHash - , envConfigCompilerPaths = compilerPaths - } + (compilerPaths, ghcBin) <- ensureCompilerAndMsys sopts + let compilerVer = compilerPaths.compilerVersion + + -- Modify the initial environment to include the GHC path, if a local GHC + -- is being used + menv0 <- view processContextL + env <- either throwM (pure . 
removeHaskellEnvVars) + $ augmentPathMap + (map toFilePath ghcBin.bins) + (view envVarsL menv0) + menv <- mkProcessContext env + + logDebug "Resolving package entries" + + (sourceMap, sourceMapHash) <- runWithGHC menv compilerPaths $ do + smActual <- actualFromGhc bc.smWanted compilerVer + let actualPkgs = Map.keysSet smActual.deps <> + Map.keysSet smActual.project + prunedActual = smActual + { globals = pruneGlobals smActual.globals actualPkgs } + haddockDeps = shouldHaddockDeps config.build + targets <- parseTargets needTargets haddockDeps buildOptsCLI prunedActual + sourceMap <- loadSourceMap targets buildOptsCLI smActual + sourceMapHash <- hashSourceMapData buildOptsCLI sourceMap + pure (sourceMap, sourceMapHash) + + fileDigestCache <- newFileDigestCache + + let envConfig0 = EnvConfig + { buildConfig = bc + , buildOptsCLI + , fileDigestCache + , sourceMap + , sourceMapHash + , compilerPaths + } - -- extra installation bin directories - mkDirs <- runRIO envConfig0 extraBinDirs - let mpath = Map.lookup "PATH" env - depsPath <- either throwM return $ augmentPath (toFilePath <$> mkDirs False) mpath - localsPath <- either throwM return $ augmentPath (toFilePath <$> mkDirs True) mpath - - deps <- runRIO envConfig0 packageDatabaseDeps - runWithGHC menv compilerPaths $ createDatabase (cpPkg compilerPaths) deps - localdb <- runRIO envConfig0 packageDatabaseLocal - runWithGHC menv compilerPaths $ createDatabase (cpPkg compilerPaths) localdb - extras <- runReaderT packageDatabaseExtra envConfig0 - let mkGPP locals = mkGhcPackagePath locals localdb deps extras $ cpGlobalDB compilerPaths - - distDir <- runReaderT distRelativeDir envConfig0 >>= canonicalizePath - - executablePath <- liftIO getExecutablePath - - utf8EnvVars <- withProcessContext menv $ getUtf8EnvVars compilerVer - - mGhcRtsEnvVar <- liftIO $ lookupEnv "GHCRTS" - - envRef <- liftIO $ newIORef Map.empty - let getProcessContext' es = do - m <- readIORef envRef - case Map.lookup es m of - Just eo -> return eo - 
Nothing -> do - eo <- mkProcessContext - $ Map.insert "PATH" (if esIncludeLocals es then localsPath else depsPath) - $ (if esIncludeGhcPackagePath es - then Map.insert (ghcPkgPathEnvVar wc) (mkGPP (esIncludeLocals es)) - else id) - - $ (if esStackExe es - then Map.insert "STACK_EXE" (T.pack executablePath) - else id) - - $ (if esLocaleUtf8 es - then Map.union utf8EnvVars - else id) - - $ case (soptsSkipMsys sopts, platform) of - (False, Platform Cabal.I386 Cabal.Windows) - -> Map.insert "MSYSTEM" "MINGW32" - (False, Platform Cabal.X86_64 Cabal.Windows) - -> Map.insert "MSYSTEM" "MINGW64" - _ -> id - - -- See https://github.com/commercialhaskell/stack/issues/3444 - $ case (esKeepGhcRts es, mGhcRtsEnvVar) of - (True, Just ghcRts) -> Map.insert "GHCRTS" (T.pack ghcRts) - _ -> id - - -- For reasoning and duplication, see: https://github.com/fpco/stack/issues/70 - $ Map.insert "HASKELL_PACKAGE_SANDBOX" (T.pack $ toFilePathNoTrailingSep deps) - $ Map.insert "HASKELL_PACKAGE_SANDBOXES" - (T.pack $ if esIncludeLocals es - then intercalate [searchPathSeparator] - [ toFilePathNoTrailingSep localdb - , toFilePathNoTrailingSep deps - , "" - ] - else intercalate [searchPathSeparator] - [ toFilePathNoTrailingSep deps - , "" - ]) - $ Map.insert "HASKELL_DIST_DIR" (T.pack $ toFilePathNoTrailingSep distDir) - - -- Make sure that any .ghc.environment files - -- are ignored, since we're settting up our - -- own package databases. 
See - -- https://github.com/commercialhaskell/stack/issues/4706 - $ (case cpCompilerVersion compilerPaths of - ACGhc version | version >= mkVersion [8, 4, 4] -> - Map.insert "GHC_ENVIRONMENT" "-" - _ -> id) - - env - - () <- atomicModifyIORef envRef $ \m' -> - (Map.insert es eo m', ()) - return eo - - envOverride <- liftIO $ getProcessContext' minimalEnvSettings - return EnvConfig - { envConfigBuildConfig = bc - { bcConfig = addIncludeLib ghcBin - $ set processContextL envOverride - (view configL bc) - { configProcessContextSettings = getProcessContext' - } + -- extra installation bin directories + mkDirs <- runRIO envConfig0 extraBinDirs + let mpath = Map.lookup "PATH" env + depsPath <- + either throwM pure $ augmentPath (toFilePath <$> mkDirs False) mpath + localsPath <- + either throwM pure $ augmentPath (toFilePath <$> mkDirs True) mpath + + deps <- runRIO envConfig0 packageDatabaseDeps + runWithGHC menv compilerPaths $ createDatabase compilerPaths.pkg deps + localdb <- runRIO envConfig0 packageDatabaseLocal + runWithGHC menv compilerPaths $ createDatabase compilerPaths.pkg localdb + extras <- runReaderT packageDatabaseExtra envConfig0 + let mkGPP locals = + mkGhcPackagePath locals localdb deps extras compilerPaths.globalDB + + distDir <- runReaderT distRelativeDir envConfig0 >>= canonicalizePath + + mExecutablePath <- view mExecutablePathL + + mGhcRtsEnvVar <- liftIO $ lookupEnv "GHCRTS" + + envRef <- liftIO $ newIORef Map.empty + let msysEnv = maybe "" (T.pack . 
show) config.msysEnvironment + getProcessContext' es = do + m <- readIORef envRef + case Map.lookup es m of + Just eo -> pure eo + Nothing -> do + eo <- mkProcessContext + $ Map.insert + "PATH" + (if es.includeLocals then localsPath else depsPath) + $ (if es.includeGhcPackagePath + then + Map.insert + (ghcPkgPathEnvVar wc) + (mkGPP es.includeLocals) + else id) + + $ (if es.stackExe + then maybe + -- We don't throw an exception if there is no Stack + -- executable path, so that buildConfigCompleter does not + -- need to specify a path. + id + ( \executablePath -> Map.insert + "STACK_EXE" + (T.pack $ toFilePath executablePath) + ) + mExecutablePath + else id) + + $ (if es.localeUtf8 + then Map.union utf8EnvVars + else id) + + $ case (sopts.skipMsys, platform) of + (False, Platform Cabal.I386 Cabal.Windows) -> + Map.insert "MSYSTEM" msysEnv + (False, Platform Cabal.X86_64 Cabal.Windows) -> + Map.insert "MSYSTEM" msysEnv + _ -> id + + -- See https://github.com/commercialhaskell/stack/issues/3444 + $ case (es.keepGhcRts, mGhcRtsEnvVar) of + (True, Just ghcRts) -> Map.insert "GHCRTS" (T.pack ghcRts) + _ -> id + + -- For reasoning and duplication, see: + -- https://github.com/commercialhaskell/stack/issues/70 + $ Map.insert + "HASKELL_PACKAGE_SANDBOX" + (T.pack $ toFilePathNoTrailingSep deps) + $ Map.insert "HASKELL_PACKAGE_SANDBOXES" + (T.pack $ if es.includeLocals + then intercalate [searchPathSeparator] + [ toFilePathNoTrailingSep localdb + , toFilePathNoTrailingSep deps + , "" + ] + else intercalate [searchPathSeparator] + [ toFilePathNoTrailingSep deps + , "" + ]) + $ Map.insert + "HASKELL_DIST_DIR" + (T.pack $ toFilePathNoTrailingSep distDir) + + -- Make sure that any .ghc.environment files + -- are ignored, since we're setting up our + -- own package databases. 
See + -- https://github.com/commercialhaskell/stack/issues/4706 + $ (case compilerPaths.compilerVersion of + ACGhc version | version >= mkVersion [8, 4, 4] -> + Map.insert "GHC_ENVIRONMENT" "-" + _ -> id) + + env + + () <- atomicModifyIORef envRef $ \m' -> + (Map.insert es eo m', ()) + pure eo + + envOverride <- liftIO $ getProcessContext' minimalEnvSettings + pure EnvConfig + { buildConfig = bc + { config = addIncludeLib ghcBin + $ set processContextL envOverride + (view configL bc) + { processContextSettings = getProcessContext' } - , envConfigBuildOptsCLI = boptsCLI - , envConfigSourceMap = sourceMap - , envConfigSourceMapHash = sourceMapHash - , envConfigCompilerPaths = compilerPaths } + , buildOptsCLI + , fileDigestCache + , sourceMap + , sourceMapHash + , compilerPaths + } -- | A modified env which we know has an installed compiler on the PATH. data WithGHC env = WithGHC !CompilerPaths !env @@ -350,344 +844,598 @@ insideL :: Lens' (WithGHC env) env insideL = lens (\(WithGHC _ x) -> x) (\(WithGHC cp _) -> WithGHC cp) instance HasLogFunc env => HasLogFunc (WithGHC env) where - logFuncL = insideL.logFuncL + logFuncL = insideL . logFuncL + instance HasRunner env => HasRunner (WithGHC env) where - runnerL = insideL.runnerL + runnerL = insideL . runnerL + instance HasProcessContext env => HasProcessContext (WithGHC env) where - processContextL = insideL.processContextL + processContextL = insideL . processContextL + instance HasStylesUpdate env => HasStylesUpdate (WithGHC env) where - stylesUpdateL = insideL.stylesUpdateL + stylesUpdateL = insideL . stylesUpdateL + instance HasTerm env => HasTerm (WithGHC env) where - useColorL = insideL.useColorL - termWidthL = insideL.termWidthL + useColorL = insideL . useColorL + termWidthL = insideL . 
termWidthL + instance HasPantryConfig env => HasPantryConfig (WithGHC env) where - pantryConfigL = insideL.pantryConfigL -instance HasConfig env => HasPlatform (WithGHC env) -instance HasConfig env => HasGHCVariant (WithGHC env) + pantryConfigL = insideL . pantryConfigL + +instance HasConfig env => HasPlatform (WithGHC env) where + platformL = configL . platformL + {-# INLINE platformL #-} + platformVariantL = configL . platformVariantL + {-# INLINE platformVariantL #-} + +instance HasConfig env => HasGHCVariant (WithGHC env) where + ghcVariantL = configL . ghcVariantL + {-# INLINE ghcVariantL #-} + instance HasConfig env => HasConfig (WithGHC env) where - configL = insideL.configL + configL = insideL . configL + instance HasBuildConfig env => HasBuildConfig (WithGHC env) where - buildConfigL = insideL.buildConfigL + buildConfigL = insideL . buildConfigL + instance HasCompiler (WithGHC env) where compilerPathsL = to (\(WithGHC cp _) -> cp) --- | Set up a modified environment which includes the modified PATH --- that GHC can be found on. This is needed for looking up global --- package information and ghc fingerprint (result from 'ghc --info'). -runWithGHC :: HasConfig env => ProcessContext -> CompilerPaths -> RIO (WithGHC env) a -> RIO env a +-- | Set up a modified environment which includes the modified PATH that GHC can +-- be found on. This is needed for looking up global package information and ghc +-- fingerprint (result from 'ghc --info'). +runWithGHC :: + HasConfig env + => ProcessContext + -> CompilerPaths + -> RIO (WithGHC env) a + -> RIO env a runWithGHC pc cp inner = do env <- ask let envg = WithGHC cp $ - set envOverrideSettingsL (\_ -> return pc) $ + set envOverrideSettingsL (\_ -> pure pc) $ set processContextL pc env runRIO envg inner +-- | A modified environment which we know has MSYS2 on the PATH. 
+newtype WithMSYS env = WithMSYS env + +insideMSYSL :: Lens' (WithMSYS env) env +insideMSYSL = lens (\(WithMSYS x) -> x) (\(WithMSYS _) -> WithMSYS) + +instance HasLogFunc env => HasLogFunc (WithMSYS env) where + logFuncL = insideMSYSL . logFuncL + +instance HasRunner env => HasRunner (WithMSYS env) where + runnerL = insideMSYSL . runnerL + +instance HasProcessContext env => HasProcessContext (WithMSYS env) where + processContextL = insideMSYSL . processContextL + +instance HasStylesUpdate env => HasStylesUpdate (WithMSYS env) where + stylesUpdateL = insideMSYSL . stylesUpdateL + +instance HasTerm env => HasTerm (WithMSYS env) where + useColorL = insideMSYSL . useColorL + termWidthL = insideMSYSL . termWidthL + +instance HasPantryConfig env => HasPantryConfig (WithMSYS env) where + pantryConfigL = insideMSYSL . pantryConfigL + +instance HasConfig env => HasPlatform (WithMSYS env) where + platformL = configL . platformL + {-# INLINE platformL #-} + platformVariantL = configL . platformVariantL + {-# INLINE platformVariantL #-} + +instance HasConfig env => HasGHCVariant (WithMSYS env) where + ghcVariantL = configL . ghcVariantL + {-# INLINE ghcVariantL #-} + +instance HasConfig env => HasConfig (WithMSYS env) where + configL = insideMSYSL . configL + +instance HasBuildConfig env => HasBuildConfig (WithMSYS env) where + buildConfigL = insideMSYSL . buildConfigL + +-- | Set up a modified environment which includes the modified PATH that MSYS2 +-- can be found on. 
+runWithMSYS :: + HasConfig env + => Maybe ExtraDirs + -> RIO (WithMSYS env) a + -> RIO env a +runWithMSYS mmsysPaths inner = do + env <- ask + pc0 <- view processContextL + pc <- case mmsysPaths of + Nothing -> pure pc0 + Just msysPaths -> do + envars <- either throwM pure $ + augmentPathMap + (map toFilePath msysPaths.bins) + (view envVarsL pc0) + mkProcessContext envars + let envMsys + = WithMSYS $ + set envOverrideSettingsL (\_ -> pure pc) $ + set processContextL pc env + runRIO envMsys inner + -- | special helper for GHCJS which needs an updated source map -- only project dependencies should get included otherwise source map hash will -- get changed and EnvConfig will become inconsistent -rebuildEnv :: EnvConfig - -> NeedTargets - -> Bool - -> BuildOptsCLI - -> RIO env EnvConfig +rebuildEnv :: + EnvConfig + -> NeedTargets + -> Bool + -> BuildOptsCLI + -> RIO env EnvConfig rebuildEnv envConfig needTargets haddockDeps boptsCLI = do - let bc = envConfigBuildConfig envConfig - cp = envConfigCompilerPaths envConfig - compilerVer = smCompiler $ envConfigSourceMap envConfig - runRIO (WithGHC cp bc) $ do - smActual <- actualFromGhc (bcSMWanted bc) compilerVer - let actualPkgs = Map.keysSet (smaDeps smActual) <> Map.keysSet (smaProject smActual) - prunedActual = smActual { - smaGlobal = pruneGlobals (smaGlobal smActual) actualPkgs - } - targets <- parseTargets needTargets haddockDeps boptsCLI prunedActual - sourceMap <- loadSourceMap targets boptsCLI smActual - return $ - envConfig - {envConfigSourceMap = sourceMap, envConfigBuildOptsCLI = boptsCLI} + let bc = envConfig.buildConfig + cp = envConfig.compilerPaths + compilerVer = envConfig.sourceMap.compiler + runRIO (WithGHC cp bc) $ do + smActual <- actualFromGhc bc.smWanted compilerVer + let actualPkgs = + Map.keysSet smActual.deps <> Map.keysSet smActual.project + prunedActual = smActual + { globals = pruneGlobals smActual.globals actualPkgs } + targets <- parseTargets needTargets haddockDeps boptsCLI prunedActual + 
sourceMap <- loadSourceMap targets boptsCLI smActual + pure $ envConfig + { sourceMap = sourceMap + , buildOptsCLI = boptsCLI + } -- | Some commands (script, ghci and exec) set targets dynamically -- see also the note about only local targets for rebuildEnv -withNewLocalBuildTargets :: HasEnvConfig env => [Text] -> RIO env a -> RIO env a +withNewLocalBuildTargets :: + HasEnvConfig env + => [Text] + -> RIO env a + -> RIO env a withNewLocalBuildTargets targets f = do - envConfig <- view $ envConfigL - haddockDeps <- view $ configL.to configBuild.to shouldHaddockDeps - let boptsCLI = envConfigBuildOptsCLI envConfig - envConfig' <- rebuildEnv envConfig NeedTargets haddockDeps $ - boptsCLI {boptsCLITargets = targets} - local (set envConfigL envConfig') f + envConfig <- view envConfigL + haddockDeps <- view $ configL . to (.build) . to shouldHaddockDeps + let boptsCLI = envConfig.buildOptsCLI + envConfig' <- + rebuildEnv envConfig NeedTargets haddockDeps $ boptsCLI + { targetsCLI = targets} + local (set envConfigL envConfig') f -- | Add the include and lib paths to the given Config addIncludeLib :: ExtraDirs -> Config -> Config -addIncludeLib (ExtraDirs _bins includes libs) config = config - { configExtraIncludeDirs = - configExtraIncludeDirs config ++ - map toFilePathNoTrailingSep includes - , configExtraLibDirs = - configExtraLibDirs config ++ - map toFilePathNoTrailingSep libs - } +addIncludeLib extraDirs config = config + { extraIncludeDirs = + config.extraIncludeDirs ++ map toFilePathNoTrailingSep extraDirs.includes + , extraLibDirs = + config.extraLibDirs ++ map toFilePathNoTrailingSep extraDirs.libs + } -- | Ensure both the compiler and the msys toolchain are installed and -- provide the PATHs to add if necessary -ensureCompilerAndMsys - :: (HasBuildConfig env, HasGHCVariant env) +ensureCompilerAndMsys :: + (HasBuildConfig env, HasGHCVariant env) => SetupOpts -> RIO env (CompilerPaths, ExtraDirs) ensureCompilerAndMsys sopts = do - actual <- either throwIO pure $ 
wantedToActual $ soptsWantedCompiler sopts - didWarn <- warnUnsupportedCompiler $ getGhcVersion actual - getSetupInfo' <- memoizeRef getSetupInfo - (cp, ghcPaths) <- ensureCompiler sopts getSetupInfo' + mmsys2Tool <- ensureMsys sopts getSetupInfo' + mmsysPaths <- maybe (pure Nothing) (fmap Just . toolExtraDirs) mmsys2Tool + actual <- either throwIO pure $ wantedToActual sopts.wantedCompiler + didWarn <- warnUnsupportedCompiler $ getGhcVersion actual + -- Modify the initial environment to include the MSYS2 path, if MSYS2 is being + -- used + (cp, ghcPaths) <- runWithMSYS mmsysPaths $ ensureCompiler sopts getSetupInfo' warnUnsupportedCompilerCabal cp didWarn - mmsys2Tool <- ensureMsys sopts getSetupInfo' - paths <- - case mmsys2Tool of - Nothing -> pure ghcPaths - Just msys2Tool -> do - msys2Paths <- extraDirs msys2Tool - pure $ ghcPaths <> msys2Paths + let paths = maybe ghcPaths (ghcPaths <>) mmsysPaths pure (cp, paths) -- | See -warnUnsupportedCompiler :: HasLogFunc env => Version -> RIO env Bool +warnUnsupportedCompiler :: + (HasConfig env, HasTerm env) + => Version + -> RIO env Bool warnUnsupportedCompiler ghcVersion = do + notifyIfGhcUntested <- view $ configL . to (.notifyIfGhcUntested) if - | ghcVersion < mkVersion [7, 8] -> do - logWarn $ - "Stack will almost certainly fail with GHC below version 7.8, requested " <> - fromString (versionString ghcVersion) - logWarn "Valiantly attempting to run anyway, but I know this is doomed" - logWarn "For more information, see: https://github.com/commercialhaskell/stack/issues/648" - logWarn "" + | ghcVersion < mkVersion [8, 4] -> do + prettyWarnL + [ flow "Stack will almost certainly fail with GHC below version 8.4, \ + \requested" + , fromString (versionString ghcVersion) <> "." + , flow "Valiantly attempting to run anyway, but this is doomed." 
+ ] pure True - | ghcVersion >= mkVersion [8, 11] -> do - logWarn $ - "Stack has not been tested with GHC versions above 8.10, and using " <> - fromString (versionString ghcVersion) <> - ", this may fail" + | ghcVersion >= mkVersion [9, 15] && notifyIfGhcUntested -> do + prettyWarnL + [ flow "Stack has not been tested with GHC versions 9.16 and above, \ + \and using" + , fromString (versionString ghcVersion) <> "," + , flow "this may fail." + ] pure True | otherwise -> do logDebug "Asking for a supported GHC version" pure False -- | See -warnUnsupportedCompilerCabal - :: HasLogFunc env +warnUnsupportedCompilerCabal :: + (HasConfig env, HasTerm env) => CompilerPaths -> Bool -- ^ already warned about GHC? -> RIO env () warnUnsupportedCompilerCabal cp didWarn = do - unless didWarn $ void $ warnUnsupportedCompiler $ getGhcVersion $ cpCompilerVersion cp - let cabalVersion = cpCabalVersion cp - + unless didWarn $ + void $ warnUnsupportedCompiler $ getGhcVersion cp.compilerVersion + let cabalVersion = cp.cabalVersion + notifyIfCabalUntested <- view $ configL . to (.notifyIfCabalUntested) if - | cabalVersion < mkVersion [1, 19, 2] -> do - logWarn $ "Stack no longer supports Cabal versions below 1.19.2," - logWarn $ "but version " <> fromString (versionString cabalVersion) <> " was found." - logWarn "This invocation will most likely fail." - logWarn "To fix this, either use an older version of Stack or a newer resolver" - logWarn "Acceptable resolvers: lts-3.0/nightly-2015-05-05 or later" - | cabalVersion >= mkVersion [3, 3] -> - logWarn $ - "Stack has not been tested with Cabal versions above 3.2, but version " <> - fromString (versionString cabalVersion) <> - " was found, this may fail" + | cabalVersion < mkVersion [2, 2] -> do + -- Due to a bug, Stack 2.15.1 does not support Cabal < 2. 
+ let downgradeRecommendation = fillSep $ + [ flow "Stack 2.15.5 or earlier" ] + <> [ flow "(except Stack 2.15.1)" | cabalVersion < mkVersion [2] ] + prettyWarnL + [ flow "Stack uses the version of the Cabal package that comes with \ + \the specified version of GHC. However, Stack no longer \ + \supports such Cabal versions before 2.2. Version" + , fromString (versionString cabalVersion) + , flow "was found. This invocation of Stack may fail. To fix this, \ + \either use" + , downgradeRecommendation + , flow "or use a snapshot that specifies a version of GHC that is \ + \8.4 or later. Stackage LTS Haskell 12.0" + , parens (style Shell "lts-12.0") + , flow "or later or Nightly 2018-03-13" + , parens (style Shell "nightly-2018-03-13") + , flow "or later specify such GHC versions." + ] + | cabalVersion >= mkVersion [3, 17] && notifyIfCabalUntested -> + prettyWarnL + [ flow "Stack has not been tested with Cabal versions 3.18 and \ + \above, but version" + , fromString (versionString cabalVersion) + , flow "was found, this may fail." + ] | otherwise -> pure () --- | Ensure that the msys toolchain is installed if necessary and --- provide the PATHs to add if necessary -ensureMsys - :: HasBuildConfig env +-- | Ensure that the msys toolchain is installed if necessary and provide the +-- PATHs to add if necessary +ensureMsys :: + HasBuildConfig env => SetupOpts -> Memoized SetupInfo -> RIO env (Maybe Tool) ensureMsys sopts getSetupInfo' = do - platform <- view platformL - localPrograms <- view $ configL.to configLocalPrograms + localPrograms <- view $ configL . 
to (.localPrograms) installed <- listInstalled localPrograms + view platformL >>= \case + Platform _ Cabal.Windows | not sopts.skipMsys -> + case getInstalledTool installed (mkPackageName "msys2") (const True) of + Just tool -> pure (Just tool) + Nothing + | sopts.installMsysIfMissing -> do + si <- runMemoized getSetupInfo' + let msysDir = fillSep + [ style Dir "msys2-yyyymmdd" + , flow "(where yyyymmdd is the date-based version)" + ] + osKey <- getOSKey "MSYS2" msysDir + config <- view configL + VersionedDownloadInfo version info <- + case Map.lookup osKey si.msys2 of + Just x -> pure x + Nothing -> prettyThrowIO $ MSYS2NotFound osKey + let tool = Tool (PackageIdentifier (mkPackageName "msys2") version) + Just <$> downloadAndInstallTool + config.localPrograms + info + tool + (installMsys2Windows si) + | otherwise -> do + prettyWarnS "Stack is not using a Stack-supplied MSYS2." + pure Nothing + _ -> pure Nothing - case platform of - Platform _ Cabal.Windows | not (soptsSkipMsys sopts) -> - case getInstalledTool installed (mkPackageName "msys2") (const True) of - Just tool -> return (Just tool) - Nothing - | soptsInstallIfMissing sopts -> do - si <- runMemoized getSetupInfo' - osKey <- getOSKey platform - config <- view configL - VersionedDownloadInfo version info <- - case Map.lookup osKey $ siMsys2 si of - Just x -> return x - Nothing -> throwString $ "MSYS2 not found for " ++ T.unpack osKey - let tool = Tool (PackageIdentifier (mkPackageName "msys2") version) - Just <$> downloadAndInstallTool (configLocalPrograms config) info tool (installMsys2Windows osKey si) - | otherwise -> do - logWarn "Continuing despite missing tool: msys2" - return Nothing - _ -> return Nothing - -installGhcBindist - :: HasBuildConfig env +installGhcBindist :: + HasBuildConfig env => SetupOpts -> Memoized SetupInfo -> [Tool] -> RIO env (Tool, CompilerBuild) installGhcBindist sopts getSetupInfo' installed = do - Platform expectedArch _ <- view platformL - let wanted = soptsWantedCompiler 
sopts - isWanted = isWantedCompiler (soptsCompilerCheck sopts) (soptsWantedCompiler sopts) - config <- view configL - ghcVariant <- view ghcVariantL - wc <- either throwIO (pure . whichCompiler) $ wantedToActual wanted - possibleCompilers <- - case wc of - Ghc -> do - ghcBuilds <- getGhcBuilds - forM ghcBuilds $ \ghcBuild -> do - ghcPkgName <- parsePackageNameThrowing ("ghc" ++ ghcVariantSuffix ghcVariant ++ compilerBuildSuffix ghcBuild) - return (getInstalledTool installed ghcPkgName (isWanted . ACGhc), ghcBuild) - let existingCompilers = concatMap - (\(installedCompiler, compilerBuild) -> - case (installedCompiler, soptsForceReinstall sopts) of - (Just tool, False) -> [(tool, compilerBuild)] - _ -> []) - possibleCompilers - logDebug $ - "Found already installed GHC builds: " <> - mconcat (intersperse ", " (map (fromString . compilerBuildName . snd) existingCompilers)) - case existingCompilers of - (tool, build_):_ -> return (tool, build_) - [] - | soptsInstallIfMissing sopts -> do - si <- runMemoized getSetupInfo' - downloadAndInstallPossibleCompilers - (map snd possibleCompilers) - si - (soptsWantedCompiler sopts) - (soptsCompilerCheck sopts) - (soptsGHCBindistURL sopts) - | otherwise -> do - let suggestion = fromMaybe - (mconcat - [ "To install the correct GHC into " - , T.pack (toFilePath (configLocalPrograms config)) - , ", try running \"stack setup\" or use the \"--install-ghc\" flag." - , " To use your system GHC installation, run \"stack config set system-ghc --global true\", or use the \"--system-ghc\" flag." - ]) - (soptsResolveMissingGHC sopts) - throwM $ CompilerVersionMismatch - Nothing -- FIXME ((\(x, y, _) -> (x, y)) <$> msystem) - (soptsWantedCompiler sopts, expectedArch) - ghcVariant - (case possibleCompilers of - [] -> CompilerBuildStandard - (_, compilerBuild):_ -> compilerBuild) - (soptsCompilerCheck sopts) - (soptsStackYaml sopts) - suggestion - --- | Ensure compiler is installed, without worrying about msys -ensureCompiler - :: forall env. 
(HasBuildConfig env, HasGHCVariant env) + Platform expectedArch _ <- view platformL + let wanted = sopts.wantedCompiler + isWanted = + isWantedCompiler sopts.compilerCheck sopts.wantedCompiler + config <- view configL + ghcVariant <- view ghcVariantL + wc <- either throwIO (pure . whichCompiler) $ wantedToActual wanted + possibleCompilers <- + case wc of + Ghc -> do + ghcBuilds <- getGhcBuilds + forM ghcBuilds $ \ghcBuild -> do + ghcPkgName <- parsePackageNameThrowing + ( "ghc" + ++ ghcVariantSuffix ghcVariant + ++ compilerBuildSuffix ghcBuild + ) + pure (getInstalledTool installed ghcPkgName (isWanted . ACGhc), ghcBuild) + let existingCompilers = concatMap + (\(installedCompiler, compilerBuild) -> + case (installedCompiler, sopts.forceReinstall) of + (Just tool, False) -> [(tool, compilerBuild)] + _ -> []) + possibleCompilers + globalOpts = config.runner.globalOpts + wantedCompilerSetter + | isJust globalOpts.compiler = CompilerAtCommandLine + | isJust globalOpts.snapshot = SnapshotAtCommandLine + | otherwise = YamlConfiguration sopts.configFile + logDebug $ + "Found already installed GHC builds: " + <> mconcat (intersperse ", " (map (fromString . compilerBuildName . snd) existingCompilers)) + case existingCompilers of + (tool, build_):_ -> pure (tool, build_) + [] + | sopts.installGhcIfMissing -> do + si <- runMemoized getSetupInfo' + downloadAndInstallPossibleCompilers + (map snd possibleCompilers) + si + sopts.wantedCompiler + sopts.compilerCheck + sopts.ghcBindistURL + | otherwise -> do + let suggestion = + fromMaybe defaultSuggestion sopts.resolveMissingGHC + defaultSuggestion = fillSep + [ flow "To install the correct version of GHC into the \ + \subdirectory for the specified platform in Stack's \ + \directory for local tools" + , parens (pretty config.localPrograms) <> "," + , flow "try running" + , style Shell (flow "stack setup") + , flow "or use the" + , style Shell "--install-ghc" + , flow "flag. 
To use your system GHC installation, run" + , style + Shell + (flow "stack config set system-ghc --global true") + <> "," + , flow "or use the" + , style Shell "--system-ghc" + , "flag." + ] + + prettyThrowM $ CompilerVersionMismatch + Nothing -- FIXME ((\(x, y, _) -> (x, y)) <$> msystem) + (sopts.wantedCompiler, expectedArch) + ghcVariant + (case possibleCompilers of + [] -> CompilerBuildStandard + (_, compilerBuild):_ -> compilerBuild) + sopts.compilerCheck + wantedCompilerSetter + suggestion + +-- | Ensure compiler is installed. +ensureCompiler :: + forall env. (HasConfig env, HasBuildConfig env, HasGHCVariant env) => SetupOpts -> Memoized SetupInfo - -> RIO env (CompilerPaths, ExtraDirs) + -> RIO (WithMSYS env) (CompilerPaths, ExtraDirs) ensureCompiler sopts getSetupInfo' = do - let wanted = soptsWantedCompiler sopts - wc <- either throwIO (pure . whichCompiler) $ wantedToActual wanted - - Platform expectedArch _ <- view platformL - - let canUseCompiler cp - | soptsSkipGhcCheck sopts = pure cp - | not $ isWanted $ cpCompilerVersion cp = throwString "Not the compiler version we want" - | cpArch cp /= expectedArch = throwString "Not the architecture we want" - | otherwise = pure cp - isWanted = isWantedCompiler (soptsCompilerCheck sopts) (soptsWantedCompiler sopts) - - let checkCompiler :: Path Abs File -> RIO env (Maybe CompilerPaths) - checkCompiler compiler = do - eres <- tryAny $ pathsFromCompiler wc CompilerBuildStandard False compiler >>= canUseCompiler - case eres of - Left e -> do - logDebug $ "Not using compiler at " <> displayShow (toFilePath compiler) <> ": " <> displayShow e - pure Nothing - Right cp -> pure $ Just cp + let wanted = sopts.wantedCompiler + wc <- either throwIO (pure . 
whichCompiler) $ wantedToActual wanted + + hook <- ghcInstallHook + hookIsExecutable <- handleIO (\_ -> pure False) $ if osIsWindows + then doesFileExist hook -- can't really detect executable on windows, only + -- file extension + else executable <$> getPermissions hook + + Platform expectedArch _ <- view platformL + + let canUseCompiler cp + | sopts.skipGhcCheck = pure cp + | not $ isWanted cp.compilerVersion = + prettyThrowIO UnwantedCompilerVersion + | cp.arch /= expectedArch = prettyThrowIO UnwantedArchitecture + | otherwise = pure cp + isWanted = + isWantedCompiler sopts.compilerCheck sopts.wantedCompiler + + let checkCompiler :: Path Abs File -> RIO (WithMSYS env) (Maybe CompilerPaths) + checkCompiler compiler = do + eres <- tryAny $ + pathsFromCompiler wc CompilerBuildStandard False compiler >>= canUseCompiler + case eres of + Left e -> do + logDebug $ + "Not using compiler at " + <> displayShow (toFilePath compiler) + <> ": " + <> displayShow e + pure Nothing + Right cp -> pure $ Just cp + + mcp <- + if | sopts.useSystem -> do + logDebug "Getting system compiler version" + runConduit $ + sourceSystemCompilers wanted .| + concatMapMC checkCompiler .| + await + | hookIsExecutable -> do + -- if the hook fails, we fall through to Stack's sandboxed installation + hookGHC <- runGHCInstallHook sopts hook + maybe (pure Nothing) checkCompiler hookGHC + | otherwise -> pure Nothing + case mcp of + Nothing -> ensureSandboxedCompiler sopts getSetupInfo' + Just cp -> do + let paths = ExtraDirs + { bins = [parent cp.compiler] + , includes = [] + , libs = [] + } + pure (cp, paths) - mcp <- - if soptsUseSystem sopts - then do - logDebug "Getting system compiler version" - runConduit $ - sourceSystemCompilers wanted .| - concatMapMC checkCompiler .| - await - else return Nothing - case mcp of - Nothing -> ensureSandboxedCompiler sopts getSetupInfo' - Just cp -> do - let paths = ExtraDirs { edBins = [parent $ cpCompiler cp], edInclude = [], edLib = [] } - pure (cp, paths) - 
-ensureSandboxedCompiler - :: HasBuildConfig env +-- | Runs @STACK_ROOT\/hooks\/ghc-install.sh@. +-- +-- Reads and possibly validates the output of the process as the GHC binary and +-- returns it. +runGHCInstallHook :: + HasBuildConfig env + => SetupOpts + -> Path Abs File + -> RIO env (Maybe (Path Abs File)) +runGHCInstallHook sopts hook = do + logDebug "Getting hook installed compiler version" + let wanted = sopts.wantedCompiler + menv0 <- view processContextL + menv <- mkProcessContext (Map.union (wantedCompilerToEnv wanted) $ + removeHaskellEnvVars (view envVarsL menv0)) + (exit, out) <- withProcessContext menv $ proc "sh" [toFilePath hook] readProcessStdout + case exit of + ExitSuccess -> do + let ghcPath = stripNewline . TL.unpack . TL.decodeUtf8With T.lenientDecode $ out + case parseAbsFile ghcPath of + Just compiler -> do + when sopts.sanityCheck $ sanityCheck compiler + logDebug ("Using GHC compiler at: " <> fromString (toFilePath compiler)) + pure (Just compiler) + Nothing -> do + prettyWarnL + [ flow "Path to GHC binary is not a valid path:" + , style Dir (fromString ghcPath) <> "." + ] + pure Nothing + ExitFailure i -> do + prettyWarnL + [ flow "GHC install hook exited with code:" + , style Error (fromString $ show i) <> "." 
+ ] + pure Nothing + where + wantedCompilerToEnv :: WantedCompiler -> EnvVars + wantedCompilerToEnv (WCGhc ver) = + Map.fromList [ ("HOOK_GHC_TYPE", "bindist") + , ("HOOK_GHC_VERSION", T.pack (versionString ver)) + ] + wantedCompilerToEnv (WCGhcGit commit flavor) = + Map.fromList [ ("HOOK_GHC_TYPE", "git") + , ("HOOK_GHC_COMMIT", commit) + , ("HOOK_GHC_FLAVOR", flavor) + , ("HOOK_GHC_FLAVOUR", flavor) + ] + wantedCompilerToEnv (WCGhcjs ghcjs_ver ghc_ver) = + Map.fromList [ ("HOOK_GHC_TYPE", "ghcjs") + , ("HOOK_GHC_VERSION", T.pack (versionString ghc_ver)) + , ("HOOK_GHCJS_VERSION", T.pack (versionString ghcjs_ver)) + ] + newlines :: [Char] + newlines = ['\n', '\r'] + + stripNewline :: String -> String + stripNewline = filter (`notElem` newlines) + +ensureSandboxedCompiler :: + HasBuildConfig env => SetupOpts -> Memoized SetupInfo - -> RIO env (CompilerPaths, ExtraDirs) + -> RIO (WithMSYS env) (CompilerPaths, ExtraDirs) ensureSandboxedCompiler sopts getSetupInfo' = do - let wanted = soptsWantedCompiler sopts - -- List installed tools - config <- view configL - let localPrograms = configLocalPrograms config - installed <- listInstalled localPrograms - logDebug $ "Installed tools: \n - " <> mconcat (intersperse "\n - " (map (fromString . toolString) installed)) - (compilerTool, compilerBuild) <- - case soptsWantedCompiler sopts of - -- shall we build GHC from source? - WCGhcGit commitId flavour -> buildGhcFromSource getSetupInfo' installed (configCompilerRepository config) commitId flavour - _ -> installGhcBindist sopts getSetupInfo' installed - paths <- extraDirs compilerTool - - wc <- either throwIO (pure . 
whichCompiler) $ wantedToActual wanted - menv0 <- view processContextL - m <- either throwM return - $ augmentPathMap (toFilePath <$> edBins paths) (view envVarsL menv0) - menv <- mkProcessContext (removeHaskellEnvVars m) - - names <- - case wanted of - WCGhc version -> pure ["ghc-" ++ versionString version, "ghc"] - WCGhcGit{} -> pure ["ghc"] - WCGhcjs{} -> throwIO GhcjsNotSupported - let loop [] = do - logError $ "Looked for sandboxed compiler named one of: " <> displayShow names - logError $ "Could not find it on the paths " <> displayShow (edBins paths) - throwString "Could not find sandboxed compiler" - loop (x:xs) = do - res <- findExecutable x - case res of - Left _ -> loop xs - Right y -> parseAbsFile y - compiler <- withProcessContext menv $ loop names - - when (soptsSanityCheck sopts) $ sanityCheck compiler - cp <- pathsFromCompiler wc compilerBuild True compiler - pure (cp, paths) - -pathsFromCompiler - :: forall env. HasConfig env + let wanted = sopts.wantedCompiler + -- List installed tools + config <- view configL + let localPrograms = config.localPrograms + installed <- listInstalled localPrograms + logDebug $ + "Installed tools: \n - " + <> mconcat (intersperse "\n - " (map (fromString . toolString) installed)) + (compilerTool, compilerBuild) <- + case sopts.wantedCompiler of + -- shall we build GHC from source? + WCGhcGit commitId flavour -> + buildGhcFromSource + getSetupInfo' + installed + config.compilerRepository + config.compilerTarget + config.compilerBindistPath + commitId + flavour + _ -> installGhcBindist sopts getSetupInfo' installed + paths <- toolExtraDirs compilerTool + + wc <- either throwIO (pure . 
whichCompiler) $ wantedToActual wanted + menv0 <- view processContextL + m <- either throwM pure + $ augmentPathMap (toFilePath <$> paths.bins) (view envVarsL menv0) + menv <- mkProcessContext (removeHaskellEnvVars m) + + names <- + case wanted of + WCGhc version -> pure ["ghc-" ++ versionString version, "ghc"] + WCGhcGit{} -> pure ["ghc"] + WCGhcjs{} -> throwIO GhcjsNotSupported + + -- Previously, we used findExecutable to locate these executables. This was + -- actually somewhat sloppy, as it could discover executables outside of the + -- sandbox. This led to a specific issue on Windows with GHC 9.0.1. See + -- https://gitlab.haskell.org/ghc/ghc/-/issues/20074. Instead, now, we look + -- on the paths specified only. + let loop [] = prettyThrowIO $ SandboxedCompilerNotFound names paths.bins + loop (x:xs) = do + res <- liftIO $ + D.findExecutablesInDirectories (map toFilePath paths.bins) x + case res of + [] -> loop xs + compiler:rest -> do + unless (null rest) $ do + prettyWarn $ + flow "Found multiple candidate compilers:" + <> line + <> bulletedList (map fromString res) + <> blankLine + <> fillSep + [ flow "This usually indicates a failed installation. \ + \Trying anyway with" + , fromString compiler + ] + parseAbsFile compiler + compiler <- withProcessContext menv $ do + compiler <- loop names + + -- Run this here to ensure that the sanity check uses the modified + -- environment, otherwise we may infect GHC_PACKAGE_PATH and break sanity + -- checks. + when sopts.sanityCheck $ sanityCheck compiler + + pure compiler + + cp <- pathsFromCompiler wc compilerBuild True compiler + pure (cp, paths) + +pathsFromCompiler :: + forall env. 
HasConfig env => WhichCompiler -> CompilerBuild -> Bool -> Path Abs File -- ^ executable filepath -> RIO env CompilerPaths -pathsFromCompiler wc compilerBuild isSandboxed compiler = withCache $ handleAny onErr $ do +pathsFromCompiler wc build sandboxed compiler = + withCache $ handleAny onErr $ do let dir = toFilePath $ parent compiler + suffixNoVersion | osIsWindows = ".exe" | otherwise = "" @@ -699,15 +1447,21 @@ pathsFromCompiler wc compilerBuild isSandboxed compiler = withCache $ handleAny suffixes = maybe id (:) msuffixWithVersion [suffixNoVersion] findHelper :: (WhichCompiler -> [String]) -> RIO env (Path Abs File) findHelper getNames = do - let toTry = [dir ++ name ++ suffix | suffix <- suffixes, name <- getNames wc] - loop [] = throwString $ "Could not find any of: " <> show toTry - loop (guessedPath':rest) = do - guessedPath <- parseAbsFile guessedPath' + toTry <- mapM + parseAbsFile + [ dir ++ name ++ suffix + | suffix <- suffixes, name <- getNames wc + ] + let loop [] = throwIO $ PrettyException $ ExecutableNotFound toTry + loop (guessedPath:rest) = do exists <- doesFileExist guessedPath if exists then pure guessedPath else loop rest - logDebug $ "Looking for executable(s): " <> displayShow toTry + prettyDebug $ + flow "Looking for executable(s):" + <> line + <> bulletedList (map pretty toTry) loop toTry pkg <- fmap GhcPkgExe $ findHelper $ \case Ghc -> ["ghc-pkg"] @@ -721,271 +1475,393 @@ pathsFromCompiler wc compilerBuild isSandboxed compiler = withCache $ handleAny haddock <- findHelper $ \case Ghc -> ["haddock", "haddock-ghc"] - infobs <- proc (toFilePath compiler) ["--info"] + ghcInfo <- proc (toFilePath compiler) ["--info"] $ fmap (toStrictBytes . fst) . 
readProcess_ infotext <- - case decodeUtf8' infobs of - Left e -> throwString $ "GHC info is not valid UTF-8: " ++ show e + case decodeUtf8' ghcInfo of + Left e -> prettyThrowIO $ GHCInfoNotValidUTF8 e Right info -> pure info infoPairs :: [(String, String)] <- case readMaybe $ T.unpack infotext of - Nothing -> throwString "GHC info does not parse as a list of pairs" + Nothing -> prettyThrowIO GHCInfoNotListOfPairs Just infoPairs -> pure infoPairs let infoMap = Map.fromList infoPairs eglobaldb <- tryAny $ case Map.lookup "Global Package DB" infoMap of - Nothing -> throwString "Key 'Global Package DB' not found in GHC info" + Nothing -> prettyThrowIO GHCInfoMissingGlobalPackageDB Just db -> parseAbsDir db arch <- case Map.lookup "Target platform" infoMap of - Nothing -> throwString "Key 'Target platform' not found in GHC info" + Nothing -> prettyThrowIO GHCInfoMissingTargetPlatform Just targetPlatform -> case simpleParse $ takeWhile (/= '-') targetPlatform of - Nothing -> throwString $ "Invalid target platform in GHC info: " ++ show targetPlatform + Nothing -> + prettyThrowIO $ GHCInfoTargetPlatformInvalid targetPlatform Just arch -> pure arch - compilerVer <- + compilerVersion <- case wc of Ghc -> case Map.lookup "Project version" infoMap of Nothing -> do - logWarn "Key 'Project version' not found in GHC info" + prettyWarnS "Key 'Project version' not found in GHC info." getCompilerVersion wc compiler Just versionString' -> ACGhc <$> parseVersionThrowing versionString' - globaldb <- + globalDB <- case eglobaldb of Left e -> do - logWarn "Parsing global DB from GHC info failed" - logWarn $ displayShow e - logWarn "Asking ghc-pkg directly" + prettyWarn $ + flow "Stack failed to parse the global DB from GHC info." + <> blankLine + <> flow "While parsing, Stack encountered the error:" + <> blankLine + <> string (show e) + <> blankLine + <> flow "Asking ghc-pkg directly." 
withProcessContext menv $ getGlobalDB pkg Right x -> pure x globalDump <- withProcessContext menv $ globalsFromDump pkg - cabalPkgVer <- + cabalVersion <- case Map.lookup cabalPackageName globalDump of - Nothing -> throwString $ "Cabal library not found in global package database for " ++ toFilePath compiler - Just dp -> pure $ pkgVersion $ dpPackageIdent dp - - return CompilerPaths - { cpBuild = compilerBuild - , cpArch = arch - , cpSandboxed = isSandboxed - , cpCompilerVersion = compilerVer - , cpCompiler = compiler - , cpPkg = pkg - , cpInterpreter = interpreter - , cpHaddock = haddock - , cpCabalVersion = cabalPkgVer - , cpGlobalDB = globaldb - , cpGhcInfo = infobs - , cpGlobalDump = globalDump + Nothing -> prettyThrowIO $ CabalNotFound compiler + Just dp -> pure $ pkgVersion dp.packageIdent + + pure CompilerPaths + { build + , arch + , sandboxed + , compilerVersion + , compiler + , pkg + , interpreter + , haddock + , cabalVersion + , globalDB + , ghcInfo + , globalDump } - where - onErr = throwIO . InvalidGhcAt compiler - - withCache inner = do - eres <- tryAny $ loadCompilerPaths compiler compilerBuild isSandboxed - mres <- - case eres of - Left e -> do - logWarn $ "Trouble loading CompilerPaths cache: " <> displayShow e - pure Nothing - Right x -> pure x - case mres of - Just cp -> cp <$ logDebug "Loaded compiler information from cache" - Nothing -> do - cp <- inner - saveCompilerPaths cp `catchAny` \e -> - logWarn ("Unable to save CompilerPaths cache: " <> displayShow e) - pure cp - -buildGhcFromSource :: forall env. 
- ( HasTerm env - , HasProcessContext env - , HasBuildConfig env - ) => Memoized SetupInfo -> [Tool] -> CompilerRepository -> Text -> Text - -> RIO env (Tool, CompilerBuild) -buildGhcFromSource getSetupInfo' installed (CompilerRepository url) commitId flavour = do - config <- view configL - let compilerTool = ToolGhcGit commitId flavour - - -- detect when the correct GHC is already installed - if compilerTool `elem` installed - then return (compilerTool,CompilerBuildStandard) - else do - let repo = Repo - { repoCommit = commitId - , repoUrl = url - , repoType = RepoGit - , repoSubdir = mempty - } + where + onErr = throwIO . PrettyException . InvalidGhcAt compiler - -- clone the repository and execute the given commands - Pantry.withRepo repo $ do - -- withRepo is guaranteed to set workingDirL, so let's get it - mcwd <- traverse parseAbsDir =<< view workingDirL - let cwd = fromMaybe (error "Invalid working directory") mcwd - - threads <- view $ configL.to configJobs - let - hadrianArgs = fmap T.unpack - [ "-c" -- run ./boot and ./configure - , "-j" <> tshow threads -- parallel build - , "--flavour=" <> flavour -- selected flavour - , "binary-dist" - ] - hadrianCmd - | osIsWindows = hadrianCmdWindows - | otherwise = hadrianCmdPosix - - logSticky $ "Building GHC from source with `" - <> RIO.display flavour + withCache inner = do + eres <- tryAny $ loadCompilerPaths compiler build sandboxed + mres <- + case eres of + Left e -> do + prettyWarn $ + flow "Trouble loading CompilerPaths cache:" + <> blankLine + <> string (displayException e) + pure Nothing + Right x -> pure x + case mres of + Just cp -> cp <$ logDebug "Loaded compiler information from cache" + Nothing -> do + cp <- inner + saveCompilerPaths cp `catchAny` \e -> + prettyWarn $ + flow "Unable to save CompilerPaths cache:" + <> blankLine + <> string (displayException e) + pure cp + +buildGhcFromSource :: + forall env. 
( HasTerm env, HasProcessContext env, HasBuildConfig env) + => Memoized SetupInfo + -> [Tool] + -> CompilerRepository + -> CompilerTarget + -- ^ The Hadrian build target. + -> CompilerBindistPath + -- ^ The Hadrian path to the built binary distribution. + -> Text + -- ^ Commit ID. + -> Text + -- ^ Hadrian flavour. + -> RIO (WithMSYS env) (Tool, CompilerBuild) +buildGhcFromSource + getSetupInfo' + installed + (CompilerRepository url) + (CompilerTarget hadrianBuildTarget) + (CompilerBindistPath hadrianBindistPath) + commitId + flavour + = do + config <- view configL + let compilerTool = ToolGhcGit commitId flavour + -- detect when the correct GHC is already installed + if compilerTool `elem` installed + then pure (compilerTool, CompilerBuildStandard) + else + -- clone the repository and execute the given commands + withRepo (SimpleRepo url commitId RepoGit) $ do + -- withRepo is guaranteed to set workingDirL, so let's get it + mcwd <- traverse parseAbsDir =<< view workingDirL + cwd <- maybe (throwIO WorkingDirectoryInvalidBug) pure mcwd + let threads = config.jobs + relFileHadrianStackDotYaml' = toFilePath relFileHadrianStackDotYaml + ghcBootScriptPath = cwd ghcBootScript + boot = if osIsWindows + then proc "python3" ["boot"] runProcess_ + else + proc (toFilePath ghcBootScriptPath) [] runProcess_ + stack args = proc "stack" args'' runProcess_ + where + args'' = "--stack-yaml=" <> relFileHadrianStackDotYaml' : args' + -- If a snapshot is specified on the command line, Stack will + -- apply it. This allows the snapshot specified in Hadrian's + -- stack.yaml file to be overridden. + args' = maybe args addSnapshot config.snapshot + addSnapshot snapshot = "--snapshot=" <> show snapshot : args + happy = stack ["install", "happy"] + alex = stack ["install", "alex"] + -- Executed in the Stack environment, because GHC is required. 
+ configure = stack ("exec" : "--" : ghcConfigure) + ghcConfigure + | osIsWindows = ghcConfigureWindows + | osIsMacOS = ghcConfigureMacOS + | otherwise = ghcConfigurePosix + hadrianScripts + | osIsWindows = hadrianScriptsWindows + | otherwise = hadrianScriptsPosix + hadrianArgs = fmap T.unpack + [ "-j" <> tshow threads -- parallel build + , "--flavour=" <> flavour -- selected flavour + , hadrianBuildTarget + ] + foundHadrianPaths <- + filterM doesFileExist $ (cwd ) <$> hadrianScripts + hadrianPath <- maybe (prettyThrowIO HadrianScriptNotFound) pure $ + listToMaybe foundHadrianPaths + exists <- doesFileExist ghcBootScriptPath + unless exists $ prettyThrowIO GhcBootScriptNotFound + ensureConfigureScript cwd + logInfo "Running GHC boot script..." + boot + doesExecutableExist "happy" >>= \case + True -> logInfo "happy executable installed on the PATH." + False -> do + logInfo "Installing happy executable..." + happy + doesExecutableExist "alex" >>= \case + True -> logInfo "alex executable installed on the PATH." + False -> do + logInfo "Installing alex executable..." + alex + logInfo "Running GHC configure script..." + configure + logSticky $ + "Building GHC from source with `" + <> display flavour <> "` flavour. It can take a long time (more than one hour)..." 
- - -- We need to provide an absolute path to the script since - -- the process package only sets working directory _after_ - -- discovering the executable - proc (toFilePath (cwd hadrianCmd)) hadrianArgs runProcess_ - - -- find the bindist and install it - bindistPath <- parseRelDir "_build/bindist" - (_,files) <- listDir (cwd bindistPath) - let - isBindist p = do - extension <- fileExtension (filename p) - - return $ "ghc-" `isPrefixOf` (toFilePath (filename p)) - && extension == ".xz" - - mbindist <- filterM isBindist files - case mbindist of - [bindist] -> do - let bindist' = T.pack (toFilePath bindist) - dlinfo = DownloadInfo - { downloadInfoUrl = bindist' - -- we can specify a filepath instead of a URL - , downloadInfoContentLength = Nothing - , downloadInfoSha1 = Nothing - , downloadInfoSha256 = Nothing - } - ghcdlinfo = GHCDownloadInfo mempty mempty dlinfo - installer - | osIsWindows = installGHCWindows Nothing - | otherwise = installGHCPosix Nothing ghcdlinfo - si <- runMemoized getSetupInfo' - _ <- downloadAndInstallTool - (configLocalPrograms config) - dlinfo - compilerTool - (installer si) - return (compilerTool, CompilerBuildStandard) - _ -> do + -- We need to provide an absolute path to the script since the process + -- package only sets working directory _after_ discovering the + -- executable. 
+ proc (toFilePath hadrianPath) hadrianArgs runProcess_ + + -- find the bindist and install it + bindistPath <- parseRelDir (T.unpack hadrianBindistPath) + (_,files) <- listDir (cwd bindistPath) + let isBindist p = do + extension <- fileExtension (filename p) + + pure $ + "ghc-" `isPrefixOf` toFilePath (filename p) + && extension == ".xz" + + filterM isBindist files >>= \case + [bindist] -> do + let bindist' = T.pack (toFilePath bindist) + dlinfo = DownloadInfo + { url = bindist' + -- we can specify a filepath instead of a URL + , contentLength = Nothing + , sha1 = Nothing + , sha256 = Nothing + } + ghcdlinfo = GHCDownloadInfo mempty mempty dlinfo + installer + | osIsWindows = installGHCWindows + | otherwise = installGHCPosix ghcdlinfo + si <- runMemoized getSetupInfo' + _ <- downloadAndInstallTool + config.localPrograms + dlinfo + compilerTool + (installer si) + pure (compilerTool, CompilerBuildStandard) + _ -> do forM_ files (logDebug . fromString . (" - " ++) . toFilePath) - error "Can't find hadrian generated bindist" - + prettyThrowIO HadrianBindistNotFound --- | Determine which GHC builds to use depending on which shared libraries are available --- on the system. +-- | Determine which GHC builds to use depending on which shared libraries are +-- available on the system. getGhcBuilds :: HasConfig env => RIO env [CompilerBuild] getGhcBuilds = do - - config <- view configL - case configGHCBuild config of - Just ghcBuild -> return [ghcBuild] - Nothing -> determineGhcBuild - where - determineGhcBuild = do - -- TODO: a more reliable, flexible, and data driven approach would be to actually download small - -- "test" executables (from setup-info) that link to the same gmp/tinfo versions - -- that GHC does (i.e. built in same environment as the GHC bindist). 
The algorithm would go - -- something like this: - -- - -- check for previous 'uname -a'/`ldconfig -p` plus compiler version/variant in cache - -- if cached, then use that as suffix - -- otherwise: - -- download setup-info - -- go through all with right prefix for os/version/variant - -- first try "standard" (no extra suffix), then the rest - -- download "compatibility check" exe if not already downloaded - -- try running it - -- if successful, then choose that - -- cache compiler suffix with the uname -a and ldconfig -p output hash plus compiler version - -- - -- Of course, could also try to make a static GHC bindist instead of all this rigamarole. - - platform <- view platformL - case platform of - Platform _ Cabal.Linux -> do - -- Some systems don't have ldconfig in the PATH, so make sure to look in /sbin and /usr/sbin as well - let sbinEnv m = Map.insert - "PATH" - ("/sbin:/usr/sbin" <> maybe "" (":" <>) (Map.lookup "PATH" m)) - m - eldconfigOut - <- withModifyEnvVars sbinEnv - $ proc "ldconfig" ["-p"] - $ tryAny . fmap fst . readProcess_ - let firstWords = case eldconfigOut of - Right ldconfigOut -> mapMaybe (listToMaybe . T.words) $ - T.lines $ T.decodeUtf8With T.lenientDecode - $ LBS.toStrict ldconfigOut - Left _ -> [] - checkLib lib - | libT `elem` firstWords = do - logDebug ("Found shared library " <> libD <> " in 'ldconfig -p' output") - return True - | osIsWindows = - -- Cannot parse /usr/lib on Windows - return False - | otherwise = do - -- This is a workaround for the fact that libtinfo.so.x doesn't appear in - -- the 'ldconfig -p' output on Arch or Slackware even when it exists. - -- There doesn't seem to be an easy way to get the true list of directories - -- to scan for shared libs, but this works for our particular cases. 
- matches <- filterM (doesFileExist .( lib)) usrLibDirs - case matches of - [] -> logDebug ("Did not find shared library " <> libD) - >> return False - (path:_) -> logDebug ("Found shared library " <> libD - <> " in " <> fromString (Path.toFilePath path)) - >> return True - where - libT = T.pack (toFilePath lib) - libD = fromString (toFilePath lib) - hastinfo5 <- checkLib relFileLibtinfoSo5 - hastinfo6 <- checkLib relFileLibtinfoSo6 - hasncurses6 <- checkLib relFileLibncurseswSo6 - hasgmp5 <- checkLib relFileLibgmpSo10 - hasgmp4 <- checkLib relFileLibgmpSo3 - let libComponents = concat - [ [["tinfo6"] | hastinfo6 && hasgmp5] - , [[] | hastinfo5 && hasgmp5] - , [["ncurses6"] | hasncurses6 && hasgmp5 ] - , [["gmp4"] | hasgmp4 ] - ] - useBuilds $ map - (\c -> case c of - [] -> CompilerBuildStandard - _ -> CompilerBuildSpecialized (intercalate "-" c)) - libComponents - Platform _ Cabal.FreeBSD -> do - let getMajorVer = readMaybe <=< headMaybe . (splitOn ".") - majorVer <- getMajorVer <$> sysRelease - if majorVer >= Just (12 :: Int) then - useBuilds [CompilerBuildSpecialized "ino64"] - else - useBuilds [CompilerBuildStandard] - Platform _ Cabal.OpenBSD -> do - releaseStr <- mungeRelease <$> sysRelease - useBuilds [CompilerBuildSpecialized releaseStr] - _ -> useBuilds [CompilerBuildStandard] - useBuilds builds = do - logDebug $ - "Potential GHC builds: " <> - mconcat (intersperse ", " (map (fromString . compilerBuildName) builds)) - return builds + config <- view configL + case config.ghcBuild of + Just ghcBuild -> pure [ghcBuild] + Nothing -> determineGhcBuild + where + -- The GHCup project is also interested in the algorithm below, as it copies + -- it at GHCup.Platform.getStackGhcBuilds. If you change this algorithm, it + -- would be a courtesy to bring that to the attention of the GHCup project + -- maintainers. 
+ determineGhcBuild = do + -- TODO: a more reliable, flexible, and data driven approach would be to + -- actually download small "test" executables (from setup-info) that link to + -- the same gmp/tinfo versions that GHC does (i.e. built in same environment + -- as the GHC bindist). The algorithm would go something like this: + -- + -- check for previous 'uname -a'/`ldconfig -p` plus compiler version/variant + -- in cache. + -- if cached, then use that as suffix + -- otherwise: + -- download setup-info + -- go through all with right prefix for os/version/variant + -- first try "standard" (no extra suffix), then the rest + -- download "compatibility check" exe if not already downloaded + -- try running it + -- if successful, then choose that + -- cache compiler suffix with the uname -a and + -- ldconfig -p output hash plus compiler version + -- + -- Of course, could also try to make a static GHC bindist instead of all + -- this rigamarole. + + view platformL >>= \case + Platform _ Cabal.Linux -> do + -- Some systems don't have ldconfig in the PATH, so make sure to look in + -- /sbin and /usr/sbin as well + let sbinEnv m = Map.insert + "PATH" + ("/sbin:/usr/sbin" <> maybe "" (":" <>) (Map.lookup "PATH" m)) + m + eldconfigOut <- withModifyEnvVars sbinEnv + $ proc "ldconfig" ["-p"] + $ tryAny . fmap fst . readProcess_ + let firstWords = case eldconfigOut of + Right ldconfigOut -> mapMaybe (listToMaybe . T.words) $ + T.lines $ T.decodeUtf8With T.lenientDecode + $ LBS.toStrict ldconfigOut + Left _ -> [] + checkLib lib + | libT `elem` firstWords = do + logDebug $ + "Found shared library " + <> libD + <> " in 'ldconfig -p' output" + pure True + | osIsWindows = + -- Cannot parse /usr/lib on Windows + pure False + | otherwise = hasMatches lib usrLibDirs + -- This is a workaround for the fact that libtinfo.so.x doesn't + -- appear in the 'ldconfig -p' output on Arch or Slackware even + -- when it exists. 
There doesn't seem to be an easy way to get the + -- true list of directories to scan for shared libs, but this + -- works for our particular cases. + where + libD = fromString (toFilePath lib) + libT = T.pack (toFilePath lib) + hasMatches lib dirs = do + filterM (doesFileExist . ( lib)) dirs >>= \case + [] -> + logDebug + ( "Did not find shared library " + <> libD + ) + >> pure False + (path:_) -> + logDebug + ( "Found shared library " + <> libD + <> " in " + <> fromString (Path.toFilePath path) + ) + >> pure True + where + libD = fromString (toFilePath lib) + getLibc6Version = + -- On Alpine Linux, 'ldd --version' will send output to stderr, + -- which we wish to smother. + proc "ldd" ["--version"] (tryAny . readProcess_) <&> \case + Right (lddOut, _) -> + let lddOut' = decodeUtf8Lenient (LBS.toStrict lddOut) + in case P.parse lddVersion lddOut' of + P.Done _ result -> Just result + _ -> Nothing + Left _ -> Nothing + -- Assumes the first line of ldd has the format: + -- + -- ldd (...) nn.nn + -- + -- where nn.nn corresponds to the version of libc6. + lddVersion :: P.Parser Version + lddVersion = do + P.skipWhile (/= ')') + P.skip (== ')') + P.skipSpace + lddMajorVersion <- P.decimal + P.skip (== '.') + lddMinorVersion <- P.decimal + P.skip (not . isDigit) + pure $ mkVersion [ lddMajorVersion, lddMinorVersion ] + hasMusl <- hasMatches relFileLibcMuslx86_64So1 libDirs + mLibc6Version <- getLibc6Version + case mLibc6Version of + Just libc6Version -> logDebug $ + "Found shared library libc6 in version: " + <> fromString (versionString libc6Version) + Nothing -> logDebug + "Did not find a version of shared library libc6." 
+ let hasLibc6_2_32 = + maybe False (>= mkVersion [2 , 32]) mLibc6Version + hastinfo5 <- checkLib relFileLibtinfoSo5 + hastinfo6 <- checkLib relFileLibtinfoSo6 + hasncurses6 <- checkLib relFileLibncurseswSo6 + hasgmp5 <- checkLib relFileLibgmpSo10 + hasgmp4 <- checkLib relFileLibgmpSo3 + let libComponents = if hasMusl + then + [ ["musl"] ] + else + concat + [ if hastinfo6 && hasgmp5 + then + if hasLibc6_2_32 + then [["tinfo6"]] + else [["tinfo6-libc6-pre232"]] + else [[]] + , [ [] | hastinfo5 && hasgmp5 ] + , [ ["ncurses6"] | hasncurses6 && hasgmp5 ] + , [ ["gmp4"] | hasgmp4 ] + ] + useBuilds $ map + (\c -> case c of + [] -> CompilerBuildStandard + _ -> CompilerBuildSpecialized (intercalate "-" c)) + libComponents + Platform _ Cabal.FreeBSD -> do + let getMajorVer = readMaybe <=< headMaybe . splitOn "." + majorVer <- getMajorVer <$> sysRelease + if majorVer >= Just (12 :: Int) + then + useBuilds [CompilerBuildSpecialized "ino64"] + else + useBuilds [CompilerBuildStandard] + Platform _ Cabal.OpenBSD -> do + releaseStr <- mungeRelease <$> sysRelease + useBuilds [CompilerBuildSpecialized releaseStr] + _ -> useBuilds [CompilerBuildStandard] + useBuilds builds = do + logDebug $ + "Potential GHC builds: " + <> mconcat (intersperse ", " (map (fromString . compilerBuildName) builds)) + pure builds -- | Encode an OpenBSD version (like "6.1") into a valid argument for -- CompilerBuildSpecialized, so "maj6-min1". Later version numbers are prefixed @@ -994,42 +1870,54 @@ getGhcBuilds = do -- as recognized by parsePackageNameFromString. mungeRelease :: String -> String mungeRelease = intercalate "-" . prefixMaj . splitOn "." 
- where - prefixFst pfx k (rev : revs) = (pfx ++ rev) : k revs - prefixFst _ _ [] = [] - prefixMaj = prefixFst "maj" prefixMin - prefixMin = prefixFst "min" (map ('r':)) + where + prefixFst pfx k (rev : revs) = (pfx ++ rev) : k revs + prefixFst _ _ [] = [] + prefixMaj = prefixFst "maj" prefixMin + prefixMin = prefixFst "min" (map ('r':)) -sysRelease :: HasLogFunc env => RIO env String +sysRelease :: HasTerm env => RIO env String sysRelease = - handleIO (\e -> do - logWarn $ "Could not query OS version: " <> displayShow e - return "") - (liftIO getRelease) - --- | Ensure Docker container-compatible 'stack' executable is downloaded + handleIO + ( \e -> do + prettyWarn $ + flow "Could not query OS version:" + <> blankLine + <> string (displayException e) + pure "" + ) + (liftIO getRelease) + +-- | Ensure Docker container-compatible \'stack\' executable is downloaded ensureDockerStackExe :: HasConfig env => Platform -> RIO env (Path Abs File) ensureDockerStackExe containerPlatform = do - config <- view configL - containerPlatformDir <- runReaderT platformOnlyRelDir (containerPlatform,PlatformVariantNone) - let programsPath = configLocalProgramsBase config containerPlatformDir - tool = Tool (PackageIdentifier (mkPackageName "stack") stackVersion) - stackExeDir <- installDir programsPath tool - let stackExePath = stackExeDir relFileStack - stackExeExists <- doesFileExist stackExePath - unless stackExeExists $ do - logInfo $ - "Downloading Docker-compatible " <> - fromString stackProgName <> - " executable" - sri <- downloadStackReleaseInfo Nothing Nothing (Just (versionString stackMinorVersion)) - platforms <- runReaderT preferredPlatforms (containerPlatform, PlatformVariantNone) - downloadStackExe platforms sri stackExeDir False (const $ return ()) - return stackExePath + config <- view configL + containerPlatformDir <- + runReaderT platformOnlyRelDir (containerPlatform,PlatformVariantNone) + let programsPath = config.localProgramsBase containerPlatformDir + tool = Tool 
(PackageIdentifier (mkPackageName "stack") stackVersion) + stackExeDir <- installDir programsPath tool + let stackExePath = stackExeDir relFileStack + stackExeExists <- doesFileExist stackExePath + unless stackExeExists $ do + prettyInfoL + [ flow "Downloading Docker-compatible" + , fromString stackProgName + , "executable." + ] + sri <- + downloadStackReleaseInfo + Nothing + Nothing + (Just (versionString stackMinorVersion)) + platforms <- + runReaderT preferredPlatforms (containerPlatform, PlatformVariantNone) + downloadStackExe platforms sri stackExeDir False (const $ pure ()) + pure stackExePath -- | Get all executables on the path that might match the wanted compiler -sourceSystemCompilers - :: (HasProcessContext env, HasLogFunc env) +sourceSystemCompilers :: + (HasLogFunc env, HasProcessContext env) => WantedCompiler -> ConduitT i (Path Abs File) (RIO env) () sourceSystemCompilers wanted = do @@ -1046,972 +1934,1090 @@ sourceSystemCompilers wanted = do fp <- resolveFile' $ addExe $ dir FP. name exists <- doesFileExist fp when exists $ yield fp - where - addExe - | osIsWindows = (++ ".exe") - | otherwise = id + where + addExe + | osIsWindows = (++ ".exe") + | otherwise = id -- | Download the most recent SetupInfo getSetupInfo :: HasConfig env => RIO env SetupInfo getSetupInfo = do - config <- view configL - let inlineSetupInfo = configSetupInfoInline config - locations' = configSetupInfoLocations config - locations = if null locations' then [defaultSetupInfoYaml] else locations' - - resolvedSetupInfos <- mapM loadSetupInfo locations - return (inlineSetupInfo <> mconcat resolvedSetupInfos) - where - loadSetupInfo urlOrFile = do - bs <- - case parseUrlThrow urlOrFile of - Just req -> liftM (LBS.toStrict . 
getResponseBody) $ httpLbs req - Nothing -> liftIO $ S.readFile urlOrFile - WithJSONWarnings si warnings <- either throwM return (Yaml.decodeEither' bs) - when (urlOrFile /= defaultSetupInfoYaml) $ - logJSONWarnings urlOrFile warnings - return si - -getInstalledTool :: [Tool] -- ^ already installed - -> PackageName -- ^ package to find - -> (Version -> Bool) -- ^ which versions are acceptable - -> Maybe Tool -getInstalledTool installed name goodVersion = - if null available - then Nothing - else Just $ Tool $ maximumBy (comparing pkgVersion) available - where - available = mapMaybe goodPackage installed - goodPackage (Tool pi') = - if pkgName pi' == name && - goodVersion (pkgVersion pi') - then Just pi' - else Nothing - goodPackage _ = Nothing - -downloadAndInstallTool :: (HasTerm env, HasBuildConfig env) - => Path Abs Dir - -> DownloadInfo - -> Tool - -> (Path Abs File -> ArchiveType -> Path Abs Dir -> Path Abs Dir -> RIO env ()) - -> RIO env Tool + config <- view configL + let inlineSetupInfo = config.setupInfoInline + locations' = config.setupInfoLocations + locations = if null locations' then [defaultSetupInfoYaml] else locations' + + resolvedSetupInfos <- mapM loadSetupInfo locations + pure (inlineSetupInfo <> mconcat resolvedSetupInfos) + where + loadSetupInfo urlOrFile = do + bs <- case parseUrlThrow urlOrFile of + Just req -> LBS.toStrict . 
getResponseBody <$> httpLbs req + Nothing -> liftIO $ S.readFile urlOrFile + WithJSONWarnings si warnings <- either throwM pure (Yaml.decodeEither' bs) + when (urlOrFile /= defaultSetupInfoYaml) $ + logJSONWarnings urlOrFile warnings + pure si + +getInstalledTool :: + [Tool] -- ^ already installed + -> PackageName -- ^ package to find + -> (Version -> Bool) -- ^ which versions are acceptable + -> Maybe Tool +getInstalledTool installed name goodVersion = Tool <$> + maximumByMaybe (comparing pkgVersion) (filterTools name goodVersion installed) + +-- | Obtain and install the specified tool, using the specified download +-- information and installer. Also deletes the archive file for the tool after +-- installation. +downloadAndInstallTool :: + (HasTerm env, HasBuildConfig env) + => Path Abs Dir + -- ^ Location of the directory for tools. + -> DownloadInfo + -- ^ Information about the file to obtain. + -> Tool + -- ^ The tool in question. + -> ( Path Abs File + -- Location of archive file. + -> ArchiveType + -- Type of archive file. + -> Path Abs Dir + -- Temporary directory to use. + -> Path Abs Dir + -- Destination directory for installed tool. + -> RIO env () + ) + -- ^ Installer. 
+ -> RIO env Tool downloadAndInstallTool programsDir downloadInfo tool installer = do - ensureDir programsDir - (file, at) <- downloadFromInfo programsDir downloadInfo tool - dir <- installDir programsDir tool - tempDir <- tempInstallDir programsDir tool - liftIO $ ignoringAbsence (removeDirRecur tempDir) - ensureDir tempDir - unmarkInstalled programsDir tool - installer file at tempDir dir - markInstalled programsDir tool - liftIO $ ignoringAbsence (removeDirRecur tempDir) - return tool - -downloadAndInstallCompiler :: (HasBuildConfig env, HasGHCVariant env) - => CompilerBuild - -> SetupInfo - -> WantedCompiler - -> VersionCheck - -> Maybe String - -> RIO env Tool -downloadAndInstallCompiler ghcBuild si wanted@WCGhc{} versionCheck mbindistURL = do - ghcVariant <- view ghcVariantL - (selectedVersion, downloadInfo) <- case mbindistURL of - Just bindistURL -> do - case ghcVariant of - GHCCustom _ -> return () - _ -> throwM RequireCustomGHCVariant - case wanted of - WCGhc version -> - return (version, GHCDownloadInfo mempty mempty DownloadInfo - { downloadInfoUrl = T.pack bindistURL - , downloadInfoContentLength = Nothing - , downloadInfoSha1 = Nothing - , downloadInfoSha256 = Nothing - }) - _ -> - throwM WantedMustBeGHC - _ -> do - ghcKey <- getGhcKey ghcBuild - case Map.lookup ghcKey $ siGHCs si of - Nothing -> throwM $ UnknownOSKey ghcKey - Just pairs_ -> getWantedCompilerInfo ghcKey versionCheck wanted ACGhc pairs_ - config <- view configL - let installer = - case configPlatform config of - Platform _ Cabal.Windows -> installGHCWindows (Just selectedVersion) - _ -> installGHCPosix (Just selectedVersion) downloadInfo - logInfo $ - "Preparing to install GHC" <> - (case ghcVariant of - GHCStandard -> "" - v -> " (" <> fromString (ghcVariantName v) <> ")") <> - (case ghcBuild of - CompilerBuildStandard -> "" - b -> " (" <> fromString (compilerBuildName b) <> ")") <> - " to an isolated location." - logInfo "This will not interfere with any system-level installation." 
- ghcPkgName <- parsePackageNameThrowing ("ghc" ++ ghcVariantSuffix ghcVariant ++ compilerBuildSuffix ghcBuild) - let tool = Tool $ PackageIdentifier ghcPkgName selectedVersion - downloadAndInstallTool (configLocalPrograms config) (gdiDownloadInfo downloadInfo) tool (installer si) + ensureDir programsDir + (file, at) <- downloadFromInfo programsDir downloadInfo tool + dir <- installDir programsDir tool + tempDir <- tempInstallDir programsDir tool + liftIO $ ignoringAbsence (removeDirRecur tempDir) + ensureDir tempDir + unmarkInstalled programsDir tool + installer file at tempDir dir + markInstalled programsDir tool + liftIO $ ignoringAbsence (removeDirRecur tempDir) + liftIO $ ignoringAbsence (removeFile file) + pure tool + +-- Exceptions thrown by this function are caught by +-- 'downloadAndInstallPossibleCompilers'. +downloadAndInstallCompiler :: + (HasBuildConfig env, HasGHCVariant env) + => CompilerBuild + -> SetupInfo + -> WantedCompiler + -> VersionCheck + -> Maybe String + -> RIO env Tool +downloadAndInstallCompiler ghcBuild si wanted@(WCGhc version) versionCheck mbindistURL = do + ghcVariant <- view ghcVariantL + (selectedVersion, downloadInfo) <- case mbindistURL of + Just bindistURL -> do + case ghcVariant of + GHCCustom _ -> pure () + _ -> throwM RequireCustomGHCVariant + pure + ( version + , GHCDownloadInfo mempty mempty DownloadInfo + { url = T.pack bindistURL + , contentLength = Nothing + , sha1 = Nothing + , sha256 = Nothing + } + ) + _ -> do + ghcKey <- getGhcKey ghcBuild + case Map.lookup ghcKey si.ghcByVersion of + Nothing -> throwM $ UnknownOSKey ghcKey + Just pairs_ -> + getWantedCompilerInfo ghcKey versionCheck wanted ACGhc pairs_ + config <- view configL + let installer = + case config.platform of + Platform _ Cabal.Windows -> installGHCWindows + _ -> installGHCPosix downloadInfo + prettyInfo $ + fillSep $ + flow "Preparing to install GHC" + : case ghcVariant of + GHCStandard -> [] + v -> ["(" <> fromString (ghcVariantName v) <> ")"] + <> case 
ghcBuild of + CompilerBuildStandard -> [] + b -> ["(" <> fromString (compilerBuildName b) <> ")"] + <> [ flow "to an isolated location. This will not interfere with any \ + \system-level installation." + ] + ghcPkgName <- parsePackageNameThrowing + ("ghc" ++ ghcVariantSuffix ghcVariant ++ compilerBuildSuffix ghcBuild) + let tool = Tool $ PackageIdentifier ghcPkgName selectedVersion + downloadAndInstallTool + config.localPrograms + downloadInfo.downloadInfo + tool + (installer si) downloadAndInstallCompiler _ _ WCGhcjs{} _ _ = throwIO GhcjsNotSupported downloadAndInstallCompiler _ _ WCGhcGit{} _ _ = - error "downloadAndInstallCompiler: shouldn't be reached with ghc-git" - -getWantedCompilerInfo :: (Ord k, MonadThrow m) - => Text - -> VersionCheck - -> WantedCompiler - -> (k -> ActualCompiler) - -> Map k a - -> m (k, a) + throwIO DownloadAndInstallCompilerError + +-- Exceptions thrown by this function are caught by +-- 'downloadAndInstallPossibleCompilers'. +getWantedCompilerInfo :: + (Ord k, MonadThrow m) + => Text + -> VersionCheck + -> WantedCompiler + -> (k -> ActualCompiler) + -> Map k a + -> m (k, a) getWantedCompilerInfo key versionCheck wanted toCV pairs_ = - case mpair of - Just pair -> return pair - Nothing -> throwM $ UnknownCompilerVersion (Set.singleton key) wanted (Set.fromList $ map toCV (Map.keys pairs_)) - where - mpair = - listToMaybe $ - sortBy (flip (comparing fst)) $ - filter (isWantedCompiler versionCheck wanted . toCV . fst) (Map.toList pairs_) + case mpair of + Just pair -> pure pair + Nothing -> throwM $ + UnknownCompilerVersion + (Set.singleton key) + wanted + (Set.fromList $ map toCV (Map.keys pairs_)) + where + mpair = + listToMaybe $ + sortOn (Down . fst) $ + filter + (isWantedCompiler versionCheck wanted . toCV . fst) + (Map.toList pairs_) -- | Download and install the first available compiler build. 
downloadAndInstallPossibleCompilers ::
     (HasGHCVariant env, HasBuildConfig env)
  => [CompilerBuild]
  -> SetupInfo
  -> WantedCompiler
  -> VersionCheck
  -> Maybe String
  -> RIO env (Tool, CompilerBuild)
downloadAndInstallPossibleCompilers possibleCompilers si wanted versionCheck mbindistURL =
  go possibleCompilers Nothing
 where
  -- This will stop as soon as one of the builds doesn't throw an @UnknownOSKey@
  -- or @UnknownCompilerVersion@ exception (so it will only try subsequent
  -- builds if one is nonexistent, not if the download or install fails for some
  -- other reason). The @Unknown*@ exceptions thrown by each attempt are
  -- combined into a single exception (if only @UnknownOSKey@ is thrown, then
  -- the first of those is rethrown, but if any @UnknownCompilerVersion@s are
  -- thrown then the attempted OS keys and available versions are unioned).
  go [] Nothing = prettyThrowM UnsupportedSetupConfiguration
  go [] (Just e) = prettyThrowM e
  go (b:bs) e = do
    logDebug $ "Trying to setup GHC build: " <> fromString (compilerBuildName b)
    er <- try $ downloadAndInstallCompiler b si wanted versionCheck mbindistURL
    case er of
      Left e'@(UnknownCompilerVersion ks' w' vs') ->
        case e of
          Nothing -> go bs (Just e')
          Just (UnknownOSKey k) ->
            go bs $ Just $ UnknownCompilerVersion (Set.insert k ks') w' vs'
          Just (UnknownCompilerVersion ks _ vs) ->
            go bs $ Just $
              UnknownCompilerVersion (Set.union ks' ks) w' (Set.union vs' vs)
          Just x -> prettyThrowM x
      Left e'@(UnknownOSKey k') ->
        case e of
          Nothing -> go bs (Just e')
          Just (UnknownOSKey _) -> go bs e
          Just (UnknownCompilerVersion ks w vs) ->
            go bs $ Just $ UnknownCompilerVersion (Set.insert k' ks) w vs
          Just x -> prettyThrowM x
      -- Any other exception aborts the search immediately.
      Left e' -> prettyThrowM e'
      Right r -> pure (r, b)

-- | Compute the setup-info lookup key for a GHC build: OS key plus GHC
-- variant and compiler-build suffixes.
getGhcKey ::
     (HasBuildConfig env, HasGHCVariant env)
  => CompilerBuild
  -> RIO env Text
getGhcKey ghcBuild = do
  ghcVariant <- view ghcVariantL
  ghcVersion <- view (buildConfigL . to (.smWanted.compiler)) >>= \case
    WCGhc version -> pure version
    WCGhcjs _ _ -> throwIO GhcjsNotSupported
    WCGhcGit _ _ -> throwIO DownloadAndInstallCompilerError
  let variantSuffix = ghcVariantSuffix ghcVariant
      buildSuffix = compilerBuildSuffix ghcBuild
      -- Description of the tool's root directory, used only for error output.
      ghcDir = style Dir $ mconcat
        [ "ghc"
        , fromString variantSuffix
        , fromString buildSuffix
        , "-"
        , fromString $ versionString ghcVersion
        ]
  osKey <- getOSKey "GHC" ghcDir
  pure $ osKey <> T.pack variantSuffix <> T.pack buildSuffix

-- | Translate the platform into the OS key used in setup-info metadata.
getOSKey ::
     (HasConfig env, HasPlatform env)
  => StyleDoc
     -- ^ Description of the tool that is being set up.
  -> StyleDoc
     -- ^ Description of the root directory of the tool.
  -> RIO env Text
getOSKey tool toolDir =
  view platformL >>= \case
    Platform I386 Cabal.Linux -> pure "linux32"
    Platform X86_64 Cabal.Linux -> pure "linux64"
    Platform I386 Cabal.OSX -> pure "macosx"
    Platform X86_64 Cabal.OSX -> pure "macosx"
    Platform I386 Cabal.FreeBSD -> pure "freebsd32"
    Platform X86_64 Cabal.FreeBSD -> pure "freebsd64"
    Platform I386 Cabal.OpenBSD -> pure "openbsd32"
    Platform X86_64 Cabal.OpenBSD -> pure "openbsd64"
    Platform I386 Cabal.Windows -> pure "windows32"
    Platform X86_64 Cabal.Windows -> pure "windows64"
    Platform Arm Cabal.Linux -> pure "linux-armv7"
    Platform AArch64 Cabal.Linux -> pure "linux-aarch64"
    Platform Sparc Cabal.Linux -> pure "linux-sparc"
    Platform AArch64 Cabal.OSX -> pure "macosx-aarch64"
    Platform AArch64 Cabal.FreeBSD -> pure "freebsd-aarch64"
    Platform arch os -> do
      programsDir <- view $ configL . to (.localPrograms)
      prettyThrowM $ UnsupportedSetupCombo os arch tool toolDir programsDir

-- | If the given URL is an HTTP(S) URL, download it to the destination;
-- otherwise treat it as an absolute or config-file-relative local path.
-- Checksums/length are only verified for real downloads.
downloadOrUseLocal ::
     (HasTerm env, HasBuildConfig env)
  => Text
  -> DownloadInfo
  -> Path Abs File
  -> RIO env (Path Abs File)
downloadOrUseLocal downloadLabel downloadInfo destination =
  case url of
    (parseUrlThrow -> Just _) -> do
      ensureDir (parent destination)
      chattyDownload downloadLabel downloadInfo destination
      pure destination
    (parseAbsFile -> Just path) -> do
      warnOnIgnoredChecks
      pure path
    (parseRelFile -> Just path) -> do
      warnOnIgnoredChecks
      configFileRoot <- view configFileRootL
      pure (configFileRoot </> path)
    _ -> prettyThrowIO $ URLInvalid url
 where
  url = T.unpack downloadInfo.url
  warnOnIgnoredChecks = do
    let DownloadInfo
          { contentLength
          , sha1
          , sha256
          } = downloadInfo
    when (isJust contentLength) $
      prettyWarnS
        "`content-length` is not checked and should not be specified when \
        \`url` is a file path."
    when (isJust sha1) $
      prettyWarnS
        "`sha1` is not checked and should not be specified when `url` is a \
        \file path."
    -- Fixed for consistency with the warnings above: use 'prettyWarnS' and
    -- end the sentence with a full stop.
    when (isJust sha256) $
      prettyWarnS
        "`sha256` is not checked and should not be specified when `url` is a \
        \file path."

-- | Determine the archive type from the URL's extension, then fetch (or
-- locate) the archive for the given tool.
downloadFromInfo ::
     (HasTerm env, HasBuildConfig env)
  => Path Abs Dir
  -> DownloadInfo
  -> Tool
  -> RIO env (Path Abs File, ArchiveType)
downloadFromInfo programsDir downloadInfo tool = do
  archiveType <-
    case extension of
      ".tar.xz" -> pure TarXz
      ".tar.bz2" -> pure TarBz2
      ".tar.gz" -> pure TarGz
      ".7z.exe" -> pure SevenZ
      _ -> prettyThrowIO $ UnknownArchiveExtension url
  relativeFile <- parseRelFile $ toolString tool ++ extension
  let destinationPath = programsDir </> relativeFile
  localPath <-
    downloadOrUseLocal (T.pack (toolString tool)) downloadInfo destinationPath
  pure (localPath, archiveType)
 where
  url = T.unpack downloadInfo.url
  -- Accumulate the full compound extension (e.g. ".tar.xz", ".7z.exe") by
  -- peeling recognised extensions from the right.
  extension = loop url
   where
    loop fp
      | ext `elem` [".tar", ".bz2", ".xz", ".exe", ".7z", ".gz"] = loop fp' ++ ext
      | otherwise = ""
     where
      (fp', ext) = FP.splitExtension fp

data ArchiveType
  = TarBz2
  | TarXz
  | TarGz
  | SevenZ
-- | Install GHC from a bindist on a POSIX system: unpack the tarball,
-- run @configure@ (honouring any configured GCC override) and @make install@.
installGHCPosix ::
     HasConfig env
  => GHCDownloadInfo
  -> SetupInfo
  -> Path Abs File
  -> ArchiveType
  -> Path Abs Dir
  -> Path Abs Dir
  -> RIO env ()
installGHCPosix downloadInfo _ archiveFile archiveType tempDir destDir = do
  platform <- view platformL
  menv0 <- view processContextL
  -- Strip Haskell-related environment variables so the bindist's own tooling
  -- is not confused by the outer environment.
  menv <- mkProcessContext (removeHaskellEnvVars (view envVarsL menv0))
  logDebug $ "menv = " <> displayShow (view envVarsL menv)
  (zipTool', compOpt) <-
    case archiveType of
      TarXz -> pure ("xz", 'J')
      TarBz2 -> pure ("bzip2", 'j')
      TarGz -> pure ("gzip", 'z')
      SevenZ -> prettyThrowIO Unsupported7z
  -- Slight hack: OpenBSD's tar doesn't support xz.
  -- https://github.com/commercialhaskell/stack/issues/2283#issuecomment-237980986
  let tarDep =
        case (platform, archiveType) of
          (Platform _ Cabal.OpenBSD, TarXz) -> checkDependency "gtar"
          _ -> checkDependency "tar"
  (zipTool, makeTool, tarTool) <- checkDependencies $ (,,)
    <$> checkDependency zipTool'
    <*> (checkDependency "gmake" <|> checkDependency "make")
    <*> tarDep

  logDebug $ "ziptool: " <> fromString zipTool
  logDebug $ "make: " <> fromString makeTool
  logDebug $ "tar: " <> fromString tarTool

  -- Run one install step in the given working directory with extra
  -- environment variables; any failure is wrapped in 'GHCInstallFailed'.
  let runStep step wd env cmd args = do
        menv' <- modifyEnvVars menv (Map.union env)
        let logLines lvl = CB.lines .| CL.mapM_ (lvl . displayBytesUtf8)
            logStdout = logLines logDebug
            logStderr = logLines logError
        void $ withWorkingDir (toFilePath wd) $
          withProcessContext menv' $
            sinkProcessStderrStdout cmd args logStderr logStdout
              `catchAny` \ex ->
                prettyThrowIO (GHCInstallFailed ex step cmd args wd tempDir destDir)

  logSticky $
    "Unpacking GHC into "
    <> fromString (toFilePath tempDir)
    <> " ..."
  logDebug $ "Unpacking " <> fromString (toFilePath archiveFile)
  runStep "unpacking" tempDir
    mempty
    tarTool
    [compOpt : "xf", toFilePath archiveFile]

  dir <- expectSingleUnpackedDir archiveFile tempDir

  mOverrideGccPath <- view $ configL . to (.overrideGccPath)

  -- The make application uses the CC environment variable to configure the
  -- program for compiling C programs
  let mGccEnv = let gccEnvFromPath p =
                      Map.singleton "CC" $ T.pack (toFilePath p)
                in  gccEnvFromPath <$> mOverrideGccPath

  -- Data.Map.union provides a left-biased union, so mGccEnv will prevail
  let ghcConfigureEnv =
        fromMaybe Map.empty mGccEnv `Map.union` downloadInfo.configureEnv

  logSticky "Configuring GHC ..."
  runStep "configuring" dir
    ghcConfigureEnv
    (toFilePath $ dir </> relFileConfigure)
    ( ("--prefix=" ++ toFilePath destDir)
    : map T.unpack downloadInfo.configureOpts
    )

  logSticky "Installing GHC ..."
  runStep "installing" dir mempty makeTool ["install"]

  logStickyDone "Installed GHC."
  logDebug $ "GHC installed to " <> fromString (toFilePath destDir)

-- | Check if given processes appear to be present, throwing an exception if
-- missing.
checkDependencies :: CheckDependency env a -> RIO env a
checkDependencies (CheckDependency f) =
  f >>= either (prettyThrowIO . MissingDependencies) pure

-- | Succeed with the tool's name if it is on the PATH, otherwise record it
-- as missing.
checkDependency :: HasProcessContext env => String -> CheckDependency env String
checkDependency tool = CheckDependency $ do
  exists <- doesExecutableExist tool
  pure $ if exists then Right tool else Left [tool]

-- | An action that either yields a value or the list of missing dependencies.
newtype CheckDependency env a
  = CheckDependency (RIO env (Either [String] a))
  deriving Functor

instance Applicative (CheckDependency env) where
  pure x = CheckDependency $ pure (Right x)
  -- Run both checks and accumulate ALL missing dependencies, rather than
  -- stopping at the first failure.
  CheckDependency f <*> CheckDependency x = CheckDependency $ do
    f' <- f
    x' <- x
    pure $
      case (f', x') of
        (Left e1, Left e2) -> Left $ e1 ++ e2
        (Left e, Right _) -> Left e
        (Right _, Left e) -> Left e
        (Right f'', Right x'') -> Right $ f'' x''

instance Alternative (CheckDependency env) where
  empty = CheckDependency $ pure $ Left []
  -- Try the first check; fall back to the second only if the first fails.
  CheckDependency x <|> CheckDependency y = CheckDependency $
    x >>= \case
      Left _ -> y
      Right x' -> pure $ Right x'

-- | Install GHC on Windows by unpacking the bindist with 7-Zip.
installGHCWindows ::
     HasBuildConfig env
  => SetupInfo
  -> Path Abs File
  -> ArchiveType
  -> Path Abs Dir
  -> Path Abs Dir
  -> RIO env ()
installGHCWindows si archiveFile archiveType _tempDir destDir = do
  withUnpackedTarball7z "GHC" si archiveFile archiveType destDir
  prettyInfoL
    [ flow "GHC installed to"
    , pretty destDir <> "."
    ]

-- | Install MSYS2 on Windows: unpack the archive and run the MSYS shell once
-- to initialise it.
installMsys2Windows ::
     HasBuildConfig env
  => SetupInfo
  -> Path Abs File
  -> ArchiveType
  -> Path Abs Dir
  -> Path Abs Dir
  -> RIO env ()
installMsys2Windows si archiveFile archiveType _tempDir destDir = do
  exists <- liftIO $ D.doesDirectoryExist $ toFilePath destDir
  when exists $
    liftIO (D.removeDirectoryRecursive $ toFilePath destDir) `catchIO` \e ->
      prettyThrowM $ ExistingMSYS2NotDeleted destDir e

  withUnpackedTarball7z "MSYS2" si archiveFile archiveType destDir

  -- I couldn't find this officially documented anywhere, but you need to run
  -- the MSYS shell once in order to initialize some pacman stuff. Once that run
  -- happens, you can just run commands as usual.
  menv0 <- view processContextL
  newEnv0 <- modifyEnvVars menv0 $ Map.insert "MSYSTEM" "MSYS"
  newEnv <- either throwM pure $ augmentPathMap
    [toFilePath $ destDir </> relDirUsr </> relDirBin]
    (view envVarsL newEnv0)
  menv <- mkProcessContext newEnv
  withWorkingDir (toFilePath destDir) $ withProcessContext menv
    $ proc "sh" ["--login", "-c", "true"] runProcess_

  -- No longer installing git, it's unreliable
  -- (https://github.com/commercialhaskell/stack/issues/1046) and the
  -- MSYS2-installed version has bad CRLF defaults.
  --
  -- Install git. We could install other useful things in the future too.
  -- runCmd (Cmd (Just destDir) "pacman" menv ["-Sy", "--noconfirm", "git"]) Nothing

-- | Unpack a compressed tarball using 7zip. Expects a single directory in the
-- unpacked results, which is renamed to the destination directory.
withUnpackedTarball7z ::
     HasBuildConfig env
  => String -- ^ Name of tool, used in error messages
  -> SetupInfo
  -> Path Abs File -- ^ Path to archive file
  -> ArchiveType
  -> Path Abs Dir -- ^ Destination directory.
  -> RIO env ()
withUnpackedTarball7z name si archiveFile archiveType destDir = do
  suffix <-
    case archiveType of
      TarXz -> pure ".xz"
      TarBz2 -> pure ".bz2"
      TarGz -> pure ".gz"
      _ -> prettyThrowIO $ TarballInvalid name
  tarFile <-
    case T.stripSuffix suffix $ T.pack $ toFilePath (filename archiveFile) of
      Nothing -> prettyThrowIO $ TarballFileInvalid name archiveFile
      Just x -> parseRelFile $ T.unpack x
  run7z <- setup7z si
  -- We aim to reduce the risk of a filepath length of more than 260 characters,
  -- which can be problematic for 7-Zip if Windows is not 'long file paths'
  -- enabled. We use a short name for the temporary directory ...
  let tmpName = "stack-tmp"
      destDrive = takeDrive destDir
  ensureDir (parent destDir)
  tempDrive <- takeDrive <$> getTempDir
  -- We use a temporary directory with likely a short absolute path ...
  let withTempDir' = if tempDrive == destDrive
        then
          -- We use the system temporary directory if we can, as a Standard user
          -- may well not have permission to create a directory in the root of
          -- the system drive.
          withSystemTempDir
        else
          -- Otherwise we use a temporary directory in the root of the
          -- destination drive.
          withTempDir destDrive
  withRunInIO $ \run ->
    withTempDir' tmpName $ \tmpDir ->
      run $ do
        liftIO $ ignoringAbsence (removeDirRecur destDir)
        run7z tmpDir archiveFile
        run7z tmpDir (tmpDir </> tarFile)
        absSrcDir <- expectSingleUnpackedDir archiveFile tmpDir
        -- On Windows, 'renameDir' does not work across drives. However, we have
        -- ensured that 'tmpDir' has the same drive as 'destDir'.
        renameDir absSrcDir destDir

-- | Expect the unpack directory to contain exactly one directory and return
-- it; anything else is an error.
expectSingleUnpackedDir ::
     (MonadIO m, MonadThrow m)
  => Path Abs File
  -> Path Abs Dir
  -> m (Path Abs Dir)
expectSingleUnpackedDir archiveFile unpackDir =
  listDir unpackDir >>= \case
    ([dir], _ ) -> pure dir
    _ -> prettyThrowIO $ UnknownArchiveStructure archiveFile

-- | Download 7z as necessary, and get a function for unpacking things.
--
-- Returned function takes an unpack directory and archive.
setup7z ::
     (HasBuildConfig env, MonadIO m)
  => SetupInfo
  -> RIO env (Path Abs Dir -> Path Abs File -> m ())
setup7z si = do
  dir <- view $ configL . to (.localPrograms)
  ensureDir dir
  let exeDestination = dir </> relFile7zexe
      dllDestination = dir </> relFile7zdll
  case (si.sevenzDll, si.sevenzExe) of
    (Just sevenzDll, Just sevenzExe) -> do
      _ <- downloadOrUseLocal "7z.dll" sevenzDll dllDestination
      exePath <- downloadOrUseLocal "7z.exe" sevenzExe exeDestination
      withRunInIO $ \run -> pure $ \outdir archive -> liftIO $ run $ do
        let cmd = toFilePath exePath
            args =
              [ "x"
              , "-o" ++ toFilePath outdir
              , "-y"
              , archiveFP
              ]
            archiveFP = toFilePath archive
            archiveFileName = filename archive
            archiveDisplay = fromString $ toFilePath archiveFileName
            isExtract = FP.takeExtension archiveFP == ".tar"
        prettyInfoL
          [ if isExtract then "Extracting" else "Decompressing"
          , pretty archiveFileName <> "..."
          ]
        ec <-
          proc cmd args $ \pc ->
            if isExtract
              then withProcessWait (setStdout createSource pc) $ \p -> do
                -- Count newline bytes in 7z's output to report a running
                -- total of extracted files.
                total <- runConduit
                  $ getStdout p
                  .| filterCE (== 10) -- newline characters
                  .| foldMC
                      (\count bs -> do
                          let count' = count + S.length bs
                          logSticky $ "Extracted " <> display count' <> " files"
                          pure count'
                      )
                      0
                logStickyDone $
                  "Extracted total of "
                  <> display total
                  <> " files from "
                  <> archiveDisplay
                waitExitCode p
              else runProcess pc
        when (ec /= ExitSuccess) $
          liftIO $ prettyThrowM (ProblemWhileDecompressing archive)
    _ -> prettyThrowM SetupInfoMissingSevenz

-- | Download a file with progress reporting, verifying any checksums or
-- content length given in the download info.
chattyDownload ::
     HasTerm env
  => Text -- ^ label
  -> DownloadInfo -- ^ URL, content-length, sha1, and sha256
  -> Path Abs File -- ^ destination
  -> RIO env ()
chattyDownload label downloadInfo path = do
  let url = downloadInfo.url
  req <- parseUrlThrow $ T.unpack url
  logSticky $
    "Preparing to download "
    <> display label
    <> " ..."
  logDebug $
    "Downloading from "
    <> display url
    <> " to "
    <> fromString (toFilePath path)
    <> " ..."
  -- Collect whichever hash checks the metadata provides.
  hashChecks <- fmap catMaybes $ forM
    [ ("sha1", HashCheck SHA1, (.sha1))
    , ("sha256", HashCheck SHA256, (.sha256))
    ]
    $ \(name, constr, getter) ->
      case getter downloadInfo of
        Just bs -> do
          logDebug $
            "Will check against "
            <> name
            <> " hash: "
            <> displayBytesUtf8 bs
          pure $ Just $ constr $ CheckHexDigestByteString bs
        Nothing -> pure Nothing
  when (null hashChecks) $
    prettyWarnS
      "No sha1 or sha256 found in metadata, download hash won't be checked."
  let dReq = setHashChecks hashChecks $
             setLengthCheck mtotalSize $
             mkDownloadRequest req
  x <- verifiedDownloadWithProgress dReq path label mtotalSize
  if x
    then logStickyDone ("Downloaded " <> display label <> ".")
    else logStickyDone ("Already downloaded " <> display label <> ".")
 where
  mtotalSize = downloadInfo.contentLength

-- | Perform a basic sanity check of GHC
sanityCheck ::
     (HasLogFunc env, HasProcessContext env)
  => Path Abs File
  -> RIO env ()
sanityCheck ghc = withSystemTempDir "stack-sanity-check" $ \dir -> do
  let fp = toFilePath $ dir </> relFileMainHs
  liftIO $ S.writeFile fp $ T.encodeUtf8 $ T.pack $ unlines
    [ "import Distribution.Simple" -- ensure Cabal library is present
    , "main = putStrLn \"Hello World\""
    ]
  logDebug $ "Performing a sanity check on: " <> fromString (toFilePath ghc)
  eres <- withWorkingDir (toFilePath dir) $ proc (toFilePath ghc)
    [ fp
    , "-no-user-package-db"
    -- Required to stop GHC looking for a package environment in default
    -- locations.
    , "-hide-all-packages"
    -- Required because GHC flag -hide-all-packages is passed.
    , "-package base"
    , "-package Cabal" -- required for "import Distribution.Simple"
    ] $ try .
readProcess_ + case eres of + Left e -> prettyThrowIO $ GHCSanityCheckCompileFailed e ghc + Right _ -> pure () -- TODO check that the output of running the command is + -- correct -- Remove potentially confusing environment variables removeHaskellEnvVars :: Map Text Text -> Map Text Text removeHaskellEnvVars = - Map.delete "GHC_PACKAGE_PATH" . - Map.delete "GHC_ENVIRONMENT" . - Map.delete "HASKELL_PACKAGE_SANDBOX" . - Map.delete "HASKELL_PACKAGE_SANDBOXES" . - Map.delete "HASKELL_DIST_DIR" . - -- https://github.com/commercialhaskell/stack/issues/1460 - Map.delete "DESTDIR" . - -- https://github.com/commercialhaskell/stack/issues/3444 - Map.delete "GHCRTS" - --- | Get map of environment variables to set to change the GHC's encoding to UTF-8 -getUtf8EnvVars - :: (HasProcessContext env, HasPlatform env, HasLogFunc env) - => ActualCompiler - -> RIO env (Map Text Text) -getUtf8EnvVars compilerVer = - if getGhcVersion compilerVer >= mkVersion [7, 10, 3] - -- GHC_CHARENC supported by GHC >=7.10.3 - then return $ Map.singleton "GHC_CHARENC" "UTF-8" - else legacyLocale - where - legacyLocale = do - menv <- view processContextL - Platform _ os <- view platformL - if os == Cabal.Windows - then - -- On Windows, locale is controlled by the code page, so we don't set any environment - -- variables. - return - Map.empty - else do - let checkedVars = map checkVar (Map.toList $ view envVarsL menv) - -- List of environment variables that will need to be updated to set UTF-8 (because - -- they currently do not specify UTF-8). - needChangeVars = concatMap fst checkedVars - -- Set of locale-related environment variables that have already have a value. - existingVarNames = Set.unions (map snd checkedVars) - -- True if a locale is already specified by one of the "global" locale variables. 
- hasAnyExisting = - any (`Set.member` existingVarNames) ["LANG", "LANGUAGE", "LC_ALL"] - if null needChangeVars && hasAnyExisting - then - -- If no variables need changes and at least one "global" variable is set, no - -- changes to environment need to be made. - return - Map.empty - else do - -- Get a list of known locales by running @locale -a@. - elocales <- tryAny $ fmap fst $ proc "locale" ["-a"] readProcess_ - let - -- Filter the list to only include locales with UTF-8 encoding. - utf8Locales = - case elocales of - Left _ -> [] - Right locales -> - filter - isUtf8Locale - (T.lines $ - T.decodeUtf8With - T.lenientDecode $ - LBS.toStrict locales) - mfallback = getFallbackLocale utf8Locales - when - (isNothing mfallback) - (logWarn - "Warning: unable to set locale to UTF-8 encoding; GHC may fail with 'invalid character'") - let - -- Get the new values of variables to adjust. - changes = - Map.unions $ - map - (adjustedVarValue menv utf8Locales mfallback) - needChangeVars - -- Get the values of variables to add. - adds - | hasAnyExisting = - -- If we already have a "global" variable, then nothing needs - -- to be added. - Map.empty - | otherwise = - -- If we don't already have a "global" variable, then set LANG to the - -- fallback. - case mfallback of - Nothing -> Map.empty - Just fallback -> - Map.singleton "LANG" fallback - return (Map.union changes adds) - -- Determines whether an environment variable is locale-related and, if so, whether it needs to - -- be adjusted. - checkVar - :: (Text, Text) -> ([Text], Set Text) - checkVar (k,v) = - if k `elem` ["LANG", "LANGUAGE"] || "LC_" `T.isPrefixOf` k - then if isUtf8Locale v - then ([], Set.singleton k) - else ([k], Set.singleton k) - else ([], Set.empty) - -- Adjusted value of an existing locale variable. Looks for valid UTF-8 encodings with - -- same language /and/ territory, then with same language, and finally the first UTF-8 locale - -- returned by @locale -a@. 
- adjustedVarValue - :: ProcessContext -> [Text] -> Maybe Text -> Text -> Map Text Text - adjustedVarValue menv utf8Locales mfallback k = - case Map.lookup k (view envVarsL menv) of - Nothing -> Map.empty - Just v -> - case concatMap - (matchingLocales utf8Locales) - [ T.takeWhile (/= '.') v <> "." - , T.takeWhile (/= '_') v <> "_"] of - (v':_) -> Map.singleton k v' - [] -> - case mfallback of - Just fallback -> Map.singleton k fallback - Nothing -> Map.empty - -- Determine the fallback locale, by looking for any UTF-8 locale prefixed with the list in - -- @fallbackPrefixes@, and if not found, picking the first UTF-8 encoding returned by @locale - -- -a@. - getFallbackLocale - :: [Text] -> Maybe Text - getFallbackLocale utf8Locales = - case concatMap (matchingLocales utf8Locales) fallbackPrefixes of - (v:_) -> Just v - [] -> - case utf8Locales of - [] -> Nothing - (v:_) -> Just v - -- Filter the list of locales for any with the given prefixes (case-insitive). - matchingLocales - :: [Text] -> Text -> [Text] - matchingLocales utf8Locales prefix = - filter (\v -> T.toLower prefix `T.isPrefixOf` T.toLower v) utf8Locales - -- Does the locale have one of the encodings in @utf8Suffixes@ (case-insensitive)? - isUtf8Locale locale = - any (\ v -> T.toLower v `T.isSuffixOf` T.toLower locale) utf8Suffixes - -- Prefixes of fallback locales (case-insensitive) - fallbackPrefixes = ["C.", "en_US.", "en_"] - -- Suffixes of UTF-8 locales (case-insensitive) - utf8Suffixes = [".UTF-8", ".utf8"] + Map.delete "GHC_PACKAGE_PATH" . + Map.delete "GHC_ENVIRONMENT" . + Map.delete "HASKELL_PACKAGE_SANDBOX" . + Map.delete "HASKELL_PACKAGE_SANDBOXES" . + Map.delete "HASKELL_DIST_DIR" . + -- https://github.com/commercialhaskell/stack/issues/1460 + Map.delete "DESTDIR" . + -- https://github.com/commercialhaskell/stack/issues/3444 + Map.delete "GHCRTS" + +-- | Map of environment variables to set to change the GHC's encoding to UTF-8. 
+utf8EnvVars :: Map Text Text +utf8EnvVars = + -- GHC_CHARENC supported by GHC >=7.10.3 + Map.singleton "GHC_CHARENC" "UTF-8" -- Binary Stack upgrades -newtype StackReleaseInfo = StackReleaseInfo Value - -downloadStackReleaseInfo :: (MonadIO m, MonadThrow m) - => Maybe String -- Github org - -> Maybe String -- Github repo - -> Maybe String -- ^ optional version - -> m StackReleaseInfo -downloadStackReleaseInfo morg mrepo mver = liftIO $ do - let org = fromMaybe "commercialhaskell" morg - repo = fromMaybe "stack" mrepo - let url = concat - [ "https://api.github.com/repos/" - , org - , "/" - , repo - , "/releases/" - , case mver of - Nothing -> "latest" - Just ver -> "tags/v" ++ ver +-- | Information on a binary release of Stack. +data StackReleaseInfo + = SRIGitHub !Value + -- ^ Metadata downloaded from GitHub releases about available binaries. + | SRIHaskellStackOrg !HaskellStackOrg + -- ^ Information on the latest available binary for the current platforms. + +data HaskellStackOrg = HaskellStackOrg + { url :: !Text + , version :: !Version + } + deriving Show + +-- | Download information on a binary release of Stack. If there is no given +-- GitHub user, GitHub repository and version, then first tries +-- @haskellstack.org@. +downloadStackReleaseInfo :: + (HasLogFunc env, HasPlatform env) + => Maybe String -- ^ Optional GitHub user. + -> Maybe String -- ^ Optional GitHub repository. + -> Maybe String -- ^ Optional version. + -> RIO env StackReleaseInfo +downloadStackReleaseInfo Nothing Nothing Nothing = do + platform <- view platformL + -- Fallback list of URLs to try for upgrading. 
+ let urls0 = + case platform of + Platform X86_64 Cabal.Linux -> + [ "https://get.haskellstack.org/upgrade/linux-x86_64-static.tar.gz" + , "https://get.haskellstack.org/upgrade/linux-x86_64.tar.gz" + ] + Platform X86_64 Cabal.OSX -> + [ "https://get.haskellstack.org/upgrade/osx-x86_64.tar.gz" ] - req <- parseRequest url - res <- httpJSON $ setGithubHeaders req - let code = getResponseStatusCode res - if code >= 200 && code < 300 - then return $ StackReleaseInfo $ getResponseBody res - else throwString $ "Could not get release information for Stack from: " ++ url - -preferredPlatforms :: (MonadReader env m, HasPlatform env, MonadThrow m) - => m [(Bool, String)] + Platform X86_64 Cabal.Windows -> + [ "https://get.haskellstack.org/upgrade/windows-x86_64.tar.gz" + ] + _ -> [] + -- Helper function: extract the version from a GitHub releases URL. + let extractVersion loc = do + version0 <- + case reverse $ splitOn "/" $ T.unpack loc of + _final:version0:_ -> Right version0 + _ -> Left $ "Insufficient pieces in location: " ++ show loc + version1 <- + maybe (Left "no leading v on version") Right $ stripPrefix "v" version0 + maybe (Left $ "Invalid version: " ++ show version1) Right $ parseVersion version1 + + -- Try out different URLs. If we've exhausted all of them, fall back to GitHub. + loop [] = do + logDebug "Could not get binary from haskellstack.org, trying GitHub" + downloadStackReleaseInfoGitHub Nothing Nothing Nothing + + -- Try the next URL + loop (url:urls) = do + -- Make a HEAD request without any redirects + req <- setRequestMethod "HEAD" <$> parseRequest (T.unpack url) + res <- httpLbs req { redirectCount = 0 } + + -- Look for a redirect. We're looking for a standard GitHub releases + -- URL where we can extract version information from. + case getResponseHeader "location" res of + [] -> logDebug "No location header found, continuing" *> loop urls + -- Exactly one location header. 
+ [locBS] -> + case decodeUtf8' locBS of + Left e -> + logDebug + ( "Invalid UTF8: " + <> displayShow (locBS, e) + ) + *> loop urls + Right loc -> + case extractVersion loc of + Left s -> + logDebug + ( "No version found: " + <> displayShow (url, loc, s) + ) + *> loop (loc:urls) + -- We found a valid URL, let's use it! + Right version -> do + let hso = HaskellStackOrg + { url = loc + , version + } + logDebug $ + "Downloading from haskellstack.org: " + <> displayShow hso + pure $ SRIHaskellStackOrg hso + locs -> + logDebug + ( "Multiple location headers found: " + <> displayShow locs + ) + *> loop urls + loop urls0 +downloadStackReleaseInfo morg mrepo mver = + downloadStackReleaseInfoGitHub morg mrepo mver + +-- | Same as above, but always uses GitHub +downloadStackReleaseInfoGitHub :: + (MonadIO m, MonadThrow m) + => Maybe String -- GitHub org + -> Maybe String -- GitHub repo + -> Maybe String -- ^ optional version + -> m StackReleaseInfo +downloadStackReleaseInfoGitHub morg mrepo mver = liftIO $ do + let org = fromMaybe "commercialhaskell" morg + repo = fromMaybe "stack" mrepo + let url = concat + [ "https://api.github.com/repos/" + , org + , "/" + , repo + , "/releases/" + , case mver of + Nothing -> "latest" + Just ver -> "tags/v" ++ ver + ] + req <- parseRequest url + res <- httpJSON $ setGitHubHeaders req + let code = getResponseStatusCode res + if code >= 200 && code < 300 + then pure $ SRIGitHub $ getResponseBody res + else prettyThrowIO $ StackReleaseInfoNotFound url + +-- | Yield a list of the preferred GHC variants for the platform. The first item +-- of each pair indicates if the operating system is Windows. The second item +-- is the name of the GHC variant in Stack's @setup-info@ dictionary. 
+preferredPlatforms :: + (MonadReader env m, HasPlatform env, MonadThrow m) + => m [(Bool, String)] preferredPlatforms = do - Platform arch' os' <- view platformL - (isWindows, os) <- - case os' of - Cabal.Linux -> return (False, "linux") - Cabal.Windows -> return (True, "windows") - Cabal.OSX -> return (False, "osx") - Cabal.FreeBSD -> return (False, "freebsd") - _ -> throwM $ stringException $ "Binary upgrade not yet supported on OS: " ++ show os' - arch <- - case arch' of - I386 -> return "i386" - X86_64 -> return "x86_64" - Arm -> return "arm" - _ -> throwM $ stringException $ "Binary upgrade not yet supported on arch: " ++ show arch' - hasgmp4 <- return False -- FIXME import relevant code from Stack.Setup? checkLib $(mkRelFile "libgmp.so.3") - let suffixes - | hasgmp4 = ["-static", "-gmp4", ""] - | otherwise = ["-static", ""] - return $ map (\suffix -> (isWindows, concat [os, "-", arch, suffix])) suffixes - -downloadStackExe - :: HasConfig env - => [(Bool, String)] -- ^ acceptable platforms - -> StackReleaseInfo - -> Path Abs Dir -- ^ destination directory - -> Bool -- ^ perform PATH-aware checking, see #3232 - -> (Path Abs File -> IO ()) -- ^ test the temp exe before renaming - -> RIO env () + Platform arch' os' <- view platformL + (isWindows, os) <- + case os' of + Cabal.Linux -> pure (False, "linux") + Cabal.Windows -> pure (True, "windows") + Cabal.OSX -> pure (False, "osx") + Cabal.FreeBSD -> pure (False, "freebsd") + _ -> prettyThrowM $ BinaryUpgradeOnOSUnsupported os' + arch <- + case arch' of + I386 -> pure "i386" + X86_64 -> pure "x86_64" + Arm -> pure "arm" + AArch64 -> pure "aarch64" + _ -> prettyThrowM $ BinaryUpgradeOnArchUnsupported arch' + let hasgmp4 = False -- FIXME import relevant code from Stack.Setup? + -- checkLib $(mkRelFile "libgmp.so.3") + suffixes + -- 'gmp4' ceased to be relevant after Stack 1.9.3 (December 2018). + | hasgmp4 = ["-static", "-gmp4", ""] + -- 'static' will cease to be relevant after Stack 2.11.1 (May 2023). 
+ | otherwise = ["-static", ""] + pure $ map (\suffix -> (isWindows, concat [os, "-", arch, suffix])) suffixes + +-- | Download a Stack executable. +downloadStackExe :: + HasConfig env + => [(Bool, String)] -- ^ acceptable platforms + -> StackReleaseInfo + -> Path Abs Dir -- ^ destination directory + -> Bool -- ^ perform PATH-aware checking, see #3232 + -> (Path Abs File -> IO ()) -- ^ test the temp exe before renaming + -> RIO env () downloadStackExe platforms0 archiveInfo destDir checkPath testExe = do - (isWindows, archiveURL) <- - let loop [] = throwString $ "Unable to find binary Stack archive for platforms: " - ++ unwords (map snd platforms0) - loop ((isWindows, p'):ps) = do - let p = T.pack p' - logInfo $ "Querying for archive location for platform: " <> fromString p' - case findArchive archiveInfo p of - Just x -> return (isWindows, x) - Nothing -> loop ps - in loop platforms0 - - let (destFile, tmpFile) - | isWindows = - ( destDir relFileStackDotExe - , destDir relFileStackDotTmpDotExe - ) - | otherwise = - ( destDir relFileStack - , destDir relFileStackDotTmp - ) + (isWindows, archiveURL) <- + let loop [] = + prettyThrowIO $ StackBinaryArchiveNotFound (map snd platforms0) + loop ((isWindows, p'):ps) = do + let p = T.pack p' + prettyInfoL + [ flow "Querying for archive location for platform:" + , style Current (fromString p') <> "." + ] + case findArchive archiveInfo p of + Just x -> pure (isWindows, x) + Nothing -> loop ps + in loop platforms0 + + let (destFile, tmpFile) + | isWindows = + ( destDir relFileStackDotExe + , destDir relFileStackDotTmpDotExe + ) + | otherwise = + ( destDir relFileStack + , destDir relFileStackDotTmp + ) + + prettyInfoL + [ flow "Downloading from:" + , style Url (fromString $ T.unpack archiveURL) <> "." 
+ ] + + liftIO $ + if | ".tar.gz" `T.isSuffixOf` archiveURL -> + handleTarball tmpFile isWindows archiveURL + | ".zip" `T.isSuffixOf` archiveURL -> + throwIO StackBinaryArchiveZipUnsupportedBug + | otherwise -> prettyThrowIO $ StackBinaryArchiveUnsupported archiveURL + + prettyInfoS "Download complete, testing executable." + + -- We need to preserve the name of the executable file before we overwrite the + -- currently running binary: after that, Linux will append (deleted) to the + -- filename. + currExe <- viewExecutablePath + + liftIO $ do + setFileExecutable (toFilePath tmpFile) + testExe tmpFile + + relocateStackExeFile currExe tmpFile destFile + + prettyInfoL + [ flow "New Stack executable available at:" + , pretty destFile <> "." + ] + + destDir' <- liftIO . D.canonicalizePath . toFilePath $ destDir + warnInstallSearchPathIssues destDir' ["stack"] + + when checkPath $ performPathChecking destFile currExe + `catchAny` (logError . displayShow) + where + + findArchive (SRIGitHub val) platformPattern = do + Object top <- pure val + Array assets <- KeyMap.lookup "assets" top + getFirst $ foldMap (First . findMatch pattern') assets + where + pattern' = mconcat ["-", platformPattern, "."] + + findMatch pattern'' (Object o) = do + String name <- KeyMap.lookup "name" o + guard $ not $ ".asc" `T.isSuffixOf` name + guard $ pattern'' `T.isInfixOf` name + String url <- KeyMap.lookup "browser_download_url" o + Just url + findMatch _ _ = Nothing + findArchive (SRIHaskellStackOrg hso) _ = pure hso.url + + handleTarball :: Path Abs File -> Bool -> T.Text -> IO () + handleTarball tmpFile isWindows url = do + req <- fmap setGitHubHeaders $ parseUrlThrow $ T.unpack url + withResponse req $ \res -> do + entries <- fmap (Tar.read . 
LBS.fromChunks)
+ $ lazyConsume
+ $ getResponseBody res .| ungzip
+ let loop Tar.Done = prettyThrowIO $ StackBinaryNotInArchive exeName url
+ loop (Tar.Fail e) = throwM e
+ loop (Tar.Next e es) =
+ case FP.splitPath (Tar.entryPath e) of
+ -- Ignore the first component, see:
+ -- https://github.com/commercialhaskell/stack/issues/5288
+ [_ignored, name] | name == exeName -> do
+ case Tar.entryContent e of
+ Tar.NormalFile lbs _ -> do
+ ensureDir destDir
+ LBS.writeFile (toFilePath tmpFile) lbs
+ _ -> prettyThrowIO $ FileTypeInArchiveInvalid e url
+ _ -> loop es
+ loop entries
+ where
+ exeName
+ | isWindows = "stack.exe"
+ | otherwise = "stack"
+
+relocateStackExeFile ::
+ HasTerm env
+ => Path Abs File
+ -- ^ Path to the currently running executable
+ -> Path Abs File
+ -- ^ Path to the executable file to be relocated
+ -> Path Abs File
+ -- ^ Path to the new location for the executable file
+ -> RIO env ()
+relocateStackExeFile currExeFile newExeFile destExeFile = do
+ when (osIsWindows && destExeFile == currExeFile) $ do
+ -- Windows allows a running executable's file to be renamed, but not to be
+ -- overwritten.
+ old <- addExtension ".old" currExeFile
+ prettyInfoL
+ [ flow "Renaming existing:"
+ , pretty currExeFile
+ , "as:"
+ , pretty old <> "."
+ ]
+ renameFile currExeFile old
+ renameFile newExeFile destExeFile
 
-    logInfo $ "Downloading from: " <> RIO.display archiveURL
-
-    liftIO $ do
-      case () of
-        ()
-          | ".tar.gz" `T.isSuffixOf` archiveURL -> handleTarball tmpFile isWindows archiveURL
-          | ".zip" `T.isSuffixOf` archiveURL -> error "FIXME: Handle zip files"
-          | otherwise -> error $ "Unknown archive format for Stack archive: " ++ T.unpack archiveURL
-
-    logInfo "Download complete, testing executable"
-
-    platform <- view platformL
-
-    -- We need to call getExecutablePath before we overwrite the
-    -- currently running binary: after that, Linux will append
-    -- (deleted) to the filename. 
- currExe <- liftIO getExecutablePath - - liftIO $ do - setFileExecutable (toFilePath tmpFile) - - testExe tmpFile - - case platform of - Platform _ Cabal.Windows | FP.equalFilePath (toFilePath destFile) currExe -> do - old <- parseAbsFile (toFilePath destFile ++ ".old") - renameFile destFile old - renameFile tmpFile destFile - _ -> renameFile tmpFile destFile - - destDir' <- liftIO . D.canonicalizePath . toFilePath $ destDir - warnInstallSearchPathIssues destDir' ["stack"] - - logInfo $ "New stack executable available at " <> fromString (toFilePath destFile) - - when checkPath $ performPathChecking destFile currExe - `catchAny` (logError . displayShow) - where - - findArchive (StackReleaseInfo val) pattern = do - Object top <- return val - Array assets <- HashMap.lookup "assets" top - getFirst $ fold $ fmap (First . findMatch pattern') assets - where - pattern' = mconcat ["-", pattern, "."] - - findMatch pattern'' (Object o) = do - String name <- HashMap.lookup "name" o - guard $ not $ ".asc" `T.isSuffixOf` name - guard $ pattern'' `T.isInfixOf` name - String url <- HashMap.lookup "browser_download_url" o - Just url - findMatch _ _ = Nothing - - handleTarball :: Path Abs File -> Bool -> T.Text -> IO () - handleTarball tmpFile isWindows url = do - req <- fmap setGithubHeaders $ parseUrlThrow $ T.unpack url - withResponse req $ \res -> do - entries <- fmap (Tar.read . 
LBS.fromChunks) - $ lazyConsume - $ getResponseBody res .| ungzip - let loop Tar.Done = error $ concat - [ "Stack executable " - , show exeName - , " not found in archive from " - , T.unpack url - ] - loop (Tar.Fail e) = throwM e - loop (Tar.Next e es) - | Tar.entryPath e == exeName = - case Tar.entryContent e of - Tar.NormalFile lbs _ -> do - ensureDir destDir - LBS.writeFile (toFilePath tmpFile) lbs - _ -> error $ concat - [ "Invalid file type for tar entry named " - , exeName - , " downloaded from " - , T.unpack url - ] - | otherwise = loop es - loop entries - where - -- The takeBaseName drops the .gz, dropExtension drops the .tar - exeName = - let base = FP.dropExtension (FP.takeBaseName (T.unpack url)) FP. "stack" - in if isWindows then base FP.<.> "exe" else base - --- | Ensure that the Stack executable download is in the same location --- as the currently running executable. See: +-- | Ensure that the Stack executable download is in the same location as the +-- currently running executable. See: -- https://github.com/commercialhaskell/stack/issues/3232 -performPathChecking - :: HasConfig env - => Path Abs File -- ^ location of the newly downloaded file - -> String -- ^ currently running executable - -> RIO env () -performPathChecking newFile executablePath = do - executablePath' <- parseAbsFile executablePath - unless (toFilePath newFile == executablePath) $ do - logInfo $ "Also copying stack executable to " <> fromString executablePath - tmpFile <- parseAbsFile $ executablePath ++ ".tmp" - eres <- tryIO $ do - liftIO $ copyFile newFile tmpFile - setFileExecutable (toFilePath tmpFile) - liftIO $ renameFile tmpFile executablePath' - logInfo "Stack executable copied successfully!" 
- case eres of - Right () -> return () +performPathChecking :: + HasConfig env + => Path Abs File + -- ^ Path to the newly downloaded file + -> Path Abs File + -- ^ Path to the currently running executable + -> RIO env () +performPathChecking newExeFile currExeFile = do + unless (newExeFile == currExeFile) $ do + prettyInfoL + [ flow "Also copying Stack executable to:" + , pretty currExeFile <> "." + ] + tmpFile <- toFilePath <$> addExtension ".tmp" currExeFile + tryIO (relocateStackExeFile currExeFile newExeFile currExeFile) >>= \case + Right () -> prettyInfoS "Stack executable copied successfully!" Left e - | isPermissionError e -> do - logWarn $ "Permission error when trying to copy: " <> displayShow e - logWarn "Should I try to perform the file copy using sudo? This may fail" - toSudo <- promptBool "Try using sudo? (y/n) " - when toSudo $ do - let run cmd args = do - ec <- proc cmd args runProcess - when (ec /= ExitSuccess) $ error $ concat - [ "Process exited with " - , show ec - , ": " - , unwords (cmd:args) - ] - commands = - [ ("sudo", - [ "cp" - , toFilePath newFile - , toFilePath tmpFile - ]) - , ("sudo", - [ "mv" - , toFilePath tmpFile - , executablePath - ]) - ] - logInfo "Going to run the following commands:" - logInfo "" - forM_ commands $ \(cmd, args) -> - logInfo $ "- " <> mconcat (intersperse " " (fromString <$> (cmd:args))) - mapM_ (uncurry run) commands - logInfo "" - logInfo "sudo file copy worked!" + | isPermissionError e -> if osIsWindows + then do + prettyWarn $ + flow "Permission error when trying to copy:" + <> blankLine + <> string (displayException e) + else do + prettyWarn $ + flow "Permission error when trying to copy:" + <> blankLine + <> string (displayException e) + <> blankLine + <> fillSep + [ flow "Should I try to perform the file copy using" + , style Shell "sudo" <> "?" + , flow "This may fail." + ] + toSudo <- promptBool "Try using sudo? 
(y/n) " + when toSudo $ do + let run cmd args = do + ec <- proc cmd args runProcess + when (ec /= ExitSuccess) $ + throwIO $ ProcessExited ec cmd args + commands = + [ ("sudo", + [ "cp" + , toFilePath newExeFile + , tmpFile + ]) + , ("sudo", + [ "mv" + , tmpFile + , toFilePath currExeFile + ]) + ] + prettyInfo $ + flow "Going to run the following commands:" + <> blankLine + <> bulletedList + ( map + ( \(cmd, args) -> + style Shell $ fillSep + $ fromString cmd + : map fromString args + ) + commands + ) + mapM_ (uncurry run) commands + prettyInfo $ + line + <> flow "sudo file copy worked!" | otherwise -> throwM e +-- | If available, yields the version of the given binary release of Stack. getDownloadVersion :: StackReleaseInfo -> Maybe Version -getDownloadVersion (StackReleaseInfo val) = do - Object o <- Just val - String rawName <- HashMap.lookup "name" o - -- drop the "v" at the beginning of the name - parseVersion $ T.unpack (T.drop 1 rawName) +getDownloadVersion (SRIGitHub val) = do + Object o <- Just val + String rawName <- KeyMap.lookup "name" o + -- drop the "v" at the beginning of the name + parseVersion $ T.unpack (T.drop 1 rawName) +getDownloadVersion (SRIHaskellStackOrg hso) = Just hso.version diff --git a/src/Stack/Setup/Installed.hs b/src/Stack/Setup/Installed.hs index 5db160d237..b2c7f81652 100644 --- a/src/Stack/Setup/Installed.hs +++ b/src/Stack/Setup/Installed.hs @@ -1,44 +1,71 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ViewPatterns #-} -{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} + +{-| +Module : Stack.Setup.Installed +License : BSD-3-Clause +-} module Stack.Setup.Installed - ( getCompilerVersion - , markInstalled - , unmarkInstalled - , listInstalled - , Tool (..) 
- , toolString - , toolNameString - , parseToolText - , extraDirs - , installDir - , tempInstallDir - ) where + ( getCompilerVersion + , markInstalled + , unmarkInstalled + , listInstalled + , Tool (..) + , toolString + , toolNameString + , parseToolText + , filterTools + , toolExtraDirs + , installDir + , tempInstallDir + ) where -import Stack.Prelude import qualified Data.ByteString.Char8 as S8 import qualified Data.ByteString.Lazy as BL -import Data.List hiding (concat, elem, maximumBy) +import Data.Char ( isDigit ) +import qualified Data.List as L import qualified Data.Text as T import qualified Data.Text.Encoding as T -import Distribution.System (Platform (..)) +import Distribution.System ( Platform (..) ) import qualified Distribution.System as Cabal -import Path -import Path.IO +import Path ( (), filename, parseRelDir, parseRelFile ) +import Path.IO ( doesDirExist, ignoringAbsence, listDir, removeFile ) +import RIO.Process ( HasProcessContext, proc, readProcess_ ) import Stack.Constants + ( relDirBin, relDirInclude, relDirLib, relDirLocal + , relDirMingw, relDirUsr + ) +import Stack.Prelude import Stack.Types.Compiler -import Stack.Types.Config -import RIO.Process + ( ActualCompiler (..), WhichCompiler (..) ) +import Stack.Types.Config ( Config (..), HasConfig (..) ) +import Stack.Types.Config.Exception ( ConfigPrettyException (..) ) +import Stack.Types.ExtraDirs ( ExtraDirs (..) ) +import Stack.Types.MsysEnvironment ( relDirMsysEnv ) data Tool - = Tool PackageIdentifier -- ^ e.g. ghc-7.8.4, msys2-20150512 - | ToolGhcGit !Text !Text -- ^ e.g. ghc-git-COMMIT_ID-FLAVOUR - deriving (Eq) + = Tool PackageIdentifier -- ^ e.g. ghc-7.8.4, msys2-20150512 + | ToolGhcGit !Text !Text -- ^ e.g. ghc-git-COMMIT_ID-FLAVOUR + deriving Eq + +-- | t'Tool' values are ordered by name (being @ghc-git@, for @ToolGhcGit _ _@) +-- alphabetically and then by version (later versions are ordered before +-- earlier versions, where applicable). 
+instance Ord Tool where + compare (Tool pkgId1) (Tool pkgId2) = if pkgName1 == pkgName2 + then compare pkgVersion2 pkgVersion1 -- Later versions ordered first + else compare pkgName1 pkgName2 + where + PackageIdentifier pkgName1 pkgVersion1 = pkgId1 + PackageIdentifier pkgName2 pkgVersion2 = pkgId2 + compare (Tool pkgId) (ToolGhcGit _ _) = compare (pkgName pkgId) "ghc-git" + compare (ToolGhcGit _ _) (Tool pkgId) = compare "ghc-git" (pkgName pkgId) + compare (ToolGhcGit c1 f1) (ToolGhcGit c2 f2) = if f1 == f2 + then compare c1 c2 + else compare f1 f2 toolString :: Tool -> String toolString (Tool ident) = packageIdentifierString ident @@ -54,116 +81,124 @@ parseToolText (parseWantedCompiler -> Right (WCGhcGit c f)) = Just (ToolGhcGit c parseToolText (parsePackageIdentifier . T.unpack -> Just pkgId) = Just (Tool pkgId) parseToolText _ = Nothing -markInstalled :: (MonadIO m, MonadThrow m) - => Path Abs Dir - -> Tool - -> m () +markInstalled :: + (MonadIO m, MonadThrow m) + => Path Abs Dir + -> Tool + -> m () markInstalled programsPath tool = do - fpRel <- parseRelFile $ toolString tool ++ ".installed" - writeBinaryFileAtomic (programsPath fpRel) "installed" + fpRel <- parseRelFile $ toolString tool ++ ".installed" + writeBinaryFileAtomic (programsPath fpRel) "installed" -unmarkInstalled :: MonadIO m - => Path Abs Dir - -> Tool - -> m () +unmarkInstalled :: + MonadIO m + => Path Abs Dir + -> Tool + -> m () unmarkInstalled programsPath tool = liftIO $ do - fpRel <- parseRelFile $ toolString tool ++ ".installed" - ignoringAbsence (removeFile $ programsPath fpRel) - -listInstalled :: (MonadIO m, MonadThrow m) - => Path Abs Dir - -> m [Tool] -listInstalled programsPath = do - doesDirExist programsPath >>= \case - False -> return [] - True -> do (_, files) <- listDir programsPath - return $ mapMaybe toTool files - where - toTool fp = do - x <- T.stripSuffix ".installed" $ T.pack $ toFilePath $ filename fp - parseToolText x - -getCompilerVersion - :: (HasProcessContext env, 
HasLogFunc env) + fpRel <- parseRelFile $ toolString tool ++ ".installed" + ignoringAbsence (removeFile $ programsPath fpRel) + +listInstalled :: + (MonadIO m, MonadThrow m) + => Path Abs Dir + -> m [Tool] +listInstalled programsPath = + doesDirExist programsPath >>= \case + False -> pure [] + True -> do (_, files) <- listDir programsPath + pure $ mapMaybe toTool files + where + toTool fp = do + x <- T.stripSuffix ".installed" $ T.pack $ toFilePath $ filename fp + parseToolText x + +filterTools :: + PackageName -- ^ package to find + -> (Version -> Bool) -- ^ which versions are acceptable + -> [Tool] -- ^ tools to filter + -> [PackageIdentifier] +filterTools name goodVersion installed = + [ pkgId | Tool pkgId <- installed + , pkgName pkgId == name + , goodVersion (pkgVersion pkgId) ] + +getCompilerVersion :: + (HasProcessContext env, HasLogFunc env) => WhichCompiler -> Path Abs File -- ^ executable -> RIO env ActualCompiler -getCompilerVersion wc exe = do - case wc of - Ghc -> do - logDebug "Asking GHC for its version" - bs <- fst <$> proc (toFilePath exe) ["--numeric-version"] readProcess_ - let (_, ghcVersion) = versionFromEnd $ BL.toStrict bs - x <- ACGhc <$> parseVersionThrowing (T.unpack $ T.decodeUtf8 ghcVersion) - logDebug $ "GHC version is: " <> display x - return x - where - versionFromEnd = S8.spanEnd isValid . fst . S8.breakEnd isValid - isValid c = c == '.' || ('0' <= c && c <= '9') +getCompilerVersion wc exe = + case wc of + Ghc -> do + logDebug "Asking GHC for its version" + bs <- fst <$> proc (toFilePath exe) ["--numeric-version"] readProcess_ + let (_, ghcVersion) = versionFromEnd $ BL.toStrict bs + x <- ACGhc <$> parseVersionThrowing (T.unpack $ T.decodeUtf8 ghcVersion) + logDebug $ "GHC version is: " <> display x + pure x + where + versionFromEnd = S8.spanEnd isValid . fst . S8.breakEnd isValid + isValid c = c == '.' 
|| isDigit c -- | Binary directories for the given installed package -extraDirs :: HasConfig env => Tool -> RIO env ExtraDirs -extraDirs tool = do - config <- view configL - dir <- installDir (configLocalPrograms config) tool - case (configPlatform config, toolNameString tool) of - (Platform _ Cabal.Windows, isGHC -> True) -> return mempty - { edBins = - [ dir relDirBin - , dir relDirMingw relDirBin - ] - } - (Platform Cabal.I386 Cabal.Windows, "msys2") -> return mempty - { edBins = - [ dir relDirMingw32 relDirBin - , dir relDirUsr relDirBin - , dir relDirUsr relDirLocal relDirBin - ] - , edInclude = - [ dir relDirMingw32 relDirInclude - ] - , edLib = - [ dir relDirMingw32 relDirLib - , dir relDirMingw32 relDirBin - ] - } - (Platform Cabal.X86_64 Cabal.Windows, "msys2") -> return mempty - { edBins = - [ dir relDirMingw64 relDirBin - , dir relDirUsr relDirBin - , dir relDirUsr relDirLocal relDirBin - ] - , edInclude = - [ dir relDirMingw64 relDirInclude - ] - , edLib = - [ dir relDirMingw64 relDirLib - , dir relDirMingw64 relDirBin - ] - } - (_, isGHC -> True) -> return mempty - { edBins = - [ dir relDirBin - ] - } - (Platform _ x, toolName) -> do - logWarn $ "binDirs: unexpected OS/tool combo: " <> displayShow (x, toolName) - return mempty - where - isGHC n = "ghc" == n || "ghc-" `isPrefixOf` n - -installDir :: (MonadReader env m, MonadThrow m) - => Path Abs Dir - -> Tool - -> m (Path Abs Dir) +toolExtraDirs :: HasConfig env => Tool -> RIO env ExtraDirs +toolExtraDirs tool = do + config <- view configL + dir <- installDir config.localPrograms tool + case (config.platform, toolNameString tool) of + (Platform _ Cabal.Windows, isGHC -> True) -> pure mempty + { bins = + [ dir relDirBin + , dir relDirMingw relDirBin + ] + } + (Platform _ Cabal.Windows, "msys2") -> do + relDirMsysEnvPrefix <- case config.msysEnvironment of + Just msysEnv -> pure $ relDirMsysEnv msysEnv + Nothing -> throwM NoMsysEnvironmentBug + pure mempty + { bins = + [ dir relDirMsysEnvPrefix relDirBin 
+ , dir relDirUsr relDirBin + , dir relDirUsr relDirLocal relDirBin + ] + , includes = + [ dir relDirMsysEnvPrefix relDirInclude + ] + , libs = + [ dir relDirMsysEnvPrefix relDirLib + , dir relDirMsysEnvPrefix relDirBin + ] + } + (_, isGHC -> True) -> pure mempty + { bins = + [ dir relDirBin + ] + } + (Platform _ x, toolName) -> do + prettyWarnL + [ flow "binDirs: unexpected OS/tool combo:" + , flow (show (x, toolName) <> ".") + ] + pure mempty + where + isGHC n = "ghc" == n || "ghc-" `L.isPrefixOf` n + +installDir :: + (MonadReader env m, MonadThrow m) + => Path Abs Dir + -> Tool + -> m (Path Abs Dir) installDir programsDir tool = do - relativeDir <- parseRelDir $ toolString tool - return $ programsDir relativeDir + relativeDir <- parseRelDir $ toolString tool + pure $ programsDir relativeDir -tempInstallDir :: (MonadReader env m, MonadThrow m) - => Path Abs Dir - -> Tool - -> m (Path Abs Dir) +tempInstallDir :: + (MonadReader env m, MonadThrow m) + => Path Abs Dir + -> Tool + -> m (Path Abs Dir) tempInstallDir programsDir tool = do - relativeDir <- parseRelDir $ toolString tool ++ ".temp" - return $ programsDir relativeDir + relativeDir <- parseRelDir $ toolString tool ++ ".temp" + pure $ programsDir relativeDir diff --git a/src/Stack/SetupCmd.hs b/src/Stack/SetupCmd.hs index 99e8c01658..eb339107a3 100644 --- a/src/Stack/SetupCmd.hs +++ b/src/Stack/SetupCmd.hs @@ -1,98 +1,136 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.SetupCmd +Description : Function related to Stack's @setup@ command. +License : BSD-3-Clause + +Function related to Stack's @setup@ command. +-} --- | Install GHC/GHCJS and Cabal. 
module Stack.SetupCmd - ( setup - , setupParser - , SetupCmdOpts(..) - ) where + ( setupCmd + ) where -import Control.Applicative -import Control.Monad.Reader -import qualified Data.Text as T -import qualified Options.Applicative as OA -import qualified Options.Applicative.Builder.Extra as OA -import qualified Options.Applicative.Types as OA -import Path +import qualified Data.Either.Extra as EE import Stack.Prelude -import Stack.Setup -import Stack.Types.Config -import Stack.Types.Version +import Stack.Runners + ( ShouldReexec (..), withBuildConfig, withConfig ) +import Stack.Setup ( SetupOpts (..), ensureCompilerAndMsys ) +import Stack.Types.BuildConfig + ( HasBuildConfig, configFileL, wantedCompilerVersionL ) +import Stack.Types.CompilerPaths ( CompilerPaths (..) ) +import Stack.Types.Config ( Config (..), HasConfig (..) ) +import Stack.Types.GHCVariant ( HasGHCVariant ) +import Stack.Types.Runner ( Runner ) +import Stack.Types.SetupOpts ( SetupCmdOpts (..) ) +import Stack.Types.Version ( VersionCheck (..) ) -data SetupCmdOpts = SetupCmdOpts - { scoCompilerVersion :: !(Maybe WantedCompiler) - , scoForceReinstall :: !Bool - , scoGHCBindistURL :: !(Maybe String) - , scoGHCJSBootOpts :: ![String] - , scoGHCJSBootClean :: !Bool - } +-- | Function underlying the @stack setup@ command. +setupCmd :: SetupCmdOpts -> RIO Runner () +setupCmd sco = withConfig YesReexec $ do + installGHC <- view $ configL . to (.installGHC) + installMsys <- view $ configL . to (.installMsys) + case (installGHC, installMsys) of + (True, True) -> withBuildConfig $ do + (wantedCompiler, compilerCheck, mConfigFile) <- + case sco.compilerVersion of + Just v -> pure (v, MatchMinor, Nothing) + Nothing -> do + wantedCompilerVersion <- view wantedCompilerVersionL + compilerCheck <- view (configL . to (.compilerCheck)) + configFile <- view configFileL + -- We are indifferent as to whether the configuration file is a + -- user-specific global or a project-level one. 
+ let eitherConfigFile = EE.fromEither configFile + pure + ( wantedCompilerVersion + , compilerCheck + , Just eitherConfigFile + ) + setup sco wantedCompiler compilerCheck mConfigFile + (False, True) -> warn + [ styledNoInstallGHC + , singleFlag + ] + (True, False) -> warn + [ styledNoInstallMsys + , singleFlag + ] + (False, False) -> warn + [ styledNoInstallGHC + , "and" + , styledNoInstallMsys + , flow "flags are" + ] + where + styledNoInstallGHC = style Shell "--no-install-ghc" + styledNoInstallMsys = style Shell "--no-install-msys" + singleFlag = flow "flag is" + warn docs = prettyWarnL $ + ["The"] + <> docs + <> [ flow "inconsistent with" + , style Shell (flow "stack setup") <> "." + , flow "No action taken." + ] -setupParser :: OA.Parser SetupCmdOpts -setupParser = SetupCmdOpts - <$> OA.optional (OA.argument readVersion - (OA.metavar "GHC_VERSION" <> - OA.help ("Version of GHC to install, e.g. 7.10.2. " ++ - "The default is to install the version implied by the resolver."))) - <*> OA.boolFlags False - "reinstall" - "reinstalling GHC, even if available (incompatible with --system-ghc)" - OA.idm - <*> OA.optional (OA.strOption - (OA.long "ghc-bindist" - <> OA.metavar "URL" - <> OA.help "Alternate GHC binary distribution (requires custom --ghc-variant)")) - <*> OA.many (OA.strOption - (OA.long "ghcjs-boot-options" - <> OA.metavar "GHCJS_BOOT" - <> OA.help "Additional ghcjs-boot options")) - <*> OA.boolFlags True - "ghcjs-boot-clean" - "Control if ghcjs-boot should have --clean option present" - OA.idm - where - readVersion = do - s <- OA.readerAsk - case parseWantedCompiler ("ghc-" <> T.pack s) of - Left _ -> - case parseWantedCompiler (T.pack s) of - Left _ -> OA.readerError $ "Invalid version: " ++ s - Right x -> return x - Right x -> return x - -setup - :: (HasBuildConfig env, HasGHCVariant env) - => SetupCmdOpts - -> WantedCompiler - -> VersionCheck - -> Maybe (Path Abs File) - -> RIO env () -setup SetupCmdOpts{..} wantedCompiler compilerCheck mstack = do - 
Config{..} <- view configL - sandboxedGhc <- cpSandboxed . fst <$> ensureCompilerAndMsys SetupOpts - { soptsInstallIfMissing = True - , soptsUseSystem = configSystemGHC && not scoForceReinstall - , soptsWantedCompiler = wantedCompiler - , soptsCompilerCheck = compilerCheck - , soptsStackYaml = mstack - , soptsForceReinstall = scoForceReinstall - , soptsSanityCheck = True - , soptsSkipGhcCheck = False - , soptsSkipMsys = configSkipMsys - , soptsResolveMissingGHC = Nothing - , soptsGHCBindistURL = scoGHCBindistURL - } - let compiler = case wantedCompiler of - WCGhc _ -> "GHC" - WCGhcGit{} -> "GHC (built from source)" - WCGhcjs {} -> "GHCJS" - if sandboxedGhc - then logInfo $ "stack will use a sandboxed " <> compiler <> " it installed" - else logInfo $ "stack will use the " <> compiler <> " on your PATH" - logInfo "For more information on paths, see 'stack path' and 'stack exec env'" - logInfo $ "To use this " <> compiler <> " and packages outside of a project, consider using:" - logInfo "stack ghc, stack ghci, stack runghc, or stack exec" +setup :: + (HasBuildConfig env, HasGHCVariant env) + => SetupCmdOpts + -> WantedCompiler + -> VersionCheck + -> Maybe (Path Abs File) + -- ^ If we got the desired GHC version from that configuration file, which + -- may be either a user-specific global or a project-level one. + -> RIO env () +setup sco wantedCompiler compilerCheck configFile = do + config <- view configL + sandboxedGhc <- (.sandboxed) . 
fst <$> ensureCompilerAndMsys SetupOpts + { installGhcIfMissing = True + , installMsysIfMissing = True + , useSystem = config.systemGHC && not sco.forceReinstall + , wantedCompiler + , compilerCheck + , configFile + , forceReinstall = sco.forceReinstall + , sanityCheck = True + , skipGhcCheck = False + , skipMsys = config.skipMsys + , resolveMissingGHC = Nothing + , ghcBindistURL = sco.ghcBindistUrl + } + let compiler = case wantedCompiler of + WCGhc _ -> "GHC" + WCGhcGit{} -> "GHC (built from source)" + WCGhcjs {} -> "GHCJS" + compilerHelpMsg = fillSep + [ flow "To use this" + , compiler + , flow "and packages outside of a project, consider using:" + , style Shell (flow "stack ghc") <> "," + , style Shell (flow "stack ghci") <> "," + , style Shell (flow "stack runghc") <> "," + , "or" + , style Shell (flow "stack exec") <> "." + ] + if sandboxedGhc + then prettyInfoL + [ flow "Stack will use a sandboxed" + , compiler + , flow "it installed." + , compilerHelpMsg + ] + else prettyInfoL + [ flow "Stack will use the" + , compiler + , flow "on your PATH. For more information on paths, see" + , style Shell (flow "stack path") + , "and" + , style Shell (flow "stack exec env") <> "." 
+ , compilerHelpMsg + ] diff --git a/src/Stack/SourceMap.hs b/src/Stack/SourceMap.hs index fead78ce24..823f219b99 100644 --- a/src/Stack/SourceMap.hs +++ b/src/Stack/SourceMap.hs @@ -1,280 +1,273 @@ -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.SourceMap +License : BSD-3-Clause +-} + module Stack.SourceMap - ( mkProjectPackage - , snapToDepPackage - , additionalDepPackage - , loadVersion - , getPLIVersion - , loadGlobalHints - , DumpedGlobalPackage - , actualFromGhc - , actualFromHints - , checkFlagsUsedThrowing - , globalCondCheck - , pruneGlobals - , globalsFromHints - , getCompilerInfo - , immutableLocSha - , loadProjectSnapshotCandidate - , SnapshotCandidate - , globalsFromDump - ) where + ( mkProjectPackage + , snapToDepPackage + , additionalDepPackage + , loadVersion + , getPLIVersion + , loadGlobalHints + , actualFromGhc + , globalCondCheck + , pruneGlobals + , globalsFromHints + , getCompilerInfo + , immutableLocSha + , loadProjectSnapshotCandidate + , SnapshotCandidate + , globalsFromDump + ) where -import Data.ByteString.Builder (byteString) +import Data.ByteString.Builder ( byteString ) import qualified Data.Conduit.List as CL +import qualified Data.Text as T import qualified Distribution.PackageDescription as PD -import Distribution.System (Platform(..)) -import Pantry +import Distribution.System ( Platform (..) 
) import qualified Pantry.SHA256 as SHA256 -import qualified RIO import qualified RIO.Map as Map -import qualified RIO.Set as Set -import RIO.Process -import Stack.PackageDump -import Stack.Prelude -import Stack.Types.Build -import Stack.Types.Compiler -import Stack.Types.Config -import Stack.Types.SourceMap +import RIO.Process ( HasProcessContext ) +import Stack.Constants ( stackProgName' ) +import Stack.PackageDump ( conduitDumpPackage, ghcPkgDump ) +import Stack.Prelude +import Stack.Types.Compiler + ( ActualCompiler, wantedToActual ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), GhcPkgExe, HasCompiler (..) ) +import Stack.Types.Config ( HasConfig ) +import Stack.Types.DumpPackage + ( DumpPackage (..), DumpedGlobalPackage ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.Runner ( rslInLogL ) +import Stack.Types.SourceMap + ( CommonPackage (..), DepPackage (..), FromSnapshot (..) + , GlobalPackage (..), GlobalPackageVersion (..) + , ProjectPackage (..), SMActual (..), SMWanted (..) + ) --- | Create a 'ProjectPackage' from a directory containing a package. +-- | Create a t'ProjectPackage' from a directory containing a package. mkProjectPackage :: - forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) - => PrintWarnings - -> ResolvedPath Dir - -> Bool - -> RIO env ProjectPackage -mkProjectPackage printWarnings dir buildHaddocks = do - (gpd, name, cabalfp) <- loadCabalFilePath (resolvedAbsolute dir) - return ProjectPackage - { ppCabalFP = cabalfp - , ppResolvedDir = dir - , ppCommon = CommonPackage - { cpGPD = gpd printWarnings - , cpName = name - , cpFlags = mempty - , cpGhcOptions = mempty - , cpCabalConfigOpts = mempty - , cpHaddocks = buildHaddocks - } - } + forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => PrintWarnings + -> ResolvedPath Dir + -> Bool + -- ^ Should Haddock documentation be built for the package? 
+ -> RIO env ProjectPackage +mkProjectPackage printWarnings resolvedDir buildHaddocks = do + (gpd, name, cabalFP) <- + loadCabalFilePath (Just stackProgName') (resolvedAbsolute resolvedDir) + pure ProjectPackage + { cabalFP + , resolvedDir + , projectCommon = + CommonPackage + { gpd = gpd printWarnings + , name + , flags = mempty + , ghcOptions = mempty + , cabalConfigOpts = mempty + , buildHaddocks + } + } --- | Create a 'DepPackage' from a 'PackageLocation', from some additional --- to a snapshot setting (extra-deps or command line) -additionalDepPackage - :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) +-- | Create a t'DepPackage' from a 'PackageLocation', from some additional to a +-- snapshot setting (extra-deps or command line). +additionalDepPackage :: + forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) => Bool + -- ^ Should Haddock documentation be built for the package? -> PackageLocation -> RIO env DepPackage -additionalDepPackage buildHaddocks pl = do - (name, gpdio) <- - case pl of +additionalDepPackage buildHaddocks location = do + (name, gpd) <- + case location of PLMutable dir -> do - (gpdio, name, _cabalfp) <- loadCabalFilePath (resolvedAbsolute dir) - pure (name, gpdio NoPrintWarnings) + (gpd, name, _cabalfp) <- + loadCabalFilePath (Just stackProgName') (resolvedAbsolute dir) + pure (name, gpd NoPrintWarnings) PLImmutable pli -> do let PackageIdentifier name _ = packageLocationIdent pli run <- askRunInIO pure (name, run $ loadCabalFileImmutable pli) - return DepPackage - { dpLocation = pl - , dpHidden = False - , dpFromSnapshot = NotFromSnapshot - , dpCommon = CommonPackage - { cpGPD = gpdio - , cpName = name - , cpFlags = mempty - , cpGhcOptions = mempty - , cpCabalConfigOpts = mempty - , cpHaddocks = buildHaddocks - } + pure DepPackage + { location + , hidden = False + , fromSnapshot = NotFromSnapshot + , depCommon = + CommonPackage + { gpd + , name + , flags = mempty + , ghcOptions = mempty + , 
cabalConfigOpts = mempty + , buildHaddocks + } } +-- | Given a t'PackageName' and its t'SnapshotPackage', yields the corresponding +-- t'DepPackage'. snapToDepPackage :: - forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) - => Bool - -> PackageName - -> SnapshotPackage - -> RIO env DepPackage -snapToDepPackage buildHaddocks name SnapshotPackage{..} = do + forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => Bool + -- ^ Should Haddock documentation be built for the package? + -> PackageName + -> SnapshotPackage + -> RIO env DepPackage +snapToDepPackage buildHaddocks name sp = do run <- askRunInIO - return DepPackage - { dpLocation = PLImmutable spLocation - , dpHidden = spHidden - , dpFromSnapshot = FromSnapshot - , dpCommon = CommonPackage - { cpGPD = run $ loadCabalFileImmutable spLocation - , cpName = name - , cpFlags = spFlags - , cpGhcOptions = spGhcOptions - , cpCabalConfigOpts = [] -- No spCabalConfigOpts, not present in snapshots - , cpHaddocks = buildHaddocks - } + pure DepPackage + { location = PLImmutable sp.spLocation + , hidden = sp.spHidden + , fromSnapshot = FromSnapshot + , depCommon = + CommonPackage + { gpd = run $ loadCabalFileImmutable sp.spLocation + , name + , flags = sp.spFlags + , ghcOptions = sp.spGhcOptions + , cabalConfigOpts = [] -- No spCabalConfigOpts, not present in snapshots + , buildHaddocks + } } +-- | For the given t'CommonPackage', load its generic package description and +-- yield its version. loadVersion :: MonadIO m => CommonPackage -> m Version loadVersion common = do - gpd <- liftIO $ cpGPD common - return (pkgVersion $ PD.package $ PD.packageDescription gpd) + gpd <- liftIO common.gpd + pure gpd.packageDescription.package.pkgVersion +-- | For the given t'PackageLocationImmutable', yield the version of the +-- referenced package. 
getPLIVersion :: PackageLocationImmutable -> Version getPLIVersion (PLIHackage (PackageIdentifier _ v) _ _) = v getPLIVersion (PLIArchive _ pm) = pkgVersion $ pmIdent pm getPLIVersion (PLIRepo _ pm) = pkgVersion $ pmIdent pm +-- | For the given @ghc-pkg@ executable, yield the contents of the global +-- package database. globalsFromDump :: - (HasLogFunc env, HasProcessContext env) - => GhcPkgExe - -> RIO env (Map PackageName DumpedGlobalPackage) + (HasProcessContext env, HasTerm env) + => GhcPkgExe + -> RIO env (Map PackageName DumpedGlobalPackage) globalsFromDump pkgexe = do - let pkgConduit = - conduitDumpPackage .| - CL.foldMap (\dp -> Map.singleton (dpGhcPkgId dp) dp) - toGlobals ds = - Map.fromList $ map (pkgName . dpPackageIdent &&& id) $ Map.elems ds - toGlobals <$> ghcPkgDump pkgexe [] pkgConduit + let pkgConduit = conduitDumpPackage + .| CL.foldMap (\dp -> Map.singleton dp.ghcPkgId dp) + toGlobals ds = + Map.fromList $ map (pkgName . (.packageIdent) &&& id) $ Map.elems ds + toGlobals <$> ghcPkgDump pkgexe [] pkgConduit +-- | For the given wanted compiler, yield the global hints (if available). globalsFromHints :: - HasConfig env - => WantedCompiler - -> RIO env (Map PackageName Version) -globalsFromHints compiler = do - mglobalHints <- loadGlobalHints compiler - case mglobalHints of - Just hints -> pure hints - Nothing -> do - logWarn $ "Unable to load global hints for " <> RIO.display compiler - pure mempty - -type DumpedGlobalPackage = DumpPackage + HasConfig env + => WantedCompiler + -> RIO env (Map PackageName Version) +globalsFromHints compiler = loadGlobalHints compiler >>= maybe + ( do + prettyWarnL + [ flow "Unable to load global hints for" + , fromString $ T.unpack $ textDisplay compiler + ] + pure mempty + ) + pure +-- | When the environment 'HasCompiler', for the +-- given t'Stack.Types.SourceMap.SMWanted' and 'ActualCompiler', yield +-- a t'SMActual' parameterised by t'DumpedGlobalPackage'. 
actualFromGhc :: - (HasConfig env, HasCompiler env) - => SMWanted - -> ActualCompiler - -> RIO env (SMActual DumpedGlobalPackage) -actualFromGhc smw ac = do - globals <- view $ compilerPathsL.to cpGlobalDump - return - SMActual - { smaCompiler = ac - , smaProject = smwProject smw - , smaDeps = smwDeps smw - , smaGlobal = globals - } - -actualFromHints :: - (HasConfig env) - => SMWanted - -> ActualCompiler - -> RIO env (SMActual GlobalPackageVersion) -actualFromHints smw ac = do - globals <- globalsFromHints (actualToWanted ac) - return - SMActual - { smaCompiler = ac - , smaProject = smwProject smw - , smaDeps = smwDeps smw - , smaGlobal = Map.map GlobalPackageVersion globals - } + (HasConfig env, HasCompiler env) + => SMWanted + -> ActualCompiler + -> RIO env (SMActual DumpedGlobalPackage) +actualFromGhc smw compiler = do + globals <- view $ compilerPathsL . to (.globalDump) + pure + SMActual + { compiler + , project = smw.project + , deps = smw.deps + , globals + } -- | Simple cond check for boot packages - checks only OS and Arch -globalCondCheck :: (HasConfig env) => RIO env (PD.ConfVar -> Either PD.ConfVar Bool) +globalCondCheck :: + (HasConfig env) + => RIO env (PD.ConfVar + -> Either PD.ConfVar Bool) globalCondCheck = do Platform arch os <- view platformL let condCheck (PD.OS os') = pure $ os' == os condCheck (PD.Arch arch') = pure $ arch' == arch condCheck c = Left c - return condCheck - -checkFlagsUsedThrowing :: - (MonadIO m, MonadThrow m) - => Map PackageName (Map FlagName Bool) - -> FlagSource - -> Map PackageName ProjectPackage - -> Map PackageName DepPackage - -> m () -checkFlagsUsedThrowing packageFlags source prjPackages deps = do - unusedFlags <- - forMaybeM (Map.toList packageFlags) $ \(pname, flags) -> - getUnusedPackageFlags (pname, flags) source prjPackages deps - unless (null unusedFlags) $ - throwM $ InvalidFlagSpecification $ Set.fromList unusedFlags - -getUnusedPackageFlags :: - MonadIO m - => (PackageName, Map FlagName Bool) - -> 
FlagSource - -> Map PackageName ProjectPackage - -> Map PackageName DepPackage - -> m (Maybe UnusedFlags) -getUnusedPackageFlags (name, userFlags) source prj deps = - let maybeCommon = - fmap ppCommon (Map.lookup name prj) <|> - fmap dpCommon (Map.lookup name deps) - in case maybeCommon of - -- Package is not available as project or dependency - Nothing -> - pure $ Just $ UFNoPackage source name - -- Package exists, let's check if the flags are defined - Just common -> do - gpd <- liftIO $ cpGPD common - let pname = pkgName $ PD.package $ PD.packageDescription gpd - pkgFlags = Set.fromList $ map PD.flagName $ PD.genPackageFlags gpd - unused = Map.keysSet $ Map.withoutKeys userFlags pkgFlags - if Set.null unused - -- All flags are defined, nothing to do - then pure Nothing - -- Error about the undefined flags - else pure $ Just $ UFFlagsNotDefined source pname pkgFlags unused + pure condCheck +-- | Prune the given packages from GHC's global package database. pruneGlobals :: - Map PackageName DumpedGlobalPackage - -> Set PackageName - -> Map PackageName GlobalPackage + Map PackageName DumpedGlobalPackage + -- ^ Packages in GHC's global package database. + -> Set PackageName + -- ^ Package names to prune. + -> Map PackageName GlobalPackage pruneGlobals globals deps = let (prunedGlobals, keptGlobals) = - partitionReplacedDependencies globals (pkgName . dpPackageIdent) - dpGhcPkgId dpDepends deps - in Map.map (GlobalPackage . pkgVersion . dpPackageIdent) keptGlobals <> - Map.map ReplacedGlobalPackage prunedGlobals + partitionReplacedDependencies globals (pkgName . (.packageIdent)) + (.ghcPkgId) (.depends) deps + in Map.map (GlobalPackage . pkgVersion . (.packageIdent)) keptGlobals <> + Map.map ReplacedGlobalPackage prunedGlobals +-- | Get the output of @ghc --info@ for the compiler in the environment. getCompilerInfo :: (HasConfig env, HasCompiler env) => RIO env Builder -getCompilerInfo = view $ compilerPathsL.to (byteString . 
cpGhcInfo) +getCompilerInfo = view $ compilerPathsL . to (byteString . (.ghcInfo)) +-- | For the given 'PackageLocationImmutable', yield its 256-bit cryptographic +-- hash. immutableLocSha :: PackageLocationImmutable -> Builder immutableLocSha = byteString . treeKeyToBs . locationTreeKey - where - locationTreeKey (PLIHackage _ _ tk) = tk - locationTreeKey (PLIArchive _ pm) = pmTreeKey pm - locationTreeKey (PLIRepo _ pm) = pmTreeKey pm - treeKeyToBs (TreeKey (BlobKey sha _)) = SHA256.toHexBytes sha + where + locationTreeKey (PLIHackage _ _ tk) = tk + locationTreeKey (PLIArchive _ pm) = pmTreeKey pm + locationTreeKey (PLIRepo _ pm) = pmTreeKey pm + treeKeyToBs (TreeKey (BlobKey sha _)) = SHA256.toHexBytes sha +-- | Type synonym for functions that yield a t'SMActual' parameterised by +-- t'GlobalPackageVersion' for a given list of project package directories. type SnapshotCandidate env - = [ResolvedPath Dir] -> RIO env (SMActual GlobalPackageVersion) + = [ResolvedPath Dir] -> RIO env (SMActual GlobalPackageVersion) +-- | For the given raw snapshot location, yield a function to yield a +-- t'SMActual' from a list of project package directories. loadProjectSnapshotCandidate :: - (HasConfig env) - => RawSnapshotLocation - -> PrintWarnings - -> Bool - -> RIO env (SnapshotCandidate env) + (HasConfig env) + => RawSnapshotLocation + -> PrintWarnings + -> Bool + -- ^ Should Haddock documentation be build for the package? + -> RIO env (SnapshotCandidate env) loadProjectSnapshotCandidate loc printWarnings buildHaddocks = do - (snapshot, _, _) <- loadAndCompleteSnapshotRaw loc Map.empty Map.empty - deps <- Map.traverseWithKey (snapToDepPackage False) (snapshotPackages snapshot) - let wc = snapshotCompiler snapshot - globals <- Map.map GlobalPackageVersion <$> globalsFromHints wc - return $ \projectPackages -> do - prjPkgs <- fmap Map.fromList . 
for projectPackages $ \resolved -> do - pp <- mkProjectPackage printWarnings resolved buildHaddocks - pure (cpName $ ppCommon pp, pp) - compiler <- either throwIO pure $ wantedToActual - $ snapshotCompiler snapshot - return SMActual - { smaCompiler = compiler - , smaProject = prjPkgs - , smaDeps = Map.difference deps prjPkgs - , smaGlobal = globals - } + debugRSL <- view rslInLogL + (snapshot, _, _) <- + loadAndCompleteSnapshotRaw' debugRSL loc Map.empty Map.empty + deps <- + Map.traverseWithKey (snapToDepPackage False) (snapshotPackages snapshot) + let wc = snapshotCompiler snapshot + globals <- Map.map GlobalPackageVersion <$> globalsFromHints wc + pure $ \projectPackages -> do + project <- fmap Map.fromList . for projectPackages $ \resolved -> do + pp <- mkProjectPackage printWarnings resolved buildHaddocks + pure (pp.projectCommon.name, pp) + compiler <- either throwIO pure $ wantedToActual $ snapshotCompiler snapshot + pure + SMActual + { compiler + , project + , deps = Map.difference deps project + , globals + } diff --git a/src/Stack/Storage/Project.hs b/src/Stack/Storage/Project.hs index e57a5641f1..7d858f36b5 100644 --- a/src/Stack/Storage/Project.hs +++ b/src/Stack/Storage/Project.hs @@ -1,50 +1,81 @@ -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DerivingStrategies #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE QuasiQuotes #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE StandaloneDeriving #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} {-# OPTIONS_GHC -Wno-unused-top-binds 
-Wno-identities #-} --- | Work with SQLite database used for caches across a single project. +{-| +Module : Stack.Storage.Project +Description : Work with the SQLite database for a project's caches. +License : BSD-3-Clause + +Work with the SQLite database used for a project's caches. +-} + module Stack.Storage.Project - ( initProjectStorage - , ConfigCacheKey - , configCacheKey - , loadConfigCache - , saveConfigCache - , deactiveConfigCache - ) where + ( initProjectStorage + , ConfigCacheKey + , ConfigCacheParent (..) + , ConfigCacheParentId + , configCacheKey + , loadConfigCache + , saveConfigCache + , deactiveConfigCache + ) where import qualified Data.ByteString as S import qualified Data.Set as Set -import Database.Persist.Sqlite -import Database.Persist.TH -import qualified Pantry.Internal as SQLite -import Path -import Stack.Prelude hiding (MigrationFailure) -import Stack.Storage.Util -import Stack.Types.Build -import Stack.Types.Cache -import Stack.Types.Config (HasBuildConfig, buildConfigL, bcProjectStorage, ProjectStorage (..)) -import Stack.Types.GhcPkgId +import Database.Persist.Sqlite + ( Entity (..), SelectOpt (..), SqlBackend, Unique, (=.) + , (==.), getBy, insert, selectList, update, updateWhere + ) +import Database.Persist.TH + ( mkMigrate, mkPersist, persistLowerCase, share + , sqlSettings + ) +import Pantry.SQLite ( initStorage, withStorage_ ) +import Stack.ConfigureOpts ( configureOptsFromDb ) +import Stack.Prelude +import Stack.Storage.Util + ( handleMigrationException, listUpdateDiff, setUpdateDiff + , updateCollection + ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..) ) +import Stack.Types.Cache + ( CachePkgSrc, ConfigCache (..), ConfigCacheType ) +import Stack.Types.ConfigureOpts ( ConfigureOpts (..) ) +import Stack.Types.GhcPkgId ( GhcPkgId ) +import Stack.Types.Storage ( ProjectStorage (..) 
) +-- Uses the Persistent entity syntax to generate entities for five tables in a +-- SQLite database: +-- +-- config_cache +-- config_cache_dir_option +-- config_cache_no_dir_option +-- config_cache_dep +-- config_cache_component +-- +-- The ID column for each table is automatically generated. +-- +-- The other tables have a foreign key referring to the config_cache table, via: +-- +-- parent ConfigCacheParentId sql="config_cache_id" OnDeleteCascade +-- +-- The tables have UNIQUE constraints on multiple columns. +-- +-- Creates a function migrateAll to perform all migrations for the generated +-- entities. share [ mkPersist sqlSettings - , mkDeleteCascade sqlSettings , mkMigrate "migrateAll" - ] - [persistLowerCase| + ] + [persistLowerCase| ConfigCacheParent sql="config_cache" - directory FilePath "default=(hex(randomblob(16)))" + directory FilePath default="(hex(randomblob(16)))" type ConfigCacheType pkgSrc CachePkgSrc active Bool @@ -54,165 +85,176 @@ ConfigCacheParent sql="config_cache" deriving Show ConfigCacheDirOption - parent ConfigCacheParentId sql="config_cache_id" + parent ConfigCacheParentId sql="config_cache_id" OnDeleteCascade index Int value String sql="option" UniqueConfigCacheDirOption parent index deriving Show ConfigCacheNoDirOption - parent ConfigCacheParentId sql="config_cache_id" + parent ConfigCacheParentId sql="config_cache_id" OnDeleteCascade index Int value String sql="option" UniqueConfigCacheNoDirOption parent index deriving Show ConfigCacheDep - parent ConfigCacheParentId sql="config_cache_id" + parent ConfigCacheParentId sql="config_cache_id" OnDeleteCascade value GhcPkgId sql="ghc_pkg_id" UniqueConfigCacheDep parent value deriving Show ConfigCacheComponent - parent ConfigCacheParentId sql="config_cache_id" + parent ConfigCacheParentId sql="config_cache_id" OnDeleteCascade value S.ByteString sql="component" UniqueConfigCacheComponent parent value deriving Show |] --- | Initialize the database. 
+-- | Initialize the project database for caches. initProjectStorage :: - HasLogFunc env - => Path Abs File -- ^ storage file - -> (ProjectStorage -> RIO env a) - -> RIO env a -initProjectStorage fp f = SQLite.initStorage "Stack" migrateAll fp $ f . ProjectStorage + HasLogFunc env + => Path Abs File + -- ^ The storage file. + -> (ProjectStorage -> RIO env a) + -- ^ Action, given a SQL database connection to the project database for + -- caches. + -> RIO env a +initProjectStorage fp f = handleMigrationException $ + initStorage "Stack" migrateAll fp $ f . ProjectStorage -- | Run an action in a database transaction withProjectStorage :: - (HasBuildConfig env, HasLogFunc env) - => ReaderT SqlBackend (RIO env) a - -> RIO env a -withProjectStorage inner = - flip SQLite.withStorage_ inner =<< view (buildConfigL . to bcProjectStorage . to unProjectStorage) + (HasBuildConfig env, HasLogFunc env) + => ReaderT SqlBackend (RIO env) a + -> RIO env a +withProjectStorage inner = do + storage <- view (buildConfigL . to (.projectStorage.projectStorage)) + withStorage_ storage inner --- | Key used to retrieve configuration or flag cache +-- | Type synonym representing keys used to retrieve a record from the Cabal +-- configuration cache or the library or executable Cabal flag cache. type ConfigCacheKey = Unique ConfigCacheParent --- | Build key used to retrieve configuration or flag cache -configCacheKey :: Path Abs Dir -> ConfigCacheType -> ConfigCacheKey +-- | For the given directory and type of cache, yields the key used to retrieve +-- a record from the Cabal configuration cache or the library or executable +-- Cabal flag cache. +configCacheKey :: + Path Abs Dir + -- ^ Directory. + -> ConfigCacheType + -- ^ Type of cache. 
+ -> ConfigCacheKey configCacheKey dir = UniqueConfigCacheParent (toFilePath dir) --- | Internal helper to read the 'ConfigCache' +-- | Internal helper to read the t'ConfigCache' readConfigCache :: - (HasBuildConfig env, HasLogFunc env) - => Entity ConfigCacheParent - -> ReaderT SqlBackend (RIO env) ConfigCache -readConfigCache (Entity parentId ConfigCacheParent {..}) = do - let configCachePkgSrc = configCacheParentPkgSrc - coDirs <- - map (configCacheDirOptionValue . entityVal) <$> - selectList - [ConfigCacheDirOptionParent ==. parentId] - [Asc ConfigCacheDirOptionIndex] - coNoDirs <- - map (configCacheNoDirOptionValue . entityVal) <$> - selectList - [ConfigCacheNoDirOptionParent ==. parentId] - [Asc ConfigCacheNoDirOptionIndex] - let configCacheOpts = ConfigureOpts {..} - configCacheDeps <- - Set.fromList . map (configCacheDepValue . entityVal) <$> - selectList [ConfigCacheDepParent ==. parentId] [] - configCacheComponents <- - Set.fromList . map (configCacheComponentValue . entityVal) <$> - selectList [ConfigCacheComponentParent ==. parentId] [] - let configCachePathEnvVar = configCacheParentPathEnvVar - let configCacheHaddock = configCacheParentHaddock - return ConfigCache {..} - --- | Load 'ConfigCache' from the database. + (HasBuildConfig env, HasLogFunc env) + => Entity ConfigCacheParent + -> ReaderT SqlBackend (RIO env) ConfigCache +readConfigCache (Entity parentId configCacheParent) = do + let pkgSrc = configCacheParent.configCacheParentPkgSrc + pathRelatedInfo <- + selectList + [ConfigCacheDirOptionParent ==. parentId] + [Asc ConfigCacheDirOptionIndex] + nonPathRelatedInfo <- + selectList + [ConfigCacheNoDirOptionParent ==. parentId] + [Asc ConfigCacheNoDirOptionIndex] + let configureOpts = configureOptsFromDb pathRelatedInfo nonPathRelatedInfo + deps <- + Set.fromList . map ((.configCacheDepValue) . entityVal) <$> + selectList [ConfigCacheDepParent ==. parentId] [] + components <- + Set.fromList . map ((.configCacheComponentValue) . 
entityVal) <$> + selectList [ConfigCacheComponentParent ==. parentId] [] + let pathEnvVar = configCacheParent.configCacheParentPathEnvVar + let buildHaddocks = configCacheParent.configCacheParentHaddock + pure ConfigCache + { configureOpts + , deps + , components + , buildHaddocks + , pkgSrc + , pathEnvVar + } + +-- | Load a t'ConfigCache' value from the project database for caches. loadConfigCache :: - (HasBuildConfig env, HasLogFunc env) - => ConfigCacheKey - -> RIO env (Maybe ConfigCache) + (HasBuildConfig env, HasLogFunc env) + => ConfigCacheKey + -> RIO env (Maybe ConfigCache) loadConfigCache key = - withProjectStorage $ do - mparent <- getBy key - case mparent of - Nothing -> return Nothing - Just parentEntity@(Entity _ ConfigCacheParent {..}) - | configCacheParentActive -> - Just <$> readConfigCache parentEntity - | otherwise -> return Nothing - --- | Insert or update 'ConfigCache' to the database. + withProjectStorage $ + getBy key >>= \case + Nothing -> pure Nothing + Just parentEntity@(Entity _ configCacheParent) + | configCacheParent.configCacheParentActive -> + Just <$> readConfigCache parentEntity + | otherwise -> pure Nothing + +-- | Insert or update a t'ConfigCache' value to the project database for caches. 
saveConfigCache :: - (HasBuildConfig env, HasLogFunc env) - => ConfigCacheKey - -> ConfigCache - -> RIO env () + (HasBuildConfig env, HasLogFunc env) + => ConfigCacheKey + -> ConfigCache + -> RIO env () saveConfigCache key@(UniqueConfigCacheParent dir type_) new = - withProjectStorage $ do - mparent <- getBy key - (parentId, mold) <- - case mparent of - Nothing -> - (, Nothing) <$> - insert - ConfigCacheParent - { configCacheParentDirectory = dir - , configCacheParentType = type_ - , configCacheParentPkgSrc = configCachePkgSrc new - , configCacheParentActive = True - , configCacheParentPathEnvVar = configCachePathEnvVar new - , configCacheParentHaddock = configCacheHaddock new - } - Just parentEntity@(Entity parentId _) -> do - old <- readConfigCache parentEntity - update - parentId - [ ConfigCacheParentPkgSrc =. configCachePkgSrc new - , ConfigCacheParentActive =. True - , ConfigCacheParentPathEnvVar =. configCachePathEnvVar new - ] - return (parentId, Just old) - updateList - ConfigCacheDirOption - ConfigCacheDirOptionParent + withProjectStorage $ do + (parentId, mold) <- getBy key >>= \case + Nothing -> + (, Nothing) <$> + insert + ConfigCacheParent + { configCacheParentDirectory = dir + , configCacheParentType = type_ + , configCacheParentPkgSrc = new.pkgSrc + , configCacheParentActive = True + , configCacheParentPathEnvVar = new.pathEnvVar + , configCacheParentHaddock = new.buildHaddocks + } + Just parentEntity@(Entity parentId _) -> do + old <- readConfigCache parentEntity + update parentId - ConfigCacheDirOptionIndex - (maybe [] (coDirs . configCacheOpts) mold) - (coDirs $ configCacheOpts new) - updateList - ConfigCacheNoDirOption - ConfigCacheNoDirOptionParent - parentId - ConfigCacheNoDirOptionIndex - (maybe [] (coNoDirs . 
configCacheOpts) mold) - (coNoDirs $ configCacheOpts new) - updateSet - ConfigCacheDep - ConfigCacheDepParent - parentId - ConfigCacheDepValue - (maybe Set.empty configCacheDeps mold) - (configCacheDeps new) - updateSet - ConfigCacheComponent - ConfigCacheComponentParent - parentId - ConfigCacheComponentValue - (maybe Set.empty configCacheComponents mold) - (configCacheComponents new) - --- | Mark 'ConfigCache' as inactive in the database. --- We use a flag instead of deleting the records since, in most cases, the same --- cache will be written again within in a few seconds (after --- `cabal configure`), so this avoids unnecessary database churn. + [ ConfigCacheParentPkgSrc =. new.pkgSrc + , ConfigCacheParentActive =. True + , ConfigCacheParentPathEnvVar =. new.pathEnvVar + ] + pure (parentId, Just old) + updateCollection + (listUpdateDiff ConfigCacheDirOptionIndex) + (uncurry $ ConfigCacheDirOption parentId) + [ConfigCacheDirOptionParent ==. parentId] + (maybe [] (.configureOpts.pathRelated) mold) + new.configureOpts.pathRelated + updateCollection + (listUpdateDiff ConfigCacheNoDirOptionIndex) + (uncurry $ ConfigCacheNoDirOption parentId) + [ConfigCacheNoDirOptionParent ==. parentId] + (maybe [] (.configureOpts.nonPathRelated) mold) + new.configureOpts.nonPathRelated + updateCollection + (setUpdateDiff ConfigCacheDepValue) + (ConfigCacheDep parentId) + [ConfigCacheDepParent ==. parentId] + (maybe Set.empty (.deps) mold) + new.deps + updateCollection + (setUpdateDiff ConfigCacheComponentValue) + (ConfigCacheComponent parentId) + [ConfigCacheComponentParent ==. parentId] + (maybe Set.empty (.components) mold) + new.components + +-- | Mark t'ConfigCache' as inactive in the database. We use a flag instead of +-- deleting the records since, in most cases, the same cache will be written +-- again within in a few seconds (after `cabal configure`), so this avoids +-- unnecessary database churn. 
deactiveConfigCache :: HasBuildConfig env => ConfigCacheKey -> RIO env () deactiveConfigCache (UniqueConfigCacheParent dir type_) = - withProjectStorage $ + withProjectStorage $ updateWhere - [ConfigCacheParentDirectory ==. dir, ConfigCacheParentType ==. type_] - [ConfigCacheParentActive =. False] + [ConfigCacheParentDirectory ==. dir, ConfigCacheParentType ==. type_] + [ConfigCacheParentActive =. False] diff --git a/src/Stack/Storage/User.hs b/src/Stack/Storage/User.hs index c8b739da8b..b5de0ca2a3 100644 --- a/src/Stack/Storage/User.hs +++ b/src/Stack/Storage/User.hs @@ -1,63 +1,100 @@ -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DerivingStrategies #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE QuasiQuotes #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE StandaloneDeriving #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} {-# OPTIONS_GHC -Wno-unused-top-binds -Wno-identities #-} --- | Work with SQLite database used for caches across an entire user account. +{-| +Module : Stack.Storage.User +Description : Work with SQLite DB for caches across a user account. +License : BSD-3-Clause + +Work with SQLite database used for caches across an entire user account. 
+-} + module Stack.Storage.User - ( initUserStorage - , PrecompiledCacheKey - , precompiledCacheKey - , loadPrecompiledCache - , savePrecompiledCache - , loadDockerImageExeCache - , saveDockerImageExeCache - , loadCompilerPaths - , saveCompilerPaths - , upgradeChecksSince - , logUpgradeCheck - ) where + ( initUserStorage + , PrecompiledCacheKey + , PrecompiledCacheParent (..) + , precompiledCacheKey + , loadPrecompiledCache + , savePrecompiledCache + , loadDockerImageExeCache + , saveDockerImageExeCache + , loadCompilerPaths + , saveCompilerPaths + , upgradeChecksSince + , logUpgradeCheck + ) where import qualified Data.Set as Set import qualified Data.Text as T -import Data.Time.Clock (UTCTime) -import Database.Persist.Sqlite -import Database.Persist.TH -import Distribution.Text (simpleParse, display) -import Foreign.C.Types (CTime (..)) -import qualified Pantry.Internal as SQLite -import Path -import Path.IO (resolveFile', resolveDir') +import Data.Time.Clock ( UTCTime ) +import Database.Persist.Sqlite + ( Entity (..), SqlBackend, Unique, (=.), (==.), (>=.), count + , deleteBy, getBy, insert, insert_, selectList, update + , upsert + ) +import Database.Persist.TH + ( mkMigrate, mkPersist, persistLowerCase, share + , sqlSettings + ) +import Distribution.Text ( simpleParse, display ) +import Foreign.C.Types ( CTime (..) 
)
+import Pantry.SQLite ( initStorage, withStorage_ )
+import Path ( (</>), mkRelFile, parseRelFile )
+import Path.IO ( resolveFile', resolveDir' )
 import qualified RIO.FilePath as FP
-import Stack.Prelude hiding (MigrationFailure)
-import Stack.Storage.Util
-import Stack.Types.Build
-import Stack.Types.Cache
-import Stack.Types.Compiler
-import Stack.Types.CompilerBuild (CompilerBuild)
-import Stack.Types.Config (HasConfig, configL, configUserStorage, CompilerPaths (..), GhcPkgExe (..), UserStorage (..))
-import System.Posix.Types (COff (..))
-import System.PosixCompat.Files (getFileStatus, fileSize, modificationTime)
+import Stack.Prelude
+import Stack.Storage.Util
+ ( handleMigrationException, setUpdateDiff, updateCollection )
+import Stack.Types.Cache ( Action (..), PrecompiledCache (..) )
+import Stack.Types.Compiler ( ActualCompiler, compilerVersionText )
+import Stack.Types.CompilerBuild ( CompilerBuild )
+import Stack.Types.CompilerPaths
+ ( CompilerPaths (..), GhcPkgExe (..) )
+import Stack.Types.Config ( Config (..), HasConfig (..) )
+import Stack.Types.Storage ( UserStorage (..) )
+import System.Posix.Types ( COff (..) )
+import System.PosixCompat.Files
+ ( fileSize, getFileStatus, modificationTime )
+
+-- | Type representing \'pretty\' exceptions thrown by functions exported by the
+-- "Stack.Storage.User" module.
+data StorageUserException
+ = CompilerFileMetadataMismatch
+ | GlobalPackageCacheFileMetadataMismatch
+ | GlobalDumpParseFailure
+ | CompilerCacheArchitectureInvalid Text
+ deriving Show
+
+instance Exception StorageUserException where
+ displayException CompilerFileMetadataMismatch =
+ "Error: [S-8196]\n"
+ ++ "Compiler file metadata mismatch, ignoring cache."
+ displayException GlobalPackageCacheFileMetadataMismatch =
+ "Error: [S-5378]\n"
+ ++ "Global package cache file metadata mismatch, ignoring cache."
+ displayException GlobalDumpParseFailure =
+ "Error: [S-2673]\n"
+ ++ "Global dump did not parse correctly."
+ displayException
+ (CompilerCacheArchitectureInvalid compilerCacheArch) = concat
+ [ "Error: [S-8441]\n"
+ , "Invalid arch: "
+ , show compilerCacheArch
+ ]

 share [ mkPersist sqlSettings
- , mkDeleteCascade sqlSettings
 , mkMigrate "migrateAll"
- ]
- [persistLowerCase|
+ ]
+ [persistLowerCase|
 PrecompiledCacheParent sql="precompiled_cache"
- platformGhcDir FilePath "default=(hex(randomblob(16)))"
+ platformGhcDir FilePath default="(hex(randomblob(16)))"
 compiler Text
 cabalVersion Text
 packageKey Text
@@ -68,13 +105,13 @@ PrecompiledCacheParent sql="precompiled_cache"
 deriving Show

 PrecompiledCacheSubLib
- parent PrecompiledCacheParentId sql="precompiled_cache_id"
+ parent PrecompiledCacheParentId sql="precompiled_cache_id" OnDeleteCascade
 value FilePath sql="sub_lib"
 UniquePrecompiledCacheSubLib parent value
 deriving Show

 PrecompiledCacheExe
- parent PrecompiledCacheParentId sql="precompiled_cache_id"
+ parent PrecompiledCacheParentId sql="precompiled_cache_id" OnDeleteCascade
 value FilePath sql="exe"
 UniquePrecompiledCacheExe parent value
 deriving Show
@@ -123,221 +160,254 @@ LastPerformed

 -- | Initialize the database.
 initUserStorage ::
- HasLogFunc env
- => Path Abs File -- ^ storage file
- -> (UserStorage -> RIO env a)
- -> RIO env a
-initUserStorage fp f = SQLite.initStorage "Stack" migrateAll fp $ f . UserStorage
+ HasLogFunc env
+ => Path Abs File -- ^ storage file
+ -> (UserStorage -> RIO env a)
+ -> RIO env a
+initUserStorage fp f = handleMigrationException $
+ initStorage "Stack" migrateAll fp $ f . UserStorage

 -- | Run an action in a database transaction
 withUserStorage ::
- (HasConfig env, HasLogFunc env)
- => ReaderT SqlBackend (RIO env) a
- -> RIO env a
-withUserStorage inner =
- flip SQLite.withStorage_ inner =<< view (configL . to configUserStorage . to unUserStorage)
+ (HasConfig env, HasLogFunc env)
+ => ReaderT SqlBackend (RIO env) a
+ -> RIO env a
+withUserStorage inner = do
+ storage <- view (configL . 
to (.userStorage.userStorage)) + withStorage_ storage inner -- | Key used to retrieve the precompiled cache type PrecompiledCacheKey = Unique PrecompiledCacheParent -- | Build key used to retrieve the precompiled cache precompiledCacheKey :: - Path Rel Dir - -> ActualCompiler - -> Version - -> Text - -> ByteString - -> Bool - -> PrecompiledCacheKey + Path Rel Dir + -> ActualCompiler + -> Version + -> Text + -> ByteString + -> Bool + -> PrecompiledCacheKey precompiledCacheKey platformGhcDir compiler cabalVersion = - UniquePrecompiledCacheParent - (toFilePath platformGhcDir) - (compilerVersionText compiler) - (T.pack $ versionString cabalVersion) + UniquePrecompiledCacheParent + (toFilePath platformGhcDir) + (compilerVersionText compiler) + (T.pack $ versionString cabalVersion) --- | Internal helper to read the 'PrecompiledCache' from the database +-- | Internal helper to read the t'PrecompiledCache' from the database readPrecompiledCache :: - (HasConfig env, HasLogFunc env) - => PrecompiledCacheKey - -> ReaderT SqlBackend (RIO env) (Maybe ( PrecompiledCacheParentId - , PrecompiledCache Rel)) + (HasConfig env, HasLogFunc env) + => PrecompiledCacheKey + -> ReaderT SqlBackend (RIO env) (Maybe ( PrecompiledCacheParentId + , PrecompiledCache Rel)) readPrecompiledCache key = do - mparent <- getBy key - forM mparent $ \(Entity parentId PrecompiledCacheParent {..}) -> do - pcLibrary <- mapM parseRelFile precompiledCacheParentLibrary - pcSubLibs <- - mapM (parseRelFile . precompiledCacheSubLibValue . entityVal) =<< - selectList [PrecompiledCacheSubLibParent ==. parentId] [] - pcExes <- - mapM (parseRelFile . precompiledCacheExeValue . entityVal) =<< - selectList [PrecompiledCacheExeParent ==. parentId] [] - return (parentId, PrecompiledCache {..}) - --- | Load 'PrecompiledCache' from the database. 
+ mparent <- getBy key + forM mparent $ \(Entity parentId precompiledCacheParent) -> do + library <- + mapM parseRelFile precompiledCacheParent.precompiledCacheParentLibrary + subLibs <- + mapM (parseRelFile . (.precompiledCacheSubLibValue) . entityVal) =<< + selectList [PrecompiledCacheSubLibParent ==. parentId] [] + exes <- + mapM (parseRelFile . (.precompiledCacheExeValue) . entityVal) =<< + selectList [PrecompiledCacheExeParent ==. parentId] [] + pure + ( parentId + , PrecompiledCache + { library + , subLibs + , exes + } + ) + +-- | Load t'PrecompiledCache' from the database. loadPrecompiledCache :: - (HasConfig env, HasLogFunc env) - => PrecompiledCacheKey - -> RIO env (Maybe (PrecompiledCache Rel)) -loadPrecompiledCache key = withUserStorage $ fmap snd <$> readPrecompiledCache key + (HasConfig env, HasLogFunc env) + => PrecompiledCacheKey + -> RIO env (Maybe (PrecompiledCache Rel)) +loadPrecompiledCache key = + withUserStorage $ fmap snd <$> readPrecompiledCache key --- | Insert or update 'PrecompiledCache' to the database. +-- | Insert or update t'PrecompiledCache' to the database. savePrecompiledCache :: - (HasConfig env, HasLogFunc env) - => PrecompiledCacheKey - -> PrecompiledCache Rel - -> RIO env () -savePrecompiledCache key@(UniquePrecompiledCacheParent precompiledCacheParentPlatformGhcDir precompiledCacheParentCompiler precompiledCacheParentCabalVersion precompiledCacheParentPackageKey precompiledCacheParentOptionsHash precompiledCacheParentHaddock) new = - withUserStorage $ do - let precompiledCacheParentLibrary = fmap toFilePath (pcLibrary new) - mIdOld <- readPrecompiledCache key - (parentId, mold) <- - case mIdOld of - Nothing -> (, Nothing) <$> insert PrecompiledCacheParent {..} - Just (parentId, old) -> do - update - parentId - [ PrecompiledCacheParentLibrary =. 
- precompiledCacheParentLibrary - ] - return (parentId, Just old) - updateSet - PrecompiledCacheSubLib - PrecompiledCacheSubLibParent - parentId - PrecompiledCacheSubLibValue - (maybe Set.empty (toFilePathSet . pcSubLibs) mold) - (toFilePathSet $ pcSubLibs new) - updateSet - PrecompiledCacheExe - PrecompiledCacheExeParent + (HasConfig env, HasLogFunc env) + => PrecompiledCacheKey + -> PrecompiledCache Rel + -> RIO env () +savePrecompiledCache + key@( UniquePrecompiledCacheParent + precompiledCacheParentPlatformGhcDir + precompiledCacheParentCompiler + precompiledCacheParentCabalVersion + precompiledCacheParentPackageKey + precompiledCacheParentOptionsHash + precompiledCacheParentHaddock + ) + new + = withUserStorage $ do + let precompiledCacheParentLibrary = fmap toFilePath new.library + (parentId, mold) <- readPrecompiledCache key >>= \case + Nothing -> (, Nothing) <$> insert PrecompiledCacheParent + { precompiledCacheParentPlatformGhcDir + , precompiledCacheParentCompiler + , precompiledCacheParentCabalVersion + , precompiledCacheParentPackageKey + , precompiledCacheParentOptionsHash + , precompiledCacheParentHaddock + , precompiledCacheParentLibrary + } + Just (parentId, old) -> do + update parentId - PrecompiledCacheExeValue - (maybe Set.empty (toFilePathSet . pcExes) mold) - (toFilePathSet $ pcExes new) - where - toFilePathSet = Set.fromList . map toFilePath + [ PrecompiledCacheParentLibrary =. + precompiledCacheParentLibrary + ] + pure (parentId, Just old) + updateCollection + (setUpdateDiff PrecompiledCacheSubLibValue) + (PrecompiledCacheSubLib parentId) + [PrecompiledCacheSubLibParent ==. parentId] + (maybe Set.empty (toFilePathSet . (.subLibs)) mold) + (toFilePathSet new.subLibs) + updateCollection + (setUpdateDiff PrecompiledCacheExeValue) + (PrecompiledCacheExe parentId) + [PrecompiledCacheExeParent ==. parentId] + (maybe Set.empty (toFilePathSet . (.exes)) mold) + (toFilePathSet new.exes) + where + toFilePathSet = Set.fromList . 
map toFilePath -- | Get the record of whether an executable is compatible with a Docker image loadDockerImageExeCache :: - (HasConfig env, HasLogFunc env) - => Text - -> Path Abs File - -> UTCTime - -> RIO env (Maybe Bool) -loadDockerImageExeCache imageId exePath exeTimestamp = - withUserStorage $ - fmap (dockerImageExeCacheCompatible . entityVal) <$> - getBy (DockerImageExeCacheUnique imageId (toFilePath exePath) exeTimestamp) - --- | Sest the record of whether an executable is compatible with a Docker image + (HasConfig env, HasLogFunc env) + => Text + -> Path Abs File + -> UTCTime + -> RIO env (Maybe Bool) +loadDockerImageExeCache imageId exePath exeTimestamp = withUserStorage $ + fmap ((.dockerImageExeCacheCompatible) . entityVal) <$> + getBy (DockerImageExeCacheUnique imageId (toFilePath exePath) exeTimestamp) + +-- | Sets the record of whether an executable is compatible with a Docker image saveDockerImageExeCache :: - (HasConfig env, HasLogFunc env) - => Text - -> Path Abs File - -> UTCTime - -> Bool - -> RIO env () -saveDockerImageExeCache imageId exePath exeTimestamp compatible = - void $ - withUserStorage $ + (HasConfig env, HasLogFunc env) + => Text + -> Path Abs File + -> UTCTime + -> Bool + -> RIO env () +saveDockerImageExeCache imageId exePath exeTimestamp compatible = void $ + withUserStorage $ upsert - (DockerImageExeCache - imageId - (toFilePath exePath) - exeTimestamp - compatible) - [] - --- | Type-restricted version of 'fromIntegral' to ensure we're making --- the value bigger, not smaller. + ( DockerImageExeCache + imageId + (toFilePath exePath) + exeTimestamp + compatible + ) + [] + +-- | Type-restricted version of 'fromIntegral' to ensure we're making the value +-- bigger, not smaller. sizeToInt64 :: COff -> Int64 sizeToInt64 (COff i) = fromIntegral i -- fromIntegral added for 32-bit systems --- | Type-restricted version of 'fromIntegral' to ensure we're making --- the value bigger, not smaller. 
+-- | Type-restricted version of 'fromIntegral' to ensure we're making the value
+-- bigger, not smaller.
 timeToInt64 :: CTime -> Int64
 timeToInt64 (CTime i) = fromIntegral i -- fromIntegral added for 32-bit systems

--- | Load compiler information, if available, and confirm that the
--- referenced files are unchanged. May throw exceptions!
-loadCompilerPaths
- :: HasConfig env
+-- | Load compiler information, if available, and confirm that the referenced
+-- files are unchanged. May throw exceptions!
+loadCompilerPaths ::
+ HasConfig env
 => Path Abs File -- ^ compiler executable
 -> CompilerBuild
 -> Bool -- ^ sandboxed?
 -> RIO env (Maybe CompilerPaths)
 loadCompilerPaths compiler build sandboxed = do
 mres <- withUserStorage $ getBy $ UniqueCompilerInfo $ toFilePath compiler
- for mres $ \(Entity _ CompilerCache {..}) -> do
+ for mres $ \(Entity _ compilerCache) -> do
 compilerStatus <- liftIO $ getFileStatus $ toFilePath compiler
 when
- (compilerCacheGhcSize /= sizeToInt64 (fileSize compilerStatus) ||
- compilerCacheGhcModified /= timeToInt64 (modificationTime compilerStatus))
- (throwString "Compiler file metadata mismatch, ignoring cache")
- globalDbStatus <- liftIO $ getFileStatus $ compilerCacheGlobalDb FP.</> "package.cache"
+ ( compilerCache.compilerCacheGhcSize /=
+ sizeToInt64 (fileSize compilerStatus)
+ || compilerCache.compilerCacheGhcModified /=
+ timeToInt64 (modificationTime compilerStatus)
+ )
+ (throwIO CompilerFileMetadataMismatch)
+ globalDbStatus <- liftIO $
+ getFileStatus $ compilerCache.compilerCacheGlobalDb FP.</> "package.cache"
 when
- (compilerCacheGlobalDbCacheSize /= sizeToInt64 (fileSize globalDbStatus) ||
- compilerCacheGlobalDbCacheModified /= timeToInt64 (modificationTime globalDbStatus))
- (throwString "Global package cache file metadata mismatch, ignoring cache")
-
- -- We could use parseAbsFile instead of resolveFile' below to
- -- bypass some system calls, at the cost of some really wonky
- -- error messages in case someone screws up their GHC installation
- pkgexe <- resolveFile' compilerCacheGhcPkgPath
- runghc <- resolveFile' compilerCacheRunghcPath
- haddock <- resolveFile' compilerCacheHaddockPath
- globaldb <- resolveDir' compilerCacheGlobalDb
-
- cabalVersion <- parseVersionThrowing $ T.unpack compilerCacheCabalVersion
+ ( compilerCache.compilerCacheGlobalDbCacheSize /=
+ sizeToInt64 (fileSize globalDbStatus)
+ || compilerCache.compilerCacheGlobalDbCacheModified /=
+ timeToInt64 (modificationTime globalDbStatus)
+ )
+ (throwIO GlobalPackageCacheFileMetadataMismatch)
+
+ -- We could use parseAbsFile instead of resolveFile' below to bypass some
+ -- system calls, at the cost of some really wonky error messages in case
+ -- someone screws up their GHC installation
+ pkg <- GhcPkgExe <$> resolveFile' compilerCache.compilerCacheGhcPkgPath
+ interpreter <- resolveFile' compilerCache.compilerCacheRunghcPath
+ haddock <- resolveFile' compilerCache.compilerCacheHaddockPath
+ globalDB <- resolveDir' compilerCache.compilerCacheGlobalDb
+
+ cabalVersion <- parseVersionThrowing $
+ T.unpack compilerCache.compilerCacheCabalVersion
 globalDump <-
- case readMaybe $ T.unpack compilerCacheGlobalDump of
- Nothing -> throwString "Global dump did not parse correctly"
+ case readMaybe $ T.unpack compilerCache.compilerCacheGlobalDump of
+ Nothing -> throwIO GlobalDumpParseFailure
 Just globalDump -> pure globalDump
 arch <-
- case simpleParse $ T.unpack compilerCacheArch of
- Nothing -> throwString $ "Invalid arch: " ++ show compilerCacheArch
+ case simpleParse $ T.unpack 
compilerCache.compilerCacheArch of
+ Nothing -> throwIO $
+ CompilerCacheArchitectureInvalid compilerCache.compilerCacheArch
 Just arch -> pure arch
- pure CompilerPaths
- { cpCompiler = compiler
- , cpCompilerVersion = compilerCacheActualVersion
- , cpArch = arch
- , cpBuild = build
- , cpPkg = GhcPkgExe pkgexe
- , cpInterpreter = runghc
- , cpHaddock = haddock
- , cpSandboxed = sandboxed
- , cpCabalVersion = cabalVersion
- , cpGlobalDB = globaldb
- , cpGhcInfo = compilerCacheInfo
- , cpGlobalDump = globalDump
+ pure CompilerPaths
+ { compiler
+ , compilerVersion = compilerCache.compilerCacheActualVersion
+ , arch
+ , build
+ , pkg
+ , interpreter
+ , haddock
+ , sandboxed
+ , cabalVersion
+ , globalDB
+ , ghcInfo = compilerCache.compilerCacheInfo
+ , globalDump
 }

 -- | Save compiler information. May throw exceptions!
-saveCompilerPaths
- :: HasConfig env
+saveCompilerPaths ::
+ HasConfig env
 => CompilerPaths
 -> RIO env ()
-saveCompilerPaths CompilerPaths {..} = withUserStorage $ do
- deleteBy $ UniqueCompilerInfo $ toFilePath cpCompiler
- compilerStatus <- liftIO $ getFileStatus $ toFilePath cpCompiler
- globalDbStatus <- liftIO $ getFileStatus $ toFilePath $ cpGlobalDB </> $(mkRelFile "package.cache")
- let GhcPkgExe pkgexe = cpPkg
+saveCompilerPaths cp = withUserStorage $ do
+ deleteBy $ UniqueCompilerInfo $ toFilePath cp.compiler
+ compilerStatus <- liftIO $ getFileStatus $ toFilePath cp.compiler
+ globalDbStatus <- liftIO $
+ getFileStatus $ toFilePath $ cp.globalDB </> $(mkRelFile "package.cache")
+ let GhcPkgExe pkgexe = cp.pkg
 insert_ CompilerCache
- { compilerCacheActualVersion = cpCompilerVersion
- , compilerCacheGhcPath = toFilePath cpCompiler
+ { compilerCacheActualVersion = cp.compilerVersion
+ , compilerCacheGhcPath = toFilePath cp.compiler
 , compilerCacheGhcSize = sizeToInt64 $ fileSize compilerStatus
 , compilerCacheGhcModified = timeToInt64 $ modificationTime compilerStatus
 , compilerCacheGhcPkgPath = toFilePath pkgexe
- , compilerCacheRunghcPath = toFilePath cpInterpreter
- , 
compilerCacheHaddockPath = toFilePath cpHaddock - , compilerCacheCabalVersion = T.pack $ versionString cpCabalVersion - , compilerCacheGlobalDb = toFilePath cpGlobalDB + , compilerCacheRunghcPath = toFilePath cp.interpreter + , compilerCacheHaddockPath = toFilePath cp.haddock + , compilerCacheCabalVersion = T.pack $ versionString cp.cabalVersion + , compilerCacheGlobalDb = toFilePath cp.globalDB , compilerCacheGlobalDbCacheSize = sizeToInt64 $ fileSize globalDbStatus - , compilerCacheGlobalDbCacheModified = timeToInt64 $ modificationTime globalDbStatus - , compilerCacheInfo = cpGhcInfo - , compilerCacheGlobalDump = tshow cpGlobalDump - , compilerCacheArch = T.pack $ Distribution.Text.display cpArch + , compilerCacheGlobalDbCacheModified = + timeToInt64 $ modificationTime globalDbStatus + , compilerCacheInfo = cp.ghcInfo + , compilerCacheGlobalDump = tshow cp.globalDump + , compilerCacheArch = T.pack $ Distribution.Text.display cp.arch } -- | How many upgrade checks have occurred since the given timestamp? diff --git a/src/Stack/Storage/Util.hs b/src/Stack/Storage/Util.hs index bc445fe2fd..9700109220 100644 --- a/src/Stack/Storage/Util.hs +++ b/src/Stack/Storage/Util.hs @@ -1,69 +1,92 @@ -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE TypeFamilies #-} --- | Utils for the other Stack.Storage modules +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE TypeFamilies #-} + +{-| +Module : Stack.Storage.Util +Description : Utilities for other @Stack.Storage@ modules. +License : BSD-3-Clause + +Utilities for the other @Stack.Storage@ modules. 
+-} + module Stack.Storage.Util - ( updateList - , updateSet - ) where + ( handleMigrationException + , updateCollection + , setUpdateDiff + , listUpdateDiff + ) where import qualified Data.Set as Set -import Database.Persist -import Stack.Prelude hiding (MigrationFailure) +import Database.Persist + ( BaseBackend, EntityField, Filter, PersistEntity + , PersistEntityBackend, PersistField, PersistQueryWrite + , SafeToInsert, (<-.), deleteWhere, insertMany_ + ) +import Stack.Prelude +import Stack.Types.Storage ( StoragePrettyException (..) ) + +-- | Efficiently update a collection of values with a given diff function. +updateCollection :: + ( PersistEntityBackend record ~ BaseBackend backend + , Eq (collection rawValue) + , PersistEntity record + , PersistField value + , MonadIO m + , PersistQueryWrite backend + , SafeToInsert record + , Foldable collection + ) + => (collection rawValue -> collection rawValue -> ([Filter record], [value])) + -- ^ Function to yield items in old not in new, to delete, and values in + -- new not in old, to add, from the old and new collections of values. + -> (value -> record) + -- ^ Function to yield new records from values in new not in old. + -> [Filter record] + -- ^ Extra items to delete, if there are other items to delete. + -> collection rawValue + -- ^ The old collection of values. + -> collection rawValue + -- ^ The new collection of values. + -> ReaderT backend m () +updateCollection fnDiffer recordCons extra old new = + when (old /= new) $ do + let (oldMinusNewFilter, newMinusOld) = fnDiffer old new + unless (null oldMinusNewFilter) $ deleteWhere + (extra ++ oldMinusNewFilter) + unless (null newMinusOld) $ insertMany_ $ + map recordCons $ toList newMinusOld + +setUpdateDiff :: + (Ord value, PersistField value) + => EntityField record value + -> Set value + -> Set value + -> ([Filter record], [value]) +setUpdateDiff indexFieldCons old new = + let oldMinusNew = Set.difference old new + in ([indexFieldCons <-. 
toList oldMinusNew], toList $ Set.difference new old) --- | Efficiently update a set of values stored in a database table -updateSet :: - ( PersistEntityBackend record ~ BaseBackend backend - , PersistField parentid - , PersistField value - , Ord value - , PersistEntity record - , MonadIO m - , PersistQueryWrite backend - ) - => (parentid -> value -> record) - -> EntityField record parentid - -> parentid - -> EntityField record value - -> Set value - -> Set value - -> ReaderT backend m () -updateSet recordCons parentFieldCons parentId valueFieldCons old new = - when (old /= new) $ do - deleteWhere - [ parentFieldCons ==. parentId - , valueFieldCons <-. Set.toList (Set.difference old new) - ] - insertMany_ $ - map (recordCons parentId) $ Set.toList (Set.difference new old) +listUpdateDiff :: + (Ord value) + => EntityField record Int + -> [value] + -> [value] + -> ([Filter record], [(Int, value)]) +listUpdateDiff indexFieldCons old new = + let oldSet = Set.fromList (zip [0 ..] old) + newSet = Set.fromList (zip [0 ..] new) + oldMinusNew = Set.difference oldSet newSet + indexList = map fst (Set.toList oldMinusNew) + in ([indexFieldCons <-. indexList], toList $ Set.difference newSet oldSet) --- | Efficiently update a list of values stored in a database table. -updateList :: - ( PersistEntityBackend record ~ BaseBackend backend - , PersistField parentid - , Ord value - , PersistEntity record - , MonadIO m - , PersistQueryWrite backend - ) - => (parentid -> Int -> value -> record) - -> EntityField record parentid - -> parentid - -> EntityField record Int - -> [value] - -> [value] - -> ReaderT backend m () -updateList recordCons parentFieldCons parentId indexFieldCons old new = - when (old /= new) $ do - let oldSet = Set.fromList (zip [0 ..] old) - newSet = Set.fromList (zip [0 ..] new) - deleteWhere - [ parentFieldCons ==. parentId - , indexFieldCons <-. 
- map fst (Set.toList $ Set.difference oldSet newSet) - ] - insertMany_ $ - map (uncurry $ recordCons parentId) $ - Set.toList (Set.difference newSet oldSet) +handleMigrationException :: HasLogFunc env => RIO env a -> RIO env a +handleMigrationException inner = do + eres <- try inner + either + ( \e -> case e :: PantryException of + MigrationFailure desc fp ex -> + prettyThrowIO $ StorageMigrationFailure desc fp ex + _ -> throwIO e + ) + pure + eres diff --git a/src/Stack/Templates.hs b/src/Stack/Templates.hs new file mode 100644 index 0000000000..ee3c1dd3f2 --- /dev/null +++ b/src/Stack/Templates.hs @@ -0,0 +1,83 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Templates +Description : Functions related to Stack's @templates@ command. +License : BSD-3-Clause + +Functions related to Stack's @templates@ command. +-} + +module Stack.Templates + ( templatesCmd + , templatesHelp + ) where + +import qualified Data.ByteString.Lazy as LB +import qualified Data.Text.IO as T +import Network.HTTP.StackClient + ( HttpException (..), getResponseBody, httpLbs, parseUrlThrow + , setGitHubHeaders + ) +import Stack.Prelude +import Stack.Runners ( ShouldReexec (..), withConfig ) +import Stack.Types.Runner ( Runner ) + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Templates" module. +data TemplatesPrettyException + = DownloadTemplatesHelpFailed !HttpException + | TemplatesHelpEncodingInvalid !String !UnicodeException + +deriving instance Show TemplatesPrettyException + +instance Pretty TemplatesPrettyException where + pretty (DownloadTemplatesHelpFailed err) = + "[S-8143]" + <> line + <> fillSep + [ flow "Stack failed to download the help for" + , style Shell "stack templates" <> "." 
+ ] + <> blankLine + <> flow "While downloading, Stack encountered the following error:" + <> blankLine + <> string (displayException err) + pretty (TemplatesHelpEncodingInvalid url err) = + "[S-6670]" + <> line + <> fillSep + [ flow "Stack failed to decode the help for" + , style Shell "stack templates" + , flow "downloaded from" + , style Url (fromString url) <> "." + ] + <> blankLine + <> flow "While decoding, Stack encountered the following error:" + <> blankLine + <> string (displayException err) + +instance Exception TemplatesPrettyException + +-- | Function underlying the @stack templates@ command. Display instructions for +-- how to use templates. +templatesCmd :: () -> RIO Runner () +templatesCmd () = withConfig NoReexec templatesHelp + +-- | Display help for the templates command. +templatesHelp :: HasTerm env => RIO env () +templatesHelp = do + let url = defaultTemplatesHelpUrl + req <- fmap setGitHubHeaders (parseUrlThrow url) + resp <- catch + (httpLbs req) + (prettyThrowM . DownloadTemplatesHelpFailed) + case decodeUtf8' $ LB.toStrict $ getResponseBody resp of + Left err -> prettyThrowM $ TemplatesHelpEncodingInvalid url err + Right txt -> liftIO $ T.putStrLn txt + +-- | Default web URL to get the `stack templates` help output. 
+defaultTemplatesHelpUrl :: String +defaultTemplatesHelpUrl = + "https://raw.githubusercontent.com/commercialhaskell/stack-templates/master/STACK_HELP.md" diff --git a/src/Stack/Types/AddCommand.hs b/src/Stack/Types/AddCommand.hs new file mode 100644 index 0000000000..dbc83e05e3 --- /dev/null +++ b/src/Stack/Types/AddCommand.hs @@ -0,0 +1,28 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.AddCommand +License : BSD-3-Clause +-} + +module Stack.Types.AddCommand + ( AddCommand + ) where + +import Control.Monad.Trans.Except ( ExceptT ) +import Control.Monad.Writer ( Writer ) +import qualified Options.Applicative as OA +import Stack.Prelude +import Stack.Types.GlobalOptsMonoid ( GlobalOptsMonoid ) +import Stack.Types.Runner ( Runner ) + +-- | A type synonym for the monad used to add command line commands to Stack. +-- The monad is a stack of an 'ExceptT' @(@t'RIO' 'Runner' @())@ monad on top of +-- a 'Writer' @f@ monad, where @f@ is +-- 'Options.Applicative.Mod' 'Options.Applicative.CommandFields' @(@t'RIO' 'Runner' @(),@ 'GlobalOptsMonoid'@)@ - that +-- is, an option modifier for command options that have return type +-- @(@t'RIO' 'Runner' @(),@ 'GlobalOptsMonoid'@)@. +type AddCommand = + ExceptT (RIO Runner ()) + (Writer (OA.Mod OA.CommandFields (RIO Runner (), GlobalOptsMonoid))) + () diff --git a/src/Stack/Types/AllowNewerDeps.hs b/src/Stack/Types/AllowNewerDeps.hs new file mode 100644 index 0000000000..0651fa2393 --- /dev/null +++ b/src/Stack/Types/AllowNewerDeps.hs @@ -0,0 +1,23 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.AllowNewerDeps +License : BSD-3-Clause +-} + +module Stack.Types.AllowNewerDeps + ( AllowNewerDeps (..) + ) where + +import Data.Aeson.Types ( FromJSON (..) ) +import Distribution.PackageDescription ( mkPackageName ) +import Stack.Prelude + +-- | A type representing lists of packages for which Stack should ignore lower +-- and upper version bounds in its Cabal file. 
+newtype AllowNewerDeps + = AllowNewerDeps [PackageName] + deriving (Eq, Generic, Monoid, Ord, Read, Semigroup, Show) + +instance FromJSON AllowNewerDeps where + parseJSON = fmap (AllowNewerDeps . fmap mkPackageName) . parseJSON diff --git a/src/Stack/Types/ApplyGhcOptions.hs b/src/Stack/Types/ApplyGhcOptions.hs new file mode 100644 index 0000000000..f8525b9028 --- /dev/null +++ b/src/Stack/Types/ApplyGhcOptions.hs @@ -0,0 +1,29 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.ApplyGhcOptions +License : BSD-3-Clause +-} + +module Stack.Types.ApplyGhcOptions + ( ApplyGhcOptions (..) + ) where + +import Data.Aeson.Types ( FromJSON (..), withText ) +import Stack.Prelude + +-- | Which packages do ghc-options on the command line apply to? +data ApplyGhcOptions + = AGOTargets -- ^ All project packages that are targets. + | AGOLocals -- ^ All project packages, even non-targets. + | AGOEverything -- ^ All packages, project packages and dependencies. + deriving (Bounded, Enum, Eq, Ord, Read, Show) + +instance FromJSON ApplyGhcOptions where + parseJSON = withText "ApplyGhcOptions" $ \t -> + case t of + "targets" -> pure AGOTargets + "locals" -> pure AGOLocals + "everything" -> pure AGOEverything + _ -> fail $ "Invalid ApplyGhcOptions: " ++ show t diff --git a/src/Stack/Types/ApplyProgOptions.hs b/src/Stack/Types/ApplyProgOptions.hs new file mode 100644 index 0000000000..2acd98216f --- /dev/null +++ b/src/Stack/Types/ApplyProgOptions.hs @@ -0,0 +1,30 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.ApplyProgOptions +License : BSD-3-Clause +-} + +module Stack.Types.ApplyProgOptions + ( ApplyProgOptions (..) + ) where + +import Data.Aeson.Types ( FromJSON (..), withText ) +import Stack.Prelude + +-- | Which packages do all and any --PROG-option options on the command line +-- apply to? +data ApplyProgOptions + = APOTargets -- ^ All project packages that are targets. 
+ | APOLocals -- ^ All project packages (targets or otherwise). + | APOEverything -- ^ All packages (project packages or dependencies). + deriving (Bounded, Enum, Eq, Ord, Read, Show) + +instance FromJSON ApplyProgOptions where + parseJSON = withText "ApplyProgOptions" $ \t -> + case t of + "targets" -> pure APOTargets + "locals" -> pure APOLocals + "everything" -> pure APOEverything + _ -> fail $ "Invalid ApplyProgOptions: " ++ show t diff --git a/src/Stack/Types/Build.hs b/src/Stack/Types/Build.hs index 6d03cc62b7..ea64c0fe0b 100644 --- a/src/Stack/Types/Build.hs +++ b/src/Stack/Types/Build.hs @@ -1,701 +1,36 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} --- | Build-specific types. +{-| +Module : Stack.Types.Build +Description : Build-specific types. +License : BSD-3-Clause + +Build-specific types. +-} module Stack.Types.Build - (StackBuildException(..) - ,FlagSource(..) - ,UnusedFlags(..) - ,InstallLocation(..) - ,Installed(..) - ,psVersion - ,Task(..) - ,taskIsTarget - ,taskLocation - ,taskTargetIsMutable - ,LocalPackage(..) - ,BaseConfigOpts(..) - ,Plan(..) - ,TestOpts(..) - ,BenchmarkOpts(..) - ,FileWatchOpts(..) - ,BuildOpts(..) - ,BuildSubset(..) - ,defaultBuildOpts - ,TaskType(..) - ,IsMutable(..) - ,installLocationIsMutable - ,TaskConfigOpts(..) - ,BuildCache(..) - ,ConfigCache(..) - ,configureOpts - ,CachePkgSrc (..) - ,toCachePkgSrc - ,isStackOpt - ,wantedLocalPackages - ,FileCacheInfo (..) - ,ConfigureOpts (..) - ,PrecompiledCache (..) - ) - where + ( ExcludeTHLoading (..) + , ConvertPathsToAbsolute (..) + , KeepOutputOpen (..) 
+ ) where import Stack.Prelude -import Data.Aeson (ToJSON, FromJSON) -import qualified Data.ByteString as S -import Data.Char (isSpace) -import Data.List.Extra -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Data.Text as T -import Database.Persist.Sql (PersistField(..) - ,PersistFieldSql(..) - ,PersistValue(PersistText) - ,SqlType(SqlString)) -import Distribution.PackageDescription (TestSuiteInterface) -import Distribution.System (Arch) -import qualified Distribution.Text as C -import Distribution.Version (mkVersion) -import Path (parseRelDir, (), parent) -import Path.Extra (toFilePathNoTrailingSep) -import Stack.Constants -import Stack.Types.Compiler -import Stack.Types.CompilerBuild -import Stack.Types.Config -import Stack.Types.GhcPkgId -import Stack.Types.NamedComponent -import Stack.Types.Package -import Stack.Types.Version -import System.FilePath (pathSeparator) -import RIO.Process (showProcessArgDebug) - ----------------------------------------------- --- Exceptions -data StackBuildException - = Couldn'tFindPkgId PackageName - | CompilerVersionMismatch - (Maybe (ActualCompiler, Arch)) -- found - (WantedCompiler, Arch) -- expected - GHCVariant -- expected - CompilerBuild -- expected - VersionCheck - (Maybe (Path Abs File)) -- Path to the stack.yaml file - Text -- recommended resolution - | Couldn'tParseTargets [Text] - | UnknownTargets - (Set PackageName) -- no known version - (Map PackageName Version) -- not in snapshot, here's the most recent version in the index - (Path Abs File) -- stack.yaml - | TestSuiteFailure PackageIdentifier (Map Text (Maybe ExitCode)) (Maybe (Path Abs File)) S.ByteString - | TestSuiteTypeUnsupported TestSuiteInterface - | ConstructPlanFailed String - | CabalExitedUnsuccessfully - ExitCode - PackageIdentifier - (Path Abs File) -- cabal Executable - [String] -- cabal arguments - (Maybe (Path Abs File)) -- logfiles location - [Text] -- log contents - | SetupHsBuildFailure - ExitCode - (Maybe 
PackageIdentifier) -- which package's custom setup, is simple setup if Nothing - (Path Abs File) -- ghc Executable - [String] -- ghc arguments - (Maybe (Path Abs File)) -- logfiles location - [Text] -- log contents - | ExecutionFailure [SomeException] - | LocalPackageDoesn'tMatchTarget - PackageName - Version -- local version - Version -- version specified on command line - | NoSetupHsFound (Path Abs Dir) - | InvalidFlagSpecification (Set UnusedFlags) - | InvalidGhcOptionsSpecification [PackageName] - | TargetParseException [Text] - | SomeTargetsNotBuildable [(PackageName, NamedComponent)] - | TestSuiteExeMissing Bool String String String - | CabalCopyFailed Bool String - | LocalPackagesPresent [PackageIdentifier] - | CouldNotLockDistDir !(Path Abs File) - deriving Typeable - -data FlagSource = FSCommandLine | FSStackYaml - deriving (Show, Eq, Ord) - -data UnusedFlags = UFNoPackage FlagSource PackageName - | UFFlagsNotDefined - FlagSource - PackageName - (Set FlagName) -- defined in package - (Set FlagName) -- not defined - | UFSnapshot PackageName - deriving (Show, Eq, Ord) - -instance Show StackBuildException where - show (Couldn'tFindPkgId name) = - "After installing " <> packageNameString name <> - ", the package id couldn't be found " <> "(via ghc-pkg describe " <> - packageNameString name <> "). 
This shouldn't happen, " <> - "please report as a bug" - show (CompilerVersionMismatch mactual (expected, earch) ghcVariant ghcBuild check mstack resolution) = concat - [ case mactual of - Nothing -> "No compiler found, expected " - Just (actual, arch) -> concat - [ "Compiler version mismatched, found " - , compilerVersionString actual - , " (" - , C.display arch - , ")" - , ", but expected " - ] - , case check of - MatchMinor -> "minor version match with " - MatchExact -> "exact version " - NewerMinor -> "minor version match or newer with " - , T.unpack $ utf8BuilderToText $ display expected - , " (" - , C.display earch - , ghcVariantSuffix ghcVariant - , compilerBuildSuffix ghcBuild - , ") (based on " - , case mstack of - Nothing -> "command line arguments" - Just stack -> "resolver setting in " ++ toFilePath stack - , ").\n" - , T.unpack resolution - ] - show (Couldn'tParseTargets targets) = unlines - $ "The following targets could not be parsed as package names or directories:" - : map T.unpack targets - show (UnknownTargets noKnown notInSnapshot stackYaml) = - unlines $ noKnown' ++ notInSnapshot' - where - noKnown' - | Set.null noKnown = [] - | otherwise = return $ - "The following target packages were not found: " ++ - intercalate ", " (map packageNameString $ Set.toList noKnown) ++ - "\nSee https://docs.haskellstack.org/en/stable/build_command/#target-syntax for details." - notInSnapshot' - | Map.null notInSnapshot = [] - | otherwise = - "The following packages are not in your snapshot, but exist" - : "in your package index. Recommended action: add them to your" - : ("extra-deps in " ++ toFilePath stackYaml) - : "(Note: these are the most recent versions," - : "but there's no guarantee that they'll build together)." 
- : "" - : map - (\(name, version') -> "- " ++ packageIdentifierString - (PackageIdentifier name version')) - (Map.toList notInSnapshot) - show (TestSuiteFailure ident codes mlogFile bs) = unlines $ concat - [ ["Test suite failure for package " ++ packageIdentifierString ident] - , flip map (Map.toList codes) $ \(name, mcode) -> concat - [ " " - , T.unpack name - , ": " - , case mcode of - Nothing -> " executable not found" - Just ec -> " exited with: " ++ show ec - ] - , return $ case mlogFile of - Nothing -> "Logs printed to console" - -- TODO Should we load up the full error output and print it here? - Just logFile -> "Full log available at " ++ toFilePath logFile - , if S.null bs - then [] - else ["", "", doubleIndent $ T.unpack $ decodeUtf8With lenientDecode bs] - ] - where - indent = dropWhileEnd isSpace . unlines . fmap (\line -> " " ++ line) . lines - doubleIndent = indent . indent - show (TestSuiteTypeUnsupported interface) = - "Unsupported test suite type: " <> show interface - -- Supressing duplicate output - show (CabalExitedUnsuccessfully exitCode taskProvides' execName fullArgs logFiles bss) = - showBuildError False exitCode (Just taskProvides') execName fullArgs logFiles bss - show (SetupHsBuildFailure exitCode mtaskProvides execName fullArgs logFiles bss) = - showBuildError True exitCode mtaskProvides execName fullArgs logFiles bss - show (ExecutionFailure es) = intercalate "\n\n" $ map show es - show (LocalPackageDoesn'tMatchTarget name localV requestedV) = concat - [ "Version for local package " - , packageNameString name - , " is " - , versionString localV - , ", but you asked for " - , versionString requestedV - , " on the command line" - ] - show (NoSetupHsFound dir) = - "No Setup.hs or Setup.lhs file found in " ++ toFilePath dir - show (InvalidFlagSpecification unused) = unlines - $ "Invalid flag specification:" - : map go (Set.toList unused) - where - showFlagSrc :: FlagSource -> String - showFlagSrc FSCommandLine = " (specified on command 
line)" - showFlagSrc FSStackYaml = " (specified in stack.yaml)" - - go :: UnusedFlags -> String - go (UFNoPackage src name) = concat - [ "- Package '" - , packageNameString name - , "' not found" - , showFlagSrc src - ] - go (UFFlagsNotDefined src pname pkgFlags flags) = concat - [ "- Package '" - , name - , "' does not define the following flags" - , showFlagSrc src - , ":\n" - , intercalate "\n" - (map (\flag -> " " ++ flagNameString flag) - (Set.toList flags)) - , "\n- Flags defined by package '" ++ name ++ "':\n" - , intercalate "\n" - (map (\flag -> " " ++ name ++ ":" ++ flagNameString flag) - (Set.toList pkgFlags)) - ] - where name = packageNameString pname - go (UFSnapshot name) = concat - [ "- Attempted to set flag on snapshot package " - , packageNameString name - , ", please add to extra-deps" - ] - show (InvalidGhcOptionsSpecification unused) = unlines - $ "Invalid GHC options specification:" - : map showGhcOptionSrc unused - where - showGhcOptionSrc name = concat - [ "- Package '" - , packageNameString name - , "' not found" - ] - show (TargetParseException [err]) = "Error parsing targets: " ++ T.unpack err - show (TargetParseException errs) = unlines - $ "The following errors occurred while parsing the build targets:" - : map (("- " ++) . T.unpack) errs - - show (SomeTargetsNotBuildable xs) = - "The following components have 'buildable: False' set in the cabal configuration, and so cannot be targets:\n " ++ - T.unpack (renderPkgComponents xs) ++ - "\nTo resolve this, either provide flags such that these components are buildable, or only specify buildable targets." - show (TestSuiteExeMissing isSimpleBuildType exeName pkgName' testName) = - missingExeError isSimpleBuildType $ concat - [ "Test suite executable \"" - , exeName - , " not found for " - , pkgName' - , ":test:" - , testName - ] - show (CabalCopyFailed isSimpleBuildType innerMsg) = - missingExeError isSimpleBuildType $ concat - [ "'cabal copy' failed. 
Error message:\n" - , innerMsg - , "\n" - ] - show (ConstructPlanFailed msg) = msg - show (LocalPackagesPresent locals) = unlines - $ "Local packages are not allowed when using the script command. Packages found:" - : map (\ident -> "- " ++ packageIdentifierString ident) locals - show (CouldNotLockDistDir lockFile) = unlines - [ "Locking the dist directory failed, try to lock file:" - , " " ++ toFilePath lockFile - , "Maybe you're running another copy of Stack?" - ] - -missingExeError :: Bool -> String -> String -missingExeError isSimpleBuildType msg = - unlines $ msg : - case possibleCauses of - [] -> [] - [cause] -> ["One possible cause of this issue is:\n* " <> cause] - _ -> "Possible causes of this issue:" : map ("* " <>) possibleCauses - where - possibleCauses = - "No module named \"Main\". The 'main-is' source file should usually have a header indicating that it's a 'Main' module." : - "A cabal file that refers to nonexistent other files (e.g. a license-file that doesn't exist). Running 'cabal check' may point out these issues." : - if isSimpleBuildType - then [] - else ["The Setup.hs file is changing the installation target dir."] - -showBuildError - :: Bool - -> ExitCode - -> Maybe PackageIdentifier - -> Path Abs File - -> [String] - -> Maybe (Path Abs File) - -> [Text] - -> String -showBuildError isBuildingSetup exitCode mtaskProvides execName fullArgs logFiles bss = - let fullCmd = unwords - $ dropQuotes (toFilePath execName) - : map (T.unpack . 
showProcessArgDebug) fullArgs - logLocations = maybe "" (\fp -> "\n Logs have been written to: " ++ toFilePath fp) logFiles - in "\n-- While building " ++ - (case (isBuildingSetup, mtaskProvides) of - (False, Nothing) -> error "Invariant violated: unexpected case in showBuildError" - (False, Just taskProvides') -> "package " ++ dropQuotes (packageIdentifierString taskProvides') - (True, Nothing) -> "simple Setup.hs" - (True, Just taskProvides') -> "custom Setup.hs for package " ++ dropQuotes (packageIdentifierString taskProvides') - ) ++ - " (scroll up to its section to see the error) using:\n " ++ fullCmd ++ "\n" ++ - " Process exited with code: " ++ show exitCode ++ - (if exitCode == ExitFailure (-9) - then " (THIS MAY INDICATE OUT OF MEMORY)" - else "") ++ - logLocations ++ - (if null bss - then "" - else "\n\n" ++ doubleIndent (map T.unpack bss)) - where - doubleIndent = dropWhileEnd isSpace . unlines . fmap (\line -> " " ++ line) - dropQuotes = filter ('\"' /=) - -instance Exception StackBuildException - ----------------------------------------------- - --- | Package dependency oracle. -newtype PkgDepsOracle = - PkgDeps PackageName - deriving (Show,Typeable,Eq,NFData) - --- | Stored on disk to know whether the files have changed. -newtype BuildCache = BuildCache - { buildCacheTimes :: Map FilePath FileCacheInfo - -- ^ Modification times of files. - } - deriving (Generic, Eq, Show, Typeable, ToJSON, FromJSON) -instance NFData BuildCache - --- | Stored on disk to know whether the flags have changed. -data ConfigCache = ConfigCache - { configCacheOpts :: !ConfigureOpts - -- ^ All options used for this package. - , configCacheDeps :: !(Set GhcPkgId) - -- ^ The GhcPkgIds of all of the dependencies. Since Cabal doesn't take - -- the complete GhcPkgId (only a PackageIdentifier) in the configure - -- options, just using the previous value is insufficient to know if - -- dependencies have changed. 
- , configCacheComponents :: !(Set S.ByteString) - -- ^ The components to be built. It's a bit of a hack to include this in - -- here, as it's not a configure option (just a build option), but this - -- is a convenient way to force compilation when the components change. - , configCacheHaddock :: !Bool - -- ^ Are haddocks to be built? - , configCachePkgSrc :: !CachePkgSrc - , configCachePathEnvVar :: !Text - -- ^ Value of the PATH env var, see - } - deriving (Generic, Eq, Show, Data, Typeable) -instance NFData ConfigCache - -data CachePkgSrc = CacheSrcUpstream | CacheSrcLocal FilePath - deriving (Generic, Eq, Read, Show, Data, Typeable) -instance NFData CachePkgSrc - -instance PersistField CachePkgSrc where - toPersistValue CacheSrcUpstream = PersistText "upstream" - toPersistValue (CacheSrcLocal fp) = PersistText ("local:" <> T.pack fp) - fromPersistValue (PersistText t) = do - if t == "upstream" - then Right CacheSrcUpstream - else case T.stripPrefix "local:" t of - Just fp -> Right $ CacheSrcLocal (T.unpack fp) - Nothing -> Left $ "Unexpected CachePkgSrc value: " <> t - fromPersistValue _ = Left "Unexpected CachePkgSrc type" - -instance PersistFieldSql CachePkgSrc where - sqlType _ = SqlString - -toCachePkgSrc :: PackageSource -> CachePkgSrc -toCachePkgSrc (PSFilePath lp) = CacheSrcLocal (toFilePath (parent (lpCabalFile lp))) -toCachePkgSrc PSRemote{} = CacheSrcUpstream - --- | A task to perform when building -data Task = Task - { taskProvides :: !PackageIdentifier -- FIXME turn this into a function on taskType? 
- -- ^ the package/version to be built - , taskType :: !TaskType - -- ^ the task type, telling us how to build this - , taskConfigOpts :: !TaskConfigOpts - , taskBuildHaddock :: !Bool - , taskPresent :: !(Map PackageIdentifier GhcPkgId) - -- ^ GhcPkgIds of already-installed dependencies - , taskAllInOne :: !Bool - -- ^ indicates that the package can be built in one step - , taskCachePkgSrc :: !CachePkgSrc - , taskAnyMissing :: !Bool - -- ^ Were any of the dependencies missing? The reason this is - -- necessary is... hairy. And as you may expect, a bug in - -- Cabal. See: - -- . The - -- problem is that Cabal may end up generating the same package ID - -- for a dependency, even if the ABI has changed. As a result, - -- without this field, Stack would think that a reconfigure is - -- unnecessary, when in fact we _do_ need to reconfigure. The - -- details here suck. We really need proper hashes for package - -- identifiers. - , taskBuildTypeConfig :: !Bool - -- ^ Is the build type of this package Configure. Check out - -- ensureConfigureScript in Stack.Build.Execute for the motivation - } - deriving Show - --- | Given the IDs of any missing packages, produce the configure options -data TaskConfigOpts = TaskConfigOpts - { tcoMissing :: !(Set PackageIdentifier) - -- ^ Dependencies for which we don't yet have an GhcPkgId - , tcoOpts :: !(Map PackageIdentifier GhcPkgId -> ConfigureOpts) - -- ^ Produce the list of options given the missing @GhcPkgId@s - } -instance Show TaskConfigOpts where - show (TaskConfigOpts missing f) = concat - [ "Missing: " - , show missing - , ". 
Without those: " - , show $ f Map.empty - ] - --- | The type of a task, either building local code or something from the --- package index (upstream) -data TaskType - = TTLocalMutable LocalPackage - | TTRemotePackage IsMutable Package PackageLocationImmutable - deriving Show - -data IsMutable - = Mutable - | Immutable - deriving (Eq, Show) - -instance Semigroup IsMutable where - Mutable <> _ = Mutable - _ <> Mutable = Mutable - Immutable <> Immutable = Immutable - -instance Monoid IsMutable where - mempty = Immutable - mappend = (<>) - -taskIsTarget :: Task -> Bool -taskIsTarget t = - case taskType t of - TTLocalMutable lp -> lpWanted lp - _ -> False - -taskLocation :: Task -> InstallLocation -taskLocation task = - case taskType task of - TTLocalMutable _ -> Local - TTRemotePackage Mutable _ _ -> Local - TTRemotePackage Immutable _ _ -> Snap - -taskTargetIsMutable :: Task -> IsMutable -taskTargetIsMutable task = - case taskType task of - TTLocalMutable _ -> Mutable - TTRemotePackage mutable _ _ -> mutable - -installLocationIsMutable :: InstallLocation -> IsMutable -installLocationIsMutable Snap = Immutable -installLocationIsMutable Local = Mutable - --- | A complete plan of what needs to be built and how to do it -data Plan = Plan - { planTasks :: !(Map PackageName Task) - , planFinals :: !(Map PackageName Task) - -- ^ Final actions to be taken (test, benchmark, etc) - , planUnregisterLocal :: !(Map GhcPkgId (PackageIdentifier, Text)) - -- ^ Text is reason we're unregistering, for display only - , planInstallExes :: !(Map Text InstallLocation) - -- ^ Executables that should be installed after successful building - } - deriving Show - --- | Basic information used to calculate what the configure options are -data BaseConfigOpts = BaseConfigOpts - { bcoSnapDB :: !(Path Abs Dir) - , bcoLocalDB :: !(Path Abs Dir) - , bcoSnapInstallRoot :: !(Path Abs Dir) - , bcoLocalInstallRoot :: !(Path Abs Dir) - , bcoBuildOpts :: !BuildOpts - , bcoBuildOptsCLI :: !BuildOptsCLI - , 
bcoExtraDBs :: ![Path Abs Dir] - } - deriving Show - --- | Render a @BaseConfigOpts@ to an actual list of options -configureOpts :: EnvConfig - -> BaseConfigOpts - -> Map PackageIdentifier GhcPkgId -- ^ dependencies - -> Bool -- ^ local non-extra-dep? - -> IsMutable - -> Package - -> ConfigureOpts -configureOpts econfig bco deps isLocal isMutable package = ConfigureOpts - { coDirs = configureOptsDirs bco isMutable package - , coNoDirs = configureOptsNoDir econfig bco deps isLocal package - } - --- options set by stack -isStackOpt :: Text -> Bool -isStackOpt t = any (`T.isPrefixOf` t) - [ "--dependency=" - , "--constraint=" - , "--package-db=" - , "--libdir=" - , "--bindir=" - , "--datadir=" - , "--libexecdir=" - , "--sysconfdir" - , "--docdir=" - , "--htmldir=" - , "--haddockdir=" - , "--enable-tests" - , "--enable-benchmarks" - , "--exact-configuration" - -- Treat these as causing dirtiness, to resolve - -- https://github.com/commercialhaskell/stack/issues/2984 - -- - -- , "--enable-library-profiling" - -- , "--enable-executable-profiling" - -- , "--enable-profiling" - ] || t == "--user" - -configureOptsDirs :: BaseConfigOpts - -> IsMutable - -> Package - -> [String] -configureOptsDirs bco isMutable package = concat - [ ["--user", "--package-db=clear", "--package-db=global"] - , map (("--package-db=" ++) . 
toFilePathNoTrailingSep) $ case isMutable of - Immutable -> bcoExtraDBs bco ++ [bcoSnapDB bco] - Mutable -> bcoExtraDBs bco ++ [bcoSnapDB bco] ++ [bcoLocalDB bco] - , [ "--libdir=" ++ toFilePathNoTrailingSep (installRoot relDirLib) - , "--bindir=" ++ toFilePathNoTrailingSep (installRoot bindirSuffix) - , "--datadir=" ++ toFilePathNoTrailingSep (installRoot relDirShare) - , "--libexecdir=" ++ toFilePathNoTrailingSep (installRoot relDirLibexec) - , "--sysconfdir=" ++ toFilePathNoTrailingSep (installRoot relDirEtc) - , "--docdir=" ++ toFilePathNoTrailingSep docDir - , "--htmldir=" ++ toFilePathNoTrailingSep docDir - , "--haddockdir=" ++ toFilePathNoTrailingSep docDir] - ] - where - installRoot = - case isMutable of - Immutable -> bcoSnapInstallRoot bco - Mutable -> bcoLocalInstallRoot bco - docDir = - case pkgVerDir of - Nothing -> installRoot docDirSuffix - Just dir -> installRoot docDirSuffix dir - pkgVerDir = - parseRelDir (packageIdentifierString (PackageIdentifier (packageName package) - (packageVersion package)) ++ - [pathSeparator]) - --- | Same as 'configureOpts', but does not include directory path options -configureOptsNoDir :: EnvConfig - -> BaseConfigOpts - -> Map PackageIdentifier GhcPkgId -- ^ dependencies - -> Bool -- ^ is this a local, non-extra-dep? 
- -> Package - -> [String] -configureOptsNoDir econfig bco deps isLocal package = concat - [ depOptions - , ["--enable-library-profiling" | boptsLibProfile bopts || boptsExeProfile bopts] - -- Cabal < 1.21.1 does not support --enable-profiling, use --enable-executable-profiling instead - , let profFlag = "--enable-" <> concat ["executable-" | not newerCabal] <> "profiling" - in [ profFlag | boptsExeProfile bopts && isLocal] - , ["--enable-split-objs" | boptsSplitObjs bopts] - , ["--disable-library-stripping" | not $ boptsLibStrip bopts || boptsExeStrip bopts] - , ["--disable-executable-stripping" | not (boptsExeStrip bopts) && isLocal] - , map (\(name,enabled) -> - "-f" <> - (if enabled - then "" - else "-") <> - flagNameString name) - (Map.toList flags) - , map T.unpack $ packageCabalConfigOpts package - , concatMap (\x -> [compilerOptionsCabalFlag wc, T.unpack x]) (packageGhcOptions package) - , map ("--extra-include-dirs=" ++) (configExtraIncludeDirs config) - , map ("--extra-lib-dirs=" ++) (configExtraLibDirs config) - , maybe [] (\customGcc -> ["--with-gcc=" ++ toFilePath customGcc]) (configOverrideGccPath config) - , ["--exact-configuration"] - , ["--ghc-option=-fhide-source-paths" | hideSourcePaths cv] - ] - where - wc = view (actualCompilerVersionL.to whichCompiler) econfig - cv = view (actualCompilerVersionL.to getGhcVersion) econfig - - hideSourcePaths ghcVersion = ghcVersion >= mkVersion [8, 2] && configHideSourcePaths config - - config = view configL econfig - bopts = bcoBuildOpts bco - - newerCabal = view cabalVersionL econfig >= mkVersion [1, 22] - - -- Unioning atop defaults is needed so that all flags are specified - -- with --exact-configuration. 
- flags = packageFlags package `Map.union` packageDefaultFlags package - - depOptions = map (uncurry toDepOption) $ Map.toList deps - where - toDepOption = if newerCabal then toDepOption1_22 else toDepOption1_18 - - toDepOption1_22 (PackageIdentifier name _) gid = concat - [ "--dependency=" - , packageNameString name - , "=" - , ghcPkgIdString gid - ] - - toDepOption1_18 ident _gid = concat - [ "--constraint=" - , packageNameString name - , "==" - , versionString version' - ] - where - PackageIdentifier name version' = ident - --- | Get set of wanted package names from locals. -wantedLocalPackages :: [LocalPackage] -> Set PackageName -wantedLocalPackages = Set.fromList . map (packageName . lpPackage) . filter lpWanted - --- | Configure options to be sent to Setup.hs configure -data ConfigureOpts = ConfigureOpts - { coDirs :: ![String] - -- ^ Options related to various paths. We separate these out since they do - -- not have an impact on the contents of the compiled binary for checking - -- if we can use an existing precompiled cache. - , coNoDirs :: ![String] - } - deriving (Show, Eq, Generic, Data, Typeable) -instance NFData ConfigureOpts --- | Information on a compiled package: the library conf file (if relevant), --- the sublibraries (if present) and all of the executable paths. -data PrecompiledCache base = PrecompiledCache - { pcLibrary :: !(Maybe (Path base File)) - -- ^ .conf file inside the package database - , pcSubLibs :: ![Path base File] - -- ^ .conf file inside the package database, for each of the sublibraries - , pcExes :: ![Path base File] - -- ^ Full paths to executables - } - deriving (Show, Eq, Generic, Typeable) -instance NFData (PrecompiledCache Abs) -instance NFData (PrecompiledCache Rel) +-- | Type representing treatments of GHC's informational messages during +-- compilation when it evaluates Template Haskell code. +data ExcludeTHLoading + = ExcludeTHLoading + -- ^ Suppress the messages. + | KeepTHLoading + -- ^ Do not suppress the messages. 
+ +data ConvertPathsToAbsolute + = ConvertPathsToAbsolute + | KeepPathsAsIs + +-- | Special marker for expected failures in curator builds, using those we need +-- to keep log handle open as build continues further even after a failure. +data KeepOutputOpen + = KeepOpen + | CloseOnException + deriving Eq diff --git a/src/Stack/Types/Build/ConstructPlan.hs b/src/Stack/Types/Build/ConstructPlan.hs new file mode 100644 index 0000000000..35a6c8b9a9 --- /dev/null +++ b/src/Stack/Types/Build/ConstructPlan.hs @@ -0,0 +1,267 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Storage.Util +Description : Utilities for module "Stack.Build.ConstructPlan". +License : BSD-3-Clause + +A module providing types and related helper functions used in module +"Stack.Build.ConstructPlan". +-} + +module Stack.Types.Build.ConstructPlan + ( PackageInfo (..) + , CombinedMap + , M + , W (..) + , LibraryMap + , AddDepRes (..) + , toTask + , adrVersion + , adrHasLibrary + , isAdrToInstall + , Ctx (..) + , PackageLoader + , UnregisterState (..) + , ToolWarning (..) + , MissingPresentDeps (..) + ) where + +import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) +import RIO.Process ( HasProcessContext (..) ) +import RIO.State ( StateT ) +import RIO.Writer ( WriterT (..) ) +import Stack.Package ( hasBuildableMainLibrary ) +import Stack.Prelude hiding ( loadPackage ) +import Stack.Types.Build.Exception ( ConstructPlanException ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig(..) ) +import Stack.Types.CompilerPaths ( HasCompiler (..) ) +import Stack.Types.ComponentUtils ( StackUnqualCompName ) +import Stack.Types.Config ( HasConfig (..) ) +import Stack.Types.ConfigureOpts ( BaseConfigOpts ) +import Stack.Types.Curator ( Curator ) +import Stack.Types.DumpPackage ( DumpPackage ) +import Stack.Types.EnvConfig + ( EnvConfig (..), HasEnvConfig (..), HasSourceMap (..) 
) +import Stack.Types.GhcPkgId ( GhcPkgId ) +import Stack.Types.GHCVariant ( HasGHCVariant (..) ) +import Stack.Types.Installed + ( InstallLocation, Installed (..), installedVersion ) +import Stack.Types.IsMutable ( IsMutable ) +import Stack.Types.Package + ( ExeName (..), LocalPackage (..), Package (..) + , PackageSource (..) + ) +import Stack.Types.ParentMap ( ParentMap ) +import Stack.Types.Plan + ( Task (..), TaskType (..), taskProvides ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.Runner ( HasRunner (..) ) + +-- | Type representing information about packages, namely information about +-- whether or not a package is already installed and, unless the package is not +-- to be built (global packages), where its source code is located. +data PackageInfo + = PIOnlyInstalled InstallLocation Installed + -- ^ This indicates that the package is already installed, and that we + -- shouldn't build it from source. This is only the case for global + -- packages. + | PIOnlySource PackageSource + -- ^ This indicates that the package isn't installed, and we know where to + -- find its source. + | PIBoth PackageSource Installed + -- ^ This indicates that the package is installed and we know where to find + -- its source. We may want to reinstall from source. + deriving Show + +-- | A type synonym representing dictionaries of package names, and combined +-- information about the package in respect of whether or not it is already +-- installed and, unless the package is not to be built (global packages), where +-- its source code is located. +type CombinedMap = Map PackageName PackageInfo + +-- | Type synonym representing values used during the construction of a build +-- plan. The type is an instance of 'Monad', hence its name. +type M = WriterT W (StateT LibraryMap (RIO Ctx)) + +-- | Type representing values used as the output to be collected during the +-- construction of a build plan. 
+data W = W + { wFinals :: !(Map PackageName (Either ConstructPlanException Task)) + -- ^ A dictionary of package names, and either a final task to perform when + -- building the package or an exception. + , wInstall :: !(Map StackUnqualCompName InstallLocation) + -- ^ A dictionary of executables to be installed, and location where the + -- executable's binary is placed. + , wDirty :: !(Map PackageName Text) + -- ^ A dictionary of local packages, and the reason why the local package is + -- considered dirty. + , wWarnings :: !([StyleDoc] -> [StyleDoc]) + -- ^ Warnings. + , wParents :: !ParentMap + -- ^ A dictionary of package names, and a list of pairs of the identifier + -- of a package depending on the package and the version range specified for + -- the dependency by that package. Used in the reporting of failure to + -- construct a build plan. + } + deriving Generic + +instance Semigroup W where + (<>) = mappenddefault + +instance Monoid W where + mempty = memptydefault + mappend = (<>) + +-- | A type synonym representing dictionaries of package names, and either an +-- exception encountered during the construction of the build plan or the +-- 'Stack.Build.ConstructPlan.addDep' result. +type LibraryMap = Map PackageName (Either ConstructPlanException AddDepRes) + +-- | Type representing results of 'Stack.Build.ConstructPlan.addDep'. +data AddDepRes + = ADRToInstall Task + -- ^ A task must be performed to provide the package name. + | ADRFound InstallLocation Installed + -- ^ An existing installation provides the package name. 
+ deriving Show + +isAdrToInstall :: AddDepRes -> Bool +isAdrToInstall ADRToInstall{} = True +isAdrToInstall _ = False + +toTask :: AddDepRes -> Maybe Task +toTask (ADRToInstall task) = Just task +toTask (ADRFound _ _) = Nothing + +adrVersion :: AddDepRes -> Version +adrVersion (ADRToInstall task) = pkgVersion $ taskProvides task +adrVersion (ADRFound _ installed) = installedVersion installed + +adrHasLibrary :: AddDepRes -> Bool +adrHasLibrary (ADRToInstall task) = case task.taskType of + TTLocalMutable lp -> packageHasLibrary lp.package + TTRemotePackage _ p _ -> packageHasLibrary p + where + -- make sure we consider sub-libraries as libraries too + packageHasLibrary :: Package -> Bool + packageHasLibrary p = + hasBuildableMainLibrary p || not (null p.subLibraries) +adrHasLibrary (ADRFound _ Library{}) = True +adrHasLibrary (ADRFound _ Executable{}) = False + +data MissingPresentDeps = MissingPresentDeps + { missingPackages :: !(Set PackageIdentifier) + , presentPackages :: !(Map PackageIdentifier GhcPkgId) + , isMutable :: !IsMutable + } + deriving Show + +instance Semigroup MissingPresentDeps where + (<>) a b = MissingPresentDeps + { missingPackages = missingPackages a <> missingPackages b + , presentPackages = presentPackages a <> presentPackages b + , isMutable = isMutable a <> isMutable b + } + +instance Monoid MissingPresentDeps where + mempty = MissingPresentDeps mempty mempty mempty + +-- | Type representing values used as the environment to be read from during the +-- construction of a build plan (the \'context\'). +data Ctx = Ctx + { baseConfigOpts :: !BaseConfigOpts + -- ^ Basic information used to determine configure options + , loadPackage :: !(PackageLoader M) + -- ^ A function to load a `Package` given the location of a package assumed + -- to be immutable. 
+ , combinedMap :: !CombinedMap + -- ^ A dictionary of package names, and combined information about the + -- package in respect of whether or not it is already installed and, unless + -- the package is not to be built (global packages), where its source code + -- is located. + , ctxEnvConfig :: !EnvConfig + -- ^ Configuration after the environment has been setup. + , callStack :: ![PackageName] + , wanted :: !(Set PackageName) + , localNames :: !(Set PackageName) + , curator :: !(Maybe Curator) + , pathEnvVar :: !Text + } + +-- | A type synonym representing functions that yield a 'Package' given the +-- location of a package assumed to be immutable, parameterised by the relevant +-- monad. +type PackageLoader m = + PackageLocationImmutable + -- ^ Location of a package that is assumed to be immutable. + -> Map FlagName Bool + -- ^ Cabal flags. + -> [Text] + -- ^ GHC options. + -> [Text] + -- ^ Cabal configure options. + -> m Package + +instance HasPlatform Ctx where + platformL = configL . platformL + {-# INLINE platformL #-} + platformVariantL = configL . platformVariantL + {-# INLINE platformVariantL #-} + +instance HasGHCVariant Ctx where + ghcVariantL = configL . ghcVariantL + {-# INLINE ghcVariantL #-} + +instance HasLogFunc Ctx where + logFuncL = configL . logFuncL + +instance HasRunner Ctx where + runnerL = configL . runnerL + +instance HasStylesUpdate Ctx where + stylesUpdateL = runnerL . stylesUpdateL + +instance HasTerm Ctx where + useColorL = runnerL . useColorL + termWidthL = runnerL . termWidthL + +instance HasConfig Ctx where + configL = buildConfigL . lens (.config) (\x y -> x { config = y }) + {-# INLINE configL #-} + +instance HasPantryConfig Ctx where + pantryConfigL = configL . pantryConfigL + +instance HasProcessContext Ctx where + processContextL = configL . processContextL + +instance HasBuildConfig Ctx where + buildConfigL = envConfigL . 
lens + (.buildConfig) + (\x y -> x { buildConfig = y }) + +instance HasSourceMap Ctx where + sourceMapL = envConfigL . sourceMapL + +instance HasCompiler Ctx where + compilerPathsL = envConfigL . compilerPathsL + +instance HasEnvConfig Ctx where + envConfigL = lens (.ctxEnvConfig) (\x y -> x { ctxEnvConfig = y }) + +-- | State to be maintained during the calculation of project packages and local +-- extra-deps to unregister. +data UnregisterState = UnregisterState + { toUnregister :: !(Map GhcPkgId (PackageIdentifier, Text)) + , toKeep :: ![DumpPackage] + , anyAdded :: !Bool + } + +-- | Warn about tools in the snapshot definition. States the tool name +-- expected and the package name using it. +data ToolWarning + = ToolWarning ExeName PackageName + deriving Show diff --git a/src/Stack/Types/Build/Exception.hs b/src/Stack/Types/Build/Exception.hs new file mode 100644 index 0000000000..86414bc52f --- /dev/null +++ b/src/Stack/Types/Build/Exception.hs @@ -0,0 +1,1006 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.Build.Exception +License : BSD-3-Clause +-} + +module Stack.Types.Build.Exception + ( BuildException (..) + , BuildPrettyException (..) + , pprintTargetParseErrors + , ConstructPlanException (..) + , LatestApplicableVersion + , BadDependency (..) + ) where + +import qualified Data.ByteString as S +import Data.Char ( isSpace ) +import Data.List as L +import qualified Data.Map as Map +import qualified Data.Map.Strict as M +import Data.Monoid.Map ( MonoidMap (..) 
) +import qualified Data.Set as Set +import qualified Data.Text as T +import Distribution.System ( Arch ) +import qualified Distribution.Text as C +import Distribution.Types.PackageName ( mkPackageName ) +import Distribution.Types.TestSuiteInterface ( TestSuiteInterface ) +import qualified Distribution.Version as C +import RIO.NonEmpty ( nonEmpty ) +import RIO.Process ( showProcessArgDebug ) +import Stack.Constants ( defaultUserConfigPath, wiredInPackages ) +import Stack.Prelude +import Stack.Types.Compiler ( ActualCompiler, compilerVersionString ) +import Stack.Types.CompilerBuild + ( CompilerBuild, compilerBuildSuffix ) +import Stack.Types.ComponentUtils + ( StackUnqualCompName, unqualCompToString ) +import Stack.Types.DumpPackage ( DumpPackage ) +import Stack.Types.UnusedFlags ( FlagSource (..), UnusedFlags (..) ) +import Stack.Types.GHCVariant ( GHCVariant, ghcVariantSuffix ) +import Stack.Types.NamedComponent + ( NamedComponent, renderPkgComponent ) +import Stack.Types.Package ( Package (..), packageIdentifier ) +import Stack.Types.ParentMap ( ParentMap ) +import Stack.Types.Version ( VersionCheck (..), VersionRange ) +import Stack.Types.WantedCompilerSetter ( WantedCompilerSetter (..) ) + +-- | Type representing exceptions thrown by functions exported by modules with +-- names beginning @Stack.Build@. 
+data BuildException + = Couldn'tFindPkgId PackageName + | Couldn'tParseTargets [Text] + | UnknownTargets + (Set PackageName) -- no known version + (Map PackageName Version) -- not in snapshot, here's the most recent + -- version in the index + (Path Abs File) -- stack.yaml + | TestSuiteFailure + PackageIdentifier + (Map StackUnqualCompName (Maybe ExitCode)) + (Maybe (Path Abs File)) + S.ByteString + | TestSuiteTypeUnsupported TestSuiteInterface + | LocalPackageDoesn'tMatchTarget + PackageName + Version -- local version + Version -- version specified on command line + | NoSetupHsFound (Path Abs Dir) + | InvalidGhcOptionsSpecification [PackageName] + | LocalPackagesPresent [PackageIdentifier] + | CouldNotLockDistDir !(Path Abs File) + | TaskCycleBug PackageIdentifier + | PackageIdMissingBug PackageIdentifier + | AllInOneBuildBug + | MultipleResultsBug PackageName [DumpPackage] + | TemplateHaskellNotFoundBug + | HaddockIndexNotFound + | ShowBuildErrorBug + | CallStackEmptyBug + deriving Show + +instance Exception BuildException where + displayException (Couldn'tFindPkgId name) = bugReport "[S-7178]" $ concat + [ "After installing " + , packageNameString name + ,", the package id couldn't be found (via ghc-pkg describe " + , packageNameString name + , ")." + ] + displayException (Couldn'tParseTargets targets) = unlines + $ "Error: [S-3127]" + : "The following targets could not be parsed as package names or \ + \directories:" + : map T.unpack targets + displayException (UnknownTargets noKnown notInSnapshot stackYaml) = unlines + $ "Error: [S-2154]" + : (noKnown' ++ notInSnapshot') + where + noKnown' + | Set.null noKnown = [] + | otherwise = pure $ + "The following target packages were not found: " ++ + intercalate ", " (map packageNameString $ Set.toList noKnown) ++ + "\nSee https://docs.haskellstack.org/en/stable/commands/build_command/#target-syntax for details." 
+ notInSnapshot' + | Map.null notInSnapshot = [] + | otherwise = + "The following packages are not in your snapshot, but exist" + : "in your package index. Recommended action: add them to your" + : ("extra-deps in " ++ toFilePath stackYaml) + : "(Note: these are the most recent versions," + : "but there's no guarantee that they'll build together)." + : "" + : map + (\(name, version') -> "- " ++ packageIdentifierString + (PackageIdentifier name version')) + (Map.toList notInSnapshot) + displayException (TestSuiteFailure ident codes mlogFile bs) = unlines + $ "Error: [S-1995]" + : concat + [ ["Test suite failure for package " ++ packageIdentifierString ident] + , flip map (Map.toList codes) $ \(name, mcode) -> concat + [ " " + , unqualCompToString name + , ": " + , case mcode of + Nothing -> " executable not found" + Just ec -> " exited with: " ++ displayException ec + ] + , pure $ case mlogFile of + Nothing -> "Logs printed to console" + -- TODO Should we load up the full error output and print it here? + Just logFile -> "Full log available at " ++ toFilePath logFile + , if S.null bs + then [] + else + [ "" + , "" + , doubleIndent $ T.unpack $ decodeUtf8With lenientDecode bs + ] + ] + where + indent' = dropWhileEnd isSpace . unlines . fmap (" " ++) . lines + doubleIndent = indent' . 
indent' + displayException (TestSuiteTypeUnsupported interface) = concat + [ "Error: [S-3819]\n" + , "Unsupported test suite type: " + , show interface + ] + -- Suppressing duplicate output + displayException (LocalPackageDoesn'tMatchTarget name localV requestedV) = concat + [ "Error: [S-5797]\n" + , "Version for project package " + , packageNameString name + , " is " + , versionString localV + , ", but you asked for " + , versionString requestedV + , " on the command line" + ] + displayException (NoSetupHsFound dir) = concat + [ "Error: [S-3118]\n" + , "No Setup.hs or Setup.lhs file found in " + , toFilePath dir + ] + displayException (InvalidGhcOptionsSpecification unused) = unlines + $ "Error: [S-4925]" + : "Invalid GHC options specification:" + : map showGhcOptionSrc unused + where + showGhcOptionSrc name = concat + [ "- Package '" + , packageNameString name + , "' not found" + ] + displayException (LocalPackagesPresent locals) = unlines + $ "Error: [S-5510]" + : "Local packages are not allowed when using the 'script' command. \ + \Packages found:" + : map (\ident -> "- " ++ packageIdentifierString ident) locals + displayException (CouldNotLockDistDir lockFile) = unlines + [ "Error: [S-7168]" + , "Locking the dist directory failed, try to lock file:" + , " " ++ toFilePath lockFile + , "Maybe you're running another copy of Stack?" + ] + displayException (TaskCycleBug pid) = bugReport "[S-7868]" $ + "Unexpected task cycle for " + ++ packageNameString (pkgName pid) + displayException (PackageIdMissingBug ident) = bugReport "[S-8923]" $ + "singleBuild: missing package ID missing: " + ++ show ident + displayException AllInOneBuildBug = bugReport "[S-7371]" + "Cannot have an all-in-one build that also has a final build step." 
+ displayException (MultipleResultsBug name dps) = bugReport "[S-6739]" $ + "singleBuild: multiple results when describing installed package " + ++ show (name, dps) + displayException TemplateHaskellNotFoundBug = bugReport "[S-3121]" + "template-haskell is a wired-in GHC boot library but it wasn't found." + displayException HaddockIndexNotFound = + "Error: [S-6901]\n" + ++ "No local or snapshot doc index found to open." + displayException ShowBuildErrorBug = bugReport "[S-5452]" + "Unexpected case in showBuildError." + displayException CallStackEmptyBug = bugReport "[S-2696]" + "addDep: call stack is empty." + +data BuildPrettyException + = ConstructPlanFailed + [ConstructPlanException] + (Either (Path Abs File) (Path Abs File)) + (Path Abs Dir) + Bool -- Is the project the implicit global project? + ParentMap + (Set PackageName) + (Map PackageName [PackageName]) + | ExecutionFailure [SomeException] + | CabalExitedUnsuccessfully + ExitCode + PackageIdentifier + (Path Abs File) -- cabal Executable + [String] -- cabal arguments + (Maybe (Path Abs File)) -- logfiles location + [Text] -- log contents + | SetupHsBuildFailure + ExitCode + (Maybe PackageIdentifier) -- which package's custom setup, is simple setup + -- if Nothing + (Path Abs File) -- ghc Executable + [String] -- ghc arguments + (Maybe (Path Abs File)) -- logfiles location + [Text] -- log contents + | TargetParseException [StyleDoc] + | SomeTargetsNotBuildable [(PackageName, NamedComponent)] + | InvalidFlagSpecification [UnusedFlags] + | GHCProfOptionInvalid + | NotOnlyLocal [PackageName] [StackUnqualCompName] + | CompilerVersionMismatch + (Maybe (ActualCompiler, Arch)) -- found + (WantedCompiler, Arch) -- expected + GHCVariant -- expected + CompilerBuild -- expected + VersionCheck + WantedCompilerSetter -- Way that the wanted compiler is set + StyleDoc -- recommended resolution + | ActionNotFilteredBug StyleDoc + | TestSuiteExeMissing !Bool !String !PackageName !StackUnqualCompName + | CabalCopyFailed 
!Bool !BuildPrettyException + deriving Show + +instance Pretty BuildPrettyException where + pretty ( ConstructPlanFailed errs configFile stackRoot isImplicitGlobal parents wanted prunedGlobalDeps ) = + "[S-4804]" + <> line + <> flow "Stack failed to construct a build plan." + <> blankLine + <> pprintExceptions + errs configFile stackRoot isImplicitGlobal parents wanted prunedGlobalDeps + pretty (ExecutionFailure es) = + "[S-7282]" + <> line + <> flow "Stack failed to execute the build plan." + <> blankLine + <> fillSep + [ flow "While executing the build plan, Stack encountered the" + , case es of + [_] -> "error:" + _ -> flow "following errors:" + ] + <> blankLine + <> hcat (L.intersperse blankLine (map ppException es)) + pretty (CabalExitedUnsuccessfully exitCode taskProvides' execName fullArgs logFiles bss) = + showBuildError "[S-7011]" + False exitCode (Just taskProvides') execName fullArgs logFiles bss + pretty (SetupHsBuildFailure exitCode mtaskProvides execName fullArgs logFiles bss) = + showBuildError "[S-6374]" + True exitCode mtaskProvides execName fullArgs logFiles bss + pretty (TargetParseException errs) = + "[S-8506]" + <> pprintTargetParseErrors errs + pretty (SomeTargetsNotBuildable xs) = + "[S-7086]" + <> line + <> fillSep + ( [ flow "The following components have" + , style Shell (flow "buildable: False") + , flow "set in the Cabal configuration, and so cannot be targets:" + ] + <> mkNarrativeList (Just Target) False + (map (fromString . T.unpack . renderPkgComponent) xs :: [StyleDoc]) + ) + <> blankLine + <> flow "To resolve this, either provide flags such that these components \ + \are buildable, or only specify buildable targets." 
+ pretty (InvalidFlagSpecification unused) = + "[S-8664]" + <> line + <> flow "Invalid flag specification:" + <> line + <> bulletedList (map go (L.sort unused)) + where + showFlagSrc :: FlagSource -> StyleDoc + showFlagSrc FSCommandLine = flow "(specified on the command line)" + showFlagSrc FSStackYaml = + flow "(specified in the project-level configuration (e.g. stack.yaml))" + + go :: UnusedFlags -> StyleDoc + go (UFNoPackage src name) = fillSep + [ "Package" + , style Error (fromPackageName name) + , flow "not found" + , showFlagSrc src + ] + go (UFFlagsNotDefined src pname pkgFlags flags) = + fillSep + ( "Package" + : style Current (fromString name) + : flow "does not define the following flags" + : showFlagSrc src <> ":" + : mkNarrativeList (Just Error) False + (map (fromString . flagNameString) (Set.toList flags) :: [StyleDoc]) + ) + <> line + <> if Set.null pkgFlags + then fillSep + [ flow "No flags are defined by package" + , style Current (fromString name) <> "." + ] + else fillSep + ( flow "Flags defined by package" + : style Current (fromString name) + : "are:" + : mkNarrativeList (Just Good) False + (map (fromString . flagNameString) (Set.toList pkgFlags) :: [StyleDoc]) + ) + where + name = packageNameString pname + go (UFSnapshot name) = fillSep + [ flow "Attempted to set flag on snapshot package" + , style Current (fromPackageName name) <> "," + , flow "please add the package to" + , style Shell "extra-deps" <> "." + ] + pretty GHCProfOptionInvalid = + "[S-8100]" + <> line + <> fillSep + [ flow "When building with Stack, you should not use GHC's" + , style Shell "-prof" + , flow "option. Instead, please use Stack's" + , style Shell "--library-profiling" + , "and" + , style Shell "--executable-profiling" + , flow "flags. See:" + , style Url "https://github.com/commercialhaskell/stack/issues/1015" <> "." 
+ ] + pretty (NotOnlyLocal packages exes) = + "[S-1727]" + <> line + <> flow "Specified only-locals, but Stack needs to build snapshot contents:" + <> line + <> if null packages + then mempty + else + fillSep + ( "Packages:" + : mkNarrativeList Nothing False + (map fromPackageName packages :: [StyleDoc]) + ) + <> line + <> if null exes + then mempty + else + fillSep + ( "Executables:" + : mkNarrativeList Nothing False + (map (fromString . unqualCompToString) exes :: [StyleDoc]) + ) + <> line + pretty ( CompilerVersionMismatch + mactual + (expected, eArch) + ghcVariant + ghcBuild + check + wantedCompilerSetter + resolution + ) = + "[S-6362]" + <> line + <> fillSep + [ case mactual of + Nothing -> flow "No compiler found, expected" + Just (actual, arch) -> fillSep + [ flow "Compiler version mismatched, found" + , fromString $ compilerVersionString actual + , parens (pretty arch) <> "," + , flow "but expected" + ] + , case check of + MatchMinor -> flow "minor version match with" + MatchExact -> flow "exact version" + NewerMinor -> flow "minor version match or newer with" + , fromString $ T.unpack $ utf8BuilderToText $ display expected + , parens $ mconcat + [ pretty eArch + , fromString $ ghcVariantSuffix ghcVariant + , fromString $ compilerBuildSuffix ghcBuild + ] + , parens + ( fillSep + [ flow "based on" + , case wantedCompilerSetter of + CompilerAtCommandLine -> fillSep + [ "the" + , style Shell "--compiler" + , "option" + ] + SnapshotAtCommandLine -> fillSep + [ "the" + , style Shell "--snapshot" <> "," + , "or (deprecated)" + , style Shell "--resolver" <> "," + , "option" + ] + YamlConfiguration mConfigFile -> case mConfigFile of + Nothing -> flow "command line arguments" + Just configFile -> fillSep + [ flow "the configuration in" + , pretty configFile + ] + ] + ) + <> "." 
+ ] + <> blankLine + <> resolution + pretty (ActionNotFilteredBug source) = bugPrettyReport "S-4660" $ + fillSep + [ source + , flow "is seeking to run an action that should have been filtered from \ + \the list of actions." + ] + pretty (TestSuiteExeMissing isSimpleBuildType exeName pkgName testName) = + missingExeError "[S-7987]" isSimpleBuildType $ + fillSep + [ flow "Test suite executable" + , style Shell (fromString exeName) + , flow "not found for" + , style PkgComponent pkgComponent <> "." + ] + <> line + where + pkgComponent = + fromString (packageNameString pkgName) + <> ":test:" + <> fromString (unqualCompToString testName) + pretty (CabalCopyFailed isSimpleBuildType err) = + missingExeError "[S-8027]" isSimpleBuildType $ + fillSep + [ style Shell "cabal copy" + , flow "failed. Error message:" + ] + <> line + <> pretty err + <> line + +instance Exception BuildPrettyException + +-- | Helper function to pretty print an error message for target parse errors. +pprintTargetParseErrors :: [StyleDoc] -> StyleDoc +pprintTargetParseErrors errs = + line + <> flow "Stack failed to parse the target(s)." + <> blankLine + <> fillSep + [ flow "While parsing, Stack encountered the" + , case errs of + [err] -> + "error:" + <> blankLine + <> indent 4 err + _ -> + flow "following errors:" + <> blankLine + <> bulletedList errs + ] + <> blankLine + <> fillSep + [ flow "Stack expects a target to be a package name (e.g." + , style Shell "my-package" <> ")," + , flow "a package identifier (e.g." + , style Shell "my-package-0.1.2.3" <> ")," + , flow "a package component (e.g." + , style Shell "my-package:test:my-test-suite" <> ")," + , flow "or, failing that, a relative path to a local directory for a \ + \package or a parent directory of one or more such directories." 
+ ] + +pprintExceptions :: + [ConstructPlanException] + -> Either (Path Abs File) (Path Abs File) + -- ^ The configuration file, which may be either (Left) a user-specific + -- global one or (Right) a project-level one. + -> Path Abs Dir + -> Bool + -> ParentMap + -> Set PackageName + -> Map PackageName [PackageName] + -> StyleDoc +pprintExceptions exceptions configFile stackRoot isImplicitGlobal parentMap wanted' prunedGlobalDeps = + fillSep + [ flow + ( "While constructing the build plan, Stack encountered the \ + \following errors" + <> if hasConfigurationRefs then "." else ":" + ) + , if hasConfigurationRefs + then flow + "The 'Stack configuration' refers to the set of package versions \ + \specified by the snapshot (after any dropped packages, or pruned \ + \GHC boot packages; if a boot package is replaced, Stack prunes \ + \all other such packages that depend on it) and any extra-deps:" + else mempty + ] + <> blankLine + <> mconcat (L.intersperse blankLine (mapMaybe pprintException exceptions')) + <> if L.null recommendations + then mempty + else + blankLine + <> flow "Some different approaches to resolving some or all of this:" + <> blankLine + <> indent 2 (spacedBulletedList recommendations) + where + exceptions' = {- should we dedupe these somehow? 
nubOrd -} exceptions + + recommendations = + [ allowNewerMsg True False | onlyHasDependencyMismatches ] + <> [ fillSep + $ allowNewerMsg False onlyHasDependencyMismatches + : flow "add these package names under" + : style Shell "allow-newer-deps" <> ":" + : mkNarrativeList (Just Shell) False + (map fromPackageName (Set.elems pkgsWithMismatches) :: [StyleDoc]) + | not $ Set.null pkgsWithMismatches + ] + <> addExtraDepsRecommendations + where + allowNewerMsg isAll isRepetitive = fillSep + $ flow "To ignore" + : (if isAll then "all" else "certain") + : flow "version constraints and build anyway," + : if isRepetitive + then ["also"] + else + [ fillSep + $ [ "pass" + , style Shell "--allow-newer" <> "," + , flow "or, in" + , pretty (defaultUserConfigPath stackRoot) + , flow + ( "(global configuration)" + <> if isImplicitGlobal then "," else mempty + ) + ] + <> ( case configFile of + Left _ -> [] + Right projectConfigFile -> if isImplicitGlobal + then [] + else + [ "or" + , pretty projectConfigFile + , flow "(project-level configuration)," + ] + ) + <> [ "set" + , style Shell (flow "allow-newer: true") + <> if isAll then "." else mempty + ] + <> [ "and" | not isAll ] + ] + + addExtraDepsRecommendations + | Map.null extras = [] + | (Just _) <- Map.lookup (mkPackageName "base") extras = + [ fillSep + [ flow "Build requires unattainable version of the" + , style Current "base" + , flow "package. Since" + , style Current "base" + , flow "is a part of GHC, you most likely need to use a \ + \different GHC version with the matching" + , style Current "base"<> "." 
+ ] + ] + | otherwise = + [ fillSep + [ style Recommendation (flow "Recommended action:") + , flow "try adding the following to your" + , case configFile of + Left _ -> fillSep + [ style Shell "--extra-dep" + , flow "options of the" + , style Shell (flow "stack script") + , "command:" + ] + Right projectConfigFile -> fillSep + [ style Shell "extra-deps" + , "in" + , pretty projectConfigFile + , "(project-level configuration):" + ] + ] + <> blankLine + <> vsep (map pprintExtra (Map.toList extras)) + ] + + pprintExtra (name, (version, BlobKey cabalHash cabalSize)) = + let cfInfo = CFIHash cabalHash (Just cabalSize) + packageIdRev = PackageIdentifierRevision name version cfInfo + in fromString ("- " ++ T.unpack (utf8BuilderToText (display packageIdRev))) + + allNotInBuildPlan = Set.fromList $ concatMap toNotInBuildPlan exceptions' + toNotInBuildPlan (DependencyPlanFailures _ pDeps) = + map fst $ + filter + (\(_, (_, _, badDep)) -> badDep == NotInBuildPlan) + (Map.toList pDeps) + toNotInBuildPlan _ = [] + + (onlyHasDependencyMismatches, hasConfigurationRefs, extras, pkgsWithMismatches) = + filterExceptions + + filterExceptions :: + ( Bool + -- ^ All the errors are DependencyMismatch. This checks if + -- 'allow-newer: true' could resolve all reported issues. + , Bool + -- ^ One or more messages refer to 'the Stack configuration'. This + -- triggers a message to explain what that phrase means. + , Map PackageName (Version, BlobKey) + -- ^ Recommended extras. TO DO: Likely a good idea to distinguish these to + -- the user. In particular, those recommended for DependencyMismatch. + , Set.Set PackageName + -- ^ Set of names of packages with one or more DependencyMismatch errors. 
+ ) + filterExceptions = L.foldl' go acc0 exceptions' + where + acc0 = (True, False, Map.empty, Set.empty) + go acc (DependencyPlanFailures pkg m) = Map.foldrWithKey go' acc m + where + pkgName = pkg.name + go' name (_, Just extra, NotInBuildPlan) (_, _, m', s) = + (False, True, Map.insert name extra m', s) + go' _ (_, _, NotInBuildPlan) (_, _, m', s) = (False, True, m', s) + go' name (_, Just extra, DependencyMismatch _) (p1, _, m', s) = + (p1, True, Map.insert name extra m', Set.insert pkgName s) + go' _ (_, _, DependencyMismatch _) (p1, _, m', s) = + (p1, True, m', Set.insert pkgName s) + go' _ (_, _, Couldn'tResolveItsDependencies _) acc' = acc' + go' _ _ (_, p2, m', s) = (False, p2, m', s) + go (_, p2, m, s) _ = (False, p2, m, s) + + pprintException (DependencyCycleDetected pNames) = Just $ + flow "Dependency cycle detected in packages:" + <> line + <> indent 4 + (encloseSep "[" "]" "," (map (style Error . fromPackageName) pNames)) + pprintException (DependencyPlanFailures pkg pDeps) = + case mapMaybe pprintDep (Map.toList pDeps) of + [] -> Nothing + depErrors -> Just $ + fillSep + [ flow "In the dependencies for" + , pkgIdent <> pprintFlags pkg.flags <> ":" + ] + <> line + <> indent 2 (bulletedList depErrors) + <> line + <> fillSep + ( flow "The above is/are needed" + : case getShortestDepsPath parentMap wanted' pkg.name of + Nothing -> + [flow "for unknown reason - Stack invariant violated."] + Just [] -> + [ "since" + , pkgName' + , flow "is a build target." + ] + Just (target:path) -> + [ flow "due to" + , encloseSep "" "" " -> " pathElems + ] + where + pathElems = + [style Target . fromPackageId $ target] + <> map fromPackageId path + <> [pkgIdent] + ) + where + pkgName' = style Current (fromPackageName pkg.name) + pkgIdent = style Current (fromPackageId $ packageIdentifier pkg) + -- Skip these when they are redundant with 'NotInBuildPlan' info. 
+ pprintException (UnknownPackage compiler name) + | name `Set.member` allNotInBuildPlan = Nothing + | name `Set.member` wiredInPackages compiler = + Just $ fillSep + [ flow "Can't build a package with same name as a wired-in-package:" + , style Current . fromPackageName $ name + ] + | Just pruned <- Map.lookup name prunedGlobalDeps = + let prunedDeps = + map (style Current . fromPackageName) pruned + in Just $ fillSep + [ flow "Can't use GHC boot package" + , style Current . fromPackageName $ name + , flow "when it depends on a replaced boot package. You need to \ + \add the following as explicit dependencies to the \ + \project:" + , line + , encloseSep "" "" ", " prunedDeps + ] + | otherwise = Just $ fillSep + [ flow "Unknown package:" + , style Current . fromPackageName $ name + ] + + pprintFlags flags + | Map.null flags = "" + | otherwise = parens $ sep $ map pprintFlag $ Map.toList flags + pprintFlag (name, True) = "+" <> fromString (flagNameString name) + pprintFlag (name, False) = "-" <> fromString (flagNameString name) + + pprintDep (name, (range, mlatestApplicable, badDep)) = case badDep of + NotInBuildPlan + | name `elem` fold prunedGlobalDeps -> butMsg $ fillSep + [ flow "this GHC boot package has been pruned from the Stack \ + \configuration. You need to add the package explicitly to" + , style Shell "extra-deps" <> "." + ] + | otherwise -> butMsg $ inconsistentMsg Nothing + -- TODO: For local packages, suggest editing constraints + DependencyMismatch version -> butMsg $ inconsistentMsg $ Just version + -- I think the main useful info is these explain why missing packages are + -- needed. Instead lets give the user the shortest path from a target to the + -- package. + Couldn'tResolveItsDependencies _version -> Nothing + HasNoLibrary -> Just $ fillSep + [ errorName + , flow "is a library dependency, but the package provides no library." 
+ ] + BDDependencyCycleDetected names -> Just $ fillSep + [ errorName + , flow $ "dependency cycle detected: " + ++ L.intercalate ", " (map packageNameString names) + ] + where + errorName = style Error . fromPackageName $ name + goodRange = style Good (fromString (C.display range)) + rangeMsg = if range == C.anyVersion + then "needed," + else fillSep + [ flow "must match" + , goodRange <> "," + ] + butMsg msg = Just $ fillSep + [ errorName + , rangeMsg + , "but" + , msg + , latestApplicable Nothing + ] + inconsistentMsg mVersion = fillSep + [ style Error $ maybe + ( flow "no version" ) + ( fromPackageId . PackageIdentifier name ) + mVersion + , flow "is in the Stack configuration" + ] + latestApplicable mversion = + case mlatestApplicable of + Nothing + | isNothing mversion -> fillSep + [ flow "(no matching package and version found. Perhaps there is \ + \an error in the specification of a package's" + , style Shell "dependencies" + , "or" + , style Shell "build-tools" + , flow "(Hpack) or" + , style Shell "build-depends" <> "," + , style Shell "build-tools" + , "or" + , style Shell "build-tool-depends" + , case configFile of + Left _ -> flow "(Cabal file)." + Right projectConfigFile -> fillSep + [ flow "(Cabal file) or an omission from the" + , style Shell "packages" + , flow "list in" + , pretty projectConfigFile + , flow "(project-level configuration).)" + ] + ] + | otherwise -> "" + Just (laVer, _) + | Just laVer == mversion -> + flow "(latest matching version is specified)." + | otherwise -> + fillSep + [ flow "(latest matching version is" + , style Good (fromString $ versionString laVer) <> ")." 
+ ] + +data ConstructPlanException + = DependencyCycleDetected [PackageName] + | DependencyPlanFailures + Package + (Map PackageName (VersionRange, LatestApplicableVersion, BadDependency)) + | UnknownPackage ActualCompiler PackageName + -- TODO perhaps this constructor will be removed, and BadDependency will + -- handle it all + -- ^ Recommend adding to extra-deps, give a helpful version number? + deriving (Eq, Show) + +-- | The latest applicable version and it's latest Cabal file revision. +-- For display purposes only, Nothing if package not found +type LatestApplicableVersion = Maybe (Version, BlobKey) + +-- | Reason why a dependency was not used +data BadDependency + = NotInBuildPlan + | Couldn'tResolveItsDependencies Version + | DependencyMismatch Version + | HasNoLibrary + -- ^ See description of 'Stack.Types.Dependency.DepType' + | BDDependencyCycleDetected ![PackageName] + deriving (Eq, Ord, Show) + +missingExeError :: StyleDoc -> Bool -> StyleDoc -> StyleDoc +missingExeError errorCode isSimpleBuildType msg = + errorCode + <> line + <> msg + <> line + <> flow "Possible causes of this issue:" + <> line + <> bulletedList possibleCauses + where + possibleCauses = + [ fillSep + [ flow "No module named" + , style Shell "Main" <> "." + , "The" + , style Shell "main-is" + , flow "source file should usually have a header indicating that \ + \it's a" + , style Shell "Main" + , "module." + ] + , fillSep + [ flow "A Cabal file that refers to nonexistent other files (e.g. a" + , style Shell "license-file" + , flow "that doesn't exist). Running" + , style Shell "cabal check" + , flow "may point out these issues." + ] + ] + <> [ fillSep + [ "The" + , style File "Setup.hs" + , flow "file is changing the installation target directory." 
+ ] + | not isSimpleBuildType + ] + +showBuildError :: + String + -> Bool + -> ExitCode + -> Maybe PackageIdentifier + -> Path Abs File + -> [String] + -> Maybe (Path Abs File) + -> [Text] + -> StyleDoc +showBuildError errorCode isBuildingSetup exitCode mtaskProvides execName fullArgs logFiles bss = + let fullCmd = unwords + $ dropQuotes (toFilePath execName) + : map (T.unpack . showProcessArgDebug) fullArgs + logLocations = + maybe + mempty + (\fp -> line <> flow "Logs have been written to:" <+> + pretty fp) + logFiles + in fromString errorCode + <> line + <> flow "While building" <+> + ( case (isBuildingSetup, mtaskProvides) of + (False, Nothing) -> impureThrow ShowBuildErrorBug + (False, Just taskProvides') -> + "package" <+> + style + Target + (fromString $ dropQuotes (packageIdentifierString taskProvides')) + (True, Nothing) -> "simple" <+> style File "Setup.hs" + (True, Just taskProvides') -> + "custom" <+> + style File "Setup.hs" <+> + flow "for package" <+> + style + Target + (fromString $ dropQuotes (packageIdentifierString taskProvides')) + ) <+> + flow "(scroll up to its section to see the error) using:" + <> line + <> style Shell (fromString fullCmd) + <> line + <> flow "Process exited with code:" <+> (fromString . show) exitCode <+> + ( if exitCode == ExitFailure (-9) + then flow "(THIS MAY INDICATE OUT OF MEMORY)" + else mempty + ) + <> logLocations + <> if null bss + then mempty + else blankLine <> string (removeTrailingSpaces (map T.unpack bss)) + where + removeTrailingSpaces = dropWhileEnd isSpace . unlines + dropQuotes = filter ('\"' /=) + +-- | Get the shortest reason for the package to be in the build plan. In other +-- words, trace the parent dependencies back to a \'wanted\' package. 
+getShortestDepsPath :: + ParentMap + -> Set PackageName + -> PackageName + -> Maybe [PackageIdentifier] +getShortestDepsPath (MonoidMap parentsMap) wanted' name = + if Set.member name wanted' + then Just [] + else case M.lookup name parentsMap of + Nothing -> Nothing + Just parents -> Just $ findShortest 256 paths0 + where + paths0 = M.fromList $ + map (\(ident, _) -> (pkgName ident, startDepsPath ident)) parents + where + -- The 'paths' map is a map from PackageName to the shortest path + -- found to get there. It is the frontier of our breadth-first + -- search of dependencies. + findShortest :: Int -> Map PackageName DepsPath -> [PackageIdentifier] + findShortest fuel _ | fuel <= 0 = + [ PackageIdentifier + (mkPackageName "stack-ran-out-of-jet-fuel") + (C.mkVersion [0]) + ] + findShortest _ paths | M.null paths = [] + findShortest fuel paths = + case nonEmpty targets of + Nothing -> findShortest (fuel - 1) $ M.fromListWith chooseBest $ + concatMap extendPath recurses + Just targets' -> + let (DepsPath _ _ path) = minimum (snd <$> targets') + in path + where + (targets, recurses) = + L.partition (\(n, _) -> n `Set.member` wanted') (M.toList paths) + chooseBest :: DepsPath -> DepsPath -> DepsPath + chooseBest = max + -- Extend a path to all its parents. 
+ extendPath :: (PackageName, DepsPath) -> [(PackageName, DepsPath)] + extendPath (n, dp) = + case M.lookup n parentsMap of + Nothing -> [] + Just parents -> + map (\(pkgId, _) -> (pkgName pkgId, extendDepsPath pkgId dp)) parents + +startDepsPath :: PackageIdentifier -> DepsPath +startDepsPath ident = DepsPath + { dpLength = 1 + , dpNameLength = length (packageNameString (pkgName ident)) + , dpPath = [ident] + } + +extendDepsPath :: PackageIdentifier -> DepsPath -> DepsPath +extendDepsPath ident dp = DepsPath + { dpLength = dp.dpLength + 1 + , dpNameLength = dp.dpNameLength + length (packageNameString (pkgName ident)) + , dpPath = [ident] + } + +data DepsPath = DepsPath + { dpLength :: Int + -- ^ Length of dpPath + , dpNameLength :: Int + -- ^ Length of package names combined + , dpPath :: [PackageIdentifier] + -- ^ A path where the packages later in the list depend on those that come + -- earlier + } + deriving (Eq, Ord, Show) diff --git a/src/Stack/Types/BuildConfig.hs b/src/Stack/Types/BuildConfig.hs new file mode 100644 index 0000000000..dfed2f1506 --- /dev/null +++ b/src/Stack/Types/BuildConfig.hs @@ -0,0 +1,114 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE TypeFamilies #-} + +{-| +Module : Stack.Types.BuildConfig +License : BSD-3-Clause +-} + +module Stack.Types.BuildConfig + ( BuildConfig (..) + , HasBuildConfig (..) + , configFileL + , configFileRootL + , getWorkDir + , wantedCompilerVersionL + ) where + +import qualified Data.Either.Extra as EE +import Path ( (), parent ) +import RIO.Process ( HasProcessContext (..) ) +import Stack.Prelude +import Stack.Types.Config ( Config, HasConfig (..), workDirL ) +import Stack.Types.Curator ( Curator ) +import Stack.Types.GHCVariant ( HasGHCVariant (..) ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.Runner ( HasRunner (..) ) +import Stack.Types.SourceMap ( SMWanted (..) 
) +import Stack.Types.Storage ( ProjectStorage ) + +-- | A superset of 'Config' adding information on how to build code. The reason +-- for this breakdown is because we will need some of the information from +-- 'Config' in order to determine the values here. +-- +-- These are the components which know nothing about local configuration. +data BuildConfig = BuildConfig + { config :: !Config + , smWanted :: !SMWanted + , extraPackageDBs :: ![Path Abs Dir] + -- ^ Extra package databases + , configFile :: !(Either (Path Abs File) (Path Abs File)) + -- ^ Either (Left) the location of the user-specific global configuration + -- file or, in most cases, (Right) the location of the project-level + -- coniguration file (stack.yaml, by default). + -- + -- Note: if the STACK_YAML environment variable is used, the location of the + -- project-level configuration file may be different from + -- projectRootL "stack.yaml" if a different file name is used. + , projectStorage :: !ProjectStorage + -- ^ Database connection pool for project Stack database + , curator :: !(Maybe Curator) + } + +instance HasPlatform BuildConfig where + platformL = configL . platformL + {-# INLINE platformL #-} + platformVariantL = configL . platformVariantL + {-# INLINE platformVariantL #-} + +instance HasGHCVariant BuildConfig where + ghcVariantL = configL . ghcVariantL + {-# INLINE ghcVariantL #-} + +instance HasProcessContext BuildConfig where + processContextL = configL . processContextL + +instance HasPantryConfig BuildConfig where + pantryConfigL = configL . pantryConfigL + +instance HasConfig BuildConfig where + configL = lens (.config) (\x y -> x { config = y }) + +instance HasRunner BuildConfig where + runnerL = configL . runnerL + +instance HasLogFunc BuildConfig where + logFuncL = runnerL . logFuncL + +instance HasStylesUpdate BuildConfig where + stylesUpdateL = runnerL . stylesUpdateL + +instance HasTerm BuildConfig where + useColorL = runnerL . useColorL + termWidthL = runnerL . 
termWidthL + +class HasConfig env => HasBuildConfig env where + buildConfigL :: Lens' env BuildConfig + +instance HasBuildConfig BuildConfig where + buildConfigL = id + {-# INLINE buildConfigL #-} + +configFileL :: + HasBuildConfig env + => Lens' env (Either (Path Abs File) (Path Abs File)) +configFileL = buildConfigL . lens (.configFile) (\x y -> x { configFile = y }) + +-- | Directory containing the configuration file. +configFileRootL :: HasBuildConfig env => Getting r env (Path Abs Dir) +configFileRootL = configFileL . to EE.fromEither . to parent + +-- | Work directory in the directory of the configuration file (global or +-- project-level). +getWorkDir :: (HasBuildConfig env, MonadReader env m) => m (Path Abs Dir) +getWorkDir = do + configFileRoot <- view configFileRootL + workDir <- view workDirL + pure (configFileRoot workDir) + +-- | The compiler specified by the @SnapshotDef@. This may be different from the +-- actual compiler used! +wantedCompilerVersionL :: HasBuildConfig s => Getting r s WantedCompiler +wantedCompilerVersionL = buildConfigL . to (.smWanted.compiler) diff --git a/src/Stack/Types/BuildOpts.hs b/src/Stack/Types/BuildOpts.hs new file mode 100644 index 0000000000..cf7c335db8 --- /dev/null +++ b/src/Stack/Types/BuildOpts.hs @@ -0,0 +1,143 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Types.BuildOpts +Description : Configuration options for building. +License : BSD-3-Clause + +Configuration options for building. +-} + +module Stack.Types.BuildOpts + ( BuildOpts (..) + , HaddockOpts (..) + , TestOpts (..) + , BenchmarkOpts (..) + , buildOptsHaddockL + , buildOptsInstallExesL + , buildOptsSemaphoreL + ) where + +import Stack.Prelude +import Stack.Types.BuildOptsMonoid + ( CabalVerbosity (..), ProgressBarFormat (..) 
) +import Stack.Types.Component ( StackUnqualCompName ) + +-- | Build options that is interpreted by the build command. This is built up +-- from BuildOptsCLI and BuildOptsMonoid +data BuildOpts = BuildOpts + { libProfile :: !Bool + , exeProfile :: !Bool + , libStrip :: !Bool + , exeStrip :: !Bool + , buildHaddocks :: !Bool + -- ^ Build Haddock documentation? + , haddockOpts :: !HaddockOpts + -- ^ Options to pass to haddock + , openHaddocks :: !Bool + -- ^ Open haddocks in the browser? + , haddockDeps :: !(Maybe Bool) + -- ^ Build haddocks for dependencies? + , haddockExecutables :: !Bool + -- ^ Also build Haddock documentation for all executable components, like + -- @runghc Setup.hs haddock --executables@. + , haddockTests :: !Bool + -- ^ Also build Haddock documentation for all test suite components, like + -- @runghc Setup.hs haddock --tests@. + , haddockBenchmarks :: !Bool + -- ^ Also build Haddock documentation for all benchmark components, like + -- @runghc Setup.hs haddock --benchmarks@. + , haddockInternal :: !Bool + -- ^ Build haddocks for all symbols and packages, like + -- @cabal haddock --internal@ + , haddockHyperlinkSource :: !Bool + -- ^ Build hyperlinked source. Disable for no sources. + , haddockForHackage :: !Bool + -- ^ Build with flags to generate Haddock documentation suitable to upload + -- to Hackage. + , installExes :: !Bool + -- ^ Install executables to user path after building? + , installCompilerTool :: !Bool + -- ^ Install executables to compiler tools path after building? + , preFetch :: !Bool + -- ^ Fetch all packages immediately + -- ^ Watch files for changes and automatically rebuild + , keepGoing :: !(Maybe Bool) + -- ^ Keep building/running after failure + , keepTmpFiles :: !Bool + -- ^ Keep intermediate files and build directories + , forceDirty :: !Bool + -- ^ Force treating all project packages and local extra-deps as having + -- dirty files. 
+ , tests :: !Bool + -- ^ Turn on tests for local targets + , testOpts :: !TestOpts + -- ^ Additional test arguments + , benchmarks :: !Bool + -- ^ Turn on benchmarks for local targets + , benchmarkOpts :: !BenchmarkOpts + -- ^ Additional test arguments + -- ^ Commands (with arguments) to run after a successful build + -- ^ Only perform the configure step when building + , reconfigure :: !Bool + -- ^ Perform the configure step even if already configured + , cabalVerbose :: !CabalVerbosity + -- ^ Ask Cabal to be verbose in its builds + , splitObjs :: !Bool + -- ^ Whether to enable split-objs. + , skipComponents :: ![StackUnqualCompName] + -- ^ Which components to skip when building + , interleavedOutput :: !Bool + -- ^ Should we use the interleaved GHC output when building + -- multiple packages? + , progressBar :: !ProgressBarFormat + -- ^ Format of the progress bar + , ddumpDir :: !(Maybe Text) + , semaphore :: !Bool + -- ^ Use Cabal's --semaphore=SEMAPHORE option to build modules of the same + -- package in parallel. 
+ } + deriving Show + +-- | Haddock Options +newtype HaddockOpts = HaddockOpts + { additionalArgs :: [String] -- ^ Arguments passed to haddock program + } + deriving (Eq, Show) + +-- | Options for the 'FinalAction' 'DoTests' +data TestOpts = TestOpts + { rerunTests :: !Bool -- ^ Whether successful tests will be run gain + , additionalArgs :: ![String] -- ^ Arguments passed to the test program + , coverage :: !Bool -- ^ Generate a code coverage report + , runTests :: !Bool -- ^ Enable running of tests + , maximumTimeSeconds :: !(Maybe Int) -- ^ test suite timeout in seconds + , timeoutGraceSeconds :: !(Maybe Int) + -- ^ additional grace period after timeout before force-killing + , allowStdin :: !Bool -- ^ Whether to allow standard input + } + deriving (Eq, Show) + +-- | Options for the 'FinalAction' 'DoBenchmarks' +data BenchmarkOpts = BenchmarkOpts + { additionalArgs :: !(Maybe String) + -- ^ Arguments passed to the benchmark program + , runBenchmarks :: !Bool + -- ^ Enable running of benchmarks + } + deriving (Eq, Show) + +buildOptsInstallExesL :: Lens' BuildOpts Bool +buildOptsInstallExesL = + lens (.installExes) (\bopts t -> bopts {installExes = t}) + +buildOptsHaddockL :: Lens' BuildOpts Bool +buildOptsHaddockL = + lens (.buildHaddocks) (\bopts t -> bopts {buildHaddocks = t}) + +buildOptsSemaphoreL :: Lens' BuildOpts Bool +buildOptsSemaphoreL = + lens (.semaphore) (\bopts t -> bopts {semaphore = t}) diff --git a/src/Stack/Types/BuildOptsCLI.hs b/src/Stack/Types/BuildOptsCLI.hs new file mode 100644 index 0000000000..dba26d0fd1 --- /dev/null +++ b/src/Stack/Types/BuildOptsCLI.hs @@ -0,0 +1,128 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.BuildOptsCLI +Description : Configuration options for building from the command line only. +License : BSD-3-Clause + +Configuration options for building from the command line only. 
+-} + +module Stack.Types.BuildOptsCLI + ( BuildOptsCLI (..) + , defaultBuildOptsCLI + , ApplyCLIFlag (..) + , BuildSubset (..) + , FileWatchOpts (..) + , BuildCommand (..) + , boptsCLIAllProgOptions + , boptsCLIFlagsByName + ) where + +import qualified Data.Map.Strict as Map +import qualified Data.Text as T +import Stack.Prelude + +-- | Build options that are specified from the CLI and not specified as +-- non-project specific configuration options under the build key. +data BuildOptsCLI = BuildOptsCLI + { targetsCLI :: ![Text] + , dryrun :: !Bool + , ghcOptions :: ![Text] + , progsOptions :: ![(Text, [Text])] + , flags :: !(Map ApplyCLIFlag (Map FlagName Bool)) + , allowNewer :: !(First Bool) + , buildSubset :: !BuildSubset + , fileWatch :: !FileWatchOpts + , watchAll :: !Bool + , exec :: ![(String, [String])] + , onlyConfigure :: !Bool + , command :: !BuildCommand + , initialBuildSteps :: !Bool + } + deriving Show + +defaultBuildOptsCLI ::BuildOptsCLI +defaultBuildOptsCLI = BuildOptsCLI + { targetsCLI = [] + , dryrun = False + , flags = Map.empty + , allowNewer = mempty + , ghcOptions = [] + , progsOptions = [] + , buildSubset = BSAll + , fileWatch = NoFileWatch + , watchAll = False + , exec = [] + , onlyConfigure = False + , command = Build + , initialBuildSteps = False + } + +-- | How to apply a CLI flag +data ApplyCLIFlag + = ACFAllProjectPackages + -- ^ Apply to all project packages which have such a flag name available. + | ACFByName !PackageName + -- ^ Apply to the specified package only. + deriving (Eq, Ord, Show) + +-- | Which subset of packages to build +data BuildSubset + = BSAll + | BSOnlySnapshot + -- ^ Only install packages in the snapshot database, skipping + -- packages intended for the local database. 
+ | BSOnlyDependencies + | BSOnlyLocals + -- ^ Refuse to build anything in the snapshot database, see + -- https://github.com/commercialhaskell/stack/issues/5272 + deriving (Show, Eq) + +data FileWatchOpts + = NoFileWatch + | FileWatch + | FileWatchPoll + deriving (Show, Eq) + +-- | Command sum type for conditional arguments. +data BuildCommand + = Build + | Test + | Haddock + | Bench + | Install + deriving (Eq, Show) + +-- | Generate a list of @--PROG-option=@ arguments for all PROGs. + +-- At the command line, --PROG-option="" is received as +-- --PROG-option= (without quotes). However, with the process library, +-- what is received is --PROG-option="" (with quotes), which is NOT +-- what is required. +boptsCLIAllProgOptions :: BuildOptsCLI -> [Text] +boptsCLIAllProgOptions boptsCLI = + concatMap progOptionArgs boptsCLI.progsOptions + where + -- Generate a list of --PROG-option= arguments for a PROG. + progOptionArgs :: (Text, [Text]) -> [Text] + progOptionArgs (prog, opts) = map progOptionArg opts + where + -- Generate a --PROG-option= argument for a PROG and option. + progOptionArg :: Text -> Text + progOptionArg opt = T.concat + [ "--" + , prog + , "-option=" + , opt + ] + +-- | Only flags set via 'ACFByName' +boptsCLIFlagsByName :: BuildOptsCLI -> Map PackageName (Map FlagName Bool) +boptsCLIFlagsByName = Map.fromList . mapMaybe go . Map.toList . (.flags) + where + go (ACFAllProjectPackages, _) = Nothing + go (ACFByName name, flags) = Just (name, flags) diff --git a/src/Stack/Types/BuildOptsMonoid.hs b/src/Stack/Types/BuildOptsMonoid.hs new file mode 100644 index 0000000000..26847527bb --- /dev/null +++ b/src/Stack/Types/BuildOptsMonoid.hs @@ -0,0 +1,445 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.BuildOptsMonoid +Description : Configuration options for building. 
+License : BSD-3-Clause + +Configuration options for building from the command line and/or a configuration +file. +-} + +module Stack.Types.BuildOptsMonoid + ( BuildOptsMonoid (..) + , HaddockOptsMonoid (..) + , TestOptsMonoid (..) + , BenchmarkOptsMonoid (..) + , CabalVerbosity (..) + , ProgressBarFormat (..) + , buildOptsMonoidHaddockL + , buildOptsMonoidTestsL + , buildOptsMonoidBenchmarksL + , buildOptsMonoidInstallExesL + , buildOptsMonoidSemaphoreL + , toFirstCabalVerbosity + , readProgressBarFormat + ) where + +import Data.Aeson.Types ( FromJSON (..), withText ) +import Data.Aeson.WarningParser + ( WithJSONWarnings, (..:?), (..!=), jsonSubWarnings + , withObjectWarnings + ) +import qualified Data.Text as T +import Distribution.Parsec ( Parsec (..), simpleParsec ) +import Distribution.Verbosity ( Verbosity, normal, verbose ) +import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) +import Stack.Prelude hiding ( trace ) +import Stack.Types.ComponentUtils ( StackUnqualCompName ) + +-- | Build options that may be specified as non-project specific configuration +-- options under the build key (with certain exceptions) or from the CLI. 
+data BuildOptsMonoid = BuildOptsMonoid + { semaphore :: !FirstFalse + , trace :: !Any + -- ^ Cannot be specified under the build key + , profile :: !Any + -- ^ Cannot be specified under the build key + , noStrip :: !Any + -- ^ Cannot be specified under the build key + , libProfile :: !FirstFalse + , exeProfile :: !FirstFalse + , libStrip :: !FirstTrue + , exeStrip :: !FirstTrue + , buildHaddocks :: !FirstFalse + , haddockOpts :: !HaddockOptsMonoid + , openHaddocks :: !FirstFalse + , haddockDeps :: !(First Bool) + , haddockExecutables :: !FirstFalse + , haddockTests :: !FirstFalse + , haddockBenchmarks :: !FirstFalse + , haddockInternal :: !FirstFalse + , haddockHyperlinkSource :: !FirstTrue + , haddockForHackage :: !FirstFalse + , installExes :: !FirstFalse + , installCompilerTool :: !FirstFalse + , preFetch :: !FirstFalse + , keepGoing :: !(First Bool) + , keepTmpFiles :: !FirstFalse + , forceDirty :: !FirstFalse + , tests :: !FirstFalse + , testOpts :: !TestOptsMonoid + , benchmarks :: !FirstFalse + , benchmarkOpts :: !BenchmarkOptsMonoid + , reconfigure :: !FirstFalse + , cabalVerbose :: !(First CabalVerbosity) + , splitObjs :: !FirstFalse + , skipComponents :: ![StackUnqualCompName] + , interleavedOutput :: !FirstTrue + , progressBar :: !(First ProgressBarFormat) + , ddumpDir :: !(First Text) + } + deriving (Generic, Show) + +instance FromJSON (WithJSONWarnings BuildOptsMonoid) where + parseJSON = withObjectWarnings "BuildOptsMonoid" $ \o -> do + let trace = Any False + profile = Any False + noStrip = Any False + libProfile <- FirstFalse <$> o ..:? libProfileArgName + exeProfile <-FirstFalse <$> o ..:? exeProfileArgName + libStrip <- FirstTrue <$> o ..:? libStripArgName + exeStrip <-FirstTrue <$> o ..:? exeStripArgName + buildHaddocks <- FirstFalse <$> o ..:? haddockArgName + haddockOpts <- jsonSubWarnings (o ..:? haddockOptsArgName ..!= mempty) + openHaddocks <- FirstFalse <$> o ..:? openHaddocksArgName + haddockDeps <- First <$> o ..:? 
haddockDepsArgName + haddockExecutables <- FirstFalse <$> o ..:? haddockExecutablesArgName + haddockTests <- FirstFalse <$> o ..:? haddockTestsArgName + haddockBenchmarks <- FirstFalse <$> o ..:? haddockBenchmarksArgName + haddockInternal <- FirstFalse <$> o ..:? haddockInternalArgName + haddockHyperlinkSource <- FirstTrue <$> o ..:? haddockHyperlinkSourceArgName + haddockForHackage <- FirstFalse <$> o ..:? haddockForHackageArgName + installExes <- FirstFalse <$> o ..:? installExesArgName + installCompilerTool <- FirstFalse <$> o ..:? installCompilerToolArgName + preFetch <- FirstFalse <$> o ..:? preFetchArgName + keepGoing <- First <$> o ..:? keepGoingArgName + keepTmpFiles <- FirstFalse <$> o ..:? keepTmpFilesArgName + forceDirty <- FirstFalse <$> o ..:? forceDirtyArgName + tests <- FirstFalse <$> o ..:? testsArgName + testOpts <- jsonSubWarnings (o ..:? testOptsArgName ..!= mempty) + benchmarks <- FirstFalse <$> o ..:? benchmarksArgName + benchmarkOpts <- jsonSubWarnings (o ..:? benchmarkOptsArgName ..!= mempty) + reconfigure <- FirstFalse <$> o ..:? reconfigureArgName + cabalVerbosity <- First <$> o ..:? cabalVerbosityArgName + cabalVerbose' <- FirstFalse <$> o ..:? cabalVerboseArgName + let cabalVerbose = cabalVerbosity <> toFirstCabalVerbosity cabalVerbose' + splitObjs <- FirstFalse <$> o ..:? splitObjsName + skipComponents <- o ..:? skipComponentsName ..!= mempty + interleavedOutput <- FirstTrue <$> o ..:? interleavedOutputName + progressBar <- First <$> o ..:? progressBarName + ddumpDir <- o ..:? ddumpDirName ..!= mempty + semaphore <- FirstFalse <$> o ..:? 
semaphoreArgName + pure BuildOptsMonoid + { semaphore + , trace + , profile + , noStrip + , libProfile + , exeProfile + , libStrip + , exeStrip + , buildHaddocks + , haddockOpts + , openHaddocks + , haddockDeps + , haddockExecutables + , haddockTests + , haddockBenchmarks + , haddockInternal + , haddockHyperlinkSource + , haddockForHackage + , installExes + , installCompilerTool + , preFetch + , keepGoing + , keepTmpFiles + , forceDirty + , tests + , testOpts + , benchmarks + , benchmarkOpts + , reconfigure + , cabalVerbose + , splitObjs + , skipComponents + , interleavedOutput + , progressBar + , ddumpDir + } + +libProfileArgName :: Text +libProfileArgName = "library-profiling" + +exeProfileArgName :: Text +exeProfileArgName = "executable-profiling" + +libStripArgName :: Text +libStripArgName = "library-stripping" + +exeStripArgName :: Text +exeStripArgName = "executable-stripping" + +haddockArgName :: Text +haddockArgName = "haddock" + +haddockOptsArgName :: Text +haddockOptsArgName = "haddock-arguments" + +openHaddocksArgName :: Text +openHaddocksArgName = "open-haddocks" + +haddockDepsArgName :: Text +haddockDepsArgName = "haddock-deps" + +haddockExecutablesArgName :: Text +haddockExecutablesArgName = "haddock-executables" + +haddockTestsArgName :: Text +haddockTestsArgName = "haddock-tests" + +haddockBenchmarksArgName :: Text +haddockBenchmarksArgName = "haddock-benchmarks" + +haddockInternalArgName :: Text +haddockInternalArgName = "haddock-internal" + +haddockHyperlinkSourceArgName :: Text +haddockHyperlinkSourceArgName = "haddock-hyperlink-source" + +haddockForHackageArgName :: Text +haddockForHackageArgName = "haddock-for-hackage" + +installExesArgName :: Text +installExesArgName = "copy-bins" + +installCompilerToolArgName :: Text +installCompilerToolArgName = "copy-compiler-tool" + +preFetchArgName :: Text +preFetchArgName = "prefetch" + +keepGoingArgName :: Text +keepGoingArgName = "keep-going" + +keepTmpFilesArgName :: Text +keepTmpFilesArgName = 
"keep-tmp-files" + +forceDirtyArgName :: Text +forceDirtyArgName = "force-dirty" + +testsArgName :: Text +testsArgName = "test" + +testOptsArgName :: Text +testOptsArgName = "test-arguments" + +benchmarksArgName :: Text +benchmarksArgName = "bench" + +benchmarkOptsArgName :: Text +benchmarkOptsArgName = "benchmark-opts" + +reconfigureArgName :: Text +reconfigureArgName = "reconfigure" + +cabalVerbosityArgName :: Text +cabalVerbosityArgName = "cabal-verbosity" + +cabalVerboseArgName :: Text +cabalVerboseArgName = "cabal-verbose" + +splitObjsName :: Text +splitObjsName = "split-objs" + +skipComponentsName :: Text +skipComponentsName = "skip-components" + +interleavedOutputName :: Text +interleavedOutputName = "interleaved-output" + +progressBarName :: Text +progressBarName = "progress-bar" + +ddumpDirName :: Text +ddumpDirName = "ddump-dir" + +semaphoreArgName :: Text +semaphoreArgName = "semaphore" + +instance Semigroup BuildOptsMonoid where + (<>) = mappenddefault + +instance Monoid BuildOptsMonoid where + mempty = memptydefault + mappend = (<>) + +data TestOptsMonoid = TestOptsMonoid + { rerunTests :: !FirstTrue + , additionalArgs :: ![String] + , coverage :: !FirstFalse + , runTests :: !FirstTrue + , maximumTimeSeconds :: !(First (Maybe Int)) + , timeoutGraceSeconds :: !(First (Maybe Int)) + , allowStdin :: !FirstTrue + } + deriving (Show, Generic) + +instance FromJSON (WithJSONWarnings TestOptsMonoid) where + parseJSON = withObjectWarnings "TestOptsMonoid" $ \o -> do + rerunTests <- FirstTrue <$> o ..:? rerunTestsArgName + additionalArgs <- o ..:? testAdditionalArgsName ..!= [] + coverage <- FirstFalse <$> o ..:? coverageArgName + runTests <- FirstTrue . (not <$>) <$> o ..:? noRunTestsArgName + maximumTimeSeconds <- First <$> o ..:? maximumTimeSecondsArgName + timeoutGraceSeconds <- First <$> o ..:? timeoutGraceSecondsArgName + allowStdin <- FirstTrue <$> o ..:? 
testsAllowStdinName + pure TestOptsMonoid + { rerunTests + , additionalArgs + , coverage + , runTests + , maximumTimeSeconds + , timeoutGraceSeconds + , allowStdin + } + +rerunTestsArgName :: Text +rerunTestsArgName = "rerun-tests" + +testAdditionalArgsName :: Text +testAdditionalArgsName = "additional-args" + +coverageArgName :: Text +coverageArgName = "coverage" + +noRunTestsArgName :: Text +noRunTestsArgName = "no-run-tests" + +maximumTimeSecondsArgName :: Text +maximumTimeSecondsArgName = "test-suite-timeout" + +timeoutGraceSecondsArgName :: Text +timeoutGraceSecondsArgName = "test-suite-timeout-grace" + +testsAllowStdinName :: Text +testsAllowStdinName = "tests-allow-stdin" + +instance Semigroup TestOptsMonoid where + (<>) = mappenddefault + +instance Monoid TestOptsMonoid where + mempty = memptydefault + mappend = (<>) + +newtype HaddockOptsMonoid = HaddockOptsMonoid + { additionalArgs :: [String] + } + deriving (Generic, Show) + +instance FromJSON (WithJSONWarnings HaddockOptsMonoid) where + parseJSON = withObjectWarnings "HaddockOptsMonoid" $ \o -> do + additionalArgs <- o ..:? haddockAdditionalArgsName ..!= [] + pure HaddockOptsMonoid { additionalArgs } + +instance Semigroup HaddockOptsMonoid where + (<>) = mappenddefault + +instance Monoid HaddockOptsMonoid where + mempty = memptydefault + mappend = (<>) + +haddockAdditionalArgsName :: Text +haddockAdditionalArgsName = "haddock-args" + +data BenchmarkOptsMonoid = BenchmarkOptsMonoid + { additionalArgs :: !(First String) + , runBenchmarks :: !FirstTrue + } + deriving (Generic, Show) + +instance FromJSON (WithJSONWarnings BenchmarkOptsMonoid) where + parseJSON = withObjectWarnings "BenchmarkOptsMonoid" $ \o -> do + additionalArgs <- First <$> o ..:? benchmarkAdditionalArgsName + runBenchmarks <- FirstTrue . (not <$>) <$> o ..:? 
noRunBenchmarksArgName + pure BenchmarkOptsMonoid + { additionalArgs + , runBenchmarks + } + +benchmarkAdditionalArgsName :: Text +benchmarkAdditionalArgsName = "benchmark-arguments" + +noRunBenchmarksArgName :: Text +noRunBenchmarksArgName = "no-run-benchmarks" + +instance Semigroup BenchmarkOptsMonoid where + (<>) = mappenddefault + +instance Monoid BenchmarkOptsMonoid where + mempty = memptydefault + mappend :: BenchmarkOptsMonoid -> BenchmarkOptsMonoid -> BenchmarkOptsMonoid + mappend = (<>) + +newtype CabalVerbosity + = CabalVerbosity Verbosity + deriving (Eq, Show) + +toFirstCabalVerbosity :: FirstFalse -> First CabalVerbosity +toFirstCabalVerbosity vf = First $ vf.firstFalse <&> \p -> + if p then verboseLevel else normalLevel + where + verboseLevel = CabalVerbosity verbose + normalLevel = CabalVerbosity normal + +instance FromJSON CabalVerbosity where + + parseJSON = withText "CabalVerbosity" $ \t -> + let s = T.unpack t + errMsg = fail $ "Unrecognised Cabal verbosity: " ++ s + in maybe errMsg pure (simpleParsec s) + +instance Parsec CabalVerbosity where + parsec = CabalVerbosity <$> parsec + +buildOptsMonoidHaddockL :: Lens' BuildOptsMonoid (Maybe Bool) +buildOptsMonoidHaddockL = + lens (.buildHaddocks.firstFalse) + (\buildMonoid t -> buildMonoid {buildHaddocks = FirstFalse t}) + +buildOptsMonoidTestsL :: Lens' BuildOptsMonoid (Maybe Bool) +buildOptsMonoidTestsL = + lens (.tests.firstFalse) + (\buildMonoid t -> buildMonoid {tests = FirstFalse t}) + +buildOptsMonoidBenchmarksL :: Lens' BuildOptsMonoid (Maybe Bool) +buildOptsMonoidBenchmarksL = + lens (.benchmarks.firstFalse) + (\buildMonoid t -> buildMonoid {benchmarks = FirstFalse t}) + +buildOptsMonoidInstallExesL :: Lens' BuildOptsMonoid (Maybe Bool) +buildOptsMonoidInstallExesL = + lens (.installExes.firstFalse) + (\buildMonoid t -> buildMonoid {installExes = FirstFalse t}) + +buildOptsMonoidSemaphoreL :: Lens' BuildOptsMonoid (Maybe Bool) +buildOptsMonoidSemaphoreL = + lens (.semaphore.firstFalse) + 
(\buildMonoid t -> buildMonoid {semaphore = FirstFalse t}) + +-- Type representing formats of Stack's progress bar when building. +data ProgressBarFormat + = NoBar -- No progress bar at all. + | CountOnlyBar -- A bar that only counts packages. + | CappedBar -- A bar capped at a length equivalent to the terminal's width. + | FullBar -- A full progress bar. + deriving (Eq, Show) + +instance FromJSON ProgressBarFormat where + parseJSON = withText "ProgressBarFormat" $ \t -> either + fail + pure + (readProgressBarFormat $ T.unpack t) + +-- | Parse ProgressBarFormat from a String. +readProgressBarFormat :: String -> Either String ProgressBarFormat +readProgressBarFormat s + | s == "none" = pure NoBar + | s == "count-only" = pure CountOnlyBar + | s == "capped" = pure CappedBar + | s == "full" = pure FullBar + | otherwise = Left $ "Invalid progress bar format: " ++ s diff --git a/src/Stack/Types/CabalConfigKey.hs b/src/Stack/Types/CabalConfigKey.hs new file mode 100644 index 0000000000..7047c227d4 --- /dev/null +++ b/src/Stack/Types/CabalConfigKey.hs @@ -0,0 +1,42 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.CabalConfigKey +License : BSD-3-Clause +-} + +module Stack.Types.CabalConfigKey + ( CabalConfigKey (..) + , parseCabalConfigKey + ) where + +import Data.Aeson.Types + ( FromJSON (..), FromJSONKey (..), FromJSONKeyFunction (..) + , withText + ) +import qualified Data.Text as T +import Stack.Prelude + +-- | Which packages do configure opts apply to? 
+data CabalConfigKey + = CCKTargets -- ^ See AGOTargets + | CCKLocals -- ^ See AGOLocals + | CCKEverything -- ^ See AGOEverything + | CCKPackage !PackageName -- ^ A specific package + deriving (Show, Read, Eq, Ord) + +instance FromJSON CabalConfigKey where + parseJSON = withText "CabalConfigKey" parseCabalConfigKey + +instance FromJSONKey CabalConfigKey where + fromJSONKey = FromJSONKeyTextParser parseCabalConfigKey + +parseCabalConfigKey :: (Monad m, MonadFail m) => Text -> m CabalConfigKey +parseCabalConfigKey "$targets" = pure CCKTargets +parseCabalConfigKey "$locals" = pure CCKLocals +parseCabalConfigKey "$everything" = pure CCKEverything +parseCabalConfigKey name = + case parsePackageName $ T.unpack name of + Nothing -> fail $ "Invalid CabalConfigKey: " ++ show name + Just x -> pure $ CCKPackage x diff --git a/src/Stack/Types/Cache.hs b/src/Stack/Types/Cache.hs index 70087213da..154b6499b5 100644 --- a/src/Stack/Types/Cache.hs +++ b/src/Stack/Types/Cache.hs @@ -1,56 +1,181 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.Cache +License : BSD-3-Clause +-} module Stack.Types.Cache - ( ConfigCacheType(..) - , Action(..) - ) where + ( FileCache + , BuildFileCache (..) + , FileCacheInfo (..) + , ConfigCache (..) + , CachePkgSrc (..) + , PrecompiledCache (..) + , ConfigCacheType (..) + , Action (..) + ) where +import Data.Aeson + ( ToJSON (..), FromJSON (..), (.=), (.:), object, withObject + ) +import qualified Data.ByteString as S import qualified Data.Text as T -import Database.Persist.Sql -import Stack.Prelude -import Stack.Types.GhcPkgId +import Database.Persist.Sql + ( PersistField (..), PersistFieldSql (..), PersistValue (..) + , SqlType (..) 
+ ) +import Stack.Prelude +import Stack.Types.ConfigureOpts ( ConfigureOpts ) +import Stack.Types.GhcPkgId + ( GhcPkgId, ghcPkgIdToText, parseGhcPkgId ) --- | Type of config cache +-- | Type representing types of cache in the Stack project SQLite database. data ConfigCacheType - = ConfigCacheTypeConfig - | ConfigCacheTypeFlagLibrary GhcPkgId - | ConfigCacheTypeFlagExecutable PackageIdentifier - deriving (Eq, Show) + = ConfigCacheTypeConfig + -- ^ Cabal configuration cache. + | ConfigCacheTypeFlagLibrary GhcPkgId + -- ^ Library Cabal flag cache. + | ConfigCacheTypeFlagExecutable PackageIdentifier + -- ^ Executable Cabal flag cache. + deriving (Eq, Show) instance PersistField ConfigCacheType where - toPersistValue ConfigCacheTypeConfig = PersistText "config" - toPersistValue (ConfigCacheTypeFlagLibrary v) = - PersistText $ "lib:" <> unGhcPkgId v - toPersistValue (ConfigCacheTypeFlagExecutable v) = - PersistText $ "exe:" <> T.pack (packageIdentifierString v) - fromPersistValue (PersistText t) = - fromMaybe (Left $ "Unexected ConfigCacheType value: " <> t) $ - config <|> fmap lib (T.stripPrefix "lib:" t) <|> - fmap exe (T.stripPrefix "exe:" t) - where - config - | t == "config" = Just (Right ConfigCacheTypeConfig) - | otherwise = Nothing - lib v = do - ghcPkgId <- mapLeft tshow (parseGhcPkgId v) - Right $ ConfigCacheTypeFlagLibrary ghcPkgId - exe v = do - pkgId <- - maybe (Left $ "Unexpected ConfigCacheType value: " <> t) Right $ - parsePackageIdentifier (T.unpack v) - Right $ ConfigCacheTypeFlagExecutable pkgId - fromPersistValue _ = Left "Unexected ConfigCacheType type" + toPersistValue ConfigCacheTypeConfig = PersistText "config" + toPersistValue (ConfigCacheTypeFlagLibrary v) = + PersistText $ "lib:" <> ghcPkgIdToText v + toPersistValue (ConfigCacheTypeFlagExecutable v) = + PersistText $ "exe:" <> T.pack (packageIdentifierString v) + fromPersistValue (PersistText t) = + fromMaybe (Left $ "Unexpected ConfigCacheType value: " <> t) $ + config <|> fmap lib 
(T.stripPrefix "lib:" t) <|> + fmap exe (T.stripPrefix "exe:" t) + where + config + | t == "config" = Just (Right ConfigCacheTypeConfig) + | otherwise = Nothing + lib v = do + ghcPkgId <- mapLeft tshow (parseGhcPkgId v) + Right $ ConfigCacheTypeFlagLibrary ghcPkgId + exe v = do + pkgId <- + maybe (Left $ "Unexpected ConfigCacheType value: " <> t) Right $ + parsePackageIdentifier (T.unpack v) + Right $ ConfigCacheTypeFlagExecutable pkgId + fromPersistValue _ = Left "Unexpected ConfigCacheType type" instance PersistFieldSql ConfigCacheType where - sqlType _ = SqlString + sqlType _ = SqlString +-- | Type representing actions for which the last time the action was performed +-- should be cached. data Action = UpgradeCheck - deriving (Show, Eq, Ord) + deriving (Eq, Ord, Show) + instance PersistField Action where - toPersistValue UpgradeCheck = PersistInt64 1 - fromPersistValue (PersistInt64 1) = Right UpgradeCheck - fromPersistValue x = Left $ T.pack $ "Invalid Action: " ++ show x + toPersistValue UpgradeCheck = PersistInt64 1 + fromPersistValue (PersistInt64 1) = Right UpgradeCheck + fromPersistValue x = Left $ T.pack $ "Invalid Action: " ++ show x + instance PersistFieldSql Action where - sqlType _ = SqlInt64 + sqlType _ = SqlInt64 + +-- | Type synonym representing caches of files and information about them +-- sufficient to identify if they have changed subsequently. +type FileCache = Map FilePath FileCacheInfo + +-- | Type representing caches of information about files sufficient to identify +-- if they have changed subsequently. Stored on disk. +newtype BuildFileCache = BuildFileCache + { fileCache :: FileCache + } + deriving (Eq, FromJSON, Generic, Show, ToJSON) + +instance NFData BuildFileCache + +-- | Type representing information about a file sufficient to identify if +-- it has changed subsequently. +newtype FileCacheInfo = FileCacheInfo + { hash :: SHA256 + -- ^ SHA-256 hash of file contents. 
+ } + deriving (Eq, Generic, Show) + +instance NFData FileCacheInfo + +-- Provided for storing the t'BuildFileCache' values in a file. But maybe +-- JSON/YAML isn't the right choice here, worth considering. +instance ToJSON FileCacheInfo where + toJSON fileCacheInfo = object + [ "hash" .= fileCacheInfo.hash + ] + +instance FromJSON FileCacheInfo where + parseJSON = withObject "FileCacheInfo" $ \o -> FileCacheInfo + <$> o .: "hash" + +-- | Stored in the project's SQLite database to know whether the Cabal +-- configuration has changed or libarary or executable Cabal flags have changed. +data ConfigCache = ConfigCache + { configureOpts :: !ConfigureOpts + -- ^ All Cabal configure options used for this package. + , deps :: !(Set GhcPkgId) + -- ^ The GhcPkgIds of all of the dependencies. Since Cabal doesn't take + -- the complete GhcPkgId (only a PackageIdentifier) in the configure + -- options, just using the previous value is insufficient to know if + -- dependencies have changed. + , components :: !(Set S.ByteString) + -- ^ The components to be built. It's a bit of a hack to include this in + -- here, as it's not a configure option (just a build option), but this + -- is a convenient way to force compilation when the components change. + , buildHaddocks :: !Bool + -- ^ Is Haddock documentation to be built? + , pkgSrc :: !CachePkgSrc + -- ^ The origin of the package's source code. + , pathEnvVar :: !Text + -- ^ Value of the PATH environment variable. 
See + -- + } + deriving (Data, Eq, Generic, Show) + +instance NFData ConfigCache + +data CachePkgSrc + = CacheSrcUpstream + | CacheSrcLocal FilePath + deriving (Data, Eq, Generic, Read, Show) + +instance NFData CachePkgSrc + +instance PersistField CachePkgSrc where + toPersistValue CacheSrcUpstream = PersistText "upstream" + toPersistValue (CacheSrcLocal fp) = PersistText ("local:" <> T.pack fp) + fromPersistValue (PersistText t) = + if t == "upstream" + then Right CacheSrcUpstream + else case T.stripPrefix "local:" t of + Just fp -> Right $ CacheSrcLocal (T.unpack fp) + Nothing -> Left $ "Unexpected CachePkgSrc value: " <> t + fromPersistValue _ = Left "Unexpected CachePkgSrc type" + +instance PersistFieldSql CachePkgSrc where + sqlType _ = SqlString + +-- | Information on a compiled package: the library .conf file (if relevant), +-- the sub-libraries (if present) and all of the executable paths. +data PrecompiledCache base = PrecompiledCache + { library :: !(Maybe (Path base File)) + -- ^ .conf file inside the package database + , subLibs :: ![Path base File] + -- ^ .conf file inside the package database, for each of the sub-libraries + , exes :: ![Path base File] + -- ^ Full paths to executables + } + deriving (Eq, Generic, Show) + +instance NFData (PrecompiledCache Abs) + +instance NFData (PrecompiledCache Rel) diff --git a/src/Stack/Types/Casa.hs b/src/Stack/Types/Casa.hs new file mode 100644 index 0000000000..6cf0edd971 --- /dev/null +++ b/src/Stack/Types/Casa.hs @@ -0,0 +1,64 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.Casa +Description : Casa configuration types. +License : BSD-3-Clause + +Casa configuration types. +-} + +module Stack.Types.Casa + ( CasaOptsMonoid (..) + ) where + +import Data.Aeson.Types ( FromJSON (..) 
) +import Data.Aeson.WarningParser + ( WithJSONWarnings, (..:?), withObjectWarnings ) +import Casa.Client ( CasaRepoPrefix ) +import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) +import Stack.Prelude + +-- | An uninterpreted representation of Casa configuration options. +-- Configurations may be "cascaded" using mappend (left-biased). +data CasaOptsMonoid = CasaOptsMonoid + { enable :: !FirstTrue + , repoPrefix :: !(First CasaRepoPrefix) + , maxKeysPerRequest :: !(First Int) + } + deriving (Generic, Show) + +-- | Decode uninterpreted Casa configuration options from JSON/YAML. +instance FromJSON (WithJSONWarnings CasaOptsMonoid) where + parseJSON = withObjectWarnings "CasaOptsMonoid" $ \o -> do + enable <- FirstTrue <$> o ..:? casaEnableName + repoPrefix <- First <$> o ..:? casaRepoPrefixName + maxKeysPerRequest <- First <$> o ..:? casaMaxKeysPerRequestName + pure CasaOptsMonoid + { enable + , repoPrefix + , maxKeysPerRequest + } + +-- | Left-biased combine Casa configuration options +instance Semigroup CasaOptsMonoid where + (<>) = mappenddefault + +-- | Left-biased combine Casa configurations options +instance Monoid CasaOptsMonoid where + mempty = memptydefault + mappend = (<>) + +-- | Casa configuration enable setting name. +casaEnableName :: Text +casaEnableName = "enable" + +-- | Casa configuration repository prefix setting name. +casaRepoPrefixName :: Text +casaRepoPrefixName = "repo-prefix" + +-- | Casa configuration maximum keys per request setting name. +casaMaxKeysPerRequestName :: Text +casaMaxKeysPerRequestName = "max-keys-per-request" diff --git a/src/Stack/Types/ColorWhen.hs b/src/Stack/Types/ColorWhen.hs new file mode 100644 index 0000000000..f583b64ba0 --- /dev/null +++ b/src/Stack/Types/ColorWhen.hs @@ -0,0 +1,42 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.ColorWhen +License : BSD-3-Clause +-} + +module Stack.Types.ColorWhen + ( ColorWhen (..) 
+ , readColorWhen + ) where + +import Data.Aeson.Types ( FromJSON (..) ) +import Options.Applicative ( ReadM ) +import qualified Options.Applicative.Types as OA +import Stack.Prelude + +data ColorWhen + = ColorNever + | ColorAlways + | ColorAuto + deriving (Eq, Generic, Show) + +instance FromJSON ColorWhen where + parseJSON v = do + s <- parseJSON v + case s of + "never" -> pure ColorNever + "always" -> pure ColorAlways + "auto" -> pure ColorAuto + _ -> fail ("Unknown color use: " <> s <> ". Expected values of " <> + "option are 'never', 'always', or 'auto'.") + +readColorWhen :: ReadM ColorWhen +readColorWhen = + OA.readerAsk >>= \case + "never" -> pure ColorNever + "always" -> pure ColorAlways + "auto" -> pure ColorAuto + _ -> OA.readerError "Expected values of color option are 'never', \ + \'always', or 'auto'." diff --git a/src/Stack/Types/CompCollection.hs b/src/Stack/Types/CompCollection.hs new file mode 100644 index 0000000000..54c4966aeb --- /dev/null +++ b/src/Stack/Types/CompCollection.hs @@ -0,0 +1,183 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Types.CompCollection +License : BSD-3-Clause + +A module providing the type t'CompCollection' and associated helper functions. + +The corresponding Cabal approach uses lists. See, for example, the +'Distribution.Types.PackageDescription.sublibraries', +'Distribution.Types.PackageDescription.foreignLibs', +'Distribution.Types.PackageDescription.executables', +'Distribution.Types.PackageDescription.testSuites', and +'Distribution.Types.PackageDescription.benchmarks' fields. + +Cabal removes all the unbuildable components very early (at the cost of slightly +worse error messages). 
+-} +module Stack.Types.CompCollection + ( CompCollection + , getBuildableSet + , getBuildableSetText + , getBuildableListText + , getBuildableListAs + , foldAndMakeCollection + , hasBuildableComponent + , collectionLookup + , collectionKeyValueList + , collectionMember + , foldComponentToAnotherCollection + ) where + +import qualified Data.Map as M +import qualified Data.Set as Set +import Stack.Prelude +import Stack.Types.Component + ( HasBuildInfo, HasName, StackBuildInfo (..) ) +import Stack.Types.ComponentUtils + ( StackUnqualCompName, unqualCompToText ) + +-- | A type representing collections of components, distinguishing buildable +-- components and non-buildable components. +data CompCollection component = CompCollection + { buildableOnes :: {-# UNPACK #-} !(InnerCollection component) + , unbuildableOnes :: Set StackUnqualCompName + -- ^ The field is lazy beacause it should only serve when users explicitely + -- require unbuildable components to be built. The field allows for + -- intelligible error messages. + } + deriving Show + +instance Semigroup (CompCollection component) where + a <> b = CompCollection + { buildableOnes = a.buildableOnes <> b.buildableOnes + , unbuildableOnes = a.unbuildableOnes <> b.unbuildableOnes + } + +instance Monoid (CompCollection component) where + mempty = CompCollection + { buildableOnes = mempty + , unbuildableOnes = mempty + } + +instance Foldable CompCollection where + foldMap fn collection = foldMap fn collection.buildableOnes + foldr' fn c collection = M.foldr' fn c collection.buildableOnes + null = M.null . (.buildableOnes) + +-- | The 'Data.HashMap.Strict.HashMap' type is a more suitable choice than 'Map' +-- for 'Data.Text.Text' based keys in general (it scales better). However, +-- constant factors are largely dominant for maps with less than 1000 keys. +-- Packages with more than 100 components are extremely unlikely, so we use a +-- 'Map'. 
+type InnerCollection component = Map StackUnqualCompName component + +-- | A function to add a component to a collection of components. Ensures that +-- both 'asNameMap' and 'asNameSet' are updated consistently. +addComponent :: + HasName component + => component + -- ^ Component to add. + -> InnerCollection component + -- ^ Existing collection of components. + -> InnerCollection component +addComponent component = M.insert component.name component + +-- | For the given function and foldable data structure of components of type +-- @compA@, iterates on the elements of that structure and maps each element to +-- a component of type @compB@ while building a v'CompCollection'. +foldAndMakeCollection :: + (HasBuildInfo compB, HasName compB, Foldable sourceCollection) + => (compA -> compB) + -- ^ Function to apply to each element in the data struture. + -> sourceCollection compA + -- ^ Given foldable data structure of components of type @compA@. + -> CompCollection compB +foldAndMakeCollection mapFn = foldl' compIterator mempty + where + compIterator existingCollection component = + compCreator existingCollection (mapFn component) + compCreator existingCollection component + | component.buildInfo.buildable = existingCollection + { buildableOnes = + addComponent component existingCollection.buildableOnes + } + | otherwise = existingCollection + { unbuildableOnes = + Set.insert component.name existingCollection.unbuildableOnes + } + +-- | Get the names of the buildable components in the given collection, as a +-- 'Set' of 'StackUnqualCompName'. +getBuildableSet :: CompCollection component -> Set StackUnqualCompName +getBuildableSet = M.keysSet . (.buildableOnes) + +-- | Get the names of the buildable components in the given collection, as a +-- 'Set' of 'Text'. +getBuildableSetText :: CompCollection component -> Set Text +getBuildableSetText = Set.mapMonotonic unqualCompToText . 
getBuildableSet + +-- | Get the names of the buildable components in the given collection, as a +-- list of 'Text. +getBuildableListText :: CompCollection component -> [Text] +getBuildableListText = getBuildableListAs unqualCompToText + +-- | Apply the given function to the names of the buildable components in the +-- given collection, yielding a list. +getBuildableListAs :: + (StackUnqualCompName -> something) + -- ^ Function to apply to buildable components. + -> CompCollection component + -- ^ Collection of components. + -> [something] +getBuildableListAs fn = Set.foldr' (\v l -> fn v:l) [] . getBuildableSet + +-- | Yields 'True' if, and only if, the given collection includes at least one +-- buildable component. +hasBuildableComponent :: CompCollection component -> Bool +hasBuildableComponent = not . null . getBuildableSet + +-- | For the given name of a buildable component and the given collection of +-- components, yields 'Just' @component@ if the collection includes a buildable +-- component of that name, and 'Nothing' otherwise. +collectionLookup :: + StackUnqualCompName + -- ^ Name of the buildable component. + -> CompCollection component + -- ^ Collection of components. + -> Maybe component +collectionLookup needle haystack = M.lookup needle haystack.buildableOnes + +-- | For a given collection of components, yields a list of pairs for buildable +-- components of the name of the component and the component. +collectionKeyValueList :: + CompCollection component + -> [(StackUnqualCompName, component)] +collectionKeyValueList haystack = M.toList haystack.buildableOnes + +-- | Yields 'True' if, and only if, the given collection of components includes +-- a buildable component with the given name. +collectionMember :: + StackUnqualCompName + -- ^ Name of the buildable component. + -> CompCollection component + -- ^ Collection of components. 
+ -> Bool +collectionMember needle haystack = isJust $ collectionLookup needle haystack + +-- | Reduce the buildable components of the given collection of components by +-- applying the given binary operator to all buildable components, using the +-- given starting value (typically the right-identity of the operator). +foldComponentToAnotherCollection :: + (Monad m) + => CompCollection component + -- ^ Collection of components. + -> (component -> m a -> m a) + -- ^ Binary operator. + -> m a + -- ^ Starting value. + -> m a +foldComponentToAnotherCollection collection fn initialValue = + M.foldr' fn initialValue collection.buildableOnes diff --git a/src/Stack/Types/Compiler.hs b/src/Stack/Types/Compiler.hs index 71feddb1d1..33a2c10faf 100644 --- a/src/Stack/Types/Compiler.hs +++ b/src/Stack/Types/Compiler.hs @@ -1,17 +1,22 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +{-| +Module : Stack.Types.Compiler +License : BSD-3-Clause +-} module Stack.Types.Compiler ( ActualCompiler (..) , WhichCompiler (..) , CompilerRepository (..) + , CompilerTarget (..) + , CompilerBindistPath (..) , CompilerException (..) , defaultCompilerRepository + , defaultCompilerTarget + , defaultCompilerBindistPath , getGhcVersion , whichCompiler , compilerVersionText @@ -20,58 +25,80 @@ module Stack.Types.Compiler , wantedToActual , actualToWanted , parseActualCompiler + , whichCompilerL ) where import Data.Aeson -import Database.Persist + ( FromJSON (..), FromJSONKey (..), FromJSONKeyFunction (..) + , ToJSON (..), Value (..), withText + ) import Database.Persist.Sql + ( PersistField (..), PersistFieldSql (..), SqlType (..) 
) import qualified Data.Text as T +import Distribution.Version ( mkVersion ) import Stack.Prelude -import Stack.Types.Version -import Distribution.Version (mkVersion) +import Stack.Types.Version ( VersionCheck, checkVersion ) +import System.Permissions ( osIsWindows ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Types.Compiler" module. +data CompilerException + = GhcjsNotSupported + | PantryException PantryException + deriving Show + +instance Exception CompilerException where + displayException GhcjsNotSupported = + "Error: [S-7903]\n" + ++ "GHCJS is no longer supported by Stack." + displayException (PantryException p) = + "Error: [S-7972]\n" + ++ displayException p -- | Variety of compiler to use. data WhichCompiler - = Ghc - deriving (Show, Eq, Ord) + = Ghc + deriving (Eq, Ord, Show) -- | Specifies a compiler and its version number(s). -- --- Note that despite having this datatype, stack isn't in a hurry to +-- Note that despite having this datatype, Stack isn't in a hurry to -- support compilers other than GHC. data ActualCompiler - = ACGhc !Version - | ACGhcGit !Text !Text - deriving (Generic, Show, Eq, Ord, Data, Typeable) + = ACGhc !Version + | ACGhcGit !Text !Text + deriving (Data, Eq, Generic, Ord, Show) + instance NFData ActualCompiler + instance Display ActualCompiler where - display (ACGhc x) = display (WCGhc x) - display (ACGhcGit x y) = display (WCGhcGit x y) + display (ACGhc x) = display (WCGhc x) + display (ACGhcGit x y) = display (WCGhcGit x y) + instance ToJSON ActualCompiler where - toJSON = toJSON . compilerVersionText + toJSON = toJSON . 
compilerVersionText + instance FromJSON ActualCompiler where - parseJSON (String t) = either (const $ fail "Failed to parse compiler version") return (parseActualCompiler t) - parseJSON _ = fail "Invalid CompilerVersion, must be String" + parseJSON (String t) = + either + (const $ fail "Failed to parse compiler version") + pure + (parseActualCompiler t) + parseJSON _ = fail "Invalid CompilerVersion, must be String" + instance FromJSONKey ActualCompiler where - fromJSONKey = FromJSONKeyTextParser $ \k -> - case parseActualCompiler k of - Left _ -> fail $ "Failed to parse CompilerVersion " ++ T.unpack k - Right parsed -> return parsed + fromJSONKey = FromJSONKeyTextParser $ \k -> + case parseActualCompiler k of + Left _ -> fail $ "Failed to parse CompilerVersion " ++ T.unpack k + Right parsed -> pure parsed + instance PersistField ActualCompiler where toPersistValue = toPersistValue . compilerVersionText fromPersistValue = (mapLeft tshow . parseActualCompiler) <=< fromPersistValue + instance PersistFieldSql ActualCompiler where sqlType _ = SqlString -data CompilerException - = GhcjsNotSupported - | PantryException PantryException - -instance Show CompilerException where - show GhcjsNotSupported = "GHCJS is no longer supported by Stack" - show (PantryException p) = displayException p -instance Exception CompilerException - wantedToActual :: WantedCompiler -> Either CompilerException ActualCompiler wantedToActual (WCGhc x) = Right $ ACGhc x wantedToActual (WCGhcjs _ _) = Left GhcjsNotSupported @@ -83,8 +110,9 @@ actualToWanted (ACGhcGit x y) = WCGhcGit x y parseActualCompiler :: T.Text -> Either CompilerException ActualCompiler parseActualCompiler = - either (Left . PantryException) wantedToActual . - parseWantedCompiler + either + (Left . PantryException) + wantedToActual . parseWantedCompiler compilerVersionText :: ActualCompiler -> T.Text compilerVersionText = utf8BuilderToText . 
display @@ -98,27 +126,57 @@ whichCompiler ACGhcGit{} = Ghc isWantedCompiler :: VersionCheck -> WantedCompiler -> ActualCompiler -> Bool isWantedCompiler check (WCGhc wanted) (ACGhc actual) = - checkVersion check wanted actual + checkVersion check wanted actual isWantedCompiler _check (WCGhcGit wCommit wFlavour) (ACGhcGit aCommit aFlavour) = - wCommit == aCommit && wFlavour == aFlavour + wCommit == aCommit && wFlavour == aFlavour isWantedCompiler _ _ _ = False getGhcVersion :: ActualCompiler -> Version getGhcVersion (ACGhc v) = v getGhcVersion (ACGhcGit _ _) = - -- We can't return the actual version without running the installed ghc. - -- For now we assume that users of ghc-git use it with a recent commit so we - -- return a version far in the future. This disables our hacks for older - -- versions and passes version checking when we use newer features. - mkVersion [999,0,0] + -- We can't return the actual version without running the installed ghc. + -- For now we assume that users of ghc-git use it with a recent commit so we + -- return a version far in the future. This disables our hacks for older + -- versions and passes version checking when we use newer features. + mkVersion [999, 0, 0] -- | Repository containing the compiler sources newtype CompilerRepository = CompilerRepository Text - deriving (Show) + deriving Show instance FromJSON CompilerRepository where - parseJSON = withText "CompilerRepository" (return . CompilerRepository) + parseJSON = withText "CompilerRepository" (pure . CompilerRepository) defaultCompilerRepository :: CompilerRepository -defaultCompilerRepository = CompilerRepository "https://gitlab.haskell.org/ghc/ghc.git" +defaultCompilerRepository = + CompilerRepository "https://gitlab.haskell.org/ghc/ghc.git" + +-- | Target for Hadrian build +newtype CompilerTarget + = CompilerTarget Text + deriving Show + +instance FromJSON CompilerTarget where + parseJSON = withText "CompilerTarget" (pure . 
CompilerTarget) + +defaultCompilerTarget :: CompilerTarget +defaultCompilerTarget = if osIsWindows + then CompilerTarget "reloc-binary-dist" + else CompilerTarget "binary-dist" + +-- | Hadrian path to built binary distribution +newtype CompilerBindistPath + = CompilerBindistPath Text + deriving Show + +instance FromJSON CompilerBindistPath where + parseJSON = withText "CompilerBindistPath" (pure . CompilerBindistPath) + +defaultCompilerBindistPath :: CompilerBindistPath +defaultCompilerBindistPath = if osIsWindows + then CompilerBindistPath "_build/reloc-bindist" + else CompilerBindistPath "_build/bindist" + +whichCompilerL :: Getting r ActualCompiler WhichCompiler +whichCompilerL = to whichCompiler diff --git a/src/Stack/Types/CompilerBuild.hs b/src/Stack/Types/CompilerBuild.hs index 7f8ae9b0ca..606c849025 100644 --- a/src/Stack/Types/CompilerBuild.hs +++ b/src/Stack/Types/CompilerBuild.hs @@ -1,26 +1,33 @@ {-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.CompilerBuild +License : BSD-3-Clause +-} + module Stack.Types.CompilerBuild - (CompilerBuild(..) - ,compilerBuildName - ,compilerBuildSuffix - ,parseCompilerBuild + ( CompilerBuild (..) + , compilerBuildName + , compilerBuildSuffix + , parseCompilerBuild ) where +import Data.Aeson.Types ( FromJSON, parseJSON, withText ) +import qualified Data.Text as T import Stack.Prelude -import Pantry.Internal.AesonExtended (FromJSON, parseJSON, withText) -import Data.Text as T +-- | Build of the compiler distribution (e.g. standard, gmp4, tinfo6) data CompilerBuild - = CompilerBuildStandard - | CompilerBuildSpecialized String - deriving (Show) + = CompilerBuildStandard + | CompilerBuildSpecialized String + deriving Show instance FromJSON CompilerBuild where - -- Strange structuring is to give consistent error messages - parseJSON = - withText - "CompilerBuild" - (either (fail . show) return . parseCompilerBuild . 
T.unpack) + -- Strange structuring is to give consistent error messages + parseJSON = + withText + "CompilerBuild" + (either (fail . show) pure . parseCompilerBuild . T.unpack) -- | Descriptive name for compiler build compilerBuildName :: CompilerBuild -> String @@ -34,6 +41,6 @@ compilerBuildSuffix (CompilerBuildSpecialized s) = '-' : s -- | Parse compiler build from a String. parseCompilerBuild :: (MonadThrow m) => String -> m CompilerBuild -parseCompilerBuild "" = return CompilerBuildStandard -parseCompilerBuild "standard" = return CompilerBuildStandard -parseCompilerBuild name = return (CompilerBuildSpecialized name) +parseCompilerBuild "" = pure CompilerBuildStandard +parseCompilerBuild "standard" = pure CompilerBuildStandard +parseCompilerBuild name = pure (CompilerBuildSpecialized name) diff --git a/src/Stack/Types/CompilerPaths.hs b/src/Stack/Types/CompilerPaths.hs new file mode 100644 index 0000000000..b9a2b331e3 --- /dev/null +++ b/src/Stack/Types/CompilerPaths.hs @@ -0,0 +1,87 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Types.CompilerPaths +License : BSD-3-Clause +-} + +module Stack.Types.CompilerPaths + ( CompilerPaths (..) + , GhcPkgExe (..) + , HasCompiler (..) 
+ , cabalVersionL + , compilerVersionL + , cpWhich + , getCompilerPath + , getGhcPkgExe + ) where + +import Distribution.System ( Arch ) +import Stack.Prelude +import Stack.Types.Compiler + ( ActualCompiler, WhichCompiler, whichCompiler ) +import Stack.Types.CompilerBuild ( CompilerBuild ) +import Stack.Types.DumpPackage ( DumpPackage ) + +-- | Paths on the filesystem for the compiler we're using +data CompilerPaths = CompilerPaths + { compilerVersion :: !ActualCompiler + , arch :: !Arch + , build :: !CompilerBuild + , compiler :: !(Path Abs File) + , pkg :: !GhcPkgExe + -- ^ ghc-pkg or equivalent + , interpreter :: !(Path Abs File) + -- ^ runghc + , haddock :: !(Path Abs File) + -- ^ haddock, in 'IO' to allow deferring the lookup + , sandboxed :: !Bool + -- ^ Is this a Stack-sandboxed installation? + , cabalVersion :: !Version + -- ^ This is the version of Cabal that Stack will use to compile Setup.hs + -- files in the build process. + -- + -- Note that this is not necessarily the same version as the one that Stack + -- depends on as a library and which is displayed when running + -- @stack ls dependencies | grep Cabal@ in the Stack project. + , globalDB :: !(Path Abs Dir) + -- ^ Global package database + , ghcInfo :: !ByteString + -- ^ Output of @ghc --info@ + , globalDump :: !(Map PackageName DumpPackage) + } + deriving Show + +-- | An environment which ensures that the given compiler is available on the +-- PATH +class HasCompiler env where + compilerPathsL :: SimpleGetter env CompilerPaths + +instance HasCompiler CompilerPaths where + compilerPathsL = id + +-- | Location of the ghc-pkg executable +newtype GhcPkgExe + = GhcPkgExe (Path Abs File) + deriving Show + +cabalVersionL :: HasCompiler env => SimpleGetter env Version +cabalVersionL = compilerPathsL . to (.cabalVersion) + +compilerVersionL :: HasCompiler env => SimpleGetter env ActualCompiler +compilerVersionL = compilerPathsL . 
to (.compilerVersion) + +cpWhich :: (MonadReader env m, HasCompiler env) => m WhichCompiler +cpWhich = view $ compilerPathsL . to (whichCompiler . (.compilerVersion)) + +-- | Get the path for the given compiler ignoring any local binaries. +-- +-- https://github.com/commercialhaskell/stack/issues/1052 +getCompilerPath :: HasCompiler env => RIO env (Path Abs File) +getCompilerPath = view $ compilerPathsL . to (.compiler) + +-- | Get the t'GhcPkgExe' from a 'HasCompiler' environment +getGhcPkgExe :: HasCompiler env => RIO env GhcPkgExe +getGhcPkgExe = view $ compilerPathsL . to (.pkg) diff --git a/src/Stack/Types/Component.hs b/src/Stack/Types/Component.hs new file mode 100644 index 0000000000..28c9e1b550 --- /dev/null +++ b/src/Stack/Types/Component.hs @@ -0,0 +1,195 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Types.Component +License : BSD-3-Clause + +A module providing the types that represent different sorts of components of a +package (library and sub-library, foreign library, executable, test suite and +benchmark). +-} + +module Stack.Types.Component + ( StackLibrary (..) + , StackForeignLibrary (..) + , StackExecutable (..) + , StackTestSuite (..) + , StackBenchmark (..) + , StackUnqualCompName (..) + , StackBuildInfo (..) + , HasName + , HasQualiName + , HasBuildInfo + , HasComponentInfo + ) where + +import Distribution.Compiler ( PerCompilerFlavor ) +import Distribution.ModuleName ( ModuleName ) +import Distribution.PackageDescription + ( BenchmarkInterface, Dependency, TestSuiteInterface ) +import Distribution.Simple ( Extension, Language ) +import Distribution.Utils.Path ( Pkg, Source, SymbolicPath ) +import qualified Distribution.Utils.Path as Cabal +import GHC.Records ( HasField (..) 
) +import Stack.Prelude +import Stack.Types.ComponentUtils + ( StackUnqualCompName (..), emptyCompName ) +import Stack.Types.Dependency ( DepValue ) +import Stack.Types.NamedComponent ( NamedComponent (..) ) + +-- | A type representing (unnamed) main library or sub-library components of a +-- package. +-- +-- Cabal-syntax uses data constructors +-- 'Distribution.Types.LibraryName.LMainLibName' and +-- 'Distribution.Types.LibraryName.LSubLibName' to distinguish main libraries +-- and sub-libraries. We do not do so, as the \'missing\' name in the case of a +-- main library can be represented by the empty string. +-- +-- The corresponding Cabal-syntax type is 'Distribution.Types.Library.Library'. +data StackLibrary = StackLibrary + { name :: StackUnqualCompName + , buildInfo :: !StackBuildInfo + , exposedModules :: [ModuleName] + -- |^ This is only used for gathering the files related to this component. + } + deriving Show + +-- | A type representing foreign library components of a package. +-- +-- The corresponding Cabal-syntax type is +-- 'Distribution.Types.Foreign.Libraries.ForeignLib'. +data StackForeignLibrary = StackForeignLibrary + { name :: StackUnqualCompName + , buildInfo :: !StackBuildInfo + } + deriving Show + +-- | A type representing executable components of a package. +-- +-- The corresponding Cabal-syntax type is +-- 'Distribution.Types.Executable.Executable'. +data StackExecutable = StackExecutable + { name :: StackUnqualCompName + , buildInfo :: !StackBuildInfo + , modulePath :: FilePath + } + deriving Show + +-- | A type representing test suite components of a package. +-- +-- The corresponding Cabal-syntax type is +-- 'Distribution.Types.TestSuite.TestSuite'. +data StackTestSuite = StackTestSuite + { name :: StackUnqualCompName + , buildInfo :: !StackBuildInfo + , interface :: !TestSuiteInterface + } + deriving Show + +-- | A type representing benchmark components of a package. 
+-- +-- The corresponding Cabal-syntax type is +-- 'Distribution.Types.Benchmark.Benchmark'. +data StackBenchmark = StackBenchmark + { name :: StackUnqualCompName + , buildInfo :: StackBuildInfo + , interface :: BenchmarkInterface + -- ^ This is only used for gathering the files related to this component. + } + deriving Show + +-- | Type representing the name of an executable. +newtype ExeName = ExeName Text + deriving (Data, Eq, Hashable, IsString, Generic, NFData, Ord, Show) + +-- | Type representing information needed to build. The file gathering-related +-- fields are lazy because they are not always needed. +-- +-- The corresponding Cabal-syntax type is +-- 'Distribution.Types.BuildInfo.BuildInfo'. + +-- We don't use the Cabal-syntax type because Cabal provides a list of +-- dependencies, and Stack needs a Map and only a small subset of all the +-- information in Cabal-syntax type. +data StackBuildInfo = StackBuildInfo + { buildable :: !Bool + -- ^ Corresponding to Cabal-syntax's + -- 'Distribution.Types.BuildInfo.buildable'. The component is buildable + -- here. + , dependency :: !(Map PackageName DepValue) + -- ^ Corresponding to Cabal-syntax's + -- 'Distribution.Types.BuildInfo.targetBuildDepends'. Dependencies specific + -- to a library or executable target. + , unknownTools :: Set Text + -- ^ From Cabal-syntax's 'Distribution.Types.BuildInfo.buildTools'. We only + -- keep the legacy build tool depends that we know (from a hardcoded list). + -- We only use the deduplication aspect of the Set here, as this field is + -- only used for error reporting in the end. This is lazy because it's an + -- error reporting field only. + , otherModules :: [ModuleName] + -- ^ Only used in file gathering. See usage in "Stack.ComponentFile" module. + , jsSources :: [FilePath] + -- ^ Only used in file gathering. See usage in "Stack.ComponentFile" module. + , hsSourceDirs :: [SymbolicPath Pkg (Cabal.Dir Source)] + -- ^ Only used in file & opts gathering. 
See usage in "Stack.ComponentFile" + -- module for fle gathering. + , cSources :: [FilePath] + -- ^ Only used in file gathering. See usage in "Stack.ComponentFile" module. + , cppOptions :: [String] + -- ^ Only used in opts gathering. See usage in "Stack.Package" module. + , targetBuildDepends :: [Dependency] + -- ^ Only used in opts gathering. + , options :: PerCompilerFlavor [String] + -- ^ Only used in opts gathering. + , allLanguages :: [Language] + -- ^ Only used in opts gathering. + , usedExtensions :: [Extension] + -- ^ Only used in opts gathering. + , includeDirs :: [FilePath] + -- ^ Only used in opts gathering. + , extraLibs :: [String] + -- ^ Only used in opts gathering. + , extraLibDirs :: [String] + -- ^ Only used in opts gathering. + , frameworks :: [String] + -- ^ Only used in opts gathering. + } + deriving Show + +-- | Type synonym for a 'HasField' constraint. +type HasName component = HasField "name" component StackUnqualCompName + +-- | Type synonym for a 'HasField' constraint. +type HasBuildInfo component = HasField "buildInfo" component StackBuildInfo + +instance HasField "qualifiedName" StackLibrary NamedComponent where + getField v + | rawName == emptyCompName = CLib + | otherwise = CSubLib rawName + where + rawName = v.name + +instance HasField "qualifiedName" StackForeignLibrary NamedComponent where + getField = CFlib . (.name) + +instance HasField "qualifiedName" StackExecutable NamedComponent where + getField = CExe . (.name) + +instance HasField "qualifiedName" StackTestSuite NamedComponent where + getField = CTest . (.name) + +instance HasField "qualifiedName" StackBenchmark NamedComponent where + getField = CBench . (.name) + +-- | Type synonym for a 'HasField' constraint which represent a virtual field, +-- computed from the type, the NamedComponent constructor and the name. 
+type HasQualiName component = HasField "qualifiedName" component NamedComponent + +-- | Type synonym for a 'HasField' constraint for all the common component +-- fields i.e. @name@, @buildInfo@ and @qualifiedName@. +type HasComponentInfo component = + (HasName component, HasBuildInfo component, HasQualiName component) diff --git a/src/Stack/Types/ComponentUtils.hs b/src/Stack/Types/ComponentUtils.hs new file mode 100644 index 0000000000..6c965a79a0 --- /dev/null +++ b/src/Stack/Types/ComponentUtils.hs @@ -0,0 +1,67 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} + +{-| +Module : Stack.Types.ComponentUtils +License : BSD-3-Clause + +A module providing a type representing the name of an \'unqualified\' component +and related helper functions. +-} + +module Stack.Types.ComponentUtils + ( StackUnqualCompName (..) + , unqualCompToText + , unqualCompFromText + , unqualCompToString + , unqualCompFromString + , emptyCompName + , fromCabalName + , toCabalName + ) where + +import Data.Aeson ( FromJSON (..) ) +import Data.Hashable ( Hashable (..) ) +import Distribution.Compat.Binary ( decode, encode ) +import Distribution.PackageDescription + ( UnqualComponentName, mkUnqualComponentName + , unUnqualComponentName, unUnqualComponentNameST + ) +import Stack.Prelude + +-- | Type representing the name of an \'unqualified\' component (that is, the +-- component can be any sort - a (unnamed) main library or sub-library, +-- an executable, etc. ). +-- +-- The corresponding The Cabal-syntax type is +-- 'Distribution.Types.UnqualComponentName.UnqualComponentName'. + +-- Ideally, we would use the Cabal-syntax type and not 'Text', to avoid +-- unnecessary work, but there is no 'Hashable' instance for +-- 'Distribution.Types.UnqualComponentName.UnqualComponentName' yet. 
+newtype StackUnqualCompName = StackUnqualCompName UnqualComponentName + deriving (Data, Eq, Generic, IsString, NFData, Ord, Read, Show) + +instance Hashable StackUnqualCompName where + hashWithSalt a v = hashWithSalt a (show v) + +fromCabalName :: UnqualComponentName -> StackUnqualCompName +fromCabalName = StackUnqualCompName + +toCabalName :: StackUnqualCompName -> UnqualComponentName +toCabalName (StackUnqualCompName unqualName) = unqualName + +unqualCompToString :: StackUnqualCompName -> String +unqualCompToString = unUnqualComponentName . toCabalName +unqualCompFromString :: String -> StackUnqualCompName +unqualCompFromString = StackUnqualCompName . mkUnqualComponentName +unqualCompToText :: StackUnqualCompName -> Text +unqualCompToText = (decode . encode) . unUnqualComponentNameST . toCabalName +unqualCompFromText :: Text -> StackUnqualCompName +unqualCompFromText = StackUnqualCompName . decode . encode + +emptyCompName :: StackUnqualCompName +emptyCompName = StackUnqualCompName $ mkUnqualComponentName "" + +instance FromJSON StackUnqualCompName where + parseJSON = fmap (StackUnqualCompName . decode . encode) <$> parseJSON @Text diff --git a/src/Stack/Types/Config.hs b/src/Stack/Types/Config.hs index 23bb0b0cae..0beb310792 100644 --- a/src/Stack/Types/Config.hs +++ b/src/Stack/Types/Config.hs @@ -1,2122 +1,346 @@ -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DefaultSignatures #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE MultiWayIf #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE ViewPatterns #-} - --- | The Config type. 
+{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DefaultSignatures #-} +{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} + +{-| +Module : Stack.Types.Config +License : BSD-3-Clause +-} module Stack.Types.Config ( - -- * Main configuration types and classes - -- ** HasPlatform & HasStackRoot - HasPlatform(..) - ,PlatformVariant(..) - -- ** Runner - ,HasRunner(..) - ,Runner(..) - ,ColorWhen(..) - ,terminalL - ,reExecL - -- ** Config & HasConfig - ,Config(..) - ,HasConfig(..) - ,askLatestSnapshotUrl - ,configProjectRoot - -- ** BuildConfig & HasBuildConfig - ,BuildConfig(..) - ,ProjectPackage(..) - ,DepPackage(..) - ,ppRoot - ,ppVersion - ,ppComponents - ,ppGPD - ,stackYamlL - ,projectRootL - ,HasBuildConfig(..) - -- ** Storage databases - ,UserStorage(..) - ,ProjectStorage(..) - -- ** GHCVariant & HasGHCVariant - ,GHCVariant(..) - ,ghcVariantName - ,ghcVariantSuffix - ,parseGHCVariant - ,HasGHCVariant(..) - ,snapshotsDir - -- ** EnvConfig & HasEnvConfig - ,EnvConfig(..) - ,HasSourceMap(..) - ,HasEnvConfig(..) - ,getCompilerPath - -- * Details - -- ** ApplyGhcOptions - ,ApplyGhcOptions(..) - -- ** CabalConfigKey - ,CabalConfigKey(..) - -- ** ConfigException - ,HpackExecutable(..) - ,ConfigException(..) - -- ** ConfigMonoid - ,ConfigMonoid(..) - ,configMonoidInstallGHCName - ,configMonoidSystemGHCName - ,parseConfigMonoid - -- ** DumpLogs - ,DumpLogs(..) - -- ** EnvSettings - ,EnvSettings(..) - ,minimalEnvSettings - ,defaultEnvSettings - ,plainEnvSettings - -- ** GlobalOpts & GlobalOptsMonoid - ,GlobalOpts(..) - ,GlobalOptsMonoid(..) - ,StackYamlLoc(..) - ,stackYamlLocL - ,LockFileBehavior(..) - ,readLockFileBehavior - ,lockFileBehaviorL - ,defaultLogLevel - -- ** Project & ProjectAndConfigMonoid - ,Project(..) - ,ProjectConfig(..) - ,Curator(..) - ,ProjectAndConfigMonoid(..) 
- ,parseProjectAndConfigMonoid - -- ** PvpBounds - ,PvpBounds(..) - ,PvpBoundsType(..) - ,parsePvpBounds - -- ** ColorWhen - ,readColorWhen - -- ** Styles - ,readStyles - -- ** SCM - ,SCM(..) - -- * Paths - ,bindirSuffix - ,GlobalInfoSource(..) - ,getProjectWorkDir - ,docDirSuffix - ,extraBinDirs - ,hpcReportDir - ,installationRootDeps - ,installationRootLocal - ,bindirCompilerTools - ,hoogleRoot - ,hoogleDatabasePath - ,packageDatabaseDeps - ,packageDatabaseExtra - ,packageDatabaseLocal - ,platformOnlyRelDir - ,platformGhcRelDir - ,platformGhcVerOnlyRelDir - ,useShaPathOnWindows - ,shaPath - ,shaPathForBytes - ,workDirL - -- * Command-specific types - -- ** Eval - ,EvalOpts(..) - -- ** Exec - ,ExecOpts(..) - ,SpecialExecCmd(..) - ,ExecOptsExtra(..) - -- ** Setup - ,DownloadInfo(..) - ,VersionedDownloadInfo(..) - ,GHCDownloadInfo(..) - ,SetupInfo(..) - -- ** Docker entrypoint - ,DockerEntrypoint(..) - ,DockerUser(..) - ,module X + Config (..) + , HasConfig (..) + , askLatestSnapshotUrl + , askRecentSnapshotsUrl + , configProjectRoot + , ghcInstallHook -- * Lens helpers - ,wantedCompilerVersionL - ,actualCompilerVersionL - ,HasCompiler(..) - ,DumpPackage(..) - ,CompilerPaths(..) - ,GhcPkgExe(..) - ,getGhcPkgExe - ,cpWhich - ,ExtraDirs(..) 
- ,buildOptsL - ,globalOptsL - ,buildOptsInstallExesL - ,buildOptsMonoidHaddockL - ,buildOptsMonoidTestsL - ,buildOptsMonoidBenchmarksL - ,buildOptsMonoidInstallExesL - ,buildOptsHaddockL - ,globalOptsBuildOptsMonoidL - ,stackRootL - ,cabalVersionL - ,whichCompilerL - ,envOverrideSettingsL - ,shouldForceGhcColorFlag - ,appropriateGhcColorFlag + , buildOptsL + , envOverrideSettingsL + , globalOptsL + , userGlobalConfigFileL + , stackRootL + , workDirL -- * Helper logging functions - ,prettyStackDevL - -- * Lens reexport - ,view - ,to + , prettyStackDevL ) where -import Control.Monad.Writer (tell) -import Crypto.Hash (hashWith, SHA1(..)) +import Casa.Client ( CasaRepoPrefix ) +import Distribution.System ( Platform ) +import Path ( (), parent, reldir, relfile ) +import RIO.Process ( HasProcessContext (..), ProcessContext ) import Stack.Prelude -import Pantry.Internal.AesonExtended - (ToJSON, toJSON, FromJSON, FromJSONKey (..), parseJSON, withText, object, - (.=), (..:), (...:), (..:?), (..!=), Value(Bool), - withObjectWarnings, WarningParser, Object, jsonSubWarnings, - jsonSubWarningsT, jsonSubWarningsTT, WithJSONWarnings(..), - FromJSONKeyFunction (FromJSONKeyTextParser)) -import Data.Attoparsec.Args (parseArgs, EscapingMode (Escaping)) -import qualified Data.ByteArray.Encoding as Mem (convertToBase, Base(Base16)) -import qualified Data.ByteString.Char8 as S8 -import Data.Coerce (coerce) -import Data.List (stripPrefix) -import qualified Data.List.NonEmpty as NonEmpty -import qualified Data.Map as Map -import qualified Data.Map.Strict as M -import qualified Data.Monoid as Monoid -import Data.Monoid.Map (MonoidMap(..)) -import qualified Data.Set as Set -import qualified Data.Text as T -import Data.Yaml (ParseException) -import qualified Data.Yaml as Yaml -import qualified Distribution.License as C -import Distribution.ModuleName (ModuleName) -import Distribution.PackageDescription (GenericPackageDescription) -import qualified Distribution.PackageDescription as C 
-import Distribution.System (Platform, Arch) -import qualified Distribution.Text -import qualified Distribution.Types.UnqualComponentName as C -import Distribution.Version (anyVersion, mkVersion', mkVersion) -import Generics.Deriving.Monoid (memptydefault, mappenddefault) -import Lens.Micro -import Options.Applicative (ReadM) -import qualified Options.Applicative as OA -import qualified Options.Applicative.Types as OA -import Pantry.Internal (Storage) -import Path -import qualified Paths_stack as Meta -import qualified RIO.List as List -import RIO.PrettyPrint (HasTerm (..), StyleDoc, prettyWarnL, prettyDebugL) -import RIO.PrettyPrint.StylesUpdate (StylesUpdate, - parseStylesUpdateFromString, HasStylesUpdate (..)) -import Stack.Constants +import Stack.Types.ApplyGhcOptions ( ApplyGhcOptions (..) ) +import Stack.Types.ApplyProgOptions ( ApplyProgOptions (..) ) +import Stack.Types.BuildOpts ( BuildOpts ) +import Stack.Types.CabalConfigKey ( CabalConfigKey ) import Stack.Types.Compiler -import Stack.Types.CompilerBuild -import Stack.Types.Docker -import Stack.Types.GhcPkgId -import Stack.Types.NamedComponent -import Stack.Types.Nix -import Stack.Types.Resolver -import Stack.Types.SourceMap -import Stack.Types.TemplateName -import Stack.Types.Version -import qualified System.FilePath as FilePath -import System.PosixCompat.Types (UserID, GroupID, FileMode) -import RIO.Process (ProcessContext, HasProcessContext (..)) -import Casa.Client (CasaRepoPrefix) - --- Re-exports -import Stack.Types.Config.Build as X - --- | The base environment that almost everything in Stack runs in, --- based off of parsing command line options in 'GlobalOpts'. Provides --- logging and process execution. 
-data Runner = Runner - { runnerGlobalOpts :: !GlobalOpts - , runnerUseColor :: !Bool - , runnerLogFunc :: !LogFunc - , runnerTermWidth :: !Int - , runnerProcessContext :: !ProcessContext - } - -data ColorWhen = ColorNever | ColorAlways | ColorAuto - deriving (Eq, Show, Generic) - -instance FromJSON ColorWhen where - parseJSON v = do - s <- parseJSON v - case s of - "never" -> return ColorNever - "always" -> return ColorAlways - "auto" -> return ColorAuto - _ -> fail ("Unknown color use: " <> s <> ". Expected values of " <> - "option are 'never', 'always', or 'auto'.") + ( CompilerBindistPath, CompilerRepository, CompilerTarget ) +import Stack.Types.CompilerBuild ( CompilerBuild ) +import Stack.Types.Docker ( DockerOpts ) +import Stack.Types.DumpLogs ( DumpLogs ) +import Stack.Types.EnvSettings ( EnvSettings ) +import Stack.Types.GHCVariant ( GHCVariant (..), HasGHCVariant (..) ) +import Stack.Types.MsysEnvironment ( MsysEnvironment ) +import Stack.Types.Nix ( NixOpts ) +import Stack.Types.Platform ( HasPlatform (..), PlatformVariant ) +import Stack.Types.Project ( Project (..) ) +import Stack.Types.ProjectConfig ( ProjectConfig (..) ) +import Stack.Types.PvpBounds ( PvpBounds ) +import Stack.Types.Runner ( HasRunner (..), Runner, globalOptsL ) +import Stack.Types.SCM ( SCM ) +import Stack.Types.SetupInfo ( SetupInfo ) +import Stack.Types.Snapshot ( AbstractSnapshot ) +import Stack.Types.Storage ( UserStorage ) +import Stack.Types.TemplateName ( TemplateName ) +import Stack.Types.Version ( VersionCheck (..), VersionRange ) -- | The top-level Stackage configuration. 
-data Config = - Config {configWorkDir :: !(Path Rel Dir) - -- ^ this allows to override .stack-work directory - ,configUserConfigPath :: !(Path Abs File) - -- ^ Path to user configuration file (usually ~/.stack/config.yaml) - ,configBuild :: !BuildOpts - -- ^ Build configuration - ,configDocker :: !DockerOpts - -- ^ Docker configuration - ,configNix :: !NixOpts - -- ^ Execution environment (e.g nix-shell) configuration - ,configProcessContextSettings :: !(EnvSettings -> IO ProcessContext) - -- ^ Environment variables to be passed to external tools - ,configLocalProgramsBase :: !(Path Abs Dir) - -- ^ Non-platform-specific path containing local installations - ,configLocalPrograms :: !(Path Abs Dir) - -- ^ Path containing local installations (mainly GHC) - ,configHideTHLoading :: !Bool - -- ^ Hide the Template Haskell "Loading package ..." messages from the - -- console - ,configPrefixTimestamps :: !Bool - -- ^ Prefix build output with timestamps for each line. - ,configPlatform :: !Platform - -- ^ The platform we're building for, used in many directory names - ,configPlatformVariant :: !PlatformVariant - -- ^ Variant of the platform, also used in directory names - ,configGHCVariant :: !(Maybe GHCVariant) - -- ^ The variant of GHC requested by the user. - ,configGHCBuild :: !(Maybe CompilerBuild) - -- ^ Override build of the compiler distribution (e.g. standard, gmp4, tinfo6) - ,configLatestSnapshot :: !Text - -- ^ URL of a JSON file providing the latest LTS and Nightly snapshots. - ,configSystemGHC :: !Bool - -- ^ Should we use the system-installed GHC (on the PATH) if - -- available? Can be overridden by command line options. - ,configInstallGHC :: !Bool - -- ^ Should we automatically install GHC if missing or the wrong - -- version is available? Can be overridden by command line options. - ,configSkipGHCCheck :: !Bool - -- ^ Don't bother checking the GHC version or architecture. 
- ,configSkipMsys :: !Bool - -- ^ On Windows: don't use a sandboxed MSYS - ,configCompilerCheck :: !VersionCheck - -- ^ Specifies which versions of the compiler are acceptable. - ,configCompilerRepository :: !CompilerRepository - -- ^ Specifies the repository containing the compiler sources - ,configLocalBin :: !(Path Abs Dir) - -- ^ Directory we should install executables into - ,configRequireStackVersion :: !VersionRange - -- ^ Require a version of stack within this range. - ,configJobs :: !Int - -- ^ How many concurrent jobs to run, defaults to number of capabilities - ,configOverrideGccPath :: !(Maybe (Path Abs File)) - -- ^ Optional gcc override path - ,configExtraIncludeDirs :: ![FilePath] - -- ^ --extra-include-dirs arguments - ,configExtraLibDirs :: ![FilePath] - -- ^ --extra-lib-dirs arguments - ,configConcurrentTests :: !Bool - -- ^ Run test suites concurrently - ,configTemplateParams :: !(Map Text Text) - -- ^ Parameters for templates. - ,configScmInit :: !(Maybe SCM) - -- ^ Initialize SCM (e.g. git) when creating new projects. - ,configGhcOptionsByName :: !(Map PackageName [Text]) - -- ^ Additional GHC options to apply to specific packages. - ,configGhcOptionsByCat :: !(Map ApplyGhcOptions [Text]) - -- ^ Additional GHC options to apply to categories of packages - ,configCabalConfigOpts :: !(Map CabalConfigKey [Text]) - -- ^ Additional options to be passed to ./Setup.hs configure - ,configSetupInfoLocations :: ![String] - -- ^ URLs or paths to stack-setup.yaml files, for finding tools. - -- If none present, the default setup-info is used. - ,configSetupInfoInline :: !SetupInfo - -- ^ Additional SetupInfo to use to find tools. 
- ,configPvpBounds :: !PvpBounds - -- ^ How PVP upper bounds should be added to packages - ,configModifyCodePage :: !Bool - -- ^ Force the code page to UTF-8 on Windows - ,configRebuildGhcOptions :: !Bool - -- ^ Rebuild on GHC options changes - ,configApplyGhcOptions :: !ApplyGhcOptions - -- ^ Which packages to ghc-options on the command line apply to? - ,configAllowNewer :: !Bool - -- ^ Ignore version ranges in .cabal files. Funny naming chosen to - -- match cabal. - ,configDefaultTemplate :: !(Maybe TemplateName) - -- ^ The default template to use when none is specified. - -- (If Nothing, the default default is used.) - ,configAllowDifferentUser :: !Bool - -- ^ Allow users other than the stack root owner to use the stack - -- installation. - ,configDumpLogs :: !DumpLogs - -- ^ Dump logs of local non-dependencies when doing a build. - ,configProject :: !(ProjectConfig (Project, Path Abs File)) - -- ^ Project information and stack.yaml file location - ,configAllowLocals :: !Bool - -- ^ Are we allowed to build local packages? The script - -- command disallows this. - ,configSaveHackageCreds :: !Bool - -- ^ Should we save Hackage credentials to a file? - ,configHackageBaseUrl :: !Text - -- ^ Hackage base URL used when uploading packages - ,configRunner :: !Runner - ,configPantryConfig :: !PantryConfig - ,configStackRoot :: !(Path Abs Dir) - ,configResolver :: !(Maybe AbstractResolver) - -- ^ Any resolver override from the command line - ,configUserStorage :: !UserStorage - -- ^ Database connection pool for user Stack database - ,configHideSourcePaths :: !Bool - -- ^ Enable GHC hiding source paths? - ,configRecommendUpgrade :: !Bool - -- ^ Recommend a Stack upgrade? - ,configStackDeveloperMode :: !Bool - -- ^ Turn on Stack developer mode for additional messages? - } - --- | A bit of type safety to ensure we're talking to the right database. 
-newtype UserStorage = UserStorage - { unUserStorage :: Storage - } - --- | A bit of type safety to ensure we're talking to the right database. -newtype ProjectStorage = ProjectStorage - { unProjectStorage :: Storage +data Config = Config + { workDir :: !(Path Rel Dir) + -- ^ this allows to override .stack-work directory + , userGlobalConfigFile :: !(Path Abs File) + -- ^ The user-specific global configuration file. + , build :: !BuildOpts + -- ^ Build configuration + , docker :: !DockerOpts + -- ^ Docker configuration + , nix :: !NixOpts + -- ^ Execution environment (e.g nix-shell) configuration + , processContextSettings :: !(EnvSettings -> IO ProcessContext) + -- ^ Environment variables to be passed to external tools + , localProgramsBase :: !(Path Abs Dir) + -- ^ Non-platform-specific path containing local installations + , localPrograms :: !(Path Abs Dir) + -- ^ Path containing local installations (mainly GHC) + , hideTHLoading :: !Bool + -- ^ Hide the Template Haskell "Loading package ..." messages from the + -- console + , prefixTimestamps :: !Bool + -- ^ Prefix build output with timestamps for each line. + , platform :: !Platform + -- ^ The platform we're building for, used in many directory names + , platformVariant :: !PlatformVariant + -- ^ Variant of the platform, also used in directory names + , ghcVariant :: !(Maybe GHCVariant) + -- ^ The variant of GHC requested by the user. + , ghcBuild :: !(Maybe CompilerBuild) + -- ^ Override build of the compiler distribution (e.g. standard, gmp4, + -- tinfo6) + , latestSnapshot :: !Text + -- ^ URL of a JSON file providing the latest LTS and Nightly snapshots. + , recentSnapshots :: !Text + -- ^ URL of a JSON file providing recently-published snapshots. + , systemGHC :: !Bool + -- ^ Should we use the system-installed GHC (on the PATH) if + -- available? Can be overridden by command line options. + , installGHC :: !Bool + -- ^ Should we automatically install GHC if missing or the wrong + -- version is available? 
Can be overridden by command line options. + , installMsys :: !Bool + -- ^ On Windows, should we automatically install MSYS2 if missing? Can be + -- overridden by command line options. + , skipGHCCheck :: !Bool + -- ^ Don't bother checking the GHC version or architecture. + , skipMsys :: !Bool + -- ^ On Windows: don't use a sandboxed MSYS + , msysEnvironment :: !(Maybe MsysEnvironment) + -- ^ On Windows: what MSYS2 environment to apply. Nothing on other operating + -- systems. + , compilerCheck :: !VersionCheck + -- ^ Specifies which versions of the compiler are acceptable. + , compilerRepository :: !CompilerRepository + -- ^ Specifies the repository containing the compiler sources + , compilerTarget :: !CompilerTarget + -- ^ Specifies the Hadrian build target + , compilerBindistPath :: !CompilerBindistPath + -- ^ Specifies the Hadrian path to built binary distribution + , localBin :: !(Path Abs Dir) + -- ^ Directory we should install executables into + , fileWatchHook :: !(Maybe (Path Abs File)) + -- ^ Optional path of executable used to override --file-watch + -- post-processing. + , requireStackVersion :: !VersionRange + -- ^ Require a version of Stack within this range. + , jobs :: !Int + -- ^ How many concurrent jobs to run, defaults to number of capabilities + , overrideGccPath :: !(Maybe (Path Abs File)) + -- ^ Optional gcc override path + , extraIncludeDirs :: ![FilePath] + -- ^ --extra-include-dirs arguments + , extraLibDirs :: ![FilePath] + -- ^ --extra-lib-dirs arguments + , customPreprocessorExts :: ![Text] + -- ^ List of custom preprocessors to complete the hard coded ones + , concurrentTests :: !Bool + -- ^ Run test suites concurrently + , templateParams :: !(Map Text Text) + -- ^ Parameters for templates. + , scmInit :: !(Maybe SCM) + -- ^ Initialize SCM (e.g. git) when creating new projects. + , ghcOptionsByName :: !(Map PackageName [Text]) + -- ^ Additional GHC options to apply to specific packages. 
+ , ghcOptionsByCat :: !(Map ApplyGhcOptions [Text]) + -- ^ Additional GHC options to apply to categories of packages + , cabalConfigOpts :: !(Map CabalConfigKey [Text]) + -- ^ Additional options to be passed to ./Setup.hs configure + , setupInfoLocations :: ![String] + -- ^ URLs or paths to stack-setup.yaml files, for finding tools. + -- If none present, the default setup-info is used. + , setupInfoInline :: !SetupInfo + -- ^ Additional SetupInfo to use to find tools. + , pvpBounds :: !PvpBounds + -- ^ How PVP upper bounds should be added to packages + , modifyCodePage :: !Bool + -- ^ Force the code page to UTF-8 on Windows + , rebuildGhcOptions :: !Bool + -- ^ Rebuild on GHC options changes + , applyGhcOptions :: !ApplyGhcOptions + -- ^ Which packages do --ghc-options on the command line apply to? + , applyProgOptions :: !ApplyProgOptions + -- ^ Which packages do all and any --PROG-option options on the command line + -- apply to? + , allowNewer :: !(First Bool) + -- ^ Ignore version ranges in .cabal files. Funny naming chosen to + -- match cabal. + , allowNewerDeps :: !(Maybe [PackageName]) + -- ^ Ignore dependency upper and lower bounds only for specified + -- packages. No effect unless allow-newer is enabled. + , defaultInitSnapshot :: !(First AbstractSnapshot) + -- ^ An optional default snapshot to use with @stack init@ when none is + -- specified at the command line. + , defaultTemplate :: !(Maybe TemplateName) + -- ^ The default template to use when none is specified. + -- (If Nothing, the 'default' default template is used.) + , allowDifferentUser :: !Bool + -- ^ Allow users other than the Stack root owner to use the Stack + -- installation. + , dumpLogs :: !DumpLogs + -- ^ Dump logs of local non-dependencies when doing a build. + , project :: !(ProjectConfig (Project, Path Abs File)) + -- ^ Project information and stack.yaml file location + , allowLocals :: !Bool + -- ^ Are we allowed to build local packages? The script + -- command disallows this. 
+ , saveHackageCreds :: !FirstTrue + -- ^ Should we save Hackage credentials to a file? + , hackageBaseUrl :: !Text + -- ^ Hackage base URL used when uploading packages + , runner :: !Runner + , pantryConfig :: !PantryConfig + , stackRoot :: !(Path Abs Dir) + , snapshot :: !(Maybe AbstractSnapshot) + -- ^ Any snapshot override from the command line + , userStorage :: !UserStorage + -- ^ Database connection pool for user Stack database + , hideSourcePaths :: !Bool + -- ^ Enable GHC hiding source paths? + , recommendStackUpgrade :: !Bool + -- ^ Recommend a Stack upgrade? + , notifyIfNixOnPath :: !Bool + -- ^ Notify if the Nix package manager (nix) is on the PATH, but + -- Stack's Nix integration is not enabled? + , notifyIfGhcUntested :: !Bool + -- ^ Notify if Stack has not been tested with the GHC version? + , notifyIfCabalUntested :: !Bool + -- ^ Notify if Stack has not been tested with the Cabal version? + , notifyIfArchUnknown :: !Bool + -- ^ Notify if the specified machine architecture is unknown to Cabal (the + -- library)? + , notifyIfNoRunTests :: !Bool + -- ^ Notify if the --no-run-tests flag has prevented the running of a + -- targeted test suite? + , notifyIfNoRunBenchmarks :: !Bool + -- ^ Notify if the --no-run-benchmarks flag has prevented the running of a + -- targeted benchmark? + , notifyIfBaseNotBoot :: !Bool + -- ^ Notify if the specified base package is other than the GHC boot + -- package? + , noRunCompile :: !Bool + -- ^ Use --no-run and --compile options when using `stack script` + , stackDeveloperMode :: !Bool + -- ^ Turn on Stack developer mode for additional messages? + , casa :: !(Maybe (CasaRepoPrefix, Int)) + -- ^ Optional Casa configuration } -- | The project root directory, if in a project. 
configProjectRoot :: Config -> Maybe (Path Abs Dir) configProjectRoot c = - case configProject c of + case c.project of PCProject (_, fp) -> Just $ parent fp PCGlobalProject -> Nothing PCNoProject _deps -> Nothing --- | Which packages do configure opts apply to? -data CabalConfigKey - = CCKTargets -- ^ See AGOTargets - | CCKLocals -- ^ See AGOLocals - | CCKEverything -- ^ See AGOEverything - | CCKPackage !PackageName -- ^ A specific package - deriving (Show, Read, Eq, Ord) -instance FromJSON CabalConfigKey where - parseJSON = withText "CabalConfigKey" parseCabalConfigKey -instance FromJSONKey CabalConfigKey where - fromJSONKey = FromJSONKeyTextParser parseCabalConfigKey - -parseCabalConfigKey :: (Monad m, MonadFail m) => Text -> m CabalConfigKey -parseCabalConfigKey "$targets" = pure CCKTargets -parseCabalConfigKey "$locals" = pure CCKLocals -parseCabalConfigKey "$everything" = pure CCKEverything -parseCabalConfigKey name = - case parsePackageName $ T.unpack name of - Nothing -> fail $ "Invalid CabalConfigKey: " ++ show name - Just x -> pure $ CCKPackage x - --- | Which packages do ghc-options on the command line apply to? 
-data ApplyGhcOptions = AGOTargets -- ^ all local targets - | AGOLocals -- ^ all local packages, even non-targets - | AGOEverything -- ^ every package - deriving (Show, Read, Eq, Ord, Enum, Bounded) - -instance FromJSON ApplyGhcOptions where - parseJSON = withText "ApplyGhcOptions" $ \t -> - case t of - "targets" -> return AGOTargets - "locals" -> return AGOLocals - "everything" -> return AGOEverything - _ -> fail $ "Invalid ApplyGhcOptions: " ++ show t - --- | Which build log files to dump -data DumpLogs - = DumpNoLogs -- ^ don't dump any logfiles - | DumpWarningLogs -- ^ dump logfiles containing warnings - | DumpAllLogs -- ^ dump all logfiles - deriving (Show, Read, Eq, Ord, Enum, Bounded) - -instance FromJSON DumpLogs where - parseJSON (Bool True) = return DumpAllLogs - parseJSON (Bool False) = return DumpNoLogs - parseJSON v = - withText - "DumpLogs" - (\t -> - if | t == "none" -> return DumpNoLogs - | t == "warning" -> return DumpWarningLogs - | t == "all" -> return DumpAllLogs - | otherwise -> fail ("Invalid DumpLogs: " ++ show t)) - v - --- | Controls which version of the environment is used -data EnvSettings = EnvSettings - { esIncludeLocals :: !Bool - -- ^ include local project bin directory, GHC_PACKAGE_PATH, etc - , esIncludeGhcPackagePath :: !Bool - -- ^ include the GHC_PACKAGE_PATH variable - , esStackExe :: !Bool - -- ^ set the STACK_EXE variable to the current executable name - , esLocaleUtf8 :: !Bool - -- ^ set the locale to C.UTF-8 - , esKeepGhcRts :: !Bool - -- ^ if True, keep GHCRTS variable in environment - } - deriving (Show, Eq, Ord) - -data ExecOpts = ExecOpts - { eoCmd :: !SpecialExecCmd - , eoArgs :: ![String] - , eoExtra :: !ExecOptsExtra - } deriving (Show) - -data SpecialExecCmd - = ExecCmd String - | ExecRun - | ExecGhc - | ExecRunGhc - deriving (Show, Eq) - -data ExecOptsExtra = ExecOptsExtra - { eoEnvSettings :: !EnvSettings - , eoPackages :: ![String] - , eoRtsOptions :: ![String] - , eoCwd :: !(Maybe FilePath) - } - deriving (Show) 
- -data EvalOpts = EvalOpts - { evalArg :: !String - , evalExtra :: !ExecOptsExtra - } deriving (Show) - --- | Parsed global command-line options. -data GlobalOpts = GlobalOpts - { globalReExecVersion :: !(Maybe String) -- ^ Expected re-exec in container version - , globalDockerEntrypoint :: !(Maybe DockerEntrypoint) - -- ^ Data used when stack is acting as a Docker entrypoint (internal use only) - , globalLogLevel :: !LogLevel -- ^ Log level - , globalTimeInLog :: !Bool -- ^ Whether to include timings in logs. - , globalConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig' - , globalResolver :: !(Maybe AbstractResolver) -- ^ Resolver override - , globalCompiler :: !(Maybe WantedCompiler) -- ^ Compiler override - , globalTerminal :: !Bool -- ^ We're in a terminal? - , globalStylesUpdate :: !StylesUpdate -- ^ SGR (Ansi) codes for styles - , globalTermWidth :: !(Maybe Int) -- ^ Terminal width override - , globalStackYaml :: !StackYamlLoc -- ^ Override project stack.yaml - , globalLockFileBehavior :: !LockFileBehavior - } deriving (Show) - --- | Location for the project's stack.yaml file. -data StackYamlLoc - = SYLDefault - -- ^ Use the standard parent-directory-checking logic - | SYLOverride !(Path Abs File) - -- ^ Use a specific stack.yaml file provided - | SYLNoProject ![PackageIdentifierRevision] - -- ^ Do not load up a project, just user configuration. Include - -- the given extra dependencies with the resolver. - | SYLGlobalProject - -- ^ Do not look for a project configuration, and use the implicit global. 
- deriving Show - -stackYamlLocL :: HasRunner env => Lens' env StackYamlLoc -stackYamlLocL = globalOptsL.lens globalStackYaml (\x y -> x { globalStackYaml = y }) - --- | How to interact with lock files -data LockFileBehavior - = LFBReadWrite - -- ^ Read and write lock files - | LFBReadOnly - -- ^ Read lock files, but do not write them - | LFBIgnore - -- ^ Entirely ignore lock files - | LFBErrorOnWrite - -- ^ Error out on trying to write a lock file. This can be used to - -- ensure that lock files in a repository already ensure - -- reproducible builds. - deriving (Show, Enum, Bounded) - -lockFileBehaviorL :: HasRunner env => SimpleGetter env LockFileBehavior -lockFileBehaviorL = globalOptsL.to globalLockFileBehavior - --- | Parser for 'LockFileBehavior' -readLockFileBehavior :: ReadM LockFileBehavior -readLockFileBehavior = do - s <- OA.readerAsk - case Map.lookup s m of - Just x -> pure x - Nothing -> OA.readerError $ "Invalid lock file behavior, valid options: " ++ - List.intercalate ", " (Map.keys m) - where - m = Map.fromList $ map (\x -> (render x, x)) [minBound..maxBound] - render LFBReadWrite = "read-write" - render LFBReadOnly = "read-only" - render LFBIgnore = "ignore" - render LFBErrorOnWrite = "error-on-write" - --- | Project configuration information. Not every run of Stack has a --- true local project; see constructors below. -data ProjectConfig a - = PCProject a - -- ^ Normal run: we want a project, and have one. This comes from - -- either 'SYLDefault' or 'SYLOverride'. - | PCGlobalProject - -- ^ No project was found when using 'SYLDefault'. Instead, use - -- the implicit global. - | PCNoProject ![PackageIdentifierRevision] - -- ^ Use a no project run. This comes from 'SYLNoProject'. - --- | Parsed global command-line options monoid. 
-data GlobalOptsMonoid = GlobalOptsMonoid - { globalMonoidReExecVersion :: !(First String) -- ^ Expected re-exec in container version - , globalMonoidDockerEntrypoint :: !(First DockerEntrypoint) - -- ^ Data used when stack is acting as a Docker entrypoint (internal use only) - , globalMonoidLogLevel :: !(First LogLevel) -- ^ Log level - , globalMonoidTimeInLog :: !FirstTrue -- ^ Whether to include timings in logs. - , globalMonoidConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig' - , globalMonoidResolver :: !(First (Unresolved AbstractResolver)) -- ^ Resolver override - , globalMonoidResolverRoot :: !(First FilePath) -- ^ root directory for resolver relative path - , globalMonoidCompiler :: !(First WantedCompiler) -- ^ Compiler override - , globalMonoidTerminal :: !(First Bool) -- ^ We're in a terminal? - , globalMonoidStyles :: !StylesUpdate -- ^ Stack's output styles - , globalMonoidTermWidth :: !(First Int) -- ^ Terminal width override - , globalMonoidStackYaml :: !(First FilePath) -- ^ Override project stack.yaml - , globalMonoidLockFileBehavior :: !(First LockFileBehavior) -- ^ See 'globalLockFileBehavior' - } deriving Generic - -instance Semigroup GlobalOptsMonoid where - (<>) = mappenddefault - -instance Monoid GlobalOptsMonoid where - mempty = memptydefault - mappend = (<>) - --- | Default logging level should be something useful but not crazy. -defaultLogLevel :: LogLevel -defaultLogLevel = LevelInfo - -readColorWhen :: ReadM ColorWhen -readColorWhen = do - s <- OA.readerAsk - case s of - "never" -> return ColorNever - "always" -> return ColorAlways - "auto" -> return ColorAuto - _ -> OA.readerError "Expected values of color option are 'never', 'always', or 'auto'." - -readStyles :: ReadM StylesUpdate -readStyles = parseStylesUpdateFromString <$> OA.readerAsk - --- | A superset of 'Config' adding information on how to build code. 
The reason --- for this breakdown is because we will need some of the information from --- 'Config' in order to determine the values here. --- --- These are the components which know nothing about local configuration. -data BuildConfig = BuildConfig - { bcConfig :: !Config - , bcSMWanted :: !SMWanted - , bcExtraPackageDBs :: ![Path Abs Dir] - -- ^ Extra package databases - , bcStackYaml :: !(Path Abs File) - -- ^ Location of the stack.yaml file. - -- - -- Note: if the STACK_YAML environment variable is used, this may be - -- different from projectRootL "stack.yaml" if a different file - -- name is used. - , bcProjectStorage :: !ProjectStorage - -- ^ Database connection pool for project Stack database - , bcCurator :: !(Maybe Curator) - } - -stackYamlL :: HasBuildConfig env => Lens' env (Path Abs File) -stackYamlL = buildConfigL.lens bcStackYaml (\x y -> x { bcStackYaml = y }) - --- | Directory containing the project's stack.yaml file -projectRootL :: HasBuildConfig env => Getting r env (Path Abs Dir) -projectRootL = stackYamlL.to parent - --- | Configuration after the environment has been setup. -data EnvConfig = EnvConfig - {envConfigBuildConfig :: !BuildConfig - ,envConfigBuildOptsCLI :: !BuildOptsCLI - ,envConfigSourceMap :: !SourceMap - ,envConfigSourceMapHash :: !SourceMapHash - ,envConfigCompilerPaths :: !CompilerPaths - } - -ppGPD :: MonadIO m => ProjectPackage -> m GenericPackageDescription -ppGPD = liftIO . cpGPD . ppCommon - --- | Root directory for the given 'ProjectPackage' -ppRoot :: ProjectPackage -> Path Abs Dir -ppRoot = parent . 
ppCabalFP - --- | All components available in the given 'ProjectPackage' -ppComponents :: MonadIO m => ProjectPackage -> m (Set NamedComponent) -ppComponents pp = do - gpd <- ppGPD pp - pure $ Set.fromList $ concat - [ maybe [] (const [CLib]) (C.condLibrary gpd) - , go CExe (fst <$> C.condExecutables gpd) - , go CTest (fst <$> C.condTestSuites gpd) - , go CBench (fst <$> C.condBenchmarks gpd) - ] - where - go :: (T.Text -> NamedComponent) - -> [C.UnqualComponentName] - -> [NamedComponent] - go wrapper = map (wrapper . T.pack . C.unUnqualComponentName) - --- | Version for the given 'ProjectPackage -ppVersion :: MonadIO m => ProjectPackage -> m Version -ppVersion = fmap gpdVersion . ppGPD - --- | A project is a collection of packages. We can have multiple stack.yaml --- files, but only one of them may contain project information. -data Project = Project - { projectUserMsg :: !(Maybe String) - -- ^ A warning message to display to the user when the auto generated - -- config may have issues. - , projectPackages :: ![RelFilePath] - -- ^ Packages which are actually part of the project (as opposed - -- to dependencies). - , projectDependencies :: ![RawPackageLocation] - -- ^ Dependencies defined within the stack.yaml file, to be - -- applied on top of the snapshot. - , projectFlags :: !(Map PackageName (Map FlagName Bool)) - -- ^ Flags to be applied on top of the snapshot flags. - , projectResolver :: !RawSnapshotLocation - -- ^ How we resolve which @Snapshot@ to use - , projectCompiler :: !(Maybe WantedCompiler) - -- ^ Override the compiler in 'projectResolver' - , projectExtraPackageDBs :: ![FilePath] - , projectCurator :: !(Maybe Curator) - -- ^ Extra configuration intended exclusively for usage by the - -- curator tool. In other words, this is /not/ part of the - -- documented and exposed Stack API. SUBJECT TO CHANGE. - , projectDropPackages :: !(Set PackageName) - -- ^ Packages to drop from the 'projectResolver'. 
- } - deriving Show - -instance ToJSON Project where - -- Expanding the constructor fully to ensure we don't miss any fields. - toJSON (Project userMsg packages extraDeps flags resolver mcompiler extraPackageDBs mcurator drops) = object $ concat - [ maybe [] (\cv -> ["compiler" .= cv]) mcompiler - , maybe [] (\msg -> ["user-message" .= msg]) userMsg - , if null extraPackageDBs then [] else ["extra-package-dbs" .= extraPackageDBs] - , if null extraDeps then [] else ["extra-deps" .= extraDeps] - , if Map.null flags then [] else ["flags" .= fmap toCabalStringMap (toCabalStringMap flags)] - , ["packages" .= packages] - , ["resolver" .= resolver] - , maybe [] (\c -> ["curator" .= c]) mcurator - , if Set.null drops then [] else ["drop-packages" .= Set.map CabalString drops] - ] - --- | Extra configuration intended exclusively for usage by the --- curator tool. In other words, this is /not/ part of the --- documented and exposed Stack API. SUBJECT TO CHANGE. -data Curator = Curator - { curatorSkipTest :: !(Set PackageName) - , curatorExpectTestFailure :: !(Set PackageName) - , curatorSkipBenchmark :: !(Set PackageName) - , curatorExpectBenchmarkFailure :: !(Set PackageName) - , curatorSkipHaddock :: !(Set PackageName) - , curatorExpectHaddockFailure :: !(Set PackageName) - } - deriving Show -instance ToJSON Curator where - toJSON c = object - [ "skip-test" .= Set.map CabalString (curatorSkipTest c) - , "expect-test-failure" .= Set.map CabalString (curatorExpectTestFailure c) - , "skip-bench" .= Set.map CabalString (curatorSkipBenchmark c) - , "expect-benchmark-failure" .= Set.map CabalString (curatorExpectTestFailure c) - , "skip-haddock" .= Set.map CabalString (curatorSkipHaddock c) - , "expect-test-failure" .= Set.map CabalString (curatorExpectHaddockFailure c) - ] -instance FromJSON (WithJSONWarnings Curator) where - parseJSON = withObjectWarnings "Curator" $ \o -> Curator - <$> fmap (Set.map unCabalString) (o ..:? 
"skip-test" ..!= mempty) - <*> fmap (Set.map unCabalString) (o ..:? "expect-test-failure" ..!= mempty) - <*> fmap (Set.map unCabalString) (o ..:? "skip-bench" ..!= mempty) - <*> fmap (Set.map unCabalString) (o ..:? "expect-benchmark-failure" ..!= mempty) - <*> fmap (Set.map unCabalString) (o ..:? "skip-haddock" ..!= mempty) - <*> fmap (Set.map unCabalString) (o ..:? "expect-haddock-failure" ..!= mempty) - --- An uninterpreted representation of configuration options. --- Configurations may be "cascaded" using mappend (left-biased). -data ConfigMonoid = - ConfigMonoid - { configMonoidStackRoot :: !(First (Path Abs Dir)) - -- ^ See: 'clStackRoot' - , configMonoidWorkDir :: !(First (Path Rel Dir)) - -- ^ See: 'configWorkDir'. - , configMonoidBuildOpts :: !BuildOptsMonoid - -- ^ build options. - , configMonoidDockerOpts :: !DockerOptsMonoid - -- ^ Docker options. - , configMonoidNixOpts :: !NixOptsMonoid - -- ^ Options for the execution environment (nix-shell or container) - , configMonoidConnectionCount :: !(First Int) - -- ^ See: 'configConnectionCount' - , configMonoidHideTHLoading :: !FirstTrue - -- ^ See: 'configHideTHLoading' - , configMonoidPrefixTimestamps :: !(First Bool) - -- ^ See: 'configPrefixTimestamps' - , configMonoidLatestSnapshot :: !(First Text) - -- ^ See: 'configLatestSnapshot' - , configMonoidPackageIndices :: !(First [HackageSecurityConfig]) - -- ^ See: @picIndices@ - , configMonoidSystemGHC :: !(First Bool) - -- ^ See: 'configSystemGHC' - ,configMonoidInstallGHC :: !FirstTrue - -- ^ See: 'configInstallGHC' - ,configMonoidSkipGHCCheck :: !FirstFalse - -- ^ See: 'configSkipGHCCheck' - ,configMonoidSkipMsys :: !FirstFalse - -- ^ See: 'configSkipMsys' - ,configMonoidCompilerCheck :: !(First VersionCheck) - -- ^ See: 'configCompilerCheck' - ,configMonoidCompilerRepository :: !(First CompilerRepository) - -- ^ See: 'configCompilerRepository' - ,configMonoidRequireStackVersion :: !IntersectingVersionRange - -- ^ See: 'configRequireStackVersion' - 
,configMonoidArch :: !(First String) - -- ^ Used for overriding the platform - ,configMonoidGHCVariant :: !(First GHCVariant) - -- ^ Used for overriding the platform - ,configMonoidGHCBuild :: !(First CompilerBuild) - -- ^ Used for overriding the GHC build - ,configMonoidJobs :: !(First Int) - -- ^ See: 'configJobs' - ,configMonoidExtraIncludeDirs :: ![FilePath] - -- ^ See: 'configExtraIncludeDirs' - ,configMonoidExtraLibDirs :: ![FilePath] - -- ^ See: 'configExtraLibDirs' - , configMonoidOverrideGccPath :: !(First (Path Abs File)) - -- ^ Allow users to override the path to gcc - ,configMonoidOverrideHpack :: !(First FilePath) - -- ^ Use Hpack executable (overrides bundled Hpack) - ,configMonoidConcurrentTests :: !(First Bool) - -- ^ See: 'configConcurrentTests' - ,configMonoidLocalBinPath :: !(First FilePath) - -- ^ Used to override the binary installation dir - ,configMonoidTemplateParameters :: !(Map Text Text) - -- ^ Template parameters. - ,configMonoidScmInit :: !(First SCM) - -- ^ Initialize SCM (e.g. git init) when making new projects? - ,configMonoidGhcOptionsByName :: !(MonoidMap PackageName (Monoid.Dual [Text])) - -- ^ See 'configGhcOptionsByName'. Uses 'Monoid.Dual' so that - -- options from the configs on the right come first, so that they - -- can be overridden. - ,configMonoidGhcOptionsByCat :: !(MonoidMap ApplyGhcOptions (Monoid.Dual [Text])) - -- ^ See 'configGhcOptionsAll'. Uses 'Monoid.Dual' so that options - -- from the configs on the right come first, so that they can be - -- overridden. - ,configMonoidCabalConfigOpts :: !(MonoidMap CabalConfigKey (Monoid.Dual [Text])) - -- ^ See 'configCabalConfigOpts'. 
- ,configMonoidExtraPath :: ![Path Abs Dir] - -- ^ Additional paths to search for executables in - ,configMonoidSetupInfoLocations :: ![String] - -- ^ See 'configSetupInfoLocations' - ,configMonoidSetupInfoInline :: !SetupInfo - -- ^ See 'configSetupInfoInline' - ,configMonoidLocalProgramsBase :: !(First (Path Abs Dir)) - -- ^ Override the default local programs dir, where e.g. GHC is installed. - ,configMonoidPvpBounds :: !(First PvpBounds) - -- ^ See 'configPvpBounds' - ,configMonoidModifyCodePage :: !FirstTrue - -- ^ See 'configModifyCodePage' - ,configMonoidRebuildGhcOptions :: !FirstFalse - -- ^ See 'configMonoidRebuildGhcOptions' - ,configMonoidApplyGhcOptions :: !(First ApplyGhcOptions) - -- ^ See 'configApplyGhcOptions' - ,configMonoidAllowNewer :: !(First Bool) - -- ^ See 'configMonoidAllowNewer' - ,configMonoidDefaultTemplate :: !(First TemplateName) - -- ^ The default template to use when none is specified. - -- (If Nothing, the default default is used.) - , configMonoidAllowDifferentUser :: !(First Bool) - -- ^ Allow users other than the stack root owner to use the stack - -- installation. 
- , configMonoidDumpLogs :: !(First DumpLogs) - -- ^ See 'configDumpLogs' - , configMonoidSaveHackageCreds :: !(First Bool) - -- ^ See 'configSaveHackageCreds' - , configMonoidHackageBaseUrl :: !(First Text) - -- ^ See 'configHackageBaseUrl' - , configMonoidColorWhen :: !(First ColorWhen) - -- ^ When to use 'ANSI' colors - , configMonoidStyles :: !StylesUpdate - , configMonoidHideSourcePaths :: !FirstTrue - -- ^ See 'configHideSourcePaths' - , configMonoidRecommendUpgrade :: !FirstTrue - -- ^ See 'configRecommendUpgrade' - , configMonoidCasaRepoPrefix :: !(First CasaRepoPrefix) - , configMonoidSnapshotLocation :: !(First Text) - -- ^ Custom location of LTS/Nightly snapshots - , configMonoidStackDeveloperMode :: !(First Bool) - -- ^ See 'configStackDeveloperMode' - } - deriving (Show, Generic) - -instance Semigroup ConfigMonoid where - (<>) = mappenddefault - -instance Monoid ConfigMonoid where - mempty = memptydefault - mappend = (<>) - -parseConfigMonoid :: Path Abs Dir -> Value -> Yaml.Parser (WithJSONWarnings ConfigMonoid) -parseConfigMonoid = withObjectWarnings "ConfigMonoid" . parseConfigMonoidObject - --- | Parse a partial configuration. Used both to parse both a standalone config --- file and a project file, so that a sub-parser is not required, which would interfere with --- warnings for missing fields. -parseConfigMonoidObject :: Path Abs Dir -> Object -> WarningParser ConfigMonoid -parseConfigMonoidObject rootDir obj = do - -- Parsing 'stackRoot' from 'stackRoot'/config.yaml would be nonsensical - let configMonoidStackRoot = First Nothing - configMonoidWorkDir <- First <$> obj ..:? configMonoidWorkDirName - configMonoidBuildOpts <- jsonSubWarnings (obj ..:? configMonoidBuildOptsName ..!= mempty) - configMonoidDockerOpts <- jsonSubWarnings (obj ..:? configMonoidDockerOptsName ..!= mempty) - configMonoidNixOpts <- jsonSubWarnings (obj ..:? configMonoidNixOptsName ..!= mempty) - configMonoidConnectionCount <- First <$> obj ..:? 
configMonoidConnectionCountName - configMonoidHideTHLoading <- FirstTrue <$> obj ..:? configMonoidHideTHLoadingName - configMonoidPrefixTimestamps <- First <$> obj ..:? configMonoidPrefixTimestampsName - - murls :: Maybe Value <- obj ..:? configMonoidUrlsName - configMonoidLatestSnapshot <- - case murls of - Nothing -> pure $ First Nothing - Just urls -> jsonSubWarnings $ lift $ withObjectWarnings - "urls" - (\o -> First <$> o ..:? "latest-snapshot" :: WarningParser (First Text)) - urls - - configMonoidPackageIndices <- First <$> jsonSubWarningsTT (obj ..:? configMonoidPackageIndicesName) - configMonoidSystemGHC <- First <$> obj ..:? configMonoidSystemGHCName - configMonoidInstallGHC <- FirstTrue <$> obj ..:? configMonoidInstallGHCName - configMonoidSkipGHCCheck <- FirstFalse <$> obj ..:? configMonoidSkipGHCCheckName - configMonoidSkipMsys <- FirstFalse <$> obj ..:? configMonoidSkipMsysName - configMonoidRequireStackVersion <- IntersectingVersionRange . unVersionRangeJSON <$> ( - obj ..:? configMonoidRequireStackVersionName - ..!= VersionRangeJSON anyVersion) - configMonoidArch <- First <$> obj ..:? configMonoidArchName - configMonoidGHCVariant <- First <$> obj ..:? configMonoidGHCVariantName - configMonoidGHCBuild <- First <$> obj ..:? configMonoidGHCBuildName - configMonoidJobs <- First <$> obj ..:? configMonoidJobsName - configMonoidExtraIncludeDirs <- map (toFilePath rootDir FilePath.) <$> - obj ..:? configMonoidExtraIncludeDirsName ..!= [] - configMonoidExtraLibDirs <- map (toFilePath rootDir FilePath.) <$> - obj ..:? configMonoidExtraLibDirsName ..!= [] - configMonoidOverrideGccPath <- First <$> obj ..:? configMonoidOverrideGccPathName - configMonoidOverrideHpack <- First <$> obj ..:? configMonoidOverrideHpackName - configMonoidConcurrentTests <- First <$> obj ..:? configMonoidConcurrentTestsName - configMonoidLocalBinPath <- First <$> obj ..:? configMonoidLocalBinPathName - templates <- obj ..:? 
"templates" - (configMonoidScmInit,configMonoidTemplateParameters) <- - case templates of - Nothing -> return (First Nothing,M.empty) - Just tobj -> do - scmInit <- tobj ..:? configMonoidScmInitName - params <- tobj ..:? configMonoidTemplateParametersName - return (First scmInit,fromMaybe M.empty params) - configMonoidCompilerCheck <- First <$> obj ..:? configMonoidCompilerCheckName - configMonoidCompilerRepository <- First <$> (obj ..:? configMonoidCompilerRepositoryName) - - options <- Map.map unGhcOptions <$> obj ..:? configMonoidGhcOptionsName ..!= mempty - - optionsEverything <- - case (Map.lookup GOKOldEverything options, Map.lookup GOKEverything options) of - (Just _, Just _) -> fail "Cannot specify both `*` and `$everything` GHC options" - (Nothing, Just x) -> return x - (Just x, Nothing) -> do - tell "The `*` ghc-options key is not recommended. Consider using $locals, or if really needed, $everything" - return x - (Nothing, Nothing) -> return [] - - let configMonoidGhcOptionsByCat = coerce $ Map.fromList - [ (AGOEverything, optionsEverything) - , (AGOLocals, Map.findWithDefault [] GOKLocals options) - , (AGOTargets, Map.findWithDefault [] GOKTargets options) - ] - - configMonoidGhcOptionsByName = coerce $ Map.fromList - [(name, opts) | (GOKPackage name, opts) <- Map.toList options] - - configMonoidCabalConfigOpts' <- obj ..:? "configure-options" ..!= mempty - let configMonoidCabalConfigOpts = coerce (configMonoidCabalConfigOpts' :: Map CabalConfigKey [Text]) - - configMonoidExtraPath <- obj ..:? configMonoidExtraPathName ..!= [] - configMonoidSetupInfoLocations <- obj ..:? configMonoidSetupInfoLocationsName ..!= [] - configMonoidSetupInfoInline <- jsonSubWarningsT (obj ..:? configMonoidSetupInfoInlineName) ..!= mempty - configMonoidLocalProgramsBase <- First <$> obj ..:? configMonoidLocalProgramsBaseName - configMonoidPvpBounds <- First <$> obj ..:? configMonoidPvpBoundsName - configMonoidModifyCodePage <- FirstTrue <$> obj ..:? 
configMonoidModifyCodePageName - configMonoidRebuildGhcOptions <- FirstFalse <$> obj ..:? configMonoidRebuildGhcOptionsName - configMonoidApplyGhcOptions <- First <$> obj ..:? configMonoidApplyGhcOptionsName - configMonoidAllowNewer <- First <$> obj ..:? configMonoidAllowNewerName - configMonoidDefaultTemplate <- First <$> obj ..:? configMonoidDefaultTemplateName - configMonoidAllowDifferentUser <- First <$> obj ..:? configMonoidAllowDifferentUserName - configMonoidDumpLogs <- First <$> obj ..:? configMonoidDumpLogsName - configMonoidSaveHackageCreds <- First <$> obj ..:? configMonoidSaveHackageCredsName - configMonoidHackageBaseUrl <- First <$> obj ..:? configMonoidHackageBaseUrlName - - configMonoidColorWhenUS <- obj ..:? configMonoidColorWhenUSName - configMonoidColorWhenGB <- obj ..:? configMonoidColorWhenGBName - let configMonoidColorWhen = First $ configMonoidColorWhenUS - <|> configMonoidColorWhenGB - - configMonoidStylesUS <- obj ..:? configMonoidStylesUSName - configMonoidStylesGB <- obj ..:? configMonoidStylesGBName - let configMonoidStyles = fromMaybe mempty $ configMonoidStylesUS - <|> configMonoidStylesGB - - configMonoidHideSourcePaths <- FirstTrue <$> obj ..:? configMonoidHideSourcePathsName - configMonoidRecommendUpgrade <- FirstTrue <$> obj ..:? configMonoidRecommendUpgradeName - - configMonoidCasaRepoPrefix <- First <$> obj ..:? configMonoidCasaRepoPrefixName - configMonoidSnapshotLocation <- First <$> obj ..:? configMonoidSnapshotLocationName - - configMonoidStackDeveloperMode <- First <$> obj ..:? 
configMonoidStackDeveloperModeName - - return ConfigMonoid {..} - -configMonoidWorkDirName :: Text -configMonoidWorkDirName = "work-dir" - -configMonoidBuildOptsName :: Text -configMonoidBuildOptsName = "build" - -configMonoidDockerOptsName :: Text -configMonoidDockerOptsName = "docker" - -configMonoidNixOptsName :: Text -configMonoidNixOptsName = "nix" - -configMonoidConnectionCountName :: Text -configMonoidConnectionCountName = "connection-count" - -configMonoidHideTHLoadingName :: Text -configMonoidHideTHLoadingName = "hide-th-loading" - -configMonoidPrefixTimestampsName :: Text -configMonoidPrefixTimestampsName = "build-output-timestamps" - -configMonoidUrlsName :: Text -configMonoidUrlsName = "urls" - -configMonoidPackageIndicesName :: Text -configMonoidPackageIndicesName = "package-indices" - -configMonoidSystemGHCName :: Text -configMonoidSystemGHCName = "system-ghc" - -configMonoidInstallGHCName :: Text -configMonoidInstallGHCName = "install-ghc" - -configMonoidSkipGHCCheckName :: Text -configMonoidSkipGHCCheckName = "skip-ghc-check" - -configMonoidSkipMsysName :: Text -configMonoidSkipMsysName = "skip-msys" - -configMonoidRequireStackVersionName :: Text -configMonoidRequireStackVersionName = "require-stack-version" - -configMonoidArchName :: Text -configMonoidArchName = "arch" - -configMonoidGHCVariantName :: Text -configMonoidGHCVariantName = "ghc-variant" - -configMonoidGHCBuildName :: Text -configMonoidGHCBuildName = "ghc-build" - -configMonoidJobsName :: Text -configMonoidJobsName = "jobs" - -configMonoidExtraIncludeDirsName :: Text -configMonoidExtraIncludeDirsName = "extra-include-dirs" - -configMonoidExtraLibDirsName :: Text -configMonoidExtraLibDirsName = "extra-lib-dirs" - -configMonoidOverrideGccPathName :: Text -configMonoidOverrideGccPathName = "with-gcc" - -configMonoidOverrideHpackName :: Text -configMonoidOverrideHpackName = "with-hpack" - -configMonoidConcurrentTestsName :: Text -configMonoidConcurrentTestsName = "concurrent-tests" - 
-configMonoidLocalBinPathName :: Text -configMonoidLocalBinPathName = "local-bin-path" - -configMonoidScmInitName :: Text -configMonoidScmInitName = "scm-init" - -configMonoidTemplateParametersName :: Text -configMonoidTemplateParametersName = "params" - -configMonoidCompilerCheckName :: Text -configMonoidCompilerCheckName = "compiler-check" - -configMonoidCompilerRepositoryName :: Text -configMonoidCompilerRepositoryName = "compiler-repository" - -configMonoidGhcOptionsName :: Text -configMonoidGhcOptionsName = "ghc-options" - -configMonoidExtraPathName :: Text -configMonoidExtraPathName = "extra-path" - -configMonoidSetupInfoLocationsName :: Text -configMonoidSetupInfoLocationsName = "setup-info-locations" - -configMonoidSetupInfoInlineName :: Text -configMonoidSetupInfoInlineName = "setup-info" - -configMonoidLocalProgramsBaseName :: Text -configMonoidLocalProgramsBaseName = "local-programs-path" - -configMonoidPvpBoundsName :: Text -configMonoidPvpBoundsName = "pvp-bounds" - -configMonoidModifyCodePageName :: Text -configMonoidModifyCodePageName = "modify-code-page" - -configMonoidRebuildGhcOptionsName :: Text -configMonoidRebuildGhcOptionsName = "rebuild-ghc-options" - -configMonoidApplyGhcOptionsName :: Text -configMonoidApplyGhcOptionsName = "apply-ghc-options" - -configMonoidAllowNewerName :: Text -configMonoidAllowNewerName = "allow-newer" - -configMonoidDefaultTemplateName :: Text -configMonoidDefaultTemplateName = "default-template" - -configMonoidAllowDifferentUserName :: Text -configMonoidAllowDifferentUserName = "allow-different-user" - -configMonoidDumpLogsName :: Text -configMonoidDumpLogsName = "dump-logs" - -configMonoidSaveHackageCredsName :: Text -configMonoidSaveHackageCredsName = "save-hackage-creds" - -configMonoidHackageBaseUrlName :: Text -configMonoidHackageBaseUrlName = "hackage-base-url" - -configMonoidColorWhenUSName :: Text -configMonoidColorWhenUSName = "color" - -configMonoidColorWhenGBName :: Text -configMonoidColorWhenGBName = 
"colour" - -configMonoidStylesUSName :: Text -configMonoidStylesUSName = "stack-colors" - -configMonoidStylesGBName :: Text -configMonoidStylesGBName = "stack-colours" - -configMonoidHideSourcePathsName :: Text -configMonoidHideSourcePathsName = "hide-source-paths" - -configMonoidRecommendUpgradeName :: Text -configMonoidRecommendUpgradeName = "recommend-stack-upgrade" - -configMonoidCasaRepoPrefixName :: Text -configMonoidCasaRepoPrefixName = "casa-repo-prefix" - -configMonoidSnapshotLocationName :: Text -configMonoidSnapshotLocationName = "snapshot-location-base" - -configMonoidStackDeveloperModeName :: Text -configMonoidStackDeveloperModeName = "stack-developer-mode" - -data ConfigException - = ParseConfigFileException (Path Abs File) ParseException - | ParseCustomSnapshotException Text ParseException - | NoProjectConfigFound (Path Abs Dir) (Maybe Text) - | UnexpectedArchiveContents [Path Abs Dir] [Path Abs File] - | UnableToExtractArchive Text (Path Abs File) - | BadStackVersionException VersionRange - | NoMatchingSnapshot (NonEmpty SnapName) - | ResolverMismatch !RawSnapshotLocation String - | ResolverPartial !RawSnapshotLocation String - | NoSuchDirectory FilePath - | ParseGHCVariantException String - | BadStackRoot (Path Abs Dir) - | Won'tCreateStackRootInDirectoryOwnedByDifferentUser (Path Abs Dir) (Path Abs Dir) -- ^ @$STACK_ROOT@, parent dir - | UserDoesn'tOwnDirectory (Path Abs Dir) - | ManualGHCVariantSettingsAreIncompatibleWithSystemGHC - | NixRequiresSystemGhc - | NoResolverWhenUsingNoProject - | DuplicateLocalPackageNames ![(PackageName, [PackageLocation])] - deriving Typeable -instance Show ConfigException where - show (ParseConfigFileException configFile exception) = concat - [ "Could not parse '" - , toFilePath configFile - , "':\n" - , Yaml.prettyPrintParseException exception - , "\nSee http://docs.haskellstack.org/en/stable/yaml_configuration/" - ] - show (ParseCustomSnapshotException url exception) = concat - [ "Could not parse '" - , T.unpack 
url - , "':\n" - , Yaml.prettyPrintParseException exception - , "\nSee https://docs.haskellstack.org/en/stable/custom_snapshot/" - ] - show (NoProjectConfigFound dir mcmd) = concat - [ "Unable to find a stack.yaml file in the current directory (" - , toFilePath dir - , ") or its ancestors" - , case mcmd of - Nothing -> "" - Just cmd -> "\nRecommended action: stack " ++ T.unpack cmd - ] - show (UnexpectedArchiveContents dirs files) = concat - [ "When unpacking an archive specified in your stack.yaml file, " - , "did not find expected contents. Expected: a single directory. Found: " - , show ( map (toFilePath . dirname) dirs - , map (toFilePath . filename) files - ) - ] - show (UnableToExtractArchive url file) = concat - [ "Archive extraction failed. Tarballs and zip archives are supported, couldn't handle the following URL, " - , T.unpack url, " downloaded to the file ", toFilePath $ filename file - ] - show (BadStackVersionException requiredRange) = concat - [ "The version of stack you are using (" - , show (mkVersion' Meta.version) - , ") is outside the required\n" - ,"version range specified in stack.yaml (" - , T.unpack (versionRangeText requiredRange) - , ")." 
] - show (NoMatchingSnapshot names) = concat - [ "None of the following snapshots provides a compiler matching " - , "your package(s):\n" - , unlines $ map (\name -> " - " <> show name) - (NonEmpty.toList names) - , resolveOptions - ] - show (ResolverMismatch resolver errDesc) = concat - [ "Resolver '" - , T.unpack $ utf8BuilderToText $ display resolver - , "' does not have a matching compiler to build some or all of your " - , "package(s).\n" - , errDesc - , resolveOptions - ] - show (ResolverPartial resolver errDesc) = concat - [ "Resolver '" - , T.unpack $ utf8BuilderToText $ display resolver - , "' does not have all the packages to match your requirements.\n" - , unlines $ fmap (" " <>) (lines errDesc) - , resolveOptions - ] - show (NoSuchDirectory dir) = - "No directory could be located matching the supplied path: " ++ dir - show (ParseGHCVariantException v) = - "Invalid ghc-variant value: " ++ v - show (BadStackRoot stackRoot) = concat - [ "Invalid stack root: '" - , toFilePath stackRoot - , "'. Please provide a valid absolute path." - ] - show (Won'tCreateStackRootInDirectoryOwnedByDifferentUser envStackRoot parentDir) = concat - [ "Preventing creation of stack root '" - , toFilePath envStackRoot - , "'. Parent directory '" - , toFilePath parentDir - , "' is owned by someone else." - ] - show (UserDoesn'tOwnDirectory dir) = concat - [ "You are not the owner of '" - , toFilePath dir - , "'. Aborting to protect file permissions." - , "\nRetry with '--" - , T.unpack configMonoidAllowDifferentUserName - , "' to disable this precaution." - ] - show ManualGHCVariantSettingsAreIncompatibleWithSystemGHC = T.unpack $ T.concat - [ "stack can only control the " - , configMonoidGHCVariantName - , " of its own GHC installations. Please use '--no-" - , configMonoidSystemGHCName - , "'." - ] - show NixRequiresSystemGhc = T.unpack $ T.concat - [ "stack's Nix integration is incompatible with '--no-system-ghc'. 
" - , "Please use '--" - , configMonoidSystemGHCName - , "' or disable the Nix integration." - ] - show NoResolverWhenUsingNoProject = "When using the script command, you must provide a resolver argument" - show (DuplicateLocalPackageNames pairs) = concat - $ "The same package name is used in multiple local packages\n" - : map go pairs - where - go (name, dirs) = unlines - $ "" - : (packageNameString name ++ " used in:") - : map goLoc dirs - goLoc loc = "- " ++ show loc -instance Exception ConfigException - -resolveOptions :: String -resolveOptions = - unlines [ "\nThis may be resolved by:" - , " - Using '--omit-packages' to exclude mismatching package(s)." - , " - Using '--resolver' to specify a matching snapshot/resolver" - ] - -- | Get the URL to request the information on the latest snapshots askLatestSnapshotUrl :: (MonadReader env m, HasConfig env) => m Text -askLatestSnapshotUrl = view $ configL.to configLatestSnapshot - --- | @".stack-work"@ -workDirL :: HasConfig env => Lens' env (Path Rel Dir) -workDirL = configL.lens configWorkDir (\x y -> x { configWorkDir = y }) - --- | Per-project work dir -getProjectWorkDir :: (HasBuildConfig env, MonadReader env m) => m (Path Abs Dir) -getProjectWorkDir = do - root <- view projectRootL - workDir <- view workDirL - return (root workDir) - --- | Relative directory for the platform identifier -platformOnlyRelDir - :: (MonadReader env m, HasPlatform env, MonadThrow m) - => m (Path Rel Dir) -platformOnlyRelDir = do - platform <- view platformL - platformVariant <- view platformVariantL - parseRelDir (Distribution.Text.display platform ++ platformVariantSuffix platformVariant) - --- | Directory containing snapshots -snapshotsDir :: (MonadReader env m, HasEnvConfig env, MonadThrow m) => m (Path Abs Dir) -snapshotsDir = do - root <- view stackRootL - platform <- platformGhcRelDir - return $ root relDirSnapshots platform - --- | Installation root for dependencies -installationRootDeps :: (HasEnvConfig env) => RIO env (Path 
Abs Dir) -installationRootDeps = do - root <- view stackRootL - -- TODO: also useShaPathOnWindows here, once #1173 is resolved. - psc <- platformSnapAndCompilerRel - return $ root relDirSnapshots psc - --- | Installation root for locals -installationRootLocal :: (HasEnvConfig env) => RIO env (Path Abs Dir) -installationRootLocal = do - workDir <- getProjectWorkDir - psc <- useShaPathOnWindows =<< platformSnapAndCompilerRel - return $ workDir relDirInstall psc - --- | Installation root for compiler tools -bindirCompilerTools :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir) -bindirCompilerTools = do - config <- view configL - platform <- platformGhcRelDir - compilerVersion <- view actualCompilerVersionL - compiler <- parseRelDir $ compilerVersionString compilerVersion - return $ - view stackRootL config - relDirCompilerTools - platform - compiler - bindirSuffix - --- | Hoogle directory. -hoogleRoot :: (HasEnvConfig env) => RIO env (Path Abs Dir) -hoogleRoot = do - workDir <- getProjectWorkDir - psc <- useShaPathOnWindows =<< platformSnapAndCompilerRel - return $ workDir relDirHoogle psc - --- | Get the hoogle database path. -hoogleDatabasePath :: (HasEnvConfig env) => RIO env (Path Abs File) -hoogleDatabasePath = do - dir <- hoogleRoot - return (dir relFileDatabaseHoo) - --- | Path for platform followed by snapshot name followed by compiler --- name. 
-platformSnapAndCompilerRel - :: (HasEnvConfig env) - => RIO env (Path Rel Dir) -platformSnapAndCompilerRel = do - platform <- platformGhcRelDir - smh <- view $ envConfigL.to envConfigSourceMapHash - name <- smRelDir smh - ghc <- compilerVersionDir - useShaPathOnWindows (platform name ghc) - --- | Relative directory for the platform and GHC identifier -platformGhcRelDir - :: (MonadReader env m, HasEnvConfig env, MonadThrow m) - => m (Path Rel Dir) -platformGhcRelDir = do - cp <- view compilerPathsL - let cbSuffix = compilerBuildSuffix $ cpBuild cp - verOnly <- platformGhcVerOnlyRelDirStr - parseRelDir (mconcat [ verOnly, cbSuffix ]) - --- | Relative directory for the platform and GHC identifier without GHC bindist build -platformGhcVerOnlyRelDir - :: (MonadReader env m, HasPlatform env, HasGHCVariant env, MonadThrow m) - => m (Path Rel Dir) -platformGhcVerOnlyRelDir = - parseRelDir =<< platformGhcVerOnlyRelDirStr - --- | Relative directory for the platform and GHC identifier without GHC bindist build --- (before parsing into a Path) -platformGhcVerOnlyRelDirStr - :: (MonadReader env m, HasPlatform env, HasGHCVariant env) - => m FilePath -platformGhcVerOnlyRelDirStr = do - platform <- view platformL - platformVariant <- view platformVariantL - ghcVariant <- view ghcVariantL - return $ mconcat [ Distribution.Text.display platform - , platformVariantSuffix platformVariant - , ghcVariantSuffix ghcVariant ] - --- | This is an attempt to shorten stack paths on Windows to decrease our --- chances of hitting 260 symbol path limit. The idea is to calculate --- SHA1 hash of the path used on other architectures, encode with base --- 16 and take first 8 symbols of it. -useShaPathOnWindows :: MonadThrow m => Path Rel Dir -> m (Path Rel Dir) -useShaPathOnWindows - | osIsWindows = shaPath - | otherwise = pure - -shaPath :: (IsPath Rel t, MonadThrow m) => Path Rel t -> m (Path Rel t) -shaPath = shaPathForBytes . encodeUtf8 . T.pack . 
toFilePath - -shaPathForBytes :: (IsPath Rel t, MonadThrow m) => ByteString -> m (Path Rel t) -shaPathForBytes - = parsePath . S8.unpack . S8.take 8 - . Mem.convertToBase Mem.Base16 . hashWith SHA1 - --- TODO: Move something like this into the path package. Consider --- subsuming path-io's 'AnyPath'? -class IsPath b t where - parsePath :: MonadThrow m => FilePath -> m (Path b t) - -instance IsPath Abs Dir where parsePath = parseAbsDir -instance IsPath Rel Dir where parsePath = parseRelDir -instance IsPath Abs File where parsePath = parseAbsFile -instance IsPath Rel File where parsePath = parseRelFile - -compilerVersionDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Rel Dir) -compilerVersionDir = do - compilerVersion <- view actualCompilerVersionL - parseRelDir $ case compilerVersion of - ACGhc version -> versionString version - ACGhcGit {} -> compilerVersionString compilerVersion - --- | Package database for installing dependencies into -packageDatabaseDeps :: (HasEnvConfig env) => RIO env (Path Abs Dir) -packageDatabaseDeps = do - root <- installationRootDeps - return $ root relDirPkgdb - --- | Package database for installing local packages into -packageDatabaseLocal :: (HasEnvConfig env) => RIO env (Path Abs Dir) -packageDatabaseLocal = do - root <- installationRootLocal - return $ root relDirPkgdb - --- | Extra package databases -packageDatabaseExtra :: (MonadReader env m, HasEnvConfig env) => m [Path Abs Dir] -packageDatabaseExtra = view $ buildConfigL.to bcExtraPackageDBs - --- | Where do we get information on global packages for loading up a --- 'LoadedSnapshot'? -data GlobalInfoSource - = GISSnapshotHints - -- ^ Accept the hints in the snapshot definition - | GISCompiler ActualCompiler - -- ^ Look up the actual information in the installed compiler - --- | Where HPC reports and tix files get stored. 
-hpcReportDir :: (HasEnvConfig env) - => RIO env (Path Abs Dir) -hpcReportDir = do - root <- installationRootLocal - return $ root relDirHpc - --- | Get the extra bin directories (for the PATH). Puts more local first --- --- Bool indicates whether or not to include the locals -extraBinDirs :: (HasEnvConfig env) - => RIO env (Bool -> [Path Abs Dir]) -extraBinDirs = do - deps <- installationRootDeps - local' <- installationRootLocal - tools <- bindirCompilerTools - return $ \locals -> if locals - then [local' bindirSuffix, deps bindirSuffix, tools] - else [deps bindirSuffix, tools] - -minimalEnvSettings :: EnvSettings -minimalEnvSettings = - EnvSettings - { esIncludeLocals = False - , esIncludeGhcPackagePath = False - , esStackExe = False - , esLocaleUtf8 = False - , esKeepGhcRts = False - } - --- | Default @EnvSettings@ which includes locals and GHC_PACKAGE_PATH. --- --- Note that this also passes through the GHCRTS environment variable. --- See https://github.com/commercialhaskell/stack/issues/3444 -defaultEnvSettings :: EnvSettings -defaultEnvSettings = EnvSettings - { esIncludeLocals = True - , esIncludeGhcPackagePath = True - , esStackExe = True - , esLocaleUtf8 = False - , esKeepGhcRts = True - } - --- | Environment settings which do not embellish the environment --- --- Note that this also passes through the GHCRTS environment variable. --- See https://github.com/commercialhaskell/stack/issues/3444 -plainEnvSettings :: EnvSettings -plainEnvSettings = EnvSettings - { esIncludeLocals = False - , esIncludeGhcPackagePath = False - , esStackExe = False - , esLocaleUtf8 = False - , esKeepGhcRts = True - } - --- | Get the path for the given compiler ignoring any local binaries. 
--- --- https://github.com/commercialhaskell/stack/issues/1052 -getCompilerPath :: HasCompiler env => RIO env (Path Abs File) -getCompilerPath = view $ compilerPathsL.to cpCompiler - -data ProjectAndConfigMonoid - = ProjectAndConfigMonoid !Project !ConfigMonoid - -parseProjectAndConfigMonoid :: Path Abs Dir -> Value -> Yaml.Parser (WithJSONWarnings (IO ProjectAndConfigMonoid)) -parseProjectAndConfigMonoid rootDir = - withObjectWarnings "ProjectAndConfigMonoid" $ \o -> do - packages <- o ..:? "packages" ..!= [RelFilePath "."] - deps <- jsonSubWarningsTT (o ..:? "extra-deps") ..!= [] - flags' <- o ..:? "flags" ..!= mempty - let flags = unCabalStringMap <$> unCabalStringMap - (flags' :: Map (CabalString PackageName) (Map (CabalString FlagName) Bool)) - - resolver <- jsonSubWarnings $ o ...: ["snapshot", "resolver"] - mcompiler <- o ..:? "compiler" - msg <- o ..:? "user-message" - config <- parseConfigMonoidObject rootDir o - extraPackageDBs <- o ..:? "extra-package-dbs" ..!= [] - mcurator <- jsonSubWarningsT (o ..:? "curator") - drops <- o ..:? "drop-packages" ..!= mempty - return $ do - deps' <- mapM (resolvePaths (Just rootDir)) deps - resolver' <- resolvePaths (Just rootDir) resolver - let project = Project - { projectUserMsg = msg - , projectResolver = resolver' - , projectCompiler = mcompiler -- FIXME make sure resolver' isn't SLCompiler - , projectExtraPackageDBs = extraPackageDBs - , projectPackages = packages - , projectDependencies = concatMap toList (deps' :: [NonEmpty RawPackageLocation]) - , projectFlags = flags - , projectCurator = mcurator - , projectDropPackages = Set.map unCabalString drops - } - pure $ ProjectAndConfigMonoid project config - --- | A software control system. 
-data SCM = Git - deriving (Show) - -instance FromJSON SCM where - parseJSON v = do - s <- parseJSON v - case s of - "git" -> return Git - _ -> fail ("Unknown or unsupported SCM: " <> s) - -instance ToJSON SCM where - toJSON Git = toJSON ("git" :: Text) - --- | A variant of the platform, used to differentiate Docker builds from host -data PlatformVariant = PlatformVariantNone - | PlatformVariant String - --- | Render a platform variant to a String suffix. -platformVariantSuffix :: PlatformVariant -> String -platformVariantSuffix PlatformVariantNone = "" -platformVariantSuffix (PlatformVariant v) = "-" ++ v - --- | Specialized bariant of GHC (e.g. libgmp4 or integer-simple) -data GHCVariant - = GHCStandard -- ^ Standard bindist - | GHCIntegerSimple -- ^ Bindist that uses integer-simple - | GHCCustom String -- ^ Other bindists - deriving (Show) - -instance FromJSON GHCVariant where - -- Strange structuring is to give consistent error messages - parseJSON = - withText - "GHCVariant" - (either (fail . show) return . parseGHCVariant . T.unpack) - --- | Render a GHC variant to a String. -ghcVariantName :: GHCVariant -> String -ghcVariantName GHCStandard = "standard" -ghcVariantName GHCIntegerSimple = "integersimple" -ghcVariantName (GHCCustom name) = "custom-" ++ name +askLatestSnapshotUrl = view $ configL . to (.latestSnapshot) --- | Render a GHC variant to a String suffix. -ghcVariantSuffix :: GHCVariant -> String -ghcVariantSuffix GHCStandard = "" -ghcVariantSuffix v = "-" ++ ghcVariantName v +-- | Get the URL to request the information on the recently-published snapshots +askRecentSnapshotsUrl :: (MonadReader env m, HasConfig env) => m Text +askRecentSnapshotsUrl = view $ configL . to (.recentSnapshots) --- | Parse GHC variant from a String. 
-parseGHCVariant :: (MonadThrow m) => String -> m GHCVariant -parseGHCVariant s = - case stripPrefix "custom-" s of - Just name -> return (GHCCustom name) - Nothing - | s == "" -> return GHCStandard - | s == "standard" -> return GHCStandard - | s == "integersimple" -> return GHCIntegerSimple - | otherwise -> return (GHCCustom s) +-- | @STACK_ROOT\/hooks\/@ +hooksDir :: HasConfig env => RIO env (Path Abs Dir) +hooksDir = do + sr <- view $ configL . to (.stackRoot) + pure (sr [reldir|hooks|]) --- | Build of the compiler distribution (e.g. standard, gmp4, tinfo6) --- | Information for a file to download. -data DownloadInfo = DownloadInfo - { downloadInfoUrl :: Text - -- ^ URL or absolute file path - , downloadInfoContentLength :: Maybe Int - , downloadInfoSha1 :: Maybe ByteString - , downloadInfoSha256 :: Maybe ByteString - } deriving (Show) - -instance FromJSON (WithJSONWarnings DownloadInfo) where - parseJSON = withObjectWarnings "DownloadInfo" parseDownloadInfoFromObject - --- | Parse JSON in existing object for 'DownloadInfo' -parseDownloadInfoFromObject :: Object -> WarningParser DownloadInfo -parseDownloadInfoFromObject o = do - url <- o ..: "url" - contentLength <- o ..:? "content-length" - sha1TextMay <- o ..:? "sha1" - sha256TextMay <- o ..:? 
"sha256" - return - DownloadInfo - { downloadInfoUrl = url - , downloadInfoContentLength = contentLength - , downloadInfoSha1 = fmap encodeUtf8 sha1TextMay - , downloadInfoSha256 = fmap encodeUtf8 sha256TextMay - } - -data VersionedDownloadInfo = VersionedDownloadInfo - { vdiVersion :: Version - , vdiDownloadInfo :: DownloadInfo - } - deriving Show - -instance FromJSON (WithJSONWarnings VersionedDownloadInfo) where - parseJSON = withObjectWarnings "VersionedDownloadInfo" $ \o -> do - CabalString version <- o ..: "version" - downloadInfo <- parseDownloadInfoFromObject o - return VersionedDownloadInfo - { vdiVersion = version - , vdiDownloadInfo = downloadInfo - } - -data GHCDownloadInfo = GHCDownloadInfo - { gdiConfigureOpts :: [Text] - , gdiConfigureEnv :: Map Text Text - , gdiDownloadInfo :: DownloadInfo - } - deriving Show - -instance FromJSON (WithJSONWarnings GHCDownloadInfo) where - parseJSON = withObjectWarnings "GHCDownloadInfo" $ \o -> do - configureOpts <- o ..:? "configure-opts" ..!= mempty - configureEnv <- o ..:? "configure-env" ..!= mempty - downloadInfo <- parseDownloadInfoFromObject o - return GHCDownloadInfo - { gdiConfigureOpts = configureOpts - , gdiConfigureEnv = configureEnv - , gdiDownloadInfo = downloadInfo - } - -data SetupInfo = SetupInfo - { siSevenzExe :: Maybe DownloadInfo - , siSevenzDll :: Maybe DownloadInfo - , siMsys2 :: Map Text VersionedDownloadInfo - , siGHCs :: Map Text (Map Version GHCDownloadInfo) - , siStack :: Map Text (Map Version DownloadInfo) - } - deriving Show - -instance FromJSON (WithJSONWarnings SetupInfo) where - parseJSON = withObjectWarnings "SetupInfo" $ \o -> do - siSevenzExe <- jsonSubWarningsT (o ..:? "sevenzexe-info") - siSevenzDll <- jsonSubWarningsT (o ..:? "sevenzdll-info") - siMsys2 <- jsonSubWarningsT (o ..:? "msys2" ..!= mempty) - (fmap unCabalStringMap -> siGHCs) <- jsonSubWarningsTT (o ..:? "ghc" ..!= mempty) - (fmap unCabalStringMap -> siStack) <- jsonSubWarningsTT (o ..:? 
"stack" ..!= mempty) - return SetupInfo {..} - --- | For the @siGHCs@ field maps are deeply merged. --- For all fields the values from the first @SetupInfo@ win. -instance Semigroup SetupInfo where - l <> r = - SetupInfo - { siSevenzExe = siSevenzExe l <|> siSevenzExe r - , siSevenzDll = siSevenzDll l <|> siSevenzDll r - , siMsys2 = siMsys2 l <> siMsys2 r - , siGHCs = Map.unionWith (<>) (siGHCs l) (siGHCs r) - , siStack = Map.unionWith (<>) (siStack l) (siStack r) } - -instance Monoid SetupInfo where - mempty = - SetupInfo - { siSevenzExe = Nothing - , siSevenzDll = Nothing - , siMsys2 = Map.empty - , siGHCs = Map.empty - , siStack = Map.empty - } - mappend = (<>) - --- | How PVP bounds should be added to .cabal files -data PvpBoundsType - = PvpBoundsNone - | PvpBoundsUpper - | PvpBoundsLower - | PvpBoundsBoth - deriving (Show, Read, Eq, Typeable, Ord, Enum, Bounded) - -data PvpBounds = PvpBounds - { pbType :: !PvpBoundsType - , pbAsRevision :: !Bool - } - deriving (Show, Read, Eq, Typeable, Ord) - -pvpBoundsText :: PvpBoundsType -> Text -pvpBoundsText PvpBoundsNone = "none" -pvpBoundsText PvpBoundsUpper = "upper" -pvpBoundsText PvpBoundsLower = "lower" -pvpBoundsText PvpBoundsBoth = "both" - -parsePvpBounds :: Text -> Either String PvpBounds -parsePvpBounds t = maybe err Right $ do - (t', asRevision) <- - case T.break (== '-') t of - (x, "") -> Just (x, False) - (x, "-revision") -> Just (x, True) - _ -> Nothing - x <- Map.lookup t' m - Just PvpBounds - { pbType = x - , pbAsRevision = asRevision - } - where - m = Map.fromList $ map (pvpBoundsText &&& id) [minBound..maxBound] - err = Left $ "Invalid PVP bounds: " ++ T.unpack t - -instance ToJSON PvpBounds where - toJSON (PvpBounds typ asRevision) = - toJSON (pvpBoundsText typ <> (if asRevision then "-revision" else "")) -instance FromJSON PvpBounds where - parseJSON = withText "PvpBounds" (either fail return . 
parsePvpBounds) - --- | Data passed into Docker container for the Docker entrypoint's use -newtype DockerEntrypoint = DockerEntrypoint - { deUser :: Maybe DockerUser - -- ^ UID/GID/etc of host user, if we wish to perform UID/GID switch in container - } deriving (Read,Show) - --- | Docker host user info -data DockerUser = DockerUser - { duUid :: UserID -- ^ uid - , duGid :: GroupID -- ^ gid - , duGroups :: [GroupID] -- ^ Supplemantal groups - , duUmask :: FileMode -- ^ File creation mask } - } deriving (Read,Show) - -data GhcOptionKey - = GOKOldEverything - | GOKEverything - | GOKLocals - | GOKTargets - | GOKPackage !PackageName - deriving (Eq, Ord) - -instance FromJSONKey GhcOptionKey where - fromJSONKey = FromJSONKeyTextParser $ \t -> - case t of - "*" -> return GOKOldEverything - "$everything" -> return GOKEverything - "$locals" -> return GOKLocals - "$targets" -> return GOKTargets - _ -> - case parsePackageName $ T.unpack t of - Nothing -> fail $ "Invalid package name: " ++ show t - Just x -> return $ GOKPackage x - fromJSONKeyList = FromJSONKeyTextParser $ \_ -> fail "GhcOptionKey.fromJSONKeyList" - -newtype GhcOptions = GhcOptions { unGhcOptions :: [Text] } - -instance FromJSON GhcOptions where - parseJSON = withText "GhcOptions" $ \t -> - case parseArgs Escaping t of - Left e -> fail e - Right opts -> return $ GhcOptions $ map T.pack opts +-- | @STACK_ROOT\/hooks\/ghc-install.sh@ +ghcInstallHook :: HasConfig env => RIO env (Path Abs File) +ghcInstallHook = do + hd <- hooksDir + pure (hd [relfile|ghc-install.sh|]) ----------------------------------- -- Lens classes ----------------------------------- --- | Class for environment values which have a Platform -class HasPlatform env where - platformL :: Lens' env Platform - default platformL :: HasConfig env => Lens' env Platform - platformL = configL.platformL - {-# INLINE platformL #-} - platformVariantL :: Lens' env PlatformVariant - default platformVariantL :: HasConfig env => Lens' env PlatformVariant - 
platformVariantL = configL.platformVariantL - {-# INLINE platformVariantL #-} - --- | Class for environment values which have a GHCVariant -class HasGHCVariant env where - ghcVariantL :: SimpleGetter env GHCVariant - default ghcVariantL :: HasConfig env => SimpleGetter env GHCVariant - ghcVariantL = configL.ghcVariantL - {-# INLINE ghcVariantL #-} - --- | Class for environment values which have a 'Runner'. -class (HasProcessContext env, HasLogFunc env) => HasRunner env where - runnerL :: Lens' env Runner -instance HasLogFunc Runner where - logFuncL = lens runnerLogFunc (\x y -> x { runnerLogFunc = y }) -instance HasProcessContext Runner where - processContextL = lens runnerProcessContext (\x y -> x { runnerProcessContext = y }) -instance HasRunner Runner where - runnerL = id -instance HasStylesUpdate Runner where - stylesUpdateL = globalOptsL. - lens globalStylesUpdate (\x y -> x { globalStylesUpdate = y }) -instance HasTerm Runner where - useColorL = lens runnerUseColor (\x y -> x { runnerUseColor = y }) - termWidthL = lens runnerTermWidth (\x y -> x { runnerTermWidth = y }) - -globalOptsL :: HasRunner env => Lens' env GlobalOpts -globalOptsL = runnerL.lens runnerGlobalOpts (\x y -> x { runnerGlobalOpts = y }) - --- | Class for environment values that can provide a 'Config'. 
-class (HasPlatform env, HasGHCVariant env, HasProcessContext env, HasPantryConfig env, HasTerm env, HasRunner env) => HasConfig env where - configL :: Lens' env Config - default configL :: HasBuildConfig env => Lens' env Config - configL = buildConfigL.lens bcConfig (\x y -> x { bcConfig = y }) - {-# INLINE configL #-} - -class HasConfig env => HasBuildConfig env where - buildConfigL :: Lens' env BuildConfig - default buildConfigL :: HasEnvConfig env => Lens' env BuildConfig - buildConfigL = envConfigL.lens - envConfigBuildConfig - (\x y -> x { envConfigBuildConfig = y }) - -class (HasBuildConfig env, HasSourceMap env, HasCompiler env) => HasEnvConfig env where - envConfigL :: Lens' env EnvConfig +-- | Class for environment values that can provide a t'Config'. +class ( HasPlatform env + , HasGHCVariant env + , HasProcessContext env + , HasPantryConfig env + , HasTerm env + , HasRunner env + ) => HasConfig env where + configL :: Lens' env Config ----------------------------------- -- Lens instances ----------------------------------- -instance HasPlatform (Platform,PlatformVariant) where - platformL = _1 - platformVariantL = _2 instance HasPlatform Config where - platformL = lens configPlatform (\x y -> x { configPlatform = y }) - platformVariantL = lens configPlatformVariant (\x y -> x { configPlatformVariant = y }) -instance HasPlatform BuildConfig -instance HasPlatform EnvConfig + platformL = lens (.platform) (\x y -> x { platform = y }) + platformVariantL = + lens (.platformVariant) (\x y -> x { platformVariant = y }) -instance HasGHCVariant GHCVariant where - ghcVariantL = id - {-# INLINE ghcVariantL #-} instance HasGHCVariant Config where - ghcVariantL = to $ fromMaybe GHCStandard . configGHCVariant -instance HasGHCVariant BuildConfig -instance HasGHCVariant EnvConfig + ghcVariantL = to $ fromMaybe GHCStandard . 
(.ghcVariant) instance HasProcessContext Config where - processContextL = runnerL.processContextL -instance HasProcessContext BuildConfig where - processContextL = configL.processContextL -instance HasProcessContext EnvConfig where - processContextL = configL.processContextL + processContextL = runnerL . processContextL instance HasPantryConfig Config where - pantryConfigL = lens configPantryConfig (\x y -> x { configPantryConfig = y }) -instance HasPantryConfig BuildConfig where - pantryConfigL = configL.pantryConfigL -instance HasPantryConfig EnvConfig where - pantryConfigL = configL.pantryConfigL + pantryConfigL = lens + (.pantryConfig) + (\x y -> x { pantryConfig = y }) instance HasConfig Config where - configL = id - {-# INLINE configL #-} -instance HasConfig BuildConfig where - configL = lens bcConfig (\x y -> x { bcConfig = y }) -instance HasConfig EnvConfig - -instance HasBuildConfig BuildConfig where - buildConfigL = id - {-# INLINE buildConfigL #-} -instance HasBuildConfig EnvConfig - -instance HasCompiler EnvConfig where - compilerPathsL = to envConfigCompilerPaths -instance HasEnvConfig EnvConfig where - envConfigL = id - {-# INLINE envConfigL #-} + configL = id + {-# INLINE configL #-} instance HasRunner Config where - runnerL = lens configRunner (\x y -> x { configRunner = y }) -instance HasRunner BuildConfig where - runnerL = configL.runnerL -instance HasRunner EnvConfig where - runnerL = configL.runnerL + runnerL = lens (.runner) (\x y -> x { runner = y }) instance HasLogFunc Config where - logFuncL = runnerL.logFuncL -instance HasLogFunc BuildConfig where - logFuncL = runnerL.logFuncL -instance HasLogFunc EnvConfig where - logFuncL = runnerL.logFuncL + logFuncL = runnerL . logFuncL instance HasStylesUpdate Config where - stylesUpdateL = runnerL.stylesUpdateL -instance HasStylesUpdate BuildConfig where - stylesUpdateL = runnerL.stylesUpdateL -instance HasStylesUpdate EnvConfig where - stylesUpdateL = runnerL.stylesUpdateL + stylesUpdateL = runnerL . 
stylesUpdateL instance HasTerm Config where - useColorL = runnerL.useColorL - termWidthL = runnerL.termWidthL -instance HasTerm BuildConfig where - useColorL = runnerL.useColorL - termWidthL = runnerL.termWidthL -instance HasTerm EnvConfig where - useColorL = runnerL.useColorL - termWidthL = runnerL.termWidthL + useColorL = runnerL . useColorL + termWidthL = runnerL . termWidthL ----------------------------------- -- Helper lenses ----------------------------------- stackRootL :: HasConfig s => Lens' s (Path Abs Dir) -stackRootL = configL.lens configStackRoot (\x y -> x { configStackRoot = y }) - --- | The compiler specified by the @SnapshotDef@. This may be --- different from the actual compiler used! -wantedCompilerVersionL :: HasBuildConfig s => Getting r s WantedCompiler -wantedCompilerVersionL = buildConfigL.to (smwCompiler . bcSMWanted) - --- | Location of the ghc-pkg executable -newtype GhcPkgExe = GhcPkgExe (Path Abs File) - deriving Show - --- | Get the 'GhcPkgExe' from a 'HasCompiler' environment -getGhcPkgExe :: HasCompiler env => RIO env GhcPkgExe -getGhcPkgExe = view $ compilerPathsL.to cpPkg - --- | Dump information for a single package -data DumpPackage = DumpPackage - { dpGhcPkgId :: !GhcPkgId - , dpPackageIdent :: !PackageIdentifier - , dpParentLibIdent :: !(Maybe PackageIdentifier) - , dpLicense :: !(Maybe C.License) - , dpLibDirs :: ![FilePath] - , dpLibraries :: ![Text] - , dpHasExposedModules :: !Bool - , dpExposedModules :: !(Set ModuleName) - , dpDepends :: ![GhcPkgId] - , dpHaddockInterfaces :: ![FilePath] - , dpHaddockHtml :: !(Maybe FilePath) - , dpIsExposed :: !Bool - } - deriving (Show, Read, Eq) +stackRootL = + configL . 
lens (.stackRoot) (\x y -> x { stackRoot = y }) --- | Paths on the filesystem for the compiler we're using -data CompilerPaths = CompilerPaths - { cpCompilerVersion :: !ActualCompiler - , cpArch :: !Arch - , cpBuild :: !CompilerBuild - , cpCompiler :: !(Path Abs File) - -- | ghc-pkg or equivalent - , cpPkg :: !GhcPkgExe - -- | runghc - , cpInterpreter :: !(Path Abs File) - -- | haddock, in 'IO' to allow deferring the lookup - , cpHaddock :: !(Path Abs File) - -- | Is this a Stack-sandboxed installation? - , cpSandboxed :: !Bool - , cpCabalVersion :: !Version - -- ^ This is the version of Cabal that stack will use to compile Setup.hs files - -- in the build process. - -- - -- Note that this is not necessarily the same version as the one that stack - -- depends on as a library and which is displayed when running - -- @stack ls dependencies | grep Cabal@ in the stack project. - , cpGlobalDB :: !(Path Abs Dir) - -- ^ Global package database - , cpGhcInfo :: !ByteString - -- ^ Output of @ghc --info@ - , cpGlobalDump :: !(Map PackageName DumpPackage) - } - deriving Show - -cpWhich :: (MonadReader env m, HasCompiler env) => m WhichCompiler -cpWhich = view $ compilerPathsL.to (whichCompiler.cpCompilerVersion) - -data ExtraDirs = ExtraDirs - { edBins :: ![Path Abs Dir] - , edInclude :: ![Path Abs Dir] - , edLib :: ![Path Abs Dir] - } deriving (Show, Generic) -instance Semigroup ExtraDirs where - (<>) = mappenddefault -instance Monoid ExtraDirs where - mempty = memptydefault - mappend = (<>) - --- | An environment which ensures that the given compiler is available --- on the PATH -class HasCompiler env where - compilerPathsL :: SimpleGetter env CompilerPaths -instance HasCompiler CompilerPaths where - compilerPathsL = id - -class HasSourceMap env where - sourceMapL :: Lens' env SourceMap -instance HasSourceMap EnvConfig where - sourceMapL = lens envConfigSourceMap (\x y -> x { envConfigSourceMap = y }) - --- | The version of the compiler which will actually be used. 
May be --- different than that specified in the 'SnapshotDef' and returned --- by 'wantedCompilerVersionL'. -actualCompilerVersionL :: HasSourceMap env => SimpleGetter env ActualCompiler -actualCompilerVersionL = sourceMapL.to smCompiler +userGlobalConfigFileL :: HasConfig s => Lens' s (Path Abs File) +userGlobalConfigFileL = configL . lens + (.userGlobalConfigFile) + (\x y -> x { userGlobalConfigFile = y }) buildOptsL :: HasConfig s => Lens' s BuildOpts -buildOptsL = configL.lens - configBuild - (\x y -> x { configBuild = y }) - -buildOptsMonoidHaddockL :: Lens' BuildOptsMonoid (Maybe Bool) -buildOptsMonoidHaddockL = lens (getFirstFalse . buildMonoidHaddock) - (\buildMonoid t -> buildMonoid {buildMonoidHaddock = FirstFalse t}) +buildOptsL = configL . lens (.build) (\x y -> x { build = y }) -buildOptsMonoidTestsL :: Lens' BuildOptsMonoid (Maybe Bool) -buildOptsMonoidTestsL = lens (getFirstFalse . buildMonoidTests) - (\buildMonoid t -> buildMonoid {buildMonoidTests = FirstFalse t}) +envOverrideSettingsL :: + HasConfig env + => Lens' env (EnvSettings -> IO ProcessContext) +envOverrideSettingsL = configL . lens + (.processContextSettings) + (\x y -> x { processContextSettings = y }) -buildOptsMonoidBenchmarksL :: Lens' BuildOptsMonoid (Maybe Bool) -buildOptsMonoidBenchmarksL = lens (getFirstFalse . buildMonoidBenchmarks) - (\buildMonoid t -> buildMonoid {buildMonoidBenchmarks = FirstFalse t}) - -buildOptsMonoidInstallExesL :: Lens' BuildOptsMonoid (Maybe Bool) -buildOptsMonoidInstallExesL = - lens (getFirstFalse . 
buildMonoidInstallExes) - (\buildMonoid t -> buildMonoid {buildMonoidInstallExes = FirstFalse t}) - -buildOptsInstallExesL :: Lens' BuildOpts Bool -buildOptsInstallExesL = - lens boptsInstallExes - (\bopts t -> bopts {boptsInstallExes = t}) - -buildOptsHaddockL :: Lens' BuildOpts Bool -buildOptsHaddockL = - lens boptsHaddock - (\bopts t -> bopts {boptsHaddock = t}) - -globalOptsBuildOptsMonoidL :: Lens' GlobalOpts BuildOptsMonoid -globalOptsBuildOptsMonoidL = - lens - globalConfigMonoid - (\x y -> x { globalConfigMonoid = y }) - . - lens - configMonoidBuildOpts - (\x y -> x { configMonoidBuildOpts = y }) - -cabalVersionL :: HasCompiler env => SimpleGetter env Version -cabalVersionL = compilerPathsL.to cpCabalVersion - -whichCompilerL :: Getting r ActualCompiler WhichCompiler -whichCompilerL = to whichCompiler - -envOverrideSettingsL :: HasConfig env => Lens' env (EnvSettings -> IO ProcessContext) -envOverrideSettingsL = configL.lens - configProcessContextSettings - (\x y -> x { configProcessContextSettings = y }) - -shouldForceGhcColorFlag :: (HasRunner env, HasEnvConfig env) - => RIO env Bool -shouldForceGhcColorFlag = do - canDoColor <- (>= mkVersion [8, 2, 1]) . getGhcVersion - <$> view actualCompilerVersionL - shouldDoColor <- view useColorL - return $ canDoColor && shouldDoColor - -appropriateGhcColorFlag :: (HasRunner env, HasEnvConfig env) - => RIO env (Maybe String) -appropriateGhcColorFlag = f <$> shouldForceGhcColorFlag - where f True = Just ghcColorForceFlag - f False = Nothing - --- | See 'globalTerminal' -terminalL :: HasRunner env => Lens' env Bool -terminalL = globalOptsL.lens globalTerminal (\x y -> x { globalTerminal = y }) - --- | See 'globalReExecVersion' -reExecL :: HasRunner env => SimpleGetter env Bool -reExecL = globalOptsL.to (isJust . globalReExecVersion) +-- | @".stack-work"@ +workDirL :: HasConfig env => Lens' env (Path Rel Dir) +workDirL = configL . 
lens (.workDir) (\x y -> x { workDir = y }) -- | In dev mode, print as a warning, otherwise as debug prettyStackDevL :: HasConfig env => [StyleDoc] -> RIO env () prettyStackDevL docs = do config <- view configL - if configStackDeveloperMode config + if config.stackDeveloperMode then prettyWarnL docs else prettyDebugL docs diff --git a/src/Stack/Types/Config/Build.hs b/src/Stack/Types/Config/Build.hs deleted file mode 100644 index c329293793..0000000000 --- a/src/Stack/Types/Config/Build.hs +++ /dev/null @@ -1,479 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} - --- | Configuration options for building. - -module Stack.Types.Config.Build - ( - BuildOpts(..) - , BuildCommand(..) - , defaultBuildOpts - , defaultBuildOptsCLI - , BuildOptsCLI(..) - , BuildOptsMonoid(..) - , TestOpts(..) - , defaultTestOpts - , TestOptsMonoid(..) - , HaddockOpts(..) - , defaultHaddockOpts - , HaddockOptsMonoid(..) - , BenchmarkOpts(..) - , defaultBenchmarkOpts - , BenchmarkOptsMonoid(..) - , FileWatchOpts(..) - , BuildSubset(..) - , ApplyCLIFlag (..) - , boptsCLIFlagsByName - ) - where - -import Pantry.Internal.AesonExtended -import qualified Data.Map.Strict as Map -import Generics.Deriving.Monoid (memptydefault, mappenddefault) -import Stack.Prelude - --- | Build options that is interpreted by the build command. --- This is built up from BuildOptsCLI and BuildOptsMonoid -data BuildOpts = - BuildOpts {boptsLibProfile :: !Bool - ,boptsExeProfile :: !Bool - ,boptsLibStrip :: !Bool - ,boptsExeStrip :: !Bool - ,boptsHaddock :: !Bool - -- ^ Build haddocks? - ,boptsHaddockOpts :: !HaddockOpts - -- ^ Options to pass to haddock - ,boptsOpenHaddocks :: !Bool - -- ^ Open haddocks in the browser? - ,boptsHaddockDeps :: !(Maybe Bool) - -- ^ Build haddocks for dependencies? 
- ,boptsHaddockInternal :: !Bool - -- ^ Build haddocks for all symbols and packages, like @cabal haddock --internal@ - ,boptsHaddockHyperlinkSource :: !Bool - -- ^ Build hyperlinked source if possible. Fallback to - -- @hscolour@. Disable for no sources. - ,boptsInstallExes :: !Bool - -- ^ Install executables to user path after building? - ,boptsInstallCompilerTool :: !Bool - -- ^ Install executables to compiler tools path after building? - ,boptsPreFetch :: !Bool - -- ^ Fetch all packages immediately - -- ^ Watch files for changes and automatically rebuild - ,boptsKeepGoing :: !(Maybe Bool) - -- ^ Keep building/running after failure - ,boptsKeepTmpFiles :: !Bool - -- ^ Keep intermediate files and build directories - ,boptsForceDirty :: !Bool - -- ^ Force treating all local packages as having dirty files - - ,boptsTests :: !Bool - -- ^ Turn on tests for local targets - ,boptsTestOpts :: !TestOpts - -- ^ Additional test arguments - - ,boptsBenchmarks :: !Bool - -- ^ Turn on benchmarks for local targets - ,boptsBenchmarkOpts :: !BenchmarkOpts - -- ^ Additional test arguments - -- ^ Commands (with arguments) to run after a successful build - -- ^ Only perform the configure step when building - ,boptsReconfigure :: !Bool - -- ^ Perform the configure step even if already configured - ,boptsCabalVerbose :: !Bool - -- ^ Ask Cabal to be verbose in its builds - ,boptsSplitObjs :: !Bool - -- ^ Whether to enable split-objs. - ,boptsSkipComponents :: ![Text] - -- ^ Which components to skip when building - ,boptsInterleavedOutput :: !Bool - -- ^ Should we use the interleaved GHC output when building - -- multiple packages? 
- ,boptsDdumpDir :: !(Maybe Text) - } - deriving (Show) - -defaultBuildOpts :: BuildOpts -defaultBuildOpts = BuildOpts - { boptsLibProfile = defaultFirstFalse buildMonoidLibProfile - , boptsExeProfile = defaultFirstFalse buildMonoidExeProfile - , boptsLibStrip = defaultFirstTrue buildMonoidLibStrip - , boptsExeStrip = defaultFirstTrue buildMonoidExeStrip - , boptsHaddock = False - , boptsHaddockOpts = defaultHaddockOpts - , boptsOpenHaddocks = defaultFirstFalse buildMonoidOpenHaddocks - , boptsHaddockDeps = Nothing - , boptsHaddockInternal = defaultFirstFalse buildMonoidHaddockInternal - , boptsHaddockHyperlinkSource = defaultFirstTrue buildMonoidHaddockHyperlinkSource - , boptsInstallExes = defaultFirstFalse buildMonoidInstallExes - , boptsInstallCompilerTool = defaultFirstFalse buildMonoidInstallCompilerTool - , boptsPreFetch = defaultFirstFalse buildMonoidPreFetch - , boptsKeepGoing = Nothing - , boptsKeepTmpFiles = defaultFirstFalse buildMonoidKeepTmpFiles - , boptsForceDirty = defaultFirstFalse buildMonoidForceDirty - , boptsTests = defaultFirstFalse buildMonoidTests - , boptsTestOpts = defaultTestOpts - , boptsBenchmarks = defaultFirstFalse buildMonoidBenchmarks - , boptsBenchmarkOpts = defaultBenchmarkOpts - , boptsReconfigure = defaultFirstFalse buildMonoidReconfigure - , boptsCabalVerbose = defaultFirstFalse buildMonoidCabalVerbose - , boptsSplitObjs = defaultFirstFalse buildMonoidSplitObjs - , boptsSkipComponents = [] - , boptsInterleavedOutput = defaultFirstTrue buildMonoidInterleavedOutput - , boptsDdumpDir = Nothing - } - -defaultBuildOptsCLI ::BuildOptsCLI -defaultBuildOptsCLI = BuildOptsCLI - { boptsCLITargets = [] - , boptsCLIDryrun = False - , boptsCLIFlags = Map.empty - , boptsCLIGhcOptions = [] - , boptsCLIBuildSubset = BSAll - , boptsCLIFileWatch = NoFileWatch - , boptsCLIWatchAll = False - , boptsCLIExec = [] - , boptsCLIOnlyConfigure = False - , boptsCLICommand = Build - , boptsCLIInitialBuildSteps = False - } - --- | How to apply a CLI flag 
-data ApplyCLIFlag - = ACFAllProjectPackages - -- ^ Apply to all project packages which have such a flag name available. - | ACFByName !PackageName - -- ^ Apply to the specified package only. - deriving (Show, Eq, Ord) - --- | Only flags set via 'ACFByName' -boptsCLIFlagsByName :: BuildOptsCLI -> Map PackageName (Map FlagName Bool) -boptsCLIFlagsByName = - Map.fromList . - mapMaybe go . - Map.toList . - boptsCLIFlags - where - go (ACFAllProjectPackages, _) = Nothing - go (ACFByName name, flags) = Just (name, flags) - --- | Build options that may only be specified from the CLI -data BuildOptsCLI = BuildOptsCLI - { boptsCLITargets :: ![Text] - , boptsCLIDryrun :: !Bool - , boptsCLIGhcOptions :: ![Text] - , boptsCLIFlags :: !(Map ApplyCLIFlag (Map FlagName Bool)) - , boptsCLIBuildSubset :: !BuildSubset - , boptsCLIFileWatch :: !FileWatchOpts - , boptsCLIWatchAll :: !Bool - , boptsCLIExec :: ![(String, [String])] - , boptsCLIOnlyConfigure :: !Bool - , boptsCLICommand :: !BuildCommand - , boptsCLIInitialBuildSteps :: !Bool - } deriving Show - --- | Command sum type for conditional arguments. 
-data BuildCommand - = Build - | Test - | Haddock - | Bench - | Install - deriving (Eq, Show) - --- | Build options that may be specified in the stack.yaml or from the CLI -data BuildOptsMonoid = BuildOptsMonoid - { buildMonoidTrace :: !Any - , buildMonoidProfile :: !Any - , buildMonoidNoStrip :: !Any - , buildMonoidLibProfile :: !FirstFalse - , buildMonoidExeProfile :: !FirstFalse - , buildMonoidLibStrip :: !FirstTrue - , buildMonoidExeStrip :: !FirstTrue - , buildMonoidHaddock :: !FirstFalse - , buildMonoidHaddockOpts :: !HaddockOptsMonoid - , buildMonoidOpenHaddocks :: !FirstFalse - , buildMonoidHaddockDeps :: !(First Bool) - , buildMonoidHaddockInternal :: !FirstFalse - , buildMonoidHaddockHyperlinkSource :: !FirstTrue - , buildMonoidInstallExes :: !FirstFalse - , buildMonoidInstallCompilerTool :: !FirstFalse - , buildMonoidPreFetch :: !FirstFalse - , buildMonoidKeepGoing :: !(First Bool) - , buildMonoidKeepTmpFiles :: !FirstFalse - , buildMonoidForceDirty :: !FirstFalse - , buildMonoidTests :: !FirstFalse - , buildMonoidTestOpts :: !TestOptsMonoid - , buildMonoidBenchmarks :: !FirstFalse - , buildMonoidBenchmarkOpts :: !BenchmarkOptsMonoid - , buildMonoidReconfigure :: !FirstFalse - , buildMonoidCabalVerbose :: !FirstFalse - , buildMonoidSplitObjs :: !FirstFalse - , buildMonoidSkipComponents :: ![Text] - , buildMonoidInterleavedOutput :: !FirstTrue - , buildMonoidDdumpDir :: !(First Text) - } deriving (Show, Generic) - -instance FromJSON (WithJSONWarnings BuildOptsMonoid) where - parseJSON = withObjectWarnings "BuildOptsMonoid" - (\o -> do let buildMonoidTrace = Any False - buildMonoidProfile = Any False - buildMonoidNoStrip = Any False - buildMonoidLibProfile <- FirstFalse <$> o ..:? buildMonoidLibProfileArgName - buildMonoidExeProfile <-FirstFalse <$> o ..:? buildMonoidExeProfileArgName - buildMonoidLibStrip <- FirstTrue <$> o ..:? buildMonoidLibStripArgName - buildMonoidExeStrip <-FirstTrue <$> o ..:? 
buildMonoidExeStripArgName - buildMonoidHaddock <- FirstFalse <$> o ..:? buildMonoidHaddockArgName - buildMonoidHaddockOpts <- jsonSubWarnings (o ..:? buildMonoidHaddockOptsArgName ..!= mempty) - buildMonoidOpenHaddocks <- FirstFalse <$> o ..:? buildMonoidOpenHaddocksArgName - buildMonoidHaddockDeps <- First <$> o ..:? buildMonoidHaddockDepsArgName - buildMonoidHaddockInternal <- FirstFalse <$> o ..:? buildMonoidHaddockInternalArgName - buildMonoidHaddockHyperlinkSource <- FirstTrue <$> o ..:? buildMonoidHaddockHyperlinkSourceArgName - buildMonoidInstallExes <- FirstFalse <$> o ..:? buildMonoidInstallExesArgName - buildMonoidInstallCompilerTool <- FirstFalse <$> o ..:? buildMonoidInstallCompilerToolArgName - buildMonoidPreFetch <- FirstFalse <$> o ..:? buildMonoidPreFetchArgName - buildMonoidKeepGoing <- First <$> o ..:? buildMonoidKeepGoingArgName - buildMonoidKeepTmpFiles <- FirstFalse <$> o ..:? buildMonoidKeepTmpFilesArgName - buildMonoidForceDirty <- FirstFalse <$> o ..:? buildMonoidForceDirtyArgName - buildMonoidTests <- FirstFalse <$> o ..:? buildMonoidTestsArgName - buildMonoidTestOpts <- jsonSubWarnings (o ..:? buildMonoidTestOptsArgName ..!= mempty) - buildMonoidBenchmarks <- FirstFalse <$> o ..:? buildMonoidBenchmarksArgName - buildMonoidBenchmarkOpts <- jsonSubWarnings (o ..:? buildMonoidBenchmarkOptsArgName ..!= mempty) - buildMonoidReconfigure <- FirstFalse <$> o ..:? buildMonoidReconfigureArgName - buildMonoidCabalVerbose <- FirstFalse <$> o ..:? buildMonoidCabalVerboseArgName - buildMonoidSplitObjs <- FirstFalse <$> o ..:? buildMonoidSplitObjsName - buildMonoidSkipComponents <- o ..:? buildMonoidSkipComponentsName ..!= mempty - buildMonoidInterleavedOutput <- FirstTrue <$> o ..:? buildMonoidInterleavedOutputName - buildMonoidDdumpDir <- o ..:? 
buildMonoidDdumpDirName ..!= mempty - return BuildOptsMonoid{..}) - -buildMonoidLibProfileArgName :: Text -buildMonoidLibProfileArgName = "library-profiling" - -buildMonoidExeProfileArgName :: Text -buildMonoidExeProfileArgName = "executable-profiling" - -buildMonoidLibStripArgName :: Text -buildMonoidLibStripArgName = "library-stripping" - -buildMonoidExeStripArgName :: Text -buildMonoidExeStripArgName = "executable-stripping" - -buildMonoidHaddockArgName :: Text -buildMonoidHaddockArgName = "haddock" - -buildMonoidHaddockOptsArgName :: Text -buildMonoidHaddockOptsArgName = "haddock-arguments" - -buildMonoidOpenHaddocksArgName :: Text -buildMonoidOpenHaddocksArgName = "open-haddocks" - -buildMonoidHaddockDepsArgName :: Text -buildMonoidHaddockDepsArgName = "haddock-deps" - -buildMonoidHaddockInternalArgName :: Text -buildMonoidHaddockInternalArgName = "haddock-internal" - -buildMonoidHaddockHyperlinkSourceArgName :: Text -buildMonoidHaddockHyperlinkSourceArgName = "haddock-hyperlink-source" - -buildMonoidInstallExesArgName :: Text -buildMonoidInstallExesArgName = "copy-bins" - -buildMonoidInstallCompilerToolArgName :: Text -buildMonoidInstallCompilerToolArgName = "copy-compiler-tool" - -buildMonoidPreFetchArgName :: Text -buildMonoidPreFetchArgName = "prefetch" - -buildMonoidKeepGoingArgName :: Text -buildMonoidKeepGoingArgName = "keep-going" - -buildMonoidKeepTmpFilesArgName :: Text -buildMonoidKeepTmpFilesArgName = "keep-tmp-files" - -buildMonoidForceDirtyArgName :: Text -buildMonoidForceDirtyArgName = "force-dirty" - -buildMonoidTestsArgName :: Text -buildMonoidTestsArgName = "test" - -buildMonoidTestOptsArgName :: Text -buildMonoidTestOptsArgName = "test-arguments" - -buildMonoidBenchmarksArgName :: Text -buildMonoidBenchmarksArgName = "bench" - -buildMonoidBenchmarkOptsArgName :: Text -buildMonoidBenchmarkOptsArgName = "benchmark-opts" - -buildMonoidReconfigureArgName :: Text -buildMonoidReconfigureArgName = "reconfigure" - -buildMonoidCabalVerboseArgName :: 
Text -buildMonoidCabalVerboseArgName = "cabal-verbose" - -buildMonoidSplitObjsName :: Text -buildMonoidSplitObjsName = "split-objs" - -buildMonoidSkipComponentsName :: Text -buildMonoidSkipComponentsName = "skip-components" - -buildMonoidInterleavedOutputName :: Text -buildMonoidInterleavedOutputName = "interleaved-output" - -buildMonoidDdumpDirName :: Text -buildMonoidDdumpDirName = "ddump-dir" - -instance Semigroup BuildOptsMonoid where - (<>) = mappenddefault - -instance Monoid BuildOptsMonoid where - mempty = memptydefault - mappend = (<>) - --- | Which subset of packages to build -data BuildSubset - = BSAll - | BSOnlySnapshot - -- ^ Only install packages in the snapshot database, skipping - -- packages intended for the local database. - | BSOnlyDependencies - | BSOnlyLocals - -- ^ Refuse to build anything in the snapshot database, see - -- https://github.com/commercialhaskell/stack/issues/5272 - deriving (Show, Eq) - --- | Options for the 'FinalAction' 'DoTests' -data TestOpts = - TestOpts {toRerunTests :: !Bool -- ^ Whether successful tests will be run gain - ,toAdditionalArgs :: ![String] -- ^ Arguments passed to the test program - ,toCoverage :: !Bool -- ^ Generate a code coverage report - ,toDisableRun :: !Bool -- ^ Disable running of tests - ,toMaximumTimeSeconds :: !(Maybe Int) -- ^ test suite timeout in seconds - } deriving (Eq,Show) - -defaultTestOpts :: TestOpts -defaultTestOpts = TestOpts - { toRerunTests = defaultFirstTrue toMonoidRerunTests - , toAdditionalArgs = [] - , toCoverage = defaultFirstFalse toMonoidCoverage - , toDisableRun = defaultFirstFalse toMonoidDisableRun - , toMaximumTimeSeconds = Nothing - } - -data TestOptsMonoid = - TestOptsMonoid - { toMonoidRerunTests :: !FirstTrue - , toMonoidAdditionalArgs :: ![String] - , toMonoidCoverage :: !FirstFalse - , toMonoidDisableRun :: !FirstFalse - , toMonoidMaximumTimeSeconds :: !(First (Maybe Int)) - } deriving (Show, Generic) - -instance FromJSON (WithJSONWarnings TestOptsMonoid) where - 
parseJSON = withObjectWarnings "TestOptsMonoid" - (\o -> do toMonoidRerunTests <- FirstTrue <$> o ..:? toMonoidRerunTestsArgName - toMonoidAdditionalArgs <- o ..:? toMonoidAdditionalArgsName ..!= [] - toMonoidCoverage <- FirstFalse <$> o ..:? toMonoidCoverageArgName - toMonoidDisableRun <- FirstFalse <$> o ..:? toMonoidDisableRunArgName - toMonoidMaximumTimeSeconds <- First <$> o ..:? toMonoidMaximumTimeSecondsArgName - return TestOptsMonoid{..}) - -toMonoidRerunTestsArgName :: Text -toMonoidRerunTestsArgName = "rerun-tests" - -toMonoidAdditionalArgsName :: Text -toMonoidAdditionalArgsName = "additional-args" - -toMonoidCoverageArgName :: Text -toMonoidCoverageArgName = "coverage" - -toMonoidDisableRunArgName :: Text -toMonoidDisableRunArgName = "no-run-tests" - -toMonoidMaximumTimeSecondsArgName :: Text -toMonoidMaximumTimeSecondsArgName = "test-suite-timeout" - -instance Semigroup TestOptsMonoid where - (<>) = mappenddefault - -instance Monoid TestOptsMonoid where - mempty = memptydefault - mappend = (<>) - - - --- | Haddock Options -newtype HaddockOpts = - HaddockOpts { hoAdditionalArgs :: [String] -- ^ Arguments passed to haddock program - } deriving (Eq,Show) - -newtype HaddockOptsMonoid = - HaddockOptsMonoid {hoMonoidAdditionalArgs :: [String] - } deriving (Show, Generic) - -defaultHaddockOpts :: HaddockOpts -defaultHaddockOpts = HaddockOpts {hoAdditionalArgs = []} - -instance FromJSON (WithJSONWarnings HaddockOptsMonoid) where - parseJSON = withObjectWarnings "HaddockOptsMonoid" - (\o -> do hoMonoidAdditionalArgs <- o ..:? 
hoMonoidAdditionalArgsName ..!= [] - return HaddockOptsMonoid{..}) - -instance Semigroup HaddockOptsMonoid where - (<>) = mappenddefault - -instance Monoid HaddockOptsMonoid where - mempty = memptydefault - mappend = (<>) - -hoMonoidAdditionalArgsName :: Text -hoMonoidAdditionalArgsName = "haddock-args" - - --- | Options for the 'FinalAction' 'DoBenchmarks' -data BenchmarkOpts = - BenchmarkOpts - { beoAdditionalArgs :: !(Maybe String) -- ^ Arguments passed to the benchmark program - , beoDisableRun :: !Bool -- ^ Disable running of benchmarks - } deriving (Eq,Show) - -defaultBenchmarkOpts :: BenchmarkOpts -defaultBenchmarkOpts = BenchmarkOpts - { beoAdditionalArgs = Nothing - , beoDisableRun = False - } - -data BenchmarkOptsMonoid = - BenchmarkOptsMonoid - { beoMonoidAdditionalArgs :: !(First String) - , beoMonoidDisableRun :: !(First Bool) - } deriving (Show, Generic) - -instance FromJSON (WithJSONWarnings BenchmarkOptsMonoid) where - parseJSON = withObjectWarnings "BenchmarkOptsMonoid" - (\o -> do beoMonoidAdditionalArgs <- First <$> o ..:? beoMonoidAdditionalArgsArgName - beoMonoidDisableRun <- First <$> o ..:? 
beoMonoidDisableRunArgName - return BenchmarkOptsMonoid{..}) - -beoMonoidAdditionalArgsArgName :: Text -beoMonoidAdditionalArgsArgName = "benchmark-arguments" - -beoMonoidDisableRunArgName :: Text -beoMonoidDisableRunArgName = "no-run-benchmarks" - -instance Semigroup BenchmarkOptsMonoid where - (<>) = mappenddefault - -instance Monoid BenchmarkOptsMonoid where - mempty = memptydefault - mappend = (<>) - -data FileWatchOpts - = NoFileWatch - | FileWatch - | FileWatchPoll - deriving (Show,Eq) diff --git a/src/Stack/Types/Config/Exception.hs b/src/Stack/Types/Config/Exception.hs new file mode 100644 index 0000000000..3a935635a4 --- /dev/null +++ b/src/Stack/Types/Config/Exception.hs @@ -0,0 +1,269 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +{-| +Module : Stack.Types.Config.Exception +License : BSD-3-Clause +-} + +module Stack.Types.Config.Exception + ( ConfigException (..) + , ConfigPrettyException (..) + , ParseAbsolutePathException (..) + ) where + +import qualified Data.Text as T +import Data.Yaml ( ParseException ) +import qualified Data.Yaml as Yaml +import Distribution.System ( Arch ) +import Path( dirname, filename ) +import Stack.Prelude +import Stack.Types.ConfigMonoid + ( configMonoidAllowDifferentUserName + , configMonoidGHCVariantName, configMonoidSystemGHCName + ) +import Stack.Types.MsysEnvironment ( MsysEnvironment ) +import Stack.Types.Version + ( VersionRange, stackVersion, versionRangeText ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Config" module. 
+data ConfigException + = ParseCustomSnapshotException Text ParseException + | NoProjectConfigFound (Path Abs Dir) (Maybe Text) + | UnexpectedArchiveContents [Path Abs Dir] [Path Abs File] + | UnableToExtractArchive Text (Path Abs File) + | BadStackVersionException VersionRange + | NoSuchDirectory FilePath + | NoSuchFile FilePath + | ParseGHCVariantException String + | BadStackRoot (Path Abs Dir) + | Won'tCreateStackRootInDirectoryOwnedByDifferentUser + (Path Abs Dir) + (Path Abs Dir) + -- ^ @$STACK_ROOT@, parent dir + | UserDoesn'tOwnDirectory (Path Abs Dir) + | ManualGHCVariantSettingsAreIncompatibleWithSystemGHC + | NixRequiresSystemGhc + | NoSnapshotWhenUsingNoProject + | NoLTSWithMajorVersion Int + | NoLTSFound + deriving Show + +instance Exception ConfigException where + displayException (ParseCustomSnapshotException url exception) = concat + [ "Error: [S-8981]\n" + , "Could not parse '" + , T.unpack url + , "':\n" + , Yaml.prettyPrintParseException exception + , "\nSee https://docs.haskellstack.org/en/stable/topics/custom_snapshot/" + ] + displayException (NoProjectConfigFound dir mcmd) = concat + [ "Error: [S-2206]\n" + , "Unable to find a stack.yaml file in the current directory (" + , toFilePath dir + , ") or its ancestors" + , case mcmd of + Nothing -> "" + Just cmd -> "\nRecommended action: stack " ++ T.unpack cmd + ] + displayException (UnexpectedArchiveContents dirs files) = concat + [ "Error: [S-4964]\n" + , "When unpacking an archive specified in your stack.yaml file, " + , "did not find expected contents. Expected: a single directory. Found: " + , show ( map (toFilePath . dirname) dirs + , map (toFilePath . filename) files + ) + ] + displayException (UnableToExtractArchive url file) = concat + [ "Error: [S-2040]\n" + , "Archive extraction failed. 
Tarballs and zip archives are supported, \ + \couldn't handle the following URL, " + , T.unpack url + , " downloaded to the file " + , toFilePath $ filename file + ] + displayException (BadStackVersionException requiredRange) = concat + [ "Error: [S-1641]\n" + , "The version of Stack you are using (" + , show stackVersion + , ") is outside the required\n" + ,"version range specified in stack.yaml (" + , T.unpack (versionRangeText requiredRange) + , ").\n" + , "You can upgrade Stack by running:\n\n" + , "stack upgrade" + ] + displayException (NoSuchDirectory dir) = concat + [ "Error: [S-8773]\n" + , "No directory could be located matching the supplied path: " + , dir + ] + displayException (NoSuchFile file) = concat + [ "Error: [S-4335]\n" + , "No file could be located matching the supplied path: " + , file + ] + displayException (ParseGHCVariantException v) = concat + [ "Error: [S-3938]\n" + , "Invalid ghc-variant value: " + , v + ] + displayException (BadStackRoot stackRoot) = concat + [ "Error: [S-8530]\n" + , "Invalid Stack root: '" + , toFilePath stackRoot + , "'. Please provide a valid absolute path." + ] + displayException (Won'tCreateStackRootInDirectoryOwnedByDifferentUser envStackRoot parentDir) = concat + [ "Error: [S-7613]\n" + , "Preventing creation of Stack root '" + , toFilePath envStackRoot + , "'. Parent directory '" + , toFilePath parentDir + , "' is owned by someone else." + ] + displayException (UserDoesn'tOwnDirectory dir) = concat + [ "Error: [S-8707]\n" + , "You are not the owner of '" + , toFilePath dir + , "'. Aborting to protect file permissions." + , "\nRetry with '--" + , T.unpack configMonoidAllowDifferentUserName + , "' to disable this precaution." + ] + displayException ManualGHCVariantSettingsAreIncompatibleWithSystemGHC = T.unpack $ T.concat + [ "Error: [S-3605]\n" + , "Stack can only control the " + , configMonoidGHCVariantName + , " of its own GHC installations. Please use '--no-" + , configMonoidSystemGHCName + , "'." 
+ ] + displayException NixRequiresSystemGhc = T.unpack $ T.concat + [ "Error: [S-6816]\n" + , "Stack's Nix integration is incompatible with '--no-system-ghc'. " + , "Please use '--" + , configMonoidSystemGHCName + , "' or disable the Nix integration." + ] + displayException NoSnapshotWhenUsingNoProject = + "Error: [S-5027]\n" + ++ "When using the script command, you must provide a snapshot argument" + displayException (NoLTSWithMajorVersion n) = concat + [ "Error: [S-3803]\n" + , "No LTS release found with major version " + , show n + , "." + ] + displayException NoLTSFound = + "Error: [S-5472]\n" + ++ "No LTS releases found." + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Config" module. +data ConfigPrettyException + = ParseConfigFileException !(Path Abs File) !ParseException + | StackWorkEnvNotRelativeDir !String + | DuplicateLocalPackageNames ![(PackageName, [PackageLocation])] + | BadMsysEnvironment !MsysEnvironment !Arch + | NoMsysEnvironmentBug + | ConfigFileNotProjectLevelBug + | NoExecutablePath !String + deriving Show + +instance Pretty ConfigPrettyException where + pretty (ParseConfigFileException configFile exception) = + "[S-6602]" + <> line + <> fillSep + [ flow "Stack could not load and parse" + , pretty configFile + , flow "as a configuraton file." + ] + <> blankLine + <> flow "While loading and parsing, Stack encountered the following \ + \error:" + <> blankLine + <> string (Yaml.prettyPrintParseException exception) + <> blankLine + <> fillSep + [ flow "For help about the content of Stack's configuration files, \ + \see (for the most recent release of Stack)" + , style + Url + "http://docs.haskellstack.org/en/stable/configure/yaml/" + <> "." + ] + pretty (StackWorkEnvNotRelativeDir x) = + "[S-7462]" + <> line + <> flow "Stack failed to interpret the value of the STACK_WORK \ + \environment variable as a valid relative path to a directory. \ + \Stack will not accept an absolute path. 
A path containing a \ + \.. (parent directory) component is not valid." + <> blankLine + <> fillSep + [ flow "If set, Stack expects the value to identify the location \ + \of Stack's work directory, relative to the root directory \ + \of the project or package. Stack encountered the value:" + , style Error (fromString x) <> "." + ] + pretty (DuplicateLocalPackageNames pairs) = + "[S-5470]" + <> line + <> fillSep + [ flow "The same package name is used in more than one project \ + \package or" + , style Shell "extra-deps" <> "." + ] + <> mconcat (map go pairs) + where + go (name, dirs) = + blankLine + <> fillSep + [ style Error (fromPackageName name) + , flow "used in:" + ] + <> line + <> bulletedList (map (fromString . T.unpack . textDisplay) dirs) + pretty (BadMsysEnvironment msysEnv arch) = + "[S-6854]" + <> line + <> fillSep + [ flow "The specified MSYS2 environment" + , style Error (fromString $ show msysEnv) + , flow "is not consistent with the architecture" + , fromString (show arch) <> "." + ] + pretty NoMsysEnvironmentBug = bugPrettyReport "[S-5006]" $ + flow "No default MSYS2 environment." + pretty ConfigFileNotProjectLevelBug = bugPrettyReport "[S-8398]" $ + flow "The configuration file is not a project-level one." + pretty (NoExecutablePath progName) = + "[S-6890]" + <> line + <> fillSep + [ flow "The path for the executable file invoked as" + , style Shell (fromString progName) + , flow "can not be identified." + ] + +instance Exception ConfigPrettyException + +-- | Type representing an exception thrown by functions exported by the +-- "Stack.Config" module. 
+data ParseAbsolutePathException + = ParseAbsolutePathException String String + deriving Show + +instance Exception ParseAbsolutePathException where + displayException (ParseAbsolutePathException envVar dir) = concat + [ "Error: [S-9437]\n" + , "Failed to parse " + , envVar + , " environment variable (expected absolute directory): " + , dir + ] diff --git a/src/Stack/Types/ConfigMonoid.hs b/src/Stack/Types/ConfigMonoid.hs new file mode 100644 index 0000000000..beffb65ba8 --- /dev/null +++ b/src/Stack/Types/ConfigMonoid.hs @@ -0,0 +1,702 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.ConfigMonoid +License : BSD-3-Clause +-} + +module Stack.Types.ConfigMonoid + ( ConfigMonoid (..) + , parseConfigMonoid + , parseConfigMonoidObject + , configMonoidAllowDifferentUserName + , configMonoidGHCVariantName + , configMonoidInstallGHCName + , configMonoidInstallMsysName + , configMonoidRecommendStackUpgradeName + , configMonoidSystemGHCName + ) where + +import Data.Aeson.Types ( FromJSON (..), Object, Value ) +import Data.Aeson.WarningParser + ( WarningParser, WithJSONWarnings, (..:?), (..!=) + , jsonSubWarnings, jsonSubWarningsT, withObjectWarnings + ) +import Casa.Client ( CasaRepoPrefix ) +import Control.Monad.Writer ( tell ) +import Data.Coerce ( coerce ) +import qualified Data.Map as Map +import qualified Data.Map.Strict as M +import qualified Data.Monoid as Monoid +import Data.Monoid.Map ( MonoidMap (..) ) +import qualified Data.Yaml as Yaml +import Distribution.Version ( anyVersion ) +import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) +import Stack.Prelude hiding ( snapshotLocation ) +import Stack.Types.AllowNewerDeps ( AllowNewerDeps ) +import Stack.Types.ApplyGhcOptions ( ApplyGhcOptions (..) ) +import Stack.Types.ApplyProgOptions ( ApplyProgOptions (..) 
) +import Stack.Types.BuildOptsMonoid ( BuildOptsMonoid ) +import Stack.Types.Casa ( CasaOptsMonoid ) +import Stack.Types.CabalConfigKey ( CabalConfigKey ) +import Stack.Types.ColorWhen ( ColorWhen ) +import Stack.Types.Compiler + ( CompilerBindistPath, CompilerRepository, CompilerTarget ) +import Stack.Types.CompilerBuild ( CompilerBuild ) +import Stack.Types.Docker ( DockerOptsMonoid, VersionRangeJSON (..) ) +import Stack.Types.DumpLogs ( DumpLogs ) +import Stack.Types.GhcOptionKey ( GhcOptionKey (..) ) +import Stack.Types.GhcOptions ( GhcOptions (..) ) +import Stack.Types.GHCVariant ( GHCVariant ) +import Stack.Types.MsysEnvironment ( MsysEnvironment ) +import Stack.Types.Nix ( NixOptsMonoid ) +import Stack.Types.PvpBounds ( PvpBounds ) +import Stack.Types.SCM ( SCM ) +import Stack.Types.SetupInfo ( SetupInfo ) +import Stack.Types.TemplateName ( TemplateName ) +import Stack.Types.Version + ( IntersectingVersionRange (..), VersionCheck ) +import qualified System.FilePath as FilePath +import Stack.Types.Snapshot (AbstractSnapshot) + +-- | An uninterpreted representation of configuration options. Configurations +-- may be "cascaded" using mappend (left-biased). +data ConfigMonoid = ConfigMonoid + { stackRoot :: !(First (Path Abs Dir)) + -- ^ See: 'Stack.Types.Config.stackRoot' + , workDir :: !(First (Path Rel Dir)) + -- ^ See: 'Stack.Types.Config.workDir'. + , buildOpts :: !BuildOptsMonoid + -- ^ build options. + , dockerOpts :: !DockerOptsMonoid + -- ^ Docker options. 
+ , nixOpts :: !NixOptsMonoid + -- ^ Options for the execution environment (nix-shell or container) + , connectionCount :: !(First Int) + -- ^ See: 'Stack.Types.Config.connectionCount' + , hideTHLoading :: !FirstTrue + -- ^ See: 'Stack.Types.Config.hideTHLoading' + , prefixTimestamps :: !(First Bool) + -- ^ See: 'Stack.Types.Config.prefixTimestamps' + , latestSnapshot :: !(First Text) + -- ^ See: 'Stack.Types.Config.latestSnapshot' + , recentSnapshots :: !(First Text) + -- ^ See: 'Stack.Types.Config.recentSnapshots' + , packageIndex :: !(First PackageIndexConfig) + -- ^ See: 'withPantryConfig' + , systemGHC :: !(First Bool) + -- ^ See: 'Stack.Types.Config.systemGHC' + , installGHC :: !FirstTrue + -- ^ See: 'Stack.Types.Config.installGHC' + , installMsys :: !(First Bool) + -- ^ See: 'Stack.Types.Config.installMsys' + , skipGHCCheck :: !FirstFalse + -- ^ See: 'Stack.Types.Config.skipGHCCheck' + , skipMsys :: !FirstFalse + -- ^ See: 'Stack.Types.Config.skipMsys' + , msysEnvironment :: !(First MsysEnvironment) + -- ^ See: 'Stack.Types.Config.msysEnvironment' + , compilerCheck :: !(First VersionCheck) + -- ^ See: 'Stack.Types.Config.compilerCheck' + , compilerRepository :: !(First CompilerRepository) + -- ^ See: 'Stack.Types.Config.compilerRepository' + , compilerTarget :: !(First CompilerTarget) + -- ^ See: 'Stack.Types.Config.compilerTarget' + , compilerBindistPath :: !(First CompilerBindistPath) + -- ^ See: 'Stack.Types.Config.compilerBindistPath' + , requireStackVersion :: !IntersectingVersionRange + -- ^ See: 'Stack.Types.Config.requireStackVersion' + , arch :: !(First String) + -- ^ Used for overriding the platform + , ghcVariant :: !(First GHCVariant) + -- ^ Used for overriding the platform + , ghcBuild :: !(First CompilerBuild) + -- ^ Used for overriding the GHC build + , jobs :: !(First Int) + -- ^ See: 'Stack.Types.Config.jobs' + , extraIncludeDirs :: ![FilePath] + -- ^ See: 'Stack.Types.Config.extraIncludeDirs' + , extraLibDirs :: ![FilePath] + -- ^ See: 
'Stack.Types.Config.extraLibDirs' + , customPreprocessorExts :: ![Text] + -- ^ See: 'Stack.Types.Config.customPreprocessorExts' + , overrideGccPath :: !(First (Path Abs File)) + -- ^ Allow users to override the path to gcc + , overrideHpack :: !(First FilePath) + -- ^ Use Hpack executable (overrides bundled Hpack) + , hpackForce :: !FirstFalse + -- ^ Pass --force to Hpack to always overwrite Cabal file + , concurrentTests :: !(First Bool) + -- ^ See: 'Stack.Types.Config.concurrentTests' + , localBinPath :: !(First FilePath) + -- ^ Used to override the binary installation dir + , fileWatchHook :: !(First FilePath) + -- ^ Path to executable used to override --file-watch post-processing. + , templateParameters :: !(Map Text Text) + -- ^ Template parameters. + , scmInit :: !(First SCM) + -- ^ Initialize SCM (e.g. git init) when making new projects? + , ghcOptionsByName :: !(MonoidMap PackageName (Monoid.Dual [Text])) + -- ^ See 'Stack.Types.Config.ghcOptionsByName'. Uses 'Monoid.Dual' so that + -- options from the configs on the right come first, so that they + -- can be overridden. + , ghcOptionsByCat :: !(MonoidMap ApplyGhcOptions (Monoid.Dual [Text])) + -- ^ See 'Stack.Types.Config.ghcOptionsAll'. Uses 'Monoid.Dual' so that options + -- from the configs on the right come first, so that they can be + -- overridden. + , cabalConfigOpts :: !(MonoidMap CabalConfigKey (Monoid.Dual [Text])) + -- ^ See 'Stack.Types.Config.cabalConfigOpts'. + , extraPath :: ![Path Abs Dir] + -- ^ Additional paths to search for executables in + , setupInfoLocations :: ![String] + -- ^ See 'Stack.Types.Config.setupInfoLocations' + , setupInfoInline :: !SetupInfo + -- ^ See 'Stack.Types.Config.setupInfoInline' + , localProgramsBase :: !(First (Path Abs Dir)) + -- ^ Override the default local programs dir, where e.g. GHC is installed. 
+ , pvpBounds :: !(First PvpBounds) + -- ^ See 'Stack.Types.Config.pvpBounds' + , modifyCodePage :: !FirstTrue + -- ^ See 'Stack.Types.Config.modifyCodePage' + , rebuildGhcOptions :: !FirstFalse + -- ^ See 'Stack.Types.Config.monoidRebuildGhcOptions' + , applyGhcOptions :: !(First ApplyGhcOptions) + -- ^ See 'Stack.Types.Config.applyGhcOptions' + , applyProgOptions :: !(First ApplyProgOptions) + -- ^ See 'Stack.Types.Config.applyProgOptions' + , allowNewer :: !(First Bool) + -- ^ See 'Stack.Types.Config.monoidAllowNewer' + , allowNewerDeps :: !(Maybe AllowNewerDeps) + -- ^ See 'Stack.Types.Config.monoidAllowNewerDeps' + , defaultInitSnapshot :: !(First (Unresolved AbstractSnapshot)) + -- ^ An optional default snapshot to use with @stack init@ when none is + -- specified. + , defaultTemplate :: !(First TemplateName) + -- ^ The default template to use when none is specified. + -- (If Nothing, the 'default' default template is used.) + , allowDifferentUser :: !(First Bool) + -- ^ Allow users other than the Stack root owner to use the Stack + -- installation. 
+ , dumpLogs :: !(First DumpLogs) + -- ^ See 'Stack.Types.Config.dumpLogs' + , saveHackageCreds :: !FirstTrue + -- ^ See 'Stack.Types.Config.saveHackageCreds' + , hackageBaseUrl :: !(First Text) + -- ^ See 'Stack.Types.Config.hackageBaseUrl' + , colorWhen :: !(First ColorWhen) + -- ^ When to use \'ANSI\' colors + , styles :: !StylesUpdate + , hideSourcePaths :: !FirstTrue + -- ^ See 'Stack.Types.Config.hideSourcePaths' + , recommendStackUpgrade :: !FirstTrue + -- ^ See 'Stack.Types.Config.recommendStackUpgrade' + , notifyIfNixOnPath :: !FirstTrue + -- ^ See 'Stack.Types.Config.notifyIfNixOnPath' + , notifyIfGhcUntested :: !FirstTrue + -- ^ See 'Stack.Types.Config.notifyIfGhcUntested' + , notifyIfCabalUntested :: !FirstTrue + -- ^ See 'Stack.Types.Config.notifyIfCabalUntested' + , notifyIfArchUnknown :: !FirstTrue + -- ^ See 'Stack.Types.Config.notifyIfArchUnknown' + , notifyIfNoRunTests :: !FirstTrue + -- ^ See 'Stack.Types.Config.notifyIfNoRunTests' + , notifyIfNoRunBenchmarks :: !FirstTrue + -- ^ See 'Stack.Types.Config.notifyIfNoRunBenchmarks' + , notifyIfBaseNotBoot :: !FirstTrue + -- ^ See 'Stack.Types.Config.notifyIfBaseNotBoot' + , casaOpts :: !CasaOptsMonoid + -- ^ Casa configuration options. + , casaRepoPrefix :: !(First CasaRepoPrefix) + -- ^ Casa repository prefix (deprecated). + , snapshotLocation :: !(First Text) + -- ^ Custom location of LTS/Nightly snapshots + , globalHintsLocation :: !(First (Unresolved GlobalHintsLocation)) + -- ^ Custom location of global hints + , noRunCompile :: !FirstFalse + -- ^ See: 'Stack.Types.Config.noRunCompile' + , stackDeveloperMode :: !(First Bool) + -- ^ See 'Stack.Types.Config.stackDeveloperMode' + } + deriving Generic + +instance Semigroup ConfigMonoid where + (<>) = mappenddefault + +instance Monoid ConfigMonoid where + mempty = memptydefault + mappend = (<>) + +-- | An uninterpreted representation of URLs options. Configurations may be +-- "cascaded" using mappend (left-biased). 
+data UrlsOptsMonoid = UrlsOptsMonoid + { latestSnapshot :: !(First Text) + -- ^ See: 'Stack.Types.Config.latestSnapshot' + , recentSnapshots :: !(First Text) + -- ^ See: 'Stack.Types.Config.recentSnapshots' + } + deriving Generic + +-- | Decode uninterpreted URLs options from JSON/YAML. +instance FromJSON (WithJSONWarnings UrlsOptsMonoid) where + parseJSON = withObjectWarnings "UrlsOptsMonoid" $ \o -> do + latestSnapshot <- First <$> o ..:? latestSnapshotArgName + recentSnapshots <- First <$> o ..:? recentSnapshotsArgName + pure UrlsOptsMonoid + { latestSnapshot + , recentSnapshots + } + +-- | Left-biased combine URLs options +instance Semigroup UrlsOptsMonoid where + (<>) = mappenddefault + +-- | Left-biased combine URLs options +instance Monoid UrlsOptsMonoid where + mempty = memptydefault + mappend = (<>) + +-- | URLs latest snapshots argument name. +latestSnapshotArgName :: Text +latestSnapshotArgName = "latest-snapshot" + +-- | URLs recent snapshots argument name. +recentSnapshotsArgName :: Text +recentSnapshotsArgName = "recent-snapshots" + +parseConfigMonoid :: + Path Abs Dir + -> Value + -> Yaml.Parser (WithJSONWarnings ConfigMonoid) +parseConfigMonoid = withObjectWarnings "ConfigMonoid" . parseConfigMonoidObject + +-- | Parse a partial configuration. Used both to parse both a standalone config +-- file and a project file, so that a sub-parser is not required, which would +-- interfere with warnings for missing fields. +parseConfigMonoidObject :: Path Abs Dir -> Object -> WarningParser ConfigMonoid +parseConfigMonoidObject rootDir obj = do + -- Parsing 'stackRoot' from 'stackRoot'/config.yaml would be nonsensical + let stackRoot = First Nothing + workDir <- First <$> obj ..:? configMonoidWorkDirName + buildOpts <- jsonSubWarnings (obj ..:? configMonoidBuildOptsName ..!= mempty) + dockerOpts <- + jsonSubWarnings (obj ..:? configMonoidDockerOptsName ..!= mempty) + nixOpts <- jsonSubWarnings (obj ..:? 
configMonoidNixOptsName ..!= mempty)
+  connectionCount <- First <$> obj ..:? configMonoidConnectionCountName
+  hideTHLoading <- FirstTrue <$> obj ..:? configMonoidHideTHLoadingName
+  prefixTimestamps <- First <$> obj ..:? configMonoidPrefixTimestampsName
+  urlsOpts :: UrlsOptsMonoid <-
+    jsonSubWarnings (obj ..:? configMonoidUrlsName ..!= mempty)
+  let latestSnapshot = urlsOpts.latestSnapshot
+      recentSnapshots = urlsOpts.recentSnapshots
+  packageIndex <-
+    First <$> jsonSubWarningsT (obj ..:? configMonoidPackageIndexName)
+  systemGHC <- First <$> obj ..:? configMonoidSystemGHCName
+  installGHC <- FirstTrue <$> obj ..:? configMonoidInstallGHCName
+  installMsys <- First <$> obj ..:? configMonoidInstallMsysName
+  skipGHCCheck <- FirstFalse <$> obj ..:? configMonoidSkipGHCCheckName
+  skipMsys <- FirstFalse <$> obj ..:? configMonoidSkipMsysName
+  msysEnvironment <- First <$> obj ..:? configMonoidMsysEnvironmentName
+  requireStackVersion <-
+    IntersectingVersionRange . (.versionRangeJSON) <$>
+      ( obj ..:? configMonoidRequireStackVersionName
+          ..!= VersionRangeJSON anyVersion
+      )
+  arch <- First <$> obj ..:? configMonoidArchName
+  ghcVariant <- First <$> obj ..:? configMonoidGHCVariantName
+  ghcBuild <- First <$> obj ..:? configMonoidGHCBuildName
+  jobs <- First <$> obj ..:? configMonoidJobsName
+  extraIncludeDirs <- map (toFilePath rootDir FilePath.</>) <$>
+    obj ..:? configMonoidExtraIncludeDirsName ..!= []
+  extraLibDirs <- map (toFilePath rootDir FilePath.</>) <$>
+    obj ..:? configMonoidExtraLibDirsName ..!= []
+  customPreprocessorExts <-
+    obj ..:? configMonoidCustomPreprocessorExtsName ..!= []
+  overrideGccPath <- First <$> obj ..:? configMonoidOverrideGccPathName
+  overrideHpack <- First <$> obj ..:? configMonoidOverrideHpackName
+  hpackForce <- FirstFalse <$> obj ..:? configMonoidHpackForceName
+  concurrentTests <- First <$> obj ..:? configMonoidConcurrentTestsName
+  localBinPath <- First <$> obj ..:? configMonoidLocalBinPathName
+  fileWatchHook <- First <$> obj ..:?
configMonoidFileWatchHookName + (scmInit, templateParameters) <- obj ..:? "templates" >>= \case + Nothing -> pure (First Nothing,M.empty) + Just tobj -> do + scmInit <- tobj ..:? configMonoidScmInitName + params <- tobj ..:? configMonoidTemplateParametersName + pure (First scmInit,fromMaybe M.empty params) + compilerCheck <- First <$> obj ..:? configMonoidCompilerCheckName + compilerRepository <- First <$> (obj ..:? configMonoidCompilerRepositoryName) + compilerTarget <- First <$> (obj ..:? configMonoidCompilerTargetName) + compilerBindistPath <- + First <$> (obj ..:? configMonoidCompilerBindistPathName) + + options <- Map.map (.ghcOptions) <$> + obj ..:? configMonoidGhcOptionsName ..!= (mempty :: Map GhcOptionKey GhcOptions) + + optionsEverything <- + case (Map.lookup GOKOldEverything options, Map.lookup GOKEverything options) of + (Just _, Just _) -> + fail "Cannot specify both `*` and `$everything` GHC options" + (Nothing, Just x) -> pure x + (Just x, Nothing) -> do + tell "The `*` ghc-options key is not recommended. Consider using \ + \$locals, or if really needed, $everything" + pure x + (Nothing, Nothing) -> pure [] + + let ghcOptionsByCat = coerce $ Map.fromList + [ (AGOEverything, optionsEverything) + , (AGOLocals, Map.findWithDefault [] GOKLocals options) + , (AGOTargets, Map.findWithDefault [] GOKTargets options) + ] + + ghcOptionsByName = coerce $ Map.fromList + [(name, opts) | (GOKPackage name, opts) <- Map.toList options] + + cabalConfigOpts' <- obj ..:? configMonoidConfigureOptionsName ..!= mempty + let cabalConfigOpts = coerce (cabalConfigOpts' :: Map CabalConfigKey [Text]) + extraPath <- obj ..:? configMonoidExtraPathName ..!= [] + setupInfoLocations <- obj ..:? configMonoidSetupInfoLocationsName ..!= [] + setupInfoInline <- + jsonSubWarningsT (obj ..:? configMonoidSetupInfoInlineName) ..!= mempty + localProgramsBase <- First <$> obj ..:? configMonoidLocalProgramsBaseName + pvpBounds <- First <$> obj ..:? 
configMonoidPvpBoundsName + modifyCodePage <- FirstTrue <$> obj ..:? configMonoidModifyCodePageName + rebuildGhcOptions <- FirstFalse <$> obj ..:? configMonoidRebuildGhcOptionsName + applyGhcOptions <- First <$> obj ..:? configMonoidApplyGhcOptionsName + applyProgOptions <- First <$> obj ..:? configMonoidApplyProgOptionsName + allowNewer <- First <$> obj ..:? configMonoidAllowNewerName + allowNewerDeps <- obj ..:? configMonoidAllowNewerDepsName + defaultInitSnapshot <- First <$> obj ..:? configMonoidDefaultInitSnapshotName + defaultTemplate <- First <$> obj ..:? configMonoidDefaultTemplateName + allowDifferentUser <- First <$> obj ..:? configMonoidAllowDifferentUserName + dumpLogs <- First <$> obj ..:? configMonoidDumpLogsName + saveHackageCreds <- FirstTrue <$> obj ..:? configMonoidSaveHackageCredsName + hackageBaseUrl <- First <$> obj ..:? configMonoidHackageBaseUrlName + configMonoidColorWhenUS <- obj ..:? configMonoidColorWhenUSName + configMonoidColorWhenGB <- obj ..:? configMonoidColorWhenGBName + let colorWhen = First $ configMonoidColorWhenUS <|> configMonoidColorWhenGB + configMonoidStylesUS <- obj ..:? configMonoidStylesUSName + configMonoidStylesGB <- obj ..:? configMonoidStylesGBName + let styles = fromMaybe mempty $ configMonoidStylesUS <|> configMonoidStylesGB + hideSourcePaths <- FirstTrue <$> obj ..:? configMonoidHideSourcePathsName + recommendStackUpgrade <- + FirstTrue <$> obj ..:? configMonoidRecommendStackUpgradeName + notifyIfNixOnPath <- FirstTrue <$> obj ..:? configMonoidNotifyIfNixOnPathName + notifyIfGhcUntested <- + FirstTrue <$> obj ..:? configMonoidNotifyIfGhcUntestedName + notifyIfCabalUntested <- + FirstTrue <$> obj ..:? configMonoidNotifyIfCabalUntestedName + notifyIfArchUnknown <- + FirstTrue <$> obj ..:? configMonoidNotifyIfArchUnknownName + notifyIfNoRunTests <- + FirstTrue <$> obj ..:? configMonoidNotifyIfNoRunTestsName + notifyIfNoRunBenchmarks <- + FirstTrue <$> obj ..:? 
configMonoidNotifyIfNoRunBenchmarksName + notifyIfBaseNotBoot <- + FirstTrue <$> obj ..:? configMonoidNotifyIfBaseNotBootName + casaOpts <- jsonSubWarnings (obj ..:? configMonoidCasaOptsName ..!= mempty) + casaRepoPrefix <- First <$> obj ..:? configMonoidCasaRepoPrefixName + snapshotLocation <- First <$> obj ..:? configMonoidSnapshotLocationName + globalHintsLocation <- + First <$> jsonSubWarningsT (obj ..:? configMonoidGlobalHintsLocationName) + noRunCompile <- FirstFalse <$> obj ..:? configMonoidNoRunCompileName + stackDeveloperMode <- First <$> obj ..:? configMonoidStackDeveloperModeName + pure ConfigMonoid + { stackRoot + , workDir + , buildOpts + , dockerOpts + , nixOpts + , connectionCount + , hideTHLoading + , prefixTimestamps + , latestSnapshot + , recentSnapshots + , packageIndex + , systemGHC + , installGHC + , installMsys + , skipGHCCheck + , skipMsys + , msysEnvironment + , compilerCheck + , compilerRepository + , compilerTarget + , compilerBindistPath + , requireStackVersion + , arch + , ghcVariant + , ghcBuild + , jobs + , extraIncludeDirs + , extraLibDirs + , customPreprocessorExts + , overrideGccPath + , overrideHpack + , hpackForce + , concurrentTests + , localBinPath + , fileWatchHook + , templateParameters + , scmInit + , ghcOptionsByName + , ghcOptionsByCat + , cabalConfigOpts + , extraPath + , setupInfoLocations + , setupInfoInline + , localProgramsBase + , pvpBounds + , modifyCodePage + , rebuildGhcOptions + , applyGhcOptions + , applyProgOptions + , allowNewer + , allowNewerDeps + , defaultInitSnapshot + , defaultTemplate + , allowDifferentUser + , dumpLogs + , saveHackageCreds + , hackageBaseUrl + , colorWhen + , styles + , hideSourcePaths + , recommendStackUpgrade + , notifyIfNixOnPath + , notifyIfGhcUntested + , notifyIfCabalUntested + , notifyIfArchUnknown + , notifyIfNoRunTests + , notifyIfNoRunBenchmarks + , notifyIfBaseNotBoot + , casaOpts + , casaRepoPrefix + , snapshotLocation + , globalHintsLocation + , noRunCompile + , 
stackDeveloperMode + } + +configMonoidWorkDirName :: Text +configMonoidWorkDirName = "work-dir" + +configMonoidBuildOptsName :: Text +configMonoidBuildOptsName = "build" + +configMonoidDockerOptsName :: Text +configMonoidDockerOptsName = "docker" + +configMonoidNixOptsName :: Text +configMonoidNixOptsName = "nix" + +configMonoidConfigureOptionsName :: Text +configMonoidConfigureOptionsName = "configure-options" + +configMonoidConnectionCountName :: Text +configMonoidConnectionCountName = "connection-count" + +configMonoidHideTHLoadingName :: Text +configMonoidHideTHLoadingName = "hide-th-loading" + +configMonoidPrefixTimestampsName :: Text +configMonoidPrefixTimestampsName = "build-output-timestamps" + +configMonoidUrlsName :: Text +configMonoidUrlsName = "urls" + +configMonoidPackageIndexName :: Text +configMonoidPackageIndexName = "package-index" + +configMonoidSystemGHCName :: Text +configMonoidSystemGHCName = "system-ghc" + +configMonoidInstallGHCName :: Text +configMonoidInstallGHCName = "install-ghc" + +configMonoidInstallMsysName :: Text +configMonoidInstallMsysName = "install-msys" + +configMonoidSkipGHCCheckName :: Text +configMonoidSkipGHCCheckName = "skip-ghc-check" + +configMonoidSkipMsysName :: Text +configMonoidSkipMsysName = "skip-msys" + +configMonoidMsysEnvironmentName :: Text +configMonoidMsysEnvironmentName = "msys-environment" + +configMonoidRequireStackVersionName :: Text +configMonoidRequireStackVersionName = "require-stack-version" + +configMonoidArchName :: Text +configMonoidArchName = "arch" + +configMonoidGHCVariantName :: Text +configMonoidGHCVariantName = "ghc-variant" + +configMonoidGHCBuildName :: Text +configMonoidGHCBuildName = "ghc-build" + +configMonoidJobsName :: Text +configMonoidJobsName = "jobs" + +configMonoidExtraIncludeDirsName :: Text +configMonoidExtraIncludeDirsName = "extra-include-dirs" + +configMonoidExtraLibDirsName :: Text +configMonoidExtraLibDirsName = "extra-lib-dirs" + +configMonoidCustomPreprocessorExtsName :: 
Text +configMonoidCustomPreprocessorExtsName = "custom-preprocessor-extensions" + +configMonoidOverrideGccPathName :: Text +configMonoidOverrideGccPathName = "with-gcc" + +configMonoidOverrideHpackName :: Text +configMonoidOverrideHpackName = "with-hpack" + +configMonoidHpackForceName :: Text +configMonoidHpackForceName = "hpack-force" + +configMonoidConcurrentTestsName :: Text +configMonoidConcurrentTestsName = "concurrent-tests" + +configMonoidLocalBinPathName :: Text +configMonoidLocalBinPathName = "local-bin-path" + +configMonoidFileWatchHookName :: Text +configMonoidFileWatchHookName = "file-watch-hook" + +configMonoidScmInitName :: Text +configMonoidScmInitName = "scm-init" + +configMonoidTemplateParametersName :: Text +configMonoidTemplateParametersName = "params" + +configMonoidCompilerCheckName :: Text +configMonoidCompilerCheckName = "compiler-check" + +configMonoidCompilerRepositoryName :: Text +configMonoidCompilerRepositoryName = "compiler-repository" + +configMonoidCompilerTargetName :: Text +configMonoidCompilerTargetName = "compiler-target" + +configMonoidCompilerBindistPathName :: Text +configMonoidCompilerBindistPathName = "compiler-bindist-path" + +configMonoidGhcOptionsName :: Text +configMonoidGhcOptionsName = "ghc-options" + +configMonoidExtraPathName :: Text +configMonoidExtraPathName = "extra-path" + +configMonoidSetupInfoLocationsName :: Text +configMonoidSetupInfoLocationsName = "setup-info-locations" + +configMonoidSetupInfoInlineName :: Text +configMonoidSetupInfoInlineName = "setup-info" + +configMonoidLocalProgramsBaseName :: Text +configMonoidLocalProgramsBaseName = "local-programs-path" + +configMonoidPvpBoundsName :: Text +configMonoidPvpBoundsName = "pvp-bounds" + +configMonoidModifyCodePageName :: Text +configMonoidModifyCodePageName = "modify-code-page" + +configMonoidRebuildGhcOptionsName :: Text +configMonoidRebuildGhcOptionsName = "rebuild-ghc-options" + +configMonoidApplyGhcOptionsName :: Text +configMonoidApplyGhcOptionsName 
= "apply-ghc-options" + +configMonoidApplyProgOptionsName :: Text +configMonoidApplyProgOptionsName = "apply-prog-options" + +configMonoidAllowNewerName :: Text +configMonoidAllowNewerName = "allow-newer" + +configMonoidAllowNewerDepsName :: Text +configMonoidAllowNewerDepsName = "allow-newer-deps" + +configMonoidDefaultInitSnapshotName :: Text +configMonoidDefaultInitSnapshotName = "default-init-snapshot" + +configMonoidDefaultTemplateName :: Text +configMonoidDefaultTemplateName = "default-template" + +configMonoidAllowDifferentUserName :: Text +configMonoidAllowDifferentUserName = "allow-different-user" + +configMonoidDumpLogsName :: Text +configMonoidDumpLogsName = "dump-logs" + +configMonoidSaveHackageCredsName :: Text +configMonoidSaveHackageCredsName = "save-hackage-creds" + +configMonoidHackageBaseUrlName :: Text +configMonoidHackageBaseUrlName = "hackage-base-url" + +configMonoidColorWhenUSName :: Text +configMonoidColorWhenUSName = "color" + +configMonoidColorWhenGBName :: Text +configMonoidColorWhenGBName = "colour" + +configMonoidStylesUSName :: Text +configMonoidStylesUSName = "stack-colors" + +configMonoidStylesGBName :: Text +configMonoidStylesGBName = "stack-colours" + +configMonoidHideSourcePathsName :: Text +configMonoidHideSourcePathsName = "hide-source-paths" + +configMonoidRecommendStackUpgradeName :: Text +configMonoidRecommendStackUpgradeName = "recommend-stack-upgrade" + +configMonoidNotifyIfNixOnPathName :: Text +configMonoidNotifyIfNixOnPathName = "notify-if-nix-on-path" + +configMonoidNotifyIfGhcUntestedName :: Text +configMonoidNotifyIfGhcUntestedName = "notify-if-ghc-untested" + +configMonoidNotifyIfCabalUntestedName :: Text +configMonoidNotifyIfCabalUntestedName = "notify-if-cabal-untested" + +configMonoidNotifyIfArchUnknownName :: Text +configMonoidNotifyIfArchUnknownName = "notify-if-arch-unknown" + +configMonoidNotifyIfNoRunTestsName :: Text +configMonoidNotifyIfNoRunTestsName = "notify-if-no-run-tests" + 
+configMonoidNotifyIfNoRunBenchmarksName :: Text +configMonoidNotifyIfNoRunBenchmarksName = "notify-if-no-run-benchmarks" + +configMonoidNotifyIfBaseNotBootName :: Text +configMonoidNotifyIfBaseNotBootName = "notify-if-base-not-boot" + +configMonoidCasaOptsName :: Text +configMonoidCasaOptsName = "casa" + +configMonoidCasaRepoPrefixName :: Text +configMonoidCasaRepoPrefixName = "casa-repo-prefix" + +configMonoidSnapshotLocationName :: Text +configMonoidSnapshotLocationName = "snapshot-location-base" + +configMonoidGlobalHintsLocationName :: Text +configMonoidGlobalHintsLocationName = "global-hints-location" + +configMonoidNoRunCompileName :: Text +configMonoidNoRunCompileName = "script-no-run-compile" + +configMonoidStackDeveloperModeName :: Text +configMonoidStackDeveloperModeName = "stack-developer-mode" diff --git a/src/Stack/Types/ConfigSetOpts.hs b/src/Stack/Types/ConfigSetOpts.hs new file mode 100644 index 0000000000..702e2465a3 --- /dev/null +++ b/src/Stack/Types/ConfigSetOpts.hs @@ -0,0 +1,46 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.ConfigSetOpts +Description : Types related to Stack's @config set@ command. +License : BSD-3-Clause + +Types related to Stack's @config set@ command. +-} + +module Stack.Types.ConfigSetOpts + ( ConfigCmdSet (..) + , CommandScope (..) + , configCmdSetScope + ) where + +import Stack.Prelude +import Stack.Types.Snapshot ( AbstractSnapshot ) + +-- | Type representing options for Stack's @config set@ command. +data ConfigCmdSet + = ConfigCmdSetSnapshot !(Unresolved AbstractSnapshot) + | ConfigCmdSetResolver !(Unresolved AbstractSnapshot) + | ConfigCmdSetSystemGhc !CommandScope !Bool + | ConfigCmdSetInstallGhc !CommandScope !Bool + | ConfigCmdSetInstallMsys !CommandScope !Bool + | ConfigCmdSetRecommendStackUpgrade !CommandScope !Bool + | ConfigCmdSetDownloadPrefix !CommandScope !Text + +-- | Type representing scopes for Stack's @config set@ command. 
+data CommandScope + = CommandScopeGlobal + -- ^ Apply changes to Stack's global configuration file. + | CommandScopeProject + -- ^ Apply changes to Stack's project-level configuration file. + +-- | Yields the command scope for the given config command option. +configCmdSetScope :: ConfigCmdSet -> CommandScope +configCmdSetScope (ConfigCmdSetSnapshot _) = CommandScopeProject +configCmdSetScope (ConfigCmdSetResolver _) = CommandScopeProject +configCmdSetScope (ConfigCmdSetSystemGhc scope _) = scope +configCmdSetScope (ConfigCmdSetInstallGhc scope _) = scope +configCmdSetScope (ConfigCmdSetInstallMsys scope _) = scope +configCmdSetScope (ConfigCmdSetRecommendStackUpgrade scope _) = scope +configCmdSetScope (ConfigCmdSetDownloadPrefix scope _) = scope diff --git a/src/Stack/Types/ConfigureOpts.hs b/src/Stack/Types/ConfigureOpts.hs new file mode 100644 index 0000000000..84f1e88ef2 --- /dev/null +++ b/src/Stack/Types/ConfigureOpts.hs @@ -0,0 +1,53 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.ConfigureOpts +License : BSD-3-Clause +-} + +module Stack.Types.ConfigureOpts + ( ConfigureOpts (..) + , BaseConfigOpts (..) + , PackageConfigureOpts (..) + ) where + +import Stack.Prelude +import Stack.Types.BuildOpts ( BuildOpts (..) ) +import Stack.Types.BuildOptsCLI ( BuildOptsCLI ) + +-- | Basic information used to calculate what the configure options are +data BaseConfigOpts = BaseConfigOpts + { snapDB :: !(Path Abs Dir) + , localDB :: !(Path Abs Dir) + , snapInstallRoot :: !(Path Abs Dir) + , localInstallRoot :: !(Path Abs Dir) + , buildOpts :: !BuildOpts + , buildOptsCLI :: !BuildOptsCLI + , extraDBs :: ![Path Abs Dir] + } + deriving Show + +-- | All these fields come from the v'Package' data type but bringing the +-- whole t'Package' is way too much, hence this datatype. 
+data PackageConfigureOpts = PackageConfigureOpts + { pkgCabalConfigOpts :: [Text] + , pkgGhcOptions :: [Text] + , pkgFlags :: Map FlagName Bool + , pkgDefaultFlags :: Map FlagName Bool + , pkgIdentifier :: PackageIdentifier + } + deriving Show + +-- | Configure options to be sent to Setup.hs configure. +data ConfigureOpts = ConfigureOpts + { pathRelated :: ![String] + -- ^ Options related to various paths. We separate these out since they do + -- not have an effect on the contents of the compiled binary for checking + -- if we can use an existing precompiled cache. + , nonPathRelated :: ![String] + -- ^ Options other than path-related options. + } + deriving (Data, Eq, Generic, Show) + +instance NFData ConfigureOpts diff --git a/src/Stack/Types/Curator.hs b/src/Stack/Types/Curator.hs new file mode 100644 index 0000000000..2ff638355d --- /dev/null +++ b/src/Stack/Types/Curator.hs @@ -0,0 +1,67 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.Curator +License : BSD-3-Clause + +Module exporting the t'Curator' type, used to represent Stack's project-specific +@curator@ option, which supports the needs of the +[@curator@ tool](https://github.com/commercialhaskell/curator). +-} + +module Stack.Types.Curator + ( Curator (..) + ) where + +import Data.Aeson.Types ( FromJSON (..), ToJSON (..), (.=), object ) +import Data.Aeson.WarningParser + ( WithJSONWarnings (..), (..:?), (..!=), withObjectWarnings ) +import qualified Data.Set as Set +import Stack.Prelude + +-- | Type representing configuration options which support the needs of the +-- [@curator@ tool](https://github.com/commercialhaskell/curator). +data Curator = Curator + { skipTest :: !(Set PackageName) + -- ^ Packages for which Stack should ignore test suites. + , expectTestFailure :: !(Set PackageName) + -- ^ Packages for which Stack should expect building test suites to fail. 
+  , skipBenchmark :: !(Set PackageName)
+    -- ^ Packages for which Stack should ignore benchmarks.
+  , expectBenchmarkFailure :: !(Set PackageName)
+    -- ^ Packages for which Stack should expect building benchmarks to fail.
+  , skipHaddock :: !(Set PackageName)
+    -- ^ Packages for which Stack should ignore creating Haddock documentation.
+  , expectHaddockFailure :: !(Set PackageName)
+    -- ^ Packages for which Stack should expect creating Haddock documentation
+    --   to fail.
+  }
+  deriving Show
+
+instance ToJSON Curator where
+  toJSON curator = object
+    [ "skip-test" .= Set.map CabalString curator.skipTest
+    , "expect-test-failure" .= Set.map CabalString curator.expectTestFailure
+    , "skip-bench" .= Set.map CabalString curator.skipBenchmark
+    , "expect-benchmark-failure" .=
+        Set.map CabalString curator.expectBenchmarkFailure
+    , "skip-haddock" .= Set.map CabalString curator.skipHaddock
+    , "expect-haddock-failure" .=
+        Set.map CabalString curator.expectHaddockFailure
+    ]
+
+instance FromJSON (WithJSONWarnings Curator) where
+  parseJSON = withObjectWarnings "Curator" $ \o -> Curator
+    <$> fmap (Set.map unCabalString) (o ..:? "skip-test" ..!= mempty)
+    <*> fmap (Set.map unCabalString) (o ..:? "expect-test-failure" ..!= mempty)
+    <*> fmap (Set.map unCabalString) (o ..:? "skip-bench" ..!= mempty)
+    <*> fmap
+          (Set.map unCabalString)
+          (o ..:? "expect-benchmark-failure" ..!= mempty)
+    <*> fmap (Set.map unCabalString) (o ..:? "skip-haddock" ..!= mempty)
+    <*> fmap
+          (Set.map unCabalString)
+          (o ..:? "expect-haddock-failure" ..!= mempty)
diff --git a/src/Stack/Types/Dependency.hs b/src/Stack/Types/Dependency.hs
new file mode 100644
index 0000000000..c6ba2f3f20
--- /dev/null
+++ b/src/Stack/Types/Dependency.hs
@@ -0,0 +1,101 @@
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE NoFieldSelectors #-}
+{-# LANGUAGE OverloadedRecordDot #-}
+
+{-|
+Module : Stack.Types.Dependency
+License : BSD-3-Clause
+-}
+
+module Stack.Types.Dependency
+  ( DepValue (..)
+  , DepType (..)
+ , DepLibrary (..) + , cabalToStackDep + , cabalExeToStackDep + , cabalSetupDepsToStackDep + , libraryDepFromVersionRange + , isDepTypeLibrary + , getDepSublib + , depValueToTarget + ) where + +import qualified Data.Map as Map +import qualified Data.Set as Set +import qualified Distribution.PackageDescription as Cabal +import Distribution.Types.VersionRange ( VersionRange ) +import Stack.Prelude +import Stack.Types.ComponentUtils + ( StackUnqualCompName (..), fromCabalName ) +import Stack.Types.NamedComponent ( NamedComponent(..) ) +import Stack.Types.SourceMap ( PackageType (..), Target (..) ) + +-- | The value for a map from dependency name. This contains both the version +-- range and the type of dependency. +data DepValue = DepValue + { versionRange :: !VersionRange + , depType :: !DepType + } + deriving Show + +-- | Is this package being used as a library, or just as a build tool? If the +-- former, we need to ensure that a library actually exists. See +-- +data DepType + = AsLibrary !DepLibrary + | AsBuildTool + deriving (Eq, Show) + +data DepLibrary = DepLibrary + { main :: !Bool + , subLib :: Set StackUnqualCompName + } + deriving (Eq, Show) + +getDepSublib :: DepValue -> Maybe (Set StackUnqualCompName) +getDepSublib val = case val.depType of + AsLibrary libVal -> Just libVal.subLib + _ -> Nothing + +defaultDepLibrary :: DepLibrary +defaultDepLibrary = DepLibrary True mempty + +isDepTypeLibrary :: DepType -> Bool +isDepTypeLibrary AsLibrary{} = True +isDepTypeLibrary AsBuildTool = False + +cabalToStackDep :: Cabal.Dependency -> DepValue +cabalToStackDep (Cabal.Dependency _ verRange libNameSet) = + DepValue { versionRange = verRange, depType = AsLibrary depLibrary } + where + depLibrary = DepLibrary finalHasMain filteredItems + (finalHasMain, filteredItems) = foldr' iterator (False, mempty) libNameSet + iterator LMainLibName (_, newLibNameSet) = (True, newLibNameSet) + iterator (LSubLibName libName) (hasMain, newLibNameSet) = + (hasMain, Set.insert 
(fromCabalName libName) newLibNameSet) + +cabalExeToStackDep :: Cabal.ExeDependency -> DepValue +cabalExeToStackDep (Cabal.ExeDependency _ _name verRange) = + DepValue { versionRange = verRange, depType = AsBuildTool } + +cabalSetupDepsToStackDep :: Cabal.SetupBuildInfo -> Map PackageName DepValue +cabalSetupDepsToStackDep setupInfo = + foldr' inserter mempty (Cabal.setupDepends setupInfo) + where + inserter d@(Cabal.Dependency packageName _ _) = + Map.insert packageName (cabalToStackDep d) + +libraryDepFromVersionRange :: VersionRange -> DepValue +libraryDepFromVersionRange range = DepValue + { versionRange = range + , depType = AsLibrary defaultDepLibrary + } + +depValueToTarget :: DepValue -> Target +depValueToTarget dv = case dv.depType of + AsLibrary dlib -> TargetComps (completeSet dlib) + AsBuildTool -> TargetAll PTDependency + where + completeSet dlib = + (if dlib.main then Set.insert CLib else id) $ sublibSet dlib + sublibSet dlib = Set.mapMonotonic CSubLib dlib.subLib diff --git a/src/Stack/Types/DependencyTree.hs b/src/Stack/Types/DependencyTree.hs new file mode 100644 index 0000000000..8e79019433 --- /dev/null +++ b/src/Stack/Types/DependencyTree.hs @@ -0,0 +1,103 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.DependencyTree +License : BSD-3-Clause +-} + +module Stack.Types.DependencyTree + ( DependencyTree (..) + , DependencyGraph + , DotPayload (..) 
+ , licenseText
+ , versionText
+ ) where
+
+import Data.Aeson ( ToJSON (..), Value, (.=), object )
+import qualified Data.Set as Set
+import qualified Data.Map as Map
+import qualified Data.Text as Text
+import Distribution.License ( License (..), licenseFromSPDX )
+import qualified Distribution.SPDX.License as SPDX
+import Distribution.Text ( display )
+import Stack.Prelude hiding ( Display (..), pkgName, loadPackage )
+
+-- | Information about a package in the dependency graph, when available.
+data DotPayload = DotPayload
+ { version :: Maybe Version
+ -- ^ The package version.
+ , license :: Maybe (Either SPDX.License License)
+ -- ^ The license the package was released under.
+ , location :: Maybe PackageLocation
+ -- ^ The location of the package.
+ }
+ deriving (Eq, Show)
+
+-- | Type synonym representing dependency graphs.
+type DependencyGraph = Map PackageName (Set PackageName, DotPayload)
+
+-- | Type representing dependency trees.
+data DependencyTree =
+ DependencyTree (Set PackageName) DependencyGraph
+
+instance ToJSON DependencyTree where
+ toJSON (DependencyTree _ dependencyMap) =
+ toJSON $ foldToList dependencyToJSON dependencyMap
+
+foldToList :: (k -> a -> b) -> Map k a -> [b]
+foldToList f = Map.foldrWithKey (\k a bs -> bs ++ [f k a]) []
+
+dependencyToJSON :: PackageName -> (Set PackageName, DotPayload) -> Value
+dependencyToJSON pkg (deps, payload) =
+ let fieldsAlwaysPresent = [ "name" .= packageNameString pkg
+ , "license" .= licenseText payload
+ , "version" .= versionText payload
+ , "dependencies" .= Set.map packageNameString deps
+ ]
+ loc = catMaybes
+ [("location" .=) . 
pkgLocToJSON <$> payload.location] + in object $ fieldsAlwaysPresent ++ loc + +pkgLocToJSON :: PackageLocation -> Value +pkgLocToJSON (PLMutable (ResolvedPath _ dir)) = object + [ "type" .= ("project package" :: Text) + , "url" .= ("file://" ++ toFilePath dir) + ] +pkgLocToJSON (PLImmutable (PLIHackage pkgid _ _)) = object + [ "type" .= ("hackage" :: Text) + , "url" .= ("https://hackage.haskell.org/package/" ++ display pkgid) + ] +pkgLocToJSON (PLImmutable (PLIArchive archive _)) = + let url = case archiveLocation archive of + ALUrl u -> u + ALFilePath (ResolvedPath _ path) -> + Text.pack $ "file://" ++ toFilePath path + in object + [ "type" .= ("archive" :: Text) + , "url" .= url + , "sha256" .= archiveHash archive + , "size" .= archiveSize archive + ] +pkgLocToJSON (PLImmutable (PLIRepo repo _)) = object + [ "type" .= case repoType repo of + RepoGit -> "git" :: Text + RepoHg -> "hg" :: Text + , "url" .= repoUrl repo + , "commit" .= repoCommit repo + , "subdir" .= repoSubdir repo + ] + +-- | For the given dot payload, yield a text representation of the name of the +-- licence. +licenseText :: DotPayload -> Text +licenseText payload = + maybe "" (Text.pack . display . either licenseFromSPDX id) + payload.license + +-- | For the given dot payload, yield a text representation of the version. +versionText :: DotPayload -> Text +versionText payload = + maybe "" (Text.pack . display) payload.version diff --git a/src/Stack/Types/Docker.hs b/src/Stack/Types/Docker.hs index 5c55f436af..86d9e1581c 100644 --- a/src/Stack/Types/Docker.hs +++ b/src/Stack/Types/Docker.hs @@ -1,139 +1,389 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} - --- | Docker types. 
- -module Stack.Types.Docker where - -import Stack.Prelude hiding (Display (..)) -import Pantry.Internal.AesonExtended -import Data.List (intercalate) +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.Docker +Description : Docker types. +License : BSD-3-Clause + +Docker types. +-} + +module Stack.Types.Docker + ( DockerException (..) + , DockerMonoidRepoOrImage (..) + , DockerOpts (..) + , DockerOptsMonoid (..) + , DockerStackExe (..) + , Mount (..) + , VersionRangeJSON (..) + , dockerAutoPullArgName + , dockerCmdName + , dockerContainerNameArgName + , dockerContainerPlatform + , dockerDetachArgName + , dockerEnableArgName + , dockerEntrypointArgName + , dockerEnvArgName + , dockerHelpOptName + , dockerImageArgName + , dockerMountArgName + , dockerMountModeArgName + , dockerNetworkArgName + , dockerPersistArgName + , dockerPullCmdName + , dockerRegistryLoginArgName + , dockerRegistryPasswordArgName + , dockerRegistryUsernameArgName + , dockerRepoArgName + , dockerRequireDockerVersionArgName + , dockerRunArgsArgName + , dockerSetUserArgName + , dockerStackExeArgName + , dockerStackExeDownloadVal + , dockerStackExeHostVal + , dockerStackExeImageVal + , parseDockerStackExe + , reExecArgName + ) where + +import Data.Aeson.Types ( FromJSON (..), withText ) +import Data.Aeson.WarningParser + ( WithJSONWarnings, (..:), (..:?), (..!=), withObjectWarnings + ) +import Data.List ( intercalate ) import qualified Data.Text as T -import Distribution.System (Platform(..), OS(..), Arch(..)) -import Distribution.Text (simpleParse, display) -import Distribution.Version (anyVersion) -import Generics.Deriving.Monoid (mappenddefault, memptydefault) -import Path -import Stack.Types.Version -import Text.Read (Read (..)) +import Distribution.System ( Arch (..), OS (..), Platform (..) 
) +import Distribution.Text ( display, simpleParse ) +import Distribution.Version ( anyVersion ) +import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) +import Path ( parseAbsFile ) +import Stack.Constants ( stackProgName ) +import Stack.Prelude hiding ( Display (..) ) +import Stack.Types.Version + ( IntersectingVersionRange (..), VersionRange + , versionRangeText + ) +import Text.Read ( Read (..) ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Docker" module. +data DockerException + = DockerMustBeEnabledException + -- ^ Docker must be enabled to use the command. + | OnlyOnHostException + -- ^ Command must be run on host OS (not in a container). + | InspectFailedException String + -- ^ @docker inspect@ failed. + | NotPulledException String + -- ^ Image does not exist. + | InvalidImagesOutputException String + -- ^ Invalid output from @docker images@. + | InvalidPSOutputException String + -- ^ Invalid output from @docker ps@. + | InvalidInspectOutputException String + -- ^ Invalid output from @docker inspect@. + | PullFailedException String + -- ^ Could not pull a Docker image. + | DockerTooOldException Version Version + -- ^ Installed version of @docker@ below minimum version. + | DockerVersionProhibitedException [Version] Version + -- ^ Installed version of @docker@ is prohibited. + | BadDockerVersionException VersionRange Version + -- ^ Installed version of @docker@ is out of range specified in config file. + | InvalidVersionOutputException + -- ^ Invalid output from @docker --version@. + | HostStackTooOldException Version (Maybe Version) + -- ^ Version of @stack@ on host is too old for version in image. + | ContainerStackTooOldException Version Version + -- ^ Version of @stack@ in container/image is too old for version on host. + | CannotDetermineProjectRootException + -- ^ Can't determine the project root (where to put docker sandbox). + | DockerNotInstalledException + -- ^ @docker --version@ failed. 
+ | UnsupportedStackExeHostPlatformException + -- ^ Using host stack-exe on unsupported platform. + | DockerStackExeParseException String + -- ^ @stack-exe@ option fails to parse. + deriving Show + +instance Exception DockerException where + displayException DockerMustBeEnabledException = + "Error: [S-3223]\n" + ++ "Docker must be enabled in your configuration file to use this \ + \command." + displayException OnlyOnHostException = + "Error: [S-9779]\n" + ++ "This command must be run on host OS (not in a Docker container)." + displayException (InspectFailedException image) = concat + [ "Error: [S-9105]\n" + , "'docker inspect' failed for image after pull: " + , image + , "." + ] + displayException (NotPulledException image) = concat + [ "Error: [S-6626]\n" + , "The Docker image referenced by your configuration file" + , " has not\nbeen downloaded:\n " + , image + , "\n\nRun '" + , unwords [stackProgName, dockerCmdName, dockerPullCmdName] + , "' to download it, then try again." + ] + displayException (InvalidImagesOutputException l) = concat + [ "Error: [S-5841]\n" + , "Invalid 'docker images' output line: '" + , l + , "'." + ] + displayException (InvalidPSOutputException l) = concat + [ "Error: [S-9608]\n" + , "Invalid 'docker ps' output line: '" + , l + ,"'." + ] + displayException (InvalidInspectOutputException msg) = concat + [ "Error: [S-2240]\n" + , "Invalid 'docker inspect' output: " + , msg + , "." + ] + displayException (PullFailedException image) = concat + [ "Error: [S-6092]\n" + , "Could not pull Docker image:\n " + , image + , "\nThere may not be an image on the registry for your snapshot's LTS \ + \version in\n" + , "your configuration file." + ] + displayException (DockerTooOldException minVersion haveVersion) = concat + [ "Error: [S-6281]\n" + , "Minimum docker version '" + , versionString minVersion + , "' is required by " + , stackProgName + , " (you have '" + , versionString haveVersion + , "')." 
+ ] + displayException (DockerVersionProhibitedException prohibitedVersions haveVersion) = concat + [ "Error: [S-8252]\n" + , "These Docker versions are incompatible with " + , stackProgName + , " (you have '" + , versionString haveVersion + , "'): " + , intercalate ", " (map versionString prohibitedVersions) + , "." + ] + displayException (BadDockerVersionException requiredRange haveVersion) = concat + [ "Error: [S-6170]\n" + , "The version of 'docker' you are using (" + , show haveVersion + , ") is outside the required\n" + , "version range specified in stack.yaml (" + , T.unpack (versionRangeText requiredRange) + , ")." + ] + displayException InvalidVersionOutputException = + "Error: [S-5827]\n" + ++ "Cannot get Docker version (invalid 'docker --version' output)." + displayException (HostStackTooOldException minVersion (Just hostVersion)) = concat + [ "Error: [S-7112]\n" + , "The host's version of '" + , stackProgName + , "' is too old for this Docker image.\nVersion " + , versionString minVersion + , " is required; you have " + , versionString hostVersion + , "." + ] + displayException (HostStackTooOldException minVersion Nothing) = concat + [ "Error: [S-7112]\n" + , "The host's version of '" + , stackProgName + , "' is too old.\nVersion " + , versionString minVersion + , " is required." + ] + displayException (ContainerStackTooOldException requiredVersion containerVersion) = concat + [ "Error: [S-5832]\n" + , "The Docker container's version of '" + , stackProgName + , "' is too old.\nVersion " + , versionString requiredVersion + , " is required; the container has " + , versionString containerVersion + , "." + ] + displayException CannotDetermineProjectRootException = + "Error: [S-4078]\n" + ++ "Cannot determine project root directory for Docker sandbox." + displayException DockerNotInstalledException = + "Error: [S-7058]\n" + ++ "Cannot find 'docker' in PATH. Is Docker installed?" 
+ displayException UnsupportedStackExeHostPlatformException = concat + [ "Error: [S-6894]\n" + , "Using host's " + , stackProgName + , " executable in Docker container is only supported on " + , display dockerContainerPlatform + , " platform." + ] + displayException (DockerStackExeParseException s) = concat + [ "Error: [S-1512]\n" + , "Failed to parse " + , show s + , ". Expected " + , show dockerStackExeDownloadVal + , ", " + , show dockerStackExeHostVal + , ", " + , show dockerStackExeImageVal + , " or absolute path to executable." + ] -- | Docker configuration. data DockerOpts = DockerOpts - {dockerEnable :: !Bool - -- ^ Is using Docker enabled? - ,dockerImage :: !(Either SomeException String) - -- ^ Exact Docker image tag or ID. Overrides docker-repo-*/tag. - ,dockerRegistryLogin :: !Bool - -- ^ Does registry require login for pulls? - ,dockerRegistryUsername :: !(Maybe String) - -- ^ Optional username for Docker registry. - ,dockerRegistryPassword :: !(Maybe String) - -- ^ Optional password for Docker registry. - ,dockerAutoPull :: !Bool - -- ^ Automatically pull new images. - ,dockerDetach :: !Bool - -- ^ Whether to run a detached container - ,dockerPersist :: !Bool - -- ^ Create a persistent container (don't remove it when finished). Implied by - -- `dockerDetach`. - ,dockerContainerName :: !(Maybe String) - -- ^ Container name to use, only makes sense from command-line with `dockerPersist` - -- or `dockerDetach`. - ,dockerNetwork :: !(Maybe String) - -- ^ The network docker uses. - ,dockerRunArgs :: ![String] - -- ^ Arguments to pass directly to @docker run@. - ,dockerMount :: ![Mount] - -- ^ Volumes to mount in the container. - ,dockerMountMode :: !(Maybe String) - -- ^ Volume mount mode - ,dockerEnv :: ![String] - -- ^ Environment variables to set in the container. 
- ,dockerStackExe :: !(Maybe DockerStackExe) - -- ^ Location of container-compatible stack executable - ,dockerSetUser :: !(Maybe Bool) - -- ^ Set in-container user to match host's - ,dockerRequireDockerVersion :: !VersionRange - -- ^ Require a version of Docker within this range. + { enable :: !Bool + -- ^ Is using Docker enabled? + , image :: !(Either SomeException String) + -- ^ Exact Docker image tag or ID. Overrides docker-repo-*/tag. + , registryLogin :: !Bool + -- ^ Does registry require login for pulls? + , registryUsername :: !(Maybe String) + -- ^ Optional username for Docker registry. + , registryPassword :: !(Maybe String) + -- ^ Optional password for Docker registry. + , autoPull :: !Bool + -- ^ Automatically pull new images. + , detach :: !Bool + -- ^ Whether to run a detached container + , persist :: !Bool + -- ^ Create a persistent container (don't remove it when finished). Implied + -- by 'DockerOpts.detach'. + , containerName :: !(Maybe String) + -- ^ Container name to use, only makes sense from command-line with + -- 'DockerOpts.persist' or 'DockerOpts.detach'. + , network :: !(Maybe String) + -- ^ The network docker uses. + , runArgs :: ![String] + -- ^ Arguments to pass directly to @docker run@. + , mount :: ![Mount] + -- ^ Volumes to mount in the container. + , mountMode :: !(Maybe String) + -- ^ Volume mount mode + , env :: ![String] + -- ^ Environment variables to set in the container. + , stackExe :: !(Maybe DockerStackExe) + -- ^ Location of container-compatible Stack executable + , setUser :: !(Maybe Bool) + -- ^ Set in-container user to match host's + , requireDockerVersion :: !VersionRange + -- ^ Require a version of Docker within this range. } - deriving (Show) + deriving Show --- | An uninterpreted representation of docker options. --- Configurations may be "cascaded" using mappend (left-biased). +-- | An uninterpreted representation of docker options. Configurations may be +-- "cascaded" using mappend (left-biased). 
data DockerOptsMonoid = DockerOptsMonoid - {dockerMonoidDefaultEnable :: !Any - -- ^ Should Docker be defaulted to enabled (does @docker:@ section exist in the config)? - ,dockerMonoidEnable :: !(First Bool) + { defaultEnable :: !Any + -- ^ Should Docker be defaulted to enabled (does @docker:@ section exist in + -- the config)? + , enable :: !(First Bool) -- ^ Is using Docker enabled? - ,dockerMonoidRepoOrImage :: !(First DockerMonoidRepoOrImage) - -- ^ Docker repository name (e.g. @fpco/stack-build@ or @fpco/stack-full:lts-2.8@) - ,dockerMonoidRegistryLogin :: !(First Bool) + , repoOrImage :: !(First DockerMonoidRepoOrImage) + -- ^ Docker repository name (e.g. @fpco/stack-build@ or + -- @fpco/stack-full:lts-2.8@) + , registryLogin :: !(First Bool) -- ^ Does registry require login for pulls? - ,dockerMonoidRegistryUsername :: !(First String) + , registryUsername :: !(First String) -- ^ Optional username for Docker registry. - ,dockerMonoidRegistryPassword :: !(First String) + , registryPassword :: !(First String) -- ^ Optional password for Docker registry. - ,dockerMonoidAutoPull :: !FirstTrue + , autoPull :: !FirstTrue -- ^ Automatically pull new images. - ,dockerMonoidDetach :: !FirstFalse + , detach :: !FirstFalse -- ^ Whether to run a detached container - ,dockerMonoidPersist :: !FirstFalse - -- ^ Create a persistent container (don't remove it when finished). Implied by - -- `dockerDetach`. - ,dockerMonoidContainerName :: !(First String) - -- ^ Container name to use, only makes sense from command-line with `dockerPersist` - -- or `dockerDetach`. - ,dockerMonoidNetwork :: !(First String) - -- ^ See: 'dockerNetwork' - ,dockerMonoidRunArgs :: ![String] + , persist :: !FirstFalse + -- ^ Create a persistent container (don't remove it when finished). Implied + -- by 'DockerOpts.detach'. + , containerName :: !(First String) + -- ^ Container name to use, only makes sense from command-line with + -- 'DockerOpts.persist' or 'DockerOpts.detach'. 
+ , network :: !(First String) + -- ^ See: 'DockerOpts.network' + , runArgs :: ![String] -- ^ Arguments to pass directly to @docker run@ - ,dockerMonoidMount :: ![Mount] + , mount :: ![Mount] -- ^ Volumes to mount in the container - ,dockerMonoidMountMode :: !(First String) + , mountMode :: !(First String) -- ^ Volume mount mode - ,dockerMonoidEnv :: ![String] + , env :: ![String] -- ^ Environment variables to set in the container - ,dockerMonoidStackExe :: !(First DockerStackExe) - -- ^ Location of container-compatible stack executable - ,dockerMonoidSetUser :: !(First Bool) - -- ^ Set in-container user to match host's - ,dockerMonoidRequireDockerVersion :: !IntersectingVersionRange - -- ^ See: 'dockerRequireDockerVersion' + , stackExe :: !(First DockerStackExe) + -- ^ Location of container-compatible Stack executable + , setUser :: !(First Bool) + -- ^ Set in-container user to match host's + , requireDockerVersion :: !IntersectingVersionRange + -- ^ See: 'DockerOpts.requireDockerVersion' } deriving (Show, Generic) -- | Decode uninterpreted docker options from JSON/YAML. instance FromJSON (WithJSONWarnings DockerOptsMonoid) where - parseJSON = withObjectWarnings "DockerOptsMonoid" - (\o -> do let dockerMonoidDefaultEnable = Any True - dockerMonoidEnable <- First <$> o ..:? dockerEnableArgName - dockerMonoidRepoOrImage <- First <$> - ((Just . DockerMonoidImage <$> o ..: dockerImageArgName) <|> - (Just . DockerMonoidRepo <$> o ..: dockerRepoArgName) <|> - pure Nothing) - dockerMonoidRegistryLogin <- First <$> o ..:? dockerRegistryLoginArgName - dockerMonoidRegistryUsername <- First <$> o ..:? dockerRegistryUsernameArgName - dockerMonoidRegistryPassword <- First <$> o ..:? dockerRegistryPasswordArgName - dockerMonoidAutoPull <- FirstTrue <$> o ..:? dockerAutoPullArgName - dockerMonoidDetach <- FirstFalse <$> o ..:? dockerDetachArgName - dockerMonoidPersist <- FirstFalse <$> o ..:? dockerPersistArgName - dockerMonoidContainerName <- First <$> o ..:? 
dockerContainerNameArgName - dockerMonoidNetwork <- First <$> o ..:? dockerNetworkArgName - dockerMonoidRunArgs <- o ..:? dockerRunArgsArgName ..!= [] - dockerMonoidMount <- o ..:? dockerMountArgName ..!= [] - dockerMonoidMountMode <- First <$> o ..:? dockerMountModeArgName - dockerMonoidEnv <- o ..:? dockerEnvArgName ..!= [] - dockerMonoidStackExe <- First <$> o ..:? dockerStackExeArgName - dockerMonoidSetUser <- First <$> o ..:? dockerSetUserArgName - dockerMonoidRequireDockerVersion - <- IntersectingVersionRange . unVersionRangeJSON <$> ( - o ..:? dockerRequireDockerVersionArgName - ..!= VersionRangeJSON anyVersion) - return DockerOptsMonoid{..}) + parseJSON = withObjectWarnings "DockerOptsMonoid" $ \o -> do + let defaultEnable = Any True + enable <- First <$> o ..:? dockerEnableArgName + repoOrImage <- First <$> + ( (Just . DockerMonoidImage <$> o ..: dockerImageArgName) + <|> (Just . DockerMonoidRepo <$> o ..: dockerRepoArgName) + <|> pure Nothing + ) + registryLogin <- First <$> o ..:? dockerRegistryLoginArgName + registryUsername <- + First <$> o ..:? dockerRegistryUsernameArgName + registryPassword <- + First <$> o ..:? dockerRegistryPasswordArgName + autoPull <- FirstTrue <$> o ..:? dockerAutoPullArgName + detach <- FirstFalse <$> o ..:? dockerDetachArgName + persist <- FirstFalse <$> o ..:? dockerPersistArgName + containerName <- First <$> o ..:? dockerContainerNameArgName + network <- First <$> o ..:? dockerNetworkArgName + runArgs <- o ..:? dockerRunArgsArgName ..!= [] + mount <- o ..:? dockerMountArgName ..!= [] + mountMode <- First <$> o ..:? dockerMountModeArgName + env <- o ..:? dockerEnvArgName ..!= [] + stackExe <- First <$> o ..:? dockerStackExeArgName + setUser <- First <$> o ..:? dockerSetUserArgName + requireDockerVersion <- + IntersectingVersionRange . (.versionRangeJSON) <$> + ( o ..:? 
dockerRequireDockerVersionArgName + ..!= VersionRangeJSON anyVersion + ) + pure DockerOptsMonoid + { defaultEnable + , enable + , repoOrImage + , registryLogin + , registryUsername + , registryPassword + , autoPull + , detach + , persist + , containerName + , network + , runArgs + , mount + , mountMode + , env + , stackExe + , setUser + , requireDockerVersion + } -- | Left-biased combine Docker options instance Semigroup DockerOptsMonoid where @@ -144,30 +394,30 @@ instance Monoid DockerOptsMonoid where mempty = memptydefault mappend = (<>) --- | Where to get the `stack` executable to run in Docker containers +-- | Where to get the \'stack\' executable to run in Docker containers data DockerStackExe - = DockerStackExeDownload -- ^ Download from official bindist - | DockerStackExeHost -- ^ Host's `stack` (linux-x86_64 only) - | DockerStackExeImage -- ^ Docker image's `stack` (versions must match) - | DockerStackExePath (Path Abs File) -- ^ Executable at given path - deriving (Show) + = DockerStackExeDownload -- ^ Download from official bindist + | DockerStackExeHost -- ^ Host's \'stack\' (linux-x86_64 only) + | DockerStackExeImage -- ^ Docker image's \'stack\' (versions must match) + | DockerStackExePath (Path Abs File) -- ^ Executable at given path + deriving Show instance FromJSON DockerStackExe where - parseJSON a = do - s <- parseJSON a - case parseDockerStackExe s of - Right dse -> return dse - Left e -> fail (show e) + parseJSON a = do + s <- parseJSON a + case parseDockerStackExe s of + Right dse -> pure dse + Left e -> fail (displayException e) -- | Parse 'DockerStackExe'. 
parseDockerStackExe :: (MonadThrow m) => String -> m DockerStackExe parseDockerStackExe t - | t == dockerStackExeDownloadVal = return DockerStackExeDownload - | t == dockerStackExeHostVal = return DockerStackExeHost - | t == dockerStackExeImageVal = return DockerStackExeImage - | otherwise = case parseAbsFile t of - Just p -> return (DockerStackExePath p) - Nothing -> throwM (DockerStackExeParseException t) + | t == dockerStackExeDownloadVal = pure DockerStackExeDownload + | t == dockerStackExeHostVal = pure DockerStackExeHost + | t == dockerStackExeImageVal = pure DockerStackExeImage + | otherwise = case parseAbsFile t of + Just p -> pure (DockerStackExePath p) + Nothing -> throwM (DockerStackExeParseException t) -- | Docker volume mount. data Mount = Mount String String @@ -176,15 +426,13 @@ data Mount = Mount String String instance Read Mount where readsPrec _ s = case break (== ':') s of - (a,':':b) -> [(Mount a b,"")] - (a,[]) -> [(Mount a a,"")] + (a, ':':b) -> [(Mount a b, "")] + (a, []) -> [(Mount a a, "")] _ -> fail "Invalid value for Docker mount (expect '/host/path:/container/path')" -- | Show instance. instance Show Mount where - show (Mount a b) = if a == b - then a - else concat [a,":",b] + show (Mount a b) = if a == b then a else concat [a, ":", b] -- | For YAML. instance FromJSON Mount where @@ -192,158 +440,24 @@ instance FromJSON Mount where s <- parseJSON v case readMaybe s of Nothing -> fail $ "Mount read failed: " ++ s - Just x -> return x + Just x -> pure x -- | Options for Docker repository or image. data DockerMonoidRepoOrImage = DockerMonoidRepo String | DockerMonoidImage String - deriving (Show) + deriving Show -- | Newtype for non-orphan FromJSON instance. -newtype VersionRangeJSON = VersionRangeJSON { unVersionRangeJSON :: VersionRange } +newtype VersionRangeJSON = + VersionRangeJSON { versionRangeJSON :: VersionRange } -- | Parse VersionRange. 
instance FromJSON VersionRangeJSON where parseJSON = withText "VersionRange" - (\s -> maybe (fail ("Invalid cabal-style VersionRange: " ++ T.unpack s)) - (return . VersionRangeJSON) - (Distribution.Text.simpleParse (T.unpack s))) - --- | Exceptions thrown by Stack.Docker. -data StackDockerException - = DockerMustBeEnabledException - -- ^ Docker must be enabled to use the command. - | OnlyOnHostException - -- ^ Command must be run on host OS (not in a container). - | InspectFailedException String - -- ^ @docker inspect@ failed. - | NotPulledException String - -- ^ Image does not exist. - | InvalidImagesOutputException String - -- ^ Invalid output from @docker images@. - | InvalidPSOutputException String - -- ^ Invalid output from @docker ps@. - | InvalidInspectOutputException String - -- ^ Invalid output from @docker inspect@. - | PullFailedException String - -- ^ Could not pull a Docker image. - | DockerTooOldException Version Version - -- ^ Installed version of @docker@ below minimum version. - | DockerVersionProhibitedException [Version] Version - -- ^ Installed version of @docker@ is prohibited. - | BadDockerVersionException VersionRange Version - -- ^ Installed version of @docker@ is out of range specified in config file. - | InvalidVersionOutputException - -- ^ Invalid output from @docker --version@. - | HostStackTooOldException Version (Maybe Version) - -- ^ Version of @stack@ on host is too old for version in image. - | ContainerStackTooOldException Version Version - -- ^ Version of @stack@ in container/image is too old for version on host. - | CannotDetermineProjectRootException - -- ^ Can't determine the project root (where to put docker sandbox). - | DockerNotInstalledException - -- ^ @docker --version@ failed. - | UnsupportedStackExeHostPlatformException - -- ^ Using host stack-exe on unsupported platform. - | DockerStackExeParseException String - -- ^ @stack-exe@ option fails to parse. 
- deriving (Typeable) -instance Exception StackDockerException - -instance Show StackDockerException where - show DockerMustBeEnabledException = - "Docker must be enabled in your configuration file to use this command." - show OnlyOnHostException = - "This command must be run on host OS (not in a Docker container)." - show (InspectFailedException image) = - concat ["'docker inspect' failed for image after pull: ",image,"."] - show (NotPulledException image) = - concat ["The Docker image referenced by your configuration file" - ," has not\nbeen downloaded:\n " - ,image - ,"\n\nRun '" - ,unwords [stackProgName, dockerCmdName, dockerPullCmdName] - ,"' to download it, then try again."] - show (InvalidImagesOutputException line) = - concat ["Invalid 'docker images' output line: '",line,"'."] - show (InvalidPSOutputException line) = - concat ["Invalid 'docker ps' output line: '",line,"'."] - show (InvalidInspectOutputException msg) = - concat ["Invalid 'docker inspect' output: ",msg,"."] - show (PullFailedException image) = - concat ["Could not pull Docker image:\n " - ,image - ,"\nThere may not be an image on the registry for your resolver's LTS version in\n" - ,"your configuration file."] - show (DockerTooOldException minVersion haveVersion) = - concat ["Minimum docker version '" - ,versionString minVersion - ,"' is required by " - ,stackProgName - ," (you have '" - ,versionString haveVersion - ,"')."] - show (DockerVersionProhibitedException prohibitedVersions haveVersion) = - concat ["These Docker versions are incompatible with " - ,stackProgName - ," (you have '" - ,versionString haveVersion - ,"'): " - ,intercalate ", " (map versionString prohibitedVersions) - ,"."] - show (BadDockerVersionException requiredRange haveVersion) = - concat ["The version of 'docker' you are using (" - ,show haveVersion - ,") is outside the required\n" - ,"version range specified in stack.yaml (" - ,T.unpack (versionRangeText requiredRange) - ,")."] - show InvalidVersionOutputException 
= - "Cannot get Docker version (invalid 'docker --version' output)." - show (HostStackTooOldException minVersion (Just hostVersion)) = - concat ["The host's version of '" - ,stackProgName - ,"' is too old for this Docker image.\nVersion " - ,versionString minVersion - ," is required; you have " - ,versionString hostVersion - ,"."] - show (HostStackTooOldException minVersion Nothing) = - concat ["The host's version of '" - ,stackProgName - ,"' is too old.\nVersion " - ,versionString minVersion - ," is required."] - show (ContainerStackTooOldException requiredVersion containerVersion) = - concat ["The Docker container's version of '" - ,stackProgName - ,"' is too old.\nVersion " - ,versionString requiredVersion - ," is required; the container has " - ,versionString containerVersion - ,"."] - show CannotDetermineProjectRootException = - "Cannot determine project root directory for Docker sandbox." - show DockerNotInstalledException = - "Cannot find 'docker' in PATH. Is Docker installed?" - show UnsupportedStackExeHostPlatformException = concat - [ "Using host's " - , stackProgName - , " executable in Docker container is only supported on " - , display dockerContainerPlatform - , " platform" ] - show (DockerStackExeParseException s) = concat - [ "Failed to parse " - , show s - , ". Expected " - , show dockerStackExeDownloadVal - , ", " - , show dockerStackExeHostVal - , ", " - , show dockerStackExeImageVal - , " or absolute path to executable." - ] + (\s -> maybe (fail ("Invalid cabal-style VersionRange: " ++ T.unpack s)) + (pure . VersionRangeJSON) + (Distribution.Text.simpleParse (T.unpack s))) -- | Docker enable argument name. dockerEnableArgName :: Text @@ -405,7 +519,7 @@ dockerNetworkArgName = "network" dockerPersistArgName :: Text dockerPersistArgName = "persist" --- | Docker stack executable argument name. +-- | Docker Stack executable argument name. 
dockerStackExeArgName :: Text
 dockerStackExeArgName = "stack-exe"
@@ -437,6 +551,7 @@ dockerEntrypointArgName = "internal-docker-entrypoint"
 dockerCmdName :: String
 dockerCmdName = "docker"
 
+-- | Command-line option to show only @--docker-*@ options.
 dockerHelpOptName :: String
 dockerHelpOptName = dockerCmdName ++ "-help"
diff --git a/src/Stack/Types/DockerEntrypoint.hs b/src/Stack/Types/DockerEntrypoint.hs
new file mode 100644
index 0000000000..4d5ad69f1c
--- /dev/null
+++ b/src/Stack/Types/DockerEntrypoint.hs
@@ -0,0 +1,32 @@
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE NoFieldSelectors #-}
+
+{-|
+Module : Stack.Types.DockerEntrypoint
+License : BSD-3-Clause
+-}
+
+module Stack.Types.DockerEntrypoint
+ ( DockerEntrypoint (..)
+ , DockerUser (..)
+ ) where
+
+import Stack.Prelude
+import System.PosixCompat.Types ( FileMode, GroupID, UserID )
+
+-- | Data passed into Docker container for the Docker entrypoint's use
+newtype DockerEntrypoint = DockerEntrypoint
+ { user :: Maybe DockerUser
+ -- ^ UID/GID/etc of host user, if we wish to perform UID/GID switch in
+ -- container
+ }
+ deriving (Read, Show)
+
+-- | Docker host user info
+data DockerUser = DockerUser
+ { uid :: UserID -- ^ uid
+ , gid :: GroupID -- ^ gid
+ , groups :: [GroupID] -- ^ Supplemental groups
+ , umask :: FileMode -- ^ File creation mask
+ }
+ deriving (Read, Show)
diff --git a/src/Stack/Types/DotConfig.hs b/src/Stack/Types/DotConfig.hs
new file mode 100644
index 0000000000..c624d8a9bb
--- /dev/null
+++ b/src/Stack/Types/DotConfig.hs
@@ -0,0 +1,71 @@
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+{-# LANGUAGE NoFieldSelectors #-}
+{-# LANGUAGE OverloadedRecordDot #-}
+
+{-|
+Module : Stack.Types.DotConfig
+License : BSD-3-Clause
+-}
+
+module Stack.Types.DotConfig
+ ( DotConfig (..)
+ ) where
+
+import RIO.Process ( HasProcessContext (..) 
) +import Stack.Prelude hiding ( Display (..), pkgName, loadPackage ) +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..) ) +import Stack.Types.Config ( HasConfig (..) ) +import Stack.Types.DumpPackage ( DumpPackage (..) ) +import Stack.Types.EnvConfig ( HasSourceMap (..) ) +import Stack.Types.GHCVariant ( HasGHCVariant (..) ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.Runner ( HasRunner (..) ) +import Stack.Types.SourceMap ( SourceMap (..) ) + +-- | Type representing configurations for the creation of a dependency graph. +data DotConfig = DotConfig + { buildConfig :: !BuildConfig + , sourceMap :: !SourceMap + , globalDump :: ![DumpPackage] + } + +instance HasLogFunc DotConfig where + logFuncL = runnerL . logFuncL + +instance HasPantryConfig DotConfig where + pantryConfigL = configL . pantryConfigL + +instance HasTerm DotConfig where + useColorL = runnerL . useColorL + termWidthL = runnerL . termWidthL + +instance HasStylesUpdate DotConfig where + stylesUpdateL = runnerL . stylesUpdateL + +instance HasGHCVariant DotConfig where + ghcVariantL = configL . ghcVariantL + {-# INLINE ghcVariantL #-} + +instance HasPlatform DotConfig where + platformL = configL . platformL + {-# INLINE platformL #-} + platformVariantL = configL . platformVariantL + {-# INLINE platformVariantL #-} + +instance HasRunner DotConfig where + runnerL = configL . runnerL + +instance HasProcessContext DotConfig where + processContextL = runnerL . processContextL + +instance HasConfig DotConfig where + configL = buildConfigL . 
lens (.config) (\x y -> x { config = y }) + {-# INLINE configL #-} + +instance HasBuildConfig DotConfig where + buildConfigL = lens (.buildConfig) (\x y -> x { buildConfig = y }) + +instance HasSourceMap DotConfig where + sourceMapL = lens (.sourceMap) (\x y -> x { sourceMap = y }) diff --git a/src/Stack/Types/DotOpts.hs b/src/Stack/Types/DotOpts.hs new file mode 100644 index 0000000000..dd8f4c9ba0 --- /dev/null +++ b/src/Stack/Types/DotOpts.hs @@ -0,0 +1,43 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.DotOpts +License : BSD-3-Clause + +Module exporting the t`DotOpts` type used by Stack's @dot@ and @ls dependencies@ +commands. +-} + +module Stack.Types.DotOpts + ( DotOpts (..) + ) where + +import Stack.Prelude +import Stack.Types.BuildOptsCLI ( ApplyCLIFlag ) + +-- | Options record for @stack dot@ and @stack ls dependencies@ +data DotOpts = DotOpts + { includeExternal :: !Bool + -- ^ Include external dependencies + , includeBase :: !Bool + -- ^ Include dependencies on base + , dependencyDepth :: !(Maybe Int) + -- ^ Limit the depth of dependency resolution to (Just n) or continue until + -- fixpoint + , prune :: !(Set PackageName) + -- ^ Package names to prune from the graph + , reach :: !(Set PackageName) + -- ^ If not empty, packages in the pruned graph must be able to reach one or + -- more of these packages + , dotTargets :: [Text] + -- ^ Stack TARGETs to trace dependencies for + , flags :: !(Map ApplyCLIFlag (Map FlagName Bool)) + -- ^ Flags to apply when calculating dependencies + , testTargets :: Bool + -- ^ Like the "--test" flag for build, affects the meaning of 'dotTargets'. + , benchTargets :: Bool + -- ^ Like the "--bench" flag for build, affects the meaning of 'dotTargets'. + , globalHints :: Bool + -- ^ Use global hints instead of relying on an actual GHC installation. 
+ } diff --git a/src/Stack/Types/DownloadInfo.hs b/src/Stack/Types/DownloadInfo.hs new file mode 100644 index 0000000000..7e1ef35f35 --- /dev/null +++ b/src/Stack/Types/DownloadInfo.hs @@ -0,0 +1,50 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.DownloadInfo +License : BSD-3-Clause +-} + +module Stack.Types.DownloadInfo + ( DownloadInfo (..) + , parseDownloadInfoFromObject + ) where + +import Data.Aeson.Types ( FromJSON (..), Object ) +import Data.Aeson.WarningParser + ( WarningParser, WithJSONWarnings (..), (..:), (..:?) + , withObjectWarnings + ) +import Stack.Prelude + +-- | Information for a file to download. +data DownloadInfo = DownloadInfo + { url :: Text + -- ^ URL or absolute file path + , contentLength :: Maybe Int + , sha1 :: Maybe ByteString + , sha256 :: Maybe ByteString + } + deriving Show + +instance FromJSON (WithJSONWarnings DownloadInfo) where + parseJSON = withObjectWarnings "DownloadInfo" parseDownloadInfoFromObject + +-- | Parse JSON in existing object for t'DownloadInfo' +parseDownloadInfoFromObject :: Object -> WarningParser DownloadInfo +parseDownloadInfoFromObject o = do + url <- o ..: "url" + contentLength <- o ..:? "content-length" + sha1TextMay <- o ..:? "sha1" + sha256TextMay <- o ..:? "sha256" + let sha1 = fmap encodeUtf8 sha1TextMay + sha256 = fmap encodeUtf8 sha256TextMay + pure + DownloadInfo + { url + , contentLength + , sha1 + , sha256 + } diff --git a/src/Stack/Types/DumpLogs.hs b/src/Stack/Types/DumpLogs.hs new file mode 100644 index 0000000000..ea35f30d44 --- /dev/null +++ b/src/Stack/Types/DumpLogs.hs @@ -0,0 +1,35 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.DumpLogs +License : BSD-3-Clause +-} + +module Stack.Types.DumpLogs + ( DumpLogs (..) 
+ ) where + +import Data.Aeson.Types ( FromJSON (..), Value (..), withText ) +import Stack.Prelude + +-- | Which build log files to dump +data DumpLogs + = DumpNoLogs -- ^ don't dump any logfiles + | DumpWarningLogs -- ^ dump logfiles containing warnings + | DumpAllLogs -- ^ dump all logfiles + deriving (Bounded, Enum, Eq, Ord, Read, Show) + +instance FromJSON DumpLogs where + parseJSON (Bool True) = pure DumpAllLogs + parseJSON (Bool False) = pure DumpNoLogs + parseJSON v = + withText + "DumpLogs" + (\t -> + if | t == "none" -> pure DumpNoLogs + | t == "warning" -> pure DumpWarningLogs + | t == "all" -> pure DumpAllLogs + | otherwise -> fail ("Invalid DumpLogs: " ++ show t)) + v diff --git a/src/Stack/Types/DumpPackage.hs b/src/Stack/Types/DumpPackage.hs new file mode 100644 index 0000000000..24da19052e --- /dev/null +++ b/src/Stack/Types/DumpPackage.hs @@ -0,0 +1,67 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Types.DumpPackage +License : BSD-3-Clause +-} + +module Stack.Types.DumpPackage + ( DumpPackage (..) + , SublibDump (..) + , DumpedGlobalPackage + , sublibParentPkgId + ) where + +import qualified Distribution.License as C +import Distribution.ModuleName ( ModuleName ) +import Stack.Prelude +import Stack.Types.Component ( StackUnqualCompName ) +import Stack.Types.GhcPkgId ( GhcPkgId ) + +-- | Type representing dump information for a single installed package, as +-- output by the @ghc-pkg describe@ command. +data DumpPackage = DumpPackage + { ghcPkgId :: !GhcPkgId + -- ^ The @id@ field. + , packageIdent :: !PackageIdentifier + -- ^ The @name@ and @version@ fields. The @name@ field is the munged package + -- name. If the package is not for a sub-library, its munged name is its + -- name. + , sublib :: !(Maybe SublibDump) + -- ^ The sub-library information, if it is a sub-library. 
+ , license :: !(Maybe C.License) + , libDirs :: ![FilePath] + -- ^ The @library-dirs@ field. + , libraries :: ![Text] + -- ^ The @hs-libraries@ field. + , hasExposedModules :: !Bool + , exposedModules :: !(Set ModuleName) + , depends :: ![GhcPkgId] + -- ^ The @depends@ field (packages on which this package depends). + , haddockInterfaces :: ![FilePath] + , haddockHtml :: !(Maybe FilePath) + , isExposed :: !Bool + } + deriving (Eq, Read, Show) + +-- | An installed package for a sub-library of a Cabal package has additional +-- fields. +data SublibDump = SublibDump + { packageName :: PackageName + -- ^ The @package-name@ field. + , libraryName :: StackUnqualCompName + -- ^ The @lib-name@ field. + } + deriving (Eq, Read, Show) + +-- | Type synonym representing dump information for a single installed package +-- in the global package database. +type DumpedGlobalPackage = DumpPackage + +-- | If the given t'DumpPackage' is for a sub-library of a Cabal package, yields +-- the package identifier of the Cabal package. +sublibParentPkgId :: DumpPackage -> Maybe PackageIdentifier +sublibParentPkgId dp = dp.sublib <&> \subLibDump -> + PackageIdentifier subLibDump.packageName dp.packageIdent.pkgVersion diff --git a/src/Stack/Types/EnvConfig.hs b/src/Stack/Types/EnvConfig.hs new file mode 100644 index 0000000000..70ed68da86 --- /dev/null +++ b/src/Stack/Types/EnvConfig.hs @@ -0,0 +1,324 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Types.EnvConfig +License : BSD-3-Clause +-} + +module Stack.Types.EnvConfig + ( EnvConfig (..) + , HasEnvConfig (..) + , HasSourceMap (..) + , IsPath (..) 
+ , actualCompilerVersionL + , appropriateGhcColorFlag + , bindirCompilerTools + , compilerVersionDir + , extraBinDirs + , hoogleDatabasePath + , hoogleRoot + , hpcReportDir + , installationRootDeps + , installationRootLocal + , packageDatabaseDeps + , packageDatabaseExtra + , packageDatabaseLocal + , platformGhcRelDir + , platformGhcVerOnlyRelDir + , platformSnapAndCompilerRel + , shouldForceGhcColorFlag + , snapshotsDir + , useShaPathOnWindows + , shaPathForBytes + ) where + +import Crypto.Hash ( SHA1 (..), hashWith ) +import qualified Data.ByteArray.Encoding as Mem ( Base(Base16), convertToBase ) +import qualified Data.ByteString.Char8 as S8 +import qualified Data.Text as T +import qualified Distribution.Text ( display ) +import Path + ( (), parseAbsDir, parseAbsFile, parseRelDir + , parseRelFile + ) +import RIO.Process ( HasProcessContext (..) ) +import Stack.Constants + ( bindirSuffix, ghcColorForceFlag, osIsWindows + , relDirCompilerTools, relDirHoogle, relDirHpc, relDirInstall + , relDirPkgdb, relDirSnapshots, relFileDatabaseHoo + ) +import Stack.Prelude +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..), getWorkDir ) +import Stack.Types.BuildOptsCLI ( BuildOptsCLI ) +import Stack.Types.Compiler + ( ActualCompiler (..), compilerVersionString ) +import Stack.Types.CompilerBuild ( compilerBuildSuffix ) +import Stack.Types.CompilerPaths + ( CompilerPaths (..), HasCompiler (..) ) +import Stack.Types.Config ( HasConfig (..), stackRootL ) +import Stack.Types.FileDigestCache ( FileDigestCache ) +import Stack.Types.GHCVariant ( HasGHCVariant (..), ghcVariantSuffix ) +import Stack.Types.Platform + ( HasPlatform (..), platformVariantSuffix ) +import Stack.Types.Runner ( HasRunner (..) ) +import Stack.Types.SourceMap + ( SourceMap (..), SourceMapHash, smRelDir ) + +-- | Configuration after the environment has been setup. 
+data EnvConfig = EnvConfig + { buildConfig :: !BuildConfig + , buildOptsCLI :: !BuildOptsCLI + , fileDigestCache :: !FileDigestCache + , sourceMap :: !SourceMap + , sourceMapHash :: !SourceMapHash + , compilerPaths :: !CompilerPaths + } + +instance HasConfig EnvConfig where + configL = buildConfigL . lens (.config) (\x y -> x { config = y }) + {-# INLINE configL #-} + +instance HasBuildConfig EnvConfig where + buildConfigL = envConfigL . lens + (.buildConfig) + (\x y -> x { buildConfig = y }) + +instance HasPlatform EnvConfig where + platformL = configL . platformL + {-# INLINE platformL #-} + platformVariantL = configL . platformVariantL + {-# INLINE platformVariantL #-} + +instance HasGHCVariant EnvConfig where + ghcVariantL = configL . ghcVariantL + {-# INLINE ghcVariantL #-} + +instance HasProcessContext EnvConfig where + processContextL = configL . processContextL + +instance HasPantryConfig EnvConfig where + pantryConfigL = configL . pantryConfigL + +instance HasCompiler EnvConfig where + compilerPathsL = to (.compilerPaths) + +instance HasRunner EnvConfig where + runnerL = configL . runnerL + +instance HasLogFunc EnvConfig where + logFuncL = runnerL . logFuncL + +instance HasStylesUpdate EnvConfig where + stylesUpdateL = runnerL . stylesUpdateL + +instance HasTerm EnvConfig where + useColorL = runnerL . useColorL + termWidthL = runnerL . 
termWidthL + +class (HasBuildConfig env, HasSourceMap env, HasCompiler env) => HasEnvConfig env where + envConfigL :: Lens' env EnvConfig + +instance HasEnvConfig EnvConfig where + envConfigL = id + {-# INLINE envConfigL #-} + +class HasSourceMap env where + sourceMapL :: Lens' env SourceMap + +instance HasSourceMap EnvConfig where + sourceMapL = lens (.sourceMap) (\x y -> x { sourceMap = y }) + +shouldForceGhcColorFlag :: + (HasEnvConfig env, HasRunner env) + => RIO env Bool +shouldForceGhcColorFlag = view useColorL + +appropriateGhcColorFlag :: + (HasEnvConfig env, HasRunner env) + => RIO env (Maybe String) +appropriateGhcColorFlag = f <$> shouldForceGhcColorFlag + where + f True = Just ghcColorForceFlag + f False = Nothing + +-- | Directory containing snapshots +snapshotsDir :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => m (Path Abs Dir) +snapshotsDir = do + root <- view stackRootL + platform <- platformGhcRelDir + pure $ root relDirSnapshots platform +{-# DEPRECATED snapshotsDir "Not used by Stack >= 1.0.4. May be removed from a future version of stack." #-} + +-- | Installation root for dependencies. +installationRootDeps :: HasEnvConfig env => RIO env (Path Abs Dir) +installationRootDeps = do + root <- view stackRootL + psc <- platformSnapAndCompilerRel + pure $ root relDirSnapshots psc + +-- | Installation root for locals. +installationRootLocal :: HasEnvConfig env => RIO env (Path Abs Dir) +installationRootLocal = do + workDir <- getWorkDir + psc <- platformSnapAndCompilerRel + pure $ workDir relDirInstall psc + +-- | Get the hoogle database path. +hoogleDatabasePath :: HasEnvConfig env => RIO env (Path Abs File) +hoogleDatabasePath = do + dir <- hoogleRoot + pure (dir relFileDatabaseHoo) + +-- | Path for platform followed by snapshot name followed by compiler +-- name. +platformSnapAndCompilerRel :: HasEnvConfig env => RIO env (Path Rel Dir) +platformSnapAndCompilerRel = do + platform <- platformGhcRelDir + smh <- view $ envConfigL . 
to (.sourceMapHash) + name <- smRelDir smh + ghc <- compilerVersionDir + useShaPathOnWindows (platform name ghc) + +-- | Relative directory for the platform and GHC identifier +platformGhcRelDir :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => m (Path Rel Dir) +platformGhcRelDir = do + cp <- view compilerPathsL + let cbSuffix = compilerBuildSuffix cp.build + verOnly <- platformGhcVerOnlyRelDirStr + parseRelDir (mconcat [ verOnly, cbSuffix ]) + +-- | Installation root for compiler tools +bindirCompilerTools :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => m (Path Abs Dir) +bindirCompilerTools = do + config <- view configL + platform <- platformGhcRelDir + compilerVersion <- view actualCompilerVersionL + compiler <- parseRelDir $ compilerVersionString compilerVersion + pure $ + view stackRootL config + relDirCompilerTools + platform + compiler + bindirSuffix + +-- | Hoogle directory. +hoogleRoot :: HasEnvConfig env => RIO env (Path Abs Dir) +hoogleRoot = do + workDir <- getWorkDir + psc <- platformSnapAndCompilerRel + pure $ workDir relDirHoogle psc + +compilerVersionDir :: + (HasEnvConfig env, MonadReader env m, MonadThrow m) + => m (Path Rel Dir) +compilerVersionDir = do + compilerVersion <- view actualCompilerVersionL + parseRelDir $ case compilerVersion of + ACGhc version -> versionString version + ACGhcGit {} -> compilerVersionString compilerVersion + +-- | Package database for installing dependencies into +packageDatabaseDeps :: HasEnvConfig env => RIO env (Path Abs Dir) +packageDatabaseDeps = do + root <- installationRootDeps + pure $ root relDirPkgdb + +-- | Package database for installing project packages and local extra-deps into. 
+packageDatabaseLocal :: HasEnvConfig env => RIO env (Path Abs Dir) +packageDatabaseLocal = do + root <- installationRootLocal + pure $ root relDirPkgdb + +-- | Extra package databases +packageDatabaseExtra :: + (HasEnvConfig env, MonadReader env m) + => m [Path Abs Dir] +packageDatabaseExtra = view $ buildConfigL . to (.extraPackageDBs) + +-- | Where HPC reports and tix files get stored. +hpcReportDir :: HasEnvConfig env => RIO env (Path Abs Dir) +hpcReportDir = do + root <- installationRootLocal + pure $ root relDirHpc + +-- | Get the extra bin directories (for the PATH). Puts more local first +-- +-- Bool indicates whether or not to include the locals +extraBinDirs :: HasEnvConfig env => RIO env (Bool -> [Path Abs Dir]) +extraBinDirs = do + deps <- installationRootDeps + local' <- installationRootLocal + tools <- bindirCompilerTools + pure $ \locals -> if locals + then [local' bindirSuffix, deps bindirSuffix, tools] + else [deps bindirSuffix, tools] + +-- | The version of the compiler which will actually be used. May be different +-- than that specified in the snapshot and returned by +-- 'Stack.Types.BuildConfig.wantedCompilerVersionL'. +actualCompilerVersionL :: HasSourceMap env => SimpleGetter env ActualCompiler +actualCompilerVersionL = sourceMapL . 
to (.compiler) + +-- | Relative directory for the platform and GHC identifier without GHC bindist +-- build +platformGhcVerOnlyRelDir :: + (HasGHCVariant env, HasPlatform env, MonadReader env m, MonadThrow m) + => m (Path Rel Dir) +platformGhcVerOnlyRelDir = + parseRelDir =<< platformGhcVerOnlyRelDirStr + +-- | Relative directory for the platform and GHC identifier without GHC bindist +-- build (before parsing into a Path) +platformGhcVerOnlyRelDirStr :: + (HasGHCVariant env, HasPlatform env, MonadReader env m) + => m FilePath +platformGhcVerOnlyRelDirStr = do + platform <- view platformL + platformVariant <- view platformVariantL + ghcVariant <- view ghcVariantL + pure $ mconcat [ Distribution.Text.display platform + , platformVariantSuffix platformVariant + , ghcVariantSuffix ghcVariant ] + +-- | This is an attempt to shorten Stack paths on Windows to decrease our +-- chances of hitting 260 symbol path limit. The idea is to calculate +-- SHA1 hash of the path used on other architectures, encode with base +-- 16 and take first 8 symbols of it. +useShaPathOnWindows :: MonadThrow m => Path Rel Dir -> m (Path Rel Dir) +useShaPathOnWindows + | osIsWindows = shaPath + | otherwise = pure + +shaPath :: (IsPath Rel t, MonadThrow m) => Path Rel t -> m (Path Rel t) +shaPath = shaPathForBytes . encodeUtf8 . T.pack . toFilePath + +shaPathForBytes :: (IsPath Rel t, MonadThrow m) => ByteString -> m (Path Rel t) +shaPathForBytes + = parsePath . S8.unpack . S8.take 8 + . Mem.convertToBase Mem.Base16 . hashWith SHA1 + +-- TODO: Move something like this into the path package. Consider +-- subsuming path-io's 'AnyPath'? 
+class IsPath b t where + parsePath :: MonadThrow m => FilePath -> m (Path b t) + +instance IsPath Abs Dir where + parsePath = parseAbsDir + +instance IsPath Rel Dir where + parsePath = parseRelDir + +instance IsPath Abs File where + parsePath = parseAbsFile + +instance IsPath Rel File where + parsePath = parseRelFile diff --git a/src/Stack/Types/EnvSettings.hs b/src/Stack/Types/EnvSettings.hs new file mode 100644 index 0000000000..8d4ae31150 --- /dev/null +++ b/src/Stack/Types/EnvSettings.hs @@ -0,0 +1,70 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.EnvSettings +License : BSD-3-Clause +-} + +module Stack.Types.EnvSettings + ( EnvSettings (..) + , minimalEnvSettings + , defaultEnvSettings + , plainEnvSettings + ) where + +import Stack.Prelude + +-- | Controls which version of the environment is used +data EnvSettings = EnvSettings + { includeLocals :: !Bool + -- ^ include project's local bin directory, GHC_PACKAGE_PATH, etc + , includeGhcPackagePath :: !Bool + -- ^ include the GHC_PACKAGE_PATH variable + , stackExe :: !Bool + -- ^ set the STACK_EXE variable to the current executable name + , localeUtf8 :: !Bool + -- ^ set the locale to C.UTF-8 + , keepGhcRts :: !Bool + -- ^ if True, keep GHCRTS variable in environment + } + deriving (Eq, Ord, Show) + +-- | Minimal @EnvSettings@ which do not embellish the environment and do not +-- pass through the GHCRTS environment variable. See +-- https://github.com/commercialhaskell/stack/issues/3444 +minimalEnvSettings :: EnvSettings +minimalEnvSettings = + EnvSettings + { includeLocals = False + , includeGhcPackagePath = False + , stackExe = False + , localeUtf8 = False + , keepGhcRts = False + } + +-- | Default @EnvSettings@ which includes locals and GHC_PACKAGE_PATH. +-- +-- Note that this also passes through the GHCRTS environment variable. 
See +-- https://github.com/commercialhaskell/stack/issues/3444 +defaultEnvSettings :: EnvSettings +defaultEnvSettings = EnvSettings + { includeLocals = True + , includeGhcPackagePath = True + , stackExe = True + , localeUtf8 = False + , keepGhcRts = True + } + +-- | Environment settings which do not embellish the environment +-- +-- Note that this also passes through the GHCRTS environment variable. See +-- https://github.com/commercialhaskell/stack/issues/3444 +plainEnvSettings :: EnvSettings +plainEnvSettings = EnvSettings + { includeLocals = False + , includeGhcPackagePath = False + , stackExe = False + , localeUtf8 = False + , keepGhcRts = True + } diff --git a/src/Stack/Types/ExtraDirs.hs b/src/Stack/Types/ExtraDirs.hs new file mode 100644 index 0000000000..957db38bef --- /dev/null +++ b/src/Stack/Types/ExtraDirs.hs @@ -0,0 +1,28 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.ExtraDirs +License : BSD-3-Clause +-} + +module Stack.Types.ExtraDirs + ( ExtraDirs (..) 
+ ) where + +import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) +import Stack.Prelude + +data ExtraDirs = ExtraDirs + { bins :: ![Path Abs Dir] + , includes :: ![Path Abs Dir] + , libs :: ![Path Abs Dir] + } + deriving (Show, Generic) + +instance Semigroup ExtraDirs where + (<>) = mappenddefault + +instance Monoid ExtraDirs where + mempty = memptydefault + mappend = (<>) diff --git a/src/Stack/Types/FileDigestCache.hs b/src/Stack/Types/FileDigestCache.hs new file mode 100644 index 0000000000..2bb64d36e4 --- /dev/null +++ b/src/Stack/Types/FileDigestCache.hs @@ -0,0 +1,32 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.FileDigestCache +License : BSD-3-Clause +-} + +module Stack.Types.FileDigestCache + ( FileDigestCache + , newFileDigestCache + , readFileDigest + ) where + +import qualified Data.Map.Strict as Map +import qualified Pantry.SHA256 as SHA256 +import Stack.Prelude + +-- | Type synonym representing caches of digests of files. +type FileDigestCache = IORef (Map FilePath SHA256) + +newFileDigestCache :: MonadIO m => m FileDigestCache +newFileDigestCache = newIORef Map.empty + +readFileDigest :: MonadIO m => FileDigestCache -> FilePath -> m SHA256 +readFileDigest cache filePath = do + digests <- readIORef cache + case Map.lookup filePath digests of + Just digest -> pure digest + Nothing -> do + sha256 <- SHA256.hashFile filePath + writeIORef cache $ Map.insert filePath sha256 digests + pure sha256 diff --git a/src/Stack/Types/GHCDownloadInfo.hs b/src/Stack/Types/GHCDownloadInfo.hs new file mode 100644 index 0000000000..3d86e980c4 --- /dev/null +++ b/src/Stack/Types/GHCDownloadInfo.hs @@ -0,0 +1,37 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.GHCDownloadInfo +License : BSD-3-Clause +-} + +module Stack.Types.GHCDownloadInfo + ( GHCDownloadInfo (..) + ) where + +import Data.Aeson.Types ( FromJSON (..) 
) +import Data.Aeson.WarningParser + ( WithJSONWarnings (..), (..:?), (..!=), withObjectWarnings ) +import Stack.Prelude +import Stack.Types.DownloadInfo + ( DownloadInfo, parseDownloadInfoFromObject ) + +data GHCDownloadInfo = GHCDownloadInfo + { configureOpts :: [Text] + , configureEnv :: Map Text Text + , downloadInfo :: DownloadInfo + } + deriving Show + +instance FromJSON (WithJSONWarnings GHCDownloadInfo) where + parseJSON = withObjectWarnings "GHCDownloadInfo" $ \o -> do + configureOpts <- o ..:? "configure-opts" ..!= mempty + configureEnv <- o ..:? "configure-env" ..!= mempty + downloadInfo <- parseDownloadInfoFromObject o + pure GHCDownloadInfo + { configureOpts + , configureEnv + , downloadInfo + } diff --git a/src/Stack/Types/GHCVariant.hs b/src/Stack/Types/GHCVariant.hs new file mode 100644 index 0000000000..03275a4a87 --- /dev/null +++ b/src/Stack/Types/GHCVariant.hs @@ -0,0 +1,70 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.GHCVariant +License : BSD-3-Clause +-} + +module Stack.Types.GHCVariant + ( GHCVariant (..) + , HasGHCVariant (..) + , ghcVariantName + , ghcVariantSuffix + , parseGHCVariant + ) where + +import Data.Aeson.Types ( FromJSON, parseJSON, withText ) +import Data.List ( stripPrefix ) +import qualified Data.Text as T +import Stack.Prelude + +-- | Specialized variant of GHC (e.g. libgmp4 or integer-simple) +data GHCVariant + = GHCStandard + -- ^ Standard bindist + | GHCIntegerSimple + -- ^ Bindist that uses integer-simple + | GHCNativeBignum + -- ^ Bindist that uses the Haskell-native big-integer backend + | GHCCustom String + -- ^ Other bindists + deriving Show + +instance FromJSON GHCVariant where + -- Strange structuring is to give consistent error messages + parseJSON = + withText + "GHCVariant" + (either (fail . show) pure . parseGHCVariant . 
T.unpack) + +-- | Class for environment values which have a GHCVariant +class HasGHCVariant env where + ghcVariantL :: SimpleGetter env GHCVariant + +instance HasGHCVariant GHCVariant where + ghcVariantL = id + {-# INLINE ghcVariantL #-} + +-- | Render a GHC variant to a String. +ghcVariantName :: GHCVariant -> String +ghcVariantName GHCStandard = "standard" +ghcVariantName GHCIntegerSimple = "integersimple" +ghcVariantName GHCNativeBignum = "int-native" +ghcVariantName (GHCCustom name) = "custom-" ++ name + +-- | Render a GHC variant to a String suffix. +ghcVariantSuffix :: GHCVariant -> String +ghcVariantSuffix GHCStandard = "" +ghcVariantSuffix v = "-" ++ ghcVariantName v + +-- | Parse GHC variant from a String. +parseGHCVariant :: (MonadThrow m) => String -> m GHCVariant +parseGHCVariant s = + case stripPrefix "custom-" s of + Just name -> pure (GHCCustom name) + Nothing + | s == "" -> pure GHCStandard + | s == "standard" -> pure GHCStandard + | s == "integersimple" -> pure GHCIntegerSimple + | s == "int-native" -> pure GHCNativeBignum + | otherwise -> pure (GHCCustom s) diff --git a/src/Stack/Types/GhcOptionKey.hs b/src/Stack/Types/GhcOptionKey.hs new file mode 100644 index 0000000000..43fdab62e5 --- /dev/null +++ b/src/Stack/Types/GhcOptionKey.hs @@ -0,0 +1,44 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.GhcOptionKey +License : BSD-3-Clause +-} + +module Stack.Types.GhcOptionKey + ( GhcOptionKey (..) + ) where + +import Data.Aeson.Types ( FromJSONKey (..), FromJSONKeyFunction (..) ) +import qualified Data.Text as T +import Stack.Prelude + +-- | Type representing scopes of the application by Stack of GHC options. +data GhcOptionKey + = GOKOldEverything + -- ^ All packages, project packages or otherwise (specified with legacy + -- syntax). + | GOKEverything + -- ^ All packages, project packages or otherwise. + | GOKLocals + -- ^ All project packages, targets or otherwise. 
+ | GOKTargets + -- ^ All project packages that are targets. + | GOKPackage !PackageName + -- ^ A named package. + deriving (Eq, Ord) + +instance FromJSONKey GhcOptionKey where + fromJSONKey = FromJSONKeyTextParser $ \t -> + case t of + "*" -> pure GOKOldEverything + "$everything" -> pure GOKEverything + "$locals" -> pure GOKLocals + "$targets" -> pure GOKTargets + _ -> + case parsePackageName $ T.unpack t of + Nothing -> fail $ "Invalid package name: " ++ show t + Just x -> pure $ GOKPackage x + fromJSONKeyList = + FromJSONKeyTextParser $ \_ -> fail "GhcOptionKey.fromJSONKeyList" diff --git a/src/Stack/Types/GhcOptions.hs b/src/Stack/Types/GhcOptions.hs new file mode 100644 index 0000000000..6350bd8650 --- /dev/null +++ b/src/Stack/Types/GhcOptions.hs @@ -0,0 +1,25 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.GhcOptions +License : BSD-3-Clause +-} + +module Stack.Types.GhcOptions + ( GhcOptions (..) + ) where + +import Data.Aeson.Types ( FromJSON (..), withText ) +import Data.Attoparsec.Args ( EscapingMode (Escaping), parseArgs ) +import qualified Data.Text as T +import Stack.Prelude + +-- | Type representing lists of GHC options. +newtype GhcOptions = GhcOptions { ghcOptions :: [Text] } + +instance FromJSON GhcOptions where + parseJSON = withText "GhcOptions" $ \t -> + case parseArgs Escaping t of + Left e -> fail e + Right opts -> pure $ GhcOptions $ map T.pack opts diff --git a/src/Stack/Types/GhcPkgExe.hs b/src/Stack/Types/GhcPkgExe.hs new file mode 100644 index 0000000000..5346f4c1d5 --- /dev/null +++ b/src/Stack/Types/GhcPkgExe.hs @@ -0,0 +1,135 @@ + +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.GhcPkgExe +License : BSD-3-Clause +-} + +module Stack.Types.GhcPkgExe + ( GhcPkgPrettyException (..) + , GlobPackageIdentifier (..) + , PackageArg (..) 
+ ) where + +import Distribution.Package ( UnitId ) +import Distribution.Text ( display ) +import Path ( SomeBase (..) ) +import Stack.Prelude hiding ( display ) + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "GHC.Utils.GhcPkg.Main.Compat" module or the "Stack.GhcPkg" module. +data GhcPkgPrettyException + = CannotParse !String !String !String + | CannotOpenDBForModification !(SomeBase Dir) !IOException + | SingleFileDBUnsupported !(SomeBase Dir) + | ParsePackageInfoExceptions !String + | CannotFindPackage !PackageArg !(Maybe (SomeBase Dir)) + | CannotParseRelFileBug !String + | CannotParseDirectoryWithDBug !String + | CannotRecacheAfterUnregister !(Path Abs Dir) !SomeException + deriving Show + +instance Pretty GhcPkgPrettyException where + pretty (CannotParse str what e) = + "[S-6512]" + <> line + <> fillSep + [ flow "cannot parse" + , style Current (fromString str) + , flow "as a" + , fromString what <> ":" + ] + <> blankLine + <> fromString e + pretty (CannotOpenDBForModification db_path e) = + "[S-3384]" + <> line + <> fillSep + [ flow "Couldn't open database" + , pretty db_path + , flow "for modification:" + ] + <> blankLine + <> string (displayException e) + pretty (SingleFileDBUnsupported path) = + "[S-1430]" + <> line + <> fillSep + [ flow "ghc no longer supports single-file style package databases" + , parens (pretty path) + , "use" + , style Shell (flow "ghc-pkg init") + , flow "to create the database with the correct format." 
+ ] + pretty (ParsePackageInfoExceptions errs) = + "[S-5996]" + <> line + <> flow errs + pretty (CannotFindPackage pkgarg mdb_path) = + "[S-3189]" + <> line + <> fillSep + [ flow "cannot find package" + , style Current (pkg_msg pkgarg) + , maybe + "" + (\db_path -> fillSep ["in", pretty db_path]) + mdb_path + ] + where + pkg_msg (Substring pkgpat _) = fillSep ["matching", fromString pkgpat] + pkg_msg pkgarg' = fromString $ show pkgarg' + pretty (CannotParseRelFileBug relFileName) = bugPrettyReport "[S-9323]" $ + fillSep + [ flow "changeDBDir': Could not parse" + , style File (fromString relFileName) + , flow "as a relative path to a file." + ] + pretty (CannotParseDirectoryWithDBug dirName) = bugPrettyReport "[S-7651]" $ + fillSep + [ flow "adjustOldDatabasePath: Could not parse" + , style Dir (fromString dirName) + , flow "as a directory." + ] + pretty (CannotRecacheAfterUnregister pkgDb e) = + "[S-6590]" + <> line + <> fillSep + [ flow "While recaching" + , pretty pkgDb + , flow "after unregistering packages, Stack encountered the following \ + \ error:" + ] + <> blankLine + <> string (displayException e) + +instance Exception GhcPkgPrettyException + +-- | Represents how a package may be specified by a user on the command line. +data PackageArg + -- | A package identifier foo-0.1, or a glob foo-* + = Id GlobPackageIdentifier + -- | An installed package ID foo-0.1-HASH. This is guaranteed to uniquely + -- match a single entry in the package database. + | IUId UnitId + -- | A glob against the package name. The first string is the literal + -- glob, the second is a function which returns @True@ if the argument + -- matches. 
+ | Substring String (String -> Bool) + +instance Show PackageArg where + show (Id pkgid) = displayGlobPkgId pkgid + show (IUId ipid) = display ipid + show (Substring pkgpat _) = pkgpat + +displayGlobPkgId :: GlobPackageIdentifier -> String +displayGlobPkgId (ExactPackageIdentifier pid) = display pid +displayGlobPkgId (GlobPackageIdentifier pn) = display pn ++ "-*" + +-- | Either an exact t'PackageIdentifier', or a glob for all packages +-- matching 'PackageName'. +data GlobPackageIdentifier + = ExactPackageIdentifier MungedPackageId + | GlobPackageIdentifier MungedPackageName diff --git a/src/Stack/Types/GhcPkgId.hs b/src/Stack/Types/GhcPkgId.hs index 0d06b37905..d66886f05d 100644 --- a/src/Stack/Types/GhcPkgId.hs +++ b/src/Stack/Types/GhcPkgId.hs @@ -1,50 +1,75 @@ {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} --- | A ghc-pkg id. +{-| +Module : Stack.Types.GhcPkgId +Description : A ghc-pkg id. +License : BSD-3-Clause + +A ghc-pkg id. +-} module Stack.Types.GhcPkgId - (GhcPkgId - ,unGhcPkgId - ,ghcPkgIdParser - ,parseGhcPkgId - ,ghcPkgIdString) - where + ( GhcPkgId + , ghcPkgIdToText + , ghcPkgIdParser + , parseGhcPkgId + , ghcPkgIdString + ) where -import Stack.Prelude -import Pantry.Internal.AesonExtended +import Data.Aeson.Types ( FromJSON (..), ToJSON (..), withText ) import Data.Attoparsec.Text -import qualified Data.Text as T -import Database.Persist.Sql (PersistField, PersistFieldSql) -import Prelude (Read (..)) + ( Parser, (), choice, endOfInput, many1, parseOnly + , satisfy + ) +import Data.Char ( isAlphaNum ) +import Data.Hashable ( Hashable(..) ) +import Database.Persist.Sql + ( PersistField (..), PersistFieldSql (..) ) +import Distribution.Compat.Binary ( decode, encode ) +import Distribution.Types.UnitId ( UnitId, mkUnitId, unUnitId ) +import Stack.Prelude +import Text.Read ( Read (..) ) -- | A parse fail. 
newtype GhcPkgIdParseFail = GhcPkgIdParseFail Text - deriving Typeable -instance Show GhcPkgIdParseFail where - show (GhcPkgIdParseFail bs) = "Invalid package ID: " ++ show bs -instance Exception GhcPkgIdParseFail + deriving Show + +instance Exception GhcPkgIdParseFail where + displayException (GhcPkgIdParseFail bs) = concat + [ "Error: [S-5359]\n" + , "Invalid package ID: " + , show bs + ] -- | A ghc-pkg package identifier. -newtype GhcPkgId = GhcPkgId Text - deriving (Eq,Ord,Data,Typeable,Generic,PersistField,PersistFieldSql) +newtype GhcPkgId + = GhcPkgId UnitId + deriving (Data, Eq, Generic, Ord) + +instance PersistField GhcPkgId where + toPersistValue = toPersistValue . ghcPkgIdToText + fromPersistValue = (fmap . fmap) ghcPkgIdFromText fromPersistValue + +instance PersistFieldSql GhcPkgId where + sqlType _ = sqlType @Text Proxy + +instance Hashable GhcPkgId where + hashWithSalt a (GhcPkgId v) = hashWithSalt a (encode v) -instance Hashable GhcPkgId instance NFData GhcPkgId instance Show GhcPkgId where show = show . ghcPkgIdString + instance Read GhcPkgId where - readsPrec i = map (first (GhcPkgId . T.pack)) . readsPrec i + readsPrec i = map (first (GhcPkgId . mkUnitId)) . readsPrec i instance FromJSON GhcPkgId where parseJSON = withText "GhcPkgId" $ \t -> case parseGhcPkgId t of Left e -> fail $ show (e, t) - Right x -> return x + Right x -> pure x instance ToJSON GhcPkgId where toJSON g = @@ -53,20 +78,30 @@ instance ToJSON GhcPkgId where -- | Convenient way to parse a package name from a 'Text'. parseGhcPkgId :: MonadThrow m => Text -> m GhcPkgId parseGhcPkgId x = go x - where go = - either (const (throwM (GhcPkgIdParseFail x))) return . - parseOnly (ghcPkgIdParser <* endOfInput) + where + go = either + (const (throwM (GhcPkgIdParseFail x))) + pure . parseOnly (ghcPkgIdParser <* endOfInput) -- | A parser for a package-version-hash pair. ghcPkgIdParser :: Parser GhcPkgId ghcPkgIdParser = - let elements = "_.-" :: String in - GhcPkgId . 
T.pack <$> many1 (choice [digit, letter, satisfy (`elem` elements)]) + let elements = "_.-" :: String + in GhcPkgId . mkUnitId <$> + many1 (choice [alphaNum, satisfy (`elem` elements)]) + +-- | Parse an alphanumerical character, as recognised by `isAlphaNum`. +alphaNum :: Parser Char +alphaNum = satisfy isAlphaNum "alphanumeric" +{-# INLINE alphaNum #-} -- | Get a string representation of GHC package id. ghcPkgIdString :: GhcPkgId -> String -ghcPkgIdString (GhcPkgId x) = T.unpack x +ghcPkgIdString (GhcPkgId x) = unUnitId x --- | Get a text value of GHC package id -unGhcPkgId :: GhcPkgId -> Text -unGhcPkgId (GhcPkgId v) = v +-- | Get a text value of GHC package id. +ghcPkgIdToText :: GhcPkgId -> Text +ghcPkgIdToText (GhcPkgId v) = decode . encode $ v +-- | Create GhcPkgId from Text. +ghcPkgIdFromText :: Text -> GhcPkgId +ghcPkgIdFromText = GhcPkgId . decode . encode diff --git a/src/Stack/Types/GhciOpts.hs b/src/Stack/Types/GhciOpts.hs new file mode 100644 index 0000000000..15b9f322f7 --- /dev/null +++ b/src/Stack/Types/GhciOpts.hs @@ -0,0 +1,35 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.GhciOpts +Description : Types related to Stack's @ghci@ and @repl@ commands. +License : BSD-3-Clause + +Types related to Stack's @ghci@ and @repl@ commands. +-} + +module Stack.Types.GhciOpts + ( GhciOpts (..) + ) where + +import Stack.Prelude +import Stack.Types.BuildOptsCLI ( ApplyCLIFlag (..) ) + +-- | Type respresenting command line options for Stack's @ghci@ and @repl@ +-- commands. 
+data GhciOpts = GhciOpts + { targets :: ![Text] + , args :: ![String] + , ghcOptions :: ![String] + , flags :: !(Map ApplyCLIFlag (Map FlagName Bool)) + , ghcCommand :: !(Maybe FilePath) + , noLoadModules :: !Bool + , additionalPackages :: ![String] + , mainIs :: !(Maybe Text) + , loadLocalDeps :: !Bool + , hidePackages :: !(Maybe Bool) + , noBuild :: !Bool + , onlyMain :: !Bool + } + deriving Show diff --git a/src/Stack/Types/GlobalOpts.hs b/src/Stack/Types/GlobalOpts.hs new file mode 100644 index 0000000000..a1fa1d605e --- /dev/null +++ b/src/Stack/Types/GlobalOpts.hs @@ -0,0 +1,59 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Types.GlobalOpts +License : BSD-3-Clause +-} + +module Stack.Types.GlobalOpts + ( GlobalOpts (..) + , globalOptsBuildOptsMonoidL + ) where + +import Stack.Prelude +import Stack.Types.BuildOptsMonoid ( BuildOptsMonoid ) +import Stack.Types.ConfigMonoid ( ConfigMonoid (..) ) +import Stack.Types.DockerEntrypoint ( DockerEntrypoint ) +import Stack.Types.LockFileBehavior ( LockFileBehavior ) +import Stack.Types.StackYamlLoc ( StackYamlLoc ) +import Stack.Types.Snapshot ( AbstractSnapshot ) + +-- | Parsed global command-line options. +data GlobalOpts = GlobalOpts + { reExecVersion :: !(Maybe String) + -- ^ Expected re-exec in container version + , dockerEntrypoint :: !(Maybe DockerEntrypoint) + -- ^ Data used when Stack is acting as a Docker entrypoint (internal use + -- only) + , logLevel :: !LogLevel -- ^ Log level + , timeInLog :: !Bool -- ^ Whether to include timings in logs. + , rslInLog :: !Bool + -- ^ Whether to include raw snapshot layer (RSL) in logs. + , planInLog :: !Bool + -- ^ Whether to include debug information about the construction of the + -- build plan in logs. 
+ , configMonoid :: !ConfigMonoid + -- ^ Config monoid, for passing into 'Stack.Config.loadConfig' + , snapshot :: !(Maybe AbstractSnapshot) -- ^ Snapshot override + , compiler :: !(Maybe WantedCompiler) -- ^ Compiler override + , terminal :: !Bool -- ^ We're in a terminal? + , stylesUpdate :: !StylesUpdate -- ^ SGR (Ansi) codes for styles + , termWidthOpt :: !(Maybe Int) -- ^ Terminal width override + , stackYaml :: !StackYamlLoc -- ^ Override project stack.yaml + , lockFileBehavior :: !LockFileBehavior + , progName :: !String + -- ^ The name of the current Stack executable, as it was invoked. + , mExecutablePath :: !(Maybe (Path Abs File)) + -- ^ The path to the current Stack executable, if the operating system + -- provides a reliable way to determine it and where a result was + -- available. + } + +-- | View or set the @buildOpts@ field of the @configMonoid@ field of a +-- v'GlobalOpts'. +globalOptsBuildOptsMonoidL :: Lens' GlobalOpts BuildOptsMonoid +globalOptsBuildOptsMonoidL = + lens (.configMonoid) (\x y -> x { configMonoid = y }) + . lens (.buildOpts) (\x y -> x { buildOpts = y }) diff --git a/src/Stack/Types/GlobalOptsMonoid.hs b/src/Stack/Types/GlobalOptsMonoid.hs new file mode 100644 index 0000000000..105ae8188e --- /dev/null +++ b/src/Stack/Types/GlobalOptsMonoid.hs @@ -0,0 +1,62 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.GlobalOptsMonoid +License : BSD-3-Clause +-} + +module Stack.Types.GlobalOptsMonoid + ( GlobalOptsMonoid (..) + ) where + +import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) +import Stack.Prelude +import Stack.Types.ConfigMonoid ( ConfigMonoid ) +import Stack.Types.DockerEntrypoint ( DockerEntrypoint ) +import Stack.Types.LockFileBehavior ( LockFileBehavior ) +import Stack.Types.Snapshot ( AbstractSnapshot ) + +-- | Parsed global command-line options monoid. 
+data GlobalOptsMonoid = GlobalOptsMonoid + { reExecVersion :: !(First String) + -- ^ Expected re-exec in container version + , dockerEntrypoint :: !(First DockerEntrypoint) + -- ^ Data used when Stack is acting as a Docker entrypoint (internal use + -- only) + , logLevel :: !(First LogLevel) + -- ^ Log level + , timeInLog :: !FirstTrue + -- ^ Whether to include timings in logs. + , rslInLog :: !FirstFalse + -- ^ Whether to include raw snapshot layer (RSL) in logs. + , planInLog :: !FirstFalse + -- ^ Whether to include debug information about the construction of the + -- build plan in logs. + , configMonoid :: !ConfigMonoid + -- ^ Config monoid, for passing into 'Stack.Config.loadConfig' + , snapshot :: !(First (Unresolved AbstractSnapshot)) + -- ^ Snapshot override + , snapshotRoot :: !(First FilePath) + -- ^ root directory for snapshot relative path + , compiler :: !(First WantedCompiler) + -- ^ Compiler override + , terminal :: !(First Bool) + -- ^ We're in a terminal? + , styles :: !StylesUpdate + -- ^ Stack's output styles + , termWidthOpt :: !(First Int) + -- ^ Terminal width override + , stackYaml :: !(First FilePath) + -- ^ Override project stack.yaml + , lockFileBehavior :: !(First LockFileBehavior) + -- ^ See 'Stack.Types.GlobalOpts.lockFileBehavior' + } + deriving Generic + +instance Semigroup GlobalOptsMonoid where + (<>) = mappenddefault + +instance Monoid GlobalOptsMonoid where + mempty = memptydefault + mappend = (<>) diff --git a/src/Stack/Types/HpcReportOpts.hs b/src/Stack/Types/HpcReportOpts.hs new file mode 100644 index 0000000000..97c09d31db --- /dev/null +++ b/src/Stack/Types/HpcReportOpts.hs @@ -0,0 +1,25 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.HpcReportOpts +Description : Types related to Stack's @hpc report@ command. +License : BSD-3-Clause + +Types related to Stack's @hpc report@ command. +-} + +module Stack.Types.HpcReportOpts + ( HpcReportOpts (..) 
+ ) where + +import Stack.Prelude + +-- | Type representing command line options for the @stack hpc report@ command. +data HpcReportOpts = HpcReportOpts + { inputs :: [Text] + , all :: Bool + , destDir :: Maybe String + , openBrowser :: Bool + } + deriving Show diff --git a/src/Stack/Types/IdeOpts.hs b/src/Stack/Types/IdeOpts.hs new file mode 100644 index 0000000000..682ae64784 --- /dev/null +++ b/src/Stack/Types/IdeOpts.hs @@ -0,0 +1,30 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.IdeOpts +Description : Types for command line options for Stack's @ide@ commands. +License : BSD-3-Clause + +Types for command line options for Stack's @ide@ commands. +-} + +module Stack.Types.IdeOpts + ( OutputStream (..) + , ListPackagesCmd (..) + ) where + +-- | Type representing output stream choices for the @stack ide packages@ and +-- @stack ide targets@ commands. +data OutputStream + = OutputLogInfo + -- ^ To the same output stream as other log information. + | OutputStdout + -- ^ To the standard output stream. + +-- | Type representing output choices for the @stack ide packages@ command. +data ListPackagesCmd + = ListPackageNames + -- ^ Package names. + | ListPackageCabalFiles + -- ^ Paths to Cabal files. diff --git a/src/Stack/Types/Installed.hs b/src/Stack/Types/Installed.hs new file mode 100644 index 0000000000..ba84f66b1e --- /dev/null +++ b/src/Stack/Types/Installed.hs @@ -0,0 +1,160 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Types.Installed +License : BSD-3-Clause + +This module contains all the types related to the idea of installing a package +in the pkg-db or an executable on the file system. +-} + +module Stack.Types.Installed + ( InstallLocation (..) + , InstalledPackageLocation (..) + , PackageDatabase (..) + , PackageDbVariety (..) + , InstallMap + , Installed (..) + , InstalledMap + , InstalledLibraryInfo (..) 
+ , toPackageDbVariety + , installedLibraryInfoFromGhcPkgId + , simpleInstalledLib + , installedToPackageIdOpt + , installedPackageIdentifier + , installedVersion + , foldOnGhcPkgId' + ) where + +import qualified Data.Map as M +import qualified Distribution.SPDX.License as SPDX +import Distribution.License ( License ) +import Stack.Prelude +import Stack.Types.ComponentUtils ( StackUnqualCompName ) +import Stack.Types.GhcPkgId ( GhcPkgId, ghcPkgIdString ) + +-- | Type representing user package databases that packages can be installed +-- into. +data InstallLocation + = Snap + -- ^ The write-only package database, formerly known as the snapshot + -- database. + | Local + -- ^ The mutable package database, formerly known as the local database. + deriving (Eq, Show) + +instance Semigroup InstallLocation where + Local <> _ = Local + _ <> Local = Local + Snap <> Snap = Snap + +instance Monoid InstallLocation where + mempty = Snap + mappend = (<>) + +-- | Type representing user (non-global) package databases that can provide +-- installed packages. +data InstalledPackageLocation + = InstalledTo InstallLocation + -- ^ A package database that a package can be installed into. + | ExtraPkgDb + -- ^ An \'extra\' package database, specified by @extra-package-dbs@. + deriving (Eq, Show) + +-- | Type representing package databases that can provide installed packages. +data PackageDatabase + = GlobalPkgDb + -- ^ GHC's global package database. + | UserPkgDb InstalledPackageLocation (Path Abs Dir) + -- ^ A user package database. + deriving (Eq, Show) + +-- | A function to yield the variety of package database for a given +-- package database that can provide installed packages. 
+toPackageDbVariety :: PackageDatabase -> PackageDbVariety +toPackageDbVariety GlobalPkgDb = GlobalDb +toPackageDbVariety (UserPkgDb ExtraPkgDb _) = ExtraDb +toPackageDbVariety (UserPkgDb (InstalledTo Snap) _) = WriteOnlyDb +toPackageDbVariety (UserPkgDb (InstalledTo Local) _) = MutableDb + +-- | Type representing varieties of package databases that can provide +-- installed packages. +data PackageDbVariety + = GlobalDb + -- ^ GHC's global package database. + | ExtraDb + -- ^ An \'extra\' package database, specified by @extra-package-dbs@. + | WriteOnlyDb + -- ^ The write-only package database, for immutable packages. + | MutableDb + -- ^ The mutable package database. + deriving (Eq, Show) + +-- | Type synonym representing dictionaries of package names for a project's +-- packages and dependencies, and pairs of their relevant database (write-only +-- or mutable) and package versions. +type InstallMap = Map PackageName (InstallLocation, Version) + +-- | Type synonym representing dictionaries of package names, and a pair of in +-- which package database the package is installed (write-only or mutable) and +-- information about what is installed. +type InstalledMap = Map PackageName (InstallLocation, Installed) + +data InstalledLibraryInfo = InstalledLibraryInfo + { ghcPkgId :: GhcPkgId + , license :: Maybe (Either SPDX.License License) + , subLib :: Map StackUnqualCompName GhcPkgId + } + deriving (Eq, Show) + +-- | Type representing information about what is installed. +data Installed + = Library PackageIdentifier InstalledLibraryInfo + -- ^ A library, including its installed package id and, optionally, its + -- license. + | Executable PackageIdentifier + -- ^ An executable. 
+ deriving (Eq, Show) + +installedLibraryInfoFromGhcPkgId :: GhcPkgId -> InstalledLibraryInfo +installedLibraryInfoFromGhcPkgId ghcPkgId = + InstalledLibraryInfo ghcPkgId Nothing mempty + +simpleInstalledLib :: + PackageIdentifier + -> GhcPkgId + -> Map StackUnqualCompName GhcPkgId + -> Installed +simpleInstalledLib pkgIdentifier ghcPkgId = + Library pkgIdentifier . InstalledLibraryInfo ghcPkgId Nothing + +installedToPackageIdOpt :: InstalledLibraryInfo -> [String] +installedToPackageIdOpt libInfo = + M.foldr' (iterator (++)) (pure $ toStr libInfo.ghcPkgId) libInfo.subLib + where + toStr ghcPkgId = "-package-id=" <> ghcPkgIdString ghcPkgId + iterator op ghcPkgId acc = pure (toStr ghcPkgId) `op` acc + +installedPackageIdentifier :: Installed -> PackageIdentifier +installedPackageIdentifier (Library pid _) = pid +installedPackageIdentifier (Executable pid) = pid + +-- | A strict fold over the 'GhcPkgId' of the given installed package. This will +-- iterate on both sub and main libraries, if any. +foldOnGhcPkgId' :: + (Maybe StackUnqualCompName -> GhcPkgId -> resT -> resT) + -> Installed + -> resT + -> resT +foldOnGhcPkgId' _ Executable{} res = res +foldOnGhcPkgId' fn (Library _ libInfo) res = + M.foldrWithKey' (fn . Just) (base res) libInfo.subLib + where + base = fn Nothing libInfo.ghcPkgId + +-- | Get the installed Version. +installedVersion :: Installed -> Version +installedVersion i = + let PackageIdentifier _ version = installedPackageIdentifier i + in version diff --git a/src/Stack/Types/InterfaceOpt.hs b/src/Stack/Types/InterfaceOpt.hs new file mode 100644 index 0000000000..25629d7208 --- /dev/null +++ b/src/Stack/Types/InterfaceOpt.hs @@ -0,0 +1,26 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.InterfaceOpt +Description : Type representing Haddock interface options. +License : BSD-3-Clause + +Type representing Haddock interface options. +-} + +module Stack.Types.InterfaceOpt + ( InterfaceOpt (..) 
+ ) where + +import Data.Time ( UTCTime ) +import Stack.Prelude + +-- | Type representing Haddock interface options. +data InterfaceOpt = InterfaceOpt + { readInterfaceArgs :: ![String] + , srcInterfaceFileModTime :: !UTCTime + , srcInterfaceFile :: !(Path Abs File) + , destInterfaceFile :: !(Path Abs File) + } + deriving (Eq, Ord) diff --git a/src/Stack/Types/IsMutable.hs b/src/Stack/Types/IsMutable.hs new file mode 100644 index 0000000000..e5dad523ba --- /dev/null +++ b/src/Stack/Types/IsMutable.hs @@ -0,0 +1,26 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.IsMutable +License : BSD-3-Clause +-} + +module Stack.Types.IsMutable + ( IsMutable (..) + ) where + +import Stack.Prelude + +data IsMutable + = Mutable + | Immutable + deriving (Eq, Show) + +instance Semigroup IsMutable where + Mutable <> _ = Mutable + _ <> Mutable = Mutable + Immutable <> Immutable = Immutable + +instance Monoid IsMutable where + mempty = Immutable + mappend = (<>) diff --git a/src/Stack/Types/LockFileBehavior.hs b/src/Stack/Types/LockFileBehavior.hs new file mode 100644 index 0000000000..8f96d1ef1b --- /dev/null +++ b/src/Stack/Types/LockFileBehavior.hs @@ -0,0 +1,46 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.LockFileBehavior +License : BSD-3-Clause +-} + +module Stack.Types.LockFileBehavior + ( LockFileBehavior (..) + , readLockFileBehavior + ) where + +import qualified Data.Map as Map +import Options.Applicative ( ReadM ) +import qualified Options.Applicative.Types as OA +import qualified RIO.List as List +import Stack.Prelude + +-- | How to interact with lock files +data LockFileBehavior + = LFBReadWrite + -- ^ Read and write lock files + | LFBReadOnly + -- ^ Read lock files, but do not write them + | LFBIgnore + -- ^ Entirely ignore lock files + | LFBErrorOnWrite + -- ^ Error out on trying to write a lock file. This can be used to + -- ensure that lock files in a repository already ensure + -- reproducible builds. 
+ deriving (Bounded, Enum, Show) + +-- | Parser for 'LockFileBehavior' +readLockFileBehavior :: ReadM LockFileBehavior +readLockFileBehavior = do + s <- OA.readerAsk + case Map.lookup s m of + Just x -> pure x + Nothing -> OA.readerError $ "Invalid lock file behavior, valid options: " ++ + List.intercalate ", " (Map.keys m) + where + m = Map.fromList $ map (\x -> (render x, x)) [minBound..maxBound] + render LFBReadWrite = "read-write" + render LFBReadOnly = "read-only" + render LFBIgnore = "ignore" + render LFBErrorOnWrite = "error-on-write" diff --git a/src/Stack/Types/LsOpts.hs b/src/Stack/Types/LsOpts.hs new file mode 100644 index 0000000000..b953840609 --- /dev/null +++ b/src/Stack/Types/LsOpts.hs @@ -0,0 +1,105 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.LsOpts +Description : Types related to Stack's @ls@ command. +License : BSD-3-Clause + +Types related to Stack's @ls@ command. +-} + +module Stack.Types.LsOpts + ( LsCmdOpts (..) + , LsCmds (..) + , SnapshotOpts (..) + , LsView (..) + , ListDepsOpts (..) + , ListDepsFormat (..) + , ListDepsFormatOpts (..) + , ListDepsTextFilter (..) + , ListGlobalsOpts (..) + , ListStylesOpts (..) + , ListToolsOpts (..) + ) where + +import Stack.Prelude +import Stack.Types.DotOpts ( DotOpts (..) ) + +-- | Type representing command line options for the @stack ls@ command. +newtype LsCmdOpts + = LsCmdOpts { lsCmds :: LsCmds } + +-- | Type representing subcommands for the @stack ls@ command. +data LsCmds + = LsSnapshot SnapshotOpts + | LsGlobals ListGlobalsOpts + | LsDependencies ListDepsOpts + | LsStyles ListStylesOpts + | LsTools ListToolsOpts + +-- | Type representing command line options for the @stack ls snapshots@ +-- command. +data SnapshotOpts = SnapshotOpts + { viewType :: LsView + , ltsSnapView :: Bool + , nightlySnapView :: Bool + } + deriving (Eq, Ord, Show) + +-- | Type representing subcommands for the @stack ls snapshots@ command. 
+data LsView + = Local + | Remote + deriving (Eq, Ord, Show) + +-- | Type representing command line options for the @stack ls globals@ command. +newtype ListGlobalsOpts = ListGlobalsOpts + { globalHints :: Bool + -- ^ Use global hints instead of relying on an actual GHC installation. + } + +-- | Type representing command line options for the @stack ls dependencies@ +-- command. +data ListDepsOpts = ListDepsOpts + { format :: !ListDepsFormat + -- ^ Format of printing dependencies + , dotOpts :: !DotOpts + -- ^ The normal dot options. + } + +-- | Type representing formats for printing dependencies. +data ListDepsFormat + = ListDepsText ListDepsFormatOpts [ListDepsTextFilter] + | ListDepsTree ListDepsFormatOpts + | ListDepsJSON + | ListDepsConstraints + +-- | Type representing command line options for the @stack ls dependencies text@ +-- command and similar @cabal@, @tree@ and @json@ commands. +data ListDepsFormatOpts = ListDepsFormatOpts + { sep :: !Text + -- ^ Separator between the package name and details. + , license :: !Bool + -- ^ Print dependency licenses instead of versions. + } + +-- | Type representing items to filter the results of @stack ls dependencies@. +data ListDepsTextFilter + = FilterPackage PackageName + -- ^ Item is a package name. + | FilterLocals + -- ^ Item represents all project packages. + +-- | Type representing command line options for the @stack ls stack-colors@ and +-- @stack ls stack-colours@ commands. +data ListStylesOpts = ListStylesOpts + { basic :: Bool + , sgr :: Bool + , example :: Bool + } + deriving (Eq, Ord, Show) + +-- | Type representing command line options for the @stack ls tools@ command. 
+newtype ListToolsOpts + = ListToolsOpts { filter :: String } diff --git a/src/Stack/Types/MsysEnvironment.hs b/src/Stack/Types/MsysEnvironment.hs new file mode 100644 index 0000000000..7372f72ff7 --- /dev/null +++ b/src/Stack/Types/MsysEnvironment.hs @@ -0,0 +1,72 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.MsysEnvironment +Description : Type representing MSYS2 environments and related functions. +License : BSD-3-Clause + +Type representing MSYS2 environments and related functions. +-} + +module Stack.Types.MsysEnvironment + ( MsysEnvironment (..) + , msysEnvArch + , relDirMsysEnv + ) where + +import Data.Aeson.Types ( FromJSON (..) ) +import Distribution.System ( Arch (..) ) +import Stack.Constants + ( relDirClang32, relDirClang64, relDirClangArm64 + , relDirMingw32, relDirMingw64, relDirUcrt64 + ) +import Stack.Prelude + +-- | Type representing MSYS2 environments. +data MsysEnvironment + = CLANG32 + | CLANG64 + | CLANGARM64 + | MINGW32 + -- ^ Stack's default on architecture i386, and applied if GHC version is + -- earlier than GHC 9.6. + | MINGW64 + -- ^ Stack's default on architecture x86_64, and applied if GHC version is + -- earlier than GHC 9.6. + | UCRT64 + deriving (Eq, Ord, Show) + +-- | MSYS2 environment names are treated as case sensitive. +instance FromJSON MsysEnvironment where + parseJSON v = do + s <- parseJSON v + case s of + "CLANG32" -> pure CLANG32 + "CLANG64" -> pure CLANG64 + "CLANGARM64" -> pure CLANGARM64 + "MINGW32" -> pure MINGW32 + "MINGW64" -> pure MINGW64 + "UCRT64" -> pure UCRT64 + _ -> fail ("Unknown MSYS2 environment: " <> s) + +-- | Function that yields the architecture relevant to an MSYS2 environment, +-- based on https://www.msys2.org/docs/environments/. 
+msysEnvArch :: MsysEnvironment -> Arch +msysEnvArch env = case env of + CLANG32 -> I386 + CLANG64 -> X86_64 + CLANGARM64 -> AArch64 + MINGW32 -> I386 + MINGW64 -> X86_64 + UCRT64 -> X86_64 + +-- | Function that yields the prefix relevant to an MSYS2 environment, based on +-- https://www.msys2.org/docs/environments/. +relDirMsysEnv :: MsysEnvironment -> Path Rel Dir +relDirMsysEnv env = case env of + CLANG32 -> relDirClang32 + CLANG64 -> relDirClang64 + CLANGARM64 -> relDirClangArm64 + MINGW32 -> relDirMingw32 + MINGW64 -> relDirMingw64 + UCRT64 -> relDirUcrt64 diff --git a/src/Stack/Types/NamedComponent.hs b/src/Stack/Types/NamedComponent.hs index 3b360f0a12..dac24005e8 100644 --- a/src/Stack/Types/NamedComponent.hs +++ b/src/Stack/Types/NamedComponent.hs @@ -1,79 +1,116 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.NamedComponent +Description : Module exporting the 'NamedComponent' type and related functions. +License : BSD-3-Clause + +Module exporting the 'NamedComponent' type and related functions. +-} + module Stack.Types.NamedComponent ( NamedComponent (..) + , componentCachePath , renderComponent + , renderComponentTo , renderPkgComponents , renderPkgComponent , exeComponents , testComponents , benchComponents - , internalLibComponents + , subLibComponents , isCLib - , isCInternalLib + , isCSubLib , isCExe , isCTest , isCBench + , isPotentialDependency + , splitComponents ) where -import Pantry -import Stack.Prelude import qualified Data.Set as Set import qualified Data.Text as T +import Stack.Prelude +import Stack.Types.ComponentUtils + ( StackUnqualCompName, unqualCompToString, unqualCompToText + ) --- | A single, fully resolved component of a package +-- | Type representing components of a fully-resolved Cabal package. data NamedComponent - = CLib - | CInternalLib !Text - | CExe !Text - | CTest !Text - | CBench !Text - deriving (Show, Eq, Ord) + = CLib + -- The \'main\' unnamed library component. 
+ | CSubLib !StackUnqualCompName + -- A named \'subsidiary\' or \'ancillary\` library component (sub-library). + | CFlib !StackUnqualCompName + -- A foreign library. + | CExe !StackUnqualCompName + -- A named executable component. + | CTest !StackUnqualCompName + -- A named test-suite component. + | CBench !StackUnqualCompName + -- A named benchmark component. + deriving (Eq, Ord, Show) + +-- | Render a component to anything with an "IsString" instance. For 'Text' +-- prefer 'renderComponent'. +renderComponentTo :: IsString a => NamedComponent -> a +renderComponentTo = fromString . T.unpack . renderComponent renderComponent :: NamedComponent -> Text renderComponent CLib = "lib" -renderComponent (CInternalLib x) = "internal-lib:" <> x -renderComponent (CExe x) = "exe:" <> x -renderComponent (CTest x) = "test:" <> x -renderComponent (CBench x) = "bench:" <> x +renderComponent (CSubLib x) = "sub-lib:" <> unqualCompToText x +renderComponent (CFlib x) = "flib:" <> unqualCompToText x +renderComponent (CExe x) = "exe:" <> unqualCompToText x +renderComponent (CTest x) = "test:" <> unqualCompToText x +renderComponent (CBench x) = "bench:" <> unqualCompToText x + +componentCachePath :: NamedComponent -> String +componentCachePath CLib = "lib" +componentCachePath (CSubLib x) = "sub-lib-" <> unqualCompToString x +componentCachePath (CFlib x) = "flib-" <> unqualCompToString x +componentCachePath (CExe x) = "exe-" <> unqualCompToString x +componentCachePath (CTest x) = "test-" <> unqualCompToString x +componentCachePath (CBench x) = "bench-" <> unqualCompToString x renderPkgComponents :: [(PackageName, NamedComponent)] -> Text renderPkgComponents = T.intercalate " " . 
map renderPkgComponent renderPkgComponent :: (PackageName, NamedComponent) -> Text -renderPkgComponent (pkg, comp) = fromString (packageNameString pkg) <> ":" <> renderComponent comp +renderPkgComponent (pkg, comp) = + fromPackageName pkg <> ":" <> renderComponent comp -exeComponents :: Set NamedComponent -> Set Text +exeComponents :: Set NamedComponent -> Set StackUnqualCompName exeComponents = Set.fromList . mapMaybe mExeName . Set.toList - where - mExeName (CExe name) = Just name - mExeName _ = Nothing + where + mExeName (CExe name) = Just name + mExeName _ = Nothing -testComponents :: Set NamedComponent -> Set Text +testComponents :: Set NamedComponent -> Set StackUnqualCompName testComponents = Set.fromList . mapMaybe mTestName . Set.toList - where - mTestName (CTest name) = Just name - mTestName _ = Nothing + where + mTestName (CTest name) = Just name + mTestName _ = Nothing -benchComponents :: Set NamedComponent -> Set Text +benchComponents :: Set NamedComponent -> Set StackUnqualCompName benchComponents = Set.fromList . mapMaybe mBenchName . Set.toList - where - mBenchName (CBench name) = Just name - mBenchName _ = Nothing + where + mBenchName (CBench name) = Just name + mBenchName _ = Nothing -internalLibComponents :: Set NamedComponent -> Set Text -internalLibComponents = Set.fromList . mapMaybe mInternalName . Set.toList - where - mInternalName (CInternalLib name) = Just name - mInternalName _ = Nothing +subLibComponents :: Set NamedComponent -> Set StackUnqualCompName +subLibComponents = Set.fromList . mapMaybe mSubLibName . 
Set.toList + where + mSubLibName (CSubLib name) = Just name + mSubLibName _ = Nothing isCLib :: NamedComponent -> Bool isCLib CLib{} = True isCLib _ = False -isCInternalLib :: NamedComponent -> Bool -isCInternalLib CInternalLib{} = True -isCInternalLib _ = False +isCSubLib :: NamedComponent -> Bool +isCSubLib CSubLib{} = True +isCSubLib _ = False isCExe :: NamedComponent -> Bool isCExe CExe{} = True @@ -86,3 +123,35 @@ isCTest _ = False isCBench :: NamedComponent -> Bool isCBench CBench{} = True isCBench _ = False + +isPotentialDependency :: NamedComponent -> Bool +isPotentialDependency v = isCLib v || isCSubLib v || isCExe v + +-- | A function to split the given list of components into sets of the names of +-- the named components by the type of component (sub-libraries, executables, +-- test-suites, benchmarks), ignoring any \'main\' unnamed library component or +-- foreign library component. This function should be used very sparingly; more +-- often than not, you can keep/parse the components split from the start. +splitComponents :: + [NamedComponent] + -> ( Set StackUnqualCompName + -- ^ Sub-libraries. + , Set StackUnqualCompName + -- ^ Executables. + , Set StackUnqualCompName + -- ^ Test-suites. + , Set StackUnqualCompName + -- ^ Benchmarks. + ) +splitComponents = + go id id id id + where + run c = Set.fromList $ c [] + go s e t b [] = (run s, run e, run t, run b) + go s e t b (CLib : xs) = go s e t b xs + go s e t b (CSubLib x : xs) = go (s . (x:)) e t b xs + -- Ignore foreign libraries, for now. + go s e t b (CFlib _ : xs) = go s e t b xs + go s e t b (CExe x : xs) = go s (e . (x:)) t b xs + go s e t b (CTest x : xs) = go s e (t . (x:)) b xs + go s e t b (CBench x : xs) = go s e t (b . 
(x:)) xs diff --git a/src/Stack/Types/Nix.hs b/src/Stack/Types/Nix.hs index a1b977029b..4fcc61a968 100644 --- a/src/Stack/Types/Nix.hs +++ b/src/Stack/Types/Nix.hs @@ -1,64 +1,93 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedStrings #-} --- | Nix types. +{-| +Module : Stack.Types.Nix +Description : Nix types. +License : BSD-3-Clause -module Stack.Types.Nix where +Nix types. +-} -import Pantry.Internal.AesonExtended -import Stack.Prelude -import Generics.Deriving.Monoid (mappenddefault, memptydefault) +module Stack.Types.Nix + ( NixOpts (..) + , NixOptsMonoid (..) + , nixAddGCRootsArgName + , nixEnableArgName + , nixInitFileArgName + , nixPackagesArgName + , nixPathArgName + , nixPureShellArgName + , nixShellOptsArgName + ) where --- | Nix configuration. Parameterize by resolver type to avoid cyclic +import Data.Aeson.Types ( FromJSON (..) ) +import Data.Aeson.WarningParser + ( WithJSONWarnings, (..:?), withObjectWarnings ) +import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) +import Stack.Prelude + +-- | Nix configuration. Parameterize by snapshot type to avoid cyclic -- dependency. 
data NixOpts = NixOpts - {nixEnable :: !Bool - ,nixPureShell :: !Bool - ,nixPackages :: ![Text] + { enable :: !Bool + , pureShell :: !Bool + , packages :: ![Text] -- ^ The system packages to be installed in the environment before it runs - ,nixInitFile :: !(Maybe FilePath) - -- ^ The path of a file containing preconfiguration of the environment (e.g shell.nix) - ,nixShellOptions :: ![Text] + , initFile :: !(Maybe FilePath) + -- ^ The path of a file containing preconfiguration of the environment + -- (e.g shell.nix) + , shellOptions :: ![Text] -- ^ Options to be given to the nix-shell command line - ,nixAddGCRoots :: !Bool - -- ^ Should we register gc roots so running nix-collect-garbage doesn't remove nix dependencies + , addGCRoots :: !Bool + -- ^ Should we register gc roots so running nix-collect-garbage doesn't + -- remove nix dependencies } - deriving (Show) + deriving Show -- | An uninterpreted representation of nix options. -- Configurations may be "cascaded" using mappend (left-biased). data NixOptsMonoid = NixOptsMonoid - {nixMonoidEnable :: !(First Bool) + { enable :: !(First Bool) -- ^ Is using nix-shell enabled? 
- ,nixMonoidPureShell :: !(First Bool) + , pureShell :: !(First Bool) -- ^ Should the nix-shell be pure - ,nixMonoidPackages :: !(First [Text]) + , packages :: !(First [Text]) -- ^ System packages to use (given to nix-shell) - ,nixMonoidInitFile :: !(First FilePath) - -- ^ The path of a file containing preconfiguration of the environment (e.g shell.nix) - ,nixMonoidShellOptions :: !(First [Text]) + , initFile :: !(First FilePath) + -- ^ The path of a file containing preconfiguration of the environment (e.g + -- shell.nix) + , shellOptions :: !(First [Text]) -- ^ Options to be given to the nix-shell command line - ,nixMonoidPath :: !(First [Text]) - -- ^ Override parts of NIX_PATH (notably 'nixpkgs') - ,nixMonoidAddGCRoots :: !FirstFalse - -- ^ Should we register gc roots so running nix-collect-garbage doesn't remove nix dependencies + , path :: !(First [Text]) + -- ^ Override parts of NIX_PATH (notably \'nixpkgs\') + , addGCRoots :: !FirstFalse + -- ^ Should we register gc roots so running nix-collect-garbage doesn't + -- remove nix dependencies } - deriving (Eq, Show, Generic) + deriving (Eq, Generic, Show) -- | Decode uninterpreted nix options from JSON/YAML. instance FromJSON (WithJSONWarnings NixOptsMonoid) where - parseJSON = withObjectWarnings "NixOptsMonoid" - (\o -> do nixMonoidEnable <- First <$> o ..:? nixEnableArgName - nixMonoidPureShell <- First <$> o ..:? nixPureShellArgName - nixMonoidPackages <- First <$> o ..:? nixPackagesArgName - nixMonoidInitFile <- First <$> o ..:? nixInitFileArgName - nixMonoidShellOptions <- First <$> o ..:? nixShellOptsArgName - nixMonoidPath <- First <$> o ..:? nixPathArgName - nixMonoidAddGCRoots <- FirstFalse <$> o ..:? nixAddGCRootsArgName - return NixOptsMonoid{..}) + parseJSON = withObjectWarnings "NixOptsMonoid" $ \o -> do + enable <- First <$> o ..:? nixEnableArgName + pureShell <- First <$> o ..:? nixPureShellArgName + packages <- First <$> o ..:? nixPackagesArgName + initFile <- First <$> o ..:? 
nixInitFileArgName + shellOptions <- First <$> o ..:? nixShellOptsArgName + path <- First <$> o ..:? nixPathArgName + addGCRoots <- FirstFalse <$> o ..:? nixAddGCRootsArgName + pure NixOptsMonoid + { enable + , pureShell + , packages + , initFile + , shellOptions + , path + , addGCRoots + } -- | Left-biased combine Nix options instance Semigroup NixOptsMonoid where diff --git a/src/Stack/Types/Package.hs b/src/Stack/Types/Package.hs index afae34dfec..488415ddea 100644 --- a/src/Stack/Types/Package.hs +++ b/src/Stack/Types/Package.hs @@ -1,34 +1,91 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE ConstraintKinds #-} -module Stack.Types.Package where +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.Package +License : BSD-3-Clause +-} + +module Stack.Types.Package + ( BioInput (..) + , BuildInfoOpts (..) + , ExeName (..) + , InstallLocation (..) + , Installed (..) + , InstalledLibraryInfo (..) + , InstalledPackageLocation (..) + , LocalPackage (..) + , MemoizedWith (..) + , Package (..) + , PackageConfig (..) + , PackageDatabase (..) + , PackageDbVariety (..) + , PackageException (..) + , PackageSource (..) 
+ , dotCabalCFilePath + , dotCabalGetPath + , dotCabalMain + , dotCabalMainPath + , dotCabalModule + , dotCabalModulePath + , installedMapGhcPkgId + , installedPackageToGhcPkgId + , lpFiles + , lpFilesForComponents + , memoizeRefWith + , packageDefinedFlags + , packageIdentifier + , psVersion + , runMemoizedWith + , simpleInstalledLib + , toCabalMungedPackageName + , toPackageDbVariety + ) where -import Stack.Prelude -import qualified RIO.Text as T -import Data.Aeson (ToJSON (..), FromJSON (..), (.=), (.:), object, withObject) import qualified Data.Map as M import qualified Data.Set as Set -import Distribution.Parsec (PError (..), PWarning (..), showPos) +import Distribution.CabalSpecVersion +import Distribution.Parsec ( PError (..), PWarning (..), showPos ) import qualified Distribution.SPDX.License as SPDX -import Distribution.License (License) -import Distribution.ModuleName (ModuleName) -import Distribution.PackageDescription (TestSuiteInterface, BuildType) -import Distribution.System (Platform (..)) -import Stack.Types.Compiler -import Stack.Types.Config -import Stack.Types.GhcPkgId -import Stack.Types.NamedComponent -import Stack.Types.SourceMap -import Stack.Types.Version - --- | All exceptions thrown by the library. +import Distribution.License ( License ) +import Distribution.ModuleName ( ModuleName ) +import Distribution.PackageDescription ( BuildType ) +import Distribution.System ( Platform (..) 
) +import Distribution.Types.MungedPackageName + ( encodeCompatPackageName ) +import qualified RIO.Text as T +import Stack.Prelude +import Stack.Types.Cache ( FileCache ) +import Stack.Types.CompCollection ( CompCollection ) +import Stack.Types.Compiler ( ActualCompiler ) +import Stack.Types.Component + ( StackBenchmark, StackBuildInfo, StackExecutable + , StackForeignLibrary, StackLibrary, StackTestSuite + , StackUnqualCompName + ) +import Stack.Types.ComponentUtils (toCabalName) +import Stack.Types.Dependency ( DepValue ) +import Stack.Types.EnvConfig ( EnvConfig, HasEnvConfig (..) ) +import Stack.Types.GhcPkgId ( GhcPkgId ) +import Stack.Types.Installed + ( InstallLocation (..), InstallMap, Installed (..) + , InstalledLibraryInfo (..), InstalledMap + , InstalledPackageLocation (..), PackageDatabase (..) + , PackageDbVariety(..), simpleInstalledLib + , toPackageDbVariety + ) +import Stack.Types.NamedComponent ( NamedComponent ) +import Stack.Types.PackageFile + ( DotCabalDescriptor (..), DotCabalPath (..) + , StackPackageFile + ) +import Stack.Types.SourceMap ( CommonPackage, FromSnapshot ) + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Package" module. data PackageException = PackageInvalidCabalFile !(Either PackageIdentifierRevision (Path Abs File)) @@ -36,268 +93,235 @@ data PackageException ![PError] ![PWarning] | MismatchedCabalIdentifier !PackageIdentifierRevision !PackageIdentifier - deriving Typeable -instance Exception PackageException -instance Show PackageException where - show (PackageInvalidCabalFile loc _mversion errs warnings) = concat - [ "Unable to parse cabal file " - , case loc of - Left pir -> "for " ++ T.unpack (utf8BuilderToText (display pir)) - Right fp -> toFilePath fp - {- - - Not actually needed, the errors will indicate if a newer version exists. - Also, it seems that this is set to Just the version even if we support it. 
- - , case mversion of - Nothing -> "" - Just version -> "\nRequires newer Cabal file parser version: " ++ - versionString version - -} - , "\n\n" - , unlines $ map - (\(PError pos msg) -> concat - [ "- " - , showPos pos - , ": " - , msg - ]) - errs - , unlines $ map - (\(PWarning _ pos msg) -> concat - [ "- " - , showPos pos - , ": " - , msg - ]) - warnings - ] - show (MismatchedCabalIdentifier pir ident) = concat - [ "Mismatched package identifier." - , "\nFound: " - , packageIdentifierString ident - , "\nExpected: " - , T.unpack $ utf8BuilderToText $ display pir - ] - --- | Libraries in a package. Since Cabal 2.0, internal libraries are a --- thing. -data PackageLibraries - = NoLibraries - | HasLibraries !(Set Text) -- ^ the foreign library names, sub libraries get built automatically without explicit component name passing - deriving (Show,Typeable) + | CabalFileNameParseFail FilePath + | CabalFileNameInvalidPackageName FilePath + | ComponentNotParsedBug String + deriving Show + +instance Exception PackageException where + displayException (PackageInvalidCabalFile loc _mversion errs warnings) = concat + [ "Error: [S-8072]\n" + , "Unable to parse Cabal file " + , case loc of + Left pir -> "for " ++ T.unpack (utf8BuilderToText (display pir)) + Right fp -> toFilePath fp + {- + Not actually needed, the errors will indicate if a newer version exists. + Also, it seems that this is set to Just the version even if we support it. + , case mversion of + Nothing -> "" + Just version -> "\nRequires newer Cabal file parser version: " ++ + versionString version + -} + , "\n\n" + , unlines $ map + (\(PError pos msg) -> concat + [ "- " + , showPos pos + , ": " + , msg + ]) + errs + , unlines $ map + (\(PWarning _ pos msg) -> concat + [ "- " + , showPos pos + , ": " + , msg + ]) + warnings + ] + displayException (MismatchedCabalIdentifier pir ident) = concat + [ "Error: [S-5394]\n" + , "Mismatched package identifier." 
+ , "\nFound: " + , packageIdentifierString ident + , "\nExpected: " + , T.unpack $ utf8BuilderToText $ display pir + ] + displayException (CabalFileNameParseFail fp) = concat + [ "Error: [S-2203]\n" + , "Invalid file path for Cabal file, must have a .cabal extension: " + , fp + ] + displayException (CabalFileNameInvalidPackageName fp) = concat + [ "Error: [S-8854]\n" + , "Cabal file names must use valid package names followed by a .cabal \ + \extension, the following is invalid: " + , fp + ] + displayException (ComponentNotParsedBug name) = bugReport "[S-4623]" + ( "Component names should always parse as directory names. The component \ + \name without a directory is '" + <> name + <> "'." + ) -- | Name of an executable. -newtype ExeName = ExeName { unExeName :: Text } - deriving (Show, Eq, Ord, Hashable, IsString, Generic, NFData, Data, Typeable) +newtype ExeName + = ExeName { exeName :: Text } + deriving (Data, Eq, Generic, Hashable, IsString, NFData, Ord, Show) -- | Some package info. -data Package = - Package {packageName :: !PackageName -- ^ Name of the package. - ,packageVersion :: !Version -- ^ Version of the package - ,packageLicense :: !(Either SPDX.License License) -- ^ The license the package was released under. - ,packageFiles :: !GetPackageFiles -- ^ Get all files of the package. - ,packageDeps :: !(Map PackageName DepValue) -- ^ Packages that the package depends on, both as libraries and build tools. - ,packageUnknownTools :: !(Set ExeName) -- ^ Build tools specified in the legacy manner (build-tools:) that failed the hard-coded lookup. - ,packageAllDeps :: !(Set PackageName) -- ^ Original dependencies (not sieved). - ,packageGhcOptions :: ![Text] -- ^ Ghc options used on package. - ,packageCabalConfigOpts :: ![Text] -- ^ Additional options passed to ./Setup.hs configure - ,packageFlags :: !(Map FlagName Bool) -- ^ Flags used on package. - ,packageDefaultFlags :: !(Map FlagName Bool) -- ^ Defaults for unspecified flags. 
- ,packageLibraries :: !PackageLibraries -- ^ does the package have a buildable library stanza? - ,packageInternalLibraries :: !(Set Text) -- ^ names of internal libraries - ,packageTests :: !(Map Text TestSuiteInterface) -- ^ names and interfaces of test suites - ,packageBenchmarks :: !(Set Text) -- ^ names of benchmarks - ,packageExes :: !(Set Text) -- ^ names of executables - ,packageOpts :: !GetPackageOpts -- ^ Args to pass to GHC. - ,packageHasExposedModules :: !Bool -- ^ Does the package have exposed modules? - ,packageBuildType :: !BuildType -- ^ Package build-type. - ,packageSetupDeps :: !(Maybe (Map PackageName VersionRange)) - -- ^ If present: custom-setup dependencies - ,packageCabalSpec :: !VersionRange -- ^ Cabal spec range - } - deriving (Show,Typeable) - -packageIdent :: Package -> PackageIdentifier -packageIdent p = PackageIdentifier (packageName p) (packageVersion p) - --- | The value for a map from dependency name. This contains both the --- version range and the type of dependency, and provides a semigroup --- instance. -data DepValue = DepValue - { dvVersionRange :: !VersionRange - , dvType :: !DepType +data Package = Package + { name :: !PackageName + -- ^ Name of the package. + , version :: !Version + -- ^ Version of the package + , license :: !(Either SPDX.License License) + -- ^ The license the package was released under. + , ghcOptions :: ![Text] + -- ^ Ghc options used on package. + , cabalConfigOpts :: ![Text] + -- ^ Additional options passed to ./Setup.hs configure + , flags :: !(Map FlagName Bool) + -- ^ Flags used on package. + , defaultFlags :: !(Map FlagName Bool) + -- ^ Defaults for unspecified flags. + , library :: !(Maybe StackLibrary) + -- ^ Does the package have a buildable main library stanza? + , subLibraries :: !(CompCollection StackLibrary) + -- ^ The sub-libraries of the package. + , foreignLibraries :: !(CompCollection StackForeignLibrary) + -- ^ The foreign libraries of the package. 
+ , testSuites :: !(CompCollection StackTestSuite) + -- ^ The test suites of the package. + , benchmarks :: !(CompCollection StackBenchmark) + -- ^ The benchmarks of the package. + , executables :: !(CompCollection StackExecutable) + -- ^ The executables of the package. + , buildType :: !BuildType + -- ^ Package build-type. + , setupDeps :: !(Maybe (Map PackageName DepValue)) + -- ^ If present: custom-setup dependencies + , cabalSpec :: !CabalSpecVersion + -- ^ Cabal spec range + , file :: StackPackageFile + -- ^ The Cabal sourced files related to the package at the package level + -- The components may have file information in their own types + , testEnabled :: Bool + -- ^ This is a requirement because when tests are not enabled, Stack's + -- package dependencies should ignore test dependencies. Directly set from + -- 'enableTests'. + , benchmarkEnabled :: Bool + -- ^ This is a requirement because when benchmark are not enabled, Stack's + -- package dependencies should ignore benchmark dependencies. Directly set + -- from 'enableBenchmarks'. } - deriving (Show,Typeable) -instance Semigroup DepValue where - DepValue a x <> DepValue b y = DepValue (intersectVersionRanges a b) (x <> y) - --- | Is this package being used as a library, or just as a build tool? --- If the former, we need to ensure that a library actually --- exists. See --- -data DepType = AsLibrary | AsBuildTool - deriving (Show, Eq) -instance Semigroup DepType where - AsLibrary <> _ = AsLibrary - AsBuildTool <> x = x + deriving Show packageIdentifier :: Package -> PackageIdentifier -packageIdentifier pkg = - PackageIdentifier (packageName pkg) (packageVersion pkg) +packageIdentifier p = PackageIdentifier p.name p.version packageDefinedFlags :: Package -> Set FlagName -packageDefinedFlags = M.keysSet . packageDefaultFlags - -type InstallMap = Map PackageName (InstallLocation, Version) - --- | Files that the package depends on, relative to package directory. 
--- Argument is the location of the .cabal file -newtype GetPackageOpts = GetPackageOpts - { getPackageOpts :: forall env. HasEnvConfig env - => InstallMap - -> InstalledMap - -> [PackageName] - -> [PackageName] - -> Path Abs File - -> RIO env - (Map NamedComponent (Map ModuleName (Path Abs File)) - ,Map NamedComponent [DotCabalPath] - ,Map NamedComponent BuildInfoOpts) - } -instance Show GetPackageOpts where - show _ = "" +packageDefinedFlags = M.keysSet . (.defaultFlags) -- | GHC options based on cabal information and ghc-options. data BuildInfoOpts = BuildInfoOpts - { bioOpts :: [String] - , bioOneWordOpts :: [String] - , bioPackageFlags :: [String] - -- ^ These options can safely have 'nubOrd' applied to them, as - -- there are no multi-word options (see + { opts :: [String] + , oneWordOpts :: [String] + , packageFlags :: [String] + -- ^ These options can safely have 'nubOrd' applied to them, as there are no + -- multi-word options (see -- https://github.com/commercialhaskell/stack/issues/1255) - , bioCabalMacros :: Path Abs File - } deriving Show - --- | Files to get for a cabal package. -data CabalFileType - = AllFiles - | Modules - --- | Files that the package depends on, relative to package directory. --- Argument is the location of the .cabal file -newtype GetPackageFiles = GetPackageFiles - { getPackageFiles :: forall env. 
HasEnvConfig env - => Path Abs File - -> RIO env - (Map NamedComponent (Map ModuleName (Path Abs File)) - ,Map NamedComponent [DotCabalPath] - ,Set (Path Abs File) - ,[PackageWarning]) - } -instance Show GetPackageFiles where - show _ = "" - --- | Warning generated when reading a package -data PackageWarning - = UnlistedModulesWarning NamedComponent [ModuleName] - -- ^ Modules found that are not listed in cabal file - - -- TODO: bring this back - see - -- https://github.com/commercialhaskell/stack/issues/2649 - {- - | MissingModulesWarning (Path Abs File) (Maybe String) [ModuleName] - -- ^ Modules not found in file system, which are listed in cabal file - -} + , cabalMacros :: Path Abs File + } + deriving Show -- | Package build configuration -data PackageConfig = - PackageConfig {packageConfigEnableTests :: !Bool -- ^ Are tests enabled? - ,packageConfigEnableBenchmarks :: !Bool -- ^ Are benchmarks enabled? - ,packageConfigFlags :: !(Map FlagName Bool) -- ^ Configured flags. - ,packageConfigGhcOptions :: ![Text] -- ^ Configured ghc options. - ,packageConfigCabalConfigOpts :: ![Text] -- ^ ./Setup.hs configure options - ,packageConfigCompilerVersion :: ActualCompiler -- ^ GHC version - ,packageConfigPlatform :: !Platform -- ^ host platform - } - deriving (Show,Typeable) +data PackageConfig = PackageConfig + { enableTests :: !Bool + -- ^ Are tests enabled? + , enableBenchmarks :: !Bool + -- ^ Are benchmarks enabled? + , flags :: !(Map FlagName Bool) + -- ^ Configured flags. + , ghcOptions :: ![Text] + -- ^ Configured ghc options. + , cabalConfigOpts :: ![Text] + -- ^ ./Setup.hs configure options + , compilerVersion :: ActualCompiler + -- ^ GHC version + , platform :: !Platform + -- ^ host platform + } + deriving Show -- | Compares the package name. instance Ord Package where - compare = on compare packageName + compare = on compare (.name) -- | Compares the package name. 
instance Eq Package where - (==) = on (==) packageName + (==) = on (==) (.name) -- | Where the package's source is located: local directory or package index data PackageSource = PSFilePath LocalPackage - -- ^ Package which exist on the filesystem + -- ^ Package which exist on the filesystem | PSRemote PackageLocationImmutable Version FromSnapshot CommonPackage - -- ^ Package which is downloaded remotely. + -- ^ Package which is downloaded remotely. instance Show PackageSource where - show (PSFilePath lp) = concat ["PSFilePath (", show lp, ")"] - show (PSRemote pli v fromSnapshot _) = - concat - [ "PSRemote" - , "(", show pli, ")" - , "(", show v, ")" - , show fromSnapshot - , "" - ] - + show (PSFilePath lp) = concat ["PSFilePath (", show lp, ")"] + show (PSRemote pli v fromSnapshot _) = + concat + [ "PSRemote" + , "(", show pli, ")" + , "(", show v, ")" + , show fromSnapshot + , "" + ] psVersion :: PackageSource -> Version -psVersion (PSFilePath lp) = packageVersion $ lpPackage lp +psVersion (PSFilePath lp) = lp.package.version psVersion (PSRemote _ v _ _) = v --- | Information on a locally available package of source code +-- | Information on a locally available package of source code. data LocalPackage = LocalPackage - { lpPackage :: !Package - -- ^ The @Package@ info itself, after resolution with package flags, - -- with tests and benchmarks disabled - , lpComponents :: !(Set NamedComponent) + { package :: !Package + -- ^ The @Package@ info itself, after resolution with package flags, with + -- tests and benchmarks disabled + , components :: !(Set NamedComponent) -- ^ Components to build, not including the library component. - , lpUnbuildable :: !(Set NamedComponent) + , unbuildable :: !(Set NamedComponent) -- ^ Components explicitly requested for build, that are marked -- "buildable: false". - , lpWanted :: !Bool -- FIXME Should completely drop this "wanted" terminology, it's unclear + , wanted :: !Bool -- ^ Whether this package is wanted as a target. 
- , lpTestDeps :: !(Map PackageName VersionRange) - -- ^ Used for determining if we can use --enable-tests in a normal build. - , lpBenchDeps :: !(Map PackageName VersionRange) - -- ^ Used for determining if we can use --enable-benchmarks in a normal - -- build. - , lpTestBench :: !(Maybe Package) - -- ^ This stores the 'Package' with tests and benchmarks enabled, if - -- either is asked for by the user. - , lpCabalFile :: !(Path Abs File) - -- ^ The .cabal file - , lpBuildHaddocks :: !Bool - , lpForceDirty :: !Bool - , lpDirtyFiles :: !(MemoizedWith EnvConfig (Maybe (Set FilePath))) + + -- FIXME Should completely drop this "wanted" terminology, it's unclear. + , testBench :: !(Maybe Package) + -- ^ This stores the t'Package' with tests and benchmarks enabled, if either + -- is asked for by the user. + , cabalFP :: !(Path Abs File) + -- ^ Absolute path to the Cabal file. + , buildHaddocks :: !Bool + -- ^ Is Haddock documentation being built for this package? + , forceDirty :: !Bool + , dirtyFiles :: !(MemoizedWith EnvConfig (Maybe (Set FilePath))) -- ^ Nothing == not dirty, Just == dirty. Note that the Set may be empty if -- we forced the build to treat packages as dirty. Also, the Set may not -- include all modified files. - , lpNewBuildCaches :: !(MemoizedWith EnvConfig (Map NamedComponent (Map FilePath FileCacheInfo))) - -- ^ current state of the files - , lpComponentFiles :: !(MemoizedWith EnvConfig (Map NamedComponent (Set (Path Abs File)))) - -- ^ all files used by this package - } - deriving Show + , newBuildCaches :: !(MemoizedWith EnvConfig (Map NamedComponent FileCache)) + -- ^ Current state of the files. + , componentFiles :: !( MemoizedWith + EnvConfig + (Map NamedComponent (Set (Path Abs File))) + ) + -- ^ All files used by this package. 
+ } + deriving Show -newtype MemoizedWith env a = MemoizedWith { unMemoizedWith :: RIO env a } - deriving (Functor, Applicative, Monad) +newtype MemoizedWith env a + = MemoizedWith { memoizedWith :: RIO env a } + deriving (Applicative, Functor, Monad) memoizeRefWith :: MonadIO m => RIO env a -> m (MemoizedWith env a) memoizeRefWith action = do ref <- newIORef Nothing pure $ MemoizedWith $ do - mres <- readIORef ref - res <- - case mres of + res <- readIORef ref >>= \case Just res -> pure res Nothing -> do res <- tryAny action @@ -305,8 +329,8 @@ memoizeRefWith action = do pure res either throwIO pure res -runMemoizedWith - :: (HasEnvConfig env, MonadReader env m, MonadIO m) +runMemoizedWith :: + (HasEnvConfig env, MonadReader env m, MonadIO m) => MemoizedWith EnvConfig a -> m a runMemoizedWith (MemoizedWith action) = do @@ -317,60 +341,17 @@ instance Show (MemoizedWith env a) where show _ = "<>" lpFiles :: HasEnvConfig env => LocalPackage -> RIO env (Set.Set (Path Abs File)) -lpFiles = runMemoizedWith . fmap (Set.unions . M.elems) . lpComponentFiles +lpFiles = runMemoizedWith . fmap (Set.unions . M.elems) . 
(.componentFiles) -lpFilesForComponents :: HasEnvConfig env - => Set NamedComponent - -> LocalPackage - -> RIO env (Set.Set (Path Abs File)) +lpFilesForComponents :: + HasEnvConfig env + => Set NamedComponent + -> LocalPackage + -> RIO env (Set.Set (Path Abs File)) lpFilesForComponents components lp = runMemoizedWith $ do - componentFiles <- lpComponentFiles lp + componentFiles <- lp.componentFiles pure $ mconcat (M.elems (M.restrictKeys componentFiles components)) --- | A location to install a package into, either snapshot or local -data InstallLocation = Snap | Local - deriving (Show, Eq) -instance Semigroup InstallLocation where - Local <> _ = Local - _ <> Local = Local - Snap <> Snap = Snap -instance Monoid InstallLocation where - mempty = Snap - mappend = (<>) - -data InstalledPackageLocation = InstalledTo InstallLocation | ExtraGlobal - deriving (Show, Eq) - -newtype FileCacheInfo = FileCacheInfo - { fciHash :: SHA256 - } - deriving (Generic, Show, Eq, Typeable) -instance NFData FileCacheInfo - --- Provided for storing the BuildCache values in a file. But maybe --- JSON/YAML isn't the right choice here, worth considering. -instance ToJSON FileCacheInfo where - toJSON (FileCacheInfo hash') = object - [ "hash" .= hash' - ] -instance FromJSON FileCacheInfo where - parseJSON = withObject "FileCacheInfo" $ \o -> FileCacheInfo - <$> o .: "hash" - --- | A descriptor from a .cabal file indicating one of the following: --- --- exposed-modules: Foo --- other-modules: Foo --- or --- main-is: Foo.hs --- -data DotCabalDescriptor - = DotCabalModule !ModuleName - | DotCabalMain !FilePath - | DotCabalFile !FilePath - | DotCabalCFile !FilePath - deriving (Eq,Ord,Show) - -- | Maybe get the module name from the .cabal descriptor. 
dotCabalModule :: DotCabalDescriptor -> Maybe ModuleName dotCabalModule (DotCabalModule m) = Just m @@ -381,15 +362,6 @@ dotCabalMain :: DotCabalDescriptor -> Maybe FilePath dotCabalMain (DotCabalMain m) = Just m dotCabalMain _ = Nothing --- | A path resolved from the .cabal file, which is either main-is or --- an exposed/internal/referenced module. -data DotCabalPath - = DotCabalModulePath !(Path Abs File) - | DotCabalMainPath !(Path Abs File) - | DotCabalFilePath !(Path Abs File) - | DotCabalCFilePath !(Path Abs File) - deriving (Eq,Ord,Show) - -- | Get the module path. dotCabalModulePath :: DotCabalPath -> Maybe (Path Abs File) dotCabalModulePath (DotCabalModulePath fp) = Just fp @@ -408,25 +380,62 @@ dotCabalCFilePath _ = Nothing -- | Get the path. dotCabalGetPath :: DotCabalPath -> Path Abs File dotCabalGetPath dcp = - case dcp of - DotCabalModulePath fp -> fp - DotCabalMainPath fp -> fp - DotCabalFilePath fp -> fp - DotCabalCFilePath fp -> fp - -type InstalledMap = Map PackageName (InstallLocation, Installed) - -data Installed - = Library PackageIdentifier GhcPkgId (Maybe (Either SPDX.License License)) - | Executable PackageIdentifier - deriving (Show, Eq) - -installedPackageIdentifier :: Installed -> PackageIdentifier -installedPackageIdentifier (Library pid _ _) = pid -installedPackageIdentifier (Executable pid) = pid - --- | Get the installed Version. 
-installedVersion :: Installed -> Version -installedVersion i = - let PackageIdentifier _ version = installedPackageIdentifier i - in version + case dcp of + DotCabalModulePath fp -> fp + DotCabalMainPath fp -> fp + DotCabalFilePath fp -> fp + DotCabalCFilePath fp -> fp + +-- | Gathers all the GhcPkgId provided by a library into a map +installedMapGhcPkgId :: + PackageIdentifier + -> InstalledLibraryInfo + -> Map PackageIdentifier GhcPkgId +installedMapGhcPkgId pkgId@(PackageIdentifier pkgName version) installedLib = + finalMap + where + finalMap = M.insert pkgId installedLib.ghcPkgId baseMap + baseMap = + M.mapKeysMonotonic + (toCabalMungedPackageIdentifier pkgName version) + installedLib.subLib + +installedPackageToGhcPkgId :: + PackageIdentifier + -> Installed + -> Map PackageIdentifier GhcPkgId +installedPackageToGhcPkgId ident (Library ident' libInfo) = + assert (ident == ident') (installedMapGhcPkgId ident libInfo) +installedPackageToGhcPkgId _ (Executable _) = mempty + +-- | Creates a t'MungedPackageName' identifier. +toCabalMungedPackageIdentifier :: + PackageName + -> Version + -> StackUnqualCompName + -> PackageIdentifier +toCabalMungedPackageIdentifier pkgName version = flip PackageIdentifier version + . encodeCompatPackageName . toCabalMungedPackageName pkgName + +toCabalMungedPackageName :: + PackageName + -> StackUnqualCompName + -> MungedPackageName +toCabalMungedPackageName pkgName = + MungedPackageName pkgName . LSubLibName . toCabalName + +-- | Type representing inputs to 'Stack.Package.generateBuildInfoOpts'. 
+data BioInput = BioInput + { installMap :: !InstallMap + , installedMap :: !InstalledMap + , cabalDir :: !(Path Abs Dir) + , distDir :: !(Path Abs Dir) + , omitPackages :: ![PackageName] + , addPackages :: ![PackageName] + , buildInfo :: !StackBuildInfo + , dotCabalPaths :: ![DotCabalPath] + , configLibDirs :: ![FilePath] + , configIncludeDirs :: ![FilePath] + , componentName :: !NamedComponent + , cabalVersion :: !Version + } diff --git a/src/Stack/Types/PackageFile.hs b/src/Stack/Types/PackageFile.hs new file mode 100644 index 0000000000..9c28ca8556 --- /dev/null +++ b/src/Stack/Types/PackageFile.hs @@ -0,0 +1,135 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Types.PackageFile +License : BSD-3-Clause + +The facility for retrieving all files from the main Stack 'Stack.Types.Package' +type. This was moved into its own module to allow component-level file-gathering +without circular dependency at the Package level. +-} + +module Stack.Types.PackageFile + ( GetPackageFileContext (..) + , DotCabalPath (..) + , DotCabalDescriptor (..) + , PackageWarning (..) + , StackPackageFile (..) + , PackageComponentFile (..) + ) where + +import Distribution.ModuleName ( ModuleName ) +import RIO.Process ( HasProcessContext (..) ) +import Stack.Prelude +import Stack.Types.BuildConfig + ( BuildConfig (..), HasBuildConfig (..) ) +import Stack.Types.Config ( HasConfig (..) ) +import Stack.Types.GHCVariant ( HasGHCVariant (..) ) +import Stack.Types.NamedComponent ( NamedComponent ) +import Stack.Types.Platform ( HasPlatform (..) ) +import Stack.Types.Runner ( HasRunner (..) ) + +-- | Type representing environments in which Stack gets all files referenced by +-- a package. 
+data GetPackageFileContext = GetPackageFileContext + { file :: !(Path Abs File) + , distDir :: !(Path Abs Dir) + , buildConfig :: !BuildConfig + , cabalVer :: !Version + } + +instance HasPlatform GetPackageFileContext where + platformL = configL . platformL + {-# INLINE platformL #-} + platformVariantL = configL . platformVariantL + {-# INLINE platformVariantL #-} + +instance HasGHCVariant GetPackageFileContext where + ghcVariantL = configL . ghcVariantL + {-# INLINE ghcVariantL #-} + +instance HasLogFunc GetPackageFileContext where + logFuncL = configL . logFuncL + +instance HasRunner GetPackageFileContext where + runnerL = configL . runnerL + +instance HasStylesUpdate GetPackageFileContext where + stylesUpdateL = runnerL . stylesUpdateL + +instance HasTerm GetPackageFileContext where + useColorL = runnerL . useColorL + termWidthL = runnerL . termWidthL + +instance HasConfig GetPackageFileContext where + configL = buildConfigL . lens (.config) (\x y -> x { config = y }) + {-# INLINE configL #-} + +instance HasBuildConfig GetPackageFileContext where + buildConfigL = lens (.buildConfig) (\x y -> x { buildConfig = y }) + +instance HasPantryConfig GetPackageFileContext where + pantryConfigL = configL . pantryConfigL + +instance HasProcessContext GetPackageFileContext where + processContextL = configL . processContextL + +-- | A path resolved from the Cabal file, which is either main-is or +-- an exposed/internal/referenced module. 
+data DotCabalPath + = DotCabalModulePath !(Path Abs File) + | DotCabalMainPath !(Path Abs File) + | DotCabalFilePath !(Path Abs File) + | DotCabalCFilePath !(Path Abs File) + deriving (Eq, Ord, Show) + +-- | A descriptor from a Cabal file indicating one of the following: +-- +-- exposed-modules: Foo +-- other-modules: Foo +-- or +-- main-is: Foo.hs +-- +data DotCabalDescriptor + = DotCabalModule !ModuleName + | DotCabalMain !FilePath + | DotCabalFile !FilePath + | DotCabalCFile !FilePath + deriving (Eq, Ord, Show) + +-- | Warning generated when reading a package +data PackageWarning + = UnlistedModulesWarning NamedComponent [ModuleName] + -- ^ Modules found that are not listed in Cabal file + -- TODO: bring this back - see + -- https://github.com/commercialhaskell/stack/issues/2649 + {- + | MissingModulesWarning (Path Abs File) (Maybe String) [ModuleName] + -- ^ Modules not found in file system, which are listed in Cabal file + -} + +-- | This is the information from Cabal we need at the package level to track +-- files. +data StackPackageFile = StackPackageFile + { extraSrcFiles :: [FilePath] + , dataDir :: FilePath + , dataFiles :: [FilePath] + } + deriving Show + +-- | Files that the package depends on, relative to package directory. 
+data PackageComponentFile = PackageComponentFile + { modulePathMap :: Map NamedComponent (Map ModuleName (Path Abs File)) + , cabalFileMap :: !(Map NamedComponent [DotCabalPath]) + , packageExtraFile :: Set (Path Abs File) + , warnings :: [PackageWarning] + } + +instance Semigroup PackageComponentFile where + PackageComponentFile x1 x2 x3 x4 <> PackageComponentFile y1 y2 y3 y4 = + PackageComponentFile (x1 <> y1) (x2 <> y2) (x3 <> y3) (x4 <> y4) + +instance Monoid PackageComponentFile where + mempty = PackageComponentFile mempty mempty mempty mempty diff --git a/src/Stack/Types/PackageName.hs b/src/Stack/Types/PackageName.hs index f0a26c85c7..be973d400e 100644 --- a/src/Stack/Types/PackageName.hs +++ b/src/Stack/Types/PackageName.hs @@ -1,30 +1,43 @@ {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE FlexibleInstances #-} --- | Names for packages. +{-| +Module : Stack.Types.PackageName +Description : Names for packages. +License : BSD-3-Clause + +Names for packages. +-} module Stack.Types.PackageName - ( packageNameArgument - ) where + ( packageNameArgument + ) where -import Stack.Prelude import qualified Options.Applicative as O - +import Stack.Prelude -- | An argument which accepts a template name of the format -- @foo.hsfiles@. -packageNameArgument :: O.Mod O.ArgumentFields PackageName - -> O.Parser PackageName +packageNameArgument :: + O.Mod O.ArgumentFields PackageName + -> O.Parser PackageName packageNameArgument = - O.argument - (do s <- O.str - either O.readerError return (p s)) - where - p s = - case parsePackageName s of - Just x -> Right x - Nothing -> Left $ unlines - [ "Expected valid package name, but got: " ++ s - , "Package names consist of one or more alphanumeric words separated by hyphens." - , "To avoid ambiguity with version numbers, each of these words must contain at least one letter." 
- ] + O.argument + (do s <- O.str + either O.readerError pure (p s)) + where + p s = + case parsePackageName s of + Just x -> Right x + Nothing -> Left $ unlines + [ "Expected a package name acceptable to Cabal, but got: " ++ s ++ "\n" + , "An acceptable package name comprises an alphanumeric 'word'; or \ + \two or more" + , "such words, with the words separated by a hyphen/minus character ('-'). A \ + \word" + , "cannot be comprised only of the characters '0' to '9'. \n" + , "An alphanumeric character is one in one of the Unicode Letter \ + \categories" + , "(Lu (uppercase), Ll (lowercase), Lt (titlecase), Lm (modifier), or \ + \Lo (other))" + , "or Number categories (Nd (decimal), Nl (letter), or No (other))." + ] diff --git a/src/Stack/Types/ParentMap.hs b/src/Stack/Types/ParentMap.hs new file mode 100644 index 0000000000..04dc940508 --- /dev/null +++ b/src/Stack/Types/ParentMap.hs @@ -0,0 +1,23 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.ParentMap +Description : Module exporting the 'ParentMap' type synonym. +License : BSD-3-Clause + +Module exporting the 'ParentMap' type synonym. +-} + +module Stack.Types.ParentMap + ( ParentMap + ) where + +import Data.Monoid.Map ( MonoidMap (..) ) +import Stack.Prelude +import Stack.Types.Version ( VersionRange ) + +-- | Type synonym representing dictionaries of package names, and a list of +-- pairs of the identifier of a package depending on the package and the +-- version range specified for the dependency by that package. 
+type ParentMap = + MonoidMap PackageName [(PackageIdentifier, VersionRange)] diff --git a/src/Stack/Types/Plan.hs b/src/Stack/Types/Plan.hs new file mode 100644 index 0000000000..8789f3f0d9 --- /dev/null +++ b/src/Stack/Types/Plan.hs @@ -0,0 +1,143 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.Plan +Description : Plan-related types and functions. +License : BSD-3-Clause + +Plan-related types and functions. +-} + +module Stack.Types.Plan + ( Plan (..) + , Task (..) + , TaskType (..) + , TaskConfigOpts (..) + , taskAnyMissing + , taskIsTarget + , taskLocation + , taskProvides + , taskTargetIsMutable + , taskTypeLocation + , taskTypePackageIdentifier + , installLocationIsMutable + ) where + +import Data.List as L +import qualified RIO.Set as Set +import Stack.Prelude +import Stack.Types.Cache ( CachePkgSrc ) +import Stack.Types.ComponentUtils ( StackUnqualCompName ) +import Stack.Types.ConfigureOpts + ( BaseConfigOpts, PackageConfigureOpts ) +import Stack.Types.EnvConfig ( EnvConfig ) +import Stack.Types.GhcPkgId ( GhcPkgId ) +import Stack.Types.IsMutable ( IsMutable (..) ) +import Stack.Types.Package + ( InstallLocation (..), LocalPackage (..), Package (..) + , packageIdentifier + ) + +-- | A complete plan of what needs to be built and how to do it +data Plan = Plan + { tasks :: !(Map PackageName Task) + , finals :: !(Map PackageName Task) + -- ^ Final actions to be taken (test, benchmark, etc) + , unregisterLocal :: !(Map GhcPkgId (PackageIdentifier, Text)) + -- ^ Text is reason we're unregistering, for display only + , installExes :: !(Map StackUnqualCompName InstallLocation) + -- ^ Executables that should be installed after successful building + } + deriving Show + +-- | A type representing tasks to perform when building. 
+data Task = Task + { taskType :: !TaskType + -- ^ The task type, telling us how to build this + , configOpts :: !TaskConfigOpts + -- ^ A set of the package identifiers of dependencies for which 'GhcPkgId' + -- are missing and a function which yields configure options, given a + -- dictionary of those identifiers and their 'GhcPkgId'. + , buildHaddocks :: !Bool + , present :: !(Map PackageIdentifier GhcPkgId) + -- ^ A dictionary of the package identifiers of already-installed + -- dependencies, and their 'GhcPkgId'. + , allInOne :: !Bool + -- ^ indicates that the package can be built in one step + , cachePkgSrc :: !CachePkgSrc + , buildTypeConfig :: !Bool + -- ^ Is the build type of this package Configure. Check out + -- ensureConfigureScript in Stack.Build.Execute for the motivation + } + deriving Show + +-- | Type representing different types of task, depending on what is to be +-- built. +data TaskType + = TTLocalMutable LocalPackage + -- ^ Building local source code. + | TTRemotePackage IsMutable Package PackageLocationImmutable + -- ^ Building something from the package index (upstream). + deriving Show + +-- | Given the IDs of any missing packages, produce the configure options +data TaskConfigOpts = TaskConfigOpts + { missing :: !(Set PackageIdentifier) + -- ^ Dependencies for which we don't yet have a 'GhcPkgId' + , envConfig :: !EnvConfig + , baseConfigOpts :: !BaseConfigOpts + , isLocalNonExtraDep :: !Bool + , isMutable :: !IsMutable + , pkgConfigOpts :: PackageConfigureOpts + } + +instance Show TaskConfigOpts where + show tco = "Missing: " ++ show tco.missing + +-- | Were any of the dependencies missing? + +taskAnyMissing :: Task -> Bool +taskAnyMissing task = not $ Set.null task.configOpts.missing + +-- | A function to yield the package name and version of a given 'TaskType' +-- value. 
+taskTypePackageIdentifier :: TaskType -> PackageIdentifier +taskTypePackageIdentifier (TTLocalMutable lp) = packageIdentifier lp.package +taskTypePackageIdentifier (TTRemotePackage _ p _) = packageIdentifier p + +taskIsTarget :: Task -> Bool +taskIsTarget t = + case t.taskType of + TTLocalMutable lp -> lp.wanted + _ -> False + +-- | A function to yield the relevant database (write-only or mutable) of a +-- given 'TaskType' value. +taskTypeLocation :: TaskType -> InstallLocation +taskTypeLocation (TTLocalMutable _) = Local +taskTypeLocation (TTRemotePackage Mutable _ _) = Local +taskTypeLocation (TTRemotePackage Immutable _ _) = Snap + +-- | A function to yield the relevant database (write-only or mutable) of the +-- given task. +taskLocation :: Task -> InstallLocation +taskLocation = taskTypeLocation . (.taskType) + +-- | A function to yield the package name and version to be built by the given +-- task. +taskProvides :: Task -> PackageIdentifier +taskProvides = taskTypePackageIdentifier . (.taskType) + +taskTargetIsMutable :: Task -> IsMutable +taskTargetIsMutable task = + case task.taskType of + TTLocalMutable _ -> Mutable + TTRemotePackage mutable _ _ -> mutable + +installLocationIsMutable :: InstallLocation -> IsMutable +installLocationIsMutable Snap = Immutable +installLocationIsMutable Local = Mutable diff --git a/src/Stack/Types/Platform.hs b/src/Stack/Types/Platform.hs new file mode 100644 index 0000000000..5e329e886c --- /dev/null +++ b/src/Stack/Types/Platform.hs @@ -0,0 +1,50 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.Platform +License : BSD-3-Clause +-} + +module Stack.Types.Platform + ( PlatformVariant (..) + , HasPlatform (..) 
+ , platformVariantSuffix + , platformOnlyRelDir + ) where + +import Distribution.System ( Platform ) +import Distribution.Text ( display ) +import Lens.Micro ( _1, _2 ) +import Path ( parseRelDir ) +import Stack.Prelude + +-- | A variant of the platform, used to differentiate Docker builds from host +data PlatformVariant + = PlatformVariantNone + | PlatformVariant String + +-- | Class for environment values which have a Platform +class HasPlatform env where + platformL :: Lens' env Platform + platformVariantL :: Lens' env PlatformVariant + +instance HasPlatform (Platform, PlatformVariant) where + platformL = _1 + platformVariantL = _2 + +-- | Render a platform variant to a String suffix. +platformVariantSuffix :: PlatformVariant -> String +platformVariantSuffix PlatformVariantNone = "" +platformVariantSuffix (PlatformVariant v) = "-" ++ v + +-- | Relative directory for the platform identifier +platformOnlyRelDir :: + (MonadReader env m, HasPlatform env, MonadThrow m) + => m (Path Rel Dir) +platformOnlyRelDir = do + platform <- view platformL + platformVariant <- view platformVariantL + parseRelDir + ( Distribution.Text.display platform + ++ platformVariantSuffix platformVariant + ) diff --git a/src/Stack/Types/Project.hs b/src/Stack/Types/Project.hs new file mode 100644 index 0000000000..efce6e53b5 --- /dev/null +++ b/src/Stack/Types/Project.hs @@ -0,0 +1,67 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.Project +License : BSD-3-Clause +-} + +module Stack.Types.Project + ( Project (..) + ) where + +import Data.Aeson.Types ( ToJSON (..), (.=), object ) +import qualified Data.Map as Map +import qualified Data.Set as Set +import Stack.Prelude +import Stack.Types.Curator ( Curator ) + +-- | A project is a collection of packages. We can have multiple stack.yaml +-- files, but only one of them may contain project information. 
+data Project = Project + { userMsg :: !(Maybe String) + -- ^ A warning message to display to the user when the auto generated + -- config may have issues. + , packages :: ![RelFilePath] + -- ^ Packages which are actually part of the project (as opposed + -- to dependencies). + , extraDeps :: ![RawPackageLocation] + -- ^ Dependencies defined within the stack.yaml file, to be applied on top + -- of the snapshot. + , flagsByPkg :: !(Map PackageName (Map FlagName Bool)) + -- ^ Flags to be applied on top of the snapshot flags. + , snapshot :: !RawSnapshotLocation + -- ^ How we resolve which @Snapshot@ to use + , compiler :: !(Maybe WantedCompiler) + -- ^ Override the compiler in 'snapshot' + , extraPackageDBs :: ![FilePath] + , curator :: !(Maybe Curator) + -- ^ Extra configuration intended exclusively for usage by the curator tool. + -- In other words, this is /not/ part of the documented and exposed Stack + -- API. SUBJECT TO CHANGE. + , dropPackages :: !(Set PackageName) + -- ^ Packages to drop from the 'snapshot'. + } + deriving Show + +instance ToJSON Project where + -- Expanding the constructor fully to ensure we don't miss any fields. 
+ toJSON project = object $ concat + [ maybe [] (\cv -> ["compiler" .= cv]) project.compiler + , maybe [] (\msg -> ["user-message" .= msg]) project.userMsg + , [ "extra-package-dbs" .= project.extraPackageDBs + | not (null project.extraPackageDBs) + ] + , [ "extra-deps" .= project.extraDeps | not (null project.extraDeps) ] + , [ "flags" .= fmap toCabalStringMap (toCabalStringMap project.flagsByPkg) + | not (Map.null project.flagsByPkg) + ] + , ["packages" .= project.packages] + , ["snapshot" .= project.snapshot] + , maybe [] (\c -> ["curator" .= c]) project.curator + , [ "drop-packages" .= Set.map CabalString project.dropPackages + | not (Set.null project.dropPackages) + ] + ] diff --git a/src/Stack/Types/ProjectAndConfigMonoid.hs b/src/Stack/Types/ProjectAndConfigMonoid.hs new file mode 100644 index 0000000000..31eecd334d --- /dev/null +++ b/src/Stack/Types/ProjectAndConfigMonoid.hs @@ -0,0 +1,65 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.ProjectAndConfigMonoid +License : BSD-3-Clause +-} + +module Stack.Types.ProjectAndConfigMonoid + ( ProjectAndConfigMonoid (..) + , parseProjectAndConfigMonoid + ) where + +import Data.Aeson.Types ( Value ) +import Data.Aeson.WarningParser + ( WithJSONWarnings, (...:), (..:?), (..!=), jsonSubWarnings + , jsonSubWarningsT, jsonSubWarningsTT, withObjectWarnings + ) +import qualified Data.Set as Set +import qualified Data.Yaml as Yaml +import Stack.Prelude +import Stack.Types.ConfigMonoid + ( ConfigMonoid, parseConfigMonoidObject ) +import Stack.Types.Project ( Project (..) ) + +data ProjectAndConfigMonoid + = ProjectAndConfigMonoid !Project !ConfigMonoid + +parseProjectAndConfigMonoid :: + Path Abs Dir + -> Value + -> Yaml.Parser (WithJSONWarnings (IO ProjectAndConfigMonoid)) +parseProjectAndConfigMonoid rootDir = + withObjectWarnings "ProjectAndConfigMonoid" $ \o -> do + packages <- o ..:? "packages" ..!= [RelFilePath "."] + deps <- jsonSubWarningsTT (o ..:? 
"extra-deps") ..!= [] + flags' <- o ..:? "flags" ..!= mempty + let flagsByPkg = unCabalStringMap <$> unCabalStringMap + (flags' :: Map (CabalString PackageName) (Map (CabalString FlagName) Bool)) + + snapshot' <- jsonSubWarnings $ o ...: ["snapshot", "resolver"] + compiler <- o ..:? "compiler" + userMsg <- o ..:? "user-message" + config <- parseConfigMonoidObject rootDir o + extraPackageDBs <- o ..:? "extra-package-dbs" ..!= [] + curator <- jsonSubWarningsT (o ..:? "curator") + drops <- o ..:? "drop-packages" ..!= mempty + let dropPackages = Set.map unCabalString drops + pure $ do + deps' <- mapM (resolvePaths (Just rootDir)) deps + let extraDeps = + concatMap toList (deps' :: [NonEmpty RawPackageLocation]) + snapshot <- resolvePaths (Just rootDir) snapshot' + let project = Project + { userMsg + , snapshot + , compiler -- FIXME make sure snapshot' isn't SLCompiler + , extraPackageDBs + , packages + , extraDeps + , flagsByPkg + , curator + , dropPackages + } + pure $ ProjectAndConfigMonoid project config diff --git a/src/Stack/Types/ProjectConfig.hs b/src/Stack/Types/ProjectConfig.hs new file mode 100644 index 0000000000..412b07b5cc --- /dev/null +++ b/src/Stack/Types/ProjectConfig.hs @@ -0,0 +1,33 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.ProjectConfig +License : BSD-3-Clause +-} + +module Stack.Types.ProjectConfig + ( ProjectConfig (..) + , isPCGlobalProject + ) where + +import Stack.Prelude + +-- | Project configuration information. Not every run of Stack has a +-- true local project; see constructors below. +data ProjectConfig a + = PCProject a + -- ^ Normal run: we want a project, and have one. This comes from + -- either 'Stack.Types.StackYamlLoc.SYLDefault' or + -- 'Stack.Types.StackYamlLoc.SYLOverride'. + | PCGlobalProject + -- ^ No project was found when using 'Stack.Types.StackYamlLoc.SYLDefault'. + -- Instead, use the implicit global. + | PCNoProject ![RawPackageLocationImmutable] + -- ^ Use a no project run. 
This comes from + -- 'Stack.Types.StackYamlLocSYLNoProject'. + +-- | Yields 'True' only if the project configuration information is for the +-- implicit global project. +isPCGlobalProject :: ProjectConfig a -> Bool +isPCGlobalProject PCGlobalProject = True +isPCGlobalProject _ = False diff --git a/src/Stack/Types/PvpBounds.hs b/src/Stack/Types/PvpBounds.hs new file mode 100644 index 0000000000..473de3abf2 --- /dev/null +++ b/src/Stack/Types/PvpBounds.hs @@ -0,0 +1,62 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.PvpBounds +License : BSD-3-Clause +-} + +module Stack.Types.PvpBounds + ( PvpBounds (..) + , PvpBoundsType (..) + , pvpBoundsText + , parsePvpBounds + ) where + +import Data.Aeson.Types ( FromJSON (..), ToJSON (..), withText ) +import qualified Data.Map as Map +import qualified Data.Text as T +import Stack.Prelude + +-- | How PVP bounds should be added to .cabal files +data PvpBoundsType + = PvpBoundsNone + | PvpBoundsUpper + | PvpBoundsLower + | PvpBoundsBoth + deriving (Bounded, Enum, Eq, Ord, Read, Show) + +data PvpBounds = PvpBounds + { pbType :: !PvpBoundsType + , pbAsRevision :: !Bool + } + deriving (Eq, Ord, Read, Show) + +pvpBoundsText :: PvpBoundsType -> Text +pvpBoundsText PvpBoundsNone = "none" +pvpBoundsText PvpBoundsUpper = "upper" +pvpBoundsText PvpBoundsLower = "lower" +pvpBoundsText PvpBoundsBoth = "both" + +parsePvpBounds :: Text -> Either String PvpBounds +parsePvpBounds t = maybe err Right $ do + (t', asRevision) <- + case T.break (== '-') t of + (x, "") -> Just (x, False) + (x, "-revision") -> Just (x, True) + _ -> Nothing + x <- Map.lookup t' m + Just PvpBounds + { pbType = x + , pbAsRevision = asRevision + } + where + m = Map.fromList $ map (pvpBoundsText &&& id) [minBound..maxBound] + err = Left $ "Invalid PVP bounds: " ++ T.unpack t + +instance ToJSON PvpBounds where + toJSON (PvpBounds typ asRevision) = + toJSON (pvpBoundsText typ <> (if asRevision then "-revision" else "")) 
+ +instance FromJSON PvpBounds where + parseJSON = withText "PvpBounds" (either fail pure . parsePvpBounds) diff --git a/src/Stack/Types/Resolver.hs b/src/Stack/Types/Resolver.hs deleted file mode 100644 index 450a6f85cd..0000000000 --- a/src/Stack/Types/Resolver.hs +++ /dev/null @@ -1,104 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE UndecidableInstances #-} - -module Stack.Types.Resolver - (AbstractResolver(..) - ,readAbstractResolver - ,Snapshots (..) - ) where - -import Pantry.Internal.AesonExtended - (FromJSON, parseJSON, - withObject, (.:), withText) -import qualified Data.HashMap.Strict as HashMap -import qualified Data.IntMap.Strict as IntMap -import qualified Data.Text as T -import Data.Text.Read (decimal) -import Data.Time (Day) -import Options.Applicative (ReadM) -import qualified Options.Applicative.Types as OA -import Stack.Prelude - --- | Either an actual resolver value, or an abstract description of one (e.g., --- latest nightly). -data AbstractResolver - = ARLatestNightly - | ARLatestLTS - | ARLatestLTSMajor !Int - | ARResolver !RawSnapshotLocation - | ARGlobal - -instance Show AbstractResolver where - show = T.unpack . utf8BuilderToText . 
display - -instance Display AbstractResolver where - display ARLatestNightly = "nightly" - display ARLatestLTS = "lts" - display (ARLatestLTSMajor x) = "lts-" <> display x - display (ARResolver usl) = display usl - display ARGlobal = "global" - -readAbstractResolver :: ReadM (Unresolved AbstractResolver) -readAbstractResolver = do - s <- OA.readerAsk - case s of - "global" -> pure $ pure ARGlobal - "nightly" -> pure $ pure ARLatestNightly - "lts" -> pure $ pure ARLatestLTS - 'l':'t':'s':'-':x | Right (x', "") <- decimal $ T.pack x -> - pure $ pure $ ARLatestLTSMajor x' - _ -> pure $ ARResolver <$> parseRawSnapshotLocation (T.pack s) - -data BuildPlanTypesException - = ParseResolverException !Text - | FilepathInDownloadedSnapshot !Text - deriving Typeable -instance Exception BuildPlanTypesException -instance Show BuildPlanTypesException where - show (ParseResolverException t) = concat - [ "Invalid resolver value: " - , T.unpack t - , ". Possible valid values include lts-2.12, nightly-YYYY-MM-DD, ghc-7.10.2, and ghcjs-0.1.0_ghc-7.10.2. " - , "See https://www.stackage.org/snapshots for a complete list." - ] - show (FilepathInDownloadedSnapshot url) = unlines - [ "Downloaded snapshot specified a 'resolver: { location: filepath }' " - , "field, but filepaths are not allowed in downloaded snapshots.\n" - , "Filepath specified: " ++ T.unpack url - ] - --- | Most recent Nightly and newest LTS version per major release. -data Snapshots = Snapshots - { snapshotsNightly :: !Day - , snapshotsLts :: !(IntMap Int) - } - deriving Show -instance FromJSON Snapshots where - parseJSON = withObject "Snapshots" $ \o -> Snapshots - <$> (o .: "nightly" >>= parseNightly) - <*> fmap IntMap.unions (mapM (parseLTS . snd) - $ filter (isLTS . 
fst) - $ HashMap.toList o) - where - parseNightly t = - case parseSnapName t of - Left e -> fail $ show e - Right (LTS _ _) -> fail "Unexpected LTS value" - Right (Nightly d) -> return d - - isLTS = ("lts-" `T.isPrefixOf`) - - parseLTS = withText "LTS" $ \t -> - case parseSnapName t of - Left e -> fail $ show e - Right (LTS x y) -> return $ IntMap.singleton x y - Right (Nightly _) -> fail "Unexpected nightly value" diff --git a/src/Stack/Types/Runner.hs b/src/Stack/Types/Runner.hs new file mode 100644 index 0000000000..f4759221fb --- /dev/null +++ b/src/Stack/Types/Runner.hs @@ -0,0 +1,118 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Types.Runner +License : BSD-3-Clause +-} + +module Stack.Types.Runner + ( Runner (..) + , HasRunner (..) + , HasDockerEntrypointMVar (..) + , globalOptsL + , stackYamlLocL + , lockFileBehaviorL + , terminalL + , reExecL + , rslInLogL + , progNameL + , mExecutablePathL + , viewExecutablePath + ) where + +import RIO.Process ( HasProcessContext (..), ProcessContext ) +import Stack.Prelude hiding ( stylesUpdate ) +import Stack.Types.Config.Exception ( ConfigPrettyException (..) ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.LockFileBehavior ( LockFileBehavior ) +import Stack.Types.StackYamlLoc ( StackYamlLoc ) + +-- | The base environment that almost everything in Stack runs in, based off of +-- parsing command line options in t'GlobalOpts'. Provides logging, process +-- execution, and the MVar used to ensure that the Docker entrypoint is +-- performed exactly once. 
+data Runner = Runner + { globalOpts :: !GlobalOpts + , useColor :: !Bool + , logFunc :: !LogFunc + , termWidth :: !Int + , processContext :: !ProcessContext + , dockerEntrypointMVar :: !(MVar Bool) + } + +instance HasLogFunc Runner where + logFuncL = lens (.logFunc) (\x y -> x { logFunc = y }) + +instance HasProcessContext Runner where + processContextL = + lens (.processContext) (\x y -> x { processContext = y }) + +instance HasRunner Runner where + runnerL = id + +instance HasStylesUpdate Runner where + stylesUpdateL :: Lens' Runner StylesUpdate + stylesUpdateL = globalOptsL . lens + (.stylesUpdate) + (\x y -> x { stylesUpdate = y }) + +instance HasTerm Runner where + useColorL = lens (.useColor) (\x y -> x { useColor = y }) + termWidthL = lens (.termWidth) (\x y -> x { termWidth = y }) + +instance HasDockerEntrypointMVar Runner where + dockerEntrypointMVarL = + lens (.dockerEntrypointMVar) (\x y -> x { dockerEntrypointMVar = y }) + +-- | Class for environment values which have a t'Runner'. +class (HasProcessContext env, HasLogFunc env) => HasRunner env where + runnerL :: Lens' env Runner + +-- | Class for environment values which have a Docker entrypoint 'MVar'. +class HasRunner env => HasDockerEntrypointMVar env where + dockerEntrypointMVarL :: Lens' env (MVar Bool) + +-- | See the @stackYaml@ field of the v'GlobalOpts' data constructor. +stackYamlLocL :: HasRunner env => Lens' env StackYamlLoc +stackYamlLocL = + globalOptsL . lens (.stackYaml) (\x y -> x { stackYaml = y }) + +-- | See the @lockFileBehavior@ field of the v'GlobalOpts' data constructor. +lockFileBehaviorL :: HasRunner env => SimpleGetter env LockFileBehavior +lockFileBehaviorL = globalOptsL . to (.lockFileBehavior) + +-- | See the t'GlobalOpts' type. +globalOptsL :: HasRunner env => Lens' env GlobalOpts +globalOptsL = runnerL . lens (.globalOpts) (\x y -> x { globalOpts = y }) + +-- | See the @terminal@ field of the v'GlobalOpts' data constructor. 
+terminalL :: HasRunner env => Lens' env Bool +terminalL = + globalOptsL . lens (.terminal) (\x y -> x { terminal = y }) + +-- | See the @reExecVersion@ field of the v'GlobalOpts' data constructor. +reExecL :: HasRunner env => SimpleGetter env Bool +reExecL = globalOptsL . to (isJust . (.reExecVersion)) + +-- | See the @rslInLog@ field of the v'GlobalOpts' data constructor. +rslInLogL :: HasRunner env => SimpleGetter env Bool +rslInLogL = globalOptsL . to (.rslInLog) + +-- | See the @progNameL@ field of the v'GlobalOpts' data constructor. +progNameL :: HasRunner env => SimpleGetter env String +progNameL = globalOptsL . to (.progName) + +-- | See the @mExecutablePath@ field of the v'GlobalOpts' data constructor. +mExecutablePathL :: HasRunner env => SimpleGetter env (Maybe (Path Abs File)) +mExecutablePathL = globalOptsL . to (.mExecutablePath) + +-- | Yield the path to the current Stack executable, if the operating system +-- provides a reliable way to determine it. Otherwise throw +-- 'Stack.Types.Config.Exception.NoExecutablePath'. +viewExecutablePath :: HasRunner env => RIO env (Path Abs File) +viewExecutablePath = view mExecutablePathL >>= \case + Nothing -> view progNameL >>= prettyThrowM . NoExecutablePath + Just executablePath -> pure executablePath diff --git a/src/Stack/Types/SCM.hs b/src/Stack/Types/SCM.hs new file mode 100644 index 0000000000..d1be2175fe --- /dev/null +++ b/src/Stack/Types/SCM.hs @@ -0,0 +1,29 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.SCM +License : BSD-3-Clause +-} + +module Stack.Types.SCM + ( SCM (..) + ) where + +import Data.Aeson.Types ( FromJSON (..), ToJSON (..) ) +import Stack.Prelude + +-- | A software control system. 
+data SCM + = Git + deriving Show + +instance FromJSON SCM where + parseJSON v = do + s <- parseJSON v + case s of + "git" -> pure Git + _ -> fail ("Unknown or unsupported SCM: " <> s) + +instance ToJSON SCM where + toJSON Git = toJSON ("git" :: Text) diff --git a/src/Stack/Types/SDistOpts.hs b/src/Stack/Types/SDistOpts.hs new file mode 100644 index 0000000000..ced913053e --- /dev/null +++ b/src/Stack/Types/SDistOpts.hs @@ -0,0 +1,31 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.SDistOpts +Description : Types related to Stack's @sdist@ command. +License : BSD-3-Clause + +Types related to Stack's @sdist@ command. +-} + +module Stack.Types.SDistOpts + ( SDistOpts (..) + ) where + +import Stack.Prelude +import Stack.Types.PvpBounds ( PvpBounds ) + +-- | Type representing command line options for @stack sdist@ command. +data SDistOpts = SDistOpts + { dirsToWorkWith :: [String] + -- ^ Directories to package + , pvpBounds :: Maybe PvpBounds + -- ^ PVP Bounds overrides + , ignoreCheck :: Bool + -- ^ Whether to ignore check of the package for common errors + , buildTarball :: Bool + -- ^ Whether to build the tarball + , tarPath :: Maybe FilePath + -- ^ Where to copy the tarball + } diff --git a/src/Stack/Types/SetupInfo.hs b/src/Stack/Types/SetupInfo.hs new file mode 100644 index 0000000000..906d0fef45 --- /dev/null +++ b/src/Stack/Types/SetupInfo.hs @@ -0,0 +1,74 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} + +{-| +Module : Stack.Types.SetupInfo +License : BSD-3-Clause +-} + +module Stack.Types.SetupInfo + ( SetupInfo (..) + ) where + +import Data.Aeson.Types ( FromJSON (..) 
) +import Data.Aeson.WarningParser + ( WithJSONWarnings, (..:?), (..!=), jsonSubWarningsT + , jsonSubWarningsTT, withObjectWarnings + ) +import qualified Data.Map as Map +import Stack.Prelude +import Stack.Types.DownloadInfo ( DownloadInfo ) +import Stack.Types.VersionedDownloadInfo ( VersionedDownloadInfo ) +import Stack.Types.GHCDownloadInfo ( GHCDownloadInfo ) + +data SetupInfo = SetupInfo + { sevenzExe :: Maybe DownloadInfo + , sevenzDll :: Maybe DownloadInfo + , msys2 :: Map Text VersionedDownloadInfo + , ghcByVersion :: Map Text (Map Version GHCDownloadInfo) + , stackByVersion :: Map Text (Map Version DownloadInfo) + } + deriving Show + +instance FromJSON (WithJSONWarnings SetupInfo) where + parseJSON = withObjectWarnings "SetupInfo" $ \o -> do + sevenzExe <- jsonSubWarningsT (o ..:? "sevenzexe-info") + sevenzDll <- jsonSubWarningsT (o ..:? "sevenzdll-info") + msys2 <- jsonSubWarningsT (o ..:? "msys2" ..!= mempty) + (fmap unCabalStringMap -> ghcByVersion) <- + jsonSubWarningsTT (o ..:? "ghc" ..!= mempty) + (fmap unCabalStringMap -> stackByVersion) <- + jsonSubWarningsTT (o ..:? "stack" ..!= mempty) + pure SetupInfo + { sevenzExe + , sevenzDll + , msys2 + , ghcByVersion + , stackByVersion + } + +-- | For the @siGHCs@ field maps are deeply merged. For all fields the values +-- from the first @SetupInfo@ win. 
+instance Semigroup SetupInfo where + l <> r = + SetupInfo + { sevenzExe = l.sevenzExe <|> r.sevenzExe + , sevenzDll = l.sevenzDll <|> r.sevenzDll + , msys2 = l.msys2 <> r.msys2 + , ghcByVersion = Map.unionWith (<>) l.ghcByVersion r.ghcByVersion + , stackByVersion = Map.unionWith (<>) l.stackByVersion r.stackByVersion + } + +instance Monoid SetupInfo where + mempty = + SetupInfo + { sevenzExe = Nothing + , sevenzDll = Nothing + , msys2 = Map.empty + , ghcByVersion = Map.empty + , stackByVersion = Map.empty + } + mappend = (<>) diff --git a/src/Stack/Types/SetupOpts.hs b/src/Stack/Types/SetupOpts.hs new file mode 100644 index 0000000000..5c44f8e9af --- /dev/null +++ b/src/Stack/Types/SetupOpts.hs @@ -0,0 +1,25 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.SetupOpts +Description : Types related to Stack's @setup@ command. +License : BSD-3-Clause + +Types related to Stack's @setup@ command. +-} + +module Stack.Types.SetupOpts + ( SetupCmdOpts (..) + ) where + +import Stack.Prelude + +-- | Type representing command line options for the @stack setup@ command. +data SetupCmdOpts = SetupCmdOpts + { compilerVersion :: !(Maybe WantedCompiler) + , forceReinstall :: !Bool + , ghcBindistUrl :: !(Maybe String) + , ghcjsBootOpts :: ![String] + , ghcjsBootClean :: !Bool + } diff --git a/src/Stack/Types/Snapshot.hs b/src/Stack/Types/Snapshot.hs new file mode 100644 index 0000000000..e16943338a --- /dev/null +++ b/src/Stack/Types/Snapshot.hs @@ -0,0 +1,115 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +{-| +Module : Stack.Types.Snapshot +License : BSD-3-Clause +-} + +module Stack.Types.Snapshot + ( AbstractSnapshot (..) + , readAbstractSnapshot + , Snapshots (..) 
+ ) where + +import qualified Data.Aeson.Key as Key +import qualified Data.Aeson.KeyMap as KeyMap +import Data.Aeson.Types + ( FromJSON, parseJSON, withObject, withText ) +import Data.Aeson.WarningParser ( (.:) ) +import qualified Data.IntMap.Strict as IntMap +import qualified Data.Text as T +import Data.Text.Read ( decimal ) +import Data.Time ( Day ) +import Options.Applicative ( ReadM ) +import qualified Options.Applicative.Types as OA +import Stack.Prelude + +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Types.Snapshot" module. +data TypesSnapshotException + = ParseSnapshotException !Text + | FilepathInDownloadedSnapshot !Text + deriving Show + +instance Exception TypesSnapshotException where + displayException (ParseSnapshotException t) = concat + [ "Error: [S-8787]\n" + , "Invalid snapshot value: " + , T.unpack t + , ". Possible valid values include lts-2.12, nightly-YYYY-MM-DD, \ + \ghc-7.10.2, and ghcjs-0.1.0_ghc-7.10.2. See \ + \https://www.stackage.org/snapshots for a complete list." + ] + displayException (FilepathInDownloadedSnapshot url) = unlines + [ "Error: [S-4865]" + , "Downloaded snapshot specified 'snapshot: { location: filepath }', " + , "but filepaths are not allowed in downloaded snapshots.\n" + , "Filepath specified: " ++ T.unpack url + ] + +-- | Either an actual snapshot value, or an abstract description of one (e.g., +-- latest nightly). +data AbstractSnapshot + = ASLatestNightly + | ASLatestLTS + | ASLatestLTSMajor !Int + | ASSnapshot !RawSnapshotLocation + | ASGlobal + +instance Show AbstractSnapshot where + show = T.unpack . utf8BuilderToText . 
display + +instance Display AbstractSnapshot where + display ASLatestNightly = "nightly" + display ASLatestLTS = "lts" + display (ASLatestLTSMajor x) = "lts-" <> display x + display (ASSnapshot usl) = display usl + display ASGlobal = "global" + +instance FromJSON (Unresolved AbstractSnapshot) where + parseJSON = withText "Unresolved AbstractSnapshot" $ \t -> + pure $ parseAbstractSnapshot $ T.unpack t + +readAbstractSnapshot :: ReadM (Unresolved AbstractSnapshot) +readAbstractSnapshot = parseAbstractSnapshot <$> OA.readerAsk + +parseAbstractSnapshot :: String -> Unresolved AbstractSnapshot +parseAbstractSnapshot s = case s of + "global" -> pure ASGlobal + "nightly" -> pure ASLatestNightly + "lts" -> pure ASLatestLTS + 'l':'t':'s':'-':x | Right (x', "") <- decimal $ T.pack x -> + pure $ ASLatestLTSMajor x' + _ -> ASSnapshot <$> parseRawSnapshotLocation (T.pack s) + +-- | Most recent Nightly and newest LTS version per major release. +data Snapshots = Snapshots + { nightly :: !Day + , lts :: !(IntMap Int) + } + deriving Show + +instance FromJSON Snapshots where + parseJSON = withObject "Snapshots" $ \o -> Snapshots + <$> (o .: "nightly" >>= parseNightly) + <*> fmap IntMap.unions (mapM (parseLTS . snd) + $ filter (isLTS . Key.toText . 
fst) + $ KeyMap.toList o) + where + parseNightly t = + case parseSnapName t of + Left e -> fail $ displayException e + Right (LTS _ _) -> fail "Unexpected LTS value" + Right (Nightly d) -> pure d + + isLTS = ("lts-" `T.isPrefixOf`) + + parseLTS = withText "LTS" $ \t -> + case parseSnapName t of + Left e -> fail $ displayException e + Right (LTS x y) -> pure $ IntMap.singleton x y + Right (Nightly _) -> fail "Unexpected nightly value" diff --git a/src/Stack/Types/SourceMap.hs b/src/Stack/Types/SourceMap.hs index 43ca042d62..699319ccfc 100644 --- a/src/Stack/Types/SourceMap.hs +++ b/src/Stack/Types/SourceMap.hs @@ -1,10 +1,19 @@ -{-# LANGUAGE NoImplicitPrelude #-} --- | A sourcemap maps a package name to how it should be built, --- including source code, flags, options, etc. This module contains --- various stages of source map construction. See the --- @build_overview.md@ doc for details on these stages. +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Types.SourceMap +License : BSD-3-Clause + +A source map maps a package name to how it should be built, including source +code, flags and options. This module exports types used in various stages of +source map construction. See @build_overview.md@ for details on these stages. +-} + module Stack.Types.SourceMap - ( -- * Different source map types + ( -- * Source map types SMWanted (..) , SMActual (..) , Target (..) @@ -15,6 +24,11 @@ module Stack.Types.SourceMap , FromSnapshot (..) , DepPackage (..) , ProjectPackage (..) + , ppComponents + , ppComponentsMaybe + , ppGPD + , ppRoot + , ppVersion , CommonPackage (..) , GlobalPackageVersion (..) , GlobalPackage (..) 
@@ -23,54 +37,64 @@ module Stack.Types.SourceMap , smRelDir ) where +import qualified Data.Set as Set import qualified Data.Text as T +import Distribution.PackageDescription ( GenericPackageDescription ) +import qualified Distribution.PackageDescription as C import qualified Pantry.SHA256 as SHA256 -import Path -import Stack.Prelude -import Stack.Types.Compiler -import Stack.Types.NamedComponent -import Distribution.PackageDescription (GenericPackageDescription) - --- | Common settings for both dependency and project package. +import Path ( parent, parseRelDir ) +import Stack.Prelude +import Stack.Types.Compiler ( ActualCompiler ) +import Stack.Types.ComponentUtils ( fromCabalName ) +import Stack.Types.NamedComponent ( NamedComponent (..) ) + +-- | Settings common to dependency packages ('Stack.Types.SourceMap.DepPackage') +-- and project packages ('Stack.Types.SourceMap.ProjectPackage'). data CommonPackage = CommonPackage - { cpGPD :: !(IO GenericPackageDescription) - , cpName :: !PackageName - , cpFlags :: !(Map FlagName Bool) - -- ^ overrides default flags - , cpGhcOptions :: ![Text] -- also lets us know if we're doing profiling - , cpCabalConfigOpts :: ![Text] - , cpHaddocks :: !Bool + { gpd :: !(IO GenericPackageDescription) + , name :: !PackageName + , flags :: !(Map FlagName Bool) + -- ^ overrides default flags + , ghcOptions :: ![Text] + -- also lets us know if we're doing profiling + , cabalConfigOpts :: ![Text] + , buildHaddocks :: !Bool + -- ^ Should Haddock documentation be built for this package? } --- | Flag showing if package comes from a snapshot --- needed to ignore dependency bounds between such packages +-- | Flag showing if package comes from a snapshot. Used to ignore dependency +-- bounds between such packages. 
data FromSnapshot - = FromSnapshot - | NotFromSnapshot - deriving (Show) + = FromSnapshot + | NotFromSnapshot + deriving Show -- | A view of a dependency package, specified in stack.yaml data DepPackage = DepPackage - { dpCommon :: !CommonPackage - , dpLocation :: !PackageLocation - , dpHidden :: !Bool - -- ^ Should the package be hidden after registering? - -- Affects the script interpreter's module name import parser. - , dpFromSnapshot :: !FromSnapshot - -- ^ Needed to ignore bounds between snapshot packages - -- See https://github.com/commercialhaskell/stackage/issues/3185 + { depCommon :: !CommonPackage + , location :: !PackageLocation + , hidden :: !Bool + -- ^ Should the package be hidden after registering? Affects the script + -- interpreter's module name import parser. + , fromSnapshot :: !FromSnapshot + -- ^ Needed to ignore bounds between snapshot packages + -- See https://github.com/commercialhaskell/stackage/issues/3185 } --- | A view of a project package needed for resolving components +-- | A view of a project package. Used to resolve components. data ProjectPackage = ProjectPackage - { ppCommon :: !CommonPackage - , ppCabalFP :: !(Path Abs File) - , ppResolvedDir :: !(ResolvedPath Dir) + { projectCommon :: !CommonPackage + , cabalFP :: !(Path Abs File) + , resolvedDir :: !(ResolvedPath Dir) } --- | A view of a package installed in the global package database also --- could include marker for a replaced global package (could be replaced --- because of a replaced dependency) +-- | A type representing versions of packages in the global package database. +newtype GlobalPackageVersion + = GlobalPackageVersion Version + +-- | A view of a package installed in the global package database or a marker +-- for a replaced global package. A global package could be replaced because of +-- a replaced dependency. 
data GlobalPackage = GlobalPackage !Version | ReplacedGlobalPackage ![PackageName] @@ -80,80 +104,117 @@ isReplacedGlobal :: GlobalPackage -> Bool isReplacedGlobal (ReplacedGlobalPackage _) = True isReplacedGlobal (GlobalPackage _) = False --- | A source map with information on the wanted (but not actual) --- compiler. This is derived by parsing the @stack.yaml@ file for --- @packages@, @extra-deps@, their configuration (e.g., flags and --- options), and parsing the snapshot it refers to. It does not --- include global packages or any information from the command line. +-- | A type representing how a package is intended to be built. +data Target + = TargetAll !PackageType + -- ^ Build all of the default components. + | TargetComps !(Set NamedComponent) + -- ^ Only build specific components + +-- | A type representing types of packages. +data PackageType + = PTProject + -- ^ The package is a project package. + | PTDependency + -- ^ The package is other than a project package and a dependency. + deriving (Eq, Show) + +-- | A source map with information on the wanted (but not actual) compiler. This +-- is derived by parsing the @stack.yaml@ file for @packages@, @extra-deps@, +-- their configuration (e.g., flags and options), and parsing the snapshot it +-- refers to. It does not include global packages or any information from the +-- command line. -- --- Invariant: a @PackageName@ appears in either 'smwProject' or --- 'smwDeps', but not both. +-- Invariant: a @PackageName@ appears in either 'SMWanted.project' or +-- 'SMWanted.deps', but not both. data SMWanted = SMWanted - { smwCompiler :: !WantedCompiler - , smwProject :: !(Map PackageName ProjectPackage) - , smwDeps :: !(Map PackageName DepPackage) - , smwSnapshotLocation :: !RawSnapshotLocation - -- ^ Where this snapshot is loaded from. 
+ { compiler :: !WantedCompiler + , project :: !(Map PackageName ProjectPackage) + , deps :: !(Map PackageName DepPackage) + , snapshotLocation :: !RawSnapshotLocation + -- ^ Where this snapshot is loaded from. } --- | Adds in actual compiler information to 'SMWanted', in particular --- the contents of the global package database. +-- | A source map with information on the actual compiler, including the +-- contents of its global package database. It does not include any information +-- from the command line. -- -- Invariant: a @PackageName@ appears in only one of the @Map@s. data SMActual global = SMActual - { smaCompiler :: !ActualCompiler - , smaProject :: !(Map PackageName ProjectPackage) - , smaDeps :: !(Map PackageName DepPackage) - , smaGlobal :: !(Map PackageName global) + { compiler :: !ActualCompiler + , project :: !(Map PackageName ProjectPackage) + , deps :: !(Map PackageName DepPackage) + , globals :: !(Map PackageName global) } -newtype GlobalPackageVersion = GlobalPackageVersion Version - --- | How a package is intended to be built -data Target - = TargetAll !PackageType - -- ^ Build all of the default components. - | TargetComps !(Set NamedComponent) - -- ^ Only build specific components - -data PackageType = PTProject | PTDependency - deriving (Eq, Show) - --- | Builds on an 'SMActual' by resolving the targets specified on the --- command line, potentially adding in new dependency packages in the --- process. +-- | Builds on an t'SMActual' by resolving the targets specified on the command +-- line, potentially adding in new dependency packages in the process. data SMTargets = SMTargets - { smtTargets :: !(Map PackageName Target) - , smtDeps :: !(Map PackageName DepPackage) + { targets :: !(Map PackageName Target) + , deps :: !(Map PackageName DepPackage) } --- | The final source map, taking an 'SMTargets' and applying all --- command line flags and GHC options. 
+-- | The final source map, taking an t'SMTargets' and applying all command line +-- flags and GHC options. +-- +-- One source map value is distinguished from another by a hash of the parts of +-- the value that are immutable. data SourceMap = SourceMap - { smTargets :: !SMTargets - -- ^ Doesn't need to be included in the hash, does not affect the - -- source map. - , smCompiler :: !ActualCompiler - -- ^ Need to hash the compiler version _and_ its installation - -- path. Ideally there would be some kind of output from GHC - -- telling us some unique ID for the compiler itself. - , smProject :: !(Map PackageName ProjectPackage) - -- ^ Doesn't need to be included in hash, doesn't affect any of - -- the packages that get stored in the snapshot database. - , smDeps :: !(Map PackageName DepPackage) - -- ^ Need to hash all of the immutable dependencies, can ignore - -- the mutable dependencies. - , smGlobal :: !(Map PackageName GlobalPackage) - -- ^ Doesn't actually need to be hashed, implicitly captured by - -- smCompiler. Can be broken if someone installs new global - -- packages. We can document that as not supported, _or_ we could - -- actually include all of this in the hash and make Stack more - -- resilient. + { targets :: !SMTargets + -- ^ Doesn't need to be included in the hash, does not affect the source + -- map. + , compiler :: !ActualCompiler + -- ^ Need to hash the compiler version _and_ its installation path. Ideally + -- there would be some kind of output from GHC telling us some unique ID for + -- the compiler itself. + , project :: !(Map PackageName ProjectPackage) + -- ^ Doesn't need to be included in hash, doesn't affect any of the packages + -- that get stored in the snapshot database. + , deps :: !(Map PackageName DepPackage) + -- ^ Need to hash all of the immutable dependencies, can ignore the mutable + -- dependencies. + , globalPkgs :: !(Map PackageName GlobalPackage) + -- ^ Doesn't actually need to be hashed, implicitly captured by smCompiler. 
 + -- Can be broken if someone installs new global packages. We can document + -- that as not supported, _or_ we could actually include all of this in the + -- hash and make Stack more resilient. } --- | A unique hash for the immutable portions of a 'SourceMap'. -newtype SourceMapHash = SourceMapHash SHA256 +-- | A unique hash for the immutable portions of a t'SourceMap'. +newtype SourceMapHash + = SourceMapHash SHA256 -- | Returns relative directory name with source map's hash smRelDir :: (MonadThrow m) => SourceMapHash -> m (Path Rel Dir) smRelDir (SourceMapHash smh) = parseRelDir $ T.unpack $ SHA256.toHexText smh + +ppGPD :: MonadIO m => ProjectPackage -> m GenericPackageDescription +ppGPD = liftIO . (.projectCommon.gpd) + +-- | Root directory for the given t'ProjectPackage' +ppRoot :: ProjectPackage -> Path Abs Dir +ppRoot = parent . (.cabalFP) + +-- | All components available in the given t'ProjectPackage' +ppComponents :: MonadIO m => ProjectPackage -> m (Set NamedComponent) +ppComponents = ppComponentsMaybe Just + +ppComponentsMaybe :: + MonadIO m + => (NamedComponent -> Maybe NamedComponent) + -> ProjectPackage + -> m (Set NamedComponent) +ppComponentsMaybe compType pp = do + gpd <- ppGPD pp + pure $ Set.fromList $ concat + [ maybe [] (const $ catMaybes [compType CLib]) (C.condLibrary gpd) + , mapMaybe ((compType . CExe . fromCabalName) . fst) (C.condExecutables gpd) + , mapMaybe ((compType . CTest . fromCabalName) . fst) (C.condTestSuites gpd) + , mapMaybe + ((compType . CBench . fromCabalName) . fst) + (C.condBenchmarks gpd) + ] + +-- | Version for the given t'ProjectPackage' +ppVersion :: MonadIO m => ProjectPackage -> m Version +ppVersion = fmap gpdVersion . 
ppGPD diff --git a/src/Stack/Types/StackYamlLoc.hs b/src/Stack/Types/StackYamlLoc.hs new file mode 100644 index 0000000000..57827bfced --- /dev/null +++ b/src/Stack/Types/StackYamlLoc.hs @@ -0,0 +1,26 @@ + +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.StackYamlLoc +License : BSD-3-Clause +-} + +module Stack.Types.StackYamlLoc + ( StackYamlLoc (..) + ) where + +import Stack.Prelude + +-- | Location for the project's stack.yaml file. +data StackYamlLoc + = SYLDefault + -- ^ Use the standard parent-directory-checking logic + | SYLOverride !(Path Abs File) + -- ^ Use a specific stack.yaml file provided + | SYLNoProject ![RawPackageLocationImmutable] + -- ^ Do not load up a project, just user configuration. Include + -- the given extra dependencies with the snapshot. + | SYLGlobalProject + -- ^ Do not look for a project configuration, and use the implicit global. + deriving Show diff --git a/src/Stack/Types/Storage.hs b/src/Stack/Types/Storage.hs new file mode 100644 index 0000000000..323b97e587 --- /dev/null +++ b/src/Stack/Types/Storage.hs @@ -0,0 +1,78 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.Storage +Description : Types used by @Stack.Storage@ modules. +License : BSD-3-Clause + +Types used by @Stack.Storage@ modules. +-} + +module Stack.Types.Storage + ( StoragePrettyException (..) + , ProjectStorage (..) + , UserStorage (..) + ) where + +import Pantry.SQLite ( Storage ) +import Stack.Prelude + +-- | Type representing \'pretty\' exceptions thrown by functions exported by +-- modules beginning @Stack.Storage@. +data StoragePrettyException + = StorageMigrationFailure !Text !(Path Abs File) !SomeException + deriving Show + +instance Pretty StoragePrettyException where + pretty (StorageMigrationFailure desc fp ex) = + "[S-8835]" + <> line + <> fillSep + [ flow "Stack could not migrate the database" + , style File (fromString $ show desc) + , flow "located at" + , pretty fp + ] + <> "." 
+ <> blankLine + <> flow "While migrating the database, Stack encountered the error:" + <> blankLine + <> string exMsg + <> blankLine + <> fillSep + [ flow "Please report this as an issue at" + , style Url "https://github.com/commercialhaskell/stack/issues" + ] + <> "." + <> blankLine + -- See https://github.com/commercialhaskell/stack/issues/5851 + <> if exMsg == winIOGHCRTSMsg + then + flow "This error can be caused by a bug that arises if GHC's \ + \'--io-manager=native' RTS option is set using the GHCRTS \ + \environment variable. As a workaround try setting the option \ + \in the project's Cabal file, Stack's configuration file or at \ + \the command line." + else + flow "As a workaround you may delete the database. This \ + \will cause the database to be recreated." + where + exMsg = displayException ex + winIOGHCRTSMsg = + "\\\\.\\NUL: hDuplicateTo: illegal operation (handles are incompatible)" + +instance Exception StoragePrettyException + +-- | Type representing SQL database connections to the user database. This +-- provides a bit of type safety to ensure we're talking to the right database. +newtype UserStorage = UserStorage + { userStorage :: Storage + } + +-- | Type representing SQL database connections to the project database for +-- caches. This provides a bit of type safety to ensure we're talking to the +-- right database. +newtype ProjectStorage = ProjectStorage + { projectStorage :: Storage + } diff --git a/src/Stack/Types/TemplateName.hs b/src/Stack/Types/TemplateName.hs index bf74cfee3c..df36489ed6 100644 --- a/src/Stack/Types/TemplateName.hs +++ b/src/Stack/Types/TemplateName.hs @@ -1,7 +1,13 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} --- | Template name handling. +{-| +Module : Stack.Types.TemplateName +Description : Template name handling. +License : BSD-3-Clause + +Template name handling. 
+-} module Stack.Types.TemplateName ( TemplateName @@ -17,92 +23,111 @@ module Stack.Types.TemplateName , defaultTemplateName ) where -import Data.Aeson (FromJSON (..), withText) +import Data.Aeson ( FromJSON (..), withText ) import qualified Data.Text as T -import Network.HTTP.StackClient (parseRequest) +import Network.HTTP.StackClient ( parseRequest ) import qualified Options.Applicative as O -import Path +import Path ( parseAbsFile, parseRelFile ) import Stack.Prelude +-- | Type representing exceptions thrown by functions exported by the +-- "Stack.Types.TemplateName" module. +newtype TypeTemplateNameException + = DefaultTemplateNameNotParsedBug String + deriving Show + +instance Exception TypeTemplateNameException where + displayException (DefaultTemplateNameNotParsedBug s) = bugReport "[S-7410]" $ + "Cannot parse default template name: " + ++ s + -- | A template name. -data TemplateName = TemplateName !Text !TemplatePath - deriving (Ord,Eq,Show) - -data TemplatePath = AbsPath (Path Abs File) - -- ^ an absolute path on the filesystem - | RelPath String (Path Rel File) - -- ^ a relative path on the filesystem, or relative to - -- the template repository. To avoid path separator conversion - -- on Windows, the raw command-line parameter passed is also - -- given as the first field (possibly with @.hsfiles@ appended). - | UrlPath String - -- ^ a full URL - | RepoPath RepoTemplatePath +data TemplateName + = TemplateName !Text !TemplatePath + deriving (Eq, Ord, Show) + +data TemplatePath + = AbsPath (Path Abs File) + -- ^ an absolute path on the filesystem + | RelPath String (Path Rel File) + -- ^ a relative path on the filesystem, or relative to the template + -- repository. To avoid path separator conversion on Windows, the raw + -- command-line parameter passed is also given as the first field (possibly + -- with @.hsfiles@ appended). 
+ | UrlPath String + -- ^ a full URL + | RepoPath RepoTemplatePath deriving (Eq, Ord, Show) -- | Details for how to access a template from a remote repo. data RepoTemplatePath = RepoTemplatePath - { rtpService :: RepoService - , rtpUser :: Text - , rtpTemplate :: Text - } - deriving (Eq, Ord, Show) + { service :: RepoService + , user :: Text + , template :: Text + } + deriving (Eq, Ord, Show) -- | Services from which templates can be retrieved from a repository. -data RepoService = Github | Gitlab | Bitbucket - deriving (Eq, Ord, Show) +data RepoService + = GitHub + | GitLab + | Bitbucket + | Codeberg + deriving (Eq, Ord, Show) instance FromJSON TemplateName where - parseJSON = withText "TemplateName" $ - either fail return . parseTemplateNameFromString . T.unpack - --- | An argument which accepts a template name of the format --- @foo.hsfiles@ or @foo@, ultimately normalized to @foo@. -templateNameArgument :: O.Mod O.ArgumentFields TemplateName - -> O.Parser TemplateName + parseJSON = withText "TemplateName" $ + either fail pure . parseTemplateNameFromString . T.unpack + +-- | An argument which accepts a template name of the format @foo.hsfiles@ or +-- @foo@, ultimately normalized to @foo@. +templateNameArgument :: + O.Mod O.ArgumentFields TemplateName + -> O.Parser TemplateName templateNameArgument = - O.argument - (do string <- O.str - either O.readerError return (parseTemplateNameFromString string)) + O.argument + (do s <- O.str + either O.readerError pure (parseTemplateNameFromString s)) -- | An argument which accepts a @key:value@ pair for specifying parameters. 
-templateParamArgument :: O.Mod O.OptionFields (Text,Text) - -> O.Parser (Text,Text) +templateParamArgument :: + O.Mod O.OptionFields (Text,Text) + -> O.Parser (Text,Text) templateParamArgument = - O.option - (do string <- O.str - either O.readerError return (parsePair string)) - where - parsePair :: String -> Either String (Text, Text) - parsePair s = - case break (==':') s of - (key,':':value@(_:_)) -> Right (T.pack key, T.pack value) - _ -> Left ("Expected key:value format for argument: " <> s) + O.option + (do s <- O.str + either O.readerError pure (parsePair s)) + where + parsePair :: String -> Either String (Text, Text) + parsePair s = + case break (==':') s of + (key,':':value@(_:_)) -> Right (T.pack key, T.pack value) + _ -> Left ("Expected key:value format for argument: " <> s) -- | Parse a template name from a string. parseTemplateNameFromString :: String -> Either String TemplateName parseTemplateNameFromString fname = - case T.stripSuffix ".hsfiles" (T.pack fname) of - Nothing -> parseValidFile (T.pack fname) (fname <> ".hsfiles") fname - Just prefix -> parseValidFile prefix fname fname - where - parseValidFile prefix hsf orig = maybe (Left expected) Right - $ asum (validParses prefix hsf orig) - validParses prefix hsf orig = - -- NOTE: order is important - [ TemplateName prefix . RepoPath <$> parseRepoPath hsf - , TemplateName (T.pack orig) . UrlPath <$> (parseRequest orig *> Just orig) - , TemplateName prefix . AbsPath <$> parseAbsFile hsf - , TemplateName prefix . 
RelPath hsf <$> parseRelFile hsf - ] - expected = "Expected a template like: foo or foo.hsfiles or\ - \ https://example.com/foo.hsfiles or github:user/foo" + case T.stripSuffix ".hsfiles" (T.pack fname) of + Nothing -> parseValidFile (T.pack fname) (fname <> ".hsfiles") fname + Just prefix -> parseValidFile prefix fname fname + where + parseValidFile prefix hsf orig = + maybe (Left expected) Right $ asum (validParses prefix hsf orig) + validParses prefix hsf orig = + -- NOTE: order is important + [ TemplateName prefix . RepoPath <$> parseRepoPath hsf + , TemplateName (T.pack orig) . UrlPath <$> (parseRequest orig *> Just orig) + , TemplateName prefix . AbsPath <$> parseAbsFile hsf + , TemplateName prefix . RelPath hsf <$> parseRelFile hsf + ] + expected = "Expected a template like: foo or foo.hsfiles or \ + \https://example.com/foo.hsfiles or github:user/foo" -- | The default template name you can use if you don't have one. defaultTemplateName :: TemplateName defaultTemplateName = case parseTemplateNameFromString "new-template" of - Left s -> error $ "Bug in Stack codebase, cannot parse default template name: " ++ s + Left s -> impureThrow $ DefaultTemplateNameNotParsedBug s Right x -> x -- | Get a text representation of the template name. @@ -114,16 +139,17 @@ templatePath :: TemplateName -> TemplatePath templatePath (TemplateName _ fp) = fp defaultRepoUserForService :: RepoService -> Maybe Text -defaultRepoUserForService Github = Just "commercialhaskell" +defaultRepoUserForService GitHub = Just "commercialhaskell" defaultRepoUserForService _ = Nothing -- | Parses a template path of the form @github:user/template@. 
parseRepoPath :: String -> Maybe RepoTemplatePath parseRepoPath s = case T.splitOn ":" (T.pack s) of - ["github" , rest] -> parseRepoPathWithService Github rest - ["gitlab" , rest] -> parseRepoPathWithService Gitlab rest + ["github" , rest] -> parseRepoPathWithService GitHub rest + ["gitlab" , rest] -> parseRepoPathWithService GitLab rest ["bitbucket" , rest] -> parseRepoPathWithService Bitbucket rest + ["codeberg" , rest] -> parseRepoPathWithService Codeberg rest _ -> Nothing -- | Parses a template path of the form @user/template@, given a service @@ -131,7 +157,7 @@ parseRepoPathWithService :: RepoService -> Text -> Maybe RepoTemplatePath parseRepoPathWithService service path = case T.splitOn "/" path of [user, name] -> Just $ RepoTemplatePath service user name - [name] -> do - repoUser <- defaultRepoUserForService service - Just $ RepoTemplatePath service repoUser name - _ -> Nothing + [name] -> do + repoUser <- defaultRepoUserForService service + Just $ RepoTemplatePath service repoUser name + _ -> Nothing diff --git a/src/Stack/Types/UnusedFlags.hs b/src/Stack/Types/UnusedFlags.hs new file mode 100644 index 0000000000..bd92162502 --- /dev/null +++ b/src/Stack/Types/UnusedFlags.hs @@ -0,0 +1,28 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.UnusedFlags +License : BSD-3-Clause +-} + +module Stack.Types.UnusedFlags + ( UnusedFlags (..) + , FlagSource (..) 
+ ) where + +import Stack.Prelude + +data FlagSource + = FSCommandLine + | FSStackYaml + deriving (Eq, Ord, Show) + +data UnusedFlags + = UFNoPackage FlagSource PackageName + | UFFlagsNotDefined + FlagSource + PackageName + (Set FlagName) -- defined in package + (Set FlagName) -- not defined + | UFSnapshot PackageName + deriving (Eq, Ord, Show) diff --git a/src/Stack/Types/UpgradeOpts.hs b/src/Stack/Types/UpgradeOpts.hs new file mode 100644 index 0000000000..dac7d5777a --- /dev/null +++ b/src/Stack/Types/UpgradeOpts.hs @@ -0,0 +1,46 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.UpgradeOpts +Description : Types for command line options for Stack's @upgrade@ command. +License : BSD-3-Clause + +Types for command line options for Stack's @upgrade@ command. +-} + +module Stack.Types.UpgradeOpts + ( UpgradeOpts (..) + , BinaryOpts (..) + , SourceOpts (..) + ) where + +import Stack.Prelude + +-- | Type representing command line options for the @stack upgrade@ command. +data UpgradeOpts = UpgradeOpts + { binary :: !(Maybe BinaryOpts) + , source :: !(Maybe SourceOpts) + } + deriving Show + +-- | Type representing options for upgrading Stack with a binary executable +-- file. +data BinaryOpts = BinaryOpts + { platform :: !(Maybe String) + , force :: !Bool + -- ^ Force a download, even if the downloaded version is older than what we + -- are. + , onlyLocalBin :: !Bool + -- ^ Only download to Stack's local binary directory. + , version :: !(Maybe String) + -- ^ Specific version to download + , gitHubOrg :: !(Maybe String) + , gitHubRepo :: !(Maybe String) + } + deriving Show + +-- | Type representing options for upgrading Stack from source code. 
+newtype SourceOpts + = SourceOpts (Maybe (String, String)) -- repo and branch + deriving Show diff --git a/src/Stack/Types/UploadOpts.hs b/src/Stack/Types/UploadOpts.hs new file mode 100644 index 0000000000..04418f92b9 --- /dev/null +++ b/src/Stack/Types/UploadOpts.hs @@ -0,0 +1,40 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} + +{-| +Module : Stack.Types.UploadOpts +Description : Types for command line options for Stack's @upload@ command. +License : BSD-3-Clause + +Types for command line options for Stack's @upload@ command. +-} + +module Stack.Types.UploadOpts + ( UploadOpts (..) + , UploadVariant (..) + ) where + +import Stack.Prelude +import Stack.Types.PvpBounds (PvpBounds) + +-- | Type representing command line options for the @stack upload@ command. +data UploadOpts = UploadOpts + { itemsToWorkWith :: ![String] + -- ^ The items to work with. + , documentation :: !Bool + -- ^ Uploading documentation for packages? + , pvpBounds :: !(Maybe PvpBounds) + , check :: !Bool + , buildPackage :: !Bool + , tarPath :: !(Maybe FilePath) + , uploadVariant :: !UploadVariant + , saveHackageCreds :: !FirstTrue + -- ^ Save user's Hackage username and password in a local file? + } + +-- | Type representing variants for uploading to Hackage. +data UploadVariant + = Publishing + -- ^ Publish the package/a published package. + | Candidate + -- ^ Create a package candidate/a package candidate. diff --git a/src/Stack/Types/Version.hs b/src/Stack/Types/Version.hs index 6b35171935..244bed5cc6 100644 --- a/src/Stack/Types/Version.hs +++ b/src/Stack/Types/Version.hs @@ -1,56 +1,69 @@ {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE ViewPatterns #-} --- | Versions for packages. +{-| +Module : Stack.Types.Version +Description : Versions for packages. +License : BSD-3-Clause + +Versions for packages. 
+-} module Stack.Types.Version - (Version - ,Cabal.VersionRange -- TODO in the future should have a newtype wrapper - ,IntersectingVersionRange(..) - ,VersionCheck(..) - ,versionRangeText - ,Cabal.withinRange - ,Stack.Types.Version.intersectVersionRanges - ,toMajorVersion - ,latestApplicableVersion - ,checkVersion - ,nextMajorVersion - ,minorVersion - ,stackVersion - ,stackMinorVersion) - where - -import Stack.Prelude hiding (Vector) -import Pantry.Internal.AesonExtended -import Data.List (find) + ( Cabal.VersionRange -- TODO in the future should have a newtype wrapper + , IntersectingVersionRange (..) + , VersionCheck (..) + , versionRangeText + , Cabal.withinRange + , Stack.Types.Version.intersectVersionRanges + , toMajorVersion + , latestApplicableVersion + , checkVersion + , nextMajorVersion + , minorVersion + , stackVersion + , showStackVersion + , stackMajorVersion + , stackMinorVersion + ) where + +import Data.Aeson.Types + ( FromJSON (..), ToJSON (..), Value (..), withText ) +import Data.List ( find ) import qualified Data.Set as Set import qualified Data.Text as T -import Distribution.Pretty (pretty) +import Data.Version ( showVersion ) +import Distribution.Pretty ( pretty ) import qualified Distribution.Version as Cabal import qualified Paths_stack as Meta -import Text.PrettyPrint (render) +import Stack.Prelude hiding ( Vector, pretty ) +import Text.PrettyPrint ( render ) -newtype IntersectingVersionRange = - IntersectingVersionRange { getIntersectingVersionRange :: Cabal.VersionRange } - deriving Show +newtype IntersectingVersionRange = IntersectingVersionRange + { intersectingVersionRange :: Cabal.VersionRange } + deriving Show instance Semigroup IntersectingVersionRange where - IntersectingVersionRange l <> IntersectingVersionRange r = - IntersectingVersionRange (l `Cabal.intersectVersionRanges` r) + IntersectingVersionRange l <> IntersectingVersionRange r = + IntersectingVersionRange (l `Cabal.intersectVersionRanges` r) instance Monoid 
IntersectingVersionRange where - mempty = IntersectingVersionRange Cabal.anyVersion - mappend = (<>) + mempty = IntersectingVersionRange Cabal.anyVersion + mappend = (<>) -- | Display a version range versionRangeText :: Cabal.VersionRange -> Text versionRangeText = T.pack . render . pretty -- | A modified intersection which also simplifies, for better display. -intersectVersionRanges :: Cabal.VersionRange -> Cabal.VersionRange -> Cabal.VersionRange -intersectVersionRanges x y = Cabal.simplifyVersionRange $ Cabal.intersectVersionRanges x y +intersectVersionRanges :: + Cabal.VersionRange + -> Cabal.VersionRange + -> Cabal.VersionRange +intersectVersionRanges x y = + Cabal.simplifyVersionRange $ Cabal.intersectVersionRanges x y -- | Returns the first two components, defaulting to 0 if not present toMajorVersion :: Version -> Version @@ -74,41 +87,43 @@ nextMajorVersion v = a:b:_ -> Cabal.mkVersion [a, b + 1] data VersionCheck - = MatchMinor - | MatchExact - | NewerMinor - deriving (Show, Eq, Ord) + = MatchMinor + | MatchExact + | NewerMinor + deriving (Eq, Ord, Show) + instance ToJSON VersionCheck where - toJSON MatchMinor = String "match-minor" - toJSON MatchExact = String "match-exact" - toJSON NewerMinor = String "newer-minor" + toJSON MatchMinor = String "match-minor" + toJSON MatchExact = String "match-exact" + toJSON NewerMinor = String "newer-minor" + instance FromJSON VersionCheck where - parseJSON = withText expected $ \t -> - case t of - "match-minor" -> return MatchMinor - "match-exact" -> return MatchExact - "newer-minor" -> return NewerMinor - _ -> fail ("Expected " ++ expected ++ ", but got " ++ show t) - where - expected = "VersionCheck value (match-minor, match-exact, or newer-minor)" + parseJSON = withText expected $ \t -> + case t of + "match-minor" -> pure MatchMinor + "match-exact" -> pure MatchExact + "newer-minor" -> pure NewerMinor + _ -> fail ("Expected " ++ expected ++ ", but got " ++ show t) + where + expected = "VersionCheck value 
(match-minor, match-exact, or newer-minor)" checkVersion :: VersionCheck -> Version -> Version -> Bool checkVersion check (Cabal.versionNumbers -> wanted) (Cabal.versionNumbers -> actual) = - case check of - MatchMinor -> and (take 3 matching) - MatchExact -> length wanted == length actual && and matching - NewerMinor -> and (take 2 matching) && newerMinor - where - matching = zipWith (==) wanted actual - - getMinor (_a:_b:c:_) = Just c - getMinor _ = Nothing - - newerMinor = - case (getMinor wanted, getMinor actual) of - (Nothing, _) -> True - (Just _, Nothing) -> False - (Just w, Just a) -> a >= w + case check of + MatchMinor -> and (take 3 matching) + MatchExact -> length wanted == length actual && and matching + NewerMinor -> and (take 2 matching) && newerMinor + where + matching = zipWith (==) wanted actual + + getMinor (_a:_b:c:_) = Just c + getMinor _ = Nothing + + newerMinor = + case (getMinor wanted, getMinor actual) of + (Nothing, _) -> True + (Just _, Nothing) -> False + (Just w, Just a) -> a >= w -- | Get minor version (excludes any patchlevel) minorVersion :: Version -> Version @@ -118,6 +133,16 @@ minorVersion = Cabal.mkVersion . take 3 . Cabal.versionNumbers stackVersion :: Version stackVersion = Cabal.mkVersion' Meta.version +-- | Current Stack version in the same format as yielded by +-- 'Data.Version.showVersion'. +showStackVersion :: String +showStackVersion = showVersion Meta.version + -- | Current Stack minor version (excludes patchlevel) stackMinorVersion :: Version stackMinorVersion = minorVersion stackVersion + +-- | Current Stack major version. 
Returns the first two components, defaulting +-- to 0 if not present +stackMajorVersion :: Version +stackMajorVersion = toMajorVersion stackVersion diff --git a/src/Stack/Types/VersionIntervals.hs b/src/Stack/Types/VersionIntervals.hs deleted file mode 100644 index d30188f953..0000000000 --- a/src/Stack/Types/VersionIntervals.hs +++ /dev/null @@ -1,82 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveDataTypeable #-} -module Stack.Types.VersionIntervals -- to be removed with https://github.com/commercialhaskell/stack/issues/4213 - ( VersionIntervals - , toVersionRange - , fromVersionRange - , withinIntervals - , unionVersionIntervals - , intersectVersionIntervals - ) where - -import Stack.Types.Version -import qualified Distribution.Version as C -import Stack.Prelude - -newtype VersionIntervals = VersionIntervals [VersionInterval] - deriving (Generic, Show, Eq, Data, Typeable) -instance NFData VersionIntervals - -data VersionInterval = VersionInterval - { viLowerVersion :: !Version - , viLowerBound :: !Bound - , viUpper :: !(Maybe (Version, Bound)) - } - deriving (Generic, Show, Eq, Data, Typeable) -instance NFData VersionInterval - -data Bound = ExclusiveBound | InclusiveBound - deriving (Generic, Show, Eq, Data, Typeable) -instance NFData Bound - -toVersionRange :: VersionIntervals -> C.VersionRange -toVersionRange = C.fromVersionIntervals . toCabal - -fromVersionRange :: C.VersionRange -> VersionIntervals -fromVersionRange = fromCabal . 
C.toVersionIntervals - -withinIntervals :: Version -> VersionIntervals -> Bool -withinIntervals v vi = C.withinIntervals v (toCabal vi) - -unionVersionIntervals :: VersionIntervals -> VersionIntervals -> VersionIntervals -unionVersionIntervals x y = fromCabal $ C.unionVersionIntervals - (toCabal x) - (toCabal y) - -intersectVersionIntervals :: VersionIntervals -> VersionIntervals -> VersionIntervals -intersectVersionIntervals x y = fromCabal $ C.intersectVersionIntervals - (toCabal x) - (toCabal y) - -toCabal :: VersionIntervals -> C.VersionIntervals -toCabal (VersionIntervals vi) = - C.mkVersionIntervals $ map go vi - where - go (VersionInterval lowerV lowerB mupper) = - ( C.LowerBound lowerV (toCabalBound lowerB) - , case mupper of - Nothing -> C.NoUpperBound - Just (v, b) -> C.UpperBound v (toCabalBound b) - ) - -fromCabal :: C.VersionIntervals -> VersionIntervals -fromCabal = - VersionIntervals . map go . C.versionIntervals - where - go (C.LowerBound lowerV lowerB, upper) = VersionInterval - { viLowerVersion = lowerV - , viLowerBound = fromCabalBound lowerB - , viUpper = - case upper of - C.NoUpperBound -> Nothing - C.UpperBound v b -> Just (v, fromCabalBound b) - } - -toCabalBound :: Bound -> C.Bound -toCabalBound ExclusiveBound = C.ExclusiveBound -toCabalBound InclusiveBound = C.InclusiveBound - -fromCabalBound :: C.Bound -> Bound -fromCabalBound C.ExclusiveBound = ExclusiveBound -fromCabalBound C.InclusiveBound = InclusiveBound diff --git a/src/Stack/Types/VersionedDownloadInfo.hs b/src/Stack/Types/VersionedDownloadInfo.hs new file mode 100644 index 0000000000..e678b3b63a --- /dev/null +++ b/src/Stack/Types/VersionedDownloadInfo.hs @@ -0,0 +1,34 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Types.VersionedDownloadInfo +License : BSD-3-Clause +-} + +module Stack.Types.VersionedDownloadInfo + ( VersionedDownloadInfo (..) + ) where + +import Data.Aeson.Types ( FromJSON (..) 
) +import Data.Aeson.WarningParser + ( WithJSONWarnings (..), (..:), withObjectWarnings ) +import Stack.Prelude +import Stack.Types.DownloadInfo + ( DownloadInfo, parseDownloadInfoFromObject ) + +data VersionedDownloadInfo = VersionedDownloadInfo + { version :: Version + , downloadInfo :: DownloadInfo + } + deriving Show + +instance FromJSON (WithJSONWarnings VersionedDownloadInfo) where + parseJSON = withObjectWarnings "VersionedDownloadInfo" $ \o -> do + CabalString version <- o ..: "version" + downloadInfo <- parseDownloadInfoFromObject o + pure VersionedDownloadInfo + { version + , downloadInfo + } diff --git a/src/Stack/Types/WantedCompilerSetter.hs b/src/Stack/Types/WantedCompilerSetter.hs new file mode 100644 index 0000000000..6946faed55 --- /dev/null +++ b/src/Stack/Types/WantedCompilerSetter.hs @@ -0,0 +1,22 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Types.WantedCompilerSetter +License : BSD-3-Clause +-} + +module Stack.Types.WantedCompilerSetter + ( WantedCompilerSetter (..) + ) where + +import Stack.Prelude + +-- | Type representing ways that a wanted compiler is set. +data WantedCompilerSetter + = CompilerAtCommandLine + -- ^ At the command line with --compiler option. + | SnapshotAtCommandLine + -- ^ At the command line with --snapshot or (deprecated) --resolver option. + | YamlConfiguration (Maybe (Path Abs File)) + -- ^ Via a configuration file. + deriving Show diff --git a/src/Stack/Uninstall.hs b/src/Stack/Uninstall.hs new file mode 100644 index 0000000000..8378087e0b --- /dev/null +++ b/src/Stack/Uninstall.hs @@ -0,0 +1,94 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Uninstall +Description : Function related to Stack's @uninstall@ command. +License : BSD-3-Clause + +Function related to Stack's @uninstall@ command. 
+-} + +module Stack.Uninstall + ( uninstallCmd + ) where + +import Stack.Constants ( osIsWindows ) +import Stack.Prelude +import Stack.Runners ( ShouldReexec (..), withConfig ) +import Stack.Types.Config + ( Config (..), configL, stackRootL, userGlobalConfigFileL ) +import Stack.Types.Runner ( Runner ) + +-- | Function underlying the @stack uninstall@ command. Display help for the +-- command. +uninstallCmd :: () -> RIO Runner () +uninstallCmd () = withConfig NoReexec $ do + stackRoot <- view stackRootL + userGlobalConfigFile <- view userGlobalConfigFileL + programsDir <- view $ configL . to (.localProgramsBase) + localBinDir <- view $ configL . to (.localBin) + let toStyleDoc = style Dir . fromString . toFilePath + stackRoot' = toStyleDoc stackRoot + userGlobalConfigFile' = toStyleDoc userGlobalConfigFile + programsDir' = toStyleDoc programsDir + localBinDir' = toStyleDoc localBinDir + putUtf8Builder =<< displayWithColor + ( vsep + [ flow "To uninstall Stack, it should be sufficient to delete:" + , hang 4 $ fillSep + [ flow "(1) the directory containing Stack's tools" + , "(" <> softbreak <> programsDir' <> softbreak <> ");" + ] + , hang 4 $ fillSep + [ flow "(2) the Stack root directory" + , "(" <> softbreak <> stackRoot' <> softbreak <> ");" + ] + , hang 4 $ fillSep + [ flow "(3) if different, the directory containing " + , flow "Stack's user-specific global configuration file" + , parens userGlobalConfigFile' <> ";" + , "and" + ] + , hang 4 $ fillSep + [ flow "(4) the 'stack' executable file (see the output" + , flow "of command" + , howToFindStack <> "," + , flow "if Stack is on the PATH;" + , flow "Stack is often installed in" + , localBinDir' <> softbreak <> ")." + ] + , fillSep + [flow "You may also want to delete" + , style File ".stack-work" + , flow "directories in any Haskell projects that you have built." 
+ ] + ] + <> blankLine + <> vsep + [ fillSep + [ flow "To uninstall completely a Stack-supplied tool (such as \ + \GHC or, on Windows, MSYS2), delete from Stack's tools \ + \directory" + , parens programsDir' <> ":" + ] + , hang 4 $ fillSep + [ flow "(1) the tool's subdirectory;" + ] + , hang 4 $ fillSep + [ flow "(2) if present, the tool's archive file" + , parens (style File ".tar.xz") <> "; and" + ] + , hang 4 $ fillSep + [ flow "(3) the file marking that the tool is installed" + , parens (style File ".installed") <> "." + ] + ] + <> blankLine + ) + where + styleShell = style Shell + howToFindStack + | osIsWindows = styleShell "where.exe stack" + | otherwise = styleShell "which stack" diff --git a/src/Stack/Unpack.hs b/src/Stack/Unpack.hs index 16309ae0a4..3af72c8476 100644 --- a/src/Stack/Unpack.hs +++ b/src/Stack/Unpack.hs @@ -1,112 +1,222 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Unpack +Description : Functions related to Stack's @unpack@ command. +License : BSD-3-Clause + +Functions related to Stack's @unpack@ command. +-} + module Stack.Unpack - ( unpackPackages + ( UnpackOpts (..) 
+ , UnpackTarget + , unpackCmd + , unpackPackages ) where -import Stack.Prelude -import qualified RIO.Text as T +import Data.List.Extra ( notNull ) +import Path ( SomeBase (..), (), parseRelDir ) +import Path.IO ( doesDirExist, getCurrentDir ) import qualified RIO.Map as Map +import RIO.Process ( HasProcessContext ) import qualified RIO.Set as Set -import RIO.List (intercalate) -import RIO.Process (HasProcessContext) -import Path ((), parseRelDir) -import Path.IO (doesDirExist) +import qualified RIO.Text as T +import Stack.Config ( getRawSnapshot ) +import Stack.Constants ( relDirRoot ) +import Stack.Prelude +import Stack.Runners ( ShouldReexec (..), withConfig ) +import Stack.Types.Config ( Config (..), HasConfig, configL ) +import Stack.Types.Runner ( Runner ) -data UnpackException +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Unpack" module. +data UnpackPrettyException = UnpackDirectoryAlreadyExists (Set (Path Abs Dir)) - | CouldNotParsePackageSelectors [String] - deriving Typeable -instance Exception UnpackException -instance Show UnpackException where - show (UnpackDirectoryAlreadyExists dirs) = unlines - $ "Unable to unpack due to already present directories:" - : map ((" " ++) . 
toFilePath) (Set.toList dirs) - show (CouldNotParsePackageSelectors strs) = unlines - $ "The following package selectors are not valid package names or identifiers:" - : map ("- " ++) strs + | CouldNotParsePackageSelectors [StyleDoc] + | PackageCandidatesRequireVersions [PackageName] + | PackageLocationInvalid PackageIdentifierRevision + deriving Show + +instance Pretty UnpackPrettyException where + pretty (UnpackDirectoryAlreadyExists dirs) = + "[S-3515]" + <> line + <> flow "Stack was unable to unpack due to directories already being \ + \present:" + <> line + <> bulletedList (map pretty $ Set.toList dirs) + pretty (CouldNotParsePackageSelectors errs) = + "[S-2628]" + <> line + <> flow "The following package selectors are not valid package names or \ + \identifiers:" + <> line + <> bulletedList errs + pretty (PackageCandidatesRequireVersions names) = + "[S-6114]" + <> line + <> flow "Package candidates to unpack cannot be identified by name only. \ + \The following do not specify a version:" + <> line + <> bulletedList (map fromPackageName names) + pretty (PackageLocationInvalid pir) = + "[S-5170]" + <> line + <> fillSep + [ flow "While trying to unpack" + , style Target (fromString $ T.unpack $ textDisplay pir) <> "," + , flow "Stack encountered an error." + ] + +instance Exception UnpackPrettyException + +-- | Type synonymn representing packages to be unpacked by the @stack unpack@ +-- command, identified either by name only or by an identifier (including +-- Hackage revision). +type UnpackTarget = Either PackageName PackageIdentifierRevision + +-- | Type representing options for the @stack unpack@ command. +data UnpackOpts = UnpackOpts + { targets :: [UnpackTarget] + -- ^ The packages or package candidates to be unpacked. + , areCandidates :: Bool + -- ^ Whether the targets are Hackage package candidates. + , dest :: Maybe (SomeBase Dir) + -- ^ The optional directory into which a target will be unpacked into a + -- subdirectory. 
+ } + +-- | Function underlying the @stack unpack@ command. Unpack packages or package +-- candidates to the filesystem. +unpackCmd :: + UnpackOpts + -> RIO Runner () +unpackCmd (UnpackOpts targets areCandidates Nothing) = + unpackCmd (UnpackOpts targets areCandidates (Just $ Rel relDirRoot)) +unpackCmd (UnpackOpts targets areCandidates (Just dstPath)) = + withConfig NoReexec $ do + mSnapshot <- getRawSnapshot + dstPath' <- case dstPath of + Abs path -> pure path + Rel path -> do + wd <- getCurrentDir + pure $ wd path + unpackPackages mSnapshot dstPath' targets areCandidates -- | Intended to work for the command line command. -unpackPackages - :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) - => Maybe RawSnapshot -- ^ when looking up by name, take from this build plan - -> Path Abs Dir -- ^ destination - -> [String] -- ^ names or identifiers +unpackPackages :: + forall env. + (HasConfig env, HasPantryConfig env, HasProcessContext env, HasTerm env) + => Maybe RawSnapshot -- ^ When looking up by name, take from this build plan. + -> Path Abs Dir -- ^ Destination. + -> [UnpackTarget] + -> Bool + -- ^ Whether the targets are package candidates. 
-> RIO env () -unpackPackages mSnapshot dest input = do - let (errs1, (names, pirs1)) = - fmap partitionEithers $ partitionEithers $ map parse input - locs1 <- forM pirs1 $ \pir -> do - loc <- fmap cplComplete $ completePackageLocation $ RPLIHackage pir Nothing - pure (loc, packageLocationIdent loc) - (errs2, locs2) <- partitionEithers <$> traverse toLoc names - case errs1 ++ errs2 of - [] -> pure () - errs -> throwM $ CouldNotParsePackageSelectors errs - locs <- Map.fromList <$> mapM - (\(pir, ident) -> do - suffix <- parseRelDir $ packageIdentifierString ident - pure (pir, dest suffix) - ) - (locs1 ++ locs2) - - alreadyUnpacked <- filterM doesDirExist $ Map.elems locs - - unless (null alreadyUnpacked) $ - throwM $ UnpackDirectoryAlreadyExists $ Set.fromList alreadyUnpacked - - forM_ (Map.toList locs) $ \(loc, dest') -> do - unpackPackageLocation dest' loc - logInfo $ - "Unpacked " <> - display loc <> - " to " <> - fromString (toFilePath dest') - where - toLoc | Just snapshot <- mSnapshot = toLocSnapshot snapshot - | otherwise = toLocNoSnapshot - - toLocNoSnapshot :: PackageName -> RIO env (Either String (PackageLocationImmutable, PackageIdentifier)) - toLocNoSnapshot name = do - mloc1 <- getLatestHackageLocation YesRequireHackageIndex name UsePreferredVersions - mloc <- - case mloc1 of - Just _ -> pure mloc1 - Nothing -> do - updated <- updateHackageIndex $ Just $ "Could not find package " <> fromString (packageNameString name) <> ", updating" - case updated of - UpdateOccurred -> getLatestHackageLocation YesRequireHackageIndex name UsePreferredVersions - NoUpdateOccurred -> pure Nothing - case mloc of - Nothing -> do - candidates <- getHackageTypoCorrections name - pure $ Left $ concat - [ "Could not find package " - , packageNameString name - , " on Hackage" - , if null candidates - then "" - else ". 
Perhaps you meant: " ++ intercalate ", " (map packageNameString candidates) - ] - Just loc -> pure $ Right (loc, packageLocationIdent loc) - - toLocSnapshot :: RawSnapshot -> PackageName -> RIO env (Either String (PackageLocationImmutable, PackageIdentifier)) - toLocSnapshot snapshot name = - case Map.lookup name (rsPackages snapshot) of - Nothing -> - pure $ Left $ "Package does not appear in snapshot: " ++ packageNameString name - Just sp -> do - loc <- cplComplete <$> completePackageLocation (rspLocation sp) - pure $ Right (loc, packageLocationIdent loc) - - -- Possible future enhancement: parse names as name + version range - parse s = - case parsePackageName (T.unpack t) of - Just x -> Right $ Left x - Nothing -> - case parsePackageIdentifierRevision t of - Right x -> Right $ Right x - Left _ -> Left $ "Could not parse as package name or identifier: " ++ s - where - t = T.pack s +unpackPackages mSnapshot dest targets areCandidates = do + let (names, pirs) = partitionEithers targets + pisWithRevisions = any hasRevision pirs + hasRevision (PackageIdentifierRevision _ _ CFILatest) = False + hasRevision _ = True + when (areCandidates && notNull names) $ + prettyThrowIO $ PackageCandidatesRequireVersions names + when (areCandidates && pisWithRevisions) $ + prettyWarn $ + flow "Package revisions are not meaningful for package candidates and \ + \will be ignored." + <> line + locs1 <- forM pirs $ \pir -> do + hackageBaseUrl <- view $ configL . to (.hackageBaseUrl) + let rpli = if areCandidates + then + let -- Ignoring revisions for package candidates. 
+ PackageIdentifierRevision candidateName candidateVersion _ = pir + candidatePkgId = + PackageIdentifier candidateName candidateVersion + candidatePkgIdText = + T.pack $ packageIdentifierString candidatePkgId + candidateUrl = + hackageBaseUrl + <> "package/" + <> candidatePkgIdText + <> "/candidate/" + <> candidatePkgIdText + <> ".tar.gz" + candidateLoc = ALUrl candidateUrl + candidateArchive = RawArchive candidateLoc Nothing Nothing "" + candidateMetadata = RawPackageMetadata Nothing Nothing Nothing + in RPLIArchive candidateArchive candidateMetadata + else RPLIHackage pir Nothing + loc <- cplComplete <$> completePackageLocation rpli + `catch` \(_ :: SomeException) -> prettyThrowIO $ PackageLocationInvalid pir + pure (loc, packageLocationIdent loc) + (errs, locs2) <- partitionEithers <$> traverse toLoc names + unless (null errs) $ prettyThrowM $ CouldNotParsePackageSelectors errs + locs <- Map.fromList <$> mapM + (\(pir, ident) -> do + suffix <- parseRelDir $ packageIdentifierString ident + pure (pir, dest suffix) + ) + (locs1 ++ locs2) + + alreadyUnpacked <- filterM doesDirExist $ Map.elems locs + + unless (null alreadyUnpacked) $ + prettyThrowM $ UnpackDirectoryAlreadyExists $ Set.fromList alreadyUnpacked + + forM_ (Map.toList locs) $ \(loc, dest') -> do + unpackPackageLocation dest' loc + prettyInfoL + [ "Unpacked" + , fromString $ T.unpack $ textDisplay loc + , "to" + , pretty dest' <> "." + ] + where + toLoc name | Just snapshot <- mSnapshot = toLocSnapshot snapshot name + | otherwise = do + void $ updateHackageIndex $ Just "Updating the package index." 
+ toLocNoSnapshot name + + toLocNoSnapshot :: + PackageName + -> RIO env (Either StyleDoc (PackageLocationImmutable, PackageIdentifier)) + toLocNoSnapshot name = do + mLoc <- getLatestHackageLocation + YesRequireHackageIndex + name + UsePreferredVersions + case mLoc of + Nothing -> do + candidates <- getHackageTypoCorrections name + pure $ Left $ fillSep + [ flow "Could not find package" + , style Current (fromPackageName name) + , flow "on Hackage." + , if null candidates + then mempty + else fillSep $ + flow "Perhaps you meant one of:" + : mkNarrativeList (Just Good) False + (map fromPackageName candidates :: [StyleDoc]) + ] + Just loc -> pure $ Right (loc, packageLocationIdent loc) + + toLocSnapshot :: + RawSnapshot + -> PackageName + -> RIO env (Either StyleDoc (PackageLocationImmutable, PackageIdentifier)) + toLocSnapshot snapshot name = + case Map.lookup name (rsPackages snapshot) of + Nothing -> + pure $ Left $ fillSep + [ flow "Package does not appear in snapshot:" + , style Current (fromPackageName name) <> "." + ] + Just sp -> do + loc <- cplComplete <$> completePackageLocation (rspLocation sp) + pure $ Right (loc, packageLocationIdent loc) diff --git a/src/Stack/Update.hs b/src/Stack/Update.hs new file mode 100644 index 0000000000..6b40fbe14a --- /dev/null +++ b/src/Stack/Update.hs @@ -0,0 +1,21 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Update +Description : Functions related to Stack's @update@ command. +License : BSD-3-Clause + +Functions related to Stack's @update@ command. +-} + +module Stack.Update + ( updateCmd + ) where + +import Stack.Prelude +import Stack.Runners ( ShouldReexec (..), withConfig ) +import Stack.Types.Runner ( Runner ) + +-- | Function underlying the @stack update@ command. Update the package index. 
+updateCmd :: () -> RIO Runner () +updateCmd () = withConfig NoReexec (void (updateHackageIndex Nothing)) diff --git a/src/Stack/Upgrade.hs b/src/Stack/Upgrade.hs index 41bfbf844e..f34f4b7017 100644 --- a/src/Stack/Upgrade.hs +++ b/src/Stack/Upgrade.hs @@ -1,243 +1,273 @@ {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Upgrade +Description : Function related to Stack's @upgrade@ command. +License : BSD-3-Clause + +Function related to Stack's @upgrade@ command. +-} + module Stack.Upgrade - ( upgrade - , UpgradeOpts - , upgradeOpts - ) where + ( upgradeCmd + ) where -import Stack.Prelude hiding (force, Display (..)) import qualified Data.Text as T -import Distribution.Version (mkVersion') -import Options.Applicative -import Path -import qualified Paths_stack as Paths -import Stack.Build -import Stack.Build.Target (NeedTargets(..)) -import Stack.Constants +import Path ( (), parseRelDir ) +import RIO.Process ( proc, runProcess_, withWorkingDir ) +import Stack.Build ( build ) +import Stack.Build.Target ( NeedTargets (..) ) +import Stack.BuildInfo ( maybeGitHash ) +import Stack.Constants ( relDirStackProgName, stackDotYaml ) +import Stack.Prelude hiding ( force, Display (..) 
) import Stack.Runners + ( ShouldReexec (..), withConfig, withEnvConfig + , withGlobalProject ) import Stack.Setup -import Stack.Types.Config -import System.Console.ANSI (hSupportsANSIWithoutEmulation) -import System.Process (rawSystem, readProcess) -import RIO.PrettyPrint -import RIO.Process - -upgradeOpts :: Parser UpgradeOpts -upgradeOpts = UpgradeOpts - <$> (sourceOnly <|> optional binaryOpts) - <*> (binaryOnly <|> optional sourceOpts) - where - binaryOnly = flag' Nothing (long "binary-only" <> help "Do not use a source upgrade path") - sourceOnly = flag' Nothing (long "source-only" <> help "Do not use a binary upgrade path") + ( downloadStackExe, downloadStackReleaseInfo + , getDownloadVersion, preferredPlatforms, stackVersion + ) +import Stack.Types.BuildOpts ( buildOptsInstallExesL ) +import Stack.Types.BuildOptsCLI + ( BuildOptsCLI (..), defaultBuildOptsCLI ) +import Stack.Types.Config ( Config (..), HasConfig (..), buildOptsL ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.Runner ( Runner, globalOptsL ) +import Stack.Types.StackYamlLoc ( StackYamlLoc (..) ) +import Stack.Types.UpgradeOpts + ( BinaryOpts (..), SourceOpts (..), UpgradeOpts (..) ) +import System.Process ( rawSystem, readProcess ) - binaryOpts = BinaryOpts - <$> optional (strOption - ( long "binary-platform" - <> help "Platform type for archive to download" - <> showDefault)) - <*> switch - (long "force-download" <> - help "Download the latest available stack executable") - <*> optional (strOption - (long "binary-version" <> - help "Download a specific stack version")) - <*> optional (strOption - (long "github-org" <> - help "Github organization name")) - <*> optional (strOption - (long "github-repo" <> - help "Github repository name")) +-- | Type representing \'pretty\' exceptions thrown by functions in the +-- "Stack.Upgrade" module. 
+data UpgradePrettyException + = SnapshotOptionInvalid + | NeitherBinaryOrSourceSpecified + | ExecutableFailure + | CommitsNotFound String String + | StackInPackageIndexNotFound + | VersionWithNoRevision + deriving Show - sourceOpts = SourceOpts - <$> ((\fromGit repo branch -> if fromGit then Just (repo, branch) else Nothing) - <$> switch - ( long "git" - <> help "Clone from Git instead of downloading from Hackage (more dangerous)" ) - <*> strOption - ( long "git-repo" - <> help "Clone from specified git repository" - <> value "https://github.com/commercialhaskell/stack" - <> showDefault ) - <*> strOption - ( long "git-branch" - <> help "Clone from this git branch" - <> value "master" - <> showDefault )) +instance Pretty UpgradePrettyException where + pretty SnapshotOptionInvalid = + "[S-8761]" + <> line + <> fillSep + [ "The" + , style Shell "--snapshot" + , flow "option cannot be used with Stack's" + , style Shell "upgrade" + , "command." + ] + pretty NeitherBinaryOrSourceSpecified = + "[S-3642]" + <> line + <> flow "You must allow either binary or source upgrade paths." + pretty ExecutableFailure = + "[S-8716]" + <> line + <> flow "Non-success exit code from running newly downloaded executable." + pretty (CommitsNotFound branch repo) = + "[S-7114]" + <> line + <> fillSep + [ flow "No commits found for branch" + , style Current (fromString branch) + , flow "on repo" + , style Url (fromString repo) <> "." + ] + pretty StackInPackageIndexNotFound = + "[S-9668]" + <> line + <> flow "No Stack version found in package indices." + pretty VersionWithNoRevision = + "[S-6648]" + <> line + <> flow "Latest version with no revision." 
-data BinaryOpts = BinaryOpts - { _boPlatform :: !(Maybe String) - , _boForce :: !Bool - -- ^ force a download, even if the downloaded version is older - -- than what we are - , _boVersion :: !(Maybe String) - -- ^ specific version to download - , _boGithubOrg :: !(Maybe String) - , _boGithubRepo :: !(Maybe String) - } - deriving Show -newtype SourceOpts = SourceOpts (Maybe (String, String)) -- repo and branch - deriving Show +instance Exception UpgradePrettyException -data UpgradeOpts = UpgradeOpts - { _uoBinary :: !(Maybe BinaryOpts) - , _uoSource :: !(Maybe SourceOpts) - } - deriving Show +-- | Function underlying the @stack upgrade@ command. +upgradeCmd :: UpgradeOpts -> RIO Runner () +upgradeCmd upgradeOpts = do + go <- view globalOptsL + case go.snapshot of + Just _ -> prettyThrowIO SnapshotOptionInvalid + Nothing -> withGlobalProject $ upgrade maybeGitHash upgradeOpts -upgrade :: Maybe String -- ^ git hash at time of building, if known - -> UpgradeOpts - -> RIO Runner () -upgrade builtHash (UpgradeOpts mbo mso) = - case (mbo, mso) of - -- FIXME It would be far nicer to capture this case in the - -- options parser itself so we get better error messages, but - -- I can't think of a way to make it happen. - (Nothing, Nothing) -> throwString "You must allow either binary or source upgrade paths" - (Just bo, Nothing) -> binary bo - (Nothing, Just so) -> source so - -- See #2977 - if --git or --git-repo is specified, do source upgrade. - (_, Just so@(SourceOpts (Just _))) -> source so - (Just bo, Just so) -> binary bo `catchAny` \e -> do - prettyWarnL - [ flow "Exception occured when trying to perform binary upgrade:" - , fromString . 
show $ e - , line <> flow "Falling back to source upgrade" - ] - - source so - where - binary bo = binaryUpgrade bo - source so = sourceUpgrade builtHash so +upgrade :: + Maybe String -- ^ git hash at time of building, if known + -> UpgradeOpts + -> RIO Runner () +upgrade builtHash (UpgradeOpts mbo mso) = case (mbo, mso) of + -- FIXME It would be far nicer to capture this case in the options parser + -- itself so we get better error messages, but I can't think of a way to + -- make it happen. + (Nothing, Nothing) -> prettyThrowIO NeitherBinaryOrSourceSpecified + (Just bo, Nothing) -> binary bo + (Nothing, Just so) -> source so + -- See #2977 - if --git or --git-repo is specified, do source upgrade. + (_, Just so@(SourceOpts (Just _))) -> source so + (Just bo, Just so) -> binary bo `catchAny` \e -> do + prettyWarn $ + flow "When trying to perform binary upgrade, Stack encountered the \ + \following error:" + <> blankLine + <> ppException e + <> blankLine + <> flow "Falling back to source upgrade." 
+ source so + where + binary = binaryUpgrade + source = sourceUpgrade builtHash binaryUpgrade :: BinaryOpts -> RIO Runner () -binaryUpgrade (BinaryOpts mplatform force' mver morg mrepo) = withConfig NoReexec $ do +binaryUpgrade (BinaryOpts mplatform force' onlyLocalBin mver morg mrepo) = + withConfig NoReexec $ do platforms0 <- case mplatform of Nothing -> preferredPlatforms - Just p -> return [("windows" `T.isInfixOf` T.pack p, p)] + Just p -> pure [("windows" `T.isInfixOf` T.pack p, p)] archiveInfo <- downloadStackReleaseInfo morg mrepo mver - let mdownloadVersion = getDownloadVersion archiveInfo force = case mver of Nothing -> force' Just _ -> True -- specifying a version implies we're forcing things isNewer <- - case mdownloadVersion of - Nothing -> do - prettyErrorL $ - flow "Unable to determine upstream version from Github metadata" - : - [ line <> flow "Rerun with --force-download to force an upgrade" - | not force] - return False - Just downloadVersion -> do - prettyInfoL - [ flow "Current Stack version:" - , fromString (versionString stackVersion) <> "," - , flow "available download version:" - , fromString (versionString downloadVersion) - ] - return $ downloadVersion > stackVersion - + case mdownloadVersion of + Nothing -> do + prettyError $ + flow "Unable to determine upstream version from GitHub metadata." + <> if force + then mempty + else + line + <> fillSep + [ flow "Rerun with" + , style Shell "--force-download" + , flow "to force an upgrade." + ] + pure False + Just downloadVersion -> do + prettyInfoL + [ flow "Current Stack version:" + , fromString (versionString stackVersion) <> ";" + , flow "available download version:" + , fromString (versionString downloadVersion) <> "." 
+ ] + pure $ downloadVersion > stackVersion toUpgrade <- case (force, isNewer) of - (False, False) -> do - prettyInfoS "Skipping binary upgrade, you are already running the most recent version" - return False - (True, False) -> do - prettyInfoS "Forcing binary upgrade" - return True - (_, True) -> do - prettyInfoS "Newer version detected, downloading" - return True + (False, False) -> do + prettyInfoS "Skipping binary upgrade, you are already running the most \ + \recent version." + pure False + (True, False) -> do + prettyInfoS "Forcing binary upgrade." + pure True + (_, True) -> do + prettyInfoS "Newer version detected, downloading." + pure True when toUpgrade $ do - config <- view configL - downloadStackExe platforms0 archiveInfo (configLocalBin config) True $ \tmpFile -> do + config <- view configL + downloadStackExe + platforms0 archiveInfo config.localBin (not onlyLocalBin) $ + \tmpFile -> do -- Sanity check! ec <- rawSystem (toFilePath tmpFile) ["--version"] + unless (ec == ExitSuccess) (prettyThrowIO ExecutableFailure) - unless (ec == ExitSuccess) - $ throwString "Non-success exit code from running newly downloaded executable" - -sourceUpgrade - :: Maybe String +sourceUpgrade :: + Maybe String -> SourceOpts -> RIO Runner () sourceUpgrade builtHash (SourceOpts gitRepo) = withSystemTempDir "stack-upgrade" $ \tmp -> do mdir <- case gitRepo of Just (repo, branch) -> do - remote <- liftIO $ System.Process.readProcess "git" ["ls-remote", repo, branch] [] + remote <- liftIO $ System.Process.readProcess + "git" + ["ls-remote", repo, branch] + [] latestCommit <- case words remote of - [] -> throwString $ "No commits found for branch " ++ branch ++ " on repo " ++ repo - x:_ -> return x + [] -> prettyThrowIO $ CommitsNotFound branch repo + x:_ -> pure x when (isNothing builtHash) $ - prettyWarnS $ - "Information about the commit this version of stack was " - <> "built from is not available due to how it was built. 
" - <> "Will continue by assuming an upgrade is needed " - <> "because we have no information to the contrary." + prettyWarnS + "Information about the commit this version of Stack was built from \ + \is not available due to how it was built. Will continue by \ + \assuming an upgrade is needed because we have no information to \ + \the contrary." if builtHash == Just latestCommit then do - prettyInfoS "Already up-to-date, no upgrade required" - return Nothing + prettyInfoS "Already up-to-date, no upgrade required." + pure Nothing else do - prettyInfoS "Cloning stack" - -- NOTE: "--recursive" was added after v1.0.0 (and before the - -- next release). This means that we can't use submodules in - -- the stack repo until we're comfortable with "stack upgrade - -- --git" not working for earlier versions. - let args = [ "clone", repo , "stack", "--depth", "1", "--recursive", "--branch", branch] - withWorkingDir (toFilePath tmp) $ proc "git" args runProcess_ - -- On Windows 10, an upstream issue with the `git clone` command - -- means that command clears, but does not then restore, the - -- ENABLE_VIRTUAL_TERMINAL_PROCESSING flag for native terminals. - -- The following hack re-enables the lost ANSI-capability. - when osIsWindows $ - void $ liftIO $ hSupportsANSIWithoutEmulation stdout - return $ Just $ tmp relDirStackProgName - -- We need to access the Pantry database to find out about the - -- latest Stack available on Hackage. We first use a standard - -- Config to do this, and once we have the source load up the - -- stack.yaml from inside that source. + prettyInfoS "Cloning stack." + -- NOTE: "--recursive" was added after v1.0.0 (and before the next + -- release). This means that we can't use submodules in the Stack + -- repo until we're comfortable with "stack upgrade --git" not + -- working for earlier versions. 
+ let args = + [ "clone" + , repo + , "stack" + , "--depth" + , "1" + , "--recursive" + , "--branch" + , branch + ] + withWorkingDir (toFilePath tmp) $ proc "git" args runProcess_ + pure $ Just $ tmp relDirStackProgName + -- We need to access the Pantry database to find out about the latest + -- Stack available on Hackage. We first use a standard Config to do this, + -- and once we have the source load up the stack.yaml from inside that + -- source. Nothing -> withConfig NoReexec $ do - void $ updateHackageIndex - $ Just "Updating index to make sure we find the latest Stack version" - mversion <- getLatestHackageVersion YesRequireHackageIndex "stack" UsePreferredVersions + void + $ updateHackageIndex + $ Just "Updating index to make sure we find the latest Stack version." + mversion <- getLatestHackageVersion + YesRequireHackageIndex + "stack" + UsePreferredVersions (PackageIdentifierRevision _ version _) <- case mversion of - Nothing -> throwString "No stack found in package indices" + Nothing -> prettyThrowIO StackInPackageIndexNotFound Just version -> pure version - - if version <= mkVersion' Paths.version - then do - prettyInfoS "Already at latest version, no upgrade required" - return Nothing - else do - suffix <- parseRelDir $ "stack-" ++ versionString version - let dir = tmp suffix - mrev <- getLatestHackageRevision YesRequireHackageIndex "stack" version - case mrev of - Nothing -> throwString "Latest version with no revision" - Just (_rev, cfKey, treeKey) -> do - let ident = PackageIdentifier "stack" version - unpackPackageLocation dir $ PLIHackage ident cfKey treeKey - pure $ Just dir + if version <= stackVersion + then do + prettyInfoS "Already at latest version, no upgrade required." 
+ pure Nothing + else do + suffix <- parseRelDir $ "stack-" ++ versionString version + let dir = tmp suffix + mrev <- getLatestHackageRevision + YesRequireHackageIndex + "stack" + version + case mrev of + Nothing -> prettyThrowIO VersionWithNoRevision + Just (_rev, cfKey, treeKey) -> do + let ident = PackageIdentifier "stack" version + unpackPackageLocation dir $ PLIHackage ident cfKey treeKey + pure $ Just dir let modifyGO dir go = go - { globalResolver = Nothing -- always use the resolver settings in the stack.yaml file - , globalStackYaml = SYLOverride $ dir stackDotYaml - } - boptsCLI = defaultBuildOptsCLI - { boptsCLITargets = ["stack"] + { snapshot = Nothing -- always use the snapshot settings in the + -- stack.yaml file + , stackYaml = SYLOverride $ dir stackDotYaml } + boptsCLI = defaultBuildOptsCLI { targetsCLI = ["stack"] } forM_ mdir $ \dir -> - local (over globalOptsL (modifyGO dir)) $ - withConfig NoReexec $ withEnvConfig AllowNoTargets boptsCLI $ - local (set (buildOptsL.buildOptsInstallExesL) True) $ - build Nothing + local (over globalOptsL (modifyGO dir)) + $ withConfig NoReexec + $ withEnvConfig AllowNoTargets boptsCLI + $ local (set (buildOptsL . buildOptsInstallExesL) True) + $ build Nothing diff --git a/src/Stack/Upload.hs b/src/Stack/Upload.hs index 2743d659c0..413d6b289f 100644 --- a/src/Stack/Upload.hs +++ b/src/Stack/Upload.hs @@ -1,79 +1,334 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TupleSections #-} --- | Provide ability to upload tarballs to Hackage. +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : Stack.Upload +Description : Types and functions related to Stack's @upload@ command. 
+License : BSD-3-Clause + +Types and functions related to Stack's @upload@ command. +-} + module Stack.Upload - ( -- * Upload - upload - , uploadBytes - , uploadRevision - -- * Credentials - , HackageCreds - , loadCreds - , writeFilePrivate - ) where + ( -- * Upload + UploadOpts (..) + , SDistOpts (..) + , UploadContent (..) + , UploadVariant (..) + , uploadCmd + , upload + , uploadBytes + , uploadRevision + -- * Credentials + , HackageCreds + , HackageAuth (..) + , HackageKey (..) + , loadAuth + , writeFilePrivate + -- * Internal + , maybeGetHackageKey + ) where +import Conduit ( mapOutput, sinkList ) +import Data.Aeson + ( FromJSON (..), ToJSON (..), (.:), (.=), decode' + , fromEncoding, object, toEncoding, withObject + ) +import Data.ByteString.Builder ( lazyByteString ) +import qualified Data.ByteString.Char8 as S +import qualified Data.ByteString.Lazy as L +import qualified Data.Conduit.Binary as CB +import qualified Data.Text as T +import Network.HTTP.StackClient + ( Request, RequestBody (RequestBodyLBS), Response + , applyDigestAuth, displayDigestAuthException, formDataBody + , getGlobalManager, getResponseBody, getResponseStatusCode + , httpNoBody, method, methodPost, methodPut, parseRequest + , partBS, partFileRequestBody, partLBS, requestBody + , setRequestHeader, setRequestHeaders, withResponse + ) +import Path ( (), addExtension, parseRelFile ) +import Path.IO ( resolveDir', resolveFile' ) +import qualified Path.IO as Path +import Stack.Constants ( isStackUploadDisabled ) +import Stack.Constants.Config ( distDirFromDir ) import Stack.Prelude -import Data.Aeson (FromJSON (..), - ToJSON (..), - decode', toEncoding, fromEncoding, - object, withObject, - (.:), (.=)) -import Data.ByteString.Builder (lazyByteString) -import qualified Data.ByteString.Char8 as S -import qualified Data.ByteString.Lazy as L -import qualified Data.Conduit.Binary as CB -import qualified Data.Text as T -import Network.HTTP.StackClient (Request, RequestBody(RequestBodyLBS), Response, 
withResponse, httpNoBody, getGlobalManager, getResponseStatusCode, - getResponseBody, - setRequestHeader, - parseRequest, - formDataBody, partFileRequestBody, - partBS, partLBS, - applyDigestAuth, - displayDigestAuthException) -import Stack.Types.Config -import System.Directory (createDirectoryIfMissing, - removeFile, renameFile) -import System.Environment (lookupEnv) -import System.FilePath ((), takeFileName, takeDirectory) -import System.IO (putStrLn, putStr, print) -- TODO remove putStrLn, use logInfo -import System.PosixCompat.Files (setFileMode) +import Stack.Runners + ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) +import Stack.SDist + ( SDistOpts (..), checkSDistTarball, checkSDistTarball' + , getSDistTarball, readLocalPackage + ) +import Stack.Types.Config ( Config (..), configL, stackRootL ) +import qualified Stack.Types.Config as Config +import Stack.Types.EnvConfig ( HasEnvConfig ) +import Stack.Types.Package ( LocalPackage (..), packageIdentifier ) +import Stack.Types.Runner ( Runner ) +import Stack.Types.UploadOpts ( UploadOpts (..), UploadVariant (..) ) +import System.Directory + ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist + , removeFile, renameFile + ) +import System.Environment ( lookupEnv ) +import qualified System.FilePath as FP +import System.PosixCompat.Files ( setFileMode ) + +-- | Type representing \'pretty\' exceptions thrown by functions exported by the +-- "Stack.Upload" module. 
+data UploadPrettyException + = AuthenticationFailure + | ArchiveUploadFailure !Int ![String] !String + | DocsTarballInvalid ![(String, Path Abs File)] + | ItemsInvalid ![FilePath] + | NoItemSpecified !String + | PackageDirectoryInvalid ![FilePath] + | PackageIdNotSpecifiedForDocsUploadBug + | PackageIdSpecifiedForPackageUploadBug + | TarGzFileNameInvalidBug !String + deriving Show + +instance Pretty UploadPrettyException where + pretty AuthenticationFailure = + "[S-2256]" + <> line + <> flow "authentification failure" + <> line + <> flow "Authentication failure uploading to server" + pretty (ArchiveUploadFailure code res tarName) = + "[S-6108]" + <> line + <> flow "unhandled status code:" <+> fromString (show code) + <> line + <> flow "Upload failed on" <+> style File (fromString tarName) + <> line + <> vsep (map string res) + pretty (DocsTarballInvalid invalidItems) = + "[S-2837]" + <> line + <> flow "Stack can't find:" + <> line + <> invalidList + where + invalidItem (pkgIdName, tarGzFile) = fillSep + [ pretty tarGzFile + , "for" + , style Current (fromString pkgIdName) <> "." + ] + invalidList = bulletedList $ map invalidItem invalidItems + pretty (ItemsInvalid invalidItems) = + "[S-3179]" + <> line + <> flow "For package upload, Stack expects a list of relative paths to \ + \tosdist tarballs or package directories. Stack can't find:" + <> line + <> invalidList + where + invalidList = bulletedList $ map (style File . fromString) invalidItems + pretty (NoItemSpecified subject) = + "[S-3030]" + <> line + <> fillSep + [ flow "An item must be specified. To upload" + , flow subject + , flow "please run" + , style Shell "stack upload ." + , flow "(with the period at the end)." + ] + pretty (PackageDirectoryInvalid invalidItems) = + "[S-5908]" + <> line + <> flow "For documentation upload, Stack expects a list of relative paths \ + \to package directories. Stack can't find:" + <> line + <> invalidList + where + invalidList = bulletedList $ map (style Current . 
fromString) invalidItems + pretty PackageIdNotSpecifiedForDocsUploadBug = bugPrettyReport "[S-7274]" $ + flow "uploadBytes: Documentation upload but package identifier not \ + \specified." + pretty PackageIdSpecifiedForPackageUploadBug = bugPrettyReport "[S-5860]" $ + flow "uploadBytes: Package upload but package identifier specified." + pretty (TarGzFileNameInvalidBug name) = bugPrettyReport "[S-5955]" $ + fillSep + [ flow "uploadCmd: the name of the" + , fromString name <> ".tar.gz" + , flow "file could not be parsed." + ] + +instance Exception UploadPrettyException + +-- | Type representing forms of content for upload to Hackage. +data UploadContent + = SDist + -- ^ Content in the form of an sdist tarball. + | DocArchive + -- ^ Content in the form of an archive file of package documentation. + +-- | Function underlying the @stack upload@ command. Upload to Hackage. +uploadCmd :: UploadOpts -> RIO Runner () +uploadCmd (UploadOpts [] uoDocumentation _ _ _ _ _ _) = do + let subject = if uoDocumentation + then "documentation for the current package," + else "the current package," + prettyThrowIO $ NoItemSpecified subject +uploadCmd uo = do + let setSaveHackageCreds config = + let saveHackageCreds = config.saveHackageCreds <> uo.saveHackageCreds + in config { Config.saveHackageCreds = saveHackageCreds } + withConfig YesReexec $ local setSaveHackageCreds $ withDefaultEnvConfig $ do + config <- view configL + let hackageUrl = T.unpack config.hackageBaseUrl + if uo.documentation + then do + (dirs, invalid) <- + liftIO $ partitionM doesDirectoryExist uo.itemsToWorkWith + unless (null invalid) $ + prettyThrowIO $ PackageDirectoryInvalid invalid + (failed, items) <- partitionEithers <$> forM dirs checkDocsTarball + unless (null failed) $ do + prettyThrowIO $ DocsTarballInvalid failed + getCreds <- memoizeRef $ loadAuth config + forM_ items $ \(pkgIdName, tarGzFile) -> do + creds <- runMemoized getCreds + upload + hackageUrl + creds + DocArchive + (Just pkgIdName) + 
(toFilePath tarGzFile) + uo.uploadVariant + else do + (files, nonFiles) <- + liftIO $ partitionM doesFileExist uo.itemsToWorkWith + (dirs, invalid) <- liftIO $ partitionM doesDirectoryExist nonFiles + unless (null invalid) $ do + prettyThrowIO $ ItemsInvalid invalid + let sdistOpts = SDistOpts + uo.itemsToWorkWith + uo.pvpBounds + uo.check + uo.buildPackage + uo.tarPath + getCreds <- memoizeRef $ loadAuth config + mapM_ (resolveFile' >=> checkSDistTarball sdistOpts) files + forM_ files $ \file -> do + tarFile <- resolveFile' file + creds <- runMemoized getCreds + upload + hackageUrl + creds + SDist + Nothing + (toFilePath tarFile) + uo.uploadVariant + forM_ dirs $ \dir -> do + pkgDir <- resolveDir' dir + (tarName, tarBytes, mcabalRevision) <- + getSDistTarball uo.pvpBounds pkgDir + checkSDistTarball' sdistOpts tarName tarBytes + creds <- runMemoized getCreds + uploadBytes + hackageUrl + creds + SDist + Nothing + tarName + uo.uploadVariant + tarBytes + forM_ mcabalRevision $ uncurry $ uploadRevision hackageUrl creds + where + checkDocsTarball :: + HasEnvConfig env + => FilePath + -> RIO env (Either (String, Path Abs File) (String, Path Abs File)) + checkDocsTarball dir = do + pkgDir <- resolveDir' dir + distDir <- distDirFromDir pkgDir + lp <- readLocalPackage pkgDir + let pkgId = packageIdentifier lp.package + pkgIdName = packageIdentifierString pkgId + name = pkgIdName <> "-docs" + tarGzFileName <- maybe + (prettyThrowIO $ TarGzFileNameInvalidBug name) + pure + ( do nameRelFile <- parseRelFile name + addExtension ".gz" =<< addExtension ".tar" nameRelFile + ) + let tarGzFile = distDir Path. tarGzFileName + isFile <- Path.doesFileExist tarGzFile + pure $ (if isFile then Right else Left) (pkgIdName, tarGzFile) + partitionM _ [] = pure ([], []) + partitionM f (x:xs) = do + r <- f x + (as, bs) <- partitionM f xs + pure $ if r then (x:as, bs) else (as, x:bs) + +-- | Type representing Hackage API authentification tokens. 
+newtype HackageKey = HackageKey Text + deriving (Eq, Show) -- | Username and password to log into Hackage. -- -- Since 0.1.0.0 data HackageCreds = HackageCreds - { hcUsername :: !Text - , hcPassword :: !Text - , hcCredsFile :: !FilePath - } - deriving Show + { username :: !Text + , password :: !Text + , credsFile :: !FilePath + } + deriving (Eq, Show) + +-- | Type representing Hackage authentifications +data HackageAuth + = HAKey HackageKey + -- ^ With a Hackage API authentification token registered by a user. + | HACreds HackageCreds + -- ^ With a Hackage user's credentials. + deriving (Eq, Show) instance ToJSON HackageCreds where - toJSON (HackageCreds u p _) = object - [ "username" .= u - , "password" .= p - ] + toJSON (HackageCreds u p _) = object + [ "username" .= u + , "password" .= p + ] + instance FromJSON (FilePath -> HackageCreds) where - parseJSON = withObject "HackageCreds" $ \o -> HackageCreds - <$> o .: "username" - <*> o .: "password" + parseJSON = withObject "HackageCreds" $ \o -> HackageCreds + <$> o .: "username" + <*> o .: "password" withEnvVariable :: Text -> IO Text -> IO Text -withEnvVariable varName fromPrompt = lookupEnv (T.unpack varName) >>= maybe fromPrompt (pure . T.pack) +withEnvVariable varName fromPrompt = + lookupEnv (T.unpack varName) >>= maybe fromPrompt (pure . T.pack) + +-- | Optionally, load Hackage API authentification token from the @HACKAGE_KEY@ +-- environment variable, if it exists. +maybeGetHackageKey :: RIO m (Maybe HackageKey) +maybeGetHackageKey = + liftIO $ fmap (HackageKey . T.pack) <$> lookupEnv "HACKAGE_KEY" + +-- | Load Hackage authentification from the environment, if applicable, or from +-- the given configuration. +loadAuth :: (HasLogFunc m, HasTerm m) => Config -> RIO m HackageAuth +loadAuth config = maybeGetHackageKey >>= \case + Just key -> do + prettyInfoL + [ style Shell "HACKAGE_KEY" + , flow "environment variable found, using that for credentials." 
+ ] + pure $ HAKey key + Nothing -> HACreds <$> loadUserAndPassword config -- | Load Hackage credentials, either from a save file or the command -- line. -- -- Since 0.1.0.0 -loadCreds :: Config -> IO HackageCreds -loadCreds config = do - fp <- credsFile config - elbs <- tryIO $ L.readFile fp +loadUserAndPassword :: HasTerm m => Config -> RIO m HackageCreds +loadUserAndPassword config = do + fp <- liftIO $ credsFile config + elbs <- liftIO $ tryIO $ L.readFile fp case either (const Nothing) Just elbs >>= \lbs -> (lbs, ) <$> decode' lbs of Nothing -> fromPrompt fp Just (lbs, mkCreds) -> do @@ -81,31 +336,41 @@ loadCreds config = do -- didn't do this writeFilePrivate fp $ lazyByteString lbs - unless (configSaveHackageCreds config) $ do - putStrLn "WARNING: You've set save-hackage-creds to false" - putStrLn "However, credentials were found at:" - putStrLn $ " " ++ fp - return $ mkCreds fp - where - fromPrompt fp = do - username <- withEnvVariable "HACKAGE_USERNAME" (prompt "Hackage username: ") - password <- withEnvVariable "HACKAGE_PASSWORD" (promptPassword "Hackage password: ") - let hc = HackageCreds - { hcUsername = username - , hcPassword = password - , hcCredsFile = fp - } - - when (configSaveHackageCreds config) $ do - shouldSave <- promptBool $ T.pack $ - "Save hackage credentials to file at " ++ fp ++ " [y/n]? " - putStrLn "NOTE: Avoid this prompt in the future by using: save-hackage-creds: false" - when shouldSave $ do - writeFilePrivate fp $ fromEncoding $ toEncoding hc - putStrLn "Saved!" - hFlush stdout - - return hc + unless (fromFirstTrue config.saveHackageCreds) $ do + prettyWarnL + [ flow "You've set" + , style Shell "save-hackage-creds" + , "to" + , style Shell "false" <> "." + , flow "However, credentials were found at:" + , style File (fromString fp) <> "." 
+ ] + pure $ mkCreds fp + where + fromPrompt :: HasTerm m => FilePath -> RIO m HackageCreds + fromPrompt fp = do + username <- liftIO $ withEnvVariable "HACKAGE_USERNAME" (prompt "Hackage username: ") + password <- liftIO $ withEnvVariable "HACKAGE_PASSWORD" (promptPassword "Hackage password: ") + let hc = HackageCreds + { username + , password + , credsFile = fp + } + + when (fromFirstTrue config.saveHackageCreds) $ do + shouldSave <- promptBool $ T.pack $ + "Save Hackage credentials to file at " ++ fp ++ " [y/n]? " + prettyNoteL + [ flow "Avoid this prompt in the future by using the configuration \ + \file option" + , style Shell (flow "save-hackage-creds: false") <> "." + ] + when shouldSave $ do + writeFilePrivate fp $ fromEncoding $ toEncoding hc + prettyInfoS "Saved!" + hFlush stdout + + pure hc -- | Write contents to a file which is always private. -- @@ -115,81 +380,174 @@ loadCreds config = do -- -- * https://github.com/commercialhaskell/stack/pull/4665 writeFilePrivate :: MonadIO m => FilePath -> Builder -> m () -writeFilePrivate fp builder = liftIO $ withTempFile (takeDirectory fp) (takeFileName fp) $ \fpTmp h -> do - -- Temp file is created such that only current user can read and write it. - -- See docs for openTempFile: https://www.stackage.org/haddock/lts-13.14/base-4.12.0.0/System-IO.html#v:openTempFile +writeFilePrivate fp builder = + liftIO $ withTempFile (FP.takeDirectory fp) (FP.takeFileName fp) $ \fpTmp h -> do + -- Temp file is created such that only current user can read and write it. + -- See docs for openTempFile: + -- https://www.stackage.org/haddock/lts-13.14/base-4.12.0.0/System-IO.html#v:openTempFile - -- Write to the file and close the handle. - hPutBuilder h builder - hClose h + -- Write to the file and close the handle. 
+ hPutBuilder h builder + hClose h - -- Make sure the destination file, if present, is writeable - void $ tryIO $ setFileMode fp 0o600 + -- Make sure the destination file, if present, is writeable + void $ tryIO $ setFileMode fp 0o600 - -- And atomically move - renameFile fpTmp fp + -- And atomically move + renameFile fpTmp fp credsFile :: Config -> IO FilePath credsFile config = do - let dir = toFilePath (view stackRootL config) "upload" - createDirectoryIfMissing True dir - return $ dir "credentials.json" + let dir = toFilePath (view stackRootL config) FP. "upload" + createDirectoryIfMissing True dir + pure $ dir FP. "credentials.json" + +addAPIKey :: HackageKey -> Request -> Request +addAPIKey (HackageKey key) = setRequestHeader + "Authorization" + [fromString $ "X-ApiKey" ++ " " ++ T.unpack key] + +applyAuth :: + (HasLogFunc m, HasTerm m) + => HackageAuth + -> Request + -> RIO m Request +applyAuth haAuth req0 = + case haAuth of + HAKey key -> pure (addAPIKey key req0) + HACreds creds -> applyCreds creds req0 -applyCreds :: HackageCreds -> Request -> IO Request +applyCreds :: + (HasLogFunc m, HasTerm m) + => HackageCreds + -> Request + -> RIO m Request applyCreds creds req0 = do - manager <- getGlobalManager - ereq <- applyDigestAuth - (encodeUtf8 $ hcUsername creds) - (encodeUtf8 $ hcPassword creds) - req0 - manager + manager <- liftIO getGlobalManager + ereq <- if isStackUploadDisabled + then do + debugRequest "applyCreds" req0 + pure (Left $ toException ExitSuccess ) + else + liftIO $ applyDigestAuth + (encodeUtf8 creds.username) + (encodeUtf8 creds.password) + req0 + manager case ereq of - Left e -> do - putStrLn "WARNING: No HTTP digest prompt found, this will probably fail" - case fromException e of - Just e' -> putStrLn $ displayDigestAuthException e' - Nothing -> print e - return req0 - Right req -> return req - --- | Upload a single tarball with the given @Uploader@. Instead of --- sending a file like 'upload', this sends a lazy bytestring. 
+ Left e -> do + prettyWarn $ + flow "No HTTP digest prompt found, this will probably fail." + <> blankLine + <> string + ( case fromException e of + Just e' -> displayDigestAuthException e' + Nothing -> displayException e + ) + pure req0 + Right req -> pure req + +-- | Upload a single tarball with the given @Uploader@. Instead of sending a +-- file like 'upload', this sends a lazy bytestring. -- -- Since 0.1.2.1 -uploadBytes :: String -- ^ Hackage base URL - -> HackageCreds - -> String -- ^ tar file name - -> L.ByteString -- ^ tar file contents - -> IO () -uploadBytes baseUrl creds tarName bytes = do - let req1 = setRequestHeader "Accept" ["text/plain"] - (fromString $ baseUrl <> "packages/") - formData = [partFileRequestBody "package" tarName (RequestBodyLBS bytes)] - req2 <- formDataBody formData req1 - req3 <- applyCreds creds req2 - putStr $ "Uploading " ++ tarName ++ "... " - hFlush stdout - withResponse req3 $ \res -> - case getResponseStatusCode res of - 200 -> putStrLn "done!" - 401 -> do - putStrLn "authentication failure" - handleIO (const $ return ()) (removeFile (hcCredsFile creds)) - throwString "Authentication failure uploading to server" - 403 -> do - putStrLn "forbidden upload" - putStrLn "Usually means: you've already uploaded this package/version combination" - putStrLn "Ignoring error and continuing, full message from Hackage below:\n" - printBody res - 503 -> do - putStrLn "service unavailable" - putStrLn "This error some times gets sent even though the upload succeeded" - putStrLn "Check on Hackage to see if your pacakge is present" - printBody res - code -> do - putStrLn $ "unhandled status code: " ++ show code - printBody res - throwString $ "Upload failed on " ++ tarName +uploadBytes :: + HasTerm m + => String -- ^ Hackage base URL + -> HackageAuth + -> UploadContent + -- ^ Form of the content to be uploaded. + -> Maybe String + -- ^ Optional package identifier name, applies only to the upload of + -- documentation. 
+ -> String -- ^ tar file name + -> UploadVariant + -> L.ByteString -- ^ tar file contents + -> RIO m () +uploadBytes baseUrl auth contentForm mPkgIdName tarName uploadVariant bytes = do + (url, headers, uploadMethod) <- case contentForm of + SDist -> do + unless (isNothing mPkgIdName) $ + prettyThrowIO PackageIdSpecifiedForPackageUploadBug + let variant = case uploadVariant of + Publishing -> "" + Candidate -> "candidates/" + pure + ( baseUrl <> "packages/" <> variant + , [("Accept", "text/plain")] + , methodPost + ) + DocArchive -> case mPkgIdName of + Nothing -> prettyThrowIO PackageIdNotSpecifiedForDocsUploadBug + Just pkgIdName -> do + let variant = case uploadVariant of + Publishing -> "" + Candidate -> "candidate/" + pure + ( baseUrl <> "package/" <> pkgIdName <> "/" <> variant <> "docs" + , [ ("Content-Type", "application/x-tar") + , ("Content-Encoding", "gzip") + ] + , methodPut + ) + let req1 = setRequestHeaders headers (fromString url) + reqData = RequestBodyLBS bytes + formData = [partFileRequestBody "package" tarName reqData] + + req2 <- case contentForm of + SDist -> liftIO $ formDataBody formData req1 + DocArchive -> pure $ req1 { requestBody = reqData } + let req3 = req2 { method = uploadMethod } + req4 <- applyAuth auth req3 + prettyInfoL + [ "Uploading" + , style Current (fromString tarName) <> "..." + ] + hFlush stdout + if isStackUploadDisabled + then + debugRequest "uploadBytes" req4 + else + withRunInIO $ \runInIO -> withResponse req4 (runInIO . inner) + where + inner :: HasTerm m => Response (ConduitM () S.ByteString IO ()) -> RIO m () + inner res = + case getResponseStatusCode res of + 200 -> prettyInfoS "done!" 
+ 401 -> do + case auth of + HACreds creds -> + handleIO + (const $ pure ()) + (liftIO $ removeFile creds.credsFile) + _ -> pure () + prettyThrowIO AuthenticationFailure + 403 -> do + prettyError $ + "[S-2804]" + <> line + <> flow "forbidden upload" + <> line + <> flow "Usually means: you've already uploaded this package/version \ + \combination. Ignoring error and continuing. The full \ + \message from Hackage is below:" + <> blankLine + liftIO $ printBody res + 503 -> do + prettyError $ + "[S-4444]" + <> line + <> flow "service unavailable" + <> line + <> flow "This error some times gets sent even though the upload \ + \succeeded. Check on Hackage to see if your package is \ + \present. The full message form Hackage is below:" + <> blankLine + liftIO $ printBody res + code -> do + let resBody = mapOutput show (getResponseBody res) + resBody' <- liftIO $ runConduit $ resBody .| sinkList + prettyThrowIO (ArchiveUploadFailure code resBody' tarName) printBody :: Response (ConduitM () S.ByteString IO ()) -> IO () printBody res = runConduit $ getResponseBody res .| CB.sinkHandle stdout @@ -197,18 +555,32 @@ printBody res = runConduit $ getResponseBody res .| CB.sinkHandle stdout -- | Upload a single tarball with the given @Uploader@. -- -- Since 0.1.0.0 -upload :: String -- ^ Hackage base URL - -> HackageCreds - -> FilePath - -> IO () -upload baseUrl creds fp = uploadBytes baseUrl creds (takeFileName fp) =<< L.readFile fp - -uploadRevision :: String -- ^ Hackage base URL - -> HackageCreds - -> PackageIdentifier - -> L.ByteString - -> IO () -uploadRevision baseUrl creds ident@(PackageIdentifier name _) cabalFile = do +upload :: + (HasLogFunc m, HasTerm m) + => String -- ^ Hackage base URL + -> HackageAuth + -> UploadContent + -> Maybe String + -- ^ Optional package identifier name, applies only to the upload of + -- documentation. + -> FilePath + -- ^ Path to archive file. 
+ -> UploadVariant + -> RIO m () +upload baseUrl auth contentForm mPkgIdName fp uploadVariant = + uploadBytes + baseUrl auth contentForm mPkgIdName (FP.takeFileName fp) uploadVariant + =<< liftIO (L.readFile fp) + +-- | Upload a revised Cabal file for the given package. +uploadRevision :: + (HasLogFunc m, HasTerm m) + => String -- ^ Hackage base URL + -> HackageAuth + -> PackageIdentifier + -> L.ByteString + -> RIO m () +uploadRevision baseUrl auth ident@(PackageIdentifier name _) cabalFile = do req0 <- parseRequest $ concat [ baseUrl , "package/" @@ -222,5 +594,18 @@ uploadRevision baseUrl creds ident@(PackageIdentifier name _) cabalFile = do , partBS "publish" "on" ] req0 - req2 <- applyCreds creds req1 - void $ httpNoBody req2 + req2 <- applyAuth auth req1 + if isStackUploadDisabled + then + debugRequest "uploadRevision" req2 + else + void $ httpNoBody req2 + +debugRequest :: HasTerm env => String -> Request -> RIO env () +debugRequest callSite req = prettyInfo $ + fillSep + [ fromString callSite <> ":" + , flow "When enabled, would apply the following request:" + ] + <> line + <> fromString (show req) diff --git a/src/System/Process/Pager.hs b/src/System/Process/Pager.hs index 3c80125748..ac696351ce 100644 --- a/src/System/Process/Pager.hs +++ b/src/System/Process/Pager.hs @@ -1,63 +1,73 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ScopedTypeVariables, RankNTypes, DeriveDataTypeable #-} +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : System.Process.Pager +Description : Run external pagers (@$PAGER@, @less@, @more@). +License : BSD-3-Clause + +Run external pagers (@$PAGER@, @less@, @more@). +-} --- | Run external pagers (@$PAGER@, @less@, @more@). module System.Process.Pager ( pageWriter , pageText , PagerException (..) 
) where -import Stack.Prelude -import System.Directory (findExecutable) -import System.Environment (lookupEnv) -import System.Process ( createProcess, cmdspec, shell, proc, waitForProcess - , CmdSpec (ShellCommand, RawCommand) - , StdStream (CreatePipe) - , CreateProcess (std_in, close_fds, delegate_ctlc) - ) -import Control.Monad.Trans.Maybe (MaybeT (runMaybeT, MaybeT)) +import Control.Monad.Trans.Maybe ( MaybeT (runMaybeT, MaybeT) ) import qualified Data.Text.IO as T +import Stack.Prelude +import System.Directory ( findExecutable ) +import System.Environment ( lookupEnv ) +import System.Process + ( createProcess, cmdspec, shell, proc, waitForProcess + , CmdSpec (ShellCommand, RawCommand) + , StdStream (CreatePipe) + , CreateProcess (std_in, close_fds, delegate_ctlc) + ) + +-- | Type representing exceptions thrown by functions exported by the +-- "System.Process.Pager" module. +data PagerException + = PagerExitFailure CmdSpec Int + deriving Show + +instance Exception PagerException where + displayException (PagerExitFailure cmd n) = + let getStr (ShellCommand c) = c + getStr (RawCommand exePath _) = exePath + in concat + [ "Error: [S-9392]\n" + , "Pager (`" + , getStr cmd + , "') exited with non-zero status: " + , show n + ] -- | Run pager, providing a function that writes to the pager's input. 
pageWriter :: (Handle -> IO ()) -> IO () -pageWriter writer = - do mpager <- runMaybeT $ cmdspecFromEnvVar - <|> cmdspecFromExeName "less" - <|> cmdspecFromExeName "more" - case mpager of - Just pager -> - do (Just h,_,_,procHandle) <- createProcess pager - { std_in = CreatePipe - , close_fds = True - , delegate_ctlc = True - } - (_ :: Either IOException ()) <- try (do writer h - hClose h) - exit <- waitForProcess procHandle - case exit of - ExitSuccess -> return () - ExitFailure n -> throwIO (PagerExitFailure (cmdspec pager) n) - return () - Nothing -> writer stdout - where - cmdspecFromEnvVar = shell <$> MaybeT (lookupEnv "PAGER") - cmdspecFromExeName = - fmap (\path -> proc path []) . MaybeT . findExecutable +pageWriter writer = do + let alternatives = + cmdspecFromEnvVar + <|> cmdspecFromExeName "less" + <|> cmdspecFromExeName "more" + runMaybeT alternatives >>= \case + Just pager -> do + (Just h,_,_,procHandle) <- createProcess pager + { std_in = CreatePipe + , close_fds = True + , delegate_ctlc = True + } + (_ :: Either IOException ()) <- try (do writer h; hClose h) + waitForProcess procHandle >>= \case + ExitSuccess -> pure () + ExitFailure n -> throwIO (PagerExitFailure (cmdspec pager) n) + Nothing -> writer stdout + where + cmdspecFromEnvVar = shell <$> MaybeT (lookupEnv "PAGER") + cmdspecFromExeName = + fmap (\command -> proc command []) . MaybeT . findExecutable -- | Run pager to display a 'Text' pageText :: Text -> IO () pageText = pageWriter . flip T.hPutStr - --- | Exception running pager. 
-data PagerException = PagerExitFailure CmdSpec Int - deriving Typeable -instance Show PagerException where - show (PagerExitFailure cmd n) = - let - getStr (ShellCommand c) = c - getStr (RawCommand exePath _) = exePath - in - "Pager (`" ++ getStr cmd ++ "') exited with non-zero status: " ++ show n - -instance Exception PagerException diff --git a/src/main/BuildInfo.hs b/src/main/BuildInfo.hs deleted file mode 100644 index 00c2f69a8a..0000000000 --- a/src/main/BuildInfo.hs +++ /dev/null @@ -1,95 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} - -#ifdef USE_GIT_INFO -{-# LANGUAGE TemplateHaskell #-} -#endif - --- Extracted from Main so that the Main module does not use CPP or TH, --- and therefore doesn't need to be recompiled as often. -module BuildInfo - ( versionString' - , maybeGitHash - , hpackVersion - ) where - -import Stack.Prelude -import qualified Paths_stack as Meta -import qualified Distribution.Text as Cabal (display) -import Distribution.System (buildArch) - -#ifndef HIDE_DEP_VERSIONS -import qualified Build_stack -#endif - -#ifdef USE_GIT_INFO -import GitHash (giCommitCount, giHash, tGitInfoCwdTry) -#endif - -#ifdef USE_GIT_INFO -import Options.Applicative.Simple (simpleVersion) -#endif - -#ifdef USE_GIT_INFO -import Data.Version (versionBranch) -#else -import Data.Version (showVersion, versionBranch) -#endif - -versionString' :: String -#ifdef USE_GIT_INFO -versionString' = concat $ concat - [ [$(simpleVersion Meta.version)] - -- Leave out number of commits for --depth=1 clone - -- See https://github.com/commercialhaskell/stack/issues/792 - , case giCommitCount <$> $$tGitInfoCwdTry of - Left _ -> [] - Right 1 -> [] - Right count -> [" (", show count, " commits)"] - , [afterVersion] - ] -#else -versionString' = showVersion Meta.version ++ afterVersion -#endif - where - afterVersion = concat - [ preReleaseString - , ' ' : Cabal.display buildArch - , depsString - , warningString - ] - 
preReleaseString = - case versionBranch Meta.version of - (_:y:_) | even y -> " PRE-RELEASE" - (_:_:z:_) | even z -> " RELEASE-CANDIDATE" - _ -> "" -#ifdef HIDE_DEP_VERSIONS - depsString = " hpack-" ++ VERSION_hpack -#else - depsString = "\nCompiled with:\n" ++ unlines (map ("- " ++) Build_stack.deps) -#endif -#ifdef SUPPORTED_BUILD - warningString = "" -#else - warningString = unlines - [ "" - , "Warning: this is an unsupported build that may use different versions of" - , "dependencies and GHC than the officially released binaries, and therefore may" - , "not behave identically. If you encounter problems, please try the latest" - , "official build by running 'stack upgrade --force-download'." - ] -#endif - --- | If USE_GIT_INFO is enabled, the Git hash in the build directory, otherwise Nothing. -maybeGitHash :: Maybe String -maybeGitHash = -#ifdef USE_GIT_INFO - (either (const Nothing) (Just . giHash) $$tGitInfoCwdTry) -#else - Nothing -#endif - --- | Hpack version we're compiled against -hpackVersion :: String -hpackVersion = VERSION_hpack diff --git a/src/main/Main.hs b/src/main/Main.hs deleted file mode 100644 index ac8d42d7f9..0000000000 --- a/src/main/Main.hs +++ /dev/null @@ -1,892 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE RecordWildCards #-} - --- | Main stack tool entry point. 
- -module Main (main) where - -import BuildInfo -import Stack.Prelude hiding (Display (..)) -import Conduit (runConduitRes, sourceLazy, sinkFileCautious) -import Control.Monad.Trans.Except (ExceptT) -import Control.Monad.Writer.Lazy (Writer) -import Data.Attoparsec.Args (parseArgs, EscapingMode (Escaping)) -import Data.Attoparsec.Interpreter (getInterpreterArgs) -import Data.List -import qualified Data.Map.Strict as Map -import qualified Data.Set as Set -import qualified Data.Text as T -import Data.Version (showVersion) -import RIO.Process -import Distribution.Version (mkVersion') -import GHC.IO.Encoding (mkTextEncoding, textEncodingName) -import Options.Applicative -import Options.Applicative.Help (errorHelp, stringChunk, vcatChunks) -import Options.Applicative.Builder.Extra -import Options.Applicative.Complicated -import Pantry (loadSnapshot) -import Path -import Path.IO -import qualified Paths_stack as Meta -import RIO.PrettyPrint -import qualified RIO.PrettyPrint as PP (style) -import Stack.Build -import Stack.Build.Target (NeedTargets(..)) -import Stack.Clean (CleanCommand(..), CleanOpts(..), clean) -import Stack.Config -import Stack.ConfigCmd as ConfigCmd -import Stack.Constants -import Stack.Constants.Config -import Stack.Coverage -import qualified Stack.Docker as Docker -import Stack.Dot -import Stack.GhcPkg (findGhcPkgField) -import qualified Stack.Nix as Nix -import Stack.FileWatch -import Stack.Ghci -import Stack.Hoogle -import Stack.List -import Stack.Ls -import qualified Stack.IDE as IDE -import Stack.Init -import Stack.New -import Stack.Options.BuildParser -import Stack.Options.CleanParser -import Stack.Options.DockerParser -import Stack.Options.DotParser -import Stack.Options.ExecParser -import Stack.Options.GhciParser -import Stack.Options.GlobalParser - -import Stack.Options.HpcReportParser -import Stack.Options.NewParser -import Stack.Options.NixParser -import Stack.Options.ScriptParser -import Stack.Options.SDistParser -import Stack.Options.Utils 
-import qualified Stack.Path -import Stack.Runners -import Stack.Script -import Stack.SDist (getSDistTarball, checkSDistTarball, checkSDistTarball', SDistOpts(..)) -import Stack.Setup (withNewLocalBuildTargets) -import Stack.SetupCmd -import Stack.Types.Version -import Stack.Types.Config -import Stack.Types.NamedComponent -import Stack.Types.SourceMap -import Stack.Unpack -import Stack.Upgrade -import qualified Stack.Upload as Upload -import qualified System.Directory as D -import System.Environment (getProgName, getArgs, withArgs) -import System.FilePath (isValid, pathSeparator, takeDirectory) -import qualified System.FilePath as FP -import System.IO (hPutStrLn, hGetEncoding, hSetEncoding) -import System.Terminal (hIsTerminalDeviceOrMinTTY) - --- | Change the character encoding of the given Handle to transliterate --- on unsupported characters instead of throwing an exception -hSetTranslit :: Handle -> IO () -hSetTranslit h = do - menc <- hGetEncoding h - case fmap textEncodingName menc of - Just name - | '/' `notElem` name -> do - enc' <- mkTextEncoding $ name ++ "//TRANSLIT" - hSetEncoding h enc' - _ -> return () - -main :: IO () -main = do - -- Line buffer the output by default, particularly for non-terminal runs. - -- See https://github.com/commercialhaskell/stack/pull/360 - hSetBuffering stdout LineBuffering - hSetBuffering stdin LineBuffering - hSetBuffering stderr LineBuffering - hSetTranslit stdout - hSetTranslit stderr - args <- getArgs - progName <- getProgName - isTerminal <- hIsTerminalDeviceOrMinTTY stdout - -- On Windows, where applicable, defaultColorWhen has the side effect of - -- enabling ANSI for ANSI-capable native (ConHost) terminals, if not already - -- ANSI-enabled. 
- execExtraHelp args - Docker.dockerHelpOptName - (dockerOptsParser False) - ("Only showing --" ++ Docker.dockerCmdName ++ "* options.") - execExtraHelp args - Nix.nixHelpOptName - (nixOptsParser False) - ("Only showing --" ++ Nix.nixCmdName ++ "* options.") - - currentDir <- D.getCurrentDirectory - eGlobalRun <- try $ commandLineHandler currentDir progName False - case eGlobalRun of - Left (exitCode :: ExitCode) -> - throwIO exitCode - Right (globalMonoid,run) -> do - global <- globalOptsFromMonoid isTerminal globalMonoid - when (globalLogLevel global == LevelDebug) $ hPutStrLn stderr versionString' - case globalReExecVersion global of - Just expectVersion -> do - expectVersion' <- parseVersionThrowing expectVersion - unless (checkVersion MatchMinor expectVersion' (mkVersion' Meta.version)) - $ throwIO $ InvalidReExecVersion expectVersion (showVersion Meta.version) - _ -> return () - withRunnerGlobal global $ run `catch` \e -> - -- This special handler stops "stack: " from being printed before the - -- exception - case fromException e of - Just ec -> exitWith ec - Nothing -> do - logError $ fromString $ displayException e - exitFailure - --- Vertically combine only the error component of the first argument with the --- error component of the second. 
-vcatErrorHelp :: ParserHelp -> ParserHelp -> ParserHelp -vcatErrorHelp h1 h2 = h2 { helpError = vcatChunks [helpError h2, helpError h1] } - -commandLineHandler - :: FilePath - -> String - -> Bool - -> IO (GlobalOptsMonoid, RIO Runner ()) -commandLineHandler currentDir progName isInterpreter = complicatedOptions - (mkVersion' Meta.version) - (Just versionString') - hpackVersion - "stack - The Haskell Tool Stack" - "" - "stack's documentation is available at https://docs.haskellstack.org/" - (globalOpts OuterGlobalOpts) - (Just failureCallback) - addCommands - where - failureCallback f args = - case stripPrefix "Invalid argument" (fst (renderFailure f "")) of - Just _ -> if isInterpreter - then parseResultHandler args f - else secondaryCommandHandler args f - >>= interpreterHandler currentDir args - Nothing -> parseResultHandler args f - - parseResultHandler args f = - if isInterpreter - then do - let hlp = errorHelp $ stringChunk - (unwords ["Error executing interpreter command:" - , progName - , unwords args]) - handleParseResult (overFailure (vcatErrorHelp hlp) (Failure f)) - else handleParseResult (Failure f) - - addCommands = do - unless isInterpreter (do - addBuildCommand' "build" - "Build the package(s) in this directory/configuration" - buildCmd - (buildOptsParser Build) - addBuildCommand' "install" - "Shortcut for 'build --copy-bins'" - buildCmd - (buildOptsParser Install) - addCommand' "uninstall" - "DEPRECATED: This command performs no actions, and is present for documentation only" - uninstallCmd - (many $ strArgument $ metavar "IGNORED") - addBuildCommand' "test" - "Shortcut for 'build --test'" - buildCmd - (buildOptsParser Test) - addBuildCommand' "bench" - "Shortcut for 'build --bench'" - buildCmd - (buildOptsParser Bench) - addBuildCommand' "haddock" - "Shortcut for 'build --haddock'" - buildCmd - (buildOptsParser Haddock) - addCommand' "new" - (unwords [ "Create a new project from a template." - , "Run `stack templates' to see available templates." 
- , "Note: you can also specify a local file or a" - , "remote URL as a template." - ] ) - newCmd - newOptsParser - addCommand' "templates" - (unwords [ "Show how to find templates available for `stack new'." - , "`stack new' can accept a template from a remote repository" - , "(default: github), local file or remote URL." - , "Note: this downloads the help file." - ] ) - templatesCmd - (pure ()) - addCommand' "init" - "Create stack project config from cabal or hpack package specifications" - initCmd - initOptsParser - addCommand' "setup" - "Get the appropriate GHC for your project" - setupCmd - setupParser - addCommand' "path" - "Print out handy path information" - Stack.Path.path - Stack.Path.pathParser - addCommand' "ls" - "List command. (Supports snapshots, dependencies and stack's styles)" - lsCmd - lsParser - addCommand' "unpack" - "Unpack one or more packages locally" - unpackCmd - ((,) <$> some (strArgument $ metavar "PACKAGE") - <*> optional (textOption $ long "to" <> - help "Optional path to unpack the package into (will unpack into subdirectory)")) - addCommand' "update" - "Update the package index" - updateCmd - (pure ()) - addCommand' "upgrade" - "Upgrade to the latest stack" - upgradeCmd - upgradeOpts - addCommand' - "upload" - "Upload a package to Hackage" - uploadCmd - sdistOptsParser - addCommand' - "sdist" - "Create source distribution tarballs" - sdistCmd - sdistOptsParser - addCommand' "dot" - "Visualize your project's dependency graph using Graphviz dot" - dot - (dotOptsParser False) -- Default for --external is False. - addCommand' "ghc" - "Run ghc" - execCmd - (execOptsParser $ Just ExecGhc) - addCommand' "hoogle" - ("Run hoogle, the Haskell API search engine. Use the '-- ARGUMENT(S)' syntax " ++ - "to pass Hoogle arguments, e.g. stack hoogle -- --count=20, or " ++ - "stack hoogle -- server --local.") - hoogleCmd - ((,,,) <$> many (strArgument - (metavar "-- ARGUMENT(S) (e.g. 
stack hoogle -- server --local)")) - <*> boolFlags - True - "setup" - "If needed: install hoogle, build haddocks and generate a hoogle database" - idm - <*> switch - (long "rebuild" <> - help "Rebuild the hoogle database") - <*> switch - (long "server" <> - help "Start local Hoogle server")) - ) - - -- These are the only commands allowed in interpreter mode as well - addCommand' "exec" - "Execute a command. If the command is absent, the first of any arguments is taken as the command." - execCmd - (execOptsParser Nothing) - addCommand' "run" - "Build and run an executable. Defaults to the first available executable if none is provided as the first argument." - execCmd - (execOptsParser $ Just ExecRun) - addGhciCommand' "ghci" - "Run ghci in the context of package(s) (experimental)" - ghciCmd - ghciOptsParser - addGhciCommand' "repl" - "Run ghci in the context of package(s) (experimental) (alias for 'ghci')" - ghciCmd - ghciOptsParser - addCommand' "runghc" - "Run runghc" - execCmd - (execOptsParser $ Just ExecRunGhc) - addCommand' "runhaskell" - "Run runghc (alias for 'runghc')" - execCmd - (execOptsParser $ Just ExecRunGhc) - addCommand "script" - "Run a Stack Script" - globalFooter - scriptCmd - (\so gom -> - gom - { globalMonoidResolverRoot = First $ Just $ takeDirectory $ soFile so - }) - (globalOpts OtherCmdGlobalOpts) - scriptOptsParser - - unless isInterpreter (do - addCommand' "eval" - "Evaluate some haskell code inline. Shortcut for 'stack exec ghc -- -e CODE'" - evalCmd - (evalOptsParser "CODE") - addCommand' "clean" - "Delete build artefacts for the project packages." - cleanCmd - (cleanOptsParser Clean) - addCommand' "purge" - "Delete the project stack working directories (.stack-work by default). 
Shortcut for 'stack clean --full'" - cleanCmd - (cleanOptsParser Purge) - addCommand' "query" - "Query general build information (experimental)" - queryCmd - (many $ strArgument $ metavar "SELECTOR...") - addCommand' "list" - "List package id's in snapshot (experimental)" - listCmd - (many $ strArgument $ metavar "PACKAGE") - addSubCommands' - "ide" - "IDE-specific commands" - (let outputFlag = flag - IDE.OutputLogInfo - IDE.OutputStdout - (long "stdout" <> - help "Send output to stdout instead of the default, stderr") - cabalFileFlag = flag - IDE.ListPackageNames - IDE.ListPackageCabalFiles - (long "cabal-files" <> - help "Print paths to package cabal-files instead of package names") - in - do addCommand' - "packages" - "List all available local loadable packages" - idePackagesCmd - ((,) <$> outputFlag <*> cabalFileFlag) - addCommand' - "targets" - "List all available stack targets" - ideTargetsCmd - outputFlag) - addSubCommands' - Docker.dockerCmdName - "Subcommands specific to Docker use" - (do addCommand' Docker.dockerPullCmdName - "Pull latest version of Docker image from registry" - dockerPullCmd - (pure ()) - addCommand' "reset" - "Reset the Docker sandbox" - dockerResetCmd - (switch (long "keep-home" <> - help "Do not delete sandbox's home directory"))) - addSubCommands' - ConfigCmd.cfgCmdName - "Subcommands for accessing and modifying configuration values" - (do - addCommand' ConfigCmd.cfgCmdSetName - "Sets a field in the project's stack.yaml to value" - (withConfig NoReexec . cfgCmdSet) - configCmdSetParser - addCommand' ConfigCmd.cfgCmdEnvName - "Print environment variables for use in a shell" - (withConfig YesReexec . withDefaultEnvConfig . 
cfgCmdEnv) - configCmdEnvParser) - addSubCommands' - "hpc" - "Subcommands specific to Haskell Program Coverage" - (addCommand' "report" - "Generate unified HPC coverage report from tix files and project targets" - hpcReportCmd - hpcReportOptsParser) - ) - where - -- addCommand hiding global options - addCommand' :: String -> String -> (a -> RIO Runner ()) -> Parser a - -> AddCommand - addCommand' cmd title constr = - addCommand cmd title globalFooter constr (\_ gom -> gom) (globalOpts OtherCmdGlobalOpts) - - addSubCommands' :: String -> String -> AddCommand - -> AddCommand - addSubCommands' cmd title = - addSubCommands cmd title globalFooter (globalOpts OtherCmdGlobalOpts) - - -- Additional helper that hides global options and shows build options - addBuildCommand' :: String -> String -> (a -> RIO Runner ()) -> Parser a - -> AddCommand - addBuildCommand' cmd title constr = - addCommand cmd title globalFooter constr (\_ gom -> gom) (globalOpts BuildCmdGlobalOpts) - - -- Additional helper that hides global options and shows some ghci options - addGhciCommand' :: String -> String -> (a -> RIO Runner ()) -> Parser a - -> AddCommand - addGhciCommand' cmd title constr = - addCommand cmd title globalFooter constr (\_ gom -> gom) (globalOpts GhciCmdGlobalOpts) - - globalOpts :: GlobalOptsContext -> Parser GlobalOptsMonoid - globalOpts kind = - extraHelpOption hide progName (Docker.dockerCmdName ++ "*") Docker.dockerHelpOptName <*> - extraHelpOption hide progName (Nix.nixCmdName ++ "*") Nix.nixHelpOptName <*> - globalOptsParser currentDir kind - (if isInterpreter - -- Silent except when errors occur - see #2879 - then Just LevelError - else Nothing) - where hide = kind /= OuterGlobalOpts - - globalFooter = "Run 'stack --help' for global options that apply to all subcommands." - -type AddCommand = - ExceptT (RIO Runner ()) (Writer (Mod CommandFields (RIO Runner (), GlobalOptsMonoid))) () - --- | fall-through to external executables in `git` style if they exist --- (i.e. 
`stack something` looks for `stack-something` before --- failing with "Invalid argument `something'") -secondaryCommandHandler - :: [String] - -> ParserFailure ParserHelp - -> IO (ParserFailure ParserHelp) -secondaryCommandHandler args f = - -- don't even try when the argument looks like a path or flag - if elem pathSeparator cmd || "-" `isPrefixOf` head args - then return f - else do - mExternalExec <- D.findExecutable cmd - case mExternalExec of - Just ex -> withProcessContextNoLogging $ do - -- TODO show the command in verbose mode - -- hPutStrLn stderr $ unwords $ - -- ["Running", "[" ++ ex, unwords (tail args) ++ "]"] - _ <- exec ex (tail args) - return f - Nothing -> return $ fmap (vcatErrorHelp (noSuchCmd cmd)) f - where - -- FIXME this is broken when any options are specified before the command - -- e.g. stack --verbosity silent cmd - cmd = stackProgName ++ "-" ++ head args - noSuchCmd name = errorHelp $ stringChunk - ("Auxiliary command not found in path `" ++ name ++ "'") - -interpreterHandler - :: Monoid t - => FilePath - -> [String] - -> ParserFailure ParserHelp - -> IO (GlobalOptsMonoid, (RIO Runner (), t)) -interpreterHandler currentDir args f = do - -- args can include top-level config such as --extra-lib-dirs=... (set by - -- nix-shell) - we need to find the first argument which is a file, everything - -- afterwards is an argument to the script, everything before is an argument - -- to Stack - (stackArgs, fileArgs) <- spanM (fmap not . D.doesFileExist) args - case fileArgs of - (file:fileArgs') -> runInterpreterCommand file stackArgs fileArgs' - [] -> parseResultHandler (errorCombine (noSuchFile firstArg)) - where - firstArg = head args - - spanM _ [] = return ([], []) - spanM p xs@(x:xs') = do - r <- p x - if r - then do - (ys, zs) <- spanM p xs' - return (x:ys, zs) - else - return ([], xs) - - -- if the first argument contains a path separator then it might be a file, - -- or a Stack option referencing a file. 
In that case we only show the - -- interpreter error message and exclude the command related error messages. - errorCombine = - if pathSeparator `elem` firstArg - then overrideErrorHelp - else vcatErrorHelp - - overrideErrorHelp h1 h2 = h2 { helpError = helpError h1 } - - parseResultHandler fn = handleParseResult (overFailure fn (Failure f)) - noSuchFile name = errorHelp $ stringChunk - ("File does not exist or is not a regular file `" ++ name ++ "'") - - runInterpreterCommand path stackArgs fileArgs = do - progName <- getProgName - iargs <- getInterpreterArgs path - let parseCmdLine = commandLineHandler currentDir progName True - -- Implicit file arguments are put before other arguments that - -- occur after "--". See #3658 - cmdArgs = stackArgs ++ case break (== "--") iargs of - (beforeSep, []) -> beforeSep ++ ["--"] ++ [path] ++ fileArgs - (beforeSep, optSep : afterSep) -> - beforeSep ++ [optSep] ++ [path] ++ fileArgs ++ afterSep - -- TODO show the command in verbose mode - -- hPutStrLn stderr $ unwords $ - -- ["Running", "[" ++ progName, unwords cmdArgs ++ "]"] - (a,b) <- withArgs cmdArgs parseCmdLine - return (a,(b,mempty)) - -setupCmd :: SetupCmdOpts -> RIO Runner () -setupCmd sco@SetupCmdOpts{..} = withConfig YesReexec $ withBuildConfig $ do - (wantedCompiler, compilerCheck, mstack) <- - case scoCompilerVersion of - Just v -> return (v, MatchMinor, Nothing) - Nothing -> (,,) - <$> view wantedCompilerVersionL - <*> view (configL.to configCompilerCheck) - <*> (Just <$> view stackYamlL) - setup sco wantedCompiler compilerCheck mstack - -cleanCmd :: CleanOpts -> RIO Runner () -cleanCmd = withConfig NoReexec . withBuildConfig . clean - --- | Helper for build and install commands -buildCmd :: BuildOptsCLI -> RIO Runner () -buildCmd opts = do - when (any (("-prof" `elem`) . fromRight [] . 
parseArgs Escaping) (boptsCLIGhcOptions opts)) $ do - logError "Error: When building with stack, you should not use the -prof GHC option" - logError "Instead, please use --library-profiling and --executable-profiling" - logError "See: https://github.com/commercialhaskell/stack/issues/1015" - exitFailure - local (over globalOptsL modifyGO) $ - case boptsCLIFileWatch opts of - FileWatchPoll -> fileWatchPoll (inner . Just) - FileWatch -> fileWatch (inner . Just) - NoFileWatch -> inner Nothing - where - inner - :: Maybe (Set (Path Abs File) -> IO ()) - -> RIO Runner () - inner setLocalFiles = withConfig YesReexec $ withEnvConfig NeedTargets opts $ - Stack.Build.build setLocalFiles - -- Read the build command from the CLI and enable it to run - modifyGO = - case boptsCLICommand opts of - Test -> set (globalOptsBuildOptsMonoidL.buildOptsMonoidTestsL) (Just True) - Haddock -> set (globalOptsBuildOptsMonoidL.buildOptsMonoidHaddockL) (Just True) - Bench -> set (globalOptsBuildOptsMonoidL.buildOptsMonoidBenchmarksL) (Just True) - Install -> set (globalOptsBuildOptsMonoidL.buildOptsMonoidInstallExesL) (Just True) - Build -> id -- Default case is just Build - -uninstallCmd :: [String] -> RIO Runner () -uninstallCmd _ = do - prettyErrorL - [ flow "stack does not manage installations in global locations." - , flow "The only global mutation stack performs is executable copying." 
- , flow "For the default executable destination, please run" - , PP.style Shell "stack path --local-bin" - ] - liftIO exitFailure - --- | Unpack packages to the filesystem -unpackCmd :: ([String], Maybe Text) -> RIO Runner () -unpackCmd (names, Nothing) = unpackCmd (names, Just ".") -unpackCmd (names, Just dstPath) = withConfig NoReexec $ do - mresolver <- view $ globalOptsL.to globalResolver - mSnapshot <- forM mresolver $ \resolver -> do - concrete <- makeConcreteResolver resolver - loc <- completeSnapshotLocation concrete - loadSnapshot loc - dstPath' <- resolveDir' $ T.unpack dstPath - unpackPackages mSnapshot dstPath' names - --- | Update the package index -updateCmd :: () -> RIO Runner () -updateCmd () = withConfig NoReexec (void (updateHackageIndex Nothing)) - -upgradeCmd :: UpgradeOpts -> RIO Runner () -upgradeCmd upgradeOpts' = do - go <- view globalOptsL - case globalResolver go of - Just _ -> do - logError "You cannot use the --resolver option with the upgrade command" - liftIO exitFailure - Nothing -> - withGlobalProject $ - upgrade - maybeGitHash - upgradeOpts' - --- | Upload to Hackage -uploadCmd :: SDistOpts -> RIO Runner () -uploadCmd (SDistOpts [] _ _ _ _) = do - prettyErrorL - [ flow "To upload the current package, please run" - , PP.style Shell "stack upload ." - , flow "(with the period at the end)" - ] - liftIO exitFailure -uploadCmd sdistOpts = do - let partitionM _ [] = return ([], []) - partitionM f (x:xs) = do - r <- f x - (as, bs) <- partitionM f xs - return $ if r then (x:as, bs) else (as, x:bs) - (files, nonFiles) <- liftIO $ partitionM D.doesFileExist (sdoptsDirsToWorkWith sdistOpts) - (dirs, invalid) <- liftIO $ partitionM D.doesDirectoryExist nonFiles - withConfig YesReexec $ withDefaultEnvConfig $ do - unless (null invalid) $ do - let invalidList = bulletedList $ map (PP.style File . fromString) invalid - prettyErrorL - [ PP.style Shell "stack upload" - , flow "expects a list of sdist tarballs or package directories." 
- , flow "Can't find:" - , line <> invalidList - ] - exitFailure - when (null files && null dirs) $ do - prettyErrorL - [ PP.style Shell "stack upload" - , flow "expects a list of sdist tarballs or package directories, but none were specified." - ] - exitFailure - config <- view configL - let hackageUrl = T.unpack $ configHackageBaseUrl config - getCreds <- liftIO $ memoizeRef $ Upload.loadCreds config - mapM_ (resolveFile' >=> checkSDistTarball sdistOpts) files - forM_ - files - (\file -> - do tarFile <- resolveFile' file - liftIO $ do - creds <- runMemoized getCreds - Upload.upload hackageUrl creds (toFilePath tarFile)) - unless (null dirs) $ - forM_ dirs $ \dir -> do - pkgDir <- resolveDir' dir - (tarName, tarBytes, mcabalRevision) <- getSDistTarball (sdoptsPvpBounds sdistOpts) pkgDir - checkSDistTarball' sdistOpts tarName tarBytes - liftIO $ do - creds <- runMemoized getCreds - Upload.uploadBytes hackageUrl creds tarName tarBytes - forM_ mcabalRevision $ uncurry $ Upload.uploadRevision hackageUrl creds - -sdistCmd :: SDistOpts -> RIO Runner () -sdistCmd sdistOpts = - withConfig YesReexec $ withDefaultEnvConfig $ do - -- If no directories are specified, build all sdist tarballs. - dirs' <- if null (sdoptsDirsToWorkWith sdistOpts) - then do - dirs <- view $ buildConfigL.to (map ppRoot . Map.elems . smwProject . bcSMWanted) - when (null dirs) $ do - stackYaml <- view stackYamlL - prettyErrorL - [ PP.style Shell "stack sdist" - , flow "expects a list of targets, and otherwise defaults to all of the project's packages." - , flow "However, the configuration at" - , pretty stackYaml - , flow "contains no packages, so no sdist tarballs will be generated." 
- ] - exitFailure - return dirs - else mapM resolveDir' (sdoptsDirsToWorkWith sdistOpts) - forM_ dirs' $ \dir -> do - (tarName, tarBytes, _mcabalRevision) <- getSDistTarball (sdoptsPvpBounds sdistOpts) dir - distDir <- distDirFromDir dir - tarPath <- (distDir ) <$> parseRelFile tarName - ensureDir (parent tarPath) - runConduitRes $ - sourceLazy tarBytes .| - sinkFileCautious (toFilePath tarPath) - prettyInfoL [flow "Wrote sdist tarball to", pretty tarPath] - checkSDistTarball sdistOpts tarPath - forM_ (sdoptsTarPath sdistOpts) $ copyTarToTarPath tarPath tarName - where - copyTarToTarPath tarPath tarName targetDir = liftIO $ do - let targetTarPath = targetDir FP. tarName - D.createDirectoryIfMissing True $ FP.takeDirectory targetTarPath - D.copyFile (toFilePath tarPath) targetTarPath - --- | Execute a command. -execCmd :: ExecOpts -> RIO Runner () -execCmd ExecOpts {..} = - withConfig YesReexec $ withEnvConfig AllowNoTargets boptsCLI $ do - unless (null targets) $ Stack.Build.build Nothing - - config <- view configL - menv <- liftIO $ configProcessContextSettings config eoEnvSettings - withProcessContext menv $ do - -- Add RTS options to arguments - let argsWithRts args = if null eoRtsOptions - then args :: [String] - else args ++ ["+RTS"] ++ eoRtsOptions ++ ["-RTS"] - (cmd, args) <- case (eoCmd, argsWithRts eoArgs) of - (ExecCmd cmd, args) -> return (cmd, args) - (ExecRun, args) -> getRunCmd args - (ExecGhc, args) -> getGhcCmd eoPackages args - (ExecRunGhc, args) -> getRunGhcCmd eoPackages args - - runWithPath eoCwd $ exec cmd args - where - ExecOptsExtra {..} = eoExtra - - targets = concatMap words eoPackages - boptsCLI = defaultBuildOptsCLI - { boptsCLITargets = map T.pack targets - } - - -- return the package-id of the first package in GHC_PACKAGE_PATH - getPkgId name = do - pkg <- getGhcPkgExe - mId <- findGhcPkgField pkg [] name "id" - case mId of - Just i -> return (head $ words (T.unpack i)) - -- should never happen as we have already installed the packages 
- _ -> do - logError ("Could not find package id of package " <> fromString name) - exitFailure - - getPkgOpts pkgs = - map ("-package-id=" ++) <$> mapM getPkgId pkgs - - getRunCmd args = do - packages <- view $ buildConfigL.to (smwProject . bcSMWanted) - pkgComponents <- for (Map.elems packages) ppComponents - let executables = filter isCExe $ concatMap Set.toList pkgComponents - let (exe, args') = case args of - [] -> (firstExe, args) - x:xs -> case find (\y -> y == CExe (T.pack x)) executables of - Nothing -> (firstExe, args) - argExe -> (argExe, xs) - where - firstExe = listToMaybe executables - case exe of - Just (CExe exe') -> do - withNewLocalBuildTargets [T.cons ':' exe'] $ Stack.Build.build Nothing - return (T.unpack exe', args') - _ -> do - logError "No executables found." - exitFailure - - getGhcCmd pkgs args = do - pkgopts <- getPkgOpts pkgs - compiler <- view $ compilerPathsL.to cpCompiler - return (toFilePath compiler, pkgopts ++ args) - - getRunGhcCmd pkgs args = do - pkgopts <- getPkgOpts pkgs - interpret <- view $ compilerPathsL.to cpInterpreter - return (toFilePath interpret, pkgopts ++ args) - - runWithPath :: Maybe FilePath -> RIO EnvConfig () -> RIO EnvConfig () - runWithPath path callback = case path of - Nothing -> callback - Just p | not (isValid p) -> throwIO $ InvalidPathForExec p - Just p -> withUnliftIO $ \ul -> D.withCurrentDirectory p $ unliftIO ul callback - --- | Evaluate some haskell code inline. -evalCmd :: EvalOpts -> RIO Runner () -evalCmd EvalOpts {..} = execCmd execOpts - where - execOpts = - ExecOpts { eoCmd = ExecGhc - , eoArgs = ["-e", evalArg] - , eoExtra = evalExtra - } - --- | Run GHCi in the context of a project. 
-ghciCmd :: GhciOpts -> RIO Runner () -ghciCmd ghciOpts = - let boptsCLI = defaultBuildOptsCLI - -- using only additional packages, targets then get overriden in `ghci` - { boptsCLITargets = map T.pack (ghciAdditionalPackages ghciOpts) - , boptsCLIInitialBuildSteps = True - , boptsCLIFlags = ghciFlags ghciOpts - , boptsCLIGhcOptions = map T.pack (ghciGhcOptions ghciOpts) - } - in withConfig YesReexec $ withEnvConfig AllowNoTargets boptsCLI $ do - bopts <- view buildOptsL - -- override env so running of tests and benchmarks is disabled - let boptsLocal = bopts - { boptsTestOpts = (boptsTestOpts bopts) { toDisableRun = True } - , boptsBenchmarkOpts = (boptsBenchmarkOpts bopts) { beoDisableRun = True } - } - local (set buildOptsL boptsLocal) - (ghci ghciOpts) - --- | List packages in the project. -idePackagesCmd :: (IDE.OutputStream, IDE.ListPackagesCmd) -> RIO Runner () -idePackagesCmd = withConfig NoReexec . withBuildConfig . uncurry IDE.listPackages - --- | List targets in the project. -ideTargetsCmd :: IDE.OutputStream -> RIO Runner () -ideTargetsCmd = withConfig NoReexec . withBuildConfig . IDE.listTargets - --- | Pull the current Docker image. -dockerPullCmd :: () -> RIO Runner () -dockerPullCmd () = withConfig NoReexec $ Docker.preventInContainer Docker.pull - --- | Reset the Docker sandbox. -dockerResetCmd :: Bool -> RIO Runner () -dockerResetCmd = withConfig NoReexec . Docker.preventInContainer . Docker.reset - --- | Project initialization -initCmd :: InitOpts -> RIO Runner () -initCmd initOpts = do - pwd <- getCurrentDir - go <- view globalOptsL - withGlobalProject $ withConfig YesReexec (initProject pwd initOpts (globalResolver go)) - --- | Create a project directory structure and initialize the stack config. 
-newCmd :: (NewOpts,InitOpts) -> RIO Runner () -newCmd (newOpts,initOpts) = - withGlobalProject $ withConfig YesReexec $ do - dir <- new newOpts (forceOverwrite initOpts) - exists <- doesFileExist $ dir stackDotYaml - when (forceOverwrite initOpts || not exists) $ do - go <- view globalOptsL - initProject dir initOpts (globalResolver go) - --- | Display instructions for how to use templates -templatesCmd :: () -> RIO Runner () -templatesCmd () = withConfig NoReexec templatesHelp - --- | Query build information -queryCmd :: [String] -> RIO Runner () -queryCmd selectors = withConfig YesReexec $ withDefaultEnvConfig $ queryBuildInfo $ map T.pack selectors - --- | List packages -listCmd :: [String] -> RIO Runner () -listCmd names = withConfig NoReexec $ do - mresolver <- view $ globalOptsL.to globalResolver - mSnapshot <- forM mresolver $ \resolver -> do - concrete <- makeConcreteResolver resolver - loc <- completeSnapshotLocation concrete - loadSnapshot loc - listPackages mSnapshot names - --- | generate a combined HPC report -hpcReportCmd :: HpcReportOpts -> RIO Runner () -hpcReportCmd hropts = do - let (tixFiles, targetNames) = partition (".tix" `T.isSuffixOf`) (hroptsInputs hropts) - boptsCLI = defaultBuildOptsCLI - { boptsCLITargets = if hroptsAll hropts then [] else targetNames } - withConfig YesReexec $ withEnvConfig AllowNoTargets boptsCLI $ - generateHpcReportForTargets hropts tixFiles targetNames - -data MainException = InvalidReExecVersion String String - | InvalidPathForExec FilePath - deriving (Typeable) -instance Exception MainException -instance Show MainException where - show (InvalidReExecVersion expected actual) = concat - [ "When re-executing '" - , stackProgName - , "' in a container, the incorrect version was found\nExpected: " - , expected - , "; found: " - , actual] - show (InvalidPathForExec path) = concat - [ "Got an invalid --cwd argument for stack exec (" - , path - , ")"] diff --git a/src/setup-shim/StackSetupShim.hs 
b/src/setup-shim/StackSetupShim.hs index 1a6450feaf..a87ec81fc9 100644 --- a/src/setup-shim/StackSetupShim.hs +++ b/src/setup-shim/StackSetupShim.hs @@ -1,31 +1,165 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE PackageImports #-} + module StackSetupShim where + +-- Stack no longer supports Cabal < 2.2 and, consequently, GHC versions before +-- GHC 8.4 or base < 4.11.0.0. Consequently, we do not need to test for the +-- existence of the MIN_VERSION_Cabal macro (provided from GHC 8.0). + +import Data.List ( stripPrefix ) +import Distribution.ReadE ( ReadE (..) ) +import Distribution.Simple.Configure ( getPersistBuildConfig ) +-- Temporary, can be removed if initialBuildSteps restored to Cabal's API. +#if MIN_VERSION_Cabal(3,11,0) +import Distribution.Simple.Build ( writeBuiltinAutogenFiles ) +#else +import Distribution.Simple.Build ( initialBuildSteps ) +#endif +#if MIN_VERSION_Cabal(3,11,0) +import Distribution.Simple.Errors ( exceptionMessage ) +#endif +-- Temporary, can be removed if initialBuildSteps restored to Cabal's API. +#if MIN_VERSION_Cabal(3,11,0) +import Distribution.Simple.LocalBuildInfo + ( componentBuildDir, withAllComponentsInBuildOrder ) +#endif +#if MIN_VERSION_Cabal(3,8,1) +import Distribution.Simple.PackageDescription ( readGenericPackageDescription ) +#else +-- Avoid confusion with Cabal-syntax module of same name +import "Cabal" Distribution.PackageDescription.Parsec + ( readGenericPackageDescription ) +#endif +import Distribution.Simple.Utils + ( createDirectoryIfMissingVerbose, findPackageDesc ) +#if MIN_VERSION_Cabal(3,8,1) +import Distribution.Types.GenericPackageDescription + ( GenericPackageDescription (..) ) +#else +-- Avoid confusion with Cabal-syntax module of same name +import "Cabal" Distribution.Types.GenericPackageDescription + ( GenericPackageDescription (..) ) +#endif +-- Temporary, can be removed if initialBuildSteps restored to Cabal's API. 
+#if MIN_VERSION_Cabal(3,11,0) +import Distribution.Types.ComponentLocalBuildInfo ( ComponentLocalBuildInfo ) +import Distribution.Types.LocalBuildInfo ( LocalBuildInfo ) +import Distribution.Types.PackageDescription ( PackageDescription ) +#if MIN_VERSION_Cabal(3,14,0) +import Distribution.Utils.Path + ( interpretSymbolicPathCWD, makeSymbolicPath, relativeSymbolicPath ) +#endif +import Distribution.Verbosity ( Verbosity ) +#endif +import Distribution.Verbosity ( flagToVerbosity ) import Main -import Distribution.PackageDescription (PackageDescription, emptyHookedBuildInfo) -import Distribution.Simple -import Distribution.Simple.Build -import Distribution.Simple.Setup (ReplFlags, fromFlag, replDistPref, replVerbosity) -import Distribution.Simple.LocalBuildInfo (LocalBuildInfo) -import System.Environment (getArgs) +import System.Environment ( getArgs ) mainOverride :: IO () mainOverride = do - args <- getArgs - if "repl" `elem` args && "stack-initial-build-steps" `elem` args - then do - defaultMainWithHooks simpleUserHooks - { preRepl = \_ _ -> return emptyHookedBuildInfo - , replHook = stackReplHook - , postRepl = \_ _ _ _ -> return () - } - else main + args <- getArgs + case args of + [arg1, arg2, "repl", "stack-initial-build-steps"] -> stackReplHook arg1 arg2 + _ -> main + +-- | The name of the function is a mismomer, but is kept for historical reasons. 
+-- This function relies on Stack calling the 'setup' executable with: +-- +-- --verbose= +-- --builddir= +-- repl +-- stack-initial-build-steps +stackReplHook :: String -> String -> IO () +stackReplHook arg1 arg2 = do + let mRawVerbosity = stripPrefix "--verbose=" arg1 + mRawBuildDir = stripPrefix "--builddir=" arg2 + case (mRawVerbosity, mRawBuildDir) of + (Nothing, _) -> fail $ + "Misuse of running Setup.hs with stack-initial-build-steps, expected " <> + "first argument to start --verbose=" + (_, Nothing) -> fail $ + "Misuse of running Setup.hs with stack-initial-build-steps, expected" <> + "second argument to start --builddir=" + (Just rawVerbosity, Just rawBuildDir) -> do + let eVerbosity = runReadE flagToVerbosity rawVerbosity + case eVerbosity of + Left msg1 -> fail $ + "Unexpected happened running Setup.hs with " <> + "stack-initial-build-steps, expected to parse Cabal verbosity: " <> + msg1 + Right verbosity -> do + eFp <- +#if MIN_VERSION_Cabal(3,14,0) + findPackageDesc Nothing +#else + findPackageDesc "" +#endif + case eFp of + Left err -> fail $ + "Unexpected happened running Setup.hs with " <> + "stack-initial-build-steps, expected to find a Cabal file: " <> + msg2 + where +#if MIN_VERSION_Cabal(3,11,0) + -- The type of findPackageDesc changed in Cabal-3.11.0.0. + msg2 = exceptionMessage err +#else + msg2 = err +#endif + Right fp -> do + gpd <- + readGenericPackageDescription + verbosity +#if MIN_VERSION_Cabal(3,14,0) + Nothing + (relativeSymbolicPath fp) +#else + fp +#endif + let pd = packageDescription gpd + lbi <- getPersistBuildConfig +#if MIN_VERSION_Cabal(3,14,0) + Nothing + (makeSymbolicPath rawBuildDir) +#else + rawBuildDir +#endif + initialBuildSteps rawBuildDir pd lbi verbosity + +-- | Temporary, can be removed if initialBuildSteps restored to Cabal's API. +-- Based on the functions of the same name provided by Cabal-3.10.3.0. +#if MIN_VERSION_Cabal(3,11,0) +-- | Runs 'componentInitialBuildSteps' on every configured component. 
+initialBuildSteps :: + FilePath -- ^"dist" prefix + -> PackageDescription -- ^mostly information from the .cabal file + -> LocalBuildInfo -- ^Configuration information + -> Verbosity -- ^The verbosity to use + -> IO () +initialBuildSteps distPref pkg_descr lbi verbosity = + withAllComponentsInBuildOrder pkg_descr lbi $ \_comp clbi -> + componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity -stackReplHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> ReplFlags -> [String] -> IO () -stackReplHook pkg_descr lbi hooks flags args = do - let distPref = fromFlag (replDistPref flags) - verbosity = fromFlag (replVerbosity flags) - case args of - ("stack-initial-build-steps":rest) - | null rest -> initialBuildSteps distPref pkg_descr lbi verbosity - | otherwise -> - fail "Misuse of running Setup.hs with stack-initial-build-steps, expected no arguments" - _ -> replHook simpleUserHooks pkg_descr lbi hooks flags args +-- | Creates the autogenerated files for a particular configured component. +componentInitialBuildSteps :: + FilePath -- ^"dist" prefix + -> PackageDescription -- ^mostly information from the .cabal file + -> LocalBuildInfo -- ^Configuration information + -> ComponentLocalBuildInfo + -> Verbosity -- ^The verbosity to use + -> IO () +componentInitialBuildSteps _distPref pkg_descr lbi clbi verbosity = do + createDirectoryIfMissingVerbose + verbosity + True +#if MIN_VERSION_Cabal(3,14,0) + (interpretSymbolicPathCWD $ componentBuildDir lbi clbi) +#else + (componentBuildDir lbi clbi) +#endif + -- Cabal-3.10.3.0 used writeAutogenFiles, that generated and wrote out the + -- Paths_.hs, PackageInfo_.hs, and cabal_macros.h files. This + -- appears to be the equivalent function for Cabal-3.11.0.0. 
+ writeBuiltinAutogenFiles verbosity pkg_descr lbi clbi +#endif diff --git a/src/test/Spec.hs b/src/test/Spec.hs deleted file mode 100644 index a824f8c30c..0000000000 --- a/src/test/Spec.hs +++ /dev/null @@ -1 +0,0 @@ -{-# OPTIONS_GHC -F -pgmF hspec-discover #-} diff --git a/src/test/Stack/ArgsSpec.hs b/src/test/Stack/ArgsSpec.hs deleted file mode 100644 index 3febd37761..0000000000 --- a/src/test/Stack/ArgsSpec.hs +++ /dev/null @@ -1,166 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} --- | Args parser test suite. - -module Stack.ArgsSpec where - -import Control.Monad -import Data.Attoparsec.Args (EscapingMode(..), parseArgsFromString) -import Data.Attoparsec.Interpreter (interpreterArgsParser) -import qualified Data.Attoparsec.Text as P -import Data.Text (pack) -import Stack.Prelude -import Test.Hspec -import Prelude (head) - --- | Test spec. -spec :: Spec -spec = do - argsSpec - interpreterArgsSpec - -argsSpec :: Spec -argsSpec = forM_ argsInputOutput - (\(input,output) -> it input (parseArgsFromString Escaping input == output)) - --- | Fairly comprehensive checks. 
-argsInputOutput :: [(String, Either String [String])] -argsInputOutput = - [ ("x", Right ["x"]) - , ("x y z", Right ["x", "y", "z"]) - , ("aaa bbb ccc", Right ["aaa", "bbb", "ccc"]) - , (" aaa bbb ccc ", Right ["aaa", "bbb", "ccc"]) - , ("aaa\"", Left "unterminated string: endOfInput") - , ("\"", Left "unterminated string: endOfInput") - , ("\"\"", Right [""]) - , ("\"aaa", Left "unterminated string: endOfInput") - , ("\"aaa\" bbb ccc \"ddd\"", Right ["aaa", "bbb", "ccc", "ddd"]) - , ("\"aa\\\"a\" bbb ccc \"ddd\"", Right ["aa\"a", "bbb", "ccc", "ddd"]) - , ("\"aa\\\"a\" bb\\b ccc \"ddd\"", Right ["aa\"a", "bb\\b", "ccc", "ddd"]) - , ("\"\" \"\" c", Right ["","","c"])] - -interpreterArgsSpec :: Spec -interpreterArgsSpec = - describe "Script interpreter parser" $ do - describe "Success cases" $ do - describe "Line comments" $ do - checkLines "" - checkLines " --x" - checkLines " --x --y" - describe "Literate line comments" $ do - checkLiterateLines "" - checkLiterateLines " --x" - checkLiterateLines " --x --y" - describe "Block comments" $ do - checkBlocks "" - checkBlocks "\n" - checkBlocks " --x" - checkBlocks "\n--x" - checkBlocks " --x --y" - checkBlocks "\n--x\n--y" - checkBlocks "\n\t--x\n\t--y" - describe "Literate block comments" $ do - checkLiterateBlocks "" "" - checkLiterateBlocks "\n>" "" - checkLiterateBlocks " --x" " --x" - checkLiterateBlocks "\n>--x" "--x" - checkLiterateBlocks " --x --y " "--x --y" - checkLiterateBlocks "\n>--x\n>--y" "--x --y" - checkLiterateBlocks "\n>\t--x\n>\t--y" "--x --y" - describe "Failure cases" $ do - checkFailures - describe "Bare directives in literate files" $ do - forM_ (interpreterGenValid lineComment []) $ - testAndCheck (acceptFailure True) [] - forM_ (interpreterGenValid blockComment []) $ - testAndCheck (acceptFailure True) [] - where - parse isLiterate s = - P.parseOnly (interpreterArgsParser isLiterate stackProgName) (pack s) - - acceptSuccess :: Bool -> String -> String -> Bool - acceptSuccess isLiterate args s 
= case parse isLiterate s of - Right x | words x == words args -> True - _ -> False - - acceptFailure isLiterate _ s = case parse isLiterate s of - Left _ -> True - Right _ -> False - - testAndCheck checker out inp = it (show inp) $ checker out inp - - checkLines args = forM_ - (interpreterGenValid lineComment args) - (testAndCheck (acceptSuccess False) args) - - checkLiterateLines args = forM_ - (interpreterGenValid literateLineComment args) - (testAndCheck (acceptSuccess True) args) - - checkBlocks args = forM_ - (interpreterGenValid blockComment args) - (testAndCheck (acceptSuccess False) args) - - checkLiterateBlocks inp args = forM_ - (interpreterGenValid literateBlockComment inp) - (testAndCheck (acceptSuccess True) args) - - checkFailures = forM_ - interpreterGenInvalid - (testAndCheck (acceptFailure False) "unused") - - -- Generate a set of acceptable inputs for given format and args - interpreterGenValid fmt args = shebang <++> newLine <++> fmt args - - interpreterGenInvalid :: [String] - -- Generate a set of Invalid inputs - interpreterGenInvalid = - ["-stack\n"] -- random input - -- just the shebang - <|> shebang <++> ["\n"] - -- invalid shebang - <|> blockSpace <++> [head (interpreterGenValid lineComment args)] - -- something between shebang and stack comment - <|> shebang - <++> newLine - <++> blockSpace - <++> ([head (lineComment args)] <|> [head (blockComment args)]) - -- unterminated block comment - -- just chop the closing chars from a valid block comment - <|> shebang - <++> ["\n"] - <++> let - c = head (blockComment args) - l = length c - 2 - in [assert (drop l c == "-}") (take l c)] - -- nested block comment - <|> shebang - <++> ["\n"] - <++> [head (blockComment "--x {- nested -} --y")] - where args = " --x --y" - (<++>) = liftA2 (++) - - -- Generative grammar for the interpreter comments - shebang = ["#!/usr/bin/env stack"] - newLine = ["\n"] <|> ["\r\n"] - - -- A comment may be the last line or followed by something else - postComment = [""] 
<|> newLine - - -- A command starts with zero or more whitespace followed by "stack" - makeComment maker space args = - let makePrefix s = (s <|> [""]) <++> [stackProgName] - in (maker <$> (makePrefix space <++> [args])) <++> postComment - - lineSpace = [" "] <|> ["\t"] - lineComment = makeComment makeLine lineSpace - where makeLine s = "--" ++ s - - literateLineComment = makeComment ("> --" ++) lineSpace - - blockSpace = lineSpace <|> newLine - blockComment = makeComment makeBlock blockSpace - where makeBlock s = "{-" ++ s ++ "-}" - - literateBlockComment = makeComment - (\s -> "> {-" ++ s ++ "-}") - (lineSpace <|> map (++ ">") newLine) diff --git a/src/test/Stack/Build/ExecuteSpec.hs b/src/test/Stack/Build/ExecuteSpec.hs deleted file mode 100644 index f316ba94ab..0000000000 --- a/src/test/Stack/Build/ExecuteSpec.hs +++ /dev/null @@ -1,11 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -module Stack.Build.ExecuteSpec (main, spec) where - -import Stack.Prelude -import Test.Hspec - -main :: IO () -main = hspec spec - -spec :: Spec -spec = return () diff --git a/src/test/Stack/Config/DockerSpec.hs b/src/test/Stack/Config/DockerSpec.hs deleted file mode 100644 index c46f8bfe62..0000000000 --- a/src/test/Stack/Config/DockerSpec.hs +++ /dev/null @@ -1,26 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -module Stack.Config.DockerSpec (spec) where - -import Test.Hspec -import Stack.Prelude -import Stack.Types.Resolver -import RIO.Time (fromGregorian) -import Stack.Config.Docker (addDefaultTag) - -spec :: Spec -spec = do - describe "addDefaultTag" $ do - it "succeeds fails no resolver" $ addDefaultTag "foo/bar" Nothing Nothing `shouldBe` Nothing - it "succeeds on LTS" $ - addDefaultTag - "foo/bar" - Nothing - (Just $ ARResolver $ RSLSynonym $ LTS 1 2) - `shouldBe` Just "foo/bar:lts-1.2" - it "fails on nightly" $ - addDefaultTag - "foo/bar" - Nothing - (Just $ ARResolver $ RSLSynonym $ Nightly $ fromGregorian 2018 1 1) - `shouldBe` Nothing diff 
--git a/src/test/Stack/ConfigSpec.hs b/src/test/Stack/ConfigSpec.hs deleted file mode 100644 index 0b1b93f254..0000000000 --- a/src/test/Stack/ConfigSpec.hs +++ /dev/null @@ -1,227 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RecordWildCards #-} -module Stack.ConfigSpec where - -import Control.Arrow -import Pantry.Internal.AesonExtended -import Data.Yaml -import Pantry.Internal (pcHpackExecutable) -import Path -import Path.IO hiding (withSystemTempDir) -import Stack.Config -import Stack.Prelude -import Stack.Runners -import Stack.Types.Config -import Stack.Options.GlobalParser (globalOptsFromMonoid) -import System.Directory -import System.Environment -import System.IO (writeFile) -import Test.Hspec - -sampleConfig :: String -sampleConfig = - "resolver: lts-2.10\n" ++ - "packages: ['.']\n" - -buildOptsConfig :: String -buildOptsConfig = - "resolver: lts-2.10\n" ++ - "packages: ['.']\n" ++ - "build:\n" ++ - " library-profiling: true\n" ++ - " executable-profiling: true\n" ++ - " haddock: true\n" ++ - " haddock-deps: true\n" ++ - " copy-bins: true\n" ++ - " prefetch: true\n" ++ - " force-dirty: true\n" ++ - " keep-going: true\n" ++ - " keep-tmp-files: true\n" ++ - " test: true\n" ++ - " test-arguments:\n" ++ - " rerun-tests: true\n" ++ - " additional-args: ['-fprof']\n" ++ - " coverage: true\n" ++ - " no-run-tests: true\n" ++ - " bench: true\n" ++ - " benchmark-opts:\n" ++ - " benchmark-arguments: -O2\n" ++ - " no-run-benchmarks: true\n" ++ - " reconfigure: true\n" ++ - " cabal-verbose: true\n" - -hpackConfig :: String -hpackConfig = - "resolver: lts-2.10\n" ++ - "with-hpack: /usr/local/bin/hpack\n" ++ - "packages: ['.']\n" - -resolverConfig :: String -resolverConfig = - "resolver: lts-2.10\n" ++ - "packages: ['.']\n" - -snapshotConfig :: String -snapshotConfig = - "snapshot: lts-2.10\n" ++ - "packages: ['.']\n" - -resolverSnapshotConfig :: String -resolverSnapshotConfig = - "resolver: 
lts-2.10\n" ++ - "snapshot: lts-2.10\n" ++ - "packages: ['.']\n" - -stackDotYaml :: Path Rel File -stackDotYaml = either impureThrow id (parseRelFile "stack.yaml") - -setup :: IO () -setup = unsetEnv "STACK_YAML" - -noException :: Selector SomeException -noException = const False - -spec :: Spec -spec = beforeAll setup $ do - let logLevel = LevelOther "silent" - -- TODO(danburton): not use inTempDir - let inTempDir action = do - currentDirectory <- getCurrentDirectory - withSystemTempDirectory "Stack_ConfigSpec" $ \tempDir -> do - let enterDir = setCurrentDirectory tempDir - let exitDir = setCurrentDirectory currentDirectory - bracket_ enterDir exitDir action - -- TODO(danburton): a safer version of this? - let withEnvVar name newValue action = do - originalValue <- fromMaybe "" <$> lookupEnv name - let setVar = setEnv name newValue - let resetVar = setEnv name originalValue - bracket_ setVar resetVar action - - describe "parseProjectAndConfigMonoid" $ do - let loadProject' fp inner = do - globalOpts <- globalOptsFromMonoid False mempty - withRunnerGlobal globalOpts { globalLogLevel = logLevel } $ do - iopc <- loadConfigYaml ( - parseProjectAndConfigMonoid (parent fp) - ) fp - ProjectAndConfigMonoid project _ <- liftIO iopc - liftIO $ inner project - - toAbsPath path = do - parentDir <- getCurrentDirectory >>= parseAbsDir - return (parentDir path) - - loadProject config inner = do - yamlAbs <- toAbsPath stackDotYaml - writeFile (toFilePath yamlAbs) config - loadProject' yamlAbs inner - - it "parses snapshot using 'resolver'" $ inTempDir $ do - loadProject resolverConfig $ \Project{..} -> - projectResolver `shouldBe` RSLSynonym (LTS 2 10) - - it "parses snapshot using 'snapshot'" $ inTempDir $ do - loadProject snapshotConfig $ \Project{..} -> - projectResolver `shouldBe` RSLSynonym (LTS 2 10) - - it "throws if both 'resolver' and 'snapshot' are present" $ inTempDir $ do - loadProject resolverSnapshotConfig (const (return ())) - `shouldThrow` anyException - - 
describe "loadConfig" $ do - let loadConfig' inner = do - globalOpts <- globalOptsFromMonoid False mempty - withRunnerGlobal globalOpts { globalLogLevel = logLevel } $ - loadConfig inner - -- TODO(danburton): make sure parent dirs also don't have config file - it "works even if no config file exists" $ example $ - loadConfig' $ const $ return () - - it "works with a blank config file" $ inTempDir $ do - writeFile (toFilePath stackDotYaml) "" - -- TODO(danburton): more specific test for exception - loadConfig' (const (return ())) `shouldThrow` anyException - - let configOverrideHpack = pcHpackExecutable . view pantryConfigL - - it "parses config option with-hpack" $ inTempDir $ do - writeFile (toFilePath stackDotYaml) hpackConfig - loadConfig' $ \config -> - liftIO $ configOverrideHpack config `shouldBe` - HpackCommand "/usr/local/bin/hpack" - - it "parses config bundled hpack" $ inTempDir $ do - writeFile (toFilePath stackDotYaml) sampleConfig - loadConfig' $ \config -> - liftIO $ configOverrideHpack config `shouldBe` HpackBundled - - it "parses build config options" $ inTempDir $ do - writeFile (toFilePath stackDotYaml) buildOptsConfig - loadConfig' $ \config -> liftIO $ do - let BuildOpts{..} = configBuild config - boptsLibProfile `shouldBe` True - boptsExeProfile `shouldBe` True - boptsHaddock `shouldBe` True - boptsHaddockDeps `shouldBe` Just True - boptsInstallExes `shouldBe` True - boptsPreFetch `shouldBe` True - boptsKeepGoing `shouldBe` Just True - boptsKeepTmpFiles `shouldBe` True - boptsForceDirty `shouldBe` True - boptsTests `shouldBe` True - boptsTestOpts `shouldBe` TestOpts {toRerunTests = True - ,toAdditionalArgs = ["-fprof"] - ,toCoverage = True - ,toDisableRun = True - ,toMaximumTimeSeconds = Nothing} - boptsBenchmarks `shouldBe` True - boptsBenchmarkOpts `shouldBe` BenchmarkOpts {beoAdditionalArgs = Just "-O2" - ,beoDisableRun = True} - boptsReconfigure `shouldBe` True - boptsCabalVerbose `shouldBe` True - - it "finds the config file in a parent 
directory" $ inTempDir $ do - writeFile "package.yaml" "name: foo" - writeFile (toFilePath stackDotYaml) sampleConfig - parentDir <- getCurrentDirectory >>= parseAbsDir - let childDir = "child" - createDirectory childDir - setCurrentDirectory childDir - loadConfig' $ \config -> liftIO $ do - bc <- runRIO config $ withBuildConfig ask - view projectRootL bc `shouldBe` parentDir - - it "respects the STACK_YAML env variable" $ inTempDir $ do - withSystemTempDir "config-is-here" $ \dir -> do - let stackYamlFp = toFilePath (dir stackDotYaml) - writeFile stackYamlFp sampleConfig - writeFile (toFilePath dir ++ "/package.yaml") "name: foo" - withEnvVar "STACK_YAML" stackYamlFp $ loadConfig' $ \config -> liftIO $ do - BuildConfig{..} <- runRIO config $ withBuildConfig ask - bcStackYaml `shouldBe` dir stackDotYaml - parent bcStackYaml `shouldBe` dir - - it "STACK_YAML can be relative" $ inTempDir $ do - parentDir <- getCurrentDirectory >>= parseAbsDir - let childRel = either impureThrow id (parseRelDir "child") - yamlRel = childRel either impureThrow id (parseRelFile "some-other-name.config") - yamlAbs = parentDir yamlRel - packageYaml = childRel either impureThrow id (parseRelFile "package.yaml") - createDirectoryIfMissing True $ toFilePath $ parent yamlAbs - writeFile (toFilePath yamlAbs) "resolver: ghc-7.8" - writeFile (toFilePath packageYaml) "name: foo" - withEnvVar "STACK_YAML" (toFilePath yamlRel) $ loadConfig' $ \config -> liftIO $ do - BuildConfig{..} <- runRIO config $ withBuildConfig ask - bcStackYaml `shouldBe` yamlAbs - - describe "defaultConfigYaml" $ - it "is parseable" $ \_ -> do - curDir <- getCurrentDir - let parsed :: Either String (Either String (WithJSONWarnings ConfigMonoid)) - parsed = parseEither (parseConfigMonoid curDir) <$> left show (decodeEither' defaultConfigYaml) - case parsed of - Right (Right _) -> return () :: IO () - _ -> fail "Failed to parse default config yaml" diff --git a/src/test/Stack/DotSpec.hs b/src/test/Stack/DotSpec.hs deleted 
file mode 100644 index 418bb3e55e..0000000000 --- a/src/test/Stack/DotSpec.hs +++ /dev/null @@ -1,121 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE CPP #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TupleSections #-} --- | Test suite for Stack.Dot -module Stack.DotSpec where - -import Data.Functor.Identity -import Data.List ((\\)) -import qualified Data.Map as Map -import qualified Data.Set as Set -import Distribution.License (License (BSD3)) -import qualified RIO.Text as T -import Stack.Prelude hiding (pkgName) -import Test.Hspec -import Test.Hspec.QuickCheck (prop) -import Test.QuickCheck (forAll,choose,Gen) - -import Stack.Dot - -dummyPayload :: DotPayload -dummyPayload = DotPayload (parseVersion "0.0.0.0") (Just (Right BSD3)) Nothing - -spec :: Spec -spec = do - let graph = - Map.mapKeys pkgName - . fmap (\p -> (Set.map pkgName p, dummyPayload)) - . Map.fromList $ [("one",Set.fromList ["base","free"]) - ,("two",Set.fromList ["base","free","mtl","transformers","one"]) - ] - describe "Stack.Dot" $ do - it "does nothing if depth is 0" $ - resolveDependencies (Just 0) graph stubLoader `shouldBe` return graph - - it "with depth 1, more dependencies are resolved" $ do - let graph' = Map.insert (pkgName "cycle") - (Set.singleton (pkgName "cycle"), dummyPayload) - graph - resultGraph = runIdentity (resolveDependencies (Just 0) graph stubLoader) - resultGraph' = runIdentity (resolveDependencies (Just 1) graph' stubLoader) - Map.size resultGraph < Map.size resultGraph' `shouldBe` True - - it "cycles are ignored" $ do - let graph' = Map.insert (pkgName "cycle") - (Set.singleton (pkgName "cycle"), dummyPayload) - graph - resultGraph = resolveDependencies Nothing graph stubLoader - resultGraph' = resolveDependencies Nothing graph' stubLoader - fmap Map.size resultGraph' `shouldBe` fmap ((+1) . Map.size) resultGraph - - let graphElem e = Set.member e . Set.unions . 
Map.elems - - prop "requested packages are pruned" $ do - let resolvedGraph = runIdentity (resolveDependencies Nothing graph stubLoader) - allPackages g = Map.keysSet g `Set.union` foldMap fst g - forAll (sublistOf (Set.toList (allPackages resolvedGraph))) $ \toPrune -> - let pruned = pruneGraph [pkgName "one", pkgName "two"] toPrune resolvedGraph - in Set.null (allPackages pruned `Set.intersection` Set.fromList toPrune) - - prop "pruning removes orhpans" $ do - let resolvedGraph = runIdentity (resolveDependencies Nothing graph stubLoader) - allPackages g = Map.keysSet g `Set.union` foldMap fst g - orphans g = Map.filterWithKey (\k _ -> not (graphElem k g)) g - forAll (sublistOf (Set.toList (allPackages resolvedGraph))) $ \toPrune -> - let pruned = pruneGraph [pkgName "one", pkgName "two"] toPrune resolvedGraph - in null (Map.keys (orphans (fmap fst pruned)) \\ [pkgName "one", pkgName "two"]) - -{- Helper functions below -} --- Backport from QuickCheck 2.8 to 2.7.6 -sublistOf :: [a] -> Gen [a] -sublistOf = filterM (\_ -> choose (False, True)) - --- Unsafe internal helper to create a package name -pkgName :: Text -> PackageName -pkgName = fromMaybe failure . parsePackageName . T.unpack - where - failure = error "Internal error during package name creation in DotSpec.pkgName" - --- Stub, simulates the function to load package dependecies -stubLoader :: PackageName -> Identity (Set PackageName, DotPayload) -stubLoader name = return . (, dummyPayload) . Set.fromList . 
map pkgName $ case show name of - "StateVar" -> ["stm","transformers"] - "array" -> [] - "bifunctors" -> ["semigroupoids","semigroups","tagged"] - "binary" -> ["array","bytestring","containers"] - "bytestring" -> ["deepseq","ghc-prim","integer-gmp"] - "comonad" -> ["containers","contravariant","distributive" - ,"semigroups","tagged","transformers","transformers-compat" - ] - "cont" -> ["StateVar","semigroups","transformers","transformers-compat","void"] - "containers" -> ["array","deepseq","ghc-prim"] - "deepseq" -> ["array"] - "distributive" -> ["ghc-prim","tagged","transformers","transformers-compat"] - "free" -> ["bifunctors","comonad","distributive","mtl" - ,"prelude-extras","profunctors","semigroupoids" - ,"semigroups","template-haskell","transformers" - ] - "ghc" -> [] - "hashable" -> ["bytestring","ghc-prim","integer-gmp","text"] - "integer" -> [] - "mtl" -> ["transformers"] - "nats" -> [] - "one" -> ["free"] - "prelude" -> [] - "profunctors" -> ["comonad","distributive","semigroupoids","tagged","transformers"] - "semigroupoids" -> ["comonad","containers","contravariant","distributive" - ,"semigroups","transformers","transformers-compat" - ] - "semigroups" -> ["bytestring","containers","deepseq","hashable" - ,"nats","text","unordered-containers" - ] - "stm" -> ["array"] - "tagged" -> ["template-haskell"] - "template" -> [] - "text" -> ["array","binary","bytestring","deepseq","ghc-prim","integer-gmp"] - "transformers" -> [] - "two" -> ["free","mtl","one","transformers"] - "unordered" -> ["deepseq","hashable"] - "void" -> ["ghc-prim","hashable","semigroups"] - _ -> [] diff --git a/src/test/Stack/Ghci/PortableFakePaths.hs b/src/test/Stack/Ghci/PortableFakePaths.hs deleted file mode 100644 index 62a1daf2bb..0000000000 --- a/src/test/Stack/Ghci/PortableFakePaths.hs +++ /dev/null @@ -1,12 +0,0 @@ -{-# LANGUAGE CPP #-} - --- | Helpers for writing fake paths for test suite for the GhciScript DSL. 
--- This must be a separate module because it is used in Teplate Haskell splices. -module Stack.Ghci.PortableFakePaths where - -defaultDrive :: FilePath -#ifdef WINDOWS -defaultDrive = "C:\\" -#else -defaultDrive = "/" -#endif diff --git a/src/test/Stack/Ghci/ScriptSpec.hs b/src/test/Stack/Ghci/ScriptSpec.hs deleted file mode 100644 index 70e64c507f..0000000000 --- a/src/test/Stack/Ghci/ScriptSpec.hs +++ /dev/null @@ -1,63 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE OverloadedLists #-} -{-# LANGUAGE TemplateHaskell #-} - --- | Test suite for the GhciScript DSL -module Stack.Ghci.ScriptSpec where - -import qualified Data.Set as S -import Distribution.ModuleName -import Test.Hspec -import qualified System.FilePath as FP -import Stack.Ghci.PortableFakePaths -import Stack.Prelude hiding (fromString) -import Path -import Path.Extra (pathToLazyByteString) - -import Stack.Ghci.Script - -spec :: Spec -spec = do - describe "GHCi" $ do - describe "Script DSL" $ do - - describe "script" $ do - it "should seperate commands with a newline" $ do - let dir = $(mkAbsDir $ defaultDrive FP. "src" FP. "package-a") - script = cmdCdGhc dir - <> cmdAdd [Left (fromString "Lib.A")] - scriptToLazyByteString script `shouldBe` - ":cd-ghc " <> pathToLazyByteString dir <> "\n:add Lib.A\n" - - describe ":add" $ do - it "should not render empty add commands" $ do - let script = cmdAdd S.empty - scriptToLazyByteString script `shouldBe` "" - - it "should ensure that a space exists between each module in an add command" $ do - let script = cmdAdd (S.fromList [Left (fromString "Lib.A"), Left (fromString "Lib.B")]) - scriptToLazyByteString script `shouldBe` ":add Lib.A Lib.B\n" - - describe ":add (by file)" $ do - it "should render a full file path" $ do - let file = $(mkAbsFile $ defaultDrive FP. "Users" FP. "someone" FP. "src" FP. "project" FP. "package-a" FP. "src" FP. 
"Main.hs") - script = cmdAdd (S.fromList [Right file]) - scriptToLazyByteString script `shouldBe` - ":add " <> pathToLazyByteString file <> "\n" - - describe ":cd-ghc" $ do - it "should render a full absolute path" $ do - let dir = $(mkAbsDir $ defaultDrive FP. "Users" FP. "someone" FP. "src" FP. "project" FP. "package-a") - script = cmdCdGhc dir - scriptToLazyByteString script `shouldBe` - ":cd-ghc " <> pathToLazyByteString dir <> "\n" - - describe ":module" $ do - it "should render empty module as ':module +'" $ do - let script = cmdModule [] - scriptToLazyByteString script `shouldBe` ":module +\n" - - it "should ensure that a space exists between each module in a module command" $ do - let script = cmdModule [fromString "Lib.A", fromString "Lib.B"] - scriptToLazyByteString script `shouldBe` ":module + Lib.A Lib.B\n" diff --git a/src/test/Stack/NixSpec.hs b/src/test/Stack/NixSpec.hs deleted file mode 100644 index 10b8fb7dcb..0000000000 --- a/src/test/Stack/NixSpec.hs +++ /dev/null @@ -1,105 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -module Stack.NixSpec where - -import Data.Maybe (fromJust) -import Options.Applicative -import Path -import Prelude (writeFile) -import Stack.Config -import Stack.Config.Nix -import Stack.Constants -import Stack.Options.GlobalParser (globalOptsFromMonoid) -import Stack.Options.NixParser -import Stack.Prelude -import Stack.Runners -import Stack.Types.Config -import Stack.Types.Nix -import System.Directory -import System.Environment -import Test.Hspec - -sampleConfigNixEnabled :: String -sampleConfigNixEnabled = - "resolver: lts-2.10\n" ++ - "packages: ['.']\n" ++ - "system-ghc: true\n" ++ - "nix:\n" ++ - " enable: True\n" ++ - " packages: [glpk]" - -sampleConfigNixDisabled :: String -sampleConfigNixDisabled = - "resolver: lts-2.10\n" ++ - "packages: ['.']\n" ++ - "nix:\n" ++ - " enable: False" - -setup :: IO () -setup = unsetEnv "STACK_YAML" - -spec :: Spec -spec 
= beforeAll setup $ do - let loadConfig' :: ConfigMonoid -> (Config -> IO ()) -> IO () - loadConfig' cmdLineArgs inner = do - globalOpts <- globalOptsFromMonoid False mempty { globalMonoidConfigMonoid = cmdLineArgs } - withRunnerGlobal globalOpts { globalLogLevel = LevelOther "silent" } $ - loadConfig (liftIO . inner) - inTempDir test = do - currentDirectory <- getCurrentDirectory - withSystemTempDirectory "Stack_ConfigSpec" $ \tempDir -> do - let enterDir = setCurrentDirectory tempDir - exitDir = setCurrentDirectory currentDirectory - bracket_ enterDir exitDir test - withStackDotYaml config test = inTempDir $ do - writeFile (toFilePath stackDotYaml) config - test - parseNixOpts cmdLineOpts = fromJust $ getParseResult $ execParserPure - defaultPrefs - (info (nixOptsParser False) mempty) - cmdLineOpts - parseOpts cmdLineOpts = mempty { configMonoidNixOpts = parseNixOpts cmdLineOpts } - let trueOnNonWindows = not osIsWindows - describe "nix disabled in config file" $ - around_ (withStackDotYaml sampleConfigNixDisabled) $ do - it "sees that the nix shell is not enabled" $ loadConfig' mempty $ \config -> - nixEnable (configNix config) `shouldBe` False - describe "--nix given on command line" $ - it "sees that the nix shell is enabled" $ - loadConfig' (parseOpts ["--nix"]) $ \config -> - nixEnable (configNix config) `shouldBe` trueOnNonWindows - describe "--nix-pure given on command line" $ - it "sees that the nix shell is enabled" $ - loadConfig' (parseOpts ["--nix-pure"]) $ \config -> - nixEnable (configNix config) `shouldBe` trueOnNonWindows - describe "--no-nix given on command line" $ - it "sees that the nix shell is not enabled" $ - loadConfig' (parseOpts ["--no-nix"]) $ \config -> - nixEnable (configNix config) `shouldBe` False - describe "--no-nix-pure given on command line" $ - it "sees that the nix shell is not enabled" $ - loadConfig' (parseOpts ["--no-nix-pure"]) $ \config -> - nixEnable (configNix config) `shouldBe` False - describe "nix enabled in config 
file" $ - around_ (withStackDotYaml sampleConfigNixEnabled) $ do - it "sees that the nix shell is enabled" $ - loadConfig' mempty $ \config -> - nixEnable (configNix config) `shouldBe` trueOnNonWindows - describe "--no-nix given on command line" $ - it "sees that the nix shell is not enabled" $ - loadConfig' (parseOpts ["--no-nix"]) $ \config -> - nixEnable (configNix config) `shouldBe` False - describe "--nix-pure given on command line" $ - it "sees that the nix shell is enabled" $ - loadConfig' (parseOpts ["--nix-pure"]) $ \config -> - nixEnable (configNix config) `shouldBe` trueOnNonWindows - describe "--no-nix-pure given on command line" $ - it "sees that the nix shell is enabled" $ - loadConfig' (parseOpts ["--no-nix-pure"]) $ \config -> - nixEnable (configNix config) `shouldBe` trueOnNonWindows - it "sees that the only package asked for is glpk and asks for the correct GHC derivation" $ loadConfig' mempty $ \config -> do - nixPackages (configNix config) `shouldBe` ["glpk"] - v <- parseVersionThrowing "7.10.3" - ghc <- either throwIO return $ nixCompiler (WCGhc v) - ghc `shouldBe` "haskell.compiler.ghc7103" diff --git a/src/test/Stack/PackageDumpSpec.hs b/src/test/Stack/PackageDumpSpec.hs deleted file mode 100644 index 8df07431e6..0000000000 --- a/src/test/Stack/PackageDumpSpec.hs +++ /dev/null @@ -1,269 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TupleSections #-} -module Stack.PackageDumpSpec where - -import Conduit -import qualified Data.Conduit.List as CL -import Data.Conduit.Text (decodeUtf8) -import qualified Data.Map as Map -import qualified Data.Set as Set -import Distribution.License (License(..)) -import Distribution.Types.PackageName (mkPackageName) -import Distribution.Version (mkVersion) -import Path (parseAbsFile) -import Stack.PackageDump -import Stack.Prelude -import Stack.Types.Config -import Stack.Types.GhcPkgId -import RIO.Process -import Test.Hspec -import Test.Hspec.QuickCheck - -main :: IO 
() -main = hspec spec - -spec :: Spec -spec = do - describe "eachSection" $ do - let test name content expected = it name $ do - actual <- runConduit $ yield content .| eachSection CL.consume .| CL.consume - actual `shouldBe` expected - test - "unix line endings" - "foo\nbar\n---\nbaz---\nbin\n---\n" - [ ["foo", "bar"] - , ["baz---", "bin"] - ] - test - "windows line endings" - "foo\r\nbar\r\n---\r\nbaz---\r\nbin\r\n---\r\n" - [ ["foo", "bar"] - , ["baz---", "bin"] - ] - - it "eachPair" $ do - let bss = - [ "key1: val1" - , "key2: val2a" - , " val2b" - , "key3:" - , "key4:" - , " val4a" - , " val4b" - ] - sink k = fmap (k, ) CL.consume - actual <- runConduit $ mapM_ yield bss .| eachPair sink .| CL.consume - actual `shouldBe` - [ ("key1", ["val1"]) - , ("key2", ["val2a", "val2b"]) - , ("key3", []) - , ("key4", ["val4a", "val4b"]) - ] - - describe "conduitDumpPackage" $ do - it "ghc 7.8" $ do - haskell2010:_ <- - withSourceFile "test/package-dump/ghc-7.8.txt" $ \src -> - runConduit - $ src - .| decodeUtf8 - .| conduitDumpPackage - .| CL.consume - ghcPkgId <- parseGhcPkgId "haskell2010-1.1.2.0-05c8dd51009e08c6371c82972d40f55a" - packageIdent <- maybe (fail "Not parsable package id") return $ - parsePackageIdentifier "haskell2010-1.1.2.0" - depends <- mapM parseGhcPkgId - [ "array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b" - , "base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1" - , "ghc-prim-0.3.1.0-a24f9c14c632d75b683d0f93283aea37" - ] - haskell2010 { dpExposedModules = mempty } `shouldBe` DumpPackage - { dpGhcPkgId = ghcPkgId - , dpPackageIdent = packageIdent - , dpParentLibIdent = Nothing - , dpLicense = Just BSD3 - , dpLibDirs = ["/opt/ghc/7.8.4/lib/ghc-7.8.4/haskell2010-1.1.2.0"] - , dpDepends = depends - , dpLibraries = ["HShaskell2010-1.1.2.0"] - , dpHasExposedModules = True - , dpHaddockInterfaces = ["/opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell2010-1.1.2.0/haskell2010.haddock"] - , dpHaddockHtml = Just 
"/opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell2010-1.1.2.0" - , dpIsExposed = False - , dpExposedModules = mempty - } - - it "ghc 7.10" $ do - haskell2010:_ <- - withSourceFile "test/package-dump/ghc-7.10.txt" $ \src -> - runConduit - $ src - .| decodeUtf8 - .| conduitDumpPackage - .| CL.consume - ghcPkgId <- parseGhcPkgId "ghc-7.10.1-325809317787a897b7a97d646ceaa3a3" - pkgIdent <- maybe (fail "Not parsable package id") return $ - parsePackageIdentifier "ghc-7.10.1" - depends <- mapM parseGhcPkgId - [ "array-0.5.1.0-e29cdbe82692341ebb7ce6e2798294f9" - , "base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a" - , "bin-package-db-0.0.0.0-708fc7d634a370b311371a5bcde40b62" - , "bytestring-0.10.6.0-0909f8f31271f3d75749190bf2ee35db" - , "containers-0.5.6.2-2114032c163425cc264e6e1169dc2f6d" - , "directory-1.2.2.0-b4959b472d9eee380c6b32291ade29e0" - , "filepath-1.4.0.0-40d643aa87258c186441a1f8f3e13ca6" - , "hoopl-3.10.0.2-8c8dfc4c3140e5f7c982da224c3cb1f0" - , "hpc-0.6.0.2-ac9064885aa8cb08a93314222939ead4" - , "process-1.2.3.0-3b1e9bca6ac38225806ff7bbf3f845b1" - , "template-haskell-2.10.0.0-e895139a0ffff267d412e3d0191ce93b" - , "time-1.5.0.1-e17a9220d438435579d2914e90774246" - , "transformers-0.4.2.0-c1a7bb855a176fe475d7b665301cd48f" - , "unix-2.7.1.0-e5915eb989e568b732bc7286b0d0817f" - ] - haskell2010 { dpExposedModules = mempty } `shouldBe` DumpPackage - { dpGhcPkgId = ghcPkgId - , dpPackageIdent = pkgIdent - , dpParentLibIdent = Nothing - , dpLicense = Just BSD3 - , dpLibDirs = ["/opt/ghc/7.10.1/lib/ghc-7.10.1/ghc_EMlWrQ42XY0BNVbSrKixqY"] - , dpHaddockInterfaces = ["/opt/ghc/7.10.1/share/doc/ghc/html/libraries/ghc-7.10.1/ghc.haddock"] - , dpHaddockHtml = Just "/opt/ghc/7.10.1/share/doc/ghc/html/libraries/ghc-7.10.1" - , dpDepends = depends - , dpLibraries = ["HSghc-7.10.1-EMlWrQ42XY0BNVbSrKixqY"] - , dpHasExposedModules = True - , dpIsExposed = False - , dpExposedModules = mempty - } - it "ghc 7.8.4 (osx)" $ do - hmatrix:_ <- - withSourceFile 
"test/package-dump/ghc-7.8.4-osx.txt" $ \src -> - runConduit - $ src - .| decodeUtf8 - .| conduitDumpPackage - .| CL.consume - ghcPkgId <- parseGhcPkgId "hmatrix-0.16.1.5-12d5d21f26aa98774cdd8edbc343fbfe" - pkgId <- maybe (fail "Not parsable package id") return $ - parsePackageIdentifier "hmatrix-0.16.1.5" - depends <- mapM parseGhcPkgId - [ "array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b" - , "base-4.7.0.2-918c7ac27f65a87103264a9f51652d63" - , "binary-0.7.1.0-108d06eea2ef05e517f9c1facf10f63c" - , "bytestring-0.10.4.0-78bc8f2c724c765c78c004a84acf6cc3" - , "deepseq-1.3.0.2-0ddc77716bd2515426e1ba39f6788a4f" - , "random-1.1-822c19b7507b6ac1aaa4c66731e775ae" - , "split-0.2.2-34cfb851cc3784e22bfae7a7bddda9c5" - , "storable-complex-0.2.2-e962c368d58acc1f5b41d41edc93da72" - , "vector-0.10.12.3-f4222db607fd5fdd7545d3e82419b307"] - hmatrix `shouldBe` DumpPackage - { dpGhcPkgId = ghcPkgId - , dpPackageIdent = pkgId - , dpParentLibIdent = Nothing - , dpLicense = Just BSD3 - , dpLibDirs = - [ "/Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/lib/x86_64-osx-ghc-7.8.4/hmatrix-0.16.1.5" - , "/opt/local/lib/" - , "/usr/local/lib/" - , "C:/Program Files/Example/"] - , dpHaddockInterfaces = ["/Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/doc/html/hmatrix.haddock"] - , dpHaddockHtml = Just "/Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/doc/html" - , dpDepends = depends - , dpLibraries = ["HShmatrix-0.16.1.5"] - , dpHasExposedModules = True - , dpIsExposed = True - , dpExposedModules = Set.fromList ["Data.Packed","Data.Packed.Vector","Data.Packed.Matrix","Data.Packed.Foreign","Data.Packed.ST","Data.Packed.Development","Numeric.LinearAlgebra","Numeric.LinearAlgebra.LAPACK","Numeric.LinearAlgebra.Algorithms","Numeric.Container","Numeric.LinearAlgebra.Util","Numeric.LinearAlgebra.Devel","Numeric.LinearAlgebra.Data","Numeric.LinearAlgebra.HMatrix","Numeric.LinearAlgebra.Static"] - } - it "ghc HEAD" $ do - ghcBoot:_ <- - withSourceFile 
"test/package-dump/ghc-head.txt" $ \src -> - runConduit - $ src - .| decodeUtf8 - .| conduitDumpPackage - .| CL.consume - ghcPkgId <- parseGhcPkgId "ghc-boot-0.0.0.0" - pkgId <- maybe (fail "Not parsable package id") return $ - parsePackageIdentifier "ghc-boot-0.0.0.0" - depends <- mapM parseGhcPkgId - [ "base-4.9.0.0" - , "binary-0.7.5.0" - , "bytestring-0.10.7.0" - , "directory-1.2.5.0" - , "filepath-1.4.1.0" - ] - ghcBoot `shouldBe` DumpPackage - { dpGhcPkgId = ghcPkgId - , dpPackageIdent = pkgId - , dpParentLibIdent = Nothing - , dpLicense = Just BSD3 - , dpLibDirs = - ["/opt/ghc/head/lib/ghc-7.11.20151213/ghc-boot-0.0.0.0"] - , dpHaddockInterfaces = ["/opt/ghc/head/share/doc/ghc/html/libraries/ghc-boot-0.0.0.0/ghc-boot.haddock"] - , dpHaddockHtml = Just "/opt/ghc/head/share/doc/ghc/html/libraries/ghc-boot-0.0.0.0" - , dpDepends = depends - , dpLibraries = ["HSghc-boot-0.0.0.0"] - , dpHasExposedModules = True - , dpIsExposed = True - , dpExposedModules = Set.fromList ["GHC.Lexeme", "GHC.PackageDb"] - } - - - it "sinkMatching" $ runEnvNoLogging $ \pkgexe -> do - m <- ghcPkgDump pkgexe [] - $ conduitDumpPackage - .| sinkMatching (Map.singleton (mkPackageName "transformers") (mkVersion [0, 0, 0, 0, 0, 0, 1])) - case Map.lookup (mkPackageName "base") m of - Nothing -> error "base not present" - Just _ -> return () - liftIO $ do - Map.lookup (mkPackageName "transformers") m `shouldBe` Nothing - Map.lookup (mkPackageName "ghc") m `shouldBe` Nothing - - describe "pruneDeps" $ do - it "sanity check" $ do - let prunes = - [ ((1, 'a'), []) - , ((1, 'b'), []) - , ((2, 'a'), [(1, 'b')]) - , ((2, 'b'), [(1, 'a')]) - , ((3, 'a'), [(1, 'c')]) - , ((4, 'a'), [(2, 'a')]) - ] - actual = fst <$> pruneDeps fst fst snd bestPrune prunes - actual `shouldBe` Map.fromList - [ (1, (1, 'b')) - , (2, (2, 'a')) - , (4, (4, 'a')) - ] - - prop "invariant holds" $ \prunes' -> - -- Force uniqueness - let prunes = Map.toList $ Map.fromList prunes' - in checkDepsPresent prunes $ fst <$> 
pruneDeps fst fst snd bestPrune prunes - -type PruneCheck = ((Int, Char), [(Int, Char)]) - -bestPrune :: PruneCheck -> PruneCheck -> PruneCheck -bestPrune x y - | fst x > fst y = x - | otherwise = y - -checkDepsPresent :: [PruneCheck] -> Map Int (Int, Char) -> Bool -checkDepsPresent prunes selected = - all hasDeps $ Set.toList allIds - where - depMap = Map.fromList prunes - allIds = Set.fromList $ Map.elems selected - - hasDeps ident = - case Map.lookup ident depMap of - Nothing -> error "checkDepsPresent: missing in depMap" - Just deps -> Set.null $ Set.difference (Set.fromList deps) allIds - -runEnvNoLogging :: (GhcPkgExe -> RIO LoggedProcessContext a) -> IO a -runEnvNoLogging inner = do - envVars <- view envVarsL <$> mkDefaultProcessContext - menv <- mkProcessContext $ Map.delete "GHC_PACKAGE_PATH" envVars - let find name = runRIO menv (findExecutable name) >>= either throwIO parseAbsFile - pkg <- GhcPkgExe <$> find "ghc-pkg" - runRIO (LoggedProcessContext menv mempty) (inner pkg) diff --git a/src/test/Stack/UploadSpec.hs b/src/test/Stack/UploadSpec.hs deleted file mode 100644 index 2806a05131..0000000000 --- a/src/test/Stack/UploadSpec.hs +++ /dev/null @@ -1,28 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -module Stack.UploadSpec (spec) where - -import RIO -import RIO.Directory -import RIO.FilePath (()) -import Stack.Upload -import Test.Hspec -import System.Permissions (osIsWindows) -import System.PosixCompat.Files (getFileStatus, fileMode) -import Data.Bits ((.&.)) - -spec :: Spec -spec = do - it "writeFilePrivate" $ example $ withSystemTempDirectory "writeFilePrivate" $ \dir -> replicateM_ 2 $ do - let fp = dir "filename" - contents :: IsString s => s - contents = "These are the contents" - writeFilePrivate fp contents - actual <- readFileBinary fp - actual `shouldBe` contents - perms <- getPermissions fp - perms `shouldBe` setOwnerWritable True (setOwnerReadable True emptyPermissions) - - unless osIsWindows $ do - status <- 
getFileStatus fp - (fileMode status .&. 0o777) `shouldBe` 0o600 diff --git a/src/unix/Stack/Build/TestSuiteTimeout.hs b/src/unix/Stack/Build/TestSuiteTimeout.hs new file mode 100644 index 0000000000..e496a661d9 --- /dev/null +++ b/src/unix/Stack/Build/TestSuiteTimeout.hs @@ -0,0 +1,40 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Build.TestSuiteTimeout +Description : OS-specific test suite timeout termination helpers. +License : BSD-3-Clause +-} + +module Stack.Build.TestSuiteTimeout + ( prepareForEscalation + , terminateGracefully + , forceKill + ) where + +import RIO.Process ( ProcessConfig, setNewSession ) +import qualified RIO.Process as RP ( Process, unsafeProcessHandle ) +import Stack.Prelude +import System.Posix.Signals + ( sigKILL, sigTERM, signalProcess, signalProcessGroup ) +import qualified System.Process as Process + +prepareForEscalation :: ProcessConfig stdin stdout stderr -> ProcessConfig stdin stdout stderr +prepareForEscalation = setNewSession True + +terminateGracefully :: RP.Process stdin stdout stderr -> RIO env () +terminateGracefully p = do + let processHandle = RP.unsafeProcessHandle p + mpid <- liftIO $ Process.getPid processHandle + forM_ mpid $ \pid -> do + -- In a new session, the initial pid is also the process group id. 
+ void $ tryAny $ liftIO $ signalProcessGroup sigTERM pid + void $ tryAny $ liftIO $ signalProcess sigTERM pid + +forceKill :: RP.Process stdin stdout stderr -> RIO env () +forceKill p = do + let processHandle = RP.unsafeProcessHandle p + mpid <- liftIO $ Process.getPid processHandle + forM_ mpid $ \pid -> do + void $ tryAny $ liftIO $ signalProcessGroup sigKILL pid + void $ tryAny $ liftIO $ signalProcess sigKILL pid diff --git a/src/unix/Stack/Constants/UsrLibDirs.hs b/src/unix/Stack/Constants/UsrLibDirs.hs new file mode 100644 index 0000000000..ac99a49ae4 --- /dev/null +++ b/src/unix/Stack/Constants/UsrLibDirs.hs @@ -0,0 +1,27 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE TemplateHaskell #-} + +{-| +Module : Stack.Constants.UsrLibDirs +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the non-Windows version. +-} + +module Stack.Constants.UsrLibDirs + ( libDirs + , usrLibDirs + ) where + +import Path ( mkAbsDir ) +import Stack.Prelude + +-- | Used in Stack.Setup for detecting libc.musl-x86_64.so.1, see comments at +-- use site +libDirs :: [Path Abs Dir] +libDirs = [$(mkAbsDir "/lib"), $(mkAbsDir "/lib64")] + +-- | Used in Stack.Setup for detecting libtinfo, see comments at use site +usrLibDirs :: [Path Abs Dir] +usrLibDirs = [$(mkAbsDir "/usr/lib"), $(mkAbsDir "/usr/lib64")] diff --git a/src/unix/Stack/Docker/Handlers.hs b/src/unix/Stack/Docker/Handlers.hs new file mode 100644 index 0000000000..ca60462054 --- /dev/null +++ b/src/unix/Stack/Docker/Handlers.hs @@ -0,0 +1,69 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Docker.Handlers +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the non-Windows version. 
+-} + +module Stack.Docker.Handlers + ( handleSetGroups + , handleSignals + ) where + +import RIO.Process + ( ExitCodeException, proc, runProcess_, setDelegateCtlc ) +import Stack.Prelude +import Stack.Types.Config ( HasConfig ) +import Stack.Types.Docker ( DockerOpts (..) ) +import System.Posix.Signals + ( Handler (..), installHandler, sigABRT, sigHUP, sigINT + , sigPIPE, sigTERM, sigUSR1, sigUSR2 + ) +import qualified System.Posix.User as PosixUser +import System.PosixCompat.Types ( GroupID ) + +handleSetGroups :: [GroupID] -> IO () +handleSetGroups = PosixUser.setGroups + +-- MSS 2018-08-30 can the CPP below be removed entirely, and instead exec the +-- `docker` process so that it can handle the signals directly? +handleSignals :: + (Exception e, HasConfig env) + => DockerOpts + -> Bool + -> String + -> RIO env (Either e ()) +handleSignals docker keepStdinOpen containerID = do + run <- askRunInIO + oldHandlers <- forM signals $ \sig -> do + let sigHandler = run $ do + readProcessNull + "docker" + ["kill", "--signal=" ++ show sig, containerID] + when (sig `elem` [sigTERM, sigABRT]) $ do + -- Give the container 30 seconds to exit gracefully, then send a + -- sigKILL to force it + threadDelay 30000000 + readProcessNull "docker" ["kill", containerID] + oldHandler <- liftIO $ installHandler sig (Catch sigHandler) Nothing + pure (sig, oldHandler) + let args' = concat + [ ["start"] + , ["-a" | not docker.detach] + , ["-i" | keepStdinOpen] + , [containerID] + ] + finally + (try $ proc "docker" args' $ runProcess_ . 
setDelegateCtlc False) + ( do unless (docker.persist || docker.detach) $ + readProcessNull "docker" ["rm", "-f", containerID] + `catch` (\(_ :: ExitCodeException) -> pure ()) + forM_ oldHandlers $ \(sig, oldHandler) -> + liftIO $ installHandler sig oldHandler Nothing + ) + where + signals = [sigINT, sigABRT, sigHUP, sigPIPE, sigTERM, sigUSR1, sigUSR2] diff --git a/src/unix/System/Info/ShortPathName.hs b/src/unix/System/Info/ShortPathName.hs index 1486971e95..09182d47a5 100644 --- a/src/unix/System/Info/ShortPathName.hs +++ b/src/unix/System/Info/ShortPathName.hs @@ -1,12 +1,20 @@ {-# LANGUAGE NoImplicitPrelude #-} +{-| +Module : System.Info.ShortPathName +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the non-Windows version. +-} + module System.Info.ShortPathName ( getShortPathName ) where -import RIO.FilePath (FilePath) -import RIO.Prelude (pure) -import RIO.Prelude.Types (IO) +import RIO.FilePath ( FilePath ) +import RIO.Prelude ( pure ) +import RIO.Prelude.Types ( IO ) getShortPathName :: FilePath -> IO FilePath getShortPathName = pure diff --git a/src/unix/System/Permissions.hs b/src/unix/System/Permissions.hs index f95c600e43..e1b9927b69 100644 --- a/src/unix/System/Permissions.hs +++ b/src/unix/System/Permissions.hs @@ -1,24 +1,39 @@ {-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : System.Permissions +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the non-Windows version. +-} + module System.Permissions - ( setScriptPerms + ( osIsMacOS , osIsWindows , setFileExecutable + , setScriptPerms ) where +import RIO import qualified System.Posix.Files as Posix -import RIO +import System.Info ( os ) + +-- | True if using macOS. +osIsMacOS :: Bool +osIsMacOS = os == "darwin" --- | True if using Windows OS. +-- | False if not using Windows. 
osIsWindows :: Bool osIsWindows = False -setScriptPerms :: MonadIO m => FilePath -> m () -setScriptPerms fp = do - liftIO $ Posix.setFileMode fp $ - Posix.ownerReadMode `Posix.unionFileModes` - Posix.ownerWriteMode `Posix.unionFileModes` - Posix.groupReadMode `Posix.unionFileModes` - Posix.otherReadMode - setFileExecutable :: MonadIO m => FilePath -> m () setFileExecutable fp = liftIO $ Posix.setFileMode fp 0o755 + +setScriptPerms :: MonadIO m => FilePath -> m () +setScriptPerms fp = + liftIO $ Posix.setFileMode fp $ + Posix.ownerReadMode `Posix.unionFileModes` + Posix.ownerWriteMode `Posix.unionFileModes` + Posix.groupReadMode `Posix.unionFileModes` + Posix.otherReadMode diff --git a/src/unix/System/Terminal.hsc b/src/unix/System/Terminal.hsc index e17b35ae5d..0b57ecbbdf 100644 --- a/src/unix/System/Terminal.hsc +++ b/src/unix/System/Terminal.hsc @@ -1,13 +1,21 @@ -{-# LANGUAGE ForeignFunctionInterface #-} +{-# LANGUAGE CApiFFI #-} + +{-| +Module : System.Terminal +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the non-Windows version. +-} + module System.Terminal -( fixCodePage -, getTerminalWidth +( getTerminalWidth , hIsTerminalDeviceOrMinTTY ) where import Foreign import Foreign.C.Types -import RIO (MonadIO, Handle, hIsTerminalDevice) +import RIO ( Handle, MonadIO, hIsTerminalDevice ) #include #include @@ -23,21 +31,21 @@ instance Storable WindowWidth where poke p (WindowWidth w) = do (#poke struct winsize, ws_col) p w -foreign import ccall "sys/ioctl.h ioctl" +-- `ioctl` is variadic, so `capi` is needed, see: +-- https://www.haskell.org/ghc/blog/20210709-capi-usage.html +foreign import capi "sys/ioctl.h ioctl" ioctl :: CInt -> CInt -> Ptr WindowWidth -> IO CInt +-- | If available, yields the width of the terminal. 
getTerminalWidth :: IO (Maybe Int) getTerminalWidth = - alloca $ \p -> do - errno <- ioctl (#const STDOUT_FILENO) (#const TIOCGWINSZ) p - if errno < 0 - then return Nothing - else do - WindowWidth w <- peek p - return . Just . fromIntegral $ w - -fixCodePage :: x -> y -> a -> a -fixCodePage _ _ = id + alloca $ \p -> do + errno <- ioctl (#const STDOUT_FILENO) (#const TIOCGWINSZ) p + if errno < 0 + then return Nothing + else do + WindowWidth w <- peek p + return . Just . fromIntegral $ w -- | hIsTerminaDevice does not recognise handles to mintty terminals as terminal -- devices, but isMinTTYHandle does. diff --git a/src/unix/System/Uname.hsc b/src/unix/System/Uname.hsc index 3f6a1a465b..2d19a0e429 100644 --- a/src/unix/System/Uname.hsc +++ b/src/unix/System/Uname.hsc @@ -1,3 +1,11 @@ +{-| +Module : System.Uname +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the non-Windows version. +-} + module System.Uname ( getRelease ) @@ -5,8 +13,8 @@ module System.Uname #include -import Foreign -import Foreign.C +import Foreign +import Foreign.C getRelease :: IO String getRelease = do @@ -31,10 +39,10 @@ foreign import ccall unsafe "haskell_uname" data Utsname instance Storable Utsname where - sizeOf = const #size struct utsname - alignment = const #alignment struct utsname - poke = error "Storable Utsname: peek: unsupported operation" - peek = error "Storable Utsname: poke: unsupported operation" + sizeOf = const #size struct utsname + alignment = const #alignment struct utsname + poke = error "Storable Utsname: peek: unsupported operation" + peek = error "Storable Utsname: poke: unsupported operation" release :: Ptr Utsname -> CString release = (#ptr struct utsname, release) diff --git a/src/windows/Stack/Build/TestSuiteTimeout.hs b/src/windows/Stack/Build/TestSuiteTimeout.hs new file mode 100644 index 0000000000..2603b01ce1 --- /dev/null +++ b/src/windows/Stack/Build/TestSuiteTimeout.hs @@ -0,0 +1,29 @@ +{-# 
LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Build.TestSuiteTimeout +Description : OS-specific test suite timeout termination helpers. +License : BSD-3-Clause +-} + +module Stack.Build.TestSuiteTimeout + ( prepareForEscalation + , terminateGracefully + , forceKill + ) where + +import RIO.Process ( ProcessConfig ) +import qualified RIO.Process as RP ( Process, unsafeProcessHandle ) +import Stack.Prelude +import qualified System.Process as Process + +prepareForEscalation :: ProcessConfig stdin stdout stderr -> ProcessConfig stdin stdout stderr +prepareForEscalation = id + +terminateGracefully :: RP.Process stdin stdout stderr -> RIO env () +terminateGracefully p = + void $ tryAny $ liftIO $ Process.terminateProcess $ RP.unsafeProcessHandle p + +forceKill :: RP.Process stdin stdout stderr -> RIO env () +forceKill p = + void $ tryAny $ liftIO $ Process.terminateProcess $ RP.unsafeProcessHandle p diff --git a/src/windows/Stack/Constants/UsrLibDirs.hs b/src/windows/Stack/Constants/UsrLibDirs.hs new file mode 100644 index 0000000000..527f530fa5 --- /dev/null +++ b/src/windows/Stack/Constants/UsrLibDirs.hs @@ -0,0 +1,25 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +{-| +Module : Stack.Constants.UsrLibDirs +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the Windows version. 
+-} + +module Stack.Constants.UsrLibDirs + ( libDirs + , usrLibDirs + ) where + +import Stack.Prelude + +-- | Used in Stack.Setup for detecting libc.musl-x86_64.so.1, see comments at +-- use site +libDirs :: [Path Abs Dir] +libDirs = [] + +-- | Used in Stack.Setup for detecting libtinfo, see comments at use site +usrLibDirs :: [Path Abs Dir] +usrLibDirs = [] diff --git a/src/windows/Stack/Docker/Handlers.hs b/src/windows/Stack/Docker/Handlers.hs new file mode 100644 index 0000000000..2bb07ca9ff --- /dev/null +++ b/src/windows/Stack/Docker/Handlers.hs @@ -0,0 +1,45 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : Stack.Docker.Handlers +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the Windows version. +-} + +module Stack.Docker.Handlers + ( handleSetGroups + , handleSignals + ) where + +import RIO.Process + ( ExitCodeException, proc , runProcess_, setDelegateCtlc ) +import Stack.Types.Config ( HasConfig ) +import Stack.Types.Docker ( DockerOpts (..)) +import Stack.Prelude +import System.PosixCompat.Types ( GroupID ) + +handleSetGroups :: [GroupID] -> IO () +handleSetGroups _ = pure () + +handleSignals :: + (Exception e, HasConfig env) + => DockerOpts + -> Bool + -> String + -> RIO env (Either e ()) +handleSignals docker keepStdinOpen containerID = do + let args' = concat + [ ["start"] + , ["-a" | not docker.detach] + , ["-i" | keepStdinOpen] + , [containerID] + ] + finally + (try $ proc "docker" args' $ runProcess_ . 
setDelegateCtlc False) + ( unless (docker.persist || docker.detach) $ + readProcessNull "docker" ["rm", "-f", containerID] + `catch` (\(_ :: ExitCodeException) -> pure ()) + ) diff --git a/src/windows/System/Info/ShortPathName.hs b/src/windows/System/Info/ShortPathName.hs index 71a1a7e235..ef2fdcbccb 100644 --- a/src/windows/System/Info/ShortPathName.hs +++ b/src/windows/System/Info/ShortPathName.hs @@ -1,5 +1,13 @@ +{-| +Module : System.Info.ShortPathName +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the Windows version. +-} + module System.Info.ShortPathName ( getShortPathName ) where -import System.Win32.Info (getShortPathName) +import System.Win32.Info ( getShortPathName ) diff --git a/src/windows/System/Permissions.hs b/src/windows/System/Permissions.hs index 2a49f8d8dc..0c2101fb33 100644 --- a/src/windows/System/Permissions.hs +++ b/src/windows/System/Permissions.hs @@ -1,15 +1,28 @@ +{-| +Module : System.Permissions +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the Windows version. +-} + module System.Permissions - ( setScriptPerms + ( osIsMacOS , osIsWindows , setFileExecutable + , setScriptPerms ) where --- | True if using Windows OS. +-- | False if using Windows. +osIsMacOS :: Bool +osIsMacOS = False + +-- | True if using Windows. 
osIsWindows :: Bool osIsWindows = True -setScriptPerms :: Monad m => FilePath -> m () -setScriptPerms _ = return () - setFileExecutable :: Monad m => FilePath -> m () -setFileExecutable _ = return () +setFileExecutable _ = pure () + +setScriptPerms :: Monad m => FilePath -> m () +setScriptPerms _ = pure () diff --git a/src/windows/System/Posix/User.hs b/src/windows/System/Posix/User.hs new file mode 100644 index 0000000000..fdb5872fdb --- /dev/null +++ b/src/windows/System/Posix/User.hs @@ -0,0 +1,60 @@ +{-# LANGUAGE NoFieldSelectors #-} +{-# LANGUAGE OverloadedRecordDot #-} + +{-| +Module : System.Posix.User +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the Windows version. Non-Windows builds rely on the unix package, which +exposes a module of the same name. +-} + +module System.Posix.User + ( getEffectiveUserID + , getEffectiveGroupID + , getGroups + , getUserEntryForName + , homeDirectory + , setGroupID + , setUserID + ) where + +import System.IO.Error ( illegalOperationErrorType, mkIOError ) +import System.PosixCompat.Types ( GroupID, UserID ) + +unsupported :: String -> IO a +unsupported f = ioError $ mkIOError illegalOperationErrorType x Nothing Nothing + where + x = "System.Posix.User." ++ f ++ ": not supported on Windows." 
+ +getEffectiveUserID :: IO UserID +getEffectiveUserID = unsupported "getEffectiveUserID" + +getEffectiveGroupID :: IO GroupID +getEffectiveGroupID = unsupported "getEffectiveGroupID" + +getGroups :: IO [GroupID] +getGroups = return [] + +getUserEntryForName :: String -> IO UserEntry +getUserEntryForName _ = unsupported "getUserEntryForName" + +setGroupID :: GroupID -> IO () +setGroupID _ = return () + +setUserID :: UserID -> IO () +setUserID _ = return () + +data UserEntry = UserEntry + { userName :: String + , userPassword :: String + , userID :: UserID + , userGroupID :: GroupID + , userGecos :: String + , homeDirectory :: String + , userShell :: String + } deriving (Eq, Read, Show) + +homeDirectory :: UserEntry -> String +homeDirectory ue = ue.homeDirectory diff --git a/src/windows/System/Terminal.hs b/src/windows/System/Terminal.hs index fe34c570b3..4d65a50f73 100644 --- a/src/windows/System/Terminal.hs +++ b/src/windows/System/Terminal.hs @@ -1,22 +1,29 @@ -{-# LANGUAGE ForeignFunctionInterface #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +{-| +Module : System.Terminal +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the Windows version. 
+-} + module System.Terminal -( fixCodePage -, getTerminalWidth -, hIsTerminalDeviceOrMinTTY -) where + ( getTerminalWidth + , hIsTerminalDeviceOrMinTTY + ) where -import Distribution.Types.Version (mkVersion) -import Foreign.Ptr -import Foreign.Storable -import Foreign.Marshal.Alloc -import Stack.Prelude -import System.IO hiding (hIsTerminalDevice) -import System.Process -import System.Win32 (isMinTTYHandle, withHandleToHANDLE) -import System.Win32.Console (setConsoleCP, setConsoleOutputCP, getConsoleCP, getConsoleOutputCP) -import RIO.Partial (read) +import Foreign.Marshal.Alloc ( allocaBytes ) +import Foreign.Ptr ( Ptr ) +import Foreign.Storable ( peekByteOff ) +import Stack.Prelude +import System.IO ( hGetContents ) +import System.Process + ( StdStream (..), createProcess, shell, std_err, std_in + , std_out, waitForProcess + ) +import System.Win32 ( isMinTTYHandle, withHandleToHANDLE ) type HANDLE = Ptr () @@ -32,88 +39,41 @@ c_STD_OUTPUT_HANDLE :: Int c_STD_OUTPUT_HANDLE = -11 foreign import ccall unsafe "windows.h GetConsoleScreenBufferInfo" - c_GetConsoleScreenBufferInfo :: HANDLE -> Ptr CONSOLE_SCREEN_BUFFER_INFO -> IO Bool + c_GetConsoleScreenBufferInfo :: HANDLE -> Ptr CONSOLE_SCREEN_BUFFER_INFO -> IO Bool foreign import ccall unsafe "windows.h GetStdHandle" - c_GetStdHandle :: Int -> IO HANDLE - + c_GetStdHandle :: Int -> IO HANDLE +-- | If available, yields the width of the terminal. 
getTerminalWidth :: IO (Maybe Int) getTerminalWidth = do - hdl <- c_GetStdHandle c_STD_OUTPUT_HANDLE - allocaBytes sizeCONSOLE_SCREEN_BUFFER_INFO $ \p -> do - b <- c_GetConsoleScreenBufferInfo hdl p - if not b - then do -- This could happen on Cygwin or MSYS - let stty = (shell "stty size") { - std_in = UseHandle stdin - , std_out = CreatePipe - , std_err = CreatePipe - } - (_, mbStdout, _, rStty) <- createProcess stty - exStty <- waitForProcess rStty - case exStty of - ExitFailure _ -> return Nothing - ExitSuccess -> - maybe (return Nothing) - (\hSize -> do - sizeStr <- hGetContents hSize - case map read $ words sizeStr :: [Int] of - [_r, c] -> return $ Just c - _ -> return Nothing - ) - mbStdout - else do - [left,_top,right,_bottom] <- forM [0..3] $ \i -> do - v <- peekByteOff p ((i*2) + posCONSOLE_SCREEN_BUFFER_INFO_srWindow) - return $ fromIntegral (v :: Word16) - return $ Just (1+right-left) - --- | Set the code page for this process as necessary. Only applies to Windows. --- See: https://github.com/commercialhaskell/stack/issues/738 -fixCodePage - :: HasLogFunc env - => Bool -- ^ modify code page? - -> Version -- ^ GHC version - -> RIO env a - -> RIO env a -fixCodePage mcp ghcVersion inner = do - if mcp && ghcVersion < mkVersion [7, 10, 3] - then fixCodePage' - -- GHC >=7.10.3 doesn't need this code page hack. 
- else inner - where - fixCodePage' = do - origCPI <- liftIO getConsoleCP - origCPO <- liftIO getConsoleOutputCP - - let setInput = origCPI /= expected - setOutput = origCPO /= expected - fixInput - | setInput = bracket_ - (liftIO $ do - setConsoleCP expected) - (liftIO $ setConsoleCP origCPI) - | otherwise = id - fixOutput - | setOutput = bracket_ - (liftIO $ do - setConsoleOutputCP expected) - (liftIO $ setConsoleOutputCP origCPO) - | otherwise = id - - case (setInput, setOutput) of - (False, False) -> return () - (True, True) -> warn "" - (True, False) -> warn " input" - (False, True) -> warn " output" - - fixInput $ fixOutput inner - expected = 65001 -- UTF-8 - warn typ = logInfo $ - "Setting" <> - typ <> - " codepage to UTF-8 (65001) to ensure correct output from GHC" + hdl <- c_GetStdHandle c_STD_OUTPUT_HANDLE + allocaBytes sizeCONSOLE_SCREEN_BUFFER_INFO $ \p -> do + b <- c_GetConsoleScreenBufferInfo hdl p + if not b + then do -- This could happen on Cygwin or MSYS + let stty = (shell "stty size") { + std_in = UseHandle stdin + , std_out = CreatePipe + , std_err = CreatePipe + } + (_, mbStdout, _, rStty) <- createProcess stty + waitForProcess rStty >>= \case + ExitFailure _ -> pure Nothing + ExitSuccess -> + maybe (pure Nothing) + (\hSize -> do + sizeStr <- hGetContents hSize + case map readMaybe $ words sizeStr :: [Maybe Int] of + [Just _r, Just c] -> pure $ Just c + _ -> pure Nothing + ) + mbStdout + else do + [left,_top,right,_bottom] <- forM [0..3] $ \i -> do + v <- peekByteOff p (i * 2 + posCONSOLE_SCREEN_BUFFER_INFO_srWindow) + pure $ fromIntegral (v :: Word16) + pure $ Just (1 + right - left) -- | hIsTerminaDevice does not recognise handles to mintty terminals as terminal -- devices, but isMinTTYHandle does. 
@@ -121,5 +81,5 @@ hIsTerminalDeviceOrMinTTY :: MonadIO m => Handle -> m Bool hIsTerminalDeviceOrMinTTY h = do isTD <- hIsTerminalDevice h if isTD - then return True + then pure True else liftIO $ withHandleToHANDLE h isMinTTYHandle diff --git a/src/windows/System/Uname.hs b/src/windows/System/Uname.hs index 9507233f38..0ae7ce7275 100644 --- a/src/windows/System/Uname.hs +++ b/src/windows/System/Uname.hs @@ -1,3 +1,11 @@ +{-| +Module : System.Uname +License : BSD-3-Clause + +The module of this name differs as between Windows and non-Windows builds. This +is the Windows version. +-} + module System.Uname ( getRelease ) where diff --git a/stack-ghc-810.yaml b/stack-ghc-810.yaml deleted file mode 100644 index 67f8c1470a..0000000000 --- a/stack-ghc-810.yaml +++ /dev/null @@ -1,30 +0,0 @@ -resolver: nightly-2020-12-07 - -packages: -- . - -docker: - enable: false - repo: fpco/stack-build:lts-16.0 - -nix: - # --nix on the command-line to enable. - packages: - - zlib - - unzip -flags: - stack: - developer-mode: true - -ghc-options: - "$locals": -fhide-source-paths - -extra-deps: -- persistent-2.11.0.1@rev:0 -- persistent-sqlite-2.11.0.0@rev:0 -- persistent-template-2.9.1.0@rev:0 -- optparse-applicative-0.16.1.0@rev:0 - -drop-packages: -# See https://github.com/commercialhaskell/stack/pull/4712 -- cabal-install diff --git a/stack-ghc-84.yaml b/stack-ghc-84.yaml deleted file mode 100644 index 22f22e5e40..0000000000 --- a/stack-ghc-84.yaml +++ /dev/null @@ -1,54 +0,0 @@ -resolver: lts-12.26 - -packages: -- . - -flags: - stack: - developer-mode: true - -docker: - enable: false - repo: fpco/stack-build-small:lts-12.26 - -nix: - # --nix on the command-line to enable. 
- enable: false - packages: - - zlib - - unzip - -ghc-options: - "$locals": -fhide-source-paths - -extra-deps: -- Cabal-3.0.0.0@rev:0 -- cabal-doctest-1.0.8@sha256:471f182dabe23bac29d47ae55a33d11f85a0b0ab64d3b8f2907b437ab47d5ba7,1467 -- hpack-0.32.0@sha256:31178d0c454ec1fd2222e7c4bdd4db937b030a43cc79cb5bb62b8048c38cd64b,4406 -- hackage-security-0.5.3.0@rev:6 -- aeson-1.4.6.0@sha256:560575b008a23960403a128331f0e59594786b5cd19a35be0cd74b9a7257958e,6980 -- infer-license-0.2.0@rev:0 #for hpack-0.31 -- time-compat-1.9.2.2@sha256:ccf268e6ec91a6d9a79392697634c670c095a34a60d1ccfa1be1c84f20bb24c5,4254 -- base-orphans-0.8.2@sha256:40ef37ed043aac2cbb6c538fdebfc62e601ee65ee161e4a6327452133b574d7e,2958 -- tar-conduit-0.3.1@rev:0 -- yaml-0.10.4.0@rev:0 #for hpack-0.31 -- persistent-2.9.2@rev:0 -- persistent-sqlite-2.9.3@rev:0 -- rio-0.1.19.0@rev:0 -- unliftio-0.2.12@rev:0 #for rio-0.1.19.0 -- ansi-terminal-0.9@rev:0 -- ansi-wl-pprint-0.6.8.2@rev:1 # for ansi-terminal-0.9 -- hedgehog-0.6.1@rev:4 # for ansi-terminal-0.9 -- optparse-simple-0.1.1.2 -- typed-process-0.2.6.0@rev:0 # for rio-0.1.19.0 -- rio-prettyprint-0.1.1.0@rev:0 -- hi-file-parser-0.1.0.0@rev:0 -- http-download-0.2.0.0@rev:0 -- pantry-0.5.1.3@rev:0 -- casa-client-0.0.1@rev:0 -- casa-types-0.0.1@rev:0 -- filelock-0.1.1.5@rev:0 - -drop-packages: -# See https://github.com/commercialhaskell/stack/pull/4712 -- cabal-install diff --git a/stack-ghc-86.yaml b/stack-ghc-86.yaml deleted file mode 120000 index 2df91e0ca9..0000000000 --- a/stack-ghc-86.yaml +++ /dev/null @@ -1 +0,0 @@ -stack.yaml \ No newline at end of file diff --git a/stack-ghc-88.yaml b/stack-ghc-88.yaml deleted file mode 100644 index 94ebe17d7b..0000000000 --- a/stack-ghc-88.yaml +++ /dev/null @@ -1,37 +0,0 @@ -resolver: lts-15.5 - -packages: -- . - -docker: - enable: false - repo: fpco/stack-build:lts-15.5 - -nix: - # --nix on the command-line to enable. 
- packages: - - zlib - - unzip -flags: - stack: - developer-mode: true - -ghc-options: - "$locals": -fhide-source-paths - -extra-deps: -- hackage-security-0.6.0.0@sha256:69987d46e7b55fe5f0fc537021c3873c5f6f44a6665d349ee6995fd593df8147,11976 -- regex-applicative-text-0.1.0.1@sha256:52463fdc8daf130f40b82fec84bad2d4b8600227751c2a5b04679a1de8bd7f7a,1155 -- lukko-0.1.1.1@sha256:5c674bdd8a06b926ba55d872abe254155ed49a58df202b4d842b643e5ed6bcc9,4289 -- hpack-0.33.0@rev:0 -- http-download-0.2.0.0@rev:0 -- filelock-0.1.1.5@rev:0 -- pantry-0.5.1.3@rev:0 -- casa-client-0.0.1@rev:0 -- casa-types-0.0.1@rev:0 -- rio-0.1.19.0@rev:0 -- rio-prettyprint-0.1.1.0@rev:0 - -drop-packages: -# See https://github.com/commercialhaskell/stack/pull/4712 -- cabal-install diff --git a/stack-ghc-9.12.4.yaml b/stack-ghc-9.12.4.yaml new file mode 100644 index 0000000000..40933d3278 --- /dev/null +++ b/stack-ghc-9.12.4.yaml @@ -0,0 +1,36 @@ +# This is an experimental project-level configuration, to see if Stack can be +# built with GHC 9.12.4. 
+snapshot: nightly-2026-04-17 # GHC 9.12.4 + +extra-deps: +# nightly-2026-04-17 specifies Cabal-3.14.2.0 +- Cabal-3.16.0.0@sha256:9972c2bd263168a20bd990962a68d4fd024f50c30a00519a6b942e8871d1bd67,14455 +- Cabal-syntax-3.16.0.0@sha256:6a35036763557301876c5b7a448de4f1cb54fe1e223ff6c4c0c1fdd6df635a65,7509 +# nightly-2026-04-17 specifies crypton-x509-1.7.7 +- crypton-x509-1.8.0@sha256:d4822ba8dcb19ee3233fc98152f5afda383ac952770a1d07f1d01168e9fcdbc2,2006 +# nightly-2026-04-17 specifies crypton-x509-store-1.6.14 +- crypton-x509-store-1.8.0@sha256:075ba50a3daa0fdbb493481a665926e1ced2135c6b4ed56f97398aa855f0aecb,1674 +# nightly-2026-04-17 specifies crypton-x509-system-1.6.8 +- crypton-x509-system-1.8.0@sha256:76bab32c7d9cb3ea356a905f85829c70967fb6f9b4b890f00d67dc54130d45ca,1521 +# nightly-2026-04-17 specifies crypton-x509-validation-1.6.14 +- crypton-x509-validation-1.8.0@sha256:63acb2df06f28c3ffdddaf6d9402105b9026796036aa10d7347ae4f7db51c36b,2018 +# nightly-2026-04-17 specifies hpack-0.39.3 +- hpack-0.39.5@sha256:c214f49ed3df75ce75201709927fa27e08f642023ed4b4ca8cc43604e5199ced,5263 +# nightly-2026-04-17 specifies rio-prettyprint-0.1.8.0 +- rio-prettyprint-0.1.9.0@sha256:f48afbfe28ed7be4d1a1bb96705669593dd5df12f28ed908516c6dfc4e0becbb,1375 +# nightly-2026-04-17 specifies tls-2.1.14 +- tls-2.2.2@sha256:95f5acd4ce76cbd6bdc46b737370dcbd93c59cf1cd1934a30e55c61c1dc140e9,7283 + +docker: + enable: false + repo: quay.io/benz0li/ghc-musl:9.12.4 + +nix: + # --nix on the command-line to enable. + packages: + - zlib + - unzip + +flags: + stack: + developer-mode: true diff --git a/stack-ghc-9.12.4.yaml.lock b/stack-ghc-9.12.4.yaml.lock new file mode 100644 index 0000000000..bc32ccd938 --- /dev/null +++ b/stack-ghc-9.12.4.yaml.lock @@ -0,0 +1,75 @@ +# This file was autogenerated by Stack. +# You should not edit this file by hand. 
+# For more information, please see the documentation at: +# https://docs.haskellstack.org/en/stable/topics/lock_files + +packages: +- completed: + hackage: Cabal-3.16.0.0@sha256:9972c2bd263168a20bd990962a68d4fd024f50c30a00519a6b942e8871d1bd67,14455 + pantry-tree: + sha256: ddc689c4070a034cacd73c39c1df2673ed16e0b38b5ca476fc6ef2aa87b6f40f + size: 12139 + original: + hackage: Cabal-3.16.0.0@sha256:9972c2bd263168a20bd990962a68d4fd024f50c30a00519a6b942e8871d1bd67,14455 +- completed: + hackage: Cabal-syntax-3.16.0.0@sha256:6a35036763557301876c5b7a448de4f1cb54fe1e223ff6c4c0c1fdd6df635a65,7509 + pantry-tree: + sha256: 4de976261238dd4045dd3addebf6a006d4aff3873ee680bac7af83dcbf068280 + size: 11238 + original: + hackage: Cabal-syntax-3.16.0.0@sha256:6a35036763557301876c5b7a448de4f1cb54fe1e223ff6c4c0c1fdd6df635a65,7509 +- completed: + hackage: crypton-x509-1.8.0@sha256:d4822ba8dcb19ee3233fc98152f5afda383ac952770a1d07f1d01168e9fcdbc2,2006 + pantry-tree: + sha256: 882a699e305fffcb77dd40bd40f959f062a8997a53560dca64462087aed3e4cd + size: 1132 + original: + hackage: crypton-x509-1.8.0@sha256:d4822ba8dcb19ee3233fc98152f5afda383ac952770a1d07f1d01168e9fcdbc2,2006 +- completed: + hackage: crypton-x509-store-1.8.0@sha256:075ba50a3daa0fdbb493481a665926e1ced2135c6b4ed56f97398aa855f0aecb,1674 + pantry-tree: + sha256: 2c7e00e593d399624264172cbd5e2fa55feaf8239cf33bee7e33bbc7509d0a2e + size: 458 + original: + hackage: crypton-x509-store-1.8.0@sha256:075ba50a3daa0fdbb493481a665926e1ced2135c6b4ed56f97398aa855f0aecb,1674 +- completed: + hackage: crypton-x509-system-1.8.0@sha256:76bab32c7d9cb3ea356a905f85829c70967fb6f9b4b890f00d67dc54130d45ca,1521 + pantry-tree: + sha256: 9ac33c993a82ac4da84804028c5398caec7aada7a83beeec3f28883fe470d68f + size: 512 + original: + hackage: crypton-x509-system-1.8.0@sha256:76bab32c7d9cb3ea356a905f85829c70967fb6f9b4b890f00d67dc54130d45ca,1521 +- completed: + hackage: 
crypton-x509-validation-1.8.0@sha256:63acb2df06f28c3ffdddaf6d9402105b9026796036aa10d7347ae4f7db51c36b,2018 + pantry-tree: + sha256: 64baf1be6e65ade9ad8eaef819c9cc4c0fcb7d3b968641dcf28a3f859fe87dc5 + size: 691 + original: + hackage: crypton-x509-validation-1.8.0@sha256:63acb2df06f28c3ffdddaf6d9402105b9026796036aa10d7347ae4f7db51c36b,2018 +- completed: + hackage: hpack-0.39.5@sha256:c214f49ed3df75ce75201709927fa27e08f642023ed4b4ca8cc43604e5199ced,5263 + pantry-tree: + sha256: 75fe87db5a37bf3941e29e2273f530ee7b2932d64bf813b55151a889e97305ca + size: 3799 + original: + hackage: hpack-0.39.5@sha256:c214f49ed3df75ce75201709927fa27e08f642023ed4b4ca8cc43604e5199ced,5263 +- completed: + hackage: rio-prettyprint-0.1.9.0@sha256:f48afbfe28ed7be4d1a1bb96705669593dd5df12f28ed908516c6dfc4e0becbb,1375 + pantry-tree: + sha256: c840f1193718b7af6c004fc62a67919e223b56cc1658a168c8e490e53a632aa0 + size: 779 + original: + hackage: rio-prettyprint-0.1.9.0@sha256:f48afbfe28ed7be4d1a1bb96705669593dd5df12f28ed908516c6dfc4e0becbb,1375 +- completed: + hackage: tls-2.2.2@sha256:95f5acd4ce76cbd6bdc46b737370dcbd93c59cf1cd1934a30e55c61c1dc140e9,7283 + pantry-tree: + sha256: 6ddac1e644efe75dbf62bbd35fa38897caca307d62bac538ae29b0c54bf6ff00 + size: 7056 + original: + hackage: tls-2.2.2@sha256:95f5acd4ce76cbd6bdc46b737370dcbd93c59cf1cd1934a30e55c61c1dc140e9,7283 +snapshots: +- completed: + sha256: f4d90db84aefb7c9590dd224ebdabfd8304f81bbad4652a715ee8350c4b9d396 + size: 738802 + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/nightly/2026/4/17.yaml + original: nightly-2026-04-17 diff --git a/stack-ghc-9.14.1.yaml b/stack-ghc-9.14.1.yaml new file mode 100644 index 0000000000..27af9a4ec9 --- /dev/null +++ b/stack-ghc-9.14.1.yaml @@ -0,0 +1,41 @@ +# This is an experimental project-level configuration, to see if Stack can be +# built with GHC 9.14.1. 
+snapshot: nightly-2026-04-17 # GHC 9.12.4 +compiler: ghc-9.14.1 + +extra-deps: +# nightly-2026-04-17 specifies Cabal-3.14.2.0 +- Cabal-3.16.0.0@sha256:9972c2bd263168a20bd990962a68d4fd024f50c30a00519a6b942e8871d1bd67,14455 +- Cabal-syntax-3.16.0.0@sha256:6a35036763557301876c5b7a448de4f1cb54fe1e223ff6c4c0c1fdd6df635a65,7509 +# nightly-2026-04-17 specifies crypton-x509-1.7.7 +- crypton-x509-1.8.0@sha256:d4822ba8dcb19ee3233fc98152f5afda383ac952770a1d07f1d01168e9fcdbc2,2006 +# nightly-2026-04-17 specifies crypton-x509-store-1.6.14 +- crypton-x509-store-1.8.0@sha256:075ba50a3daa0fdbb493481a665926e1ced2135c6b4ed56f97398aa855f0aecb,1674 +# nightly-2026-04-17 specifies crypton-x509-system-1.6.8 +- crypton-x509-system-1.8.0@sha256:76bab32c7d9cb3ea356a905f85829c70967fb6f9b4b890f00d67dc54130d45ca,1521 +# nightly-2026-04-17 specifies crypton-x509-validation-1.6.14 +- crypton-x509-validation-1.8.0@sha256:63acb2df06f28c3ffdddaf6d9402105b9026796036aa10d7347ae4f7db51c36b,2018 +# nightly-2026-04-17 specifies http-api-data-0.6.7 +- http-api-data-0.7@sha256:b8145755fced98db1ac67f4f98c8010cd5f82c121a82b65addea2556b5fbd6d5,3157 +# nightly-2026-04-17 specifies hpack-0.39.3 +- hpack-0.39.5@sha256:c214f49ed3df75ce75201709927fa27e08f642023ed4b4ca8cc43604e5199ced,5263 +# nightly-2026-04-17 specifies rio-prettyprint-0.1.8.0 +- rio-prettyprint-0.1.9.0@sha256:f48afbfe28ed7be4d1a1bb96705669593dd5df12f28ed908516c6dfc4e0becbb,1375 +# nightly-2026-04-17 specifies tagged-0.8.9 +- tagged-0.8.10@sha256:e9b97c98e9827981d62f37c5febf9e6bbb67acec92b8bd41fd9f7ace5eb31d32,2201 +# nightly-2026-04-17 specifies tls-2.1.14 +- tls-2.2.2@sha256:95f5acd4ce76cbd6bdc46b737370dcbd93c59cf1cd1934a30e55c61c1dc140e9,7283 + +docker: + enable: false + repo: quay.io/benz0li/ghc-musl:9.14.1 + +nix: + # --nix on the command-line to enable. 
+ packages: + - zlib + - unzip + +flags: + stack: + developer-mode: true diff --git a/stack-ghc-9.14.1.yaml.lock b/stack-ghc-9.14.1.yaml.lock new file mode 100644 index 0000000000..741ab25b36 --- /dev/null +++ b/stack-ghc-9.14.1.yaml.lock @@ -0,0 +1,89 @@ +# This file was autogenerated by Stack. +# You should not edit this file by hand. +# For more information, please see the documentation at: +# https://docs.haskellstack.org/en/stable/topics/lock_files + +packages: +- completed: + hackage: Cabal-3.16.0.0@sha256:9972c2bd263168a20bd990962a68d4fd024f50c30a00519a6b942e8871d1bd67,14455 + pantry-tree: + sha256: ddc689c4070a034cacd73c39c1df2673ed16e0b38b5ca476fc6ef2aa87b6f40f + size: 12139 + original: + hackage: Cabal-3.16.0.0@sha256:9972c2bd263168a20bd990962a68d4fd024f50c30a00519a6b942e8871d1bd67,14455 +- completed: + hackage: Cabal-syntax-3.16.0.0@sha256:6a35036763557301876c5b7a448de4f1cb54fe1e223ff6c4c0c1fdd6df635a65,7509 + pantry-tree: + sha256: 4de976261238dd4045dd3addebf6a006d4aff3873ee680bac7af83dcbf068280 + size: 11238 + original: + hackage: Cabal-syntax-3.16.0.0@sha256:6a35036763557301876c5b7a448de4f1cb54fe1e223ff6c4c0c1fdd6df635a65,7509 +- completed: + hackage: crypton-x509-1.8.0@sha256:d4822ba8dcb19ee3233fc98152f5afda383ac952770a1d07f1d01168e9fcdbc2,2006 + pantry-tree: + sha256: 882a699e305fffcb77dd40bd40f959f062a8997a53560dca64462087aed3e4cd + size: 1132 + original: + hackage: crypton-x509-1.8.0@sha256:d4822ba8dcb19ee3233fc98152f5afda383ac952770a1d07f1d01168e9fcdbc2,2006 +- completed: + hackage: crypton-x509-store-1.8.0@sha256:075ba50a3daa0fdbb493481a665926e1ced2135c6b4ed56f97398aa855f0aecb,1674 + pantry-tree: + sha256: 2c7e00e593d399624264172cbd5e2fa55feaf8239cf33bee7e33bbc7509d0a2e + size: 458 + original: + hackage: crypton-x509-store-1.8.0@sha256:075ba50a3daa0fdbb493481a665926e1ced2135c6b4ed56f97398aa855f0aecb,1674 +- completed: + hackage: crypton-x509-system-1.8.0@sha256:76bab32c7d9cb3ea356a905f85829c70967fb6f9b4b890f00d67dc54130d45ca,1521 + pantry-tree: 
+ sha256: 9ac33c993a82ac4da84804028c5398caec7aada7a83beeec3f28883fe470d68f + size: 512 + original: + hackage: crypton-x509-system-1.8.0@sha256:76bab32c7d9cb3ea356a905f85829c70967fb6f9b4b890f00d67dc54130d45ca,1521 +- completed: + hackage: crypton-x509-validation-1.8.0@sha256:63acb2df06f28c3ffdddaf6d9402105b9026796036aa10d7347ae4f7db51c36b,2018 + pantry-tree: + sha256: 64baf1be6e65ade9ad8eaef819c9cc4c0fcb7d3b968641dcf28a3f859fe87dc5 + size: 691 + original: + hackage: crypton-x509-validation-1.8.0@sha256:63acb2df06f28c3ffdddaf6d9402105b9026796036aa10d7347ae4f7db51c36b,2018 +- completed: + hackage: http-api-data-0.7@sha256:b8145755fced98db1ac67f4f98c8010cd5f82c121a82b65addea2556b5fbd6d5,3157 + pantry-tree: + sha256: 4e67cbe25b355b6af68aee79c36b0f3c78428dbdd1be4298f4afaf000f3bc9a9 + size: 819 + original: + hackage: http-api-data-0.7@sha256:b8145755fced98db1ac67f4f98c8010cd5f82c121a82b65addea2556b5fbd6d5,3157 +- completed: + hackage: hpack-0.39.5@sha256:c214f49ed3df75ce75201709927fa27e08f642023ed4b4ca8cc43604e5199ced,5263 + pantry-tree: + sha256: 75fe87db5a37bf3941e29e2273f530ee7b2932d64bf813b55151a889e97305ca + size: 3799 + original: + hackage: hpack-0.39.5@sha256:c214f49ed3df75ce75201709927fa27e08f642023ed4b4ca8cc43604e5199ced,5263 +- completed: + hackage: rio-prettyprint-0.1.9.0@sha256:f48afbfe28ed7be4d1a1bb96705669593dd5df12f28ed908516c6dfc4e0becbb,1375 + pantry-tree: + sha256: c840f1193718b7af6c004fc62a67919e223b56cc1658a168c8e490e53a632aa0 + size: 779 + original: + hackage: rio-prettyprint-0.1.9.0@sha256:f48afbfe28ed7be4d1a1bb96705669593dd5df12f28ed908516c6dfc4e0becbb,1375 +- completed: + hackage: tagged-0.8.10@sha256:e9b97c98e9827981d62f37c5febf9e6bbb67acec92b8bd41fd9f7ace5eb31d32,2201 + pantry-tree: + sha256: 5ba1ff7ad9a676b89a2a6ab00ac2100f30a21440ef8e04094614ab5b5f494b3f + size: 438 + original: + hackage: tagged-0.8.10@sha256:e9b97c98e9827981d62f37c5febf9e6bbb67acec92b8bd41fd9f7ace5eb31d32,2201 +- completed: + hackage: 
tls-2.2.2@sha256:95f5acd4ce76cbd6bdc46b737370dcbd93c59cf1cd1934a30e55c61c1dc140e9,7283 + pantry-tree: + sha256: 6ddac1e644efe75dbf62bbd35fa38897caca307d62bac538ae29b0c54bf6ff00 + size: 7056 + original: + hackage: tls-2.2.2@sha256:95f5acd4ce76cbd6bdc46b737370dcbd93c59cf1cd1934a30e55c61c1dc140e9,7283 +snapshots: +- completed: + sha256: f4d90db84aefb7c9590dd224ebdabfd8304f81bbad4652a715ee8350c4b9d396 + size: 738802 + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/nightly/2026/4/17.yaml + original: nightly-2026-04-17 diff --git a/stack.cabal b/stack.cabal index dd3a82b63e..e32080aef3 100644 --- a/stack.cabal +++ b/stack.cabal @@ -1,32 +1,36 @@ -cabal-version: 2.0 +cabal-version: 2.2 --- This file has been generated from package.yaml by hpack version 0.33.0. +-- This file has been generated from package.yaml by hpack version 0.39.1. -- -- see: https://github.com/sol/hpack --- --- hash: e0fafccff758c3515731220888162806b25dd39ae06a130f30cba3f9955f20dc name: stack -version: 2.6.0 -synopsis: The Haskell Tool Stack -description: Please see the documentation at - for usage information. +version: 3.10.0 +synopsis: A program for developing Haskell projects +description: Stack (the Haskell Tool Stack) is a program for developing Haskell projects. + It is aimed at new and experienced users of Haskell and seeks to support them + fully on Linux, macOS and Windows. + . + For information about how to use Stack, see . . If building a 'stack' executable for distribution, please download the source code from - and build it using Stack itself in order to ensure identical behaviour - to official binaries. This package on Hackage is provided for convenience - and bootstrapping purposes. + and build it with Stack in order to ensure identical behaviour to official + binaries. . - Note that the API for the library is not currently stable, and may - change significantly, even between minor releases. It is - currently only intended for use by the executable. 
+ This package is provided on Hackage for convenience and bootstrapping + purposes. + . + Currently, the library exposed by the package is intended for use only by the + executable. The library's API may change significantly, even between minor + releases. category: Development homepage: http://haskellstack.org bug-reports: https://github.com/commercialhaskell/stack/issues author: Commercial Haskell SIG -maintainer: manny@fpcomplete.com -license: BSD3 +maintainer: Mike Pilgrem , + Emanuel Borsboom +license: BSD-3-Clause license-file: LICENSE build-type: Custom extra-source-files: @@ -34,38 +38,122 @@ extra-source-files: ChangeLog.md README.md stack.yaml - doc/azure_ci.md - doc/build_command.md doc/build_overview.md doc/ChangeLog.md + doc/commands/bench_command.md + doc/commands/build_command.md + doc/commands/clean_command.md + doc/commands/config_command.md + doc/commands/docker_command.md + doc/commands/dot_command.md + doc/commands/eval_command.md + doc/commands/exec_command.md + doc/commands/ghc_command.md + doc/commands/ghci_command.md + doc/commands/haddock_command.md + doc/commands/hoogle_command.md + doc/commands/hpc_command.md + doc/commands/ide_command.md + doc/commands/index.md + doc/commands/init_command.md + doc/commands/install_command.md + doc/commands/list_command.md + doc/commands/ls_command.md + doc/commands/new_command.md + doc/commands/path_command.md + doc/commands/purge_command.md + doc/commands/query_command.md + doc/commands/repl_command.md + doc/commands/run_command.md + doc/commands/runghc_command.md + doc/commands/runhaskell_command.md + doc/commands/script_command.md + doc/commands/sdist_command.md + doc/commands/setup_command.md + doc/commands/templates_command.md + doc/commands/test_command.md + doc/commands/uninstall_command.md + doc/commands/unpack_command.md + doc/commands/update_command.md + doc/commands/upgrade_command.md + doc/commands/upload_command.md + doc/community/index.md + doc/configure/customisation_scripts.md + 
doc/configure/environment_variables.md + doc/configure/global_flags.md + doc/configure/index.md + doc/configure/yaml/include.md + doc/configure/yaml/index.md + doc/configure/yaml/non-project.md + doc/configure/yaml/project.md doc/CONTRIBUTING.md - doc/coverage.md - doc/custom_snapshot.md - doc/dependency_visualization.md - doc/developing_on_windows.md - doc/docker_integration.md + doc/dev_containers.md doc/faq.md - doc/ghci.md - doc/ghcjs.md - doc/GUIDE.md + doc/glossary.md doc/install_and_upgrade.md - doc/lock_files.md - doc/nix_integration.md - doc/nonstandard_project_init.md - doc/pantry.md + doc/maintainers/7zip.md + doc/maintainers/archive/docker.md + doc/maintainers/archive/docker_images.md + doc/maintainers/archive/README.md + doc/maintainers/archive/releases.md + doc/maintainers/ghc.md + doc/maintainers/haskellstack.org.md + doc/maintainers/msys.md + doc/maintainers/releases.md + doc/maintainers/self-hosted_runners.md + doc/maintainers/stack_errors.md + doc/maintainers/team_process.md + doc/maintainers/version_scheme.md + doc/other_resources.md doc/README.md - doc/shell_autocompletion.md doc/SIGNING_KEY.md - doc/stack_yaml_vs_cabal_package_file.md - doc/travis_ci.md - doc/yaml_configuration.md + doc/topics/azure_ci.md + doc/topics/CI.md + doc/topics/custom_snapshot.md + doc/topics/debugging.md + doc/topics/developing_on_windows.md + doc/topics/docker_integration.md + doc/topics/editor_integration.md + doc/topics/GHC_from_source.md + doc/topics/haskell_and_c_code.md + doc/topics/index.md + doc/topics/lock_files.md + doc/topics/nix_integration.md + doc/topics/nonstandard_project_init.md + doc/topics/package_location.md + doc/topics/scripts.md + doc/topics/shell_autocompletion.md + doc/topics/snapshot_location.md + doc/topics/Stack_and_VS_Code.md + doc/topics/stack_root.md + doc/topics/stack_work.md + doc/topics/stack_yaml_vs_cabal_package_file.md + doc/topics/travis_ci.md + doc/tutorial/building_existing_projects.md + doc/tutorial/building_your_project.md + 
doc/tutorial/cabal_flags_and_ghc_options.md + doc/tutorial/executing_commands.md + doc/tutorial/hello_world_example.md + doc/tutorial/index.md + doc/tutorial/installed_package_databases.md + doc/tutorial/locations_used_by_stack.md + doc/tutorial/multi-package_projects.md + doc/tutorial/package_description.md + doc/tutorial/project_configuration.md + doc/tutorial/stack_build_synonyms.md + doc/tutorial/stack_build_targets.md + doc/tutorial/stack_configuration.md + doc/tutorial/tutorial_conclusion.md + doc/tutorial/using_ghc_interactively.md src/setup-shim/StackSetupShim.hs - test/package-dump/ghc-7.10.txt - test/package-dump/ghc-7.8.4-osx.txt - test/package-dump/ghc-7.8.txt - test/package-dump/ghc-head.txt - src/test/Stack/Untar/test1.tar.gz - src/test/Stack/Untar/test2.tar.gz + tests/unit/package-dump/ghc-7.10.txt + tests/unit/package-dump/ghc-7.8.4-osx.txt + tests/unit/package-dump/ghc-7.8.txt + tests/unit/package-dump/ghc-head.txt + tests/unit/Stack/Untar/test1.tar.gz + tests/unit/Stack/Untar/test2.tar.gz + cabal.project + cabal.config source-repository head type: git @@ -73,37 +161,42 @@ source-repository head custom-setup setup-depends: - Cabal - , base >=4.10 && <5 + Cabal >=3.14 && <3.18 + , base >=4.14.3.0 && <5 , filepath flag developer-mode - description: By default, should extra developer information be output? + description: By default, output extra developer information. manual: True default: False flag disable-git-info - description: Disable compile-time inclusion of current git info in stack + description: Disable inclusion of current Git information in the Stack executable when it is built. + manual: True + default: False + +flag disable-stack-upload + description: For use only during development and debugging. Disable 'stack upload' so that it does not make HTTP requests. Stack will output information about the HTTP request(s) that it would have made if the command was enabled. 
manual: True default: False flag hide-dependency-versions - description: Hides dependency versions from 'stack --version', used only by building Stack and the default 'stack.yaml'. Note to packagers/distributors: DO NOT OVERRIDE THIS FLAG IF YOU ARE BUILDING 'stack' ANY OTHER WAY (e.g. using cabal or from Hackage), as it makes debugging support requests more difficult. + description: Hides dependency versions from 'stack --version'. Used only when building a Stack executable for official release. Note to packagers/distributors: DO NOT OVERRIDE THIS FLAG IF YOU ARE BUILDING STACK ANY OTHER WAY (e.g. using Cabal or from Hackage), as it makes debugging support requests more difficult. manual: True default: False flag integration-tests - description: Run the integration test suite + description: Run the integration test suite. manual: True default: False flag static - description: Pass -static/-pthread to ghc when linking the stack binary. + description: When building the Stack executable, or the stack-integration-test executable, pass the -static and -pthread flags to the linker used by GHC. manual: True default: False flag supported-build - description: If false, causes 'stack --version' to issue a warning about the build being unsupported. Should be True only if building with Stack and the default 'stack.yaml'. Note to packagers/distributors: DO NOT OVERRIDE THIS FLAG IF YOU ARE BUILDING 'stack' ANY OTHER WAY (e.g. using cabal or from Hackage), as it makes debugging support requests more difficult. + description: If false, causes 'stack --version' to issue a warning about the build being unsupported. Used only when building a Stack executable for official release. Note to packagers/distributors: DO NOT OVERRIDE THIS FLAG IF YOU ARE BUILDING STACK ANY OTHER WAY (e.g. using Cabal or from Hackage), as it makes debugging support requests more difficult. 
manual: True default: False @@ -114,6 +207,7 @@ library Data.Attoparsec.Combinators Data.Attoparsec.Interpreter Data.Monoid.Map + GHC.Utils.GhcPkg.Main.Compat Network.HTTP.StackClient Options.Applicative.Args Options.Applicative.Builder.Extra @@ -121,27 +215,42 @@ library Path.CheckInstall Path.Extra Path.Find + Stack Stack.Build Stack.Build.Cache Stack.Build.ConstructPlan Stack.Build.Execute + Stack.Build.ExecuteEnv + Stack.Build.ExecutePackage Stack.Build.Haddock Stack.Build.Installed Stack.Build.Source Stack.Build.Target + Stack.BuildInfo + Stack.BuildOpts Stack.BuildPlan + Stack.CLI Stack.Clean + Stack.Component + Stack.ComponentFile Stack.Config Stack.Config.Build + Stack.Config.ConfigureScript Stack.Config.Docker Stack.Config.Nix + Stack.ConfigureOpts Stack.ConfigCmd Stack.Constants Stack.Constants.Config + Stack.Constants.StackProgName Stack.Coverage Stack.DefaultColorWhen + Stack.DependencyGraph Stack.Docker + Stack.DockerCmd Stack.Dot + Stack.Eval + Stack.Exec Stack.FileWatch Stack.GhcPkg Stack.Ghci @@ -159,29 +268,45 @@ library Stack.Options.BuildParser Stack.Options.CleanParser Stack.Options.ConfigParser + Stack.Options.ConfigEnvParser + Stack.Options.ConfigSetParser Stack.Options.Completion Stack.Options.DockerParser Stack.Options.DotParser + Stack.Options.EvalParser Stack.Options.ExecParser + Stack.Options.FlagsParser Stack.Options.GhcBuildParser Stack.Options.GhciParser Stack.Options.GhcVariantParser Stack.Options.GlobalParser Stack.Options.HaddockParser Stack.Options.HpcReportParser + Stack.Options.IdeParser + Stack.Options.InitParser Stack.Options.LogLevelParser + Stack.Options.LsParser Stack.Options.NewParser Stack.Options.NixParser Stack.Options.PackageParser - Stack.Options.ResolverParser - Stack.Options.ScriptParser + Stack.Options.PackagesParser + Stack.Options.PathParser + Stack.Options.PvpBoundsParser Stack.Options.SDistParser + Stack.Options.ScriptParser + Stack.Options.SetupParser + Stack.Options.SnapshotParser Stack.Options.TestParser + 
Stack.Options.UnpackParser + Stack.Options.UpgradeParser + Stack.Options.UploadParser Stack.Options.Utils Stack.Package Stack.PackageDump + Stack.PackageFile Stack.Path Stack.Prelude + Stack.Query Stack.Runners Stack.Script Stack.SDist @@ -192,58 +317,126 @@ library Stack.Storage.Project Stack.Storage.User Stack.Storage.Util + Stack.Templates + Stack.Types.AddCommand + Stack.Types.AllowNewerDeps + Stack.Types.ApplyGhcOptions + Stack.Types.ApplyProgOptions Stack.Types.Build + Stack.Types.Build.ConstructPlan + Stack.Types.Build.Exception + Stack.Types.BuildConfig + Stack.Types.BuildOpts + Stack.Types.BuildOptsCLI + Stack.Types.BuildOptsMonoid + Stack.Types.CabalConfigKey + Stack.Types.Cache + Stack.Types.Casa + Stack.Types.ColorWhen + Stack.Types.CompCollection Stack.Types.CompilerBuild + Stack.Types.CompilerPaths Stack.Types.Compiler + Stack.Types.Component + Stack.Types.ComponentUtils Stack.Types.Config - Stack.Types.Config.Build + Stack.Types.Config.Exception + Stack.Types.ConfigMonoid + Stack.Types.ConfigSetOpts + Stack.Types.ConfigureOpts + Stack.Types.Curator + Stack.Types.Dependency + Stack.Types.DependencyTree Stack.Types.Docker + Stack.Types.DockerEntrypoint + Stack.Types.DotConfig + Stack.Types.DotOpts + Stack.Types.DownloadInfo + Stack.Types.DumpLogs + Stack.Types.DumpPackage + Stack.Types.EnvConfig + Stack.Types.EnvSettings + Stack.Types.ExtraDirs + Stack.Types.FileDigestCache + Stack.Types.GHCDownloadInfo + Stack.Types.GHCVariant + Stack.Types.GhcOptionKey + Stack.Types.GhcOptions + Stack.Types.GhcPkgExe Stack.Types.GhcPkgId + Stack.Types.GhciOpts + Stack.Types.GlobalOpts + Stack.Types.GlobalOptsMonoid + Stack.Types.HpcReportOpts + Stack.Types.IdeOpts + Stack.Types.Installed + Stack.Types.InterfaceOpt + Stack.Types.IsMutable + Stack.Types.LockFileBehavior + Stack.Types.LsOpts + Stack.Types.MsysEnvironment Stack.Types.NamedComponent Stack.Types.Nix Stack.Types.Package + Stack.Types.PackageFile Stack.Types.PackageName - Stack.Types.Resolver + 
Stack.Types.ParentMap + Stack.Types.Plan + Stack.Types.Platform + Stack.Types.Project + Stack.Types.ProjectAndConfigMonoid + Stack.Types.ProjectConfig + Stack.Types.PvpBounds + Stack.Types.Runner + Stack.Types.SCM + Stack.Types.SDistOpts + Stack.Types.SetupInfo + Stack.Types.SetupOpts + Stack.Types.Snapshot Stack.Types.SourceMap + Stack.Types.StackYamlLoc + Stack.Types.Storage Stack.Types.TemplateName + Stack.Types.UnusedFlags + Stack.Types.UpgradeOpts + Stack.Types.UploadOpts Stack.Types.Version - Stack.Types.VersionIntervals + Stack.Types.VersionedDownloadInfo + Stack.Types.WantedCompilerSetter + Stack.Uninstall Stack.Unpack + Stack.Update Stack.Upgrade Stack.Upload System.Info.ShortPathName System.Permissions System.Process.Pager System.Terminal + Build_stack Paths_stack - other-modules: - Path.Extended - Stack.Types.Cache autogen-modules: + Build_stack Paths_stack hs-source-dirs: - src/ - ghc-options: -Wall -fwarn-tabs -fwarn-incomplete-uni-patterns -fwarn-incomplete-record-updates -optP-Wno-nonportable-include-path -fwarn-identities + src + ghc-options: -fwrite-ide-info -hiedir=.hie -Wall -Wmissing-export-lists -optP-Wno-nonportable-include-path -Widentities build-depends: - Cabal - , aeson - , annotated-wl-pprint - , ansi-terminal + Cabal >=3.14 && <3.17 + , aeson >=2.0.3.0 + , aeson-warning-parser >=0.1.1 + , ansi-terminal >=1.0.2 , array , async , attoparsec - , base >=4.10 && <5 + , base >=4.16.0.0 && <5 , base64-bytestring , bytestring - , casa-client - , casa-types - , colour + , casa-client >=0.0.2 + , companion , conduit , conduit-extra , containers - , cryptonite - , cryptonite-conduit - , deepseq + , crypton , directory , echo , exceptions @@ -251,64 +444,50 @@ library , file-embed , filelock , filepath - , fsnotify + , fsnotify >=0.4.1 , generic-deriving - , hackage-security + , ghc-boot , hashable - , hi-file-parser - , hpack + , hi-file-parser >=0.1.8.0 + , hpack >=0.36.0 , hpc , http-client - , http-client-tls + , http-client-tls >=0.3.6.2 , 
http-conduit - , http-download + , http-download >=0.2.1.0 , http-types , memory , microlens - , mintty - , mono-traversable , mtl , mustache , neat-interpolation - , network-uri , open-browser - , optparse-applicative >=0.14.3.0 - , pantry >=0.5.1.3 - , path + , optparse-applicative >=0.18.1.0 + , pantry >=0.11.0 + , path >=0.9.5 , path-io - , persistent + , persistent >=2.14.0.0 && <2.19 , persistent-sqlite - , persistent-template , pretty - , primitive - , process + , process >=1.6.13.2 , project-template - , regex-applicative-text - , retry - , rio >=0.1.18.0 - , rio-prettyprint >=0.1.1.0 - , semigroups + , random + , rio >=0.1.22.0 && (<0.1.23.0 || >0.1.23.0) + , rio-prettyprint >=0.1.8.0 + , semaphore-compat , split , stm - , streaming-commons - , tar + , tar >=0.6.2.0 , template-haskell - , temporary , text - , text-metrics - , th-reify-many , time - , tls , transformers - , typed-process - , unicode-transforms , unix-compat - , unliftio , unordered-containers , vector , yaml - , zip-archive , zlib + default-language: GHC2024 if os(windows) cpp-options: -DWINDOWS build-depends: @@ -318,56 +497,72 @@ library unix build-tool-depends: hsc2hs:hsc2hs + if impl(ghc >= 9.4.5) && os(windows) + build-depends: + network >=3.1.2.9 if flag(developer-mode) cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=True else cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=False + if flag(disable-stack-upload) + cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=True + else + cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=False if os(windows) other-modules: + Stack.Build.TestSuiteTimeout + Stack.Constants.UsrLibDirs + Stack.Docker.Handlers + System.Posix.User System.Uname hs-source-dirs: src/windows/ else other-modules: + Stack.Build.TestSuiteTimeout + Stack.Constants.UsrLibDirs + Stack.Docker.Handlers System.Uname hs-source-dirs: src/unix/ c-sources: src/unix/cbits/uname.c - default-language: Haskell2010 + if !(flag(disable-git-info)) + cpp-options: -DUSE_GIT_INFO + build-depends: + githash + , 
optparse-simple + if flag(hide-dependency-versions) + cpp-options: -DHIDE_DEP_VERSIONS + if flag(supported-build) + cpp-options: -DSUPPORTED_BUILD executable stack main-is: Main.hs other-modules: - BuildInfo - Build_stack Paths_stack autogen-modules: - Build_stack Paths_stack hs-source-dirs: - src/main - ghc-options: -Wall -fwarn-tabs -fwarn-incomplete-uni-patterns -fwarn-incomplete-record-updates -optP-Wno-nonportable-include-path -threaded -rtsopts + app + ghc-options: -fwrite-ide-info -hiedir=.hie -Wall -Wmissing-export-lists -optP-Wno-nonportable-include-path -threaded -rtsopts build-depends: - Cabal - , aeson - , annotated-wl-pprint - , ansi-terminal + Cabal >=3.14 && <3.17 + , aeson >=2.0.3.0 + , aeson-warning-parser >=0.1.1 + , ansi-terminal >=1.0.2 , array , async , attoparsec - , base >=4.10 && <5 + , base >=4.16.0.0 && <5 , base64-bytestring , bytestring - , casa-client - , casa-types - , colour + , casa-client >=0.0.2 + , companion , conduit , conduit-extra , containers - , cryptonite - , cryptonite-conduit - , deepseq + , crypton , directory , echo , exceptions @@ -375,65 +570,51 @@ executable stack , file-embed , filelock , filepath - , fsnotify + , fsnotify >=0.4.1 , generic-deriving - , hackage-security + , ghc-boot , hashable - , hi-file-parser - , hpack + , hi-file-parser >=0.1.8.0 + , hpack >=0.36.0 , hpc , http-client - , http-client-tls + , http-client-tls >=0.3.6.2 , http-conduit - , http-download + , http-download >=0.2.1.0 , http-types , memory , microlens - , mintty - , mono-traversable , mtl , mustache , neat-interpolation - , network-uri , open-browser - , optparse-applicative >=0.14.3.0 - , pantry >=0.5.1.3 - , path + , optparse-applicative >=0.18.1.0 + , pantry >=0.11.0 + , path >=0.9.5 , path-io - , persistent + , persistent >=2.14.0.0 && <2.19 , persistent-sqlite - , persistent-template , pretty - , primitive - , process + , process >=1.6.13.2 , project-template - , regex-applicative-text - , retry - , rio >=0.1.18.0 - , 
rio-prettyprint >=0.1.1.0 - , semigroups + , random + , rio >=0.1.22.0 && (<0.1.23.0 || >0.1.23.0) + , rio-prettyprint >=0.1.8.0 + , semaphore-compat , split , stack , stm - , streaming-commons - , tar + , tar >=0.6.2.0 , template-haskell - , temporary , text - , text-metrics - , th-reify-many , time - , tls , transformers - , typed-process - , unicode-transforms , unix-compat - , unliftio , unordered-containers , vector , yaml - , zip-archive , zlib + default-language: GHC2024 if os(windows) cpp-options: -DWINDOWS build-depends: @@ -443,52 +624,49 @@ executable stack unix build-tool-depends: hsc2hs:hsc2hs + if impl(ghc >= 9.4.5) && os(windows) + build-depends: + network >=3.1.2.9 if flag(developer-mode) cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=True else cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=False + if flag(disable-stack-upload) + cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=True + else + cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=False if flag(static) ld-options: -static -pthread - if !(flag(disable-git-info)) - cpp-options: -DUSE_GIT_INFO - build-depends: - githash - , optparse-simple - if flag(hide-dependency-versions) - cpp-options: -DHIDE_DEP_VERSIONS - if flag(supported-build) - cpp-options: -DSUPPORTED_BUILD - default-language: Haskell2010 executable stack-integration-test main-is: IntegrationSpec.hs other-modules: StackTest + StackTest.Repl + Paths_stack + autogen-modules: Paths_stack hs-source-dirs: - test/integration - test/integration/lib - ghc-options: -Wall -fwarn-tabs -fwarn-incomplete-uni-patterns -fwarn-incomplete-record-updates -optP-Wno-nonportable-include-path -threaded -rtsopts -with-rtsopts=-N + tests/integration + tests/integration/lib + ghc-options: -fwrite-ide-info -hiedir=.hie -Wall -Wmissing-export-lists -optP-Wno-nonportable-include-path -threaded -rtsopts -with-rtsopts=-N build-depends: - Cabal - , aeson - , annotated-wl-pprint - , ansi-terminal + Cabal >=3.14 && <3.17 + , aeson >=2.0.3.0 + , aeson-warning-parser >=0.1.1 + , 
ansi-terminal >=1.0.2 , array , async , attoparsec - , base >=4.10 && <5 + , base >=4.16.0.0 && <5 , base64-bytestring , bytestring - , casa-client - , casa-types - , colour + , casa-client >=0.0.2 + , companion , conduit , conduit-extra , containers - , cryptonite - , cryptonite-conduit - , deepseq + , crypton , directory , echo , exceptions @@ -496,66 +674,52 @@ executable stack-integration-test , file-embed , filelock , filepath - , fsnotify + , fsnotify >=0.4.1 , generic-deriving - , hackage-security + , ghc-boot , hashable - , hi-file-parser - , hpack + , hi-file-parser >=0.1.8.0 + , hpack >=0.36.0 , hpc , hspec , http-client - , http-client-tls + , http-client-tls >=0.3.6.2 , http-conduit - , http-download + , http-download >=0.2.1.0 , http-types , memory , microlens - , mintty - , mono-traversable , mtl , mustache , neat-interpolation - , network-uri , open-browser - , optparse-applicative >=0.14.3.0 + , optparse-applicative >=0.18.1.0 , optparse-generic - , pantry >=0.5.1.3 - , path + , pantry >=0.11.0 + , path >=0.9.5 , path-io - , persistent + , persistent >=2.14.0.0 && <2.19 , persistent-sqlite - , persistent-template , pretty - , primitive - , process + , process >=1.6.13.2 , project-template - , regex-applicative-text - , retry - , rio >=0.1.18.0 - , rio-prettyprint >=0.1.1.0 - , semigroups + , random + , rio >=0.1.22.0 && (<0.1.23.0 || >0.1.23.0) + , rio-prettyprint >=0.1.8.0 + , semaphore-compat , split , stm - , streaming-commons - , tar + , tar >=0.6.2.0 , template-haskell - , temporary , text - , text-metrics - , th-reify-many , time - , tls , transformers - , typed-process - , unicode-transforms , unix-compat - , unliftio , unordered-containers , vector , yaml - , zip-archive , zlib + default-language: GHC2024 if os(windows) cpp-options: -DWINDOWS build-depends: @@ -565,17 +729,23 @@ executable stack-integration-test unix build-tool-depends: hsc2hs:hsc2hs + if impl(ghc >= 9.4.5) && os(windows) + build-depends: + network >=3.1.2.9 if 
flag(developer-mode) cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=True else cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=False + if flag(disable-stack-upload) + cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=True + else + cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=False if !(flag(integration-tests)) buildable: False if flag(static) ld-options: -static -pthread - default-language: Haskell2010 -test-suite stack-test +test-suite stack-unit-test type: exitcode-stdio-1.0 main-is: Spec.hs other-modules: @@ -585,7 +755,6 @@ test-suite stack-test Stack.Config.DockerSpec Stack.ConfigSpec Stack.DotSpec - Stack.Ghci.PortableFakePaths Stack.Ghci.ScriptSpec Stack.GhciSpec Stack.LockSpec @@ -594,30 +763,29 @@ test-suite stack-test Stack.Types.TemplateNameSpec Stack.UploadSpec Paths_stack + autogen-modules: + Paths_stack hs-source-dirs: - src/test - ghc-options: -Wall -fwarn-tabs -fwarn-incomplete-uni-patterns -fwarn-incomplete-record-updates -optP-Wno-nonportable-include-path -threaded + tests/unit + ghc-options: -fwrite-ide-info -hiedir=.hie -Wall -Wmissing-export-lists -optP-Wno-nonportable-include-path -threaded build-depends: - Cabal + Cabal >=3.14 && <3.17 , QuickCheck - , aeson - , annotated-wl-pprint - , ansi-terminal + , aeson >=2.0.3.0 + , aeson-warning-parser >=0.1.1 + , ansi-terminal >=1.0.2 , array , async , attoparsec - , base >=4.10 && <5 + , base >=4.16.0.0 && <5 , base64-bytestring , bytestring - , casa-client - , casa-types - , colour + , casa-client >=0.0.2 + , companion , conduit , conduit-extra , containers - , cryptonite - , cryptonite-conduit - , deepseq + , crypton , directory , echo , exceptions @@ -625,68 +793,53 @@ test-suite stack-test , file-embed , filelock , filepath - , fsnotify + , fsnotify >=0.4.1 , generic-deriving - , hackage-security + , ghc-boot , hashable - , hi-file-parser - , hpack + , hi-file-parser >=0.1.8.0 + , hpack >=0.36.0 , hpc , hspec , http-client - , http-client-tls + , http-client-tls >=0.3.6.2 , http-conduit - , http-download + , 
http-download >=0.2.1.0 , http-types , memory , microlens - , mintty - , mono-traversable , mtl , mustache , neat-interpolation - , network-uri , open-browser - , optparse-applicative >=0.14.3.0 - , pantry >=0.5.1.3 - , path + , optparse-applicative >=0.18.1.0 + , pantry >=0.11.0 + , path >=0.9.5 , path-io - , persistent + , persistent >=2.14.0.0 && <2.19 , persistent-sqlite - , persistent-template , pretty - , primitive - , process + , process >=1.6.13.2 , project-template + , random , raw-strings-qq - , regex-applicative-text - , retry - , rio >=0.1.18.0 - , rio-prettyprint >=0.1.1.0 - , semigroups - , smallcheck + , rio >=0.1.22.0 && (<0.1.23.0 || >0.1.23.0) + , rio-prettyprint >=0.1.8.0 + , semaphore-compat , split , stack , stm - , streaming-commons - , tar + , tar >=0.6.2.0 , template-haskell - , temporary , text - , text-metrics - , th-reify-many , time - , tls , transformers - , typed-process - , unicode-transforms , unix-compat - , unliftio , unordered-containers , vector , yaml - , zip-archive , zlib + default-language: GHC2024 if os(windows) cpp-options: -DWINDOWS build-depends: @@ -696,8 +849,26 @@ test-suite stack-test unix build-tool-depends: hsc2hs:hsc2hs + if impl(ghc >= 9.4.5) && os(windows) + build-depends: + network >=3.1.2.9 if flag(developer-mode) cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=True else cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=False - default-language: Haskell2010 + if flag(disable-stack-upload) + cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=True + else + cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=False + if os(windows) + other-modules: + Stack.Ghci.FakePaths + hs-source-dirs: + tests/unit/windows/ + else + other-modules: + Stack.Ghci.FakePaths + hs-source-dirs: + tests/unit/unix/ + build-tool-depends: + hspec-discover:hspec-discover diff --git a/stack.yaml b/stack.yaml index c8c9c51cca..26339dc97d 100644 --- a/stack.yaml +++ b/stack.yaml @@ -1,39 +1,48 @@ -resolver: lts-14.27 +snapshot: lts-24.37 # GHC 9.10.3 -packages: -- . 
+extra-deps: +# lts-24.37 specifies Cabal-3.12.1.0 +- Cabal-3.16.0.0@sha256:9972c2bd263168a20bd990962a68d4fd024f50c30a00519a6b942e8871d1bd67,14455 +- Cabal-syntax-3.16.0.0@sha256:6a35036763557301876c5b7a448de4f1cb54fe1e223ff6c4c0c1fdd6df635a65,7509 +# lts-24.37 does not include crypton-asn1-* +- crypton-asn1-encoding-0.10.0@sha256:45494a1723a047a815d0d620481c1028dca28a4ea5cf2554868687da90753961,2497 +- crypton-asn1-parse-0.10.0@sha256:4a2cfc4980957d1a279ef69137ee5f665c247ccd8bb962812d5b071d543893fb,1359 +- crypton-asn1-types-0.4.1@sha256:02f3ec473011b3da92f7bf738bea19cadf88a6470b25a6cb5042216c7549c912,1326 +# lts-24.37 specifies crypton-x509-1.7.7 +- crypton-x509-1.8.0@sha256:d4822ba8dcb19ee3233fc98152f5afda383ac952770a1d07f1d01168e9fcdbc2,2006 +# lts-24.37 specifies crypton-x509-store-1.6.14 +- crypton-x509-store-1.8.0@sha256:075ba50a3daa0fdbb493481a665926e1ced2135c6b4ed56f97398aa855f0aecb,1674 +# lts-24.37 specifies crypton-x509-system-1.6.8 +- crypton-x509-system-1.8.0@sha256:76bab32c7d9cb3ea356a905f85829c70967fb6f9b4b890f00d67dc54130d45ca,1521 +# lts-24.37 specifies crypton-x509-validation-1.6.14 +- crypton-x509-validation-1.8.0@sha256:63acb2df06f28c3ffdddaf6d9402105b9026796036aa10d7347ae4f7db51c36b,2018 +# lts-24.37 specifies hpack-0.38.3 +- hpack-0.39.5@sha256:c214f49ed3df75ce75201709927fa27e08f642023ed4b4ca8cc43604e5199ced,5263 +# lts-24.37 specifies pantry-0.10.1 +- pantry-0.11.2@sha256:bc14e75f512deb22e0d9d645e62eb63b319d1732bfed6509491601215ecbd307,7896 +# lts-24.37 specifies persistent-2.17.1.0 +- persistent-2.18.0.0@sha256:baa3e0959cf10bbd1da462efeb61d4f073d0cc924a149325494ba5ce29bc17a4,7096 +# lts-24.37 specifies rio-prettyprint-0.1.8.0 +- rio-prettyprint-0.1.9.0@sha256:f48afbfe28ed7be4d1a1bb96705669593dd5df12f28ed908516c6dfc4e0becbb,1375 +# lts-24.37 does not include time-hourglass +- time-hourglass-0.3.0@sha256:ee02356fe24919ec43ae17fc0007398c2fd0bbe822833b2d7a9c849537b90580,3114 +# lts-24.37 specifies tls-2.1.8 +- 
tls-2.2.2@sha256:95f5acd4ce76cbd6bdc46b737370dcbd93c59cf1cd1934a30e55c61c1dc140e9,7283 docker: enable: false + repo: quay.io/benz0li/ghc-musl:9.10.3 - #image: fpco/alpine-haskell-stack:8.6.5 - image: fpco/alpine-haskell-stack@sha256:49e7e15f3b1d3f882ba5bb701463b1d508fbf40e5aafce6ea31acd210da570ba +# See LICENSE. In order to build a statically-linked Stack executable for Linux +# that is not linked against GMP, use this alternative Docker image: +# repo: quay.io/benz0li/ghc-musl:9.10.3-int-native nix: # --nix on the command-line to enable. packages: - - zlib - - unzip + - zlib + - unzip + flags: stack: - hide-dependency-versions: true - supported-build: true developer-mode: true - -ghc-options: - "$locals": -fhide-source-paths - -extra-deps: -- Cabal-3.0.0.0@rev:0 -- hpack-0.33.0@rev:0 -- http-download-0.2.0.0@rev:0 -- filelock-0.1.1.5@rev:0 -- rio-0.1.19.0@rev:0 -- rio-prettyprint-0.1.1.0@rev:0 -- casa-client-0.0.1@rev:0 -- casa-types-0.0.1@rev:0 -- pantry-0.5.1.3@rev:0 - -drop-packages: -# See https://github.com/commercialhaskell/stack/pull/4712 -- cabal-install diff --git a/stack.yaml.lock b/stack.yaml.lock new file mode 100644 index 0000000000..1a119f0797 --- /dev/null +++ b/stack.yaml.lock @@ -0,0 +1,117 @@ +# This file was autogenerated by Stack. +# You should not edit this file by hand. 
+# For more information, please see the documentation at: +# https://docs.haskellstack.org/en/stable/topics/lock_files + +packages: +- completed: + hackage: Cabal-3.16.0.0@sha256:9972c2bd263168a20bd990962a68d4fd024f50c30a00519a6b942e8871d1bd67,14455 + pantry-tree: + sha256: ddc689c4070a034cacd73c39c1df2673ed16e0b38b5ca476fc6ef2aa87b6f40f + size: 12139 + original: + hackage: Cabal-3.16.0.0@sha256:9972c2bd263168a20bd990962a68d4fd024f50c30a00519a6b942e8871d1bd67,14455 +- completed: + hackage: Cabal-syntax-3.16.0.0@sha256:6a35036763557301876c5b7a448de4f1cb54fe1e223ff6c4c0c1fdd6df635a65,7509 + pantry-tree: + sha256: 4de976261238dd4045dd3addebf6a006d4aff3873ee680bac7af83dcbf068280 + size: 11238 + original: + hackage: Cabal-syntax-3.16.0.0@sha256:6a35036763557301876c5b7a448de4f1cb54fe1e223ff6c4c0c1fdd6df635a65,7509 +- completed: + hackage: crypton-asn1-encoding-0.10.0@sha256:45494a1723a047a815d0d620481c1028dca28a4ea5cf2554868687da90753961,2497 + pantry-tree: + sha256: c2ff6c426e6910075a971ee2801d200c8dc0f88cfcb8619534097902e6e655a8 + size: 1011 + original: + hackage: crypton-asn1-encoding-0.10.0@sha256:45494a1723a047a815d0d620481c1028dca28a4ea5cf2554868687da90753961,2497 +- completed: + hackage: crypton-asn1-parse-0.10.0@sha256:4a2cfc4980957d1a279ef69137ee5f665c247ccd8bb962812d5b071d543893fb,1359 + pantry-tree: + sha256: c78bd2c09da1390c124cff07844ddb99778a8cff82df98c5d53b52adb04dfef9 + size: 326 + original: + hackage: crypton-asn1-parse-0.10.0@sha256:4a2cfc4980957d1a279ef69137ee5f665c247ccd8bb962812d5b071d543893fb,1359 +- completed: + hackage: crypton-asn1-types-0.4.1@sha256:02f3ec473011b3da92f7bf738bea19cadf88a6470b25a6cb5042216c7549c912,1326 + pantry-tree: + sha256: 03e810ce724980eacfd49ed816d769ae74769f3941d8cc3c9067de4a896eedf2 + size: 722 + original: + hackage: crypton-asn1-types-0.4.1@sha256:02f3ec473011b3da92f7bf738bea19cadf88a6470b25a6cb5042216c7549c912,1326 +- completed: + hackage: 
crypton-x509-1.8.0@sha256:d4822ba8dcb19ee3233fc98152f5afda383ac952770a1d07f1d01168e9fcdbc2,2006 + pantry-tree: + sha256: 882a699e305fffcb77dd40bd40f959f062a8997a53560dca64462087aed3e4cd + size: 1132 + original: + hackage: crypton-x509-1.8.0@sha256:d4822ba8dcb19ee3233fc98152f5afda383ac952770a1d07f1d01168e9fcdbc2,2006 +- completed: + hackage: crypton-x509-store-1.8.0@sha256:075ba50a3daa0fdbb493481a665926e1ced2135c6b4ed56f97398aa855f0aecb,1674 + pantry-tree: + sha256: 2c7e00e593d399624264172cbd5e2fa55feaf8239cf33bee7e33bbc7509d0a2e + size: 458 + original: + hackage: crypton-x509-store-1.8.0@sha256:075ba50a3daa0fdbb493481a665926e1ced2135c6b4ed56f97398aa855f0aecb,1674 +- completed: + hackage: crypton-x509-system-1.8.0@sha256:76bab32c7d9cb3ea356a905f85829c70967fb6f9b4b890f00d67dc54130d45ca,1521 + pantry-tree: + sha256: 9ac33c993a82ac4da84804028c5398caec7aada7a83beeec3f28883fe470d68f + size: 512 + original: + hackage: crypton-x509-system-1.8.0@sha256:76bab32c7d9cb3ea356a905f85829c70967fb6f9b4b890f00d67dc54130d45ca,1521 +- completed: + hackage: crypton-x509-validation-1.8.0@sha256:63acb2df06f28c3ffdddaf6d9402105b9026796036aa10d7347ae4f7db51c36b,2018 + pantry-tree: + sha256: 64baf1be6e65ade9ad8eaef819c9cc4c0fcb7d3b968641dcf28a3f859fe87dc5 + size: 691 + original: + hackage: crypton-x509-validation-1.8.0@sha256:63acb2df06f28c3ffdddaf6d9402105b9026796036aa10d7347ae4f7db51c36b,2018 +- completed: + hackage: hpack-0.39.5@sha256:c214f49ed3df75ce75201709927fa27e08f642023ed4b4ca8cc43604e5199ced,5263 + pantry-tree: + sha256: 75fe87db5a37bf3941e29e2273f530ee7b2932d64bf813b55151a889e97305ca + size: 3799 + original: + hackage: hpack-0.39.5@sha256:c214f49ed3df75ce75201709927fa27e08f642023ed4b4ca8cc43604e5199ced,5263 +- completed: + hackage: pantry-0.11.2@sha256:bc14e75f512deb22e0d9d645e62eb63b319d1732bfed6509491601215ecbd307,7896 + pantry-tree: + sha256: 56e76aecdbe8f9d13de1360b11e4a68aae5e072b17dfda49a333274079841e99 + size: 2722 + original: + hackage: 
pantry-0.11.2@sha256:bc14e75f512deb22e0d9d645e62eb63b319d1732bfed6509491601215ecbd307,7896 +- completed: + hackage: persistent-2.18.0.0@sha256:baa3e0959cf10bbd1da462efeb61d4f073d0cc924a149325494ba5ce29bc17a4,7096 + pantry-tree: + sha256: 41480a227b9c2ff72b29a8f6a8fddb4888bc656be052ad58f2fef508378089f3 + size: 7184 + original: + hackage: persistent-2.18.0.0@sha256:baa3e0959cf10bbd1da462efeb61d4f073d0cc924a149325494ba5ce29bc17a4,7096 +- completed: + hackage: rio-prettyprint-0.1.9.0@sha256:f48afbfe28ed7be4d1a1bb96705669593dd5df12f28ed908516c6dfc4e0becbb,1375 + pantry-tree: + sha256: c840f1193718b7af6c004fc62a67919e223b56cc1658a168c8e490e53a632aa0 + size: 779 + original: + hackage: rio-prettyprint-0.1.9.0@sha256:f48afbfe28ed7be4d1a1bb96705669593dd5df12f28ed908516c6dfc4e0becbb,1375 +- completed: + hackage: time-hourglass-0.3.0@sha256:ee02356fe24919ec43ae17fc0007398c2fd0bbe822833b2d7a9c849537b90580,3114 + pantry-tree: + sha256: 7d6acc1a643fe8692d1858c96cc04a417b8da53e53b6bdba6fe0ce6aa6aba774 + size: 1594 + original: + hackage: time-hourglass-0.3.0@sha256:ee02356fe24919ec43ae17fc0007398c2fd0bbe822833b2d7a9c849537b90580,3114 +- completed: + hackage: tls-2.2.2@sha256:95f5acd4ce76cbd6bdc46b737370dcbd93c59cf1cd1934a30e55c61c1dc140e9,7283 + pantry-tree: + sha256: 6ddac1e644efe75dbf62bbd35fa38897caca307d62bac538ae29b0c54bf6ff00 + size: 7056 + original: + hackage: tls-2.2.2@sha256:95f5acd4ce76cbd6bdc46b737370dcbd93c59cf1cd1934a30e55c61c1dc140e9,7283 +snapshots: +- completed: + sha256: 655e468f774beee1badf07dc4c45fb50288d5c66ce7bef6f487b7f92891a90b0 + size: 728965 + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/24/37.yaml + original: lts-24.37 diff --git a/test/integration/IntegrationSpec.hs b/test/integration/IntegrationSpec.hs deleted file mode 100644 index 7c2d5830c1..0000000000 --- a/test/integration/IntegrationSpec.hs +++ /dev/null @@ -1,254 +0,0 @@ -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE 
NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE ScopedTypeVariables #-} - -import Conduit -import Data.List (stripPrefix) -import Options.Generic -import RIO -import RIO.Char (toLower) -import RIO.Directory hiding (findExecutable) -import RIO.FilePath -import RIO.List (isInfixOf, partition) -import qualified RIO.Map as Map -import RIO.Process -import qualified RIO.Set as Set -import qualified RIO.Text as T -import System.Environment (lookupEnv, getExecutablePath) -import System.Info (os) -import System.PosixCompat.Files - --- This code does not use a test framework so that we get direct --- control of how the output is displayed. - -main :: IO () -main = runSimpleApp $ do - logInfo "Initiating Stack integration test running" - - options <- getRecord "Stack integration tests" - results <- runApp options $ do - logInfo "Running with the following environment" - proc "env" [] runProcess_ - tests <- asks appTestDirs - let count = Set.size tests - loop !idx rest !accum = - case rest of - [] -> pure accum - next:rest' -> do - logInfo $ "Running integration test " - <> display idx - <> "/" - <> display count - <> ": " - <> fromString (takeFileName next) - res <- test next - loop (idx + 1) rest' (res <> accum) - - loop (1 :: Int) (Set.toList tests) mempty - - let (successes, failures) = partition ((== ExitSuccess) . snd) - $ Map.toList results - - unless (null successes) $ do - logInfo "Successful tests:" - for_ successes $ \(x, _) -> logInfo $ "- " <> display x - logInfo "" - - if null failures - then logInfo "No failures!" - else do - logInfo "Failed tests:" - for_ failures $ \(x, ec) -> logInfo $ "- " <> display x <> " - " <> displayShow ec - exitFailure - -data Options = Options - { optSpeed :: Maybe Speed - , optMatch :: Maybe String - } deriving Generic - -instance ParseRecord Options where - parseRecord = parseRecordWithModifiers modifiers - where - optName = map toLower . 
drop 3 - modifiers = defaultModifiers { fieldNameModifier = optName - , shortNameModifier = firstLetter . optName - } - -data Speed = Fast | Normal | Superslow - deriving (Read, Generic) - -instance ParseField Speed - -exeExt :: String -exeExt = if isWindows then ".exe" else "" - -isWindows :: Bool -isWindows = os == "mingw32" - -runApp :: Options -> RIO App a -> RIO SimpleApp a -runApp options inner = do - let speed = fromMaybe Normal $ optSpeed options - simpleApp <- ask - runghc <- findExecutable "runghc" >>= either throwIO pure - srcDir <- canonicalizePath "" - testsRoot <- canonicalizePath $ srcDir "test/integration" - libdir <- canonicalizePath $ testsRoot "lib" - myPath <- liftIO getExecutablePath - - stack <- canonicalizePath $ takeDirectory myPath "stack" ++ exeExt - logInfo $ "Using stack located at " <> fromString stack - proc stack ["--version"] runProcess_ - - let matchTest = case optMatch options of - Nothing -> const True - Just str -> (str `isInfixOf`) - testDirs - <- runConduitRes - $ sourceDirectory (testsRoot "tests") - .| filterMC (liftIO . hasTest) - .| filterC matchTest - .| foldMapC Set.singleton - - let modifyEnvCommon - = Map.insert "SRC_DIR" (fromString srcDir) - . Map.insert "STACK_EXE" (fromString stack) - . Map.delete "GHC_PACKAGE_PATH" - . Map.insert "STACK_TEST_SPEED" - (case speed of - Superslow -> "SUPERSLOW" - _ -> "NORMAL") - . Map.fromList - . map (first T.toUpper) - . 
Map.toList - - case speed of - Fast -> do - let app = App - { appSimpleApp = simpleApp - , appRunghc = runghc - , appLibDir = libdir - , appSetupHome = id - , appTestDirs = testDirs - } - runRIO app $ withModifyEnvVars modifyEnvCommon inner - _ -> do - morigStackRoot <- liftIO $ lookupEnv "STACK_ROOT" - origStackRoot <- - case morigStackRoot of - Nothing -> getAppUserDataDirectory "stack" - Just x -> pure x - - logInfo "Initializing/updating the original Pantry store" - proc stack ["update"] runProcess_ - - pantryRoot <- canonicalizePath $ origStackRoot "pantry" - let modifyEnv - = Map.insert "PANTRY_ROOT" (fromString pantryRoot) - . modifyEnvCommon - - app = App - { appSimpleApp = simpleApp - , appRunghc = runghc - , appLibDir = libdir - , appSetupHome = \inner' -> withSystemTempDirectory "home" $ \newHome -> do - let newStackRoot = newHome ".stack" - createDirectoryIfMissing True newStackRoot - let modifyEnv' - = Map.insert "HOME" (fromString newHome) - . Map.insert "APPDATA" (fromString newHome) - . Map.insert "STACK_ROOT" (fromString newStackRoot) - writeFileBinary (newStackRoot "config.yaml") "system-ghc: true\ninstall-ghc: false\n" - withModifyEnvVars modifyEnv' inner' - , appTestDirs = testDirs - } - - runRIO app $ withModifyEnvVars modifyEnv inner - - -hasTest :: FilePath -> IO Bool -hasTest dir = doesFileExist $ dir "Main.hs" - -data App = App - { appRunghc :: !FilePath - , appLibDir :: !FilePath - , appSetupHome :: !(forall a. 
RIO App a -> RIO App a) - , appSimpleApp :: !SimpleApp - , appTestDirs :: !(Set FilePath) - } -simpleAppL :: Lens' App SimpleApp -simpleAppL = lens appSimpleApp (\x y -> x { appSimpleApp = y }) -instance HasLogFunc App where - logFuncL = simpleAppL.logFuncL -instance HasProcessContext App where - processContextL = simpleAppL.processContextL - --- | Call 'appSetupHome' on the inner action -withHome :: RIO App a -> RIO App a -withHome inner = do - app <- ask - appSetupHome app inner - -test :: FilePath -- ^ test dir - -> RIO App (Map Text ExitCode) -test testDir = withDir $ \dir -> withHome $ do - runghc <- asks appRunghc - libDir <- asks appLibDir - let mainFile = testDir "Main.hs" - - copyTree (testDir "files") dir - - withSystemTempFile (name <.> "log") $ \logfp logh -> do - ec <- withWorkingDir dir - $ withModifyEnvVars (Map.insert "TEST_DIR" $ fromString testDir) - $ proc runghc - [ "-clear-package-db" - , "-global-package-db" - , "-i" ++ libDir - , mainFile - ] - $ runProcess - . setStdin closed - . setStdout (useHandleOpen logh) - . setStderr (useHandleOpen logh) - hClose logh - - case ec of - ExitSuccess -> logInfo "Success!" 
- _ -> do - logError "Failure, dumping log\n\n" - withSourceFile logfp $ \src -> - runConduit $ src .| stderrC - logError $ "\n\nEnd of log for " <> fromString name - pure $ Map.singleton (fromString name) ec - where - name = takeFileName testDir - withDir = withSystemTempDirectory ("stack-integration-" ++ name) - -copyTree :: MonadIO m => FilePath -> FilePath -> m () -copyTree src dst = - liftIO $ - runResourceT (sourceDirectoryDeep False src `connect` mapM_C go) - `catch` \(_ :: IOException) -> return () - where - go srcfp = liftIO $ do - Just suffix <- return $ stripPrefix src srcfp - let dstfp = dst stripHeadSeparator suffix - createDirectoryIfMissing True $ takeDirectory dstfp - -- copying yaml files so lock files won't get created in - -- the source directory - if takeFileName srcfp /= "package.yaml" && - (takeExtensions srcfp == ".yaml" || takeExtensions srcfp == ".yml") - then - copyFile srcfp dstfp - else - createSymbolicLink srcfp dstfp `catch` \(_ :: IOException) -> - copyFile srcfp dstfp -- for Windows - - stripHeadSeparator :: FilePath -> FilePath - stripHeadSeparator [] = [] - stripHeadSeparator fp@(x:xs) = if isPathSeparator x - then xs - else fp diff --git a/test/integration/README.md b/test/integration/README.md deleted file mode 100644 index 95d4384f34..0000000000 --- a/test/integration/README.md +++ /dev/null @@ -1,44 +0,0 @@ -# Stack Integration Tests - -This directory contains a bunch of integration tests for Stack. Each -directory inside the `tests` subdirectory represents a single -test. Each of those directories has: - -* A `Main.hs` file, which provides the script to be run -* A `files `directory, providing the working directory the script will - be run from. (If you have a test that doesn't require any specific - working directory, there may be no `files` directory.) - -It would be great to expand this file into a full tutorial, but for -now, the easiest way to get started with writing an integration test -is to copy an existing example. 
- -## Running - -One simple way to run a single test is: - -* Change into the `files` directory -* Run the command `stack runghc -- -i../../../lib ../Main.hs` - -A more thorough way to run the tests is with - -```shell -$ stack build --flag stack:integration-tests stack --exec stack-integration-test -``` - -Note that this command can take a _long_ time. It's also more thorough -than the quick command given above, as it will run each test with a -clean `STACK_ROOT`. - -## Helper scripts - -There are two helper scripts in this directory. Note that these may -not always work as anticipated, since some of the tests expect a clean -`STACK_ROOT`, and these scripts do not set that up. - -* `run-sort-tests.sh` will run all of the tests in the `tests` - directory, and move the successful ones into `tests-success`, and - the failing ones into `tests-fail`. It will keep the logs of failing - tests in `logs`. -* `run-single-test.sh` takes a single argument (the name of a test), - and runs just that test. 
diff --git a/test/integration/lib/StackTest.hs b/test/integration/lib/StackTest.hs deleted file mode 100644 index 4355ed2c76..0000000000 --- a/test/integration/lib/StackTest.hs +++ /dev/null @@ -1,310 +0,0 @@ -{-#LANGUAGE ScopedTypeVariables#-} - -module StackTest where - -import Control.Monad -import Control.Monad.IO.Class -import Control.Monad.Trans.Reader -import Control.Concurrent -import Control.Exception -import System.Environment -import System.Directory -import System.IO -import System.IO.Error -import System.Process -import System.Exit -import System.Info (arch, os) -import GHC.Stack (HasCallStack) - -run' :: HasCallStack => FilePath -> [String] -> IO ExitCode -run' cmd args = do - logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args) - (Nothing, Nothing, Nothing, ph) <- createProcess (proc cmd args) - waitForProcess ph - -run :: HasCallStack => FilePath -> [String] -> IO () -run cmd args = do - ec <- run' cmd args - unless (ec == ExitSuccess) $ error $ "Exited with exit code: " ++ show ec - -runShell :: HasCallStack => String -> IO () -runShell cmd = do - logInfo $ "Running: " ++ cmd - (Nothing, Nothing, Nothing, ph) <- createProcess (shell cmd) - ec <- waitForProcess ph - unless (ec == ExitSuccess) $ error $ "Exited with exit code: " ++ show ec - -runWithCwd :: HasCallStack => FilePath -> String -> [String] -> IO String -runWithCwd cwdPath cmd args = do - logInfo $ "Running: " ++ cmd - let cp = proc cmd args - (ec, stdoutStr, _) <- readCreateProcessWithExitCode (cp { cwd = Just cwdPath }) "" - unless (ec == ExitSuccess) $ error $ "Exited with exit code: " ++ show ec - return stdoutStr - -stackExe :: IO String -stackExe = getEnv "STACK_EXE" - -stackSrc :: IO String -stackSrc = getEnv "SRC_DIR" - -testDir :: IO String -testDir = getEnv "TEST_DIR" - -stack' :: HasCallStack => [String] -> IO ExitCode -stack' args = do - stackEnv <- stackExe - run' stackEnv args - -stack :: HasCallStack => [String] -> IO () -stack args = do - ec <- 
stack' args - unless (ec == ExitSuccess) $ error $ "Exited with exit code: " ++ show ec - --- Temporary workaround for Windows to ignore exceptions arising out --- of Windows when we do stack clean. More info here: https://github.com/commercialhaskell/stack/issues/4936 -stackCleanFull :: HasCallStack => IO () -stackCleanFull = stackIgnoreException ["clean", "--full"] - --- Temporary workaround for Windows to ignore exceptions arising out --- of Windows when we do stack clean. More info here: https://github.com/commercialhaskell/stack/issues/4936 -stackIgnoreException :: HasCallStack => [String] -> IO () -stackIgnoreException args = if isWindows - then void (stack' args) `catch` (\(_e :: IOException) -> return ()) - else stack args - -stackErr :: HasCallStack => [String] -> IO () -stackErr args = do - ec <- stack' args - when (ec == ExitSuccess) $ error "stack was supposed to fail, but didn't" - -type Repl = ReaderT ReplConnection IO - -data ReplConnection - = ReplConnection - { replStdin :: Handle - , replStdout :: Handle - } - -nextPrompt :: Repl () -nextPrompt = do - (ReplConnection _ inputHandle) <- ask - c <- liftIO $ hGetChar inputHandle - if c == '>' - then do _ <- liftIO $ hGetChar inputHandle - return () - else nextPrompt - -replCommand :: String -> Repl () -replCommand cmd = do - (ReplConnection input _) <- ask - liftIO $ hPutStrLn input cmd - -replGetLine :: Repl String -replGetLine = ask >>= liftIO . hGetLine . replStdout - -replGetChar :: Repl Char -replGetChar = ask >>= liftIO . hGetChar . 
replStdout - -runRepl :: HasCallStack => FilePath -> [String] -> ReaderT ReplConnection IO () -> IO ExitCode -runRepl cmd args actions = do - logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args) - (Just rStdin, Just rStdout, Just rStderr, ph) <- - createProcess (proc cmd args) - { std_in = CreatePipe - , std_out = CreatePipe - , std_err = CreatePipe - } - hSetBuffering rStdin NoBuffering - hSetBuffering rStdout NoBuffering - hSetBuffering rStderr NoBuffering - - _ <- forkIO $ withFile "/tmp/stderr" WriteMode - $ \err -> do - hSetBuffering err NoBuffering - forever $ catch (hGetChar rStderr >>= hPutChar err) - $ \e -> unless (isEOFError e) $ throw e - - runReaderT (nextPrompt >> actions) (ReplConnection rStdin rStdout) - waitForProcess ph - -repl :: HasCallStack => [String] -> Repl () -> IO () -repl args action = do - stackExe' <- stackExe - ec <- runRepl stackExe' ("repl":args) action - unless (ec == ExitSuccess) $ return () - -- TODO: Understand why the exit code is 1 despite running GHCi tests - -- successfully. - -- else error $ "Exited with exit code: " ++ show ec - -stackStderr :: HasCallStack => [String] -> IO (ExitCode, String) -stackStderr args = do - stackExe' <- stackExe - logInfo $ "Running: " ++ stackExe' ++ " " ++ unwords (map showProcessArgDebug args) - (ec, _, err) <- readProcessWithExitCode stackExe' args "" - hPutStr stderr err - return (ec, err) - --- | Run stack with arguments and apply a check to the resulting --- stderr output if the process succeeded. -stackCheckStderr :: HasCallStack => [String] -> (String -> IO ()) -> IO () -stackCheckStderr args check = do - (ec, err) <- stackStderr args - if ec /= ExitSuccess - then error $ "Exited with exit code: " ++ show ec - else check err - --- | Same as 'stackCheckStderr', but ensures that the Stack process --- fails. 
-stackErrStderr :: HasCallStack => [String] -> (String -> IO ()) -> IO () -stackErrStderr args check = do - (ec, err) <- stackStderr args - if ec == ExitSuccess - then error "Stack process succeeded, but it shouldn't" - else check err - -runEx :: HasCallStack => FilePath -> String -> IO (ExitCode, String, String) -runEx cmd args = runEx' cmd $ words args - -runEx' :: HasCallStack => FilePath -> [String] -> IO (ExitCode, String, String) -runEx' cmd args = do - logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args) - (ec, out, err) <- readProcessWithExitCode cmd args "" - putStr out - hPutStr stderr err - return (ec, out, err) - --- | Run stack with arguments and apply a check to the resulting --- stdout output if the process succeeded. --- --- Take care with newlines; if the output includes a newline character that --- should not be there, use 'Data.List.Extra.trimEnd' to remove it. -stackCheckStdout :: HasCallStack => [String] -> (String -> IO ()) -> IO () -stackCheckStdout args check = do - stackExe' <- stackExe - (ec, out, _) <- runEx' stackExe' args - if ec /= ExitSuccess - then error $ "Exited with exit code: " ++ show ec - else check out - -doesNotExist :: HasCallStack => FilePath -> IO () -doesNotExist fp = do - logInfo $ "doesNotExist " ++ fp - exists <- doesFileOrDirExist fp - case exists of - (Right msg) -> error msg - (Left _) -> return () - -doesExist :: HasCallStack => FilePath -> IO () -doesExist fp = do - logInfo $ "doesExist " ++ fp - exists <- doesFileOrDirExist fp - case exists of - (Right _) -> return () - (Left _) -> error "No file or directory exists" - -doesFileOrDirExist :: HasCallStack => FilePath -> IO (Either () String) -doesFileOrDirExist fp = do - isFile <- doesFileExist fp - if isFile - then return (Right ("File exists: " ++ fp)) - else do - isDir <- doesDirectoryExist fp - if isDir - then return (Right ("Directory exists: " ++ fp)) - else return (Left ()) - -copy :: HasCallStack => FilePath -> FilePath -> IO () 
-copy src dest = do - logInfo ("Copy " ++ show src ++ " to " ++ show dest) - System.Directory.copyFile src dest - -fileContentsMatch :: HasCallStack => FilePath -> FilePath -> IO () -fileContentsMatch f1 f2 = do - doesExist f1 - doesExist f2 - f1Contents <- readFile f1 - f2Contents <- readFile f2 - unless (f1Contents == f2Contents) $ - error ("contents do not match for " ++ show f1 ++ " " ++ show f2) - -logInfo :: String -> IO () -logInfo = hPutStrLn stderr - --- TODO: use stack's process running utilties? (better logging) --- for now just copy+modifying this one from System.Process.Log - --- | Show a process arg including speechmarks when necessary. Just for --- debugging purposes, not functionally important. -showProcessArgDebug :: String -> String -showProcessArgDebug x - | any special x = show x - | otherwise = x - where special '"' = True - special ' ' = True - special _ = False - --- | Extension of executables -exeExt :: String -exeExt = if isWindows then ".exe" else "" - --- | Is the OS Windows? -isWindows :: Bool -isWindows = os == "mingw32" - --- | Is the OS Alpine Linux? -getIsAlpine :: IO Bool -getIsAlpine = doesFileExist "/etc/alpine-release" - --- | Is the architecture ARM? -isARM :: Bool -isARM = arch == "arm" - --- | Is the OS Mac OS X? -isMacOSX :: Bool -isMacOSX = os == "darwin" - --- | To avoid problems with GHC version mismatch when a new LTS major --- version is released, pass this argument to @stack@ when running in --- a global context. The LTS major version here should match that of --- the main @stack.yaml@. --- -defaultResolverArg :: String -defaultResolverArg = "--resolver=lts-14.27" - --- | Remove a file and ignore any warnings about missing files. -removeFileIgnore :: HasCallStack => FilePath -> IO () -removeFileIgnore fp = removeFile fp `catch` \e -> - if isDoesNotExistError e - then return () - else throwIO e - --- | Remove a directory and ignore any warnings about missing files. 
-removeDirIgnore :: HasCallStack => FilePath -> IO () -removeDirIgnore fp = removeDirectoryRecursive fp `catch` \e -> - if isDoesNotExistError e - then return () - else throwIO e - --- | Changes working directory to Stack source directory -withSourceDirectory :: HasCallStack => IO () -> IO () -withSourceDirectory action = do - dir <- stackSrc - currentDirectory <- getCurrentDirectory - let enterDir = setCurrentDirectory dir - exitDir = setCurrentDirectory currentDirectory - bracket_ enterDir exitDir action - --- | Mark a test as superslow, only to be run when explicitly requested. -superslow :: HasCallStack => IO () -> IO () -superslow inner = do - mres <- lookupEnv "STACK_TEST_SPEED" - case mres of - Just "NORMAL" -> logInfo "Skipping superslow test" - Just "SUPERSLOW" -> do - logInfo "Running superslow test, hold on to your butts" - inner - Nothing -> do - logInfo "No STACK_TEST_SPEED specified. Executing superslow test, hold on to your butts" - inner - Just x -> error $ "Invalid value for STACK_TEST_SPEED env var: " ++ show x diff --git a/test/integration/tests/111-custom-snapshot/Main.hs b/test/integration/tests/111-custom-snapshot/Main.hs deleted file mode 100644 index e61b083aa4..0000000000 --- a/test/integration/tests/111-custom-snapshot/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: IO () -main = stack ["build"] diff --git a/test/integration/tests/111-custom-snapshot/files/files.cabal b/test/integration/tests/111-custom-snapshot/files/files.cabal deleted file mode 100644 index 66525ce056..0000000000 --- a/test/integration/tests/111-custom-snapshot/files/files.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5, mtl - default-language: Haskell2010 diff --git a/test/integration/tests/111-custom-snapshot/files/my-snapshot.yaml 
b/test/integration/tests/111-custom-snapshot/files/my-snapshot.yaml deleted file mode 100644 index f365539a82..0000000000 --- a/test/integration/tests/111-custom-snapshot/files/my-snapshot.yaml +++ /dev/null @@ -1,4 +0,0 @@ -compiler: ghc-8.6.5 -name: my-snapshot -packages: -- mtl-2.2.1 diff --git a/test/integration/tests/111-custom-snapshot/files/src/Lib.hs b/test/integration/tests/111-custom-snapshot/files/src/Lib.hs deleted file mode 100644 index 4844564d1b..0000000000 --- a/test/integration/tests/111-custom-snapshot/files/src/Lib.hs +++ /dev/null @@ -1,8 +0,0 @@ -module Lib - ( someFunc - ) where - -import Control.Monad.Reader () - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/111-custom-snapshot/files/stack.yaml b/test/integration/tests/111-custom-snapshot/files/stack.yaml deleted file mode 100644 index 6ae11d4e7a..0000000000 --- a/test/integration/tests/111-custom-snapshot/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: my-snapshot.yaml diff --git a/test/integration/tests/1198-multiple-exes-with-same-name/Main.hs b/test/integration/tests/1198-multiple-exes-with-same-name/Main.hs deleted file mode 100644 index 4e8344263d..0000000000 --- a/test/integration/tests/1198-multiple-exes-with-same-name/Main.hs +++ /dev/null @@ -1,32 +0,0 @@ -import Control.Monad (unless,when) -import Data.List (isInfixOf) -import StackTest - -main :: IO () -main = do - stack [defaultResolverArg, "clean"] - stack [defaultResolverArg, "init", "--force"] - stackCheckStderr - ["build", "also-has-exe-foo", "has-exe-foo"] - (expectMessage buildMessage1) - stackCheckStderr - ["build", "has-exe-foo-too"] - (expectMessage buildMessage2) - -expectMessage :: String -> String -> IO () -expectMessage msg stderr = - unless (msg `isInfixOf` stderr) - (error $ "Expected a warning: \n" ++ show msg) - -buildMessage1 = - unlines - [ "Building several executables with the same name: 'also-has-exe-foo:foo', 'has-exe-foo:foo'." 
- , "Only one of them will be available via 'stack exec' or locally installed." - , "Other executables with the same name might be overwritten: 'has-exe-foo-too:foo'." - ] - -buildMessage2 = - unlines - [ "Building executable 'has-exe-foo-too:foo'." - , "Other executables with the same name might be overwritten: 'also-has-exe-foo:foo', 'has-exe-foo:foo'." - ] diff --git a/test/integration/tests/1198-multiple-exes-with-same-name/files/also-has-exe-foo/also-has-exe-foo.cabal b/test/integration/tests/1198-multiple-exes-with-same-name/files/also-has-exe-foo/also-has-exe-foo.cabal deleted file mode 100644 index 97d9c1fd90..0000000000 --- a/test/integration/tests/1198-multiple-exes-with-same-name/files/also-has-exe-foo/also-has-exe-foo.cabal +++ /dev/null @@ -1,11 +0,0 @@ -name: also-has-exe-foo -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -executable foo - hs-source-dirs: app - main-is: Main.hs - ghc-options: -threaded -rtsopts -with-rtsopts=-N - build-depends: base - default-language: Haskell2010 diff --git a/test/integration/tests/1198-multiple-exes-with-same-name/files/also-has-exe-foo/app/Main.hs b/test/integration/tests/1198-multiple-exes-with-same-name/files/also-has-exe-foo/app/Main.hs deleted file mode 100644 index ed0f47faf7..0000000000 --- a/test/integration/tests/1198-multiple-exes-with-same-name/files/also-has-exe-foo/app/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Main where - -main :: IO () -main = putStrLn "This is foo from also-has-exe-foo" diff --git a/test/integration/tests/1198-multiple-exes-with-same-name/files/app/Main.hs b/test/integration/tests/1198-multiple-exes-with-same-name/files/app/Main.hs deleted file mode 100644 index d62e2f2117..0000000000 --- a/test/integration/tests/1198-multiple-exes-with-same-name/files/app/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Main where - -main :: IO () -main = putStrLn "This is foo from has-exe-foo" diff --git 
a/test/integration/tests/1198-multiple-exes-with-same-name/files/has-exe-foo-too/app/Main.hs b/test/integration/tests/1198-multiple-exes-with-same-name/files/has-exe-foo-too/app/Main.hs deleted file mode 100644 index 90ac2ae946..0000000000 --- a/test/integration/tests/1198-multiple-exes-with-same-name/files/has-exe-foo-too/app/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Main where - -main :: IO () -main = putStrLn "This is foo from has-exe-foo-too" diff --git a/test/integration/tests/1198-multiple-exes-with-same-name/files/has-exe-foo-too/has-exe-foo-too.cabal b/test/integration/tests/1198-multiple-exes-with-same-name/files/has-exe-foo-too/has-exe-foo-too.cabal deleted file mode 100644 index 05db41b4b7..0000000000 --- a/test/integration/tests/1198-multiple-exes-with-same-name/files/has-exe-foo-too/has-exe-foo-too.cabal +++ /dev/null @@ -1,11 +0,0 @@ -name: has-exe-foo-too -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -executable foo - hs-source-dirs: app - main-is: Main.hs - ghc-options: -threaded -rtsopts -with-rtsopts=-N - build-depends: base - default-language: Haskell2010 diff --git a/test/integration/tests/1198-multiple-exes-with-same-name/files/has-exe-foo.cabal b/test/integration/tests/1198-multiple-exes-with-same-name/files/has-exe-foo.cabal deleted file mode 100644 index 19b222b3a1..0000000000 --- a/test/integration/tests/1198-multiple-exes-with-same-name/files/has-exe-foo.cabal +++ /dev/null @@ -1,11 +0,0 @@ -name: has-exe-foo -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -executable foo - hs-source-dirs: app - main-is: Main.hs - ghc-options: -threaded -rtsopts -with-rtsopts=-N - build-depends: base - default-language: Haskell2010 diff --git a/test/integration/tests/1265-extensible-snapshots/Main.hs b/test/integration/tests/1265-extensible-snapshots/Main.hs deleted file mode 100644 index 606ea188a7..0000000000 --- a/test/integration/tests/1265-extensible-snapshots/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest 
- -main :: IO () -main = do - stack ["build", "async"] - stackErr ["build", "zlib-bindings"] - stack ["build", "--stack-yaml", "stack-modify-lts.yaml", "async"] - stack ["build", "--stack-yaml", "stack-local-snapshot.yaml", "async"] - stack ["build", "--stack-yaml", "stack-remote-snapshot.yaml", "async"] - stackErr ["build", "--stack-yaml", "stack-modify-lts.yaml", "zlib-bindings"] diff --git a/test/integration/tests/1265-extensible-snapshots/files/snapshot-2.yaml b/test/integration/tests/1265-extensible-snapshots/files/snapshot-2.yaml deleted file mode 100644 index f3423ae07d..0000000000 --- a/test/integration/tests/1265-extensible-snapshots/files/snapshot-2.yaml +++ /dev/null @@ -1,9 +0,0 @@ -resolver: ghc-8.6.5 -name: test-snapshot-2 -packages: -- stm-2.5.0.0 -- async-2.1.0 -- zlib-0.6.2.1@sha256:201b3f7d4edd0a6a0b235105a4d1aa8d30dcb7c8b4043102487407a4753657ef,4508 -# FIXME: test these here -flags: {} -ghc-options: {} diff --git a/test/integration/tests/1265-extensible-snapshots/files/snapshot-modify-lts.yaml b/test/integration/tests/1265-extensible-snapshots/files/snapshot-modify-lts.yaml deleted file mode 100644 index 002d43c0a0..0000000000 --- a/test/integration/tests/1265-extensible-snapshots/files/snapshot-modify-lts.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: lts-14.27 -name: snapshot-modify-lts -drop-packages: -- zlib diff --git a/test/integration/tests/1265-extensible-snapshots/files/snapshot.yaml b/test/integration/tests/1265-extensible-snapshots/files/snapshot.yaml deleted file mode 100644 index fb53016c66..0000000000 --- a/test/integration/tests/1265-extensible-snapshots/files/snapshot.yaml +++ /dev/null @@ -1,6 +0,0 @@ -resolver: snapshot-2.yaml -name: test-snapshot -packages: -- microlens-0.4.3.0 -drop-packages: -- zlib diff --git a/test/integration/tests/1265-extensible-snapshots/files/snapshots/local-snapshot.yaml b/test/integration/tests/1265-extensible-snapshots/files/snapshots/local-snapshot.yaml deleted file mode 100644 index 
441d9062db..0000000000 --- a/test/integration/tests/1265-extensible-snapshots/files/snapshots/local-snapshot.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -resolver: lts-14.27 -name: local-snapshot -packages: -- archive: package-0.1.2.3.tar.gz diff --git a/test/integration/tests/1265-extensible-snapshots/files/snapshots/package-0.1.2.3.tar.gz b/test/integration/tests/1265-extensible-snapshots/files/snapshots/package-0.1.2.3.tar.gz deleted file mode 100644 index a28a03742b..0000000000 Binary files a/test/integration/tests/1265-extensible-snapshots/files/snapshots/package-0.1.2.3.tar.gz and /dev/null differ diff --git a/test/integration/tests/1265-extensible-snapshots/files/snapshots/remote-snapshot.yaml b/test/integration/tests/1265-extensible-snapshots/files/snapshots/remote-snapshot.yaml deleted file mode 100644 index a3f5c11b78..0000000000 --- a/test/integration/tests/1265-extensible-snapshots/files/snapshots/remote-snapshot.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -resolver: lts-14.27 -name: remote-snapshot -packages: -- archive: https://s3.amazonaws.com/hackage.fpcomplete.com/package/ghc-prim-0.5.3.tar.gz diff --git a/test/integration/tests/1265-extensible-snapshots/files/stack-local-snapshot.yaml b/test/integration/tests/1265-extensible-snapshots/files/stack-local-snapshot.yaml deleted file mode 100644 index 067e75d93f..0000000000 --- a/test/integration/tests/1265-extensible-snapshots/files/stack-local-snapshot.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: snapshots/local-snapshot.yaml -packages: [] -allow-newer: true diff --git a/test/integration/tests/1265-extensible-snapshots/files/stack-modify-lts.yaml b/test/integration/tests/1265-extensible-snapshots/files/stack-modify-lts.yaml deleted file mode 100644 index cf6706ac8a..0000000000 --- a/test/integration/tests/1265-extensible-snapshots/files/stack-modify-lts.yaml +++ /dev/null @@ -1,2 +0,0 @@ -resolver: snapshot-modify-lts.yaml -packages: [] diff --git 
a/test/integration/tests/1265-extensible-snapshots/files/stack-remote-snapshot.yaml b/test/integration/tests/1265-extensible-snapshots/files/stack-remote-snapshot.yaml deleted file mode 100644 index 1829e19131..0000000000 --- a/test/integration/tests/1265-extensible-snapshots/files/stack-remote-snapshot.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: snapshots/remote-snapshot.yaml -packages: [] -allow-newer: true diff --git a/test/integration/tests/1265-extensible-snapshots/files/stack.yaml b/test/integration/tests/1265-extensible-snapshots/files/stack.yaml deleted file mode 100644 index a6b61436c4..0000000000 --- a/test/integration/tests/1265-extensible-snapshots/files/stack.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: snapshot.yaml -packages: [] -allow-newer: true diff --git a/test/integration/tests/1336-1337-new-package-names/Main.hs b/test/integration/tests/1336-1337-new-package-names/Main.hs deleted file mode 100644 index 603fdc4341..0000000000 --- a/test/integration/tests/1336-1337-new-package-names/Main.hs +++ /dev/null @@ -1,31 +0,0 @@ -import StackTest -import Control.Monad -import System.Directory -import System.FilePath - -{-# ANN module "HLint: ignore Use unless" #-} -main :: IO () -main = - if isWindows - then logInfo "Disabled on Windows (see https://github.com/commercialhaskell/stack/issues/1337#issuecomment-166118678)" - else do - safeNew "1234a-4b-b4-abc-12b34" - doesExist "./1234a-4b-b4-abc-12b34/stack.yaml" - stackErr ["new", "1234-abc"] - doesNotExist "./1234-abc/stack.yaml" - doesNotExist "./1234-abc" - stackErr ["new", "1-abc"] - stackErr ["new", "44444444444444"] - stackErr ["new", "abc-1"] - stackErr ["new", "444-ば日本-4本"] - unless isMacOSX $ safeNew "ば日本-4本" - safeNew "אבהץש" - safeNew "ΔΘΩϬ" - doesExist "./ΔΘΩϬ/stack.yaml" - doesExist "./ΔΘΩϬ/ΔΘΩϬ.cabal" - -safeNew :: String -> IO () -safeNew name = do - exists <- doesDirectoryExist name - when exists $ removeDirectoryRecursive name - stack ["new", name] diff --git 
a/test/integration/tests/1337-unicode-everywhere/Main.hs b/test/integration/tests/1337-unicode-everywhere/Main.hs deleted file mode 100644 index 2b34396fbd..0000000000 --- a/test/integration/tests/1337-unicode-everywhere/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest - -{-# ANN module "HLint: ignore Use unless" #-} -main :: IO () -main = - if isWindows - then logInfo "Disabled on Windows (see https://github.com/commercialhaskell/stack/issues/1337#issuecomment-166118678)" - else do - stack ["build"] - stack ["exec", "以-exe"] diff --git a/test/integration/tests/1337-unicode-everywhere/files/app/Main.hs b/test/integration/tests/1337-unicode-everywhere/files/app/Main.hs deleted file mode 100644 index 0bb0cef9be..0000000000 --- a/test/integration/tests/1337-unicode-everywhere/files/app/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Main where - -import Пσε - -main :: IO () -main = putStrLn θυπε diff --git a/test/integration/tests/1337-unicode-everywhere/files/stack.yaml b/test/integration/tests/1337-unicode-everywhere/files/stack.yaml deleted file mode 100644 index 6716730dfc..0000000000 --- a/test/integration/tests/1337-unicode-everywhere/files/stack.yaml +++ /dev/null @@ -1,6 +0,0 @@ -resolver: lts-14.27 -packages: -- '.' 
-extra-deps: [] -flags: {} -extra-package-dbs: [] diff --git "a/test/integration/tests/1337-unicode-everywhere/files/\327\220\327\250\327\245/\320\237\317\203\316\265.hs" "b/test/integration/tests/1337-unicode-everywhere/files/\327\220\327\250\327\245/\320\237\317\203\316\265.hs" deleted file mode 100644 index c3df827706..0000000000 --- "a/test/integration/tests/1337-unicode-everywhere/files/\327\220\327\250\327\245/\320\237\317\203\316\265.hs" +++ /dev/null @@ -1,6 +0,0 @@ -module Пσε - ( θυπε - ) where - -θυπε :: String -θυπε = "以呂波耳本部止" diff --git "a/test/integration/tests/1337-unicode-everywhere/files/\343\201\204\343\202\215\343\201\257-LICENSE" "b/test/integration/tests/1337-unicode-everywhere/files/\343\201\204\343\202\215\343\201\257-LICENSE" deleted file mode 100644 index ce91414e00..0000000000 --- "a/test/integration/tests/1337-unicode-everywhere/files/\343\201\204\343\202\215\343\201\257-LICENSE" +++ /dev/null @@ -1 +0,0 @@ -Whatever license. This is a project that only exists for integration testing and all rights are granted to it to whoever wants to do anything with it. It's mostly automatically generated content anyway. diff --git "a/test/integration/tests/1337-unicode-everywhere/files/\344\273\245.cabal" "b/test/integration/tests/1337-unicode-everywhere/files/\344\273\245.cabal" deleted file mode 100644 index 8c683dac94..0000000000 --- "a/test/integration/tests/1337-unicode-everywhere/files/\344\273\245.cabal" +++ /dev/null @@ -1,72 +0,0 @@ -name: 以 -version: 0.1.0.0 -synopsis: سقوط المدن من ذات. -description: - 以呂波耳本部止 - 千利奴流乎和加 - 餘多連曽津祢那 - 良牟有為能於久 - 耶万計不己衣天 - 阿佐伎喩女美之 - 恵比毛勢須 - - いろはにほへと - ちりぬるを - わかよたれそ - つねならむ - うゐのおくやま - けふこえて - あさきゆめみし - ゑひもせす - - 永 - - The quick brown fox jumps over the lazy dog - - Victor jagt zwölf Boxkämpfer quer über den großen Sylter Deich - - Δάματρα μέλπω Κόραν τε Κλυμένοι᾽ ἄλοχον - μελιβόαν ὕμνον ἀναγνέων - Αἰολίδ᾽ ἂμ βαρύβρομον ἁρμονίαν - - Ед эож алььтэрюм витюпэраторебуз, фалля пожйдонёюм нэ квуй. 
Зюаз атоморюм эю вэл, экз агам магна жкряпшэрит нам. Примич вокынт дэлььякатезшимя эа мэль, ыам факэтэ пытынтёюм волуптатум ку. Квуым квюаэчтио йн пэр, дольор форынчйбюж ут еюж. Эжт нонюмэш янвыняры эю. - - Υθ φιμ λιβερ δισερετ κυαεστιο. Νε δυο σονγυε φιθυπερατοριβυς, θε φις αθκυι σενσεριτ δεφινιεβας, μολλις θαμκυαμ ηας εα. Ιν φιμ εραντ μυσιυς, αλιι δισαντ σομμοδο νο συμ. Πρι αμετ πορρω σονσεκυυντυρ ατ. Ιδ σεα ηομερω αδιπισι, ομνεσκυε επισυρει ετ μελ, σεα αφφερθ σωνσεκυαθ θε. - - नीचे खरिदे समस्याओ व्रुद्धि सुना शीघ्र व्याख्या निरपेक्ष शुरुआत असरकारक अविरोधता खरिदे मेमत उसीएक् असरकारक आंतरकार्यक्षमता केवल करता। असक्षम सामूहिक विवरण हीकम सुनत सदस्य खरिदने उदेशीत - - 引全堀記物質行上初野年謝止質警細物競。委目態政業諸好岡積米真香冒班分団時大一夏。帝同手怠問来視旧記次禁身妨性直権員。州駅都稿頂風著報計個勢意時言進整。作敗約秘都並断旬検面事真区。則局世紙文般百校車社金名室権金練危。水夫動間始旅円典中所場針仁暮中。書稿準提選別推五玲常能船岡味。詩聞誌寧真身性業遠国無軽春臨個鳥相契断者。 - - 紙ウ主転ね加必キト聴4水そない岩者づど会前ラシ総過権ぜ聞快にクげそ庫辺はぴず意掲ひに真価トしざが。体ろ名録ふいに公問築ムミ数基ー原多ぞぎすど済進ハレ見況ヤラエロ館刊キイ構読増禎ヨモメ提信最フ済席ワモキル弘28思弾5健はーろ提館ぱっるけ社除左てがひ。全てれ理時渉ン勝境チ測試ヌエマラ年9億ち込全クテヒカ土止びぎこぐ署進アカモヌ同賞誕傾吹敏こ。 - - سقوط المدن من ذات. ٠٨٠٤ تعديل إبّان عن هذه, لمّ في هامش الدمج. فرنسية الجنوب ولكسمبورغ حتى أم, مع بحث لكون الشمل استطاعوا, فعل بلاده انتهت تم. كما مع واحدة الخاسر, أخذ وإعلان الشمال رجوعهم أن. تعد تم بشكل مشارف الشرقي, هو مئات مهمّات أخذ. - - בקר את הארץ מיתולוגיה. ובמתן העזרה דת מלא, שמו זקוק לחיבור אגרונומיה על. אל זכר התוכן מונחונים ארכיאולוגיה, ויש קצרמרים פסיכולוגיה אל. כתב אם בארגז אחרונים וכמקובל. ספינות ומדעים האטמוספירה ויש מה, צ'ט משחקים הקהילה ארכיאולוגיה על. אל כימיה המדינה סדר, זכר של שמות להפוך, ייִדיש לעריכת דת ויש. 
- - 😀😁😂😃😄😅😆😇😈😉😊😋😌😍😎😏 - ➀➁➂➃➄➅➆➇➈➉ - ♀♁♂♃♄♅♆♇♈♉♊♋♌♍♎♏ - 💩 -homepage: http://invalid/以#readme -license: BSD3 -license-file: いろは-LICENSE -author: Fake 💩💩💩💩💩 Name -maintainer: 以呂波耳本部止@invalid -copyright: BSD3 -category: Δσαντ -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: ארץ - exposed-modules: Пσε - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - -executable 以-exe - hs-source-dirs: app - main-is: Main.hs - ghc-options: -threaded -rtsopts -with-rtsopts=-N - build-depends: base - , 以 - default-language: Haskell2010 diff --git a/test/integration/tests/1438-configure-options/Main.hs b/test/integration/tests/1438-configure-options/Main.hs deleted file mode 100644 index 5e98e1d8ef..0000000000 --- a/test/integration/tests/1438-configure-options/Main.hs +++ /dev/null @@ -1,18 +0,0 @@ -import StackTest -import Control.Monad (unless) -import Data.Foldable (for_) -import Data.List (isInfixOf) - -main :: IO () -main = do - stackCleanFull - let stackYamlFiles = words "stack-locals.yaml stack-everything.yaml stack-targets.yaml stack-name.yaml" - for_ stackYamlFiles $ \stackYaml -> - stackErrStderr ["build", "--stack-yaml", stackYaml] $ \str -> - unless ("this is an invalid option" `isInfixOf` str) $ - error "Configure option is not present" - - stack ["build", "--stack-yaml", "stack-locals.yaml", "acme-dont"] - stack ["build", "--stack-yaml", "stack-targets.yaml", "acme-dont"] - stackErr ["build", "--stack-yaml", "stack-name.yaml", "acme-dont"] - stackErr ["build", "--stack-yaml", "stack-everything.yaml", "acme-dont"] diff --git a/test/integration/tests/1438-configure-options/files/.gitignore b/test/integration/tests/1438-configure-options/files/.gitignore deleted file mode 100644 index e9c64431ea..0000000000 --- a/test/integration/tests/1438-configure-options/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -name.cabal diff --git a/test/integration/tests/1438-configure-options/files/package.yaml 
b/test/integration/tests/1438-configure-options/files/package.yaml deleted file mode 100644 index 13ccbf73ec..0000000000 --- a/test/integration/tests/1438-configure-options/files/package.yaml +++ /dev/null @@ -1,5 +0,0 @@ -name: name -version: 0 - -dependencies: base -library: {} diff --git a/test/integration/tests/1438-configure-options/files/stack-name.yaml b/test/integration/tests/1438-configure-options/files/stack-name.yaml deleted file mode 100644 index ae947d8985..0000000000 --- a/test/integration/tests/1438-configure-options/files/stack-name.yaml +++ /dev/null @@ -1,10 +0,0 @@ -resolver: ghc-8.6.5 - -extra-deps: -- acme-dont-1.1@rev:0 - -configure-options: - name: - - this is an invalid option - acme-dont: - - this is an invalid option diff --git a/test/integration/tests/1659-skip-component/Main.hs b/test/integration/tests/1659-skip-component/Main.hs deleted file mode 100644 index 47ff86f83e..0000000000 --- a/test/integration/tests/1659-skip-component/Main.hs +++ /dev/null @@ -1,16 +0,0 @@ -import StackTest - -import Control.Exception (bracket) -import qualified Data.ByteString as S - -main :: IO () -main = do - -- we need to build all the executables first to be able to skip them later (see issue #3229) - stack ["build"] - bracket - (S.readFile "app/MainFail.hs") - (S.writeFile "app/MainFail.hs") - (const $ do - writeFile "app/MainFail.hs" "bdsf" - stack ["build", "--test", "--bench", "--skip", "failing-test", "--skip", "failing-bench", "--skip", "failing-exe"] - stack ["build", ":failing-test", ":failing-bench", ":exe", ":failing-exe", "--skip", "failing-test", "--skip", "failing-bench", "--skip", "failing-exe"]) diff --git a/test/integration/tests/1659-skip-component/files/README.md b/test/integration/tests/1659-skip-component/files/README.md deleted file mode 100644 index 560a646766..0000000000 --- a/test/integration/tests/1659-skip-component/files/README.md +++ /dev/null @@ -1 +0,0 @@ -# files diff --git 
a/test/integration/tests/1659-skip-component/files/app/Main.hs b/test/integration/tests/1659-skip-component/files/app/Main.hs deleted file mode 100644 index d82a4bd93b..0000000000 --- a/test/integration/tests/1659-skip-component/files/app/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Main where - -main :: IO () -main = return () diff --git a/test/integration/tests/1659-skip-component/files/app/MainFail.hs b/test/integration/tests/1659-skip-component/files/app/MainFail.hs deleted file mode 100644 index d82a4bd93b..0000000000 --- a/test/integration/tests/1659-skip-component/files/app/MainFail.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Main where - -main :: IO () -main = return () diff --git a/test/integration/tests/1659-skip-component/files/bench/Bench.hs b/test/integration/tests/1659-skip-component/files/bench/Bench.hs deleted file mode 100644 index d3f940c1ef..0000000000 --- a/test/integration/tests/1659-skip-component/files/bench/Bench.hs +++ /dev/null @@ -1,2 +0,0 @@ -main :: IO () -main = asdf diff --git a/test/integration/tests/1659-skip-component/files/skip-component.cabal b/test/integration/tests/1659-skip-component/files/skip-component.cabal deleted file mode 100644 index 2b4def57e6..0000000000 --- a/test/integration/tests/1659-skip-component/files/skip-component.cabal +++ /dev/null @@ -1,44 +0,0 @@ -name: skip-component -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - -executable exe - hs-source-dirs: app - main-is: Main.hs - ghc-options: -threaded -rtsopts -with-rtsopts=-N - build-depends: base - default-language: Haskell2010 - -executable failing-exe - hs-source-dirs: app - main-is: MainFail.hs - ghc-options: -threaded -rtsopts -with-rtsopts=-N - build-depends: base - default-language: Haskell2010 - -test-suite failing-test - type: exitcode-stdio-1.0 - hs-source-dirs: test - main-is: Spec.hs - build-depends: base - 
ghc-options: -threaded -rtsopts -with-rtsopts=-N - default-language: Haskell2010 - -benchmark failing-bench - type: exitcode-stdio-1.0 - hs-source-dirs: bench - main-is: Bench.hs - build-depends: base - ghc-options: -threaded -rtsopts -with-rtsopts=-N - default-language: Haskell2010 - -source-repository head - type: git - location: https://github.com/githubuser/files diff --git a/test/integration/tests/1659-skip-component/files/stack.yaml b/test/integration/tests/1659-skip-component/files/stack.yaml deleted file mode 100644 index 6716730dfc..0000000000 --- a/test/integration/tests/1659-skip-component/files/stack.yaml +++ /dev/null @@ -1,6 +0,0 @@ -resolver: lts-14.27 -packages: -- '.' -extra-deps: [] -flags: {} -extra-package-dbs: [] diff --git a/test/integration/tests/1659-skip-component/files/test/Spec.hs b/test/integration/tests/1659-skip-component/files/test/Spec.hs deleted file mode 100644 index d3f940c1ef..0000000000 --- a/test/integration/tests/1659-skip-component/files/test/Spec.hs +++ /dev/null @@ -1,2 +0,0 @@ -main :: IO () -main = asdf diff --git a/test/integration/tests/1884-url-to-tarball/Main.hs b/test/integration/tests/1884-url-to-tarball/Main.hs deleted file mode 100644 index c93c2e734d..0000000000 --- a/test/integration/tests/1884-url-to-tarball/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - -main :: IO () -main = do - stackCleanFull - stack ["build", "--dry-run"] diff --git a/test/integration/tests/1884-url-to-tarball/files/.gitignore b/test/integration/tests/1884-url-to-tarball/files/.gitignore deleted file mode 100644 index d43d807c0d..0000000000 --- a/test/integration/tests/1884-url-to-tarball/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.cabal diff --git a/test/integration/tests/1884-url-to-tarball/files/package.yaml b/test/integration/tests/1884-url-to-tarball/files/package.yaml deleted file mode 100644 index 2ab76b4002..0000000000 --- a/test/integration/tests/1884-url-to-tarball/files/package.yaml +++ /dev/null @@ -1,5 +0,0 @@ 
-name: foo -dependencies: -- base -- half -library: {} diff --git a/test/integration/tests/1884-url-to-tarball/files/stack.yaml b/test/integration/tests/1884-url-to-tarball/files/stack.yaml deleted file mode 100644 index a9fe2cb1d4..0000000000 --- a/test/integration/tests/1884-url-to-tarball/files/stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -extra-deps: -- location: https://hackage.haskell.org/package/half-0.2.2.3/half-0.2.2.3.tar.gz - sha256: 85c244c80d1c889a3d79073a6f5a99d9e769dbe3c574ca11d992b2b4f7599a5c - size: 6050 -resolver: lts-14.27 diff --git a/test/integration/tests/2195-depend-on-exe/Main.hs b/test/integration/tests/2195-depend-on-exe/Main.hs deleted file mode 100644 index 11f81255a9..0000000000 --- a/test/integration/tests/2195-depend-on-exe/Main.hs +++ /dev/null @@ -1,11 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf) -import StackTest - -main :: IO () -main = stackErrStderr ["build"] (expectMessage "package provides no library") - -expectMessage :: String -> String -> IO () -expectMessage msg stderr = - unless (msg `isInfixOf` stderr) - (error $ "Expected a warning: \n" ++ show msg) diff --git a/test/integration/tests/2195-depend-on-exe/files/files.cabal b/test/integration/tests/2195-depend-on-exe/files/files.cabal deleted file mode 100644 index 5edf065804..0000000000 --- a/test/integration/tests/2195-depend-on-exe/files/files.cabal +++ /dev/null @@ -1,7 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: happy diff --git a/test/integration/tests/2195-depend-on-exe/files/stack.yaml b/test/integration/tests/2195-depend-on-exe/files/stack.yaml deleted file mode 100644 index 785b1469c7..0000000000 --- a/test/integration/tests/2195-depend-on-exe/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: lts-14.27 diff --git a/test/integration/tests/2433-ghc-by-version/Main.hs b/test/integration/tests/2433-ghc-by-version/Main.hs deleted file mode 100644 index 271c259449..0000000000 
--- a/test/integration/tests/2433-ghc-by-version/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -import System.Process (rawSystem) -import Control.Exception (throwIO) -import StackTest -import Control.Monad (unless) - -main :: IO () -main = unless isWindows $ rawSystem "bash" ["run.sh"] >>= throwIO diff --git a/test/integration/tests/2433-ghc-by-version/files/run.sh b/test/integration/tests/2433-ghc-by-version/files/run.sh deleted file mode 100755 index b217e13e39..0000000000 --- a/test/integration/tests/2433-ghc-by-version/files/run.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash - -set -exuo pipefail - -export PATH=$(pwd)/fake-path:$("$STACK_EXE" path --resolver ghc-8.6.5 --compiler-bin):$PATH -export STACK_ROOT=$(pwd)/fake-root - -which ghc - -"$STACK_EXE" --system-ghc --no-install-ghc --resolver ghc-8.6.5 ghc -- --info -"$STACK_EXE" --system-ghc --no-install-ghc --resolver ghc-8.6.5 runghc foo.hs diff --git a/test/integration/tests/2465-init-no-packages/Main.hs b/test/integration/tests/2465-init-no-packages/Main.hs deleted file mode 100644 index 0fae42bd2d..0000000000 --- a/test/integration/tests/2465-init-no-packages/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest -import System.Directory -import Control.Monad (unless) - -main :: IO () -main = do - removeFileIgnore "stack.yaml" - stack ["init", "--resolver", "ghc-8.6.5"] - exists <- doesFileExist "stack.yaml" - unless exists $ error "stack.yaml not created!" 
diff --git a/test/integration/tests/2465-init-no-packages/files/.gitignore b/test/integration/tests/2465-init-no-packages/files/.gitignore deleted file mode 100644 index 684dbffa96..0000000000 --- a/test/integration/tests/2465-init-no-packages/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -stack.yaml diff --git a/test/integration/tests/2643-copy-compiler-tool/Main.hs b/test/integration/tests/2643-copy-compiler-tool/Main.hs deleted file mode 100644 index 085311d230..0000000000 --- a/test/integration/tests/2643-copy-compiler-tool/Main.hs +++ /dev/null @@ -1,61 +0,0 @@ -import StackTest -import System.Directory -import Control.Monad (unless) - -main :: IO () -main = do - -- init - removeFileIgnore "stack.yaml" - removeDirIgnore ".stack-work" - stack ["init", defaultResolverArg] - - -- place to throw some exes - removeDirIgnore "binny" - createDirectory "binny" - - -- check assumptions on exec and the build flags and clean - stack ["build", "--flag", "copy-compiler-tool-test:build-baz"] - stack ["exec", "--", "baz-exe" ++ exeExt] - stackErr ["exec", "--", "bar-exe" ++ exeExt] - stackCleanFull - -- See #4936 for details regarding the windows condition - unless isWindows $ stackErr ["exec", "--", "baz-exe" ++ exeExt] - - -- install one exe normally - stack ["install", - "--local-bin-path", "./binny", - "--flag", "*:build-foo" - ] - - -- and install two compiler-tools, opposite ways - -- (build or install) - stack ["build", - "--local-bin-path", "./binny", - "--copy-compiler-tool", - "--flag", "*:build-bar" - ] - stack ["install", - "--local-bin-path", "./binny", - "--copy-compiler-tool", - "--flag", "*:build-baz" - ] - - -- nuke the built things that go in .stack-work/, so we can test if - -- the installed ones exist for sure - stackCleanFull - - -- bar and baz were installed as compiler tools, should work fine - stack ["exec", "--", "bar-exe" ++ exeExt] - stack ["exec", "--", "baz-exe" ++ exeExt] - - -- foo was installed as a normal exe (in .binny/, which can't be on 
PATH), - -- so shouldn't - stackErr ["exec", "--", "foo-exe" ++ exeExt] - - -- check existences make sense - doesExist $ "./binny/foo-exe" ++ exeExt - doesNotExist $ "./binny/bar-exe" ++ exeExt - doesNotExist $ "./binny/baz-exe" ++ exeExt - - -- just check that this exists - stack ["path", "--compiler-tools-bin"] diff --git a/test/integration/tests/2643-copy-compiler-tool/files/Setup.hs b/test/integration/tests/2643-copy-compiler-tool/files/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/2643-copy-compiler-tool/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/2781-shadow-bug/Main.hs b/test/integration/tests/2781-shadow-bug/Main.hs deleted file mode 100644 index 057606262e..0000000000 --- a/test/integration/tests/2781-shadow-bug/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest -import System.Directory - -main :: IO () -main = do - createDirectoryIfMissing True "foo/src" - readFile "foo/v1/Foo.hs" >>= writeFile "foo/src/Foo.hs" - stack ["bench"] - readFile "foo/v2/Foo.hs" >>= writeFile "foo/src/Foo.hs" - stack ["bench"] diff --git a/test/integration/tests/2781-shadow-bug/files/.gitignore b/test/integration/tests/2781-shadow-bug/files/.gitignore deleted file mode 100644 index da86f0dbe2..0000000000 --- a/test/integration/tests/2781-shadow-bug/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -foo/src/ diff --git a/test/integration/tests/2781-shadow-bug/files/bar/bar.cabal b/test/integration/tests/2781-shadow-bug/files/bar/bar.cabal deleted file mode 100644 index 19985b85a0..0000000000 --- a/test/integration/tests/2781-shadow-bug/files/bar/bar.cabal +++ /dev/null @@ -1,18 +0,0 @@ -name: bar -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Bar - build-depends: base, foo - default-language: Haskell2010 - -benchmark bench - type: exitcode-stdio-1.0 - hs-source-dirs: bench - main-is: 
bench.hs - build-depends: base - , bar - default-language: Haskell2010 diff --git a/test/integration/tests/2781-shadow-bug/files/bar/bench/bench.hs b/test/integration/tests/2781-shadow-bug/files/bar/bench/bench.hs deleted file mode 100644 index 377b6b5516..0000000000 --- a/test/integration/tests/2781-shadow-bug/files/bar/bench/bench.hs +++ /dev/null @@ -1,2 +0,0 @@ -main :: IO () -main = return () diff --git a/test/integration/tests/2781-shadow-bug/files/bar/src/Bar.hs b/test/integration/tests/2781-shadow-bug/files/bar/src/Bar.hs deleted file mode 100644 index 3eef52eeed..0000000000 --- a/test/integration/tests/2781-shadow-bug/files/bar/src/Bar.hs +++ /dev/null @@ -1,8 +0,0 @@ -module Bar - ( bar - ) where - -import Foo - -bar :: IO () -bar = foo diff --git a/test/integration/tests/2781-shadow-bug/files/foo/foo.cabal b/test/integration/tests/2781-shadow-bug/files/foo/foo.cabal deleted file mode 100644 index 4b9dc19942..0000000000 --- a/test/integration/tests/2781-shadow-bug/files/foo/foo.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: foo -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Foo - build-depends: base - default-language: Haskell2010 diff --git a/test/integration/tests/2781-shadow-bug/files/foo/v1/Foo.hs b/test/integration/tests/2781-shadow-bug/files/foo/v1/Foo.hs deleted file mode 100644 index 0783893feb..0000000000 --- a/test/integration/tests/2781-shadow-bug/files/foo/v1/Foo.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Foo - ( foo - ) where - -foo :: IO () -foo = putStrLn "foo1" diff --git a/test/integration/tests/2781-shadow-bug/files/foo/v2/Foo.hs b/test/integration/tests/2781-shadow-bug/files/foo/v2/Foo.hs deleted file mode 100644 index 819d884e1e..0000000000 --- a/test/integration/tests/2781-shadow-bug/files/foo/v2/Foo.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Foo - ( foo - ) where - -foo :: IO () -foo = putStrLn "foo2" diff --git a/test/integration/tests/2781-shadow-bug/files/stack.yaml 
b/test/integration/tests/2781-shadow-bug/files/stack.yaml deleted file mode 100644 index dc21490bd7..0000000000 --- a/test/integration/tests/2781-shadow-bug/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: ghc-8.6.5 -packages: -- foo -- bar diff --git a/test/integration/tests/2997-ensure-warnings-output/Main.hs b/test/integration/tests/2997-ensure-warnings-output/Main.hs deleted file mode 100644 index ff41be9abb..0000000000 --- a/test/integration/tests/2997-ensure-warnings-output/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest -import Data.List (isInfixOf) - -main :: IO () -main = do - stackCleanFull - stackCheckStderr ["build", "--terminal", "--color=always"] $ \str -> - if "no type signature" `isInfixOf` str - then pure () - else error "Warnings are not being shown" diff --git a/test/integration/tests/2997-ensure-warnings-output/files/.gitignore b/test/integration/tests/2997-ensure-warnings-output/files/.gitignore deleted file mode 100644 index d43d807c0d..0000000000 --- a/test/integration/tests/2997-ensure-warnings-output/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.cabal diff --git a/test/integration/tests/2997-ensure-warnings-output/files/bar/package.yaml b/test/integration/tests/2997-ensure-warnings-output/files/bar/package.yaml deleted file mode 100644 index 0c7f62c5c5..0000000000 --- a/test/integration/tests/2997-ensure-warnings-output/files/bar/package.yaml +++ /dev/null @@ -1,9 +0,0 @@ -name: bar -version: 0 - -dependencies: -- base - -library: - source-dirs: src - ghc-options: -Wall diff --git a/test/integration/tests/2997-ensure-warnings-output/files/bar/src/Bar.hs b/test/integration/tests/2997-ensure-warnings-output/files/bar/src/Bar.hs deleted file mode 100644 index 08a38856f5..0000000000 --- a/test/integration/tests/2997-ensure-warnings-output/files/bar/src/Bar.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Bar where - -bar = () diff --git a/test/integration/tests/2997-ensure-warnings-output/files/foo/package.yaml 
b/test/integration/tests/2997-ensure-warnings-output/files/foo/package.yaml deleted file mode 100644 index 0de19a6188..0000000000 --- a/test/integration/tests/2997-ensure-warnings-output/files/foo/package.yaml +++ /dev/null @@ -1,9 +0,0 @@ -name: foo -version: 0 - -dependencies: -- base - -library: - source-dirs: src - ghc-options: -Wall diff --git a/test/integration/tests/2997-ensure-warnings-output/files/foo/src/Foo.hs b/test/integration/tests/2997-ensure-warnings-output/files/foo/src/Foo.hs deleted file mode 100644 index 11391a4814..0000000000 --- a/test/integration/tests/2997-ensure-warnings-output/files/foo/src/Foo.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Foo where - -foo = () diff --git a/test/integration/tests/2997-ensure-warnings-output/files/stack.yaml b/test/integration/tests/2997-ensure-warnings-output/files/stack.yaml deleted file mode 100644 index c5aff2469c..0000000000 --- a/test/integration/tests/2997-ensure-warnings-output/files/stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -resolver: ghc-8.6.5 -packages: -- foo -- bar -dump-logs: warning diff --git a/test/integration/tests/32-unlisted-module/Main.hs b/test/integration/tests/32-unlisted-module/Main.hs deleted file mode 100644 index 1559b530c7..0000000000 --- a/test/integration/tests/32-unlisted-module/Main.hs +++ /dev/null @@ -1,25 +0,0 @@ -import Control.Concurrent -import StackTest - -main :: IO () -main = do - copy "src/Unlisted_OK.hs" "src/Unlisted.hs" - copy "embed_OK.txt" "embed.txt" - stack ["build"] - pause - copy "src/Unlisted_FAIL.hs" "src/Unlisted.hs" - stackErr ["build"] - pause - copy "src/Unlisted_OK.hs" "src/Unlisted.hs" - stack ["build"] - stack ["exec", "files-exe"] - pause - copy "embed_FAIL.txt" "embed.txt" - stack ["build"] - stackErr ["exec", "files-exe"] - pause - copy "embed_OK.txt" "embed.txt" - stack ["build"] - stack ["exec", "files-exe"] - -pause = threadDelay 1000000 diff --git a/test/integration/tests/32-unlisted-module/files/.gitignore 
b/test/integration/tests/32-unlisted-module/files/.gitignore deleted file mode 100644 index 6c87a5ea83..0000000000 --- a/test/integration/tests/32-unlisted-module/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -embed.txt -src/Unlisted.hs diff --git a/test/integration/tests/32-unlisted-module/files/files.cabal b/test/integration/tests/32-unlisted-module/files/files.cabal deleted file mode 100644 index b789af591e..0000000000 --- a/test/integration/tests/32-unlisted-module/files/files.cabal +++ /dev/null @@ -1,17 +0,0 @@ -name: files -version: 0.1.0.0 -synopsis: Initial project template from stack -description: Please see README.md -homepage: http://github.com/githubuser/files#readme -license: BSD3 -build-type: Simple --- extra-source-files: -cabal-version: >=1.10 - -executable files-exe - hs-source-dirs: src/../src - main-is: Main.hs - build-depends: base >= 4.7 && < 5 - , bytestring - , file-embed - default-language: Haskell2010 diff --git a/test/integration/tests/32-unlisted-module/files/src/Main.hs b/test/integration/tests/32-unlisted-module/files/src/Main.hs deleted file mode 100644 index dde0892cc5..0000000000 --- a/test/integration/tests/32-unlisted-module/files/src/Main.hs +++ /dev/null @@ -1,16 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE OverloadedStrings #-} -module Main where - -import Control.Monad -import qualified Data.ByteString.Char8 as C8 -import Data.FileEmbed -import Data.List -import Unlisted - -main :: IO () -main = do - putStrLn ("main " ++ show foo ++ " " ++ show embedded) - when ("FAIL" `C8.isPrefixOf` embedded) $ error "embedded contains FAIL" - -embedded = $(embedFile "embed.txt") diff --git a/test/integration/tests/32-unlisted-module/files/src/Unlisted_FAIL.hs b/test/integration/tests/32-unlisted-module/files/src/Unlisted_FAIL.hs deleted file mode 100644 index b15130820b..0000000000 --- a/test/integration/tests/32-unlisted-module/files/src/Unlisted_FAIL.hs +++ /dev/null @@ -1,5 +0,0 @@ --- | Version of Unlisted with different 
export that causes failure to compile. -module Unlisted where - -fooRenamed :: String -fooRenamed = "foo" diff --git a/test/integration/tests/32-unlisted-module/files/src/Unlisted_OK.hs b/test/integration/tests/32-unlisted-module/files/src/Unlisted_OK.hs deleted file mode 100644 index 73caa00bcc..0000000000 --- a/test/integration/tests/32-unlisted-module/files/src/Unlisted_OK.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Unlisted where - -foo :: String -foo = "foo" diff --git a/test/integration/tests/32-unlisted-module/files/src/main/Main.hs b/test/integration/tests/32-unlisted-module/files/src/main/Main.hs deleted file mode 100644 index b768742deb..0000000000 --- a/test/integration/tests/32-unlisted-module/files/src/main/Main.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Main where - -main = do putStrLn "Hello, world." diff --git a/test/integration/tests/32-unlisted-module/files/stack.yaml b/test/integration/tests/32-unlisted-module/files/stack.yaml deleted file mode 100644 index 6b6b64cc34..0000000000 --- a/test/integration/tests/32-unlisted-module/files/stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -flags: {} -packages: -- '.' -extra-deps: [] -resolver: lts-14.27 diff --git a/test/integration/tests/3229-exe-targets/Main.hs b/test/integration/tests/3229-exe-targets/Main.hs deleted file mode 100644 index 502030659d..0000000000 --- a/test/integration/tests/3229-exe-targets/Main.hs +++ /dev/null @@ -1,42 +0,0 @@ --- | Stack should build all executables once, and in subsequent --- invocations only build those executables requested by the program --- arguments. 
--- --- Issue: https://github.com/commercialhaskell/stack/issues/3229 - -module Main where - -import Control.Exception -import Control.Monad (unless, when) -import qualified Data.ByteString as S -import Data.List (isInfixOf) -import StackTest - -main :: IO () -main = do - removeDirIgnore ".stack-work" - removeFileIgnore "stack.yaml" - stack [defaultResolverArg, "init"] - stack ["build", ":alpha"] - bracket - (S.readFile alphaFile) - (S.writeFile alphaFile) - (const - (do appendFile alphaFile "\n--" - stackCheckStderr - ["build", ":alpha"] - (rejectMessage - (unlines - ["Preprocessing executable 'beta' for foo-0..."])))) - where - alphaFile = "app/Alpha.hs" - -expectMessage :: String -> String -> IO () -expectMessage msg stderr = - unless (msg `isInfixOf` stderr) - (error $ "Expected in output: \n" ++ show msg) - -rejectMessage :: String -> String -> IO () -rejectMessage msg stderr = - when (msg `isInfixOf` stderr) - (error $ "Did not expect message here: \n" ++ show msg) diff --git a/test/integration/tests/3229-exe-targets/files/.gitignore b/test/integration/tests/3229-exe-targets/files/.gitignore deleted file mode 100644 index 684dbffa96..0000000000 --- a/test/integration/tests/3229-exe-targets/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -stack.yaml diff --git a/test/integration/tests/3229-exe-targets/files/app/Alpha.hs b/test/integration/tests/3229-exe-targets/files/app/Alpha.hs deleted file mode 100644 index b3549c2fe3..0000000000 --- a/test/integration/tests/3229-exe-targets/files/app/Alpha.hs +++ /dev/null @@ -1 +0,0 @@ -main = return () diff --git a/test/integration/tests/3229-exe-targets/files/app/Beta.hs b/test/integration/tests/3229-exe-targets/files/app/Beta.hs deleted file mode 100644 index b3549c2fe3..0000000000 --- a/test/integration/tests/3229-exe-targets/files/app/Beta.hs +++ /dev/null @@ -1 +0,0 @@ -main = return () diff --git a/test/integration/tests/3229-exe-targets/files/foo.cabal b/test/integration/tests/3229-exe-targets/files/foo.cabal deleted 
file mode 100644 index 4a68648e6d..0000000000 --- a/test/integration/tests/3229-exe-targets/files/foo.cabal +++ /dev/null @@ -1,22 +0,0 @@ -name: foo -version: 0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Foo - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - -executable alpha - hs-source-dirs: app - main-is: Alpha.hs - build-depends: base, foo - default-language: Haskell2010 - -executable beta - hs-source-dirs: app - main-is: Beta.hs - build-depends: base, foo - default-language: Haskell2010 diff --git a/test/integration/tests/3229-exe-targets/files/src/Foo.hs b/test/integration/tests/3229-exe-targets/files/src/Foo.hs deleted file mode 100644 index efbf93bbde..0000000000 --- a/test/integration/tests/3229-exe-targets/files/src/Foo.hs +++ /dev/null @@ -1 +0,0 @@ -module Foo where diff --git a/test/integration/tests/3315-multi-ghc-options/Main.hs b/test/integration/tests/3315-multi-ghc-options/Main.hs deleted file mode 100644 index 1a0ea36fce..0000000000 --- a/test/integration/tests/3315-multi-ghc-options/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - -main :: IO () -main = do - stack ["build", "--ghc-options=-ddump-simpl -ddump-asm -DBAR -DBAZ"] - repl ["--ghc-options=-ddump-simpl -ddump-asm"] (pure ()) diff --git a/test/integration/tests/3315-multi-ghc-options/files/multi-ghc-options.cabal b/test/integration/tests/3315-multi-ghc-options/files/multi-ghc-options.cabal deleted file mode 100644 index 2f25278446..0000000000 --- a/test/integration/tests/3315-multi-ghc-options/files/multi-ghc-options.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: multi-ghc-options -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/3315-multi-ghc-options/files/src/Lib.hs b/test/integration/tests/3315-multi-ghc-options/files/src/Lib.hs deleted 
file mode 100644 index 58ced13fba..0000000000 --- a/test/integration/tests/3315-multi-ghc-options/files/src/Lib.hs +++ /dev/null @@ -1,11 +0,0 @@ -{-# LANGUAGE CPP #-} - -module Lib where - -#ifndef BAR -#error BAR isn't defined -#endif - -#ifndef BAZ -#error BAZ isn't defined -#endif diff --git a/test/integration/tests/3315-multi-ghc-options/files/stack.yaml b/test/integration/tests/3315-multi-ghc-options/files/stack.yaml deleted file mode 100644 index 3732cb9c9f..0000000000 --- a/test/integration/tests/3315-multi-ghc-options/files/stack.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: lts-14.27 -packages: -- '.' diff --git a/test/integration/tests/335-multi-package-flags/Main.hs b/test/integration/tests/335-multi-package-flags/Main.hs deleted file mode 100644 index e9ff7fa6ac..0000000000 --- a/test/integration/tests/335-multi-package-flags/Main.hs +++ /dev/null @@ -1,8 +0,0 @@ -import StackTest - -main :: IO () -main = do - stackErr ["build"] - stack ["build", "--flag", "new-template:necessary"] - stackErr ["build"] - stack ["build", "--flag", "*:necessary"] diff --git a/test/integration/tests/335-multi-package-flags/files/Setup.hs b/test/integration/tests/335-multi-package-flags/files/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/335-multi-package-flags/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/335-multi-package-flags/files/new-template.cabal b/test/integration/tests/335-multi-package-flags/files/new-template.cabal deleted file mode 100644 index 50876be0ce..0000000000 --- a/test/integration/tests/335-multi-package-flags/files/new-template.cabal +++ /dev/null @@ -1,17 +0,0 @@ -name: new-template -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -flag necessary - manual: True - default: False - description: Won't build without this flag on - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base - 
default-language: Haskell2010 - if flag(necessary) - cpp-options: -DWORK diff --git a/test/integration/tests/335-multi-package-flags/files/src/Lib.hs b/test/integration/tests/335-multi-package-flags/files/src/Lib.hs deleted file mode 100644 index 3f119fcab7..0000000000 --- a/test/integration/tests/335-multi-package-flags/files/src/Lib.hs +++ /dev/null @@ -1,11 +0,0 @@ -{-# LANGUAGE CPP #-} -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" - -#if !WORK -#error Not going to work, sorry -#endif diff --git a/test/integration/tests/335-multi-package-flags/files/stack.yaml b/test/integration/tests/335-multi-package-flags/files/stack.yaml deleted file mode 100644 index 6b6b64cc34..0000000000 --- a/test/integration/tests/335-multi-package-flags/files/stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -flags: {} -packages: -- '.' -extra-deps: [] -resolver: lts-14.27 diff --git a/test/integration/tests/3390-unbuildable-test/Main.hs b/test/integration/tests/3390-unbuildable-test/Main.hs deleted file mode 100644 index fb222afc69..0000000000 --- a/test/integration/tests/3390-unbuildable-test/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: IO () -main = stack ["test"] diff --git a/test/integration/tests/3390-unbuildable-test/files/files.cabal b/test/integration/tests/3390-unbuildable-test/files/files.cabal deleted file mode 100644 index 4909fb438a..0000000000 --- a/test/integration/tests/3390-unbuildable-test/files/files.cabal +++ /dev/null @@ -1,18 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base - default-language: Haskell2010 - -test-suite test - type: exitcode-stdio-1.0 - hs-source-dirs: test - main-is: Spec.hs - build-depends: base - default-language: Haskell2010 - buildable: False \ No newline at end of file diff --git a/test/integration/tests/3390-unbuildable-test/files/src/Lib.hs 
b/test/integration/tests/3390-unbuildable-test/files/src/Lib.hs deleted file mode 100644 index e1ee993010..0000000000 --- a/test/integration/tests/3390-unbuildable-test/files/src/Lib.hs +++ /dev/null @@ -1 +0,0 @@ -module Lib () where diff --git a/test/integration/tests/3390-unbuildable-test/files/stack.yaml b/test/integration/tests/3390-unbuildable-test/files/stack.yaml deleted file mode 100644 index 14f23e9aa6..0000000000 --- a/test/integration/tests/3390-unbuildable-test/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.6.5 diff --git a/test/integration/tests/3396-package-indices/Main.hs b/test/integration/tests/3396-package-indices/Main.hs deleted file mode 100644 index 1c6cc3ba8c..0000000000 --- a/test/integration/tests/3396-package-indices/Main.hs +++ /dev/null @@ -1,15 +0,0 @@ -import StackTest -import System.Directory (createDirectoryIfMissing) -import System.Environment (getEnv, setEnv) -import System.FilePath (()) - -main :: IO () -main = do - putStrLn "With pantry, non-Hackage Security indices are no longer supported, skipping test" - {- - home <- getEnv "HOME" - setEnv "STACK_ROOT" (home ".stack") -- Needed for Windows - createDirectoryIfMissing True (home ".stack" "indices" "CustomIndex") - copy "CustomIndex/01-index.tar" (home ".stack" "indices" "CustomIndex" "01-index.tar") - stack ["build"] - -} diff --git a/test/integration/tests/3396-package-indices/files/CustomIndex/01-index.tar b/test/integration/tests/3396-package-indices/files/CustomIndex/01-index.tar deleted file mode 100644 index 1388955d58..0000000000 Binary files a/test/integration/tests/3396-package-indices/files/CustomIndex/01-index.tar and /dev/null differ diff --git a/test/integration/tests/3396-package-indices/files/files.cabal b/test/integration/tests/3396-package-indices/files/files.cabal deleted file mode 100644 index 5f500a7dd5..0000000000 --- a/test/integration/tests/3396-package-indices/files/files.cabal +++ /dev/null @@ -1,19 +0,0 @@ -name: files -version: 0.1.0.0 
--- synopsis: --- description: -homepage: https://github.com/githubuser/files#readme -license: BSD3 -author: Author name here -maintainer: example@example.com -copyright: 2000 Author name here -category: Web -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - diff --git a/test/integration/tests/3396-package-indices/files/my-snapshot.yaml b/test/integration/tests/3396-package-indices/files/my-snapshot.yaml deleted file mode 100644 index e296386e23..0000000000 --- a/test/integration/tests/3396-package-indices/files/my-snapshot.yaml +++ /dev/null @@ -1,11 +0,0 @@ -compiler: ghc-8.6.5 -name: my-snapshot - -packages: - - base-4.10.0.0 - - dinamo-0.1.0.0 - -flags: - dinamo: - debug: true - diff --git a/test/integration/tests/3396-package-indices/files/src/Lib.hs b/test/integration/tests/3396-package-indices/files/src/Lib.hs deleted file mode 100644 index f323c86f10..0000000000 --- a/test/integration/tests/3396-package-indices/files/src/Lib.hs +++ /dev/null @@ -1,8 +0,0 @@ -module Lib - ( someFunc - ) where - - -someFunc :: IO () -someFunc = print "some func" - diff --git a/test/integration/tests/3396-package-indices/files/stack.yaml b/test/integration/tests/3396-package-indices/files/stack.yaml deleted file mode 100644 index 4b0fc7bcb4..0000000000 --- a/test/integration/tests/3396-package-indices/files/stack.yaml +++ /dev/null @@ -1,18 +0,0 @@ -resolver: my-snapshot.yaml - -packages: -- . 
- -extra-deps: [] - -# Override default flag values for local packages and extra-deps -flags: {} - -# Extra package databases containing global packages -extra-package-dbs: [] - -package-indices: - - name: CustomIndex - download-prefix: http://0.0.0.0:8080/ - http: http://0.0.0.0:8080/custom.index.tar.gz - diff --git a/test/integration/tests/3397-ghc-solver/Main.hs b/test/integration/tests/3397-ghc-solver/Main.hs deleted file mode 100644 index 137c3db9a2..0000000000 --- a/test/integration/tests/3397-ghc-solver/Main.hs +++ /dev/null @@ -1,15 +0,0 @@ -{-- - -import StackTest - -main :: IO () -main = do - removeFileIgnore "stack.yaml" - removeFileIgnore "issue3397.cabal" - stack ["init", "--solver", "--resolver", "ghc-8.2.2"] - stack ["solver", "--update-config"] - -// --} - -main :: IO () -main = putStrLn "This test is disabled (see https://github.com/commercialhaskell/stack/issues/4410)." diff --git a/test/integration/tests/3397-ghc-solver/files/.gitignore b/test/integration/tests/3397-ghc-solver/files/.gitignore deleted file mode 100644 index 85a0a53562..0000000000 --- a/test/integration/tests/3397-ghc-solver/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -*.cabal -stack.yaml diff --git a/test/integration/tests/3397-ghc-solver/files/package.yaml b/test/integration/tests/3397-ghc-solver/files/package.yaml deleted file mode 100644 index b39857997a..0000000000 --- a/test/integration/tests/3397-ghc-solver/files/package.yaml +++ /dev/null @@ -1,9 +0,0 @@ -name: issue3397 - -library: - dependencies: - - base - - stm - exposed-modules: - - Lib - diff --git a/test/integration/tests/3431-precompiled-works/Main.hs b/test/integration/tests/3431-precompiled-works/Main.hs deleted file mode 100644 index 7d9e8ce5c5..0000000000 --- a/test/integration/tests/3431-precompiled-works/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest -import Control.Monad -import Data.List - -main :: IO () -main = do - stack ["build", "random-1.1", "--stack-yaml", "custom1/stack.yaml"] - 
stackCheckStderr ["build", "random-1.1", "--stack-yaml", "custom2/stack.yaml"] $ \out -> do - print out - unless ("precompiled" `isInfixOf` out) $ error "Didn't use precompiled!" diff --git a/test/integration/tests/3431-precompiled-works/files/custom1/custom1.yaml b/test/integration/tests/3431-precompiled-works/files/custom1/custom1.yaml deleted file mode 100644 index 434970fc43..0000000000 --- a/test/integration/tests/3431-precompiled-works/files/custom1/custom1.yaml +++ /dev/null @@ -1,5 +0,0 @@ -resolver: ghc-8.6.5 -name: custom1 -packages: -- stm-2.5.0.0 -- acme-missiles-0.3 diff --git a/test/integration/tests/3431-precompiled-works/files/custom1/stack.yaml b/test/integration/tests/3431-precompiled-works/files/custom1/stack.yaml deleted file mode 100644 index 818b612b05..0000000000 --- a/test/integration/tests/3431-precompiled-works/files/custom1/stack.yaml +++ /dev/null @@ -1,2 +0,0 @@ -resolver: custom1.yaml -packages: [] diff --git a/test/integration/tests/3431-precompiled-works/files/custom2/custom2.yaml b/test/integration/tests/3431-precompiled-works/files/custom2/custom2.yaml deleted file mode 100644 index 781a342f9a..0000000000 --- a/test/integration/tests/3431-precompiled-works/files/custom2/custom2.yaml +++ /dev/null @@ -1,5 +0,0 @@ -resolver: ghc-8.6.5 -name: custom2 -packages: -- stm-2.5.0.0 -- acme-missiles-0.2 diff --git a/test/integration/tests/3431-precompiled-works/files/custom2/stack.yaml b/test/integration/tests/3431-precompiled-works/files/custom2/stack.yaml deleted file mode 100644 index ab08e1142a..0000000000 --- a/test/integration/tests/3431-precompiled-works/files/custom2/stack.yaml +++ /dev/null @@ -1,2 +0,0 @@ -resolver: custom2.yaml -packages: [] diff --git a/test/integration/tests/345-override-bytestring/Main.hs b/test/integration/tests/345-override-bytestring/Main.hs deleted file mode 100644 index 248a59a3df..0000000000 --- a/test/integration/tests/345-override-bytestring/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - 
-main :: IO () -main = do - stack ["build", "--dry-run"] -- for useful error output - stack ["build"] diff --git a/test/integration/tests/345-override-bytestring/files/Foo.hs b/test/integration/tests/345-override-bytestring/files/Foo.hs deleted file mode 100644 index d282cc6fb6..0000000000 --- a/test/integration/tests/345-override-bytestring/files/Foo.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Foo where - -import Data.Binary () -import Data.ByteString () diff --git a/test/integration/tests/345-override-bytestring/files/foo.cabal b/test/integration/tests/345-override-bytestring/files/foo.cabal deleted file mode 100644 index 4c23cac18d..0000000000 --- a/test/integration/tests/345-override-bytestring/files/foo.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: foo -version: 0.0.0 -synopsis: foo -build-type: Simple -cabal-version: >=1.10 - -library - exposed-modules: Foo - build-depends: base, bytestring, binary - default-language: Haskell2010 diff --git a/test/integration/tests/345-override-bytestring/files/stack.yaml b/test/integration/tests/345-override-bytestring/files/stack.yaml deleted file mode 100644 index 157d1d6c7a..0000000000 --- a/test/integration/tests/345-override-bytestring/files/stack.yaml +++ /dev/null @@ -1,6 +0,0 @@ -resolver: lts-14.27 -extra-deps: -- bytestring-0.10.10.0@sha256:06b2e84f1bc9ab71a162c0ca9e88358dd6bbe5cb7fdda2d6d34b6863c367ec95,8944 -- binary-0.8.8.0@sha256:e9387a7ef2b34c6a23b09664c306e37cc01ae2cb4e4511a1c96ffb14008c24b0,6262 -packages: -- . 
diff --git a/test/integration/tests/3520-revision-matching/Main.hs b/test/integration/tests/3520-revision-matching/Main.hs deleted file mode 100644 index 5d01c83bf3..0000000000 --- a/test/integration/tests/3520-revision-matching/Main.hs +++ /dev/null @@ -1,16 +0,0 @@ -import StackTest -import Control.Monad -import Data.List -import System.Directory - -main :: IO () -main = do - putStrLn "Test disabled due to switch to pantry" - {- - copyFile "bad-stack.yaml" "stack.yaml" - stackErrStderr ["build", "--dry-run"] $ \msg -> - unless ("legacy 00-index.tar.gz" `isInfixOf` msg) $ - error "Expected a warning about 00-index usage" - copyFile "good-stack.yaml" "stack.yaml" - stack ["build", "--dry-run"] - -} diff --git a/test/integration/tests/3520-revision-matching/files/.gitignore b/test/integration/tests/3520-revision-matching/files/.gitignore deleted file mode 100644 index b894371d9b..0000000000 --- a/test/integration/tests/3520-revision-matching/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -stack.yaml -*.cabal diff --git a/test/integration/tests/3520-revision-matching/files/bad-stack.yaml b/test/integration/tests/3520-revision-matching/files/bad-stack.yaml deleted file mode 100644 index 9cc624df28..0000000000 --- a/test/integration/tests/3520-revision-matching/files/bad-stack.yaml +++ /dev/null @@ -1,6 +0,0 @@ -resolver: lts-10.10 - -package-indices: -- name: Hackage00 - download-prefix: https://hackage.haskell.org/package - http: https://hackage.haskell.org/00-index.tar.gz diff --git a/test/integration/tests/3520-revision-matching/files/good-stack.yaml b/test/integration/tests/3520-revision-matching/files/good-stack.yaml deleted file mode 100644 index bb0f9c3e3c..0000000000 --- a/test/integration/tests/3520-revision-matching/files/good-stack.yaml +++ /dev/null @@ -1,8 +0,0 @@ -resolver: lts-10.10 - -package-indices: -- name: Hackage00 - download-prefix: https://hackage.haskell.org/package - http: https://hackage.haskell.org/00-index.tar.gz - 
-ignore-revision-mismatch: true diff --git a/test/integration/tests/3520-revision-matching/files/package.yaml b/test/integration/tests/3520-revision-matching/files/package.yaml deleted file mode 100644 index ed129d64f0..0000000000 --- a/test/integration/tests/3520-revision-matching/files/package.yaml +++ /dev/null @@ -1,9 +0,0 @@ -name: issue3520 -version: 0.1.0.0 - -dependencies: -- base -- mtl - -library: - source-dirs: src diff --git a/test/integration/tests/3520-revision-matching/files/src/Foo.hs b/test/integration/tests/3520-revision-matching/files/src/Foo.hs deleted file mode 100644 index efbf93bbde..0000000000 --- a/test/integration/tests/3520-revision-matching/files/src/Foo.hs +++ /dev/null @@ -1 +0,0 @@ -module Foo where diff --git a/test/integration/tests/3533-extra-deps-solver/Main.hs b/test/integration/tests/3533-extra-deps-solver/Main.hs deleted file mode 100644 index b5bafedd7b..0000000000 --- a/test/integration/tests/3533-extra-deps-solver/Main.hs +++ /dev/null @@ -1,15 +0,0 @@ -{-- - -import StackTest -import System.Directory - -main :: IO () -main = do - copyFile "orig-stack.yaml" "stack.yaml" - stack [defaultResolverArg, "solver", "--update-config"] - stack ["build"] - -// --} - -main :: IO () -main = putStrLn "This test is disabled (see https://github.com/commercialhaskell/stack/issues/4410)." 
diff --git a/test/integration/tests/3533-extra-deps-solver/files/.gitignore b/test/integration/tests/3533-extra-deps-solver/files/.gitignore deleted file mode 100644 index 4c65edffd7..0000000000 --- a/test/integration/tests/3533-extra-deps-solver/files/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -.stack-work/ -*.cabal -stack.yaml diff --git a/test/integration/tests/3533-extra-deps-solver/files/local-mmorph/package.yaml b/test/integration/tests/3533-extra-deps-solver/files/local-mmorph/package.yaml deleted file mode 100644 index 05a16e68a5..0000000000 --- a/test/integration/tests/3533-extra-deps-solver/files/local-mmorph/package.yaml +++ /dev/null @@ -1,10 +0,0 @@ -name: mmorph -version: 1.2.0 - -dependencies: -- base -- acme-missiles - -library: - source-dirs: - - src diff --git a/test/integration/tests/3533-extra-deps-solver/files/local-mmorph/src/Lib.hs b/test/integration/tests/3533-extra-deps-solver/files/local-mmorph/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/3533-extra-deps-solver/files/local-mmorph/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/3533-extra-deps-solver/files/orig-stack.yaml b/test/integration/tests/3533-extra-deps-solver/files/orig-stack.yaml deleted file mode 100644 index 53e85eadd3..0000000000 --- a/test/integration/tests/3533-extra-deps-solver/files/orig-stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -resolver: lts-14.27 - -packages: -- ./local-mmorph -- ./uses-mmorph diff --git a/test/integration/tests/3533-extra-deps-solver/files/uses-mmorph/package.yaml b/test/integration/tests/3533-extra-deps-solver/files/uses-mmorph/package.yaml deleted file mode 100644 index cd73e5ab0f..0000000000 --- a/test/integration/tests/3533-extra-deps-solver/files/uses-mmorph/package.yaml +++ /dev/null @@ -1,10 +0,0 @@ -name: uses-mmorph -version: 1.2.0 - -dependencies: -- base -- mmorph >= 1.1 - 
-library: - source-dirs: - - src diff --git a/test/integration/tests/3533-extra-deps-solver/files/uses-mmorph/src/Lib.hs b/test/integration/tests/3533-extra-deps-solver/files/uses-mmorph/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/3533-extra-deps-solver/files/uses-mmorph/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/3574-extra-dep-local/Main.hs b/test/integration/tests/3574-extra-dep-local/Main.hs deleted file mode 100644 index b1b93683df..0000000000 --- a/test/integration/tests/3574-extra-dep-local/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: IO () -main = stack ["build", "foo"] diff --git a/test/integration/tests/3574-extra-dep-local/files/foo/Foo.hs b/test/integration/tests/3574-extra-dep-local/files/foo/Foo.hs deleted file mode 100644 index a23b501b27..0000000000 --- a/test/integration/tests/3574-extra-dep-local/files/foo/Foo.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Foo(foo) where - -foo = "foo" diff --git a/test/integration/tests/3574-extra-dep-local/files/foo/foo.cabal b/test/integration/tests/3574-extra-dep-local/files/foo/foo.cabal deleted file mode 100644 index b37e615a97..0000000000 --- a/test/integration/tests/3574-extra-dep-local/files/foo/foo.cabal +++ /dev/null @@ -1,8 +0,0 @@ -cabal-version: >= 1.2 -build-type: Simple -name: foo -version: 0 - -library - build-depends: base - exposed-modules: Foo diff --git a/test/integration/tests/3574-extra-dep-local/files/stack.yaml b/test/integration/tests/3574-extra-dep-local/files/stack.yaml deleted file mode 100644 index 9359f53127..0000000000 --- a/test/integration/tests/3574-extra-dep-local/files/stack.yaml +++ /dev/null @@ -1,9 +0,0 @@ -resolver: ghc-8.6.5 - -packages: [] - -extra-deps: -- foo - -ghc-options: - $locals: -bob diff --git a/test/integration/tests/3591-cabal-warnings-once/Main.hs 
b/test/integration/tests/3591-cabal-warnings-once/Main.hs deleted file mode 100644 index 71a35c1e1a..0000000000 --- a/test/integration/tests/3591-cabal-warnings-once/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest -import Data.List (isInfixOf) - -main :: IO () -main = do - stackCheckStderr ["build", "--dry-run"] $ \str -> - case filter ("unknown-field-name" `isInfixOf`) (lines str) of - [] -> error "unknown-field-name didn't appear once" - [_] -> return () - _:_:_ -> error "unknown-field-name appeared multiple times" diff --git a/test/integration/tests/3591-cabal-warnings-once/files/foo.cabal b/test/integration/tests/3591-cabal-warnings-once/files/foo.cabal deleted file mode 100644 index 411bd8742d..0000000000 --- a/test/integration/tests/3591-cabal-warnings-once/files/foo.cabal +++ /dev/null @@ -1,16 +0,0 @@ --- This file has been generated from package.yaml by hpack version 0.20.0. --- --- see: https://github.com/sol/hpack --- --- hash: 43a4e1612fc5dee2ab88c588fee639840be01569a600ab2955961c341b89058d - -name: foo -version: 0.1.0.0 -build-type: Simple -cabal-version: >= 1.10 - -unknown-field-name: makes a warning! 
- -library - hs-source-dirs: src - exposed-modules: Lib diff --git a/test/integration/tests/3591-cabal-warnings-once/files/stack.yaml b/test/integration/tests/3591-cabal-warnings-once/files/stack.yaml deleted file mode 100644 index 785b1469c7..0000000000 --- a/test/integration/tests/3591-cabal-warnings-once/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: lts-14.27 diff --git a/test/integration/tests/3631-build-http2/Main.hs b/test/integration/tests/3631-build-http2/Main.hs deleted file mode 100644 index e5c2bd0142..0000000000 --- a/test/integration/tests/3631-build-http2/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - -main :: IO () -main = do - stack ["build", defaultResolverArg, "--dry-run", "http2"] - stack ["build", defaultResolverArg, "http2"] diff --git a/test/integration/tests/365-invalid-success/Main.hs b/test/integration/tests/365-invalid-success/Main.hs deleted file mode 100644 index b978282dd4..0000000000 --- a/test/integration/tests/365-invalid-success/Main.hs +++ /dev/null @@ -1,11 +0,0 @@ -import StackTest - -main :: IO () -main = do - stackErr ["build"] - stackErr ["build"] - writeFile "app/Foo.hs" "module Foo where" - stack ["build"] - writeFile "app/Foo.hs" "module Foo wher e" - stackErr ["build"] - stackErr ["build"] diff --git a/test/integration/tests/365-invalid-success/files/.gitignore b/test/integration/tests/365-invalid-success/files/.gitignore deleted file mode 100644 index 8fec4903f5..0000000000 --- a/test/integration/tests/365-invalid-success/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -app/Foo.hs diff --git a/test/integration/tests/365-invalid-success/files/app/Main.hs b/test/integration/tests/365-invalid-success/files/app/Main.hs deleted file mode 100644 index db996f2e57..0000000000 --- a/test/integration/tests/365-invalid-success/files/app/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import Foo - -main :: IO () -main = return () diff --git a/test/integration/tests/365-invalid-success/files/new-template.cabal 
b/test/integration/tests/365-invalid-success/files/new-template.cabal deleted file mode 100644 index 6ab719dc58..0000000000 --- a/test/integration/tests/365-invalid-success/files/new-template.cabal +++ /dev/null @@ -1,12 +0,0 @@ -name: new-template -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -executable new-template-exe - hs-source-dirs: app - main-is: Main.hs - ghc-options: -threaded -rtsopts -with-rtsopts=-N - build-depends: base - default-language: Haskell2010 - other-modules: Foo diff --git a/test/integration/tests/365-invalid-success/files/stack.yaml b/test/integration/tests/365-invalid-success/files/stack.yaml deleted file mode 100644 index 6b6b64cc34..0000000000 --- a/test/integration/tests/365-invalid-success/files/stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -flags: {} -packages: -- '.' -extra-deps: [] -resolver: lts-14.27 diff --git a/test/integration/tests/366-non-root-dir/Main.hs b/test/integration/tests/366-non-root-dir/Main.hs deleted file mode 100644 index f1dd62cefb..0000000000 --- a/test/integration/tests/366-non-root-dir/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest -import System.Directory - -main :: IO () -main = do - removeDirIgnore ".stack-work" - stackErr ["exec", "hello-world"] - setCurrentDirectory "app" - stack ["build"] - stack ["exec", "hello-world"] diff --git a/test/integration/tests/366-non-root-dir/files/app/Main.hs b/test/integration/tests/366-non-root-dir/files/app/Main.hs deleted file mode 100644 index 377b6b5516..0000000000 --- a/test/integration/tests/366-non-root-dir/files/app/Main.hs +++ /dev/null @@ -1,2 +0,0 @@ -main :: IO () -main = return () diff --git a/test/integration/tests/366-non-root-dir/files/new-template.cabal b/test/integration/tests/366-non-root-dir/files/new-template.cabal deleted file mode 100644 index 1ce3458647..0000000000 --- a/test/integration/tests/366-non-root-dir/files/new-template.cabal +++ /dev/null @@ -1,11 +0,0 @@ -name: new-template -version: 0.1.0.0 -build-type: Simple 
-cabal-version: >=1.10 - -executable hello-world - hs-source-dirs: app - main-is: Main.hs - ghc-options: -threaded -rtsopts -with-rtsopts=-N - build-depends: base - default-language: Haskell2010 diff --git a/test/integration/tests/366-non-root-dir/files/stack.yaml b/test/integration/tests/366-non-root-dir/files/stack.yaml deleted file mode 100644 index 6b6b64cc34..0000000000 --- a/test/integration/tests/366-non-root-dir/files/stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -flags: {} -packages: -- '.' -extra-deps: [] -resolver: lts-14.27 diff --git a/test/integration/tests/3685-config-yaml-for-allow-newer/Main.hs b/test/integration/tests/3685-config-yaml-for-allow-newer/Main.hs deleted file mode 100644 index 6b56e9f48b..0000000000 --- a/test/integration/tests/3685-config-yaml-for-allow-newer/Main.hs +++ /dev/null @@ -1,24 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf) -import StackTest -import System.Directory - -main :: IO () -main = do - removeFileIgnore "stack.yaml" - stack ["init", defaultResolverArg] - (_, stdErr) <- stackStderr ["install", "intero-0.1.23"] - -- here we check stderr for 'allow-newer: true' and - -- config.yaml sitting either on the same line or on - -- two consecutive lines - let errLines = lines stdErr - hasNewer l = "'allow-newer: true'" `isInfixOf` l - withNewer = map hasNewer errLines - userConfig = "config.yaml" - hasConfigForAllowNewer prevNewer l = - (prevNewer || hasNewer l) && - userConfig `isInfixOf` l - hasProperLines = - or $ zipWith hasConfigForAllowNewer (False:withNewer) errLines - unless hasProperLines $ - error $ "Not stderr lines with 'allow-newer: true' and " ++ userConfig diff --git a/test/integration/tests/3685-config-yaml-for-allow-newer/files/.gitignore b/test/integration/tests/3685-config-yaml-for-allow-newer/files/.gitignore deleted file mode 100644 index 684dbffa96..0000000000 --- a/test/integration/tests/3685-config-yaml-for-allow-newer/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -stack.yaml diff 
--git a/test/integration/tests/3685-config-yaml-for-allow-newer/files/files.cabal b/test/integration/tests/3685-config-yaml-for-allow-newer/files/files.cabal deleted file mode 100644 index 917c8f3f46..0000000000 --- a/test/integration/tests/3685-config-yaml-for-allow-newer/files/files.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >= 2.0 - -library - exposed-modules: Src - hs-source-dirs: src - build-depends: base - default-language: Haskell2010 diff --git a/test/integration/tests/3685-config-yaml-for-allow-newer/files/src/Src.hs b/test/integration/tests/3685-config-yaml-for-allow-newer/files/src/Src.hs deleted file mode 100644 index 0f8db7fb77..0000000000 --- a/test/integration/tests/3685-config-yaml-for-allow-newer/files/src/Src.hs +++ /dev/null @@ -1,5 +0,0 @@ -module Src where - --- | A function of the main library -funMainLib :: Int -> Int -funMainLib = succ diff --git a/test/integration/tests/370-invalid-setup-hs/Main.hs b/test/integration/tests/370-invalid-setup-hs/Main.hs deleted file mode 100644 index e61b083aa4..0000000000 --- a/test/integration/tests/370-invalid-setup-hs/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: IO () -main = stack ["build"] diff --git a/test/integration/tests/370-invalid-setup-hs/files/LICENSE b/test/integration/tests/370-invalid-setup-hs/files/LICENSE deleted file mode 100644 index d05408d876..0000000000 --- a/test/integration/tests/370-invalid-setup-hs/files/LICENSE +++ /dev/null @@ -1,30 +0,0 @@ -Copyright (c) 2000 - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. 
- - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Your name here nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/test/integration/tests/370-invalid-setup-hs/files/Setup.hs b/test/integration/tests/370-invalid-setup-hs/files/Setup.hs deleted file mode 100644 index 4b147db695..0000000000 --- a/test/integration/tests/370-invalid-setup-hs/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Make (defaultMain) -main = defaultMain diff --git a/test/integration/tests/370-invalid-setup-hs/files/new-template.cabal b/test/integration/tests/370-invalid-setup-hs/files/new-template.cabal deleted file mode 100644 index 7306959dcc..0000000000 --- a/test/integration/tests/370-invalid-setup-hs/files/new-template.cabal +++ /dev/null @@ -1,18 +0,0 @@ -name: new-template -version: 0.1.0.0 -synopsis: Initial project template from stack -description: Please see README.md -homepage: http://github.com/name/project -license: BSD3 -license-file: LICENSE -author: Your name here -maintainer: your.address@example.com -category: Web -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/370-invalid-setup-hs/files/src/Lib.hs b/test/integration/tests/370-invalid-setup-hs/files/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/370-invalid-setup-hs/files/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/370-invalid-setup-hs/files/stack.yaml b/test/integration/tests/370-invalid-setup-hs/files/stack.yaml deleted file mode 100644 index 14f23e9aa6..0000000000 --- a/test/integration/tests/370-invalid-setup-hs/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.6.5 diff --git a/test/integration/tests/3787-internal-libs-with-no-main-lib/Main.hs b/test/integration/tests/3787-internal-libs-with-no-main-lib/Main.hs deleted file mode 100644 index 09bd4aa3b0..0000000000 --- 
a/test/integration/tests/3787-internal-libs-with-no-main-lib/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - -main :: IO () -main = do - stack ["clean"] - stack ["build"] diff --git a/test/integration/tests/3787-internal-libs-with-no-main-lib/files/exe/Main.hs b/test/integration/tests/3787-internal-libs-with-no-main-lib/files/exe/Main.hs deleted file mode 100644 index 83db768ed3..0000000000 --- a/test/integration/tests/3787-internal-libs-with-no-main-lib/files/exe/Main.hs +++ /dev/null @@ -1 +0,0 @@ -main = putStrLn "OK" diff --git a/test/integration/tests/3787-internal-libs-with-no-main-lib/files/files.cabal b/test/integration/tests/3787-internal-libs-with-no-main-lib/files/files.cabal deleted file mode 100644 index 631f10c48d..0000000000 --- a/test/integration/tests/3787-internal-libs-with-no-main-lib/files/files.cabal +++ /dev/null @@ -1,16 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >= 2.0 - -library sublib - exposed-modules: B - hs-source-dirs: src-sublib - build-depends: base - default-language: Haskell2010 - -executable exe - main-is: Main.hs - hs-source-dirs: exe - build-depends: base, sublib - default-language: Haskell2010 diff --git a/test/integration/tests/3787-internal-libs-with-no-main-lib/files/src-sublib/B.hs b/test/integration/tests/3787-internal-libs-with-no-main-lib/files/src-sublib/B.hs deleted file mode 100644 index 53253d5dcc..0000000000 --- a/test/integration/tests/3787-internal-libs-with-no-main-lib/files/src-sublib/B.hs +++ /dev/null @@ -1,5 +0,0 @@ -module B where - --- | A function of the internal library -funInternal :: Int -> Int -funInternal = pred diff --git a/test/integration/tests/3787-internal-libs-with-no-main-lib/files/stack.yaml b/test/integration/tests/3787-internal-libs-with-no-main-lib/files/stack.yaml deleted file mode 100644 index 785b1469c7..0000000000 --- a/test/integration/tests/3787-internal-libs-with-no-main-lib/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: lts-14.27 
diff --git a/test/integration/tests/384-local-deps/Main.hs b/test/integration/tests/384-local-deps/Main.hs deleted file mode 100644 index fe0186368f..0000000000 --- a/test/integration/tests/384-local-deps/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -import StackTest - -main :: IO () -main = do - removeFileIgnore "stack.yaml" - stack ["init", defaultResolverArg] - stack ["test"] diff --git a/test/integration/tests/384-local-deps/files/.gitignore b/test/integration/tests/384-local-deps/files/.gitignore deleted file mode 100644 index 684dbffa96..0000000000 --- a/test/integration/tests/384-local-deps/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -stack.yaml diff --git a/test/integration/tests/384-local-deps/files/dir1/dir1.cabal b/test/integration/tests/384-local-deps/files/dir1/dir1.cabal deleted file mode 100644 index 87ea412999..0000000000 --- a/test/integration/tests/384-local-deps/files/dir1/dir1.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: dir1 -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/384-local-deps/files/dir1/src/Lib.hs b/test/integration/tests/384-local-deps/files/dir1/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/384-local-deps/files/dir1/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/384-local-deps/files/dir2/dir2.cabal b/test/integration/tests/384-local-deps/files/dir2/dir2.cabal deleted file mode 100644 index 40d8a85084..0000000000 --- a/test/integration/tests/384-local-deps/files/dir2/dir2.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: dir2 -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5, dir1 - default-language: Haskell2010 
diff --git a/test/integration/tests/384-local-deps/files/dir2/src/Lib.hs b/test/integration/tests/384-local-deps/files/dir2/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/384-local-deps/files/dir2/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/3850-cached-templates-network-errors/Main.hs b/test/integration/tests/3850-cached-templates-network-errors/Main.hs deleted file mode 100644 index 0683007e0e..0000000000 --- a/test/integration/tests/3850-cached-templates-network-errors/Main.hs +++ /dev/null @@ -1,38 +0,0 @@ -import StackTest -import Control.Monad (unless) -import Data.List (isInfixOf) -import Data.Maybe (fromMaybe) -import System.Directory -import System.Environment (lookupEnv, setEnv) -import System.FilePath - -main :: IO () -main = do - performCachingTest templateUrl - performCachingTest githubTemplate - where - performCachingTest :: String -> IO () - performCachingTest template = do - let arguments = ["new", "tmp", template] - originalHttpProxy <- lookupEnv "HTTPS_PROXY" - stack arguments - removeDirectoryRecursive "tmp" - setEnv "HTTPS_PROXY" "http://sdsgsfgslfgsjflgkjs" -- make https requests fail - stackCheckStderr arguments $ \stderr -> - unless ("Using cached local version" `isInfixOf` stderr) - (error "stack didn't load the cached template") - - removeDirectoryRecursive "tmp" - setEnv "HTTPS_PROXY" (fromMaybe "" originalHttpProxy) - - -- this template has a `stack.yaml` file - -- so `stack new` does not have to `stack init` - -- and therefore the test runs faster - templateUrl :: String - templateUrl = - "https://raw.githubusercontent.com/commercialhaskell/stack-templates/986836cc85b0c8c5bbb78d7b94347ba095089b03/tasty-discover.hsfiles" - - -- the same template, cached differently - githubTemplate :: String - githubTemplate = "github:commercialhaskell/tasty-discover.hsfiles" - diff --git 
a/test/integration/tests/3861-ignore-bounds-in-snapshots/Main.hs b/test/integration/tests/3861-ignore-bounds-in-snapshots/Main.hs deleted file mode 100644 index 38e0123f5b..0000000000 --- a/test/integration/tests/3861-ignore-bounds-in-snapshots/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -import StackTest - -main :: IO () -main = do - stackIgnoreException ["clean", "--stack-yaml", "stack-good.yaml", "--full"] - stackErr ["build", "--stack-yaml", "stack-bad.yaml"] - stack ["build", "--stack-yaml", "stack-good.yaml"] diff --git a/test/integration/tests/3861-ignore-bounds-in-snapshots/files/.gitignore b/test/integration/tests/3861-ignore-bounds-in-snapshots/files/.gitignore deleted file mode 100644 index 0afa51175a..0000000000 --- a/test/integration/tests/3861-ignore-bounds-in-snapshots/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -foo.cabal diff --git a/test/integration/tests/3861-ignore-bounds-in-snapshots/files/package.yaml b/test/integration/tests/3861-ignore-bounds-in-snapshots/files/package.yaml deleted file mode 100644 index aaa0f35c7c..0000000000 --- a/test/integration/tests/3861-ignore-bounds-in-snapshots/files/package.yaml +++ /dev/null @@ -1,8 +0,0 @@ -name: foo -version: 0 - -dependencies: -- base -- bad-bounds - -library: {} diff --git a/test/integration/tests/3861-ignore-bounds-in-snapshots/files/snapshot.yaml b/test/integration/tests/3861-ignore-bounds-in-snapshots/files/snapshot.yaml deleted file mode 100644 index cb2dffab08..0000000000 --- a/test/integration/tests/3861-ignore-bounds-in-snapshots/files/snapshot.yaml +++ /dev/null @@ -1,8 +0,0 @@ -resolver: ghc-8.6.5 -packages: -- ./bad-bounds.tar - -# Include a flag to get a different snapshot hash -flags: - bad-bounds: - unimportant: false diff --git a/test/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-bad.yaml b/test/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-bad.yaml deleted file mode 100644 index f5a25a75c7..0000000000 --- 
a/test/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-bad.yaml +++ /dev/null @@ -1,8 +0,0 @@ -resolver: ghc-8.6.5 -extra-deps: -- ./bad-bounds.tar - -# Include a flag to get a different snapshot hash -flags: - bad-bounds: - unimportant: true diff --git a/test/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-good.yaml b/test/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-good.yaml deleted file mode 100644 index 6de0480e0e..0000000000 --- a/test/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-good.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: snapshot.yaml diff --git a/test/integration/tests/3863-purge-command/Main.hs b/test/integration/tests/3863-purge-command/Main.hs deleted file mode 100644 index 28b3bd6426..0000000000 --- a/test/integration/tests/3863-purge-command/Main.hs +++ /dev/null @@ -1,48 +0,0 @@ -{-# LANGUAGE ViewPatterns #-} - -import StackTest -import Data.Char (isSpace) -import Data.List (dropWhileEnd) -import Data.Maybe (listToMaybe, fromMaybe) -import Control.Monad (unless) -import System.Directory -import System.FilePath - -trimEnd :: String -> String -trimEnd = dropWhileEnd isSpace - -main :: IO () -main = - -- For these commands, we'll need to know the `dist` directory. - -- This is usually `.stack-work/dist/$compiler-variant/Cabal-xxxx` - stackCheckStdout [defaultResolverArg, "path", "--dist-dir"] $ \(trimEnd -> distDir) -> do - stackCheckStdout [defaultResolverArg, "path", "--local-install-root"] $ \(trimEnd -> localInstallRoot) -> do - -- Usually `.stack-work` - let stackWork = fromMaybe (error "There must be a stack working directory.") $ - listToMaybe (splitDirectories distDir) - - -- First, clean the .stack-work directory. - -- This is only necessary when running individual tests. 
- stackIgnoreException [defaultResolverArg, "purge"] - -- See #4936 for details regarding the windows condition - unless isWindows $ doesNotExist stackWork - - -- The dist directory should exist after a build - stack [defaultResolverArg, "build"] - doesExist distDir - doesExist localInstallRoot - doesExist stackWork - - -- The dist directory should not exist after a clean, whereas the - -- .stack-work directory should - stackIgnoreException [defaultResolverArg, "clean"] - -- See #4936 for details regarding the windows condition - unless isWindows $ do - doesNotExist distDir - doesExist localInstallRoot - doesExist stackWork - - -- The .stack-work directory should not exist after a purge - stackIgnoreException [defaultResolverArg, "purge"] - -- See #4936 for details regarding the windows condition - unless isWindows $ doesNotExist stackWork diff --git a/test/integration/tests/3863-purge-command/files/new-template.cabal b/test/integration/tests/3863-purge-command/files/new-template.cabal deleted file mode 100644 index 192e0b2dfb..0000000000 --- a/test/integration/tests/3863-purge-command/files/new-template.cabal +++ /dev/null @@ -1,11 +0,0 @@ -name: new-template -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/3863-purge-command/files/src/Lib.hs b/test/integration/tests/3863-purge-command/files/src/Lib.hs deleted file mode 100644 index 1c88a82644..0000000000 --- a/test/integration/tests/3863-purge-command/files/src/Lib.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Lib where - -someFunc :: () -someFunc = () diff --git a/test/integration/tests/3863-purge-command/files/stack.yaml b/test/integration/tests/3863-purge-command/files/stack.yaml deleted file mode 100644 index 6b6b64cc34..0000000000 --- a/test/integration/tests/3863-purge-command/files/stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -flags: {} 
-packages: -- '.' -extra-deps: [] -resolver: lts-14.27 diff --git a/test/integration/tests/3899-dont-rebuild-sublibraries/Main.hs b/test/integration/tests/3899-dont-rebuild-sublibraries/Main.hs deleted file mode 100644 index fa6835ebe7..0000000000 --- a/test/integration/tests/3899-dont-rebuild-sublibraries/Main.hs +++ /dev/null @@ -1,14 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf) -import StackTest - -main :: IO () -main = do - stack ["clean"] - stack ["build"] - res <- compilingModulesLines . snd <$> stackStderr ["build"] - unless (null res) $ fail "Stack recompiled code" - --- Returns the lines where a module is compiled -compilingModulesLines :: String -> [String] -compilingModulesLines = filter (isInfixOf " Compiling ") . lines diff --git a/test/integration/tests/3899-dont-rebuild-sublibraries/files/Setup.hs b/test/integration/tests/3899-dont-rebuild-sublibraries/files/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/3899-dont-rebuild-sublibraries/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/3899-dont-rebuild-sublibraries/files/files.cabal b/test/integration/tests/3899-dont-rebuild-sublibraries/files/files.cabal deleted file mode 100644 index 867797cb7b..0000000000 --- a/test/integration/tests/3899-dont-rebuild-sublibraries/files/files.cabal +++ /dev/null @@ -1,22 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >= 2.0 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base, lib - default-language: Haskell2010 - -library lib - hs-source-dirs: src-internal - exposed-modules: Internal - build-depends: base - default-language: Haskell2010 - -executable exe - hs-source-dirs: src-exe - main-is: Main.hs - build-depends: base, files - default-language: Haskell2010 diff --git a/test/integration/tests/3899-dont-rebuild-sublibraries/files/src-exe/Main.hs 
b/test/integration/tests/3899-dont-rebuild-sublibraries/files/src-exe/Main.hs deleted file mode 100644 index cafae24793..0000000000 --- a/test/integration/tests/3899-dont-rebuild-sublibraries/files/src-exe/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -module Main where - -import Lib - -main :: IO () -main = do - putStrLn "hello world" diff --git a/test/integration/tests/3899-dont-rebuild-sublibraries/files/stack.yaml b/test/integration/tests/3899-dont-rebuild-sublibraries/files/stack.yaml deleted file mode 100644 index 6698e5edf6..0000000000 --- a/test/integration/tests/3899-dont-rebuild-sublibraries/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: ghc-8.6.5 -extra-deps: -- stm-2.4.4.1 -- mtl-2.2.1 diff --git a/test/integration/tests/3926-ghci-with-sublibraries/Main.hs b/test/integration/tests/3926-ghci-with-sublibraries/Main.hs deleted file mode 100644 index 408cfddeb3..0000000000 --- a/test/integration/tests/3926-ghci-with-sublibraries/Main.hs +++ /dev/null @@ -1,45 +0,0 @@ -import Control.Concurrent -import Control.Monad.IO.Class -import Control.Monad -import Data.List -import StackTest - -main :: IO () -main = do - stack ["clean"] -- to make sure we can load the code even after a clean - copy "src/Lib.v1" "src/Lib.hs" - copy "src-internal/Internal.v1" "src-internal/Internal.hs" - stack ["build"] -- need a build before ghci at the moment, see #4148 - forkIO fileEditingThread - replThread - -replThread :: IO () -replThread = repl [] $ do - replCommand ":main" - line <- replGetLine - when (line /= "hello world") $ error "Main module didn't load correctly." - liftIO $ threadDelay 1000000 -- wait for an edit of the internal library - reloadAndTest "testInt" "42" "Internal library didn't reload." - liftIO $ threadDelay 1000000 -- wait for an edit of the internal library - reloadAndTest "testStr" "\"OK\"" "Main library didn't reload." 
- -fileEditingThread :: IO () -fileEditingThread = do - threadDelay 1000000 - -- edit the internal library and return to ghci - copy "src-internal/Internal.v2" "src-internal/Internal.hs" - threadDelay 1000000 - -- edit the internal library and end thread, returning to ghci - copy "src/Lib.v2" "src/Lib.hs" - -reloadAndTest :: String -> String -> String -> Repl () -reloadAndTest cmd exp err = do - reload - replCommand cmd - line <- replGetLine - unless (exp `isSuffixOf` line) $ error err - -reload :: Repl () -reload = replCommand ":reload" >> loop - where - loop = replGetLine >>= \line -> unless ("Ok" `isInfixOf` line) loop diff --git a/test/integration/tests/3926-ghci-with-sublibraries/files/.gitignore b/test/integration/tests/3926-ghci-with-sublibraries/files/.gitignore deleted file mode 100644 index 54bdefd6ee..0000000000 --- a/test/integration/tests/3926-ghci-with-sublibraries/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -src/Lib.hs -src-internal/Internal.hs diff --git a/test/integration/tests/3926-ghci-with-sublibraries/files/Setup.hs b/test/integration/tests/3926-ghci-with-sublibraries/files/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/3926-ghci-with-sublibraries/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/3926-ghci-with-sublibraries/files/files.cabal b/test/integration/tests/3926-ghci-with-sublibraries/files/files.cabal deleted file mode 100644 index 867797cb7b..0000000000 --- a/test/integration/tests/3926-ghci-with-sublibraries/files/files.cabal +++ /dev/null @@ -1,22 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >= 2.0 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base, lib - default-language: Haskell2010 - -library lib - hs-source-dirs: src-internal - exposed-modules: Internal - build-depends: base - default-language: Haskell2010 - -executable exe - hs-source-dirs: 
src-exe - main-is: Main.hs - build-depends: base, files - default-language: Haskell2010 diff --git a/test/integration/tests/3926-ghci-with-sublibraries/files/src-exe/Main.hs b/test/integration/tests/3926-ghci-with-sublibraries/files/src-exe/Main.hs deleted file mode 100644 index cafae24793..0000000000 --- a/test/integration/tests/3926-ghci-with-sublibraries/files/src-exe/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -module Main where - -import Lib - -main :: IO () -main = do - putStrLn "hello world" diff --git a/test/integration/tests/3926-ghci-with-sublibraries/files/src-internal/Internal.v2 b/test/integration/tests/3926-ghci-with-sublibraries/files/src-internal/Internal.v2 deleted file mode 100644 index da8a642c7b..0000000000 --- a/test/integration/tests/3926-ghci-with-sublibraries/files/src-internal/Internal.v2 +++ /dev/null @@ -1,4 +0,0 @@ -module Internal where - -testInt :: Int -testInt = 42 diff --git a/test/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.v2 b/test/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.v2 deleted file mode 100644 index d9892d6826..0000000000 --- a/test/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.v2 +++ /dev/null @@ -1,6 +0,0 @@ -module Lib where - -import Internal - -testStr :: String -testStr = "OK" diff --git a/test/integration/tests/3926-ghci-with-sublibraries/files/stack.yaml b/test/integration/tests/3926-ghci-with-sublibraries/files/stack.yaml deleted file mode 100644 index 6698e5edf6..0000000000 --- a/test/integration/tests/3926-ghci-with-sublibraries/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: ghc-8.6.5 -extra-deps: -- stm-2.4.4.1 -- mtl-2.2.1 diff --git a/test/integration/tests/3940-base-upgrade-warning/Main.hs b/test/integration/tests/3940-base-upgrade-warning/Main.hs deleted file mode 100644 index f79bbfb7a5..0000000000 --- a/test/integration/tests/3940-base-upgrade-warning/Main.hs +++ /dev/null @@ -1,25 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf) 
-import StackTest - -unattainableBaseWarning :: String -unattainableBaseWarning = - "Build requires unattainable version of base. Since base is a part of GHC, \ - \you most likely need to use a different GHC version with the matching base." - -noBaseUpgradeWarning :: String -noBaseUpgradeWarning = - "You are trying to upgrade/downgrade base, which is almost certainly \ - \not what you really want. Please, consider using another GHC version \ - \if you need a certain version of base, or removing base from extra-deps. \ - \See more at https://github.com/commercialhaskell/stack/issues/3940." - -main :: IO () -main = do - stackErrStderr ["build", "--stack-yaml", "unattainable-base.yaml"] (expectMessage unattainableBaseWarning) - stackErrStderr ["build", "--stack-yaml", "no-base-upgrade.yaml"] (expectMessage noBaseUpgradeWarning) - -expectMessage :: String -> String -> IO () -expectMessage msg stderr = - unless (words msg `isInfixOf` words stderr) - (error $ "Expected a warning: \n" ++ show msg) diff --git a/test/integration/tests/3940-base-upgrade-warning/files/files.cabal b/test/integration/tests/3940-base-upgrade-warning/files/files.cabal deleted file mode 100644 index 1a0eb3c82e..0000000000 --- a/test/integration/tests/3940-base-upgrade-warning/files/files.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: files -version: 0.0.1.0 -build-type: Simple -cabal-version: >= 1.18 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base < 4.10 && >= 4.6.0.1 - default-language: Haskell2010 diff --git a/test/integration/tests/3940-base-upgrade-warning/files/no-base-upgrade.yaml b/test/integration/tests/3940-base-upgrade-warning/files/no-base-upgrade.yaml deleted file mode 100644 index 2bc5fd7a22..0000000000 --- a/test/integration/tests/3940-base-upgrade-warning/files/no-base-upgrade.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: lts-14.27 -extra-deps: - - base-4.10.1.0 diff --git a/test/integration/tests/3940-base-upgrade-warning/files/unattainable-base.yaml 
b/test/integration/tests/3940-base-upgrade-warning/files/unattainable-base.yaml deleted file mode 100644 index 785b1469c7..0000000000 --- a/test/integration/tests/3940-base-upgrade-warning/files/unattainable-base.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: lts-14.27 diff --git a/test/integration/tests/3942-solver-error-output/Main.hs b/test/integration/tests/3942-solver-error-output/Main.hs deleted file mode 100644 index 2e3482683c..0000000000 --- a/test/integration/tests/3942-solver-error-output/Main.hs +++ /dev/null @@ -1,16 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf) -import StackTest - -planFailure :: String -planFailure = - "While constructing the build plan, the following exceptions were encountered:" - -main :: IO () -main = do - stackErrStderr ["./script.hs"] (expectMessage planFailure) - -expectMessage :: String -> String -> IO () -expectMessage msg stderr = do - unless (words msg `isInfixOf` words stderr) - (error $ "Expected a warning: \n" ++ show msg) diff --git a/test/integration/tests/3942-solver-error-output/files/.gitignore b/test/integration/tests/3942-solver-error-output/files/.gitignore deleted file mode 100644 index 4c65edffd7..0000000000 --- a/test/integration/tests/3942-solver-error-output/files/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -.stack-work/ -*.cabal -stack.yaml diff --git a/test/integration/tests/3942-solver-error-output/files/no-deps/package.yaml b/test/integration/tests/3942-solver-error-output/files/no-deps/package.yaml deleted file mode 100644 index aaf7113620..0000000000 --- a/test/integration/tests/3942-solver-error-output/files/no-deps/package.yaml +++ /dev/null @@ -1,15 +0,0 @@ -name: no-deps -version: 0.1.0.0 -synopsis: A package with no dependencies, other than base -license: BSD3 -author: Author name here -copyright: 2000 Author name here -category: Development -extra-source-files: [] - -dependencies: - - base - -library: - source-dirs: '.' 
- exposed-modules: [] diff --git a/test/integration/tests/3942-solver-error-output/files/one-deps/package.yaml b/test/integration/tests/3942-solver-error-output/files/one-deps/package.yaml deleted file mode 100644 index 740536b2c4..0000000000 --- a/test/integration/tests/3942-solver-error-output/files/one-deps/package.yaml +++ /dev/null @@ -1,16 +0,0 @@ -name: one-deps -version: 0.1.0.0 -synopsis: A package with one dependency (no-deps) other than base -license: BSD3 -author: Author name here -copyright: 2000 Author name here -category: Development -extra-source-files: [] - -dependencies: - - base - - no-deps - -library: - source-dirs: '.' - exposed-modules: [] diff --git a/test/integration/tests/3942-solver-error-output/files/script.hs b/test/integration/tests/3942-solver-error-output/files/script.hs deleted file mode 100755 index 51083db266..0000000000 --- a/test/integration/tests/3942-solver-error-output/files/script.hs +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env stack --- stack runhaskell --stack-yaml test-stack.yml --package one-deps - -main :: IO () -main = putStrLn "yo" diff --git a/test/integration/tests/3942-solver-error-output/files/test-stack.yml b/test/integration/tests/3942-solver-error-output/files/test-stack.yml deleted file mode 100644 index cdfbbf482a..0000000000 --- a/test/integration/tests/3942-solver-error-output/files/test-stack.yml +++ /dev/null @@ -1,6 +0,0 @@ -resolver: lts-14.27 - -packages: [] - -extra-deps: -- ./one-deps diff --git a/test/integration/tests/3959-order-of-flags/Main.hs b/test/integration/tests/3959-order-of-flags/Main.hs deleted file mode 100644 index 650bf92469..0000000000 --- a/test/integration/tests/3959-order-of-flags/Main.hs +++ /dev/null @@ -1,21 +0,0 @@ -import StackTest - -import Control.Monad (unless) -import Data.List (isInfixOf) - --- Integration test for https://github.com/commercialhaskell/stack/issues/3959 -main :: IO () -main = do - checkFlagsBeforeCommand - checkFlagsAfterCommand - -checkFlagsBeforeCommand 
:: IO () -checkFlagsBeforeCommand = stackCheckStderr ["--test", "--no-run-tests", "build"] checker - -checkFlagsAfterCommand :: IO () -checkFlagsAfterCommand = stackCheckStderr ["build", "--test", "--no-run-tests"] checker - -checker :: String -> IO () -checker output = do - let testsAreDisabled = any (\ln -> "Test running disabled by" `isInfixOf` ln) (lines output) - unless testsAreDisabled $ fail "Tests should not be run" diff --git a/test/integration/tests/3959-order-of-flags/files/.gitignore b/test/integration/tests/3959-order-of-flags/files/.gitignore deleted file mode 100644 index d43d807c0d..0000000000 --- a/test/integration/tests/3959-order-of-flags/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.cabal diff --git a/test/integration/tests/3959-order-of-flags/files/package.yaml b/test/integration/tests/3959-order-of-flags/files/package.yaml deleted file mode 100644 index 43895dbbda..0000000000 --- a/test/integration/tests/3959-order-of-flags/files/package.yaml +++ /dev/null @@ -1,10 +0,0 @@ -name: issue3959 -version: 0.1.0.0 - -dependencies: -- base - -tests: - test: - main: Spec.hs - source-dirs: test diff --git a/test/integration/tests/3959-order-of-flags/files/stack.yaml b/test/integration/tests/3959-order-of-flags/files/stack.yaml deleted file mode 100644 index 785b1469c7..0000000000 --- a/test/integration/tests/3959-order-of-flags/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: lts-14.27 diff --git a/test/integration/tests/397-case-insensitive-flags/Main.hs b/test/integration/tests/397-case-insensitive-flags/Main.hs deleted file mode 100644 index 65bbe49cc7..0000000000 --- a/test/integration/tests/397-case-insensitive-flags/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest - -main :: IO () -main = do - stackErr ["build"] - stack ["build", "--flag", "new-template:fixIt"] - stack ["build", "--flag", "new-template:fixit"] - stack ["build", "--flag", "new-template:fiXit"] - stack ["build", "--flag", "*:fiXit"] - stackErr ["build", "--flag", 
"*:fiXit-else"] diff --git a/test/integration/tests/397-case-insensitive-flags/files/new-template.cabal b/test/integration/tests/397-case-insensitive-flags/files/new-template.cabal deleted file mode 100644 index f9c9dd087a..0000000000 --- a/test/integration/tests/397-case-insensitive-flags/files/new-template.cabal +++ /dev/null @@ -1,18 +0,0 @@ -name: new-template -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -flag fixIt - manual: True - default: False - description: Make the build work - -library - hs-source-dirs: src - exposed-modules: Lib - default-language: Haskell2010 - if flag(fixIt) - build-depends: base >= 4.7 - else - build-depends: base < 4.7 && > 4.7 diff --git a/test/integration/tests/397-case-insensitive-flags/files/src/Lib.hs b/test/integration/tests/397-case-insensitive-flags/files/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/397-case-insensitive-flags/files/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/397-case-insensitive-flags/files/stack.yaml b/test/integration/tests/397-case-insensitive-flags/files/stack.yaml deleted file mode 100644 index 785b1469c7..0000000000 --- a/test/integration/tests/397-case-insensitive-flags/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: lts-14.27 diff --git a/test/integration/tests/3996-sublib-not-depended-upon/Main.hs b/test/integration/tests/3996-sublib-not-depended-upon/Main.hs deleted file mode 100644 index d2479bd056..0000000000 --- a/test/integration/tests/3996-sublib-not-depended-upon/Main.hs +++ /dev/null @@ -1,8 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf) -import StackTest - -main :: IO () -main = do - stack ["clean"] - stack ["build"] diff --git a/test/integration/tests/3996-sublib-not-depended-upon/files/Setup.hs b/test/integration/tests/3996-sublib-not-depended-upon/files/Setup.hs deleted 
file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/3996-sublib-not-depended-upon/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/3996-sublib-not-depended-upon/files/files.cabal b/test/integration/tests/3996-sublib-not-depended-upon/files/files.cabal deleted file mode 100644 index bf9a55bd17..0000000000 --- a/test/integration/tests/3996-sublib-not-depended-upon/files/files.cabal +++ /dev/null @@ -1,16 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >= 2.0 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base - default-language: Haskell2010 - -library lib - hs-source-dirs: src-internal - exposed-modules: Internal - build-depends: base - default-language: Haskell2010 diff --git a/test/integration/tests/3996-sublib-not-depended-upon/files/src-internal/Internal.hs b/test/integration/tests/3996-sublib-not-depended-upon/files/src-internal/Internal.hs deleted file mode 100644 index 462baca786..0000000000 --- a/test/integration/tests/3996-sublib-not-depended-upon/files/src-internal/Internal.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Internal where - -test :: Int -test = 42 diff --git a/test/integration/tests/3996-sublib-not-depended-upon/files/src/Lib.hs b/test/integration/tests/3996-sublib-not-depended-upon/files/src/Lib.hs deleted file mode 100644 index a35e444b85..0000000000 --- a/test/integration/tests/3996-sublib-not-depended-upon/files/src/Lib.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Lib where - -testLib :: Int -testLib = 42 diff --git a/test/integration/tests/3996-sublib-not-depended-upon/files/stack.yaml b/test/integration/tests/3996-sublib-not-depended-upon/files/stack.yaml deleted file mode 100644 index 6698e5edf6..0000000000 --- a/test/integration/tests/3996-sublib-not-depended-upon/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: ghc-8.6.5 -extra-deps: -- stm-2.4.4.1 -- mtl-2.2.1 diff --git 
a/test/integration/tests/3997-coverage-with-cabal-3/Main.hs b/test/integration/tests/3997-coverage-with-cabal-3/Main.hs deleted file mode 100644 index 56d487f527..0000000000 --- a/test/integration/tests/3997-coverage-with-cabal-3/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf) -import StackTest - -main :: IO () -main = do - stack ["setup"] - stackCheckStderr ["test", "--coverage"] $ \out -> do - unless ("The coverage report for foo's test-suite \"foo-test\" is available at" `isInfixOf` out) $ - fail "Coverage report didn't build" diff --git a/test/integration/tests/3997-coverage-with-cabal-3/files/package.yaml b/test/integration/tests/3997-coverage-with-cabal-3/files/package.yaml deleted file mode 100644 index d6ed694cec..0000000000 --- a/test/integration/tests/3997-coverage-with-cabal-3/files/package.yaml +++ /dev/null @@ -1,14 +0,0 @@ -name: foo - -dependencies: - - base - -library: - source-dirs: src - -tests: - foo-test: - source-dirs: test - main: Main.hs - dependencies: - - foo diff --git a/test/integration/tests/3997-coverage-with-cabal-3/files/stack.yaml b/test/integration/tests/3997-coverage-with-cabal-3/files/stack.yaml deleted file mode 100644 index 4a8edf30d1..0000000000 --- a/test/integration/tests/3997-coverage-with-cabal-3/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: lts-15.0 diff --git a/test/integration/tests/4001-excess-recompilation/Main.hs b/test/integration/tests/4001-excess-recompilation/Main.hs deleted file mode 100644 index 20b99b9a5a..0000000000 --- a/test/integration/tests/4001-excess-recompilation/Main.hs +++ /dev/null @@ -1,19 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf) -import StackTest - -main :: IO () -main = do - copy "test/Main1.hs" "test/Main.hs" - copy "bench/Main1.hs" "bench/Main.hs" - stack ["build"] - - copy "test/Main2.hs" "test/Main.hs" - copy "bench/Main2.hs" "bench/Main.hs" - res <- unregisteringLines . 
snd <$> stackStderr ["build"] - removeFileIgnore "test/Main.hs" - removeFileIgnore "bench/Main.hs" - unless (null res) $ fail "Stack recompiled when a test or benchmark file was changed, but only the library was targeted" - -unregisteringLines :: String -> [String] -unregisteringLines = filter (isInfixOf " unregistering ") . lines diff --git a/test/integration/tests/4001-excess-recompilation/files/Setup.hs b/test/integration/tests/4001-excess-recompilation/files/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/4001-excess-recompilation/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/4001-excess-recompilation/files/bench/Main1.hs b/test/integration/tests/4001-excess-recompilation/files/bench/Main1.hs deleted file mode 100644 index dbd8a1e159..0000000000 --- a/test/integration/tests/4001-excess-recompilation/files/bench/Main1.hs +++ /dev/null @@ -1,5 +0,0 @@ -import Lib - -main :: IO () -main = putStrLn "I am Main1" - diff --git a/test/integration/tests/4001-excess-recompilation/files/bench/Main2.hs b/test/integration/tests/4001-excess-recompilation/files/bench/Main2.hs deleted file mode 100644 index 9c51c0f9ef..0000000000 --- a/test/integration/tests/4001-excess-recompilation/files/bench/Main2.hs +++ /dev/null @@ -1,5 +0,0 @@ -import Lib - -main :: IO () -main = putStrLn "I am Main2" - diff --git a/test/integration/tests/4001-excess-recompilation/files/files.cabal b/test/integration/tests/4001-excess-recompilation/files/files.cabal deleted file mode 100644 index 00c3ca0303..0000000000 --- a/test/integration/tests/4001-excess-recompilation/files/files.cabal +++ /dev/null @@ -1,25 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >= 2.0 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base - default-language: Haskell2010 - -test-suite test - hs-source-dirs: test - main-is: Main.hs - build-depends: 
base, files - default-language: Haskell2010 - type: exitcode-stdio-1.0 - -benchmark bench - hs-source-dirs: bench - main-is: Main.hs - build-depends: base, files - default-language: Haskell2010 - type: exitcode-stdio-1.0 - diff --git a/test/integration/tests/4001-excess-recompilation/files/stack.yaml b/test/integration/tests/4001-excess-recompilation/files/stack.yaml deleted file mode 100644 index 14f23e9aa6..0000000000 --- a/test/integration/tests/4001-excess-recompilation/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.6.5 diff --git a/test/integration/tests/4001-excess-recompilation/files/test/Main1.hs b/test/integration/tests/4001-excess-recompilation/files/test/Main1.hs deleted file mode 100644 index dbd8a1e159..0000000000 --- a/test/integration/tests/4001-excess-recompilation/files/test/Main1.hs +++ /dev/null @@ -1,5 +0,0 @@ -import Lib - -main :: IO () -main = putStrLn "I am Main1" - diff --git a/test/integration/tests/4001-excess-recompilation/files/test/Main2.hs b/test/integration/tests/4001-excess-recompilation/files/test/Main2.hs deleted file mode 100644 index 9c51c0f9ef..0000000000 --- a/test/integration/tests/4001-excess-recompilation/files/test/Main2.hs +++ /dev/null @@ -1,5 +0,0 @@ -import Lib - -main :: IO () -main = putStrLn "I am Main2" - diff --git a/test/integration/tests/4044-no-run-tests-config/Main.hs b/test/integration/tests/4044-no-run-tests-config/Main.hs deleted file mode 100644 index 81d3e5a402..0000000000 --- a/test/integration/tests/4044-no-run-tests-config/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - -main :: IO () -main = do - stack ["test"] - stack ["build", "foo:test:foo"] diff --git a/test/integration/tests/4044-no-run-tests-config/files/.gitignore b/test/integration/tests/4044-no-run-tests-config/files/.gitignore deleted file mode 100644 index d43d807c0d..0000000000 --- a/test/integration/tests/4044-no-run-tests-config/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.cabal diff --git 
a/test/integration/tests/4044-no-run-tests-config/files/Test.hs b/test/integration/tests/4044-no-run-tests-config/files/Test.hs deleted file mode 100644 index 72891941af..0000000000 --- a/test/integration/tests/4044-no-run-tests-config/files/Test.hs +++ /dev/null @@ -1,2 +0,0 @@ -main :: IO () -main = error "You shall not pass!" diff --git a/test/integration/tests/4044-no-run-tests-config/files/package.yaml b/test/integration/tests/4044-no-run-tests-config/files/package.yaml deleted file mode 100644 index ba64b2afad..0000000000 --- a/test/integration/tests/4044-no-run-tests-config/files/package.yaml +++ /dev/null @@ -1,10 +0,0 @@ -name: foo -version: 0 -dependencies: -- base - -library: {} - -tests: - foo: - main: Test.hs diff --git a/test/integration/tests/4044-no-run-tests-config/files/stack.yaml b/test/integration/tests/4044-no-run-tests-config/files/stack.yaml deleted file mode 100644 index 426711aec1..0000000000 --- a/test/integration/tests/4044-no-run-tests-config/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -build: - test-arguments: - no-run-tests: true -resolver: ghc-8.6.5 diff --git a/test/integration/tests/4085-insufficient-error-message/Main.hs b/test/integration/tests/4085-insufficient-error-message/Main.hs deleted file mode 100644 index e04128f69c..0000000000 --- a/test/integration/tests/4085-insufficient-error-message/Main.hs +++ /dev/null @@ -1,98 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - -{-- - -import StackTest -import System.Environment (getEnv) -import System.Directory -import System.FilePath -import System.Process -import System.Exit -import Control.Exception.Base (bracket, bracket_) -import Control.Monad (guard, when, unless, msum) -import Control.Concurrent (threadDelay) -import Data.List (isInfixOf, delete, repeat) - -createDockerVolume :: Int -> IO String -createDockerVolume sizeInMB = do - (ec, stdout, stderr) <- runEx "docker" $ "volume create" - ++ " --driver local" - ++ " --opt type=tmpfs" - ++ " --opt device=tmpfs" - ++ " --opt 
o=size=" ++ show sizeInMB ++ "m" - unless (ec == ExitSuccess) $ error $ "Exited with exit code: " ++ show ec - return $ delete '\n' stdout - -removeDockerVolume :: Int -> String -> IO () -removeDockerVolume attempts name | attempts <= 0 = - error $ "Can't remove docker volume " ++ name -removeDockerVolume attempts name = do - (ec, _, stderr) <- runEx "docker" $ "volume rm --force " ++ name - let wasRemoved = (ec == ExitSuccess) || isInfixOf "No such volume" stderr - unless wasRemoved $ - threadDelay 3000000 >> -- sometimes docker releases a volume slowly - removeDockerVolume (attempts - 1) name - -withDockerVolume :: Int -> (String -> IO a) -> IO a -withDockerVolume sizeInMB = - bracket (createDockerVolume sizeInMB) (removeDockerVolume 5) - -buildDockerImageWithStackSourceInside :: String -> IO () -buildDockerImageWithStackSourceInside tag = withSourceDirectory $ do - dir <- testDir - runShell ("docker build" - ++ " --file " ++ (dir "Dockerfile") - ++ " --tag " ++ tag - ++ " --memory-swap -1" - ++ " .") - removeDanglingImages - -removeDanglingImages :: IO () -removeDanglingImages = - runShell "docker rmi -f $(docker images --quiet --filter 'dangling=true')" - -runDockerContainerWithVolume - :: String - -> String - -> String - -> String - -> IO (ExitCode, String, String) -runDockerContainerWithVolume imageTag volumeName volumeLocation cmd = - runEx "docker" $ "run" - ++ " --rm" - ++ " --workdir " ++ volumeLocation - ++ " --mount type=volume,dst=" ++ volumeLocation ++ ",src=" ++ volumeName - ++ " " ++ imageTag - ++ " " ++ cmd - -validateSrderr :: String -> Bool -validateSrderr = isInfixOf "No space left on device" - -imageTag :: String -imageTag = "4085-fix" - -spaceInMBJustEnoughToFailInTheExactMoment :: Int -spaceInMBJustEnoughToFailInTheExactMoment = 2000 - -main :: IO () -main = do - buildDockerImageWithStackSourceInside imageTag - (ec, _, stderr) <- withDockerVolume - spaceInMBJustEnoughToFailInTheExactMoment - (\volumeName -> - runDockerContainerWithVolume 
imageTag volumeName "/app" $ - "stack" - ++ " --stack-root " ++ "/app" - ++ " --resolver nightly-2018-06-05" - ++ " --no-terminal" - ++ " --install-ghc" - ++ " test") - unless (ec /= ExitSuccess) $ - error "stack process succeeded, but it shouldn't" - unless (validateSrderr stderr) $ - error "stderr validation failed" - -// --} - -main :: IO () -main = putStrLn "This test is disabled (see https://github.com/commercialhaskell/stack/issues/4427)." diff --git a/test/integration/tests/4095-utf8-pure-nix/Main.hs b/test/integration/tests/4095-utf8-pure-nix/Main.hs deleted file mode 100644 index 01a713672f..0000000000 --- a/test/integration/tests/4095-utf8-pure-nix/Main.hs +++ /dev/null @@ -1,9 +0,0 @@ -import StackTest - -main :: IO () -main - | isWindows = logInfo "Disabled on Windows as Nix is not currently supported on Windows." - | isMacOSX = logInfo "Takes too long to run, since it tries to build GHC" - | otherwise = do - stack ["build", "--nix-pure"] - stack ["exec", "--nix-pure", "ShowUnicode"] diff --git a/test/integration/tests/4095-utf8-pure-nix/files/.gitignore b/test/integration/tests/4095-utf8-pure-nix/files/.gitignore deleted file mode 100644 index d43d807c0d..0000000000 --- a/test/integration/tests/4095-utf8-pure-nix/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.cabal diff --git a/test/integration/tests/4095-utf8-pure-nix/files/ShowUnicode.hs b/test/integration/tests/4095-utf8-pure-nix/files/ShowUnicode.hs deleted file mode 100644 index 91011a6576..0000000000 --- a/test/integration/tests/4095-utf8-pure-nix/files/ShowUnicode.hs +++ /dev/null @@ -1,5 +0,0 @@ -import System.IO (stdout) -import Text.Printf (hPrintf) - -main :: IO () -main = hPrintf stdout "平和" diff --git a/test/integration/tests/4095-utf8-pure-nix/files/package.yaml b/test/integration/tests/4095-utf8-pure-nix/files/package.yaml deleted file mode 100644 index 5ee0230a5f..0000000000 --- a/test/integration/tests/4095-utf8-pure-nix/files/package.yaml +++ /dev/null @@ -1,4 +0,0 @@ -executables: - 
ShowUnicode: - dependencies: [base] - main: ShowUnicode.hs diff --git a/test/integration/tests/4095-utf8-pure-nix/files/stack.yaml b/test/integration/tests/4095-utf8-pure-nix/files/stack.yaml deleted file mode 100644 index 785b1469c7..0000000000 --- a/test/integration/tests/4095-utf8-pure-nix/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: lts-14.27 diff --git a/test/integration/tests/4101-dependency-tree/Main.hs b/test/integration/tests/4101-dependency-tree/Main.hs deleted file mode 100644 index a264329025..0000000000 --- a/test/integration/tests/4101-dependency-tree/Main.hs +++ /dev/null @@ -1,157 +0,0 @@ -import Control.Monad (when) -import StackTest -import System.Directory (getCurrentDirectory) - -main :: IO () -main = - if isWindows - then do return () - else do - - stackCheckStdout ["ls", "dependencies", "tree"] $ \stdOut -> do - let expected = unlines [ "Packages" - , "├─┬ files 0.1.0.0" - , "│ ├─┬ base 4.12.0.0" - , "│ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ └── rts 1.0" - , "│ │ ├─┬ integer-gmp 1.0.2.0" - , "│ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ └── rts 1.0" - , "│ │ └── rts 1.0" - , "│ ├─┬ filelock 0.1.1.2" - , "│ │ ├─┬ base 4.12.0.0" - , "│ │ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ ├─┬ integer-gmp 1.0.2.0" - , "│ │ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ └── rts 1.0" - , "│ │ └─┬ unix 2.7.2.2" - , "│ │ ├─┬ base 4.12.0.0" - , "│ │ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ ├─┬ integer-gmp 1.0.2.0" - , "│ │ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ └── rts 1.0" - , "│ │ ├─┬ bytestring 0.10.8.2" - , "│ │ │ ├─┬ base 4.12.0.0" - , "│ │ │ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ │ │ └── rts 1.0" - , "│ │ │ │ ├─┬ integer-gmp 1.0.2.0" - , "│ │ │ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ │ │ └── rts 1.0" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ ├─┬ deepseq 1.4.4.0" - , "│ │ │ │ ├─┬ array 0.5.3.0" - , "│ │ │ │ │ └─┬ base 4.12.0.0" - , "│ │ │ │ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ │ │ │ └── rts 1.0" - , "│ │ │ │ 
│ ├─┬ integer-gmp 1.0.2.0" - , "│ │ │ │ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ │ │ │ └── rts 1.0" - , "│ │ │ │ │ └── rts 1.0" - , "│ │ │ │ └─┬ base 4.12.0.0" - , "│ │ │ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ │ │ └── rts 1.0" - , "│ │ │ │ ├─┬ integer-gmp 1.0.2.0" - , "│ │ │ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ │ │ └── rts 1.0" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ └─┬ integer-gmp 1.0.2.0" - , "│ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ └── rts 1.0" - , "│ │ └─┬ time 1.8.0.2" - , "│ │ ├─┬ base 4.12.0.0" - , "│ │ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ ├─┬ integer-gmp 1.0.2.0" - , "│ │ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ └── rts 1.0" - , "│ │ └─┬ deepseq 1.4.4.0" - , "│ │ ├─┬ array 0.5.3.0" - , "│ │ │ └─┬ base 4.12.0.0" - , "│ │ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ ├─┬ integer-gmp 1.0.2.0" - , "│ │ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ └── rts 1.0" - , "│ │ └─┬ base 4.12.0.0" - , "│ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ └── rts 1.0" - , "│ │ ├─┬ integer-gmp 1.0.2.0" - , "│ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ └── rts 1.0" - , "│ │ └── rts 1.0" - , "│ ├─┬ mtl 2.2.2" - , "│ │ ├─┬ base 4.12.0.0" - , "│ │ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ ├─┬ integer-gmp 1.0.2.0" - , "│ │ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ │ └── rts 1.0" - , "│ │ │ └── rts 1.0" - , "│ │ └─┬ transformers 0.5.6.2" - , "│ │ └─┬ base 4.12.0.0" - , "│ │ ├─┬ ghc-prim 0.5.3" - , "│ │ │ └── rts 1.0" - , "│ │ ├─┬ integer-gmp 1.0.2.0" - , "│ │ │ └─┬ ghc-prim 0.5.3" - , "│ │ │ └── rts 1.0" - , "│ │ └── rts 1.0" - , "│ └─┬ subproject 0.1.0.0" - , "│ └─┬ base 4.12.0.0" - , "│ ├─┬ ghc-prim 0.5.3" - , "│ │ └── rts 1.0" - , "│ ├─┬ integer-gmp 1.0.2.0" - , "│ │ └─┬ ghc-prim 0.5.3" - , "│ │ └── rts 1.0" - , "│ └── rts 1.0" - , "└─┬ subproject 0.1.0.0" - , " └─┬ base 4.12.0.0" - , " ├─┬ ghc-prim 0.5.3" - , " │ └── rts 1.0" - , " ├─┬ integer-gmp 1.0.2.0" - , " │ └─┬ ghc-prim 0.5.3" 
- , " │ └── rts 1.0" - , " └── rts 1.0" - ] - when (stdOut /= expected) $ - error $ unlines [ "Expected:", expected, "Actual:", stdOut ] - - stackCheckStdout ["ls", "dependencies", "tree", "--depth=1"] $ \stdOut -> do - let expected = unlines [ "Packages" - , "├─┬ files 0.1.0.0" - , "│ ├── base 4.12.0.0" - , "│ ├── filelock 0.1.1.2" - , "│ ├── mtl 2.2.2" - , "│ └── subproject 0.1.0.0" - , "└─┬ subproject 0.1.0.0" - , " └── base 4.12.0.0" - ] - when (stdOut /= expected) $ - error $ unlines [ "Expected:", expected, "Actual:", stdOut ] - - stackCheckStdout ["ls", "dependencies", "tree", "subproject"] $ \stdOut -> do - let expected = unlines [ "Packages" - , "└─┬ subproject 0.1.0.0" - , " └─┬ base 4.12.0.0" - , " ├─┬ ghc-prim 0.5.3" - , " │ └── rts 1.0" - , " ├─┬ integer-gmp 1.0.2.0" - , " │ └─┬ ghc-prim 0.5.3" - , " │ └── rts 1.0" - , " └── rts 1.0" - ] - when (stdOut /= expected) $ - error $ unlines [ "Expected:", expected, "Actual:", stdOut ] - - stackCheckStdout ["ls", "dependencies", "json"] $ \stdOut -> do - currdir <- getCurrentDirectory - let expected = "[{\"dependencies\":[\"base\",\"bytestring\",\"time\"],\"name\":\"unix\",\"version\":\"2.7.2.2\",\"license\":\"BSD-3\"},{\"dependencies\":[\"base\"],\"name\":\"transformers\",\"version\":\"0.5.6.2\",\"license\":\"BSD-3\"},{\"dependencies\":[\"base\",\"deepseq\"],\"name\":\"time\",\"version\":\"1.8.0.2\",\"license\":\"BSD-3\"},{\"location\":{\"url\":\"file://" ++ currdir ++ "/subproject/\",\"type\":\"project 
package\"},\"dependencies\":[\"base\"],\"name\":\"subproject\",\"version\":\"0.1.0.0\",\"license\":\"AllRightsReserved\"},{\"dependencies\":[],\"name\":\"rts\",\"version\":\"1.0\",\"license\":\"BSD-3\"},{\"location\":{\"url\":\"https://hackage.haskell.org/package/mtl-2.2.2\",\"type\":\"hackage\"},\"dependencies\":[\"base\",\"transformers\"],\"name\":\"mtl\",\"version\":\"2.2.2\",\"license\":\"BSD3\"},{\"dependencies\":[\"ghc-prim\"],\"name\":\"integer-gmp\",\"version\":\"1.0.2.0\",\"license\":\"BSD-3\"},{\"dependencies\":[\"rts\"],\"name\":\"ghc-prim\",\"version\":\"0.5.3\",\"license\":\"BSD-3\"},{\"location\":{\"url\":\"file://" ++ currdir ++ "/\",\"type\":\"project package\"},\"dependencies\":[\"base\",\"filelock\",\"mtl\",\"subproject\"],\"name\":\"files\",\"version\":\"0.1.0.0\",\"license\":\"AllRightsReserved\"},{\"location\":{\"size\":9228,\"url\":\"https://github.com/snoyberg/filelock/archive/4f080496d8bf153fbe26e64d1f52cf73c7db25f6.tar.gz\",\"type\":\"archive\",\"sha256\":\"c27641e26137f52b27e3ef9e27e7ac3f845f719ea54a12475f00f2ea7e6d9afc\"},\"dependencies\":[\"base\",\"unix\"],\"name\":\"filelock\",\"version\":\"0.1.1.2\",\"license\":\"PublicDomain\"},{\"dependencies\":[\"array\",\"base\"],\"name\":\"deepseq\",\"version\":\"1.4.4.0\",\"license\":\"BSD-3\"},{\"dependencies\":[\"base\",\"deepseq\",\"ghc-prim\",\"integer-gmp\"],\"name\":\"bytestring\",\"version\":\"0.10.8.2\",\"license\":\"BSD-3\"},{\"dependencies\":[\"ghc-prim\",\"integer-gmp\",\"rts\"],\"name\":\"base\",\"version\":\"4.12.0.0\",\"license\":\"BSD-3\"},{\"dependencies\":[\"base\"],\"name\":\"array\",\"version\":\"0.5.3.0\",\"license\":\"BSD-3\"}]\n" - when (stdOut /= expected) $ - error $ unlines [ "Expected:", expected, "Actual:", stdOut ] diff --git a/test/integration/tests/4101-dependency-tree/files/files.cabal b/test/integration/tests/4101-dependency-tree/files/files.cabal deleted file mode 100644 index bd96bcbf9c..0000000000 --- 
a/test/integration/tests/4101-dependency-tree/files/files.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5, mtl, subproject, filelock - default-language: Haskell2010 diff --git a/test/integration/tests/4101-dependency-tree/files/src/Main.hs b/test/integration/tests/4101-dependency-tree/files/src/Main.hs deleted file mode 100644 index 9cd992d9e5..0000000000 --- a/test/integration/tests/4101-dependency-tree/files/src/Main.hs +++ /dev/null @@ -1,5 +0,0 @@ -module Main where - -main :: IO () -main = do - putStrLn "hello world" diff --git a/test/integration/tests/4101-dependency-tree/files/stack.yaml b/test/integration/tests/4101-dependency-tree/files/stack.yaml deleted file mode 100644 index 17d208d24e..0000000000 --- a/test/integration/tests/4101-dependency-tree/files/stack.yaml +++ /dev/null @@ -1,7 +0,0 @@ -resolver: lts-14.27 -packages: -- . 
-- subproject -extra-deps: -- github: snoyberg/filelock - commit: 4f080496d8bf153fbe26e64d1f52cf73c7db25f6 diff --git a/test/integration/tests/4101-dependency-tree/files/subproject/src/Main.hs b/test/integration/tests/4101-dependency-tree/files/subproject/src/Main.hs deleted file mode 100644 index 9cd992d9e5..0000000000 --- a/test/integration/tests/4101-dependency-tree/files/subproject/src/Main.hs +++ /dev/null @@ -1,5 +0,0 @@ -module Main where - -main :: IO () -main = do - putStrLn "hello world" diff --git a/test/integration/tests/4101-dependency-tree/files/subproject/subproject.cabal b/test/integration/tests/4101-dependency-tree/files/subproject/subproject.cabal deleted file mode 100644 index 6c3a2f939d..0000000000 --- a/test/integration/tests/4101-dependency-tree/files/subproject/subproject.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: subproject -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/4105-test-coverage-of-internal-lib/Main.hs b/test/integration/tests/4105-test-coverage-of-internal-lib/Main.hs deleted file mode 100644 index f03c0433fc..0000000000 --- a/test/integration/tests/4105-test-coverage-of-internal-lib/Main.hs +++ /dev/null @@ -1,19 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf, isPrefixOf) -import StackTest - -main :: IO () -main = do - stack ["clean"] - stack ["build"] - res <- getCoverageLines . snd <$> stackStderr ["test", "--coverage", "--color", "never"] - case res of - _:exprs:_ -> unless ("2/2" `isInfixOf` exprs) testFail - _ -> testFail - where - testFail = fail "Stack didn't generate coverage from both libraries" - -getCoverageLines :: String -> [String] -getCoverageLines = dropWhile (not . isCoverageHeader) . 
lines - where - isCoverageHeader = isPrefixOf "Generating coverage report for " diff --git a/test/integration/tests/4105-test-coverage-of-internal-lib/files/files.cabal b/test/integration/tests/4105-test-coverage-of-internal-lib/files/files.cabal deleted file mode 100644 index 49cd5a2431..0000000000 --- a/test/integration/tests/4105-test-coverage-of-internal-lib/files/files.cabal +++ /dev/null @@ -1,23 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >= 2.0 - -library - exposed-modules: Src - hs-source-dirs: src - build-depends: base - default-language: Haskell2010 - -library sublib - exposed-modules: B - hs-source-dirs: src-sublib - build-depends: base - default-language: Haskell2010 - -test-suite test - type: exitcode-stdio-1.0 - main-is: Main.hs - hs-source-dirs: test - build-depends: base, files, sublib - default-language: Haskell2010 diff --git a/test/integration/tests/4105-test-coverage-of-internal-lib/files/src-sublib/B.hs b/test/integration/tests/4105-test-coverage-of-internal-lib/files/src-sublib/B.hs deleted file mode 100644 index 53253d5dcc..0000000000 --- a/test/integration/tests/4105-test-coverage-of-internal-lib/files/src-sublib/B.hs +++ /dev/null @@ -1,5 +0,0 @@ -module B where - --- | A function of the internal library -funInternal :: Int -> Int -funInternal = pred diff --git a/test/integration/tests/4105-test-coverage-of-internal-lib/files/src/Src.hs b/test/integration/tests/4105-test-coverage-of-internal-lib/files/src/Src.hs deleted file mode 100644 index 0f8db7fb77..0000000000 --- a/test/integration/tests/4105-test-coverage-of-internal-lib/files/src/Src.hs +++ /dev/null @@ -1,5 +0,0 @@ -module Src where - --- | A function of the main library -funMainLib :: Int -> Int -funMainLib = succ diff --git a/test/integration/tests/4105-test-coverage-of-internal-lib/files/stack.yaml b/test/integration/tests/4105-test-coverage-of-internal-lib/files/stack.yaml deleted file mode 100644 index 785b1469c7..0000000000 --- 
a/test/integration/tests/4105-test-coverage-of-internal-lib/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: lts-14.27 diff --git a/test/integration/tests/4105-test-coverage-of-internal-lib/files/test/Main.hs b/test/integration/tests/4105-test-coverage-of-internal-lib/files/test/Main.hs deleted file mode 100644 index b1cf81b0dc..0000000000 --- a/test/integration/tests/4105-test-coverage-of-internal-lib/files/test/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import Control.Monad (when) - -import Src -import B - -main = when (funMainLib 41 /= funInternal 43) $ error "test failed" diff --git a/test/integration/tests/4181-clean-wo-dl-ghc/Main.hs b/test/integration/tests/4181-clean-wo-dl-ghc/Main.hs deleted file mode 100644 index 415380a7e6..0000000000 --- a/test/integration/tests/4181-clean-wo-dl-ghc/Main.hs +++ /dev/null @@ -1,12 +0,0 @@ --- | --- The integration tests have no ghc present, initially. Stack should not --- require ghc present to run the `clean` command. - -import StackTest - -main :: IO () -main = do - -- `stack clean` should succeed even though there is no ghc available. - -- See the stack.yaml file for how this works. - stackIgnoreException ["clean"] - stackCleanFull diff --git a/test/integration/tests/4181-clean-wo-dl-ghc/files/foo.cabal b/test/integration/tests/4181-clean-wo-dl-ghc/files/foo.cabal deleted file mode 100644 index 45446b7f9c..0000000000 --- a/test/integration/tests/4181-clean-wo-dl-ghc/files/foo.cabal +++ /dev/null @@ -1,13 +0,0 @@ -cabal-version: >= 1.10 - --- This file has been generated from package.yaml by hpack version 0.29.6. 
--- --- see: https://github.com/sol/hpack --- --- hash: 941a1ab4bea2f0ee229dd6ab7fe9730517a0397fb9141fe2841a0f9748dbfd57 - -name: foo -version: 0.1.0.0 -build-type: Simple - -library diff --git a/test/integration/tests/4181-clean-wo-dl-ghc/files/stack.yaml b/test/integration/tests/4181-clean-wo-dl-ghc/files/stack.yaml deleted file mode 100644 index 654a70d662..0000000000 --- a/test/integration/tests/4181-clean-wo-dl-ghc/files/stack.yaml +++ /dev/null @@ -1,8 +0,0 @@ -# Update the resolver as necessary -resolver: ghc-8.22 -# Do not use the system ghc, as ghc must not be available -system-ghc: false -# Do not install any other ghc, as ghc must not be available -install-ghc: false -packages: -- '.' diff --git a/test/integration/tests/4215-missing-unregister/Main.hs b/test/integration/tests/4215-missing-unregister/Main.hs deleted file mode 100644 index 83056eb580..0000000000 --- a/test/integration/tests/4215-missing-unregister/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - -main :: IO () -main = do - stack ["build", "--stack-yaml", "stack1.yaml"] - stack ["build", "--stack-yaml", "stack2.yaml"] diff --git a/test/integration/tests/4215-missing-unregister/files/.gitignore b/test/integration/tests/4215-missing-unregister/files/.gitignore deleted file mode 100644 index 0afa51175a..0000000000 --- a/test/integration/tests/4215-missing-unregister/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -foo.cabal diff --git a/test/integration/tests/4215-missing-unregister/files/stack1.yaml b/test/integration/tests/4215-missing-unregister/files/stack1.yaml deleted file mode 100644 index 5a7ccc2a25..0000000000 --- a/test/integration/tests/4215-missing-unregister/files/stack1.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: ghc-8.6.5 -packages: -- v1 diff --git a/test/integration/tests/4215-missing-unregister/files/stack2.yaml b/test/integration/tests/4215-missing-unregister/files/stack2.yaml deleted file mode 100644 index b42f9df5c2..0000000000 --- 
a/test/integration/tests/4215-missing-unregister/files/stack2.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: ghc-8.6.5 -packages: -- v2 diff --git a/test/integration/tests/4215-missing-unregister/files/v1/package.yaml b/test/integration/tests/4215-missing-unregister/files/v1/package.yaml deleted file mode 100644 index 7bcacfcb87..0000000000 --- a/test/integration/tests/4215-missing-unregister/files/v1/package.yaml +++ /dev/null @@ -1,7 +0,0 @@ -name: foo -version: 1 - -dependencies: -- base - -library: {} diff --git a/test/integration/tests/4215-missing-unregister/files/v2/package.yaml b/test/integration/tests/4215-missing-unregister/files/v2/package.yaml deleted file mode 100644 index e49b4fdc65..0000000000 --- a/test/integration/tests/4215-missing-unregister/files/v2/package.yaml +++ /dev/null @@ -1,7 +0,0 @@ -name: foo -version: 2 - -dependencies: -- base - -library: {} diff --git a/test/integration/tests/4270-files-order/Main.hs b/test/integration/tests/4270-files-order/Main.hs deleted file mode 100644 index 8bbb1f82aa..0000000000 --- a/test/integration/tests/4270-files-order/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import Control.Monad -import StackTest - -main :: IO () -main = do - stack ["build"] - repl [] $ do - replCommand "putStrLn greeting" - line <- replGetLine - when (line /= "Hello, world!") $ error "Didn't load correctly." diff --git a/test/integration/tests/4270-files-order/files/cbits-ordering.cabal b/test/integration/tests/4270-files-order/files/cbits-ordering.cabal deleted file mode 100644 index e943116685..0000000000 --- a/test/integration/tests/4270-files-order/files/cbits-ordering.cabal +++ /dev/null @@ -1,27 +0,0 @@ --- This file has been generated from package.yaml by hpack version 0.28.2. 
--- --- see: https://github.com/sol/hpack --- --- hash: e76488d28476ba7cd8aa51be80b2c7eb71e870238b2a548581cf6093bd7c7994 - -name: cbits-ordering -version: 0.0.0 -build-type: Simple -cabal-version: >= 1.10 - -library - exposed-modules: - Lib - other-modules: - Paths_cbits_ordering - hs-source-dirs: - src - include-dirs: - cbits - c-sources: - cbits/the_dependency.c - cbits/a_dependent.c - - build-depends: - base >=4.7 && <5 - default-language: Haskell2010 diff --git a/test/integration/tests/4270-files-order/files/src/Lib.hs b/test/integration/tests/4270-files-order/files/src/Lib.hs deleted file mode 100644 index 9668a03fe0..0000000000 --- a/test/integration/tests/4270-files-order/files/src/Lib.hs +++ /dev/null @@ -1,13 +0,0 @@ -{-# LANGUAGE ForeignFunctionInterface #-} - -module Lib where - -import Foreign.C.String -import System.IO.Unsafe - -foreign import ccall unsafe "a_dependent.h greeting" - c_greeting :: CString - -{-# NOINLINE greeting #-} -greeting :: String -greeting = unsafePerformIO $ peekCString c_greeting diff --git a/test/integration/tests/4270-files-order/files/stack.yaml b/test/integration/tests/4270-files-order/files/stack.yaml deleted file mode 100644 index a55ce3d6c5..0000000000 --- a/test/integration/tests/4270-files-order/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: lts-14.27 - -packages: -- . 
diff --git a/test/integration/tests/4324-dot-includes-boot-packages/Main.hs b/test/integration/tests/4324-dot-includes-boot-packages/Main.hs deleted file mode 100644 index 493cd2b8ca..0000000000 --- a/test/integration/tests/4324-dot-includes-boot-packages/Main.hs +++ /dev/null @@ -1,9 +0,0 @@ -import StackTest -import Control.Monad (unless) -import Data.List (isInfixOf) - -main :: IO () -main = do - stackCheckStdout ["dot", "--external"] $ \str -> - unless ("\n\"process\" ->" `isInfixOf` str) $ - error "Not showing dependencies of process" diff --git a/test/integration/tests/4324-dot-includes-boot-packages/files/.gitignore b/test/integration/tests/4324-dot-includes-boot-packages/files/.gitignore deleted file mode 100644 index d43d807c0d..0000000000 --- a/test/integration/tests/4324-dot-includes-boot-packages/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.cabal diff --git a/test/integration/tests/4324-dot-includes-boot-packages/files/package.yaml b/test/integration/tests/4324-dot-includes-boot-packages/files/package.yaml deleted file mode 100644 index 0708d2f2d3..0000000000 --- a/test/integration/tests/4324-dot-includes-boot-packages/files/package.yaml +++ /dev/null @@ -1,7 +0,0 @@ -name: foo - -dependencies: -- base -- process - -library: {} diff --git a/test/integration/tests/4324-dot-includes-boot-packages/files/stack.yaml b/test/integration/tests/4324-dot-includes-boot-packages/files/stack.yaml deleted file mode 100644 index 14f23e9aa6..0000000000 --- a/test/integration/tests/4324-dot-includes-boot-packages/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.6.5 diff --git a/test/integration/tests/4390-dot-no-ghc/Main.hs b/test/integration/tests/4390-dot-no-ghc/Main.hs deleted file mode 100644 index ae94e7eeda..0000000000 --- a/test/integration/tests/4390-dot-no-ghc/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - -main :: IO () -main = do - stack ["ls", "dependencies", "--global-hints"] - stack ["dot", "--global-hints"] diff --git 
a/test/integration/tests/4390-dot-no-ghc/files/.gitignore b/test/integration/tests/4390-dot-no-ghc/files/.gitignore deleted file mode 100644 index d43d807c0d..0000000000 --- a/test/integration/tests/4390-dot-no-ghc/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.cabal diff --git a/test/integration/tests/4390-dot-no-ghc/files/package.yaml b/test/integration/tests/4390-dot-no-ghc/files/package.yaml deleted file mode 100644 index b1eda63d9f..0000000000 --- a/test/integration/tests/4390-dot-no-ghc/files/package.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: foo -version: 0 -dependencies: -- base -- process -library: {} diff --git a/test/integration/tests/4390-dot-no-ghc/files/stack.yaml b/test/integration/tests/4390-dot-no-ghc/files/stack.yaml deleted file mode 100644 index 14f23e9aa6..0000000000 --- a/test/integration/tests/4390-dot-no-ghc/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.6.5 diff --git a/test/integration/tests/4408-init-internal-libs/Main.hs b/test/integration/tests/4408-init-internal-libs/Main.hs deleted file mode 100644 index 47d192749a..0000000000 --- a/test/integration/tests/4408-init-internal-libs/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: IO () -main = stack ["init", "--resolver", "ghc-8.6.5", "--force"] diff --git a/test/integration/tests/4408-init-internal-libs/files/.gitignore b/test/integration/tests/4408-init-internal-libs/files/.gitignore deleted file mode 100644 index 684dbffa96..0000000000 --- a/test/integration/tests/4408-init-internal-libs/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -stack.yaml diff --git a/test/integration/tests/4408-init-internal-libs/files/foo.cabal b/test/integration/tests/4408-init-internal-libs/files/foo.cabal deleted file mode 100644 index 0e4d4898ae..0000000000 --- a/test/integration/tests/4408-init-internal-libs/files/foo.cabal +++ /dev/null @@ -1,15 +0,0 @@ -cabal-version: 2.0 -name: foo -version: 0 -build-type: Simple - -library - hs-source-dirs: src - build-depends: base, 
some-internal-lib - default-language: Haskell2010 - -library some-internal-lib - hs-source-dirs: src-internal - exposed-modules: Internal - build-depends: base - default-language: Haskell2010 diff --git a/test/integration/tests/443-specify-path/.gitignore b/test/integration/tests/443-specify-path/.gitignore deleted file mode 100644 index 027271b9b2..0000000000 --- a/test/integration/tests/443-specify-path/.gitignore +++ /dev/null @@ -1 +0,0 @@ -files diff --git a/test/integration/tests/443-specify-path/Main.hs b/test/integration/tests/443-specify-path/Main.hs deleted file mode 100644 index 12a8c5e3e8..0000000000 --- a/test/integration/tests/443-specify-path/Main.hs +++ /dev/null @@ -1,30 +0,0 @@ -import StackTest -import System.Directory -import System.FilePath -import System.Info (os) - -main :: IO () -main = do - -- install in relative path - removeDirIgnore "bin" - createDirectory "bin" - stack [defaultResolverArg, "--local-bin-path", "./bin", "install" , "happy"] - doesExist ("./bin/happy" ++ exeExt) - - -- Default install - -- This seems to fail due to direcory being cleaned up, - -- a manual test of the default stack install is required - -- defaultDir <- getAppUserDataDirectory "local" - -- stack ["install", "happy"] - -- doesExist (defaultDir ++ "/bin/happy" ++ exeExt) - - -- install in current dir - stack [defaultResolverArg, "--local-bin-path", ".", "install", "happy" ] - doesExist ("happy" ++ exeExt) - - -- install in absolute path - tmpDirectory <- fmap ( "absolute-bin") getCurrentDirectory - removeDirIgnore tmpDirectory - createDirectory tmpDirectory - stack [defaultResolverArg, "--local-bin-path", tmpDirectory, "install", "happy" ] - doesExist (tmpDirectory ("happy" ++ exeExt)) diff --git a/test/integration/tests/444-package-option/Main.hs b/test/integration/tests/444-package-option/Main.hs deleted file mode 100644 index 1ecb9f564d..0000000000 --- a/test/integration/tests/444-package-option/Main.hs +++ /dev/null @@ -1,8 +0,0 @@ -import StackTest - 
-main :: IO () -main = do - isAlpine <- getIsAlpine - if isAlpine || isARM - then logInfo "Disabled on Alpine Linux and ARM since it cannot yet install its own GHC." - else stack [defaultResolverArg, "--install-ghc", "runghc", "--package", "safe", "Test.hs"] diff --git a/test/integration/tests/4453-detailed/Main.hs b/test/integration/tests/4453-detailed/Main.hs deleted file mode 100644 index 8f13e72bcd..0000000000 --- a/test/integration/tests/4453-detailed/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -import StackTest -import System.Directory - -main :: IO () -main = do - removeDirIgnore ".stack-work" - stack [defaultResolverArg, "test"] diff --git a/test/integration/tests/4453-detailed/files/src/Lib.hs b/test/integration/tests/4453-detailed/files/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/4453-detailed/files/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/4453-detailed/files/stack.yaml b/test/integration/tests/4453-detailed/files/stack.yaml deleted file mode 100644 index 3732cb9c9f..0000000000 --- a/test/integration/tests/4453-detailed/files/stack.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: lts-14.27 -packages: -- '.' 
diff --git a/test/integration/tests/4453-detailed/files/test-detailed-example.cabal b/test/integration/tests/4453-detailed/files/test-detailed-example.cabal deleted file mode 100644 index 87a973ce4d..0000000000 --- a/test/integration/tests/4453-detailed/files/test-detailed-example.cabal +++ /dev/null @@ -1,18 +0,0 @@ -name: test-detailed-example -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.20 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - -test-suite test-detailed-example-test - type: detailed-0.9 - hs-source-dirs: test - test-module: Spec - build-depends: base >= 4.7, - Cabal >= 1.20 - default-language: Haskell2010 diff --git a/test/integration/tests/4453-detailed/files/test/Spec.hs b/test/integration/tests/4453-detailed/files/test/Spec.hs deleted file mode 100644 index fdce306520..0000000000 --- a/test/integration/tests/4453-detailed/files/test/Spec.hs +++ /dev/null @@ -1,20 +0,0 @@ -module Spec (tests) where - -import Distribution.TestSuite - -tests :: IO [Test] -tests = do - return [ - test "foo" Pass - ] - -test :: String -> Result -> Test -test name r = Test t - where - t = TestInstance { - run = return (Finished r) - , name = name - , tags = [] - , options = [] - , setOption = \_ _ -> Right t - } diff --git a/test/integration/tests/4488-newer-cabal-version/Main.hs b/test/integration/tests/4488-newer-cabal-version/Main.hs deleted file mode 100644 index 4970d2250a..0000000000 --- a/test/integration/tests/4488-newer-cabal-version/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - -main :: IO () -main = do - stackErr ["--stack-yaml", "stack-bad.yaml", "build", "--dry-run"] - stack ["--stack-yaml", "stack-good.yaml", "build"] diff --git a/test/integration/tests/4488-newer-cabal-version/files/Setup.hs b/test/integration/tests/4488-newer-cabal-version/files/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- 
a/test/integration/tests/4488-newer-cabal-version/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/4488-newer-cabal-version/files/foo.cabal b/test/integration/tests/4488-newer-cabal-version/files/foo.cabal deleted file mode 100644 index 7d7c730aee..0000000000 --- a/test/integration/tests/4488-newer-cabal-version/files/foo.cabal +++ /dev/null @@ -1,7 +0,0 @@ -cabal-version: 3.0 -name: foo -version: 0 -build-type: Simple - -library - default-language: Haskell2010 diff --git a/test/integration/tests/4488-newer-cabal-version/files/stack-bad.yaml b/test/integration/tests/4488-newer-cabal-version/files/stack-bad.yaml deleted file mode 100644 index 14f23e9aa6..0000000000 --- a/test/integration/tests/4488-newer-cabal-version/files/stack-bad.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.6.5 diff --git a/test/integration/tests/4488-newer-cabal-version/files/stack-good.yaml b/test/integration/tests/4488-newer-cabal-version/files/stack-good.yaml deleted file mode 100644 index 6edbd50517..0000000000 --- a/test/integration/tests/4488-newer-cabal-version/files/stack-good.yaml +++ /dev/null @@ -1,6 +0,0 @@ -resolver: ghc-8.6.5 -extra-deps: -- Cabal-3.0.0.0@sha256:1ba37b8d80e89213b17db7b8b9ea0108da55ca65f8c0cbb7433881a284c5cf67,26027 -- mtl-2.2.2@sha256:1050fb71acd9f5d67da7d992583f5bd0eb14407b9dc7acc122af1b738b706ca3,2261 -- parsec-3.1.13.0@sha256:d56210cf092db6e43024d9e8871c7e05a8e9e36e9aec09356e5bd401ea8f5a0c,4121 -- text-1.2.3.1@sha256:fc9719142e5cdd6f254b3f4831b133d3fad697ae9b54fe26424f0e023c8cc87d,8539 diff --git a/test/integration/tests/4706-ignore-ghc-env-files/Main.hs b/test/integration/tests/4706-ignore-ghc-env-files/Main.hs deleted file mode 100644 index 7c963b085b..0000000000 --- a/test/integration/tests/4706-ignore-ghc-env-files/Main.hs +++ /dev/null @@ -1,27 +0,0 @@ -import StackTest -import Control.Exception (bracket_) -import Control.Monad (when) -import System.Environment -import 
System.Directory -import System.Info (arch, os) - -main :: IO () -main = when False $ do -- skip this test until we start using GHC 8.4.4 or later for integration tests - let ghcVer = "8.4.4" - fp = concat - [ ".ghc.environment." - , arch - , "-" - , os - , "-" - , ghcVer - ] - writeFile "stack.yaml" $ "resolver: ghc-" ++ ghcVer - bracket_ - (writeFile fp "This is an invalid GHC environment file") - (removeFile fp) $ do - envFile <- canonicalizePath fp - setEnv "GHC_ENVIRONMENT" envFile - stack ["clean"] - stack ["build"] - stack ["runghc", "Main.hs"] diff --git a/test/integration/tests/4706-ignore-ghc-env-files/files/.gitignore b/test/integration/tests/4706-ignore-ghc-env-files/files/.gitignore deleted file mode 100644 index 0afaa560b3..0000000000 --- a/test/integration/tests/4706-ignore-ghc-env-files/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -foo.cabal -stack.yaml diff --git a/test/integration/tests/4706-ignore-ghc-env-files/files/package.yaml b/test/integration/tests/4706-ignore-ghc-env-files/files/package.yaml deleted file mode 100644 index 9bd223a0a0..0000000000 --- a/test/integration/tests/4706-ignore-ghc-env-files/files/package.yaml +++ /dev/null @@ -1,7 +0,0 @@ -name: foo -version: 0 - -dependencies: -- base - -library: {} diff --git a/test/integration/tests/4754-rebuild-haddocks/Main.hs b/test/integration/tests/4754-rebuild-haddocks/Main.hs deleted file mode 100644 index ad3239c26c..0000000000 --- a/test/integration/tests/4754-rebuild-haddocks/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest - -main :: IO () -main = do - stackCleanFull - stackErr ["haddock"] - stackCleanFull - stackErr ["haddock", "--no-haddock-deps"] - stack ["build"] - stackErr ["haddock", "--no-haddock-deps"] diff --git a/test/integration/tests/4754-rebuild-haddocks/files/.gitignore b/test/integration/tests/4754-rebuild-haddocks/files/.gitignore deleted file mode 100644 index 0afa51175a..0000000000 --- a/test/integration/tests/4754-rebuild-haddocks/files/.gitignore +++ 
/dev/null @@ -1 +0,0 @@ -foo.cabal diff --git a/test/integration/tests/4754-rebuild-haddocks/files/package.yaml b/test/integration/tests/4754-rebuild-haddocks/files/package.yaml deleted file mode 100644 index f49c20528e..0000000000 --- a/test/integration/tests/4754-rebuild-haddocks/files/package.yaml +++ /dev/null @@ -1,8 +0,0 @@ -name: foo -version: 0 - -dependencies: -- base - -library: - source-dirs: src diff --git a/test/integration/tests/4754-rebuild-haddocks/files/src/Foo.hs b/test/integration/tests/4754-rebuild-haddocks/files/src/Foo.hs deleted file mode 100644 index 3caec84bf1..0000000000 --- a/test/integration/tests/4754-rebuild-haddocks/files/src/Foo.hs +++ /dev/null @@ -1,5 +0,0 @@ -module Foo where - --- | The function below intentionally contains invalid Haddock -foo :: () -foo = () -- ^ this should fail!!! diff --git a/test/integration/tests/4754-rebuild-haddocks/files/stack.yaml b/test/integration/tests/4754-rebuild-haddocks/files/stack.yaml deleted file mode 100644 index 14f23e9aa6..0000000000 --- a/test/integration/tests/4754-rebuild-haddocks/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.6.5 diff --git a/test/integration/tests/4783-doctest-deps/Main.hs b/test/integration/tests/4783-doctest-deps/Main.hs deleted file mode 100644 index d35e0cf24c..0000000000 --- a/test/integration/tests/4783-doctest-deps/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -import StackTest - -main :: IO () -main = do - stackCleanFull - stack ["build", "acme-dont-copy"] - stack ["test"] diff --git a/test/integration/tests/4783-doctest-deps/files/.gitignore b/test/integration/tests/4783-doctest-deps/files/.gitignore deleted file mode 100644 index 0afa51175a..0000000000 --- a/test/integration/tests/4783-doctest-deps/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -foo.cabal diff --git a/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/Acme/Dont.hs b/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/Acme/Dont.hs deleted file mode 100644 index 
9ec33e0a79..0000000000 --- a/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/Acme/Dont.hs +++ /dev/null @@ -1,31 +0,0 @@ --- | --- Module : Acme.Dont --- Copyright : Gracjan Polak 2009 --- License : BSD-style --- Maintainer : Gracjan Polak --- Stability : experimental --- Portability : portable --- --- The Acme.Dont module provides the indispensable don't command, --- ported from Perl. --- --- For more information see influential documentation: --- --- --- Usage: --- --- > main = don't $ do --- > name <- getLine --- > putStrLn $ "hello " ++ name --- -module Acme.Dont where - --- | The Acme.Dont module provides a don't command, which is the --- opposite of Haskell's built-in do. It is used exactly like the do --- monadic construct except that, instead of executing the block it --- controls, it... well... doesn't. --- --- Regardless of the contents of the block, don't returns (). --- -don't :: (Monad m) => m a -> m () -don't _action = return () diff --git a/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/COPYRIGHT b/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/COPYRIGHT deleted file mode 100644 index bfa1719aa5..0000000000 --- a/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/COPYRIGHT +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (C) 2009 Gracjan Polak - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - -=============================================================================== - -(end of COPYRIGHT) diff --git a/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/Setup.hs b/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/Setup.hs deleted file mode 100644 index c2d38c4f4a..0000000000 --- a/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain \ No newline at end of file diff --git a/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/acme-dont-copy.cabal b/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/acme-dont-copy.cabal deleted file mode 100644 index 3e1c668a7a..0000000000 --- a/test/integration/tests/4783-doctest-deps/files/acme-dont-copy/acme-dont-copy.cabal +++ /dev/null @@ -1,19 +0,0 @@ -Name: acme-dont-copy -Version: 1.1 -Stability: stable -Exposed-modules: Acme.Dont -Build-type: Simple -License: BSD3 -License-File: COPYRIGHT -Copyright: 2009, Gracjan Polak -Author: Gracjan Polak -Maintainer: Gracjan Polak -Synopsis: A don't construct -Description: - The Acme.Dont module provides a don't command, which is the - opposite of Haskell's built-in do. It is used exactly like the do - monadic construct except that, instead of executing the block it - controls, it... well... doesn't. -Category: Acme -Build-depends: base<1000000 -Hs-source-dirs: . 
diff --git a/test/integration/tests/4783-doctest-deps/files/package.yaml b/test/integration/tests/4783-doctest-deps/files/package.yaml deleted file mode 100644 index 251fbfb148..0000000000 --- a/test/integration/tests/4783-doctest-deps/files/package.yaml +++ /dev/null @@ -1,16 +0,0 @@ -name: foo -version: 0 - -dependencies: -- base - -library: - source-dirs: src - -tests: - doctest: - source-dirs: test - main: Main.hs - dependencies: - - doctest - - acme-dont diff --git a/test/integration/tests/4783-doctest-deps/files/snapshot.yaml b/test/integration/tests/4783-doctest-deps/files/snapshot.yaml deleted file mode 100644 index dc0c4c1d21..0000000000 --- a/test/integration/tests/4783-doctest-deps/files/snapshot.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: lts-14.27 -name: foo -packages: -- acme-dont-1.1@sha256:8264ad3e5113d3e0417b46e71d5a9c0914a1f03b5b81319cc329f1dc0f49b96c,602 diff --git a/test/integration/tests/4783-doctest-deps/files/src/Foo.hs b/test/integration/tests/4783-doctest-deps/files/src/Foo.hs deleted file mode 100644 index 936d0ccc01..0000000000 --- a/test/integration/tests/4783-doctest-deps/files/src/Foo.hs +++ /dev/null @@ -1,8 +0,0 @@ -module Foo where - --- | --- --- >>> import Acme.Dont --- >>> don't foo -foo :: IO () -foo = error "foo" diff --git a/test/integration/tests/4783-doctest-deps/files/stack.yaml b/test/integration/tests/4783-doctest-deps/files/stack.yaml deleted file mode 100644 index 9ec39b7861..0000000000 --- a/test/integration/tests/4783-doctest-deps/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: snapshot.yaml -extra-deps: -# Include a package with a duplicated module name -- acme-dont-copy diff --git a/test/integration/tests/4783-doctest-deps/files/test/Main.hs b/test/integration/tests/4783-doctest-deps/files/test/Main.hs deleted file mode 100644 index 20e4883829..0000000000 --- a/test/integration/tests/4783-doctest-deps/files/test/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import Test.DocTest - -main :: IO () -main = doctest 
["src/Foo.hs"] diff --git a/test/integration/tests/4897-boot-package-pruned/Main.hs b/test/integration/tests/4897-boot-package-pruned/Main.hs deleted file mode 100644 index 7ba05377d9..0000000000 --- a/test/integration/tests/4897-boot-package-pruned/Main.hs +++ /dev/null @@ -1,16 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf) -import StackTest - -planFailure :: String -planFailure = - "but this GHC boot package has been pruned (issue #4510);" - -main :: IO () -main = do - stackErrStderr ["build"] (expectMessage planFailure) - -expectMessage :: String -> String -> IO () -expectMessage msg stderr = do - unless (words msg `isInfixOf` words stderr) - (error $ "Expected an error: \n" ++ show msg) diff --git a/test/integration/tests/4897-boot-package-pruned/files/directory/directory.cabal b/test/integration/tests/4897-boot-package-pruned/files/directory/directory.cabal deleted file mode 100644 index 78abfda5cf..0000000000 --- a/test/integration/tests/4897-boot-package-pruned/files/directory/directory.cabal +++ /dev/null @@ -1,8 +0,0 @@ -name: directory -version: 1.3.3.0 -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base - default-language: Haskell2010 diff --git a/test/integration/tests/4897-boot-package-pruned/files/files.cabal b/test/integration/tests/4897-boot-package-pruned/files/files.cabal deleted file mode 100644 index fd7c274440..0000000000 --- a/test/integration/tests/4897-boot-package-pruned/files/files.cabal +++ /dev/null @@ -1,8 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: base, directory, process - default-language: Haskell2010 diff --git a/test/integration/tests/4897-boot-package-pruned/files/stack.yaml b/test/integration/tests/4897-boot-package-pruned/files/stack.yaml deleted file mode 100644 index 12c3aaca2e..0000000000 --- a/test/integration/tests/4897-boot-package-pruned/files/stack.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: lts-14.27 
-packages: [.] -extra-deps: [./directory] diff --git a/test/integration/tests/4938-non-ascii-module-names/Main.hs b/test/integration/tests/4938-non-ascii-module-names/Main.hs deleted file mode 100644 index 059bc6a529..0000000000 --- a/test/integration/tests/4938-non-ascii-module-names/Main.hs +++ /dev/null @@ -1,9 +0,0 @@ -import StackTest -import Control.Monad (unless) - -main :: IO () -main = do - -- Disabled on Windows due to an error occured in the integration tests - -- regarding Unicode character. Tried to fix it (https://github.com/commercialhaskell/stack/pull/5162/commits/8f04ad9e4cbaa54370dc5af476e3307a16c84405) - -- but it didn't work - unless isWindows $ stack ["build"] diff --git a/test/integration/tests/4938-non-ascii-module-names/files/.gitignore b/test/integration/tests/4938-non-ascii-module-names/files/.gitignore deleted file mode 100644 index d43d807c0d..0000000000 --- a/test/integration/tests/4938-non-ascii-module-names/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.cabal diff --git a/test/integration/tests/4938-non-ascii-module-names/files/Lib.hs b/test/integration/tests/4938-non-ascii-module-names/files/Lib.hs deleted file mode 100644 index d54290bd9b..0000000000 --- a/test/integration/tests/4938-non-ascii-module-names/files/Lib.hs +++ /dev/null @@ -1,9 +0,0 @@ -module Lib - ( foo - , bar - ) where - -import Täst - -bar :: Int -bar = 42 diff --git "a/test/integration/tests/4938-non-ascii-module-names/files/T\303\244st.hs" "b/test/integration/tests/4938-non-ascii-module-names/files/T\303\244st.hs" deleted file mode 100644 index 16c2b18123..0000000000 --- "a/test/integration/tests/4938-non-ascii-module-names/files/T\303\244st.hs" +++ /dev/null @@ -1,4 +0,0 @@ -module Täst where - -foo :: Int -foo = 42 diff --git a/test/integration/tests/4938-non-ascii-module-names/files/app/Main.hs b/test/integration/tests/4938-non-ascii-module-names/files/app/Main.hs deleted file mode 100644 index 26e9751007..0000000000 --- 
a/test/integration/tests/4938-non-ascii-module-names/files/app/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Main where - -import Täst - -main :: IO () -main = putStrLn "42" diff --git a/test/integration/tests/4938-non-ascii-module-names/files/package.yaml b/test/integration/tests/4938-non-ascii-module-names/files/package.yaml deleted file mode 100644 index 96a4d01977..0000000000 --- a/test/integration/tests/4938-non-ascii-module-names/files/package.yaml +++ /dev/null @@ -1,10 +0,0 @@ -name: umlaut -dependencies: [base] -library: - source-dirs: . -executables: - exe: - main: Main.hs - source-dirs: app - dependencies: - - umlaut diff --git a/test/integration/tests/4938-non-ascii-module-names/files/stack.yaml b/test/integration/tests/4938-non-ascii-module-names/files/stack.yaml deleted file mode 100644 index 785b1469c7..0000000000 --- a/test/integration/tests/4938-non-ascii-module-names/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: lts-14.27 diff --git a/test/integration/tests/5272-only-locals/Main.hs b/test/integration/tests/5272-only-locals/Main.hs deleted file mode 100644 index 57b02a7d58..0000000000 --- a/test/integration/tests/5272-only-locals/Main.hs +++ /dev/null @@ -1,11 +0,0 @@ -import StackTest -import Control.Monad (void) - -main :: IO () -main = do - void $ stack' ["exec", "ghc-pkg", "unregister", "unliftio-core"] - stack ["clean"] - - stackErr ["build", "--only-locals"] - stack ["build", "--only-snapshot"] - stack ["build", "--only-locals"] diff --git a/test/integration/tests/5272-only-locals/files/.gitignore b/test/integration/tests/5272-only-locals/files/.gitignore deleted file mode 100644 index 0afa51175a..0000000000 --- a/test/integration/tests/5272-only-locals/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -foo.cabal diff --git a/test/integration/tests/5272-only-locals/files/package.yaml b/test/integration/tests/5272-only-locals/files/package.yaml deleted file mode 100644 index 11e610c467..0000000000 --- 
a/test/integration/tests/5272-only-locals/files/package.yaml +++ /dev/null @@ -1,9 +0,0 @@ -name: foo -version: 0.0.0 - -dependencies: -- base -- unliftio-core - -library: - source-dirs: src/ diff --git a/test/integration/tests/5272-only-locals/files/src/Foo.hs b/test/integration/tests/5272-only-locals/files/src/Foo.hs deleted file mode 100644 index 72e1004bad..0000000000 --- a/test/integration/tests/5272-only-locals/files/src/Foo.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Foo () where - -import Control.Monad.IO.Unlift () diff --git a/test/integration/tests/5272-only-locals/files/stack.yaml b/test/integration/tests/5272-only-locals/files/stack.yaml deleted file mode 100644 index 66d5361e8c..0000000000 --- a/test/integration/tests/5272-only-locals/files/stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -resolver: ghc-8.6.5 -extra-deps: -- unliftio-core-0.1.2.0@rev:2 -# Force a unique snapshot -- acme-missiles-0.3@rev:0 diff --git a/test/integration/tests/606-local-version-not-exist/Main.hs b/test/integration/tests/606-local-version-not-exist/Main.hs deleted file mode 100644 index 30d17c8729..0000000000 --- a/test/integration/tests/606-local-version-not-exist/Main.hs +++ /dev/null @@ -1,8 +0,0 @@ -import StackTest - -main :: IO () -main = do - stackErr ["build", "files-3"] - stackErr ["build", "files-0.1.0.0"] - stack ["build", "files"] - stack ["build", "."] diff --git a/test/integration/tests/606-local-version-not-exist/files/files.cabal b/test/integration/tests/606-local-version-not-exist/files/files.cabal deleted file mode 100644 index 6ec3b1e471..0000000000 --- a/test/integration/tests/606-local-version-not-exist/files/files.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/606-local-version-not-exist/files/src/Lib.hs 
b/test/integration/tests/606-local-version-not-exist/files/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/606-local-version-not-exist/files/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/606-local-version-not-exist/files/stack.yaml b/test/integration/tests/606-local-version-not-exist/files/stack.yaml deleted file mode 100644 index 6b6b64cc34..0000000000 --- a/test/integration/tests/606-local-version-not-exist/files/stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -flags: {} -packages: -- '.' -extra-deps: [] -resolver: lts-14.27 diff --git a/test/integration/tests/617-extra-dep-flag/Main.hs b/test/integration/tests/617-extra-dep-flag/Main.hs deleted file mode 100644 index 50fcae4fa6..0000000000 --- a/test/integration/tests/617-extra-dep-flag/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: IO () -main = stack ["build", "acme-dont-1.1"] diff --git a/test/integration/tests/617-extra-dep-flag/files/stack.yaml b/test/integration/tests/617-extra-dep-flag/files/stack.yaml deleted file mode 100644 index 5001bb9140..0000000000 --- a/test/integration/tests/617-extra-dep-flag/files/stack.yaml +++ /dev/null @@ -1,7 +0,0 @@ -resolver: ghc-8.6.5 -flags: - text: - integer-simple: false -extra-deps: -- text-1.2.0.3 -packages: [] diff --git a/test/integration/tests/617-unused-flag-cli/Main.hs b/test/integration/tests/617-unused-flag-cli/Main.hs deleted file mode 100644 index f9a2b093ea..0000000000 --- a/test/integration/tests/617-unused-flag-cli/Main.hs +++ /dev/null @@ -1,8 +0,0 @@ -import StackTest - -main :: IO () -main = do - stack ["build"] - stackErr ["build", "--flag", "foo:bar"] - stackErr ["build", "--flag", "files:bar"] - stack ["build", "--flag", "*:bar"] diff --git a/test/integration/tests/617-unused-flag-cli/files/files.cabal b/test/integration/tests/617-unused-flag-cli/files/files.cabal deleted 
file mode 100644 index 6ec3b1e471..0000000000 --- a/test/integration/tests/617-unused-flag-cli/files/files.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/617-unused-flag-cli/files/src/Lib.hs b/test/integration/tests/617-unused-flag-cli/files/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/617-unused-flag-cli/files/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/617-unused-flag-cli/files/stack.yaml b/test/integration/tests/617-unused-flag-cli/files/stack.yaml deleted file mode 100644 index 14f23e9aa6..0000000000 --- a/test/integration/tests/617-unused-flag-cli/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.6.5 diff --git a/test/integration/tests/617-unused-flag-name-yaml/Main.hs b/test/integration/tests/617-unused-flag-name-yaml/Main.hs deleted file mode 100644 index b16d43227b..0000000000 --- a/test/integration/tests/617-unused-flag-name-yaml/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: IO () -main = stackErr ["build"] diff --git a/test/integration/tests/617-unused-flag-name-yaml/files/files.cabal b/test/integration/tests/617-unused-flag-name-yaml/files/files.cabal deleted file mode 100644 index 6ec3b1e471..0000000000 --- a/test/integration/tests/617-unused-flag-name-yaml/files/files.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/617-unused-flag-name-yaml/files/src/Lib.hs 
b/test/integration/tests/617-unused-flag-name-yaml/files/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/617-unused-flag-name-yaml/files/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/617-unused-flag-name-yaml/files/stack.yaml b/test/integration/tests/617-unused-flag-name-yaml/files/stack.yaml deleted file mode 100644 index 2bc41ab366..0000000000 --- a/test/integration/tests/617-unused-flag-name-yaml/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: ghc-8.6.5 -flags: - files: - does-not-exist: false diff --git a/test/integration/tests/617-unused-flag-yaml/Main.hs b/test/integration/tests/617-unused-flag-yaml/Main.hs deleted file mode 100644 index b16d43227b..0000000000 --- a/test/integration/tests/617-unused-flag-yaml/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: IO () -main = stackErr ["build"] diff --git a/test/integration/tests/617-unused-flag-yaml/files/files.cabal b/test/integration/tests/617-unused-flag-yaml/files/files.cabal deleted file mode 100644 index 6ec3b1e471..0000000000 --- a/test/integration/tests/617-unused-flag-yaml/files/files.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/617-unused-flag-yaml/files/src/Lib.hs b/test/integration/tests/617-unused-flag-yaml/files/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/617-unused-flag-yaml/files/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/617-unused-flag-yaml/files/stack.yaml 
b/test/integration/tests/617-unused-flag-yaml/files/stack.yaml deleted file mode 100644 index 3ed7d2e7d5..0000000000 --- a/test/integration/tests/617-unused-flag-yaml/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: ghc-8.6.5 -flags: - does-not-exist: - foo: false diff --git a/test/integration/tests/620-env-command/Main.hs b/test/integration/tests/620-env-command/Main.hs deleted file mode 100644 index 2a4e394673..0000000000 --- a/test/integration/tests/620-env-command/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -import StackTest -import System.Process -import Control.Exception (throwIO) -import Control.Monad (unless) - -main :: IO () -main = unless isWindows $ rawSystem "bash" ["run.sh"] >>= throwIO diff --git a/test/integration/tests/620-env-command/files/Main.hs b/test/integration/tests/620-env-command/files/Main.hs deleted file mode 100644 index bd555bc39d..0000000000 --- a/test/integration/tests/620-env-command/files/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import Control.Concurrent.Async () - -main :: IO () -main = pure () diff --git a/test/integration/tests/620-env-command/files/run.sh b/test/integration/tests/620-env-command/files/run.sh deleted file mode 100644 index e13091da30..0000000000 --- a/test/integration/tests/620-env-command/files/run.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash - -set -euxo pipefail - -stack build --resolver lts-14.27 async -eval `stack config env --resolver lts-14.27` -ghc Main.hs diff --git a/test/integration/tests/717-sdist-test/Main.hs b/test/integration/tests/717-sdist-test/Main.hs deleted file mode 100644 index cce419b2a5..0000000000 --- a/test/integration/tests/717-sdist-test/Main.hs +++ /dev/null @@ -1,19 +0,0 @@ -import StackTest - -main :: IO () -main = do - -- verify building works - stack ["build"] - -- keep old behavior - stack ["sdist"] - -- successful sdist with --test-tarball - stack ["sdist", "package-with-working-th", "--test-tarball"] - -- fails because package contains TH which depends on files which 
are not put into sdist tarball - stackErr ["sdist", "package-with-th", "--test-tarball"] - -- same, but inside a subdir - stackErr ["sdist", "subdirs/failing-in-subdir", "--test-tarball"] - -- depends on packagea and packagec - these would fail if they were the target of sdist, - -- but since they are just dependencies, the operation should succeed - stack ["sdist", "subdirs/dependent-on-failing-packages", "--test-tarball"] - -- fails because a test depends on files which are not put into sdist tarball - stackErr ["sdist", "package-with-failing-test", "--test-tarball"] diff --git a/test/integration/tests/717-sdist-test/files/package-with-failing-test/README.md b/test/integration/tests/717-sdist-test/files/package-with-failing-test/README.md deleted file mode 100644 index 8831e9c09c..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-failing-test/README.md +++ /dev/null @@ -1 +0,0 @@ -# thtest diff --git a/test/integration/tests/717-sdist-test/files/package-with-failing-test/Setup.hs b/test/integration/tests/717-sdist-test/files/package-with-failing-test/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-failing-test/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/717-sdist-test/files/package-with-failing-test/files/file.txt b/test/integration/tests/717-sdist-test/files/package-with-failing-test/files/file.txt deleted file mode 100644 index 72943a16fb..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-failing-test/files/file.txt +++ /dev/null @@ -1 +0,0 @@ -aaa diff --git a/test/integration/tests/717-sdist-test/files/package-with-failing-test/package-with-failing-test.cabal b/test/integration/tests/717-sdist-test/files/package-with-failing-test/package-with-failing-test.cabal deleted file mode 100644 index 75e8f7ae2e..0000000000 --- 
a/test/integration/tests/717-sdist-test/files/package-with-failing-test/package-with-failing-test.cabal +++ /dev/null @@ -1,27 +0,0 @@ -name: package-with-failing-test -version: 0.1.0.0 -synopsis: Some package -description: Some package -homepage: https://invalid -license: BSD3 -license-file: LICENSE -author: Author name here -maintainer: example@example.com -copyright: 2000 Author name here -category: Web -build-type: Simple -extra-source-files: README.md -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5, - template-haskell - default-language: Haskell2010 - -test-suite tests - type: exitcode-stdio-1.0 - hs-source-dirs: test - main-is: Test.hs - build-depends: base >= 4.7 && < 5 diff --git a/test/integration/tests/717-sdist-test/files/package-with-failing-test/src/Lib.hs b/test/integration/tests/717-sdist-test/files/package-with-failing-test/src/Lib.hs deleted file mode 100644 index 0d8704c625..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-failing-test/src/Lib.hs +++ /dev/null @@ -1,8 +0,0 @@ -module Lib - ( someFunc - ) where - -import Language.Haskell.TH - -someFunc :: IO () -someFunc = putStrLn "aaa" diff --git a/test/integration/tests/717-sdist-test/files/package-with-failing-test/test/Test.hs b/test/integration/tests/717-sdist-test/files/package-with-failing-test/test/Test.hs deleted file mode 100644 index 39a26a86b4..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-failing-test/test/Test.hs +++ /dev/null @@ -1,4 +0,0 @@ -main :: IO () -main = do - readFile "files/file.txt" - return () diff --git a/test/integration/tests/717-sdist-test/files/package-with-th/README.md b/test/integration/tests/717-sdist-test/files/package-with-th/README.md deleted file mode 100644 index 8831e9c09c..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-th/README.md +++ /dev/null @@ -1 +0,0 @@ -# thtest diff --git 
a/test/integration/tests/717-sdist-test/files/package-with-th/Setup.hs b/test/integration/tests/717-sdist-test/files/package-with-th/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-th/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/717-sdist-test/files/package-with-th/files/file.txt b/test/integration/tests/717-sdist-test/files/package-with-th/files/file.txt deleted file mode 100644 index 72943a16fb..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-th/files/file.txt +++ /dev/null @@ -1 +0,0 @@ -aaa diff --git a/test/integration/tests/717-sdist-test/files/package-with-th/package-with-th.cabal b/test/integration/tests/717-sdist-test/files/package-with-th/package-with-th.cabal deleted file mode 100644 index 972934006d..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-th/package-with-th.cabal +++ /dev/null @@ -1,22 +0,0 @@ -name: package-with-th -version: 0.1.0.0 -synopsis: Some package -description: Some package -homepage: https://invalid -license: BSD3 -license-file: LICENSE -author: Author name here -maintainer: example@example.com -copyright: 2000 Author name here -category: Web -build-type: Simple -extra-source-files: README.md -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib, - TH - build-depends: base >= 4.7 && < 5, - template-haskell - default-language: Haskell2010 diff --git a/test/integration/tests/717-sdist-test/files/package-with-th/src/Lib.hs b/test/integration/tests/717-sdist-test/files/package-with-th/src/Lib.hs deleted file mode 100644 index 26d2841b8f..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-th/src/Lib.hs +++ /dev/null @@ -1,10 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module Lib - ( someFunc - ) where - -import TH -import Language.Haskell.TH - -someFunc :: IO () -someFunc = print 
$(thFunc) diff --git a/test/integration/tests/717-sdist-test/files/package-with-th/src/TH.hs b/test/integration/tests/717-sdist-test/files/package-with-th/src/TH.hs deleted file mode 100644 index df44b9d8ae..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-th/src/TH.hs +++ /dev/null @@ -1,8 +0,0 @@ -module TH (thFunc) where - -import Language.Haskell.TH - -thFunc :: Q Exp -thFunc = runIO $ do - readFile "files/file.txt" - return $ LitE (IntegerL 5) diff --git a/test/integration/tests/717-sdist-test/files/package-with-working-th/README.md b/test/integration/tests/717-sdist-test/files/package-with-working-th/README.md deleted file mode 100644 index 8831e9c09c..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-working-th/README.md +++ /dev/null @@ -1 +0,0 @@ -# thtest diff --git a/test/integration/tests/717-sdist-test/files/package-with-working-th/Setup.hs b/test/integration/tests/717-sdist-test/files/package-with-working-th/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-working-th/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/717-sdist-test/files/package-with-working-th/files/file.txt b/test/integration/tests/717-sdist-test/files/package-with-working-th/files/file.txt deleted file mode 100644 index 72943a16fb..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-working-th/files/file.txt +++ /dev/null @@ -1 +0,0 @@ -aaa diff --git a/test/integration/tests/717-sdist-test/files/package-with-working-th/package-with-working-th.cabal b/test/integration/tests/717-sdist-test/files/package-with-working-th/package-with-working-th.cabal deleted file mode 100644 index cbeb18c3ee..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-working-th/package-with-working-th.cabal +++ /dev/null @@ -1,22 +0,0 @@ -name: package-with-working-th 
-version: 0.1.0.0 -synopsis: Some package -description: Some package -homepage: https://invalid -license: BSD3 -license-file: LICENSE -author: Author name here -maintainer: example@example.com -copyright: 2000 Author name here -category: Web -build-type: Simple -extra-source-files: README.md -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib, - TH - build-depends: base >= 4.7 && < 5, - template-haskell - default-language: Haskell2010 diff --git a/test/integration/tests/717-sdist-test/files/package-with-working-th/src/Lib.hs b/test/integration/tests/717-sdist-test/files/package-with-working-th/src/Lib.hs deleted file mode 100644 index 26d2841b8f..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-working-th/src/Lib.hs +++ /dev/null @@ -1,10 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module Lib - ( someFunc - ) where - -import TH -import Language.Haskell.TH - -someFunc :: IO () -someFunc = print $(thFunc) diff --git a/test/integration/tests/717-sdist-test/files/package-with-working-th/src/TH.hs b/test/integration/tests/717-sdist-test/files/package-with-working-th/src/TH.hs deleted file mode 100644 index 5f13e7814a..0000000000 --- a/test/integration/tests/717-sdist-test/files/package-with-working-th/src/TH.hs +++ /dev/null @@ -1,7 +0,0 @@ -module TH (thFunc) where - -import Language.Haskell.TH - -thFunc :: Q Exp -thFunc = - return $ LitE (IntegerL 5) diff --git a/test/integration/tests/717-sdist-test/files/stack.yaml b/test/integration/tests/717-sdist-test/files/stack.yaml deleted file mode 100644 index 95dc305fa2..0000000000 --- a/test/integration/tests/717-sdist-test/files/stack.yaml +++ /dev/null @@ -1,10 +0,0 @@ -resolver: lts-14.27 -packages: -- package-with-th -- package-with-working-th -- package-with-failing-test -- subdirs/dependent-on-failing-packages -- subdirs/failing-in-subdir -extra-deps: [] -flags: {} -extra-package-dbs: [] diff --git 
a/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/README.md b/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/README.md deleted file mode 100644 index 8831e9c09c..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/README.md +++ /dev/null @@ -1 +0,0 @@ -# thtest diff --git a/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/Setup.hs b/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/dependent-on-failing-packages.cabal b/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/dependent-on-failing-packages.cabal deleted file mode 100644 index cb1ddff94c..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/dependent-on-failing-packages.cabal +++ /dev/null @@ -1,23 +0,0 @@ -name: dependent-on-failing-packages -version: 0.1.0.0 -synopsis: Some package -description: Some package -homepage: https://invalid -license: BSD3 -license-file: LICENSE -author: Author name here -maintainer: example@example.com -copyright: 2000 Author name here -category: Web -build-type: Simple -extra-source-files: README.md -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: LibD - build-depends: base >= 4.7 && < 5, - template-haskell, - package-with-th, - failing-in-subdir - default-language: Haskell2010 diff --git a/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/files/file.txt 
b/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/files/file.txt deleted file mode 100644 index 72943a16fb..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/files/file.txt +++ /dev/null @@ -1 +0,0 @@ -aaa diff --git a/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/src/LibD.hs b/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/src/LibD.hs deleted file mode 100644 index a0c7ad615e..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/src/LibD.hs +++ /dev/null @@ -1,11 +0,0 @@ -module LibD - ( someFunc - ) where - -import Lib -import LibC - -someFuncD :: IO () -someFuncD = do - someFunc - someFuncC diff --git a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/LICENSE b/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/LICENSE deleted file mode 100644 index 60aaf72cfa..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/LICENSE +++ /dev/null @@ -1,30 +0,0 @@ -Copyright Author name here (c) 2000 - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Author name here nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/README.md b/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/README.md deleted file mode 100644 index 8831e9c09c..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/README.md +++ /dev/null @@ -1 +0,0 @@ -# thtest diff --git a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/Setup.hs b/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/failing-in-subdir.cabal b/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/failing-in-subdir.cabal deleted file mode 100644 index 944a17a598..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/failing-in-subdir.cabal +++ /dev/null @@ -1,22 +0,0 @@ -name: failing-in-subdir -version: 0.1.0.0 -synopsis: Some package -description: Some package 
-homepage: https://invalid -license: BSD3 -license-file: LICENSE -author: Author name here -maintainer: example@example.com -copyright: 2000 Author name here -category: Web -build-type: Simple -extra-source-files: README.md -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: LibC, - THInSubdir - build-depends: base >= 4.7 && < 5, - template-haskell - default-language: Haskell2010 diff --git a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/files/file.txt b/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/files/file.txt deleted file mode 100644 index 72943a16fb..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/files/file.txt +++ /dev/null @@ -1 +0,0 @@ -aaa diff --git a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/LibC.hs b/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/LibC.hs deleted file mode 100644 index f951ce4a28..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/LibC.hs +++ /dev/null @@ -1,10 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module LibC - ( someFuncC - ) where - -import THInSubdir -import Language.Haskell.TH - -someFuncC :: IO () -someFuncC = print $(thFuncC) diff --git a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/THInSubdir.hs b/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/THInSubdir.hs deleted file mode 100644 index d5c2609120..0000000000 --- a/test/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/THInSubdir.hs +++ /dev/null @@ -1,8 +0,0 @@ -module THInSubdir (thFuncC) where - -import Language.Haskell.TH - -thFuncC :: Q Exp -thFuncC = runIO $ do - readFile "files/file.txt" - return $ LitE (IntegerL 5) diff --git a/test/integration/tests/763-buildable-false/Main.hs b/test/integration/tests/763-buildable-false/Main.hs deleted file mode 100644 index 89d44c5eea..0000000000 
--- a/test/integration/tests/763-buildable-false/Main.hs +++ /dev/null @@ -1,9 +0,0 @@ -import StackTest - -main :: IO () -main = do - stack ["build"] - stack ["build", "--flag", "*:force-enable"] - stack ["build", ":enabled"] - stackErr ["build", ":disabled"] - stack ["build", ":disabled", "--flag", "files:force-enable"] diff --git a/test/integration/tests/763-buildable-false/files/app/Main.hs b/test/integration/tests/763-buildable-false/files/app/Main.hs deleted file mode 100644 index d82a4bd93b..0000000000 --- a/test/integration/tests/763-buildable-false/files/app/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Main where - -main :: IO () -main = return () diff --git a/test/integration/tests/763-buildable-false/files/files.cabal b/test/integration/tests/763-buildable-false/files/files.cabal deleted file mode 100644 index a4c6069896..0000000000 --- a/test/integration/tests/763-buildable-false/files/files.cabal +++ /dev/null @@ -1,25 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -flag force-enable - default: False - description: foo - -executable enabled - hs-source-dirs: app - main-is: Main.hs - build-depends: base - default-language: Haskell2010 - buildable: True - -executable disabled - hs-source-dirs: app - main-is: Main.hs - build-depends: base - default-language: Haskell2010 - if flag(force-enable) - buildable: True - else - buildable: False diff --git a/test/integration/tests/763-buildable-false/files/stack.yaml b/test/integration/tests/763-buildable-false/files/stack.yaml deleted file mode 100644 index 6b6b64cc34..0000000000 --- a/test/integration/tests/763-buildable-false/files/stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -flags: {} -packages: -- '.' 
-extra-deps: [] -resolver: lts-14.27 diff --git a/test/integration/tests/796-ghc-options/Main.hs b/test/integration/tests/796-ghc-options/Main.hs deleted file mode 100644 index 25ee1e2869..0000000000 --- a/test/integration/tests/796-ghc-options/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -import StackTest - -main :: IO () -main = do - stack ["build"] - stackErr ["build", "--ghc-options=-DBAZ"] - stack ["build", "--ghc-options=-DQUX"] diff --git a/test/integration/tests/796-ghc-options/files/ghc-options.cabal b/test/integration/tests/796-ghc-options/files/ghc-options.cabal deleted file mode 100644 index 642090489e..0000000000 --- a/test/integration/tests/796-ghc-options/files/ghc-options.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: ghc-options -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/796-ghc-options/files/src/Lib.hs b/test/integration/tests/796-ghc-options/files/src/Lib.hs deleted file mode 100644 index cb23e74c8c..0000000000 --- a/test/integration/tests/796-ghc-options/files/src/Lib.hs +++ /dev/null @@ -1,19 +0,0 @@ -{-# LANGUAGE CPP #-} -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" - -#ifndef FOO -#error FOO isn't defined -#endif - -#ifndef BAR -#error BAR isn't defined -#endif - -#ifdef BAZ -#error BAZ is defined -#endif diff --git a/test/integration/tests/796-ghc-options/files/stack.yaml b/test/integration/tests/796-ghc-options/files/stack.yaml deleted file mode 100644 index bee42c6e9c..0000000000 --- a/test/integration/tests/796-ghc-options/files/stack.yaml +++ /dev/null @@ -1,5 +0,0 @@ -resolver: ghc-8.6.5 -ghc-options: - "*": -DFOO - ghc-options: -DBAR -rebuild-ghc-options: true diff --git a/test/integration/tests/basic-install/Main.hs b/test/integration/tests/basic-install/Main.hs deleted file mode 100644 index c49f21b659..0000000000 --- 
a/test/integration/tests/basic-install/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - -main :: IO () -main = do - stack [defaultResolverArg, "install", "acme-missiles-0.3"] - doesNotExist "stack.yaml" diff --git a/test/integration/tests/build-ghc/Main.hs b/test/integration/tests/build-ghc/Main.hs deleted file mode 100644 index d2ca66bda4..0000000000 --- a/test/integration/tests/build-ghc/Main.hs +++ /dev/null @@ -1,28 +0,0 @@ -import StackTest -import System.Directory (withCurrentDirectory) - -main :: IO () -main = superslow $ do - -- cleanup previous failing test... - removeDirIgnore "tmpPackage" - - stack ["new", "--resolver=lts-13.11", "tmpPackage"] - - -- use a commit which is known to succeed with hadrian binary-dist - let commitId = "be0dde8e3c27ca56477d1d1801bb77621f3618e1" - flavour = "quick" - - withCurrentDirectory "tmpPackage" $ do - appendFile "stack.yaml" $ unlines - [ "compiler-repository: https://gitlab.haskell.org/ghc/ghc.git" - , "compiler: ghc-git-" ++ commitId ++ "-" ++ flavour - ] - - -- Setup the package - stack ["setup"] - - -- build it with the built GHC - stack ["build"] - - -- cleanup - removeDirIgnore "tmpPackage" diff --git a/test/integration/tests/cabal-non-buildable-bug/Main.hs b/test/integration/tests/cabal-non-buildable-bug/Main.hs deleted file mode 100644 index a8d9e679c1..0000000000 --- a/test/integration/tests/cabal-non-buildable-bug/Main.hs +++ /dev/null @@ -1,18 +0,0 @@ -import StackTest - -main :: IO () -main = do - -- Newer Cabal: dry run and building should succeed, because they'll - -- both ignore the do-not-build - writeFile "stack.yaml" "resolver: ghc-8.6.5" - stack ["build", "--dry-run"] - stack ["build"] - - -- Older Cabal: both should fail, because they'll both try to - -- include the non-buildable component. If there's a regression, the - -- dry run will succeed (because Stack will use the proper logic) - -- and build will fail (because Cabal will be using its broken - -- logic). 
- writeFile "stack.yaml" "resolver: ghc-7.10.3" - stackErr ["build"] - stackErr ["build", "--dry-run"] diff --git a/test/integration/tests/cabal-non-buildable-bug/files/.gitignore b/test/integration/tests/cabal-non-buildable-bug/files/.gitignore deleted file mode 100644 index 0008ca1636..0000000000 --- a/test/integration/tests/cabal-non-buildable-bug/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -stack.yaml -foo.cabal diff --git a/test/integration/tests/cabal-non-buildable-bug/files/package.yaml b/test/integration/tests/cabal-non-buildable-bug/files/package.yaml deleted file mode 100644 index 67f3a25c46..0000000000 --- a/test/integration/tests/cabal-non-buildable-bug/files/package.yaml +++ /dev/null @@ -1,14 +0,0 @@ -name: foo -version: "0" - -dependencies: -- base - -library: {} - -executables: - not-built: - main: Main.hs - dependencies: - - does-not-exist - buildable: false diff --git a/test/integration/tests/cabal-public-sublibraries/Main.hs b/test/integration/tests/cabal-public-sublibraries/Main.hs deleted file mode 100644 index 987ebbfe2e..0000000000 --- a/test/integration/tests/cabal-public-sublibraries/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - -main :: IO () -main = do - putStrLn "Disabled: CI doesn't have GHC 8.8.1" - --stack ["build"] diff --git a/test/integration/tests/cabal-public-sublibraries/files/Setup.hs b/test/integration/tests/cabal-public-sublibraries/files/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/cabal-public-sublibraries/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/cabal-public-sublibraries/files/files.cabal b/test/integration/tests/cabal-public-sublibraries/files/files.cabal deleted file mode 100644 index 5255078e44..0000000000 --- a/test/integration/tests/cabal-public-sublibraries/files/files.cabal +++ /dev/null @@ -1,17 +0,0 @@ -cabal-version: 3.0 -name: files -version: 0.1.0.0 -build-type: 
Simple - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - -library sub - visibility: public - hs-source-dirs: src - exposed-modules: Sub - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/cabal-public-sublibraries/files/src/Lib.hs b/test/integration/tests/cabal-public-sublibraries/files/src/Lib.hs deleted file mode 100644 index d999e20553..0000000000 --- a/test/integration/tests/cabal-public-sublibraries/files/src/Lib.hs +++ /dev/null @@ -1,7 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = do - putStrLn "hello world" diff --git a/test/integration/tests/cabal-public-sublibraries/files/src/Sub.hs b/test/integration/tests/cabal-public-sublibraries/files/src/Sub.hs deleted file mode 100644 index 08c047a5a1..0000000000 --- a/test/integration/tests/cabal-public-sublibraries/files/src/Sub.hs +++ /dev/null @@ -1,7 +0,0 @@ -module Sub - ( someFunc - ) where - -someFunc :: IO () -someFunc = do - putStrLn "hello world" diff --git a/test/integration/tests/cabal-public-sublibraries/files/stack.yaml b/test/integration/tests/cabal-public-sublibraries/files/stack.yaml deleted file mode 100644 index 724dd5b170..0000000000 --- a/test/integration/tests/cabal-public-sublibraries/files/stack.yaml +++ /dev/null @@ -1,7 +0,0 @@ -resolver: nightly-2020-01-17 -packages: -- . 
-extra-deps: -- github: snoyberg/filelock - commit: 4f080496d8bf153fbe26e64d1f52cf73c7db25f6 -# - Cabal-3.0.0.0@sha256:1ba37b8d80e89213b17db7b8b9ea0108da55ca65f8c0cbb7433881a284c5cf67,26027 diff --git a/test/integration/tests/cabal-sublibrary-dependency/Main.hs b/test/integration/tests/cabal-sublibrary-dependency/Main.hs deleted file mode 100644 index 1fa8ee7d89..0000000000 --- a/test/integration/tests/cabal-sublibrary-dependency/Main.hs +++ /dev/null @@ -1,14 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf) -import StackTest - -main :: IO () -main = do - putStrLn "Disabled: CI doesn't have GHC 8.8.1" - {- - stackErrStderr ["build"] $ \str -> - let msg = "SubLibrary dependency is not supported, this will almost certainly fail" in - - unless (msg `isInfixOf` str) $ - error $ "Expected a warning: \n" ++ show msg - -} diff --git a/test/integration/tests/cabal-sublibrary-dependency/files/Setup.hs b/test/integration/tests/cabal-sublibrary-dependency/files/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/cabal-sublibrary-dependency/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/cabal-sublibrary-dependency/files/files.cabal b/test/integration/tests/cabal-sublibrary-dependency/files/files.cabal deleted file mode 100644 index 7d6db901c0..0000000000 --- a/test/integration/tests/cabal-sublibrary-dependency/files/files.cabal +++ /dev/null @@ -1,11 +0,0 @@ -cabal-version: 3.0 -name: files -version: 0.1.0.0 -build-type: Simple - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - , subproject:{subproject, sub} - default-language: Haskell2010 diff --git a/test/integration/tests/cabal-sublibrary-dependency/files/src/Lib.hs b/test/integration/tests/cabal-sublibrary-dependency/files/src/Lib.hs deleted file mode 100644 index 3bce6931fc..0000000000 --- 
a/test/integration/tests/cabal-sublibrary-dependency/files/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/cabal-sublibrary-dependency/files/src/Sub/Sub.hs b/test/integration/tests/cabal-sublibrary-dependency/files/src/Sub/Sub.hs deleted file mode 100644 index f38569b912..0000000000 --- a/test/integration/tests/cabal-sublibrary-dependency/files/src/Sub/Sub.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Sub.Sub - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/cabal-sublibrary-dependency/files/stack.yaml b/test/integration/tests/cabal-sublibrary-dependency/files/stack.yaml deleted file mode 100644 index 8b93dddda1..0000000000 --- a/test/integration/tests/cabal-sublibrary-dependency/files/stack.yaml +++ /dev/null @@ -1,7 +0,0 @@ -resolver: nightly-2020-01-17 -packages: -- . -- subproject -extra-deps: -- github: snoyberg/filelock - commit: 4f080496d8bf153fbe26e64d1f52cf73c7db25f6 diff --git a/test/integration/tests/cabal-sublibrary-dependency/files/subproject/src/Lib.hs b/test/integration/tests/cabal-sublibrary-dependency/files/subproject/src/Lib.hs deleted file mode 100644 index d36ff2714d..0000000000 --- a/test/integration/tests/cabal-sublibrary-dependency/files/subproject/src/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/cabal-sublibrary-dependency/files/subproject/src/SubLib.hs b/test/integration/tests/cabal-sublibrary-dependency/files/subproject/src/SubLib.hs deleted file mode 100644 index 17aca821e4..0000000000 --- a/test/integration/tests/cabal-sublibrary-dependency/files/subproject/src/SubLib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module SubLib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git 
a/test/integration/tests/cabal-sublibrary-dependency/files/subproject/subproject.cabal b/test/integration/tests/cabal-sublibrary-dependency/files/subproject/subproject.cabal deleted file mode 100644 index 4c083cac74..0000000000 --- a/test/integration/tests/cabal-sublibrary-dependency/files/subproject/subproject.cabal +++ /dev/null @@ -1,16 +0,0 @@ -name: subproject -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - -library sub - hs-source-dirs: src - exposed-modules: SubLib - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/integration/tests/copy-bins-works/Main.hs b/test/integration/tests/copy-bins-works/Main.hs deleted file mode 100644 index 038e3f1124..0000000000 --- a/test/integration/tests/copy-bins-works/Main.hs +++ /dev/null @@ -1,14 +0,0 @@ -import StackTest -import System.Directory -import Control.Monad (unless) - -main :: IO () -main = do - let test args = do - removeDirIgnore "bin" - stackCleanFull - stack args - exists <- doesDirectoryExist "bin" - unless exists $ error $ "Failed with: " ++ show args - test ["install", "--local-bin-path", "bin"] - test ["build", "--copy-bins", "--local-bin-path", "bin"] diff --git a/test/integration/tests/copy-bins-works/files/.gitignore b/test/integration/tests/copy-bins-works/files/.gitignore deleted file mode 100644 index f1a8228e76..0000000000 --- a/test/integration/tests/copy-bins-works/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -*.cabal -/bin/ diff --git a/test/integration/tests/copy-bins-works/files/package.yaml b/test/integration/tests/copy-bins-works/files/package.yaml deleted file mode 100644 index 34d45cd594..0000000000 --- a/test/integration/tests/copy-bins-works/files/package.yaml +++ /dev/null @@ -1,9 +0,0 @@ -name: foo -version: 0 - -executables: - bar: - source-dirs: src - main: Main.hs - dependencies: - - base diff --git 
a/test/integration/tests/copy-bins-works/files/stack.yaml b/test/integration/tests/copy-bins-works/files/stack.yaml deleted file mode 100644 index 818c8da40f..0000000000 --- a/test/integration/tests/copy-bins-works/files/stack.yaml +++ /dev/null @@ -1,2 +0,0 @@ -resolver: ghc-8.6.5 -copy-bins: true diff --git a/test/integration/tests/cyclic-test-deps/.gitignore b/test/integration/tests/cyclic-test-deps/.gitignore deleted file mode 100644 index 027271b9b2..0000000000 --- a/test/integration/tests/cyclic-test-deps/.gitignore +++ /dev/null @@ -1 +0,0 @@ -files diff --git a/test/integration/tests/cyclic-test-deps/Main.hs b/test/integration/tests/cyclic-test-deps/Main.hs deleted file mode 100644 index c602008da2..0000000000 --- a/test/integration/tests/cyclic-test-deps/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest - -main :: IO () -main = do - removeDirIgnore "text-1.2.3.1" - stack ["unpack", "text-1.2.3.1"] - stack ["unpack", "QuickCheck-2.10.1"] - removeFileIgnore "stack.yaml" - stack ["init", defaultResolverArg] - stack ["test", "--dry-run"] diff --git a/test/integration/tests/drop-packages/Main.hs b/test/integration/tests/drop-packages/Main.hs deleted file mode 100644 index b16d43227b..0000000000 --- a/test/integration/tests/drop-packages/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: IO () -main = stackErr ["build"] diff --git a/test/integration/tests/drop-packages/files/.gitignore b/test/integration/tests/drop-packages/files/.gitignore deleted file mode 100644 index d43d807c0d..0000000000 --- a/test/integration/tests/drop-packages/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.cabal diff --git a/test/integration/tests/drop-packages/files/package.yaml b/test/integration/tests/drop-packages/files/package.yaml deleted file mode 100644 index 1fe2825944..0000000000 --- a/test/integration/tests/drop-packages/files/package.yaml +++ /dev/null @@ -1,6 +0,0 @@ -name: unimportant -version: 0 -dependencies: -- base -- unliftio-core -library: {} 
diff --git a/test/integration/tests/drop-packages/files/stack.yaml b/test/integration/tests/drop-packages/files/stack.yaml deleted file mode 100644 index c34f80a87b..0000000000 --- a/test/integration/tests/drop-packages/files/stack.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: lts-14.27 -drop-packages: -- unliftio-core diff --git a/test/integration/tests/duplicate-package-ids/Main.hs b/test/integration/tests/duplicate-package-ids/Main.hs deleted file mode 100644 index 88fd081bc6..0000000000 --- a/test/integration/tests/duplicate-package-ids/Main.hs +++ /dev/null @@ -1,11 +0,0 @@ -import StackTest - -main :: IO () -main = do - readFile "stack1.yaml" >>= writeFile "stack.yaml" - stack ["setup"] - stack ["build", "auto-update"] - readFile "stack2.yaml" >>= writeFile "stack.yaml" - removeDirIgnore "auto-update-0.1.2.1" - stack ["unpack", "auto-update-0.1.2.1"] - stack ["build"] diff --git a/test/integration/tests/duplicate-package-ids/files/.gitignore b/test/integration/tests/duplicate-package-ids/files/.gitignore deleted file mode 100644 index f39970f250..0000000000 --- a/test/integration/tests/duplicate-package-ids/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -stack.yaml -auto-update-0.1.2.1 diff --git a/test/integration/tests/duplicate-package-ids/files/stack1.yaml b/test/integration/tests/duplicate-package-ids/files/stack1.yaml deleted file mode 100644 index d1ea4da297..0000000000 --- a/test/integration/tests/duplicate-package-ids/files/stack1.yaml +++ /dev/null @@ -1,2 +0,0 @@ -resolver: lts-14.27 -packages: [] diff --git a/test/integration/tests/duplicate-package-ids/files/stack2.yaml b/test/integration/tests/duplicate-package-ids/files/stack2.yaml deleted file mode 100644 index e2791f810d..0000000000 --- a/test/integration/tests/duplicate-package-ids/files/stack2.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: lts-14.27 -packages: -- auto-update-0.1.2.1 diff --git a/test/integration/tests/git-submodules/Main.hs b/test/integration/tests/git-submodules/Main.hs 
deleted file mode 100644 index 9521db130e..0000000000 --- a/test/integration/tests/git-submodules/Main.hs +++ /dev/null @@ -1,78 +0,0 @@ -import StackTest -import System.Directory (createDirectoryIfMissing,withCurrentDirectory, getCurrentDirectory) -import System.Exit (exitFailure) -import System.FilePath (()) -import Data.List (filter) -import System.IO (hPutStrLn, withFile, IOMode(..)) -import Control.Monad (unless) - -main :: IO () -main = unless isWindows $ do - let - gitInit = do - runShell "git init ." - runShell "git config user.name Test" - runShell "git config user.email test@test.com" - runShell "git config commit.gpgsign false" - - let withEmptyDir name inner = do - removeDirIgnore name - createDirectoryIfMissing True name - withCurrentDirectory name inner - - withEmptyDir "tmpSubSubRepo" $ do - gitInit - stack ["new", "pkg ", defaultResolverArg] - runShell "git add pkg" - runShell "git commit -m SubSubCommit" - - withEmptyDir "tmpSubRepo" $ do - gitInit - runShell "git submodule add ../tmpSubSubRepo sub" - runShell "git commit -a -m SubCommit" - - withEmptyDir "tmpRepo" $ do - gitInit - runShell "git submodule add ../tmpSubRepo sub" - runShell "git commit -a -m Commit" - - removeDirIgnore "tmpPackage" - stack ["new", defaultResolverArg, "tmpPackage"] - - curDir <- getCurrentDirectory - let tmpRepoDir = curDir "tmpRepo" - gitHead <- runWithCwd tmpRepoDir "git" ["rev-parse", "HEAD"] - let gitHeadCommit = stripNewline gitHead - - withCurrentDirectory "tmpPackage" $ do - -- add git dependency on repo with recursive submodules - writeToStackFile (tmpRepoDir, gitHeadCommit) - -- Setup the package - stack ["setup"] - - -- cleanup - removeDirIgnore "tmpRepo" - removeDirIgnore "tmpSubRepo" - removeDirIgnore "tmpSubSubRepo" - removeDirIgnore "tmpPackage" - -writeToStackFile :: (String, String) -> IO () -writeToStackFile (tmpRepoDir, gitCommit) = do - curDir <- getCurrentDirectory - let stackFile = curDir "stack.yaml" - let line1 = "extra-deps:" - line2 = "- git: 
" ++ tmpRepoDir - line3 = " commit: " ++ gitCommit - line4 = " subdir: sub/sub/pkg" - withFile stackFile AppendMode (\handle -> do - hPutStrLn handle line1 - hPutStrLn handle line2 - hPutStrLn handle line3 - hPutStrLn handle line4 - ) - -newline :: Char -newline = '\n' - -stripNewline :: String -> String -stripNewline str = filter (\x -> x /= newline) str diff --git a/test/integration/tests/haddock-options/Main.hs b/test/integration/tests/haddock-options/Main.hs deleted file mode 100644 index 4cc0a88578..0000000000 --- a/test/integration/tests/haddock-options/Main.hs +++ /dev/null @@ -1,14 +0,0 @@ -import StackTest - -main :: IO () -main = do - removeDirIgnore ".stack-work" - - -- Fails to work because BAR is defined here and FOO in stack file - stackErr ["haddock", "--haddock-arguments", "--optghc=-DBAR"] - stack ["clean"] - -- Works just fine, test #3099 while at it. - stack ["haddock", "--no-haddock-hyperlink-source"] - stack ["clean"] - -- Fails to work because we have bad argument - stackErr ["haddock", "--haddock-arguments", "--stack_it_badhaddockargument"] diff --git a/test/integration/tests/haddock-options/files/haddock-options.cabal b/test/integration/tests/haddock-options/files/haddock-options.cabal deleted file mode 100644 index f47b3df0b4..0000000000 --- a/test/integration/tests/haddock-options/files/haddock-options.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: haddock-options -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base - default-language: Haskell2010 diff --git a/test/integration/tests/haddock-options/files/src/Lib.hs b/test/integration/tests/haddock-options/files/src/Lib.hs deleted file mode 100644 index 2ad999dff9..0000000000 --- a/test/integration/tests/haddock-options/files/src/Lib.hs +++ /dev/null @@ -1,11 +0,0 @@ -{-# LANGUAGE CPP #-} -module Lib - ( someFunc - ) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" - -#if defined(FOO) && 
defined(BAR) -#error FOO and BAR is defined -#endif diff --git a/test/integration/tests/haddock-options/files/stack.yaml b/test/integration/tests/haddock-options/files/stack.yaml deleted file mode 100644 index a505b7cda3..0000000000 --- a/test/integration/tests/haddock-options/files/stack.yaml +++ /dev/null @@ -1,9 +0,0 @@ -flags: {} -packages: -- '.' -extra-deps: [] -resolver: lts-14.27 -build: - haddock-arguments: - haddock-args: - - --optghc=-DFOO diff --git a/test/integration/tests/hpack-repo/Main.hs b/test/integration/tests/hpack-repo/Main.hs deleted file mode 100644 index 9512c6cca4..0000000000 --- a/test/integration/tests/hpack-repo/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -import StackTest -import System.Directory - -main :: IO () -main = do - removeDirIgnore ".stack-work" - stack ["build"] diff --git a/test/integration/tests/hpack-repo/files/files.cabal b/test/integration/tests/hpack-repo/files/files.cabal deleted file mode 100644 index e167e1ff8c..0000000000 --- a/test/integration/tests/hpack-repo/files/files.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5, validity - default-language: Haskell2010 diff --git a/test/integration/tests/hpack-repo/files/src/Lib.hs b/test/integration/tests/hpack-repo/files/src/Lib.hs deleted file mode 100644 index 65c2b1b231..0000000000 --- a/test/integration/tests/hpack-repo/files/src/Lib.hs +++ /dev/null @@ -1,8 +0,0 @@ -module Lib - ( someFunc - ) where - -import Data.Validity - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/integration/tests/hpack-repo/files/stack.yaml b/test/integration/tests/hpack-repo/files/stack.yaml deleted file mode 100644 index f321596c45..0000000000 --- a/test/integration/tests/hpack-repo/files/stack.yaml +++ /dev/null @@ -1,8 +0,0 @@ -packages: -- . 
-resolver: lts-14.27 -extra-deps: -- git: https://github.com/NorfairKing/validity.git - commit: d128cc30bc886e31ea7f8161fb7708c08b162937 - subdirs: - - validity diff --git a/test/integration/tests/init-omit-packages/Main.hs b/test/integration/tests/init-omit-packages/Main.hs deleted file mode 100644 index ab85b9bb9c..0000000000 --- a/test/integration/tests/init-omit-packages/Main.hs +++ /dev/null @@ -1,12 +0,0 @@ -import Control.Monad (unless) -import StackTest -import System.IO (readFile) - -main :: IO () -main = do - removeFileIgnore "stack.yaml" - stackErr ["init", "--resolver", "lts-14.27"] - stack ["init", "--resolver", "lts-14.27", "--omit-packages"] - contents <- lines <$> readFile "stack.yaml" - unless ("#- bad" `elem` contents) $ - error "commented out 'bad' package was expected" diff --git a/test/integration/tests/init-omit-packages/files/.gitignore b/test/integration/tests/init-omit-packages/files/.gitignore deleted file mode 100644 index 684dbffa96..0000000000 --- a/test/integration/tests/init-omit-packages/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -stack.yaml diff --git a/test/integration/tests/init-omit-packages/files/bad/Bad.hs b/test/integration/tests/init-omit-packages/files/bad/Bad.hs deleted file mode 100644 index cd8351649d..0000000000 --- a/test/integration/tests/init-omit-packages/files/bad/Bad.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Bad where - -bad :: Int -bad = error "Sommething bad here" diff --git a/test/integration/tests/init-omit-packages/files/bad/bad.cabal b/test/integration/tests/init-omit-packages/files/bad/bad.cabal deleted file mode 100644 index 789ad7d101..0000000000 --- a/test/integration/tests/init-omit-packages/files/bad/bad.cabal +++ /dev/null @@ -1,9 +0,0 @@ -name: bad -version: 1.0.0 -build-type: Simple -cabal-version: >= 1.8 - -library - exposed-modules: Bad - build-depends: base, not-existing-package == 666.0 - default-language: Haskell2010 \ No newline at end of file diff --git 
a/test/integration/tests/init-omit-packages/files/good/good.cabal b/test/integration/tests/init-omit-packages/files/good/good.cabal deleted file mode 100644 index 944e5795cf..0000000000 --- a/test/integration/tests/init-omit-packages/files/good/good.cabal +++ /dev/null @@ -1,9 +0,0 @@ -name: good -version: 1.0.0 -build-type: Simple -cabal-version: >= 1.8 - -library - exposed-modules: Good - build-depends: base - default-language: Haskell2010 \ No newline at end of file diff --git a/test/integration/tests/internal-libraries/Main.hs b/test/integration/tests/internal-libraries/Main.hs deleted file mode 100644 index e61b083aa4..0000000000 --- a/test/integration/tests/internal-libraries/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: IO () -main = stack ["build"] diff --git a/test/integration/tests/internal-libraries/files/Setup.hs b/test/integration/tests/internal-libraries/files/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/internal-libraries/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/internal-libraries/files/files.cabal b/test/integration/tests/internal-libraries/files/files.cabal deleted file mode 100644 index 9391e63a57..0000000000 --- a/test/integration/tests/internal-libraries/files/files.cabal +++ /dev/null @@ -1,31 +0,0 @@ -name: files -version: 0.1.0.0 -build-type: Simple -cabal-version: >=2.0 - -library - hs-source-dirs: src - exposed-modules: Files - build-depends: base - default-language: Haskell2010 - -library foo - hs-source-dirs: src-foo - exposed-modules: Foo - build-depends: base, files, stm - default-language: Haskell2010 - -executable bar - hs-source-dirs: src-bar - main-is: Main.hs - build-depends: base, files, foo - default-language: Haskell2010 - -foreign-library baz - type: native-shared - other-modules: Baz - build-depends: base, files, foo, mtl - hs-source-dirs: src-baz - default-language: 
Haskell2010 - if os(Windows) - options: standalone diff --git a/test/integration/tests/internal-libraries/files/src-bar/Main.hs b/test/integration/tests/internal-libraries/files/src-bar/Main.hs deleted file mode 100644 index cc7e42ba70..0000000000 --- a/test/integration/tests/internal-libraries/files/src-bar/Main.hs +++ /dev/null @@ -1,11 +0,0 @@ -module Main where - -import Files -import Foo - -main :: IO () -main = do - putStrLn "files:" - print files - putStrLn "foo" - foo >>= print diff --git a/test/integration/tests/internal-libraries/files/src-baz/Baz.hs b/test/integration/tests/internal-libraries/files/src-baz/Baz.hs deleted file mode 100644 index 7e4e962ff3..0000000000 --- a/test/integration/tests/internal-libraries/files/src-baz/Baz.hs +++ /dev/null @@ -1,12 +0,0 @@ -module Baz where - -import Files -import Foo -import Control.Monad.Reader - -baz :: IO () -baz = flip runReaderT () $ lift $ do - putStrLn "files:" - print files - putStrLn "foo" - foo >>= print diff --git a/test/integration/tests/internal-libraries/files/src-foo/Foo.hs b/test/integration/tests/internal-libraries/files/src-foo/Foo.hs deleted file mode 100644 index 9a9b8e2205..0000000000 --- a/test/integration/tests/internal-libraries/files/src-foo/Foo.hs +++ /dev/null @@ -1,7 +0,0 @@ -module Foo where - -import Control.Monad.STM -import Files - -foo :: IO String -foo = atomically $ return $ "foo using " ++ files diff --git a/test/integration/tests/internal-libraries/files/src/Files.hs b/test/integration/tests/internal-libraries/files/src/Files.hs deleted file mode 100644 index b9c9eeac03..0000000000 --- a/test/integration/tests/internal-libraries/files/src/Files.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Files where - -files :: String -files = "files" diff --git a/test/integration/tests/internal-libraries/files/stack.yaml b/test/integration/tests/internal-libraries/files/stack.yaml deleted file mode 100644 index 788ec6ae98..0000000000 --- 
a/test/integration/tests/internal-libraries/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: ghc-8.6.5 -extra-deps: -- stm-2.5.0.0 -- mtl-2.2.1 diff --git a/test/integration/tests/lock-files/Main.hs b/test/integration/tests/lock-files/Main.hs deleted file mode 100644 index 8f7c89700f..0000000000 --- a/test/integration/tests/lock-files/Main.hs +++ /dev/null @@ -1,17 +0,0 @@ -import Control.Monad (unless, when) -import Data.List (isInfixOf) -import StackTest -import System.Directory - -main :: IO () -main = do - copyFile "stack-2-extras" "stack.yaml" - stack ["build"] - lock1 <- readFile "stack.yaml.lock" - unless ("acme-dont" `isInfixOf` lock1) $ - error "Package acme-dont wasn't found in Stack lock file" - copyFile "stack-1-extra" "stack.yaml" - stack ["build"] - lock2 <- readFile "stack.yaml.lock" - when ("acme-dont" `isInfixOf` lock2) $ - error "Package acme-dont shouldn't be in Stack lock file anymore" diff --git a/test/integration/tests/lock-files/files/Lib.hs b/test/integration/tests/lock-files/files/Lib.hs deleted file mode 100644 index a3b82e6e83..0000000000 --- a/test/integration/tests/lock-files/files/Lib.hs +++ /dev/null @@ -1,2 +0,0 @@ -foo :: Int -foo = 42 diff --git a/test/integration/tests/lock-files/files/package.yaml b/test/integration/tests/lock-files/files/package.yaml deleted file mode 100644 index 36e02ec5e7..0000000000 --- a/test/integration/tests/lock-files/files/package.yaml +++ /dev/null @@ -1,4 +0,0 @@ -name: example -library: - dependencies: - - base diff --git a/test/integration/tests/lock-files/files/stack-1-extra b/test/integration/tests/lock-files/files/stack-1-extra deleted file mode 100644 index 2fb724f00d..0000000000 --- a/test/integration/tests/lock-files/files/stack-1-extra +++ /dev/null @@ -1,3 +0,0 @@ -resolver: lts-14.27 -extra-deps: -- acme-cuteboy-0.1.0.0 diff --git a/test/integration/tests/lock-files/files/stack-2-extras b/test/integration/tests/lock-files/files/stack-2-extras deleted file mode 100644 index 
382e3b0238..0000000000 --- a/test/integration/tests/lock-files/files/stack-2-extras +++ /dev/null @@ -1,4 +0,0 @@ -resolver: lts-14.27 -extra-deps: -- acme-cuteboy-0.1.0.0 -- acme-dont-1.1 diff --git a/test/integration/tests/module-added-multiple-times/Main.hs b/test/integration/tests/module-added-multiple-times/Main.hs deleted file mode 100644 index ec3226b48a..0000000000 --- a/test/integration/tests/module-added-multiple-times/Main.hs +++ /dev/null @@ -1,12 +0,0 @@ - - -import Control.Monad -import Data.List -import StackTest - -main :: IO () -main = repl [] $ do - replCommand ":main" - line <- replGetLine - when (line /= "Hello World!") - $ error "Main module didn't load correctly." diff --git a/test/integration/tests/module-added-multiple-times/files/LICENSE b/test/integration/tests/module-added-multiple-times/files/LICENSE deleted file mode 100644 index d05408d876..0000000000 --- a/test/integration/tests/module-added-multiple-times/files/LICENSE +++ /dev/null @@ -1,30 +0,0 @@ -Copyright (c) 2000 - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Your name here nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/test/integration/tests/module-added-multiple-times/files/Setup.hs b/test/integration/tests/module-added-multiple-times/files/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/module-added-multiple-times/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/module-added-multiple-times/files/exe/Main.hs b/test/integration/tests/module-added-multiple-times/files/exe/Main.hs deleted file mode 100644 index 504eb17dda..0000000000 --- a/test/integration/tests/module-added-multiple-times/files/exe/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Main where - -import Lib.A - -main :: IO () -main = putStrLn messageA diff --git a/test/integration/tests/module-added-multiple-times/files/project-a.cabal b/test/integration/tests/module-added-multiple-times/files/project-a.cabal deleted file mode 100644 index 4e230ef90b..0000000000 --- a/test/integration/tests/module-added-multiple-times/files/project-a.cabal +++ /dev/null @@ -1,33 +0,0 @@ -name: project-a -version: 0.1.0.0 -synopsis: Simple project template from stack -description: Please see README.md -homepage: http://github.com/githubuser/project-a#readme -license: MIT -license-file: LICENSE -author: Author name here -maintainer: example@example.com -copyright: 2010 Author Here -category: Web -build-type: Simple -cabal-version: >=1.10 - -library - hs-source-dirs: src - default-language: 
Haskell2010 - - exposed-modules: Lib.A - - build-depends: base - - -executable project-a-exe - main-is: Main.hs - hs-source-dirs: exe - default-language: Haskell2010 - - ghc-options: -rtsopts - - build-depends: base - - , project-a \ No newline at end of file diff --git a/test/integration/tests/module-added-multiple-times/files/src/Lib/A.hs b/test/integration/tests/module-added-multiple-times/files/src/Lib/A.hs deleted file mode 100644 index 788d4e22d5..0000000000 --- a/test/integration/tests/module-added-multiple-times/files/src/Lib/A.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Lib.A where - -messageA :: String -messageA = "Hello World!" diff --git a/test/integration/tests/module-added-multiple-times/files/stack.yaml b/test/integration/tests/module-added-multiple-times/files/stack.yaml deleted file mode 100644 index 619df55da8..0000000000 --- a/test/integration/tests/module-added-multiple-times/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: lts-14.27 -extra-deps: [] -flags: {} -extra-package-dbs: [] diff --git a/test/integration/tests/multi-test/Main.hs b/test/integration/tests/multi-test/Main.hs deleted file mode 100644 index dcec42e2b4..0000000000 --- a/test/integration/tests/multi-test/Main.hs +++ /dev/null @@ -1,19 +0,0 @@ -import Control.Monad (unless) -import Data.List (isInfixOf) -import StackTest - -main :: IO () -main = do - stack ["build"] - stack ["test"] - -- FIXME: Make 'clean' unnecessary (see #1411) - stack ["clean"] - stackCheckStderr ["test", "--coverage"] $ \out -> do - unless ("The coverage report for multi-test-suite's test-suite \"multi-test-suite-test\" is available at" `isInfixOf` out) $ - fail "Didn't get expected report for multi-test-suite-test" - unless ("Error: The coverage report for multi-test-suite's test-suite \"multi-test-suite-test-2\" did not consider any code." `isInfixOf` out) $ - fail "Didn't get expected empty report for multi-test-suite-test-2" - -- Test then build works too. 
- stack ["clean"] - stack ["test"] - stack ["build"] diff --git a/test/integration/tests/multi-test/files/LICENSE b/test/integration/tests/multi-test/files/LICENSE deleted file mode 100644 index d05408d876..0000000000 --- a/test/integration/tests/multi-test/files/LICENSE +++ /dev/null @@ -1,30 +0,0 @@ -Copyright (c) 2000 - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Your name here nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/test/integration/tests/multi-test/files/Setup.hs b/test/integration/tests/multi-test/files/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/multi-test/files/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/multi-test/files/app/Main.hs b/test/integration/tests/multi-test/files/app/Main.hs deleted file mode 100644 index de1c1ab35c..0000000000 --- a/test/integration/tests/multi-test/files/app/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Main where - -import Lib - -main :: IO () -main = someFunc diff --git a/test/integration/tests/multi-test/files/cyclic/Cyclic.hs b/test/integration/tests/multi-test/files/cyclic/Cyclic.hs deleted file mode 100644 index 57c0764899..0000000000 --- a/test/integration/tests/multi-test/files/cyclic/Cyclic.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Cyclic where - -cyclic :: String -cyclic = "Hello from cyclic" diff --git a/test/integration/tests/multi-test/files/cyclic/LICENSE b/test/integration/tests/multi-test/files/cyclic/LICENSE deleted file mode 100644 index d05408d876..0000000000 --- a/test/integration/tests/multi-test/files/cyclic/LICENSE +++ /dev/null @@ -1,30 +0,0 @@ -Copyright (c) 2000 - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Your name here nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/test/integration/tests/multi-test/files/cyclic/Setup.hs b/test/integration/tests/multi-test/files/cyclic/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/multi-test/files/cyclic/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/multi-test/files/cyclic/Spec.hs b/test/integration/tests/multi-test/files/cyclic/Spec.hs deleted file mode 100644 index 2a7e6b6085..0000000000 --- a/test/integration/tests/multi-test/files/cyclic/Spec.hs +++ /dev/null @@ -1,4 +0,0 @@ -import Lib (cyclicOutput) - -main :: IO () -main = cyclicOutput diff --git a/test/integration/tests/multi-test/files/cyclic/cyclic.cabal b/test/integration/tests/multi-test/files/cyclic/cyclic.cabal deleted file mode 100644 index 4faef11041..0000000000 --- a/test/integration/tests/multi-test/files/cyclic/cyclic.cabal +++ /dev/null @@ -1,33 +0,0 @@ --- Initial cyclic.cabal generated by cabal init. 
For further --- documentation, see http://haskell.org/cabal/users-guide/ - -name: cyclic -version: 0.1.0.0 --- synopsis: --- description: --- license: -license-file: LICENSE -author: Michael Sloan -maintainer: mgsloan@gmail.com --- copyright: --- category: -build-type: Simple --- extra-source-files: -cabal-version: >=1.10 - -library - exposed-modules: Cyclic - -- other-modules: - -- other-extensions: - build-depends: base >=4.8 && <5.0 - -- hs-source-dirs: - default-language: Haskell2010 - -test-suite cyclic-test-suite - type: exitcode-stdio-1.0 - main-is: Spec.hs - build-depends: base - -- This is cyclic because multi-test-suite depends on this package. - , multi-test-suite - ghc-options: -threaded -rtsopts -with-rtsopts=-N - default-language: Haskell2010 diff --git a/test/integration/tests/multi-test/files/multi-test-suite.cabal b/test/integration/tests/multi-test/files/multi-test-suite.cabal deleted file mode 100644 index deb33d3572..0000000000 --- a/test/integration/tests/multi-test/files/multi-test-suite.cabal +++ /dev/null @@ -1,58 +0,0 @@ -name: multi-test-suite -version: 0.1.0.0 -synopsis: Initial project template from stack -description: Please see README.md -homepage: http://github.com/commercialhaskell/multi-test-suite#readme -license: BSD3 -license-file: LICENSE -author: Your name here -maintainer: your.address@example.com --- copyright: -category: Web -build-type: Simple --- extra-source-files: -cabal-version: >=1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base >= 4.7 && < 5, cyclic - default-language: Haskell2010 - -executable multi-test-suite-exe - hs-source-dirs: app - main-is: Main.hs - ghc-options: -threaded -rtsopts -with-rtsopts=-N - build-depends: base - , multi-test-suite - default-language: Haskell2010 - -test-suite multi-test-suite-test - type: exitcode-stdio-1.0 - hs-source-dirs: test - main-is: Spec.hs - build-depends: base - , multi-test-suite - ghc-options: -threaded -rtsopts -with-rtsopts=-N - 
default-language: Haskell2010 - -test-suite multi-test-suite-test-2 - type: exitcode-stdio-1.0 - hs-source-dirs: test-2 - main-is: Spec.hs - build-depends: base - , multi-test-suite - ghc-options: -threaded -rtsopts -with-rtsopts=-N - default-language: Haskell2010 - -test-suite multi-test-suite-test-3 - type: exitcode-stdio-1.0 - hs-source-dirs: test-3 - main-is: Spec.hs - build-depends: base - ghc-options: -threaded -rtsopts -with-rtsopts=-N - default-language: Haskell2010 - -source-repository head - type: git - location: https://github.com/commercialhaskell/multi-test-suite diff --git a/test/integration/tests/multi-test/files/src/Lib.hs b/test/integration/tests/multi-test/files/src/Lib.hs deleted file mode 100644 index dc9643b828..0000000000 --- a/test/integration/tests/multi-test/files/src/Lib.hs +++ /dev/null @@ -1,16 +0,0 @@ -module Lib - ( someFunc - , someOtherFunc - , cyclicOutput - ) where - -import Cyclic - -someFunc :: IO () -someFunc = putStrLn "someFunc" - -someOtherFunc :: IO () -someOtherFunc = putStrLn "someOtherFunc" - -cyclicOutput :: IO () -cyclicOutput = putStrLn cyclic diff --git a/test/integration/tests/multi-test/files/stack.yaml b/test/integration/tests/multi-test/files/stack.yaml deleted file mode 100644 index 067f74ae03..0000000000 --- a/test/integration/tests/multi-test/files/stack.yaml +++ /dev/null @@ -1,7 +0,0 @@ -flags: {} -packages: -- . 
-- sub-package -- cyclic -extra-deps: [] -resolver: lts-14.27 diff --git a/test/integration/tests/multi-test/files/sub-package/src/Lib2.hs b/test/integration/tests/multi-test/files/sub-package/src/Lib2.hs deleted file mode 100644 index 92ce236d56..0000000000 --- a/test/integration/tests/multi-test/files/sub-package/src/Lib2.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Lib2 where - -lib2 :: IO () -lib2 = putStrLn "lib2" diff --git a/test/integration/tests/multi-test/files/sub-package/src/main.hs b/test/integration/tests/multi-test/files/sub-package/src/main.hs deleted file mode 100644 index e84bf058a8..0000000000 --- a/test/integration/tests/multi-test/files/sub-package/src/main.hs +++ /dev/null @@ -1,3 +0,0 @@ -import Lib2 - -main = lib2 diff --git a/test/integration/tests/multi-test/files/sub-package/sub-package.cabal b/test/integration/tests/multi-test/files/sub-package/sub-package.cabal deleted file mode 100644 index 0e94272961..0000000000 --- a/test/integration/tests/multi-test/files/sub-package/sub-package.cabal +++ /dev/null @@ -1,31 +0,0 @@ -name: sub-package -version: 0.1.0.0 -synopsis: Initial project template from stack -description: Please see README.md -homepage: http://github.com/commercialhaskell/multi-test-suite#readme -license: BSD3 -license-file: ../LICENSE -author: Your name here -maintainer: your.address@example.com --- copyright: -category: Web -build-type: Simple --- extra-source-files: -cabal-version: >=1.10 - -executable sub-package-exe - hs-source-dirs: src - main-is: main.hs - other-modules: Lib2 - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - -test-suite sub-package-test - type: exitcode-stdio-1.0 - hs-source-dirs: src, test - main-is: Spec.hs - other-modules: Lib2 - ghc-options: -threaded -rtsopts -with-rtsopts=-N - build-depends: base - , multi-test-suite - default-language: Haskell2010 diff --git a/test/integration/tests/multi-test/files/sub-package/test/Spec.hs 
b/test/integration/tests/multi-test/files/sub-package/test/Spec.hs deleted file mode 100644 index 87852a7dec..0000000000 --- a/test/integration/tests/multi-test/files/sub-package/test/Spec.hs +++ /dev/null @@ -1,7 +0,0 @@ -import Lib -import Lib2 - -main :: IO () -main = do - someOtherFunc - lib2 diff --git a/test/integration/tests/multi-test/files/test-2/Spec.hs b/test/integration/tests/multi-test/files/test-2/Spec.hs deleted file mode 100644 index 0e6a6e0b04..0000000000 --- a/test/integration/tests/multi-test/files/test-2/Spec.hs +++ /dev/null @@ -1,4 +0,0 @@ -import Lib - -main :: IO () -main = return () diff --git a/test/integration/tests/multi-test/files/test-3/Spec.hs b/test/integration/tests/multi-test/files/test-3/Spec.hs deleted file mode 100644 index 377b6b5516..0000000000 --- a/test/integration/tests/multi-test/files/test-3/Spec.hs +++ /dev/null @@ -1,2 +0,0 @@ -main :: IO () -main = return () diff --git a/test/integration/tests/multi-test/files/test/OtherModule.hs b/test/integration/tests/multi-test/files/test/OtherModule.hs deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/test/integration/tests/multi-test/files/test/Spec.hs b/test/integration/tests/multi-test/files/test/Spec.hs deleted file mode 100644 index 7153edaf67..0000000000 --- a/test/integration/tests/multi-test/files/test/Spec.hs +++ /dev/null @@ -1,4 +0,0 @@ -import Lib - -main :: IO () -main = someFunc diff --git a/test/integration/tests/mutable-deps/Main.hs b/test/integration/tests/mutable-deps/Main.hs deleted file mode 100644 index f99c63a0e4..0000000000 --- a/test/integration/tests/mutable-deps/Main.hs +++ /dev/null @@ -1,25 +0,0 @@ -import Control.Monad (forM_, unless, when) -import Data.List (isInfixOf, stripPrefix) -import StackTest - -main :: IO () -main = unless isWindows $ do -- depedency issues on Windows - let isBuild package line = - case stripPrefix package line of - Just x -> "> build" `isInfixOf` line - Nothing -> False - expectRecompilation pkgs stderr = 
forM_ pkgs $ \p -> - unless (any (isBuild p) $ lines stderr) $ - error $ "package " ++ show p ++ " recompilation was expected" - expectNoRecompilation pkgs stderr = forM_ pkgs $ \p -> - when (any (isBuild p) $ lines stderr) $ - error $ "package " ++ show p ++ " recompilation was not expected" - mutablePackages = [ "filepath" - , "directory" - , "filemanip" - , "files" - ] - stackCheckStderr ["build"] $ expectRecompilation mutablePackages - stackCheckStderr ["build" , "--profile"] $ expectRecompilation mutablePackages - stackCheckStderr ["build"] $ expectNoRecompilation mutablePackages - stackCheckStderr ["build" , "--profile"] $ expectNoRecompilation mutablePackages diff --git a/test/integration/tests/mutable-deps/files/app/Main.hs b/test/integration/tests/mutable-deps/files/app/Main.hs deleted file mode 100644 index 5e18155cea..0000000000 --- a/test/integration/tests/mutable-deps/files/app/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -module Main where - -import Files - -main = do - cFiles <- allCFiles - putStrLn $ "C files:" ++ show cFiles diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/LICENSE b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/LICENSE deleted file mode 100644 index 6b4cfeee60..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/LICENSE +++ /dev/null @@ -1,30 +0,0 @@ -Copyright Neil Mitchell 2005-2018. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. 
- - * Neither the name of Neil Mitchell nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/README.md b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/README.md deleted file mode 100644 index ea54613d6f..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/README.md +++ /dev/null @@ -1,33 +0,0 @@ -# FilePath [![Hackage version](https://img.shields.io/hackage/v/filepath.svg?label=Hackage)](https://hackage.haskell.org/package/filepath) [![Linux Build Status](https://img.shields.io/travis/haskell/filepath/master.svg?label=Linux%20build)](https://travis-ci.org/haskell/filepath) [![Windows Build Status](https://img.shields.io/appveyor/ci/ndmitchell/filepath/master.svg?label=Windows%20build)](https://ci.appveyor.com/project/ndmitchell/filepath) - -The `filepath` package provides functionality for manipulating `FilePath` values, and is shipped with both [GHC](https://www.haskell.org/ghc/) and the [Haskell Platform](https://www.haskell.org/platform/). 
It provides three modules: - -* [`System.FilePath.Posix`](http://hackage.haskell.org/package/filepath/docs/System-FilePath-Posix.html) manipulates POSIX/Linux style `FilePath` values (with `/` as the path separator). -* [`System.FilePath.Windows`](http://hackage.haskell.org/package/filepath/docs/System-FilePath-Windows.html) manipulates Windows style `FilePath` values (with either `\` or `/` as the path separator, and deals with drives). -* [`System.FilePath`](http://hackage.haskell.org/package/filepath/docs/System-FilePath.html) is an alias for the module appropriate to your platform. - -All three modules provide the same API, and the same documentation (calling out differences in the different variants). - -### Should `FilePath` be an abstract data type? - -The answer for this library is "no". While an abstract `FilePath` has some advantages (mostly type safety), it also has some disadvantages: - -* In Haskell the definition is `type FilePath = String`, and all file-oriented functions operate on this type alias, e.g. `readFile`/`writeFile`. Any abstract type would require wrappers for these functions or lots of casts between `String` and the abstraction. -* It is not immediately obvious what a `FilePath` is, and what is just a pure `String`. For example, `/path/file.ext` is a `FilePath`. Is `/`? `/path`? `path`? `file.ext`? `.ext`? `file`? -* Often it is useful to represent invalid files, e.g. `/foo/*.txt` probably isn't an actual file, but a glob pattern. Other programs use `foo//bar` for globs, which is definitely not a file, but might want to be stored as a `FilePath`. -* Some programs use syntactic non-semantic details of the `FilePath` to change their behaviour. For example, `foo`, `foo/` and `foo/.` are all similar, and refer to the same location on disk, but may behave differently when passed to command-line tools. -* A useful step to introducing an abstract `FilePath` is to reduce the amount of manipulating `FilePath` values like lists. 
This library hopes to help in that effort. - -### Developer notes - -Most of the code is in `System/FilePath/Internal.hs` which is `#include`'d into both `System/FilePath/Posix.hs` and `System/FilePath/Windows.hs` with the `IS_WINDOWS` CPP define set to either `True` or `False`. This Internal module is a bit weird in that it isn't really a Haskell module, but is more an include file. - -The library has extensive doc tests. Anything starting with `-- >` is transformed into a doc test as a predicate that must evaluate to `True`. These tests follow a few rules: - -* Tests prefixed with `Windows:` or `Posix:` are only tested against that specific implementation - otherwise tests are run against both implementations. -* Any single letter variable, e.g. `x`, is considered universal quantification, and is checked with `QuickCheck`. -* If `Valid x =>` appears at the start of a doc test, that means the property will only be tested with `x` passing the `isValid` predicate. - -The tests can be generated by `Generate.hs` in the root of the repo, and will be placed in `tests/TestGen.hs`. The `TestGen.hs` file is checked into the repo, and the CI scripts check that `TestGen.hs` is in sync with what would be generated a fresh - if you don't regenerate `TestGen.hs` the CI will fail. - -The `.ghci` file is set up to allow you to type `ghci` to open the library, then `:go` will regenerate the tests and run them. 
diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/Setup.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath.hs deleted file mode 100644 index 331ae81818..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath.hs +++ /dev/null @@ -1,29 +0,0 @@ -{-# LANGUAGE CPP #-} -#if __GLASGOW_HASKELL__ >= 704 -{-# LANGUAGE Safe #-} -#endif -{- | -Module : System.FilePath -Copyright : (c) Neil Mitchell 2005-2014 -License : BSD3 - -Maintainer : ndmitchell@gmail.com -Stability : stable -Portability : portable - -A library for 'FilePath' manipulations, using Posix or Windows filepaths -depending on the platform. - -Both "System.FilePath.Posix" and "System.FilePath.Windows" provide the -same interface. See either for examples and a list of the available -functions. 
--} - - -#if defined(mingw32_HOST_OS) || defined(__MINGW32__) -module System.FilePath(module System.FilePath.Windows) where -import System.FilePath.Windows -#else -module System.FilePath(module System.FilePath.Posix) where -import System.FilePath.Posix -#endif diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath/Internal.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath/Internal.hs deleted file mode 100644 index 816883c0b1..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath/Internal.hs +++ /dev/null @@ -1,1042 +0,0 @@ -#if __GLASGOW_HASKELL__ >= 704 -{-# LANGUAGE Safe #-} -#endif -{-# LANGUAGE PatternGuards #-} - --- This template expects CPP definitions for: --- MODULE_NAME = Posix | Windows --- IS_WINDOWS = False | True - --- | --- Module : System.FilePath.MODULE_NAME --- Copyright : (c) Neil Mitchell 2005-2014 --- License : BSD3 --- --- Maintainer : ndmitchell@gmail.com --- Stability : stable --- Portability : portable --- --- A library for 'FilePath' manipulations, using MODULE_NAME style paths on --- all platforms. Importing "System.FilePath" is usually better. --- --- Given the example 'FilePath': @\/directory\/file.ext@ --- --- We can use the following functions to extract pieces. --- --- * 'takeFileName' gives @\"file.ext\"@ --- --- * 'takeDirectory' gives @\"\/directory\"@ --- --- * 'takeExtension' gives @\".ext\"@ --- --- * 'dropExtension' gives @\"\/directory\/file\"@ --- --- * 'takeBaseName' gives @\"file\"@ --- --- And we could have built an equivalent path with the following expressions: --- --- * @\"\/directory\" '' \"file.ext\"@. --- --- * @\"\/directory\/file" '<.>' \"ext\"@. --- --- * @\"\/directory\/file.txt" '-<.>' \"ext\"@. --- --- Each function in this module is documented with several examples, --- which are also used as tests. 
--- --- Here are a few examples of using the @filepath@ functions together: --- --- /Example 1:/ Find the possible locations of a Haskell module @Test@ imported from module @Main@: --- --- @['replaceFileName' path_to_main \"Test\" '<.>' ext | ext <- [\"hs\",\"lhs\"] ]@ --- --- /Example 2:/ Download a file from @url@ and save it to disk: --- --- @do let file = 'makeValid' url --- System.Directory.createDirectoryIfMissing True ('takeDirectory' file)@ --- --- /Example 3:/ Compile a Haskell file, putting the @.hi@ file under @interface@: --- --- @'takeDirectory' file '' \"interface\" '' ('takeFileName' file '-<.>' \"hi\")@ --- --- References: --- [1] (Microsoft MSDN) -module System.FilePath.MODULE_NAME - ( - -- * Separator predicates - FilePath, - pathSeparator, pathSeparators, isPathSeparator, - searchPathSeparator, isSearchPathSeparator, - extSeparator, isExtSeparator, - - -- * @$PATH@ methods - splitSearchPath, getSearchPath, - - -- * Extension functions - splitExtension, - takeExtension, replaceExtension, (-<.>), dropExtension, addExtension, hasExtension, (<.>), - splitExtensions, dropExtensions, takeExtensions, replaceExtensions, isExtensionOf, - stripExtension, - - -- * Filename\/directory functions - splitFileName, - takeFileName, replaceFileName, dropFileName, - takeBaseName, replaceBaseName, - takeDirectory, replaceDirectory, - combine, (), - splitPath, joinPath, splitDirectories, - - -- * Drive functions - splitDrive, joinDrive, - takeDrive, hasDrive, dropDrive, isDrive, - - -- * Trailing slash functions - hasTrailingPathSeparator, - addTrailingPathSeparator, - dropTrailingPathSeparator, - - -- * File name manipulations - normalise, equalFilePath, - makeRelative, - isRelative, isAbsolute, - isValid, makeValid - ) - where - -import Control.Applicative ((<$>)) -import Data.Char(toLower, toUpper, isAsciiLower, isAsciiUpper) -import Data.Maybe(isJust) -import Data.List(stripPrefix, isSuffixOf) - -import System.Environment(getEnv) - - -infixr 7 <.>, -<.> -infixr 5 
- - - - - ---------------------------------------------------------------------- --- Platform Abstraction Methods (private) - --- | Is the operating system Unix or Linux like -isPosix :: Bool -isPosix = not isWindows - --- | Is the operating system Windows like -isWindows :: Bool -isWindows = IS_WINDOWS - - ---------------------------------------------------------------------- --- The basic functions - --- | The character that separates directories. In the case where more than --- one character is possible, 'pathSeparator' is the \'ideal\' one. --- --- > Windows: pathSeparator == '\\' --- > Posix: pathSeparator == '/' --- > isPathSeparator pathSeparator -pathSeparator :: Char -pathSeparator = if isWindows then '\\' else '/' - --- | The list of all possible separators. --- --- > Windows: pathSeparators == ['\\', '/'] --- > Posix: pathSeparators == ['/'] --- > pathSeparator `elem` pathSeparators -pathSeparators :: String -pathSeparators = if isWindows then "\\/" else "/" - --- | Rather than using @(== 'pathSeparator')@, use this. Test if something --- is a path separator. --- --- > isPathSeparator a == (a `elem` pathSeparators) -isPathSeparator :: Char -> Bool -isPathSeparator '/' = True -isPathSeparator '\\' = isWindows -isPathSeparator _ = False - - --- | The character that is used to separate the entries in the $PATH environment variable. --- --- > Windows: searchPathSeparator == ';' --- > Posix: searchPathSeparator == ':' -searchPathSeparator :: Char -searchPathSeparator = if isWindows then ';' else ':' - --- | Is the character a file separator? --- --- > isSearchPathSeparator a == (a == searchPathSeparator) -isSearchPathSeparator :: Char -> Bool -isSearchPathSeparator = (== searchPathSeparator) - - --- | File extension character --- --- > extSeparator == '.' -extSeparator :: Char -extSeparator = '.' - --- | Is the character an extension character? 
--- --- > isExtSeparator a == (a == extSeparator) -isExtSeparator :: Char -> Bool -isExtSeparator = (== extSeparator) - - ---------------------------------------------------------------------- --- Path methods (environment $PATH) - --- | Take a string, split it on the 'searchPathSeparator' character. --- Blank items are ignored on Windows, and converted to @.@ on Posix. --- On Windows path elements are stripped of quotes. --- --- Follows the recommendations in --- --- --- > Posix: splitSearchPath "File1:File2:File3" == ["File1","File2","File3"] --- > Posix: splitSearchPath "File1::File2:File3" == ["File1",".","File2","File3"] --- > Windows: splitSearchPath "File1;File2;File3" == ["File1","File2","File3"] --- > Windows: splitSearchPath "File1;;File2;File3" == ["File1","File2","File3"] --- > Windows: splitSearchPath "File1;\"File2\";File3" == ["File1","File2","File3"] -splitSearchPath :: String -> [FilePath] -splitSearchPath = f - where - f xs = case break isSearchPathSeparator xs of - (pre, [] ) -> g pre - (pre, _:post) -> g pre ++ f post - - g "" = ["." | isPosix] - g ('\"':x@(_:_)) | isWindows && last x == '\"' = [init x] - g x = [x] - - --- | Get a list of 'FilePath's in the $PATH variable. -getSearchPath :: IO [FilePath] -getSearchPath = fmap splitSearchPath (getEnv "PATH") - - ---------------------------------------------------------------------- --- Extension methods - --- | Split on the extension. 'addExtension' is the inverse. 
--- --- > splitExtension "/directory/path.ext" == ("/directory/path",".ext") --- > uncurry (++) (splitExtension x) == x --- > Valid x => uncurry addExtension (splitExtension x) == x --- > splitExtension "file.txt" == ("file",".txt") --- > splitExtension "file" == ("file","") --- > splitExtension "file/file.txt" == ("file/file",".txt") --- > splitExtension "file.txt/boris" == ("file.txt/boris","") --- > splitExtension "file.txt/boris.ext" == ("file.txt/boris",".ext") --- > splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob",".fred") --- > splitExtension "file/path.txt/" == ("file/path.txt/","") -splitExtension :: FilePath -> (String, String) -splitExtension x = case nameDot of - "" -> (x,"") - _ -> (dir ++ init nameDot, extSeparator : ext) - where - (dir,file) = splitFileName_ x - (nameDot,ext) = breakEnd isExtSeparator file - --- | Get the extension of a file, returns @\"\"@ for no extension, @.ext@ otherwise. --- --- > takeExtension "/directory/path.ext" == ".ext" --- > takeExtension x == snd (splitExtension x) --- > Valid x => takeExtension (addExtension x "ext") == ".ext" --- > Valid x => takeExtension (replaceExtension x "ext") == ".ext" -takeExtension :: FilePath -> String -takeExtension = snd . splitExtension - --- | Remove the current extension and add another, equivalent to 'replaceExtension'. --- --- > "/directory/path.txt" -<.> "ext" == "/directory/path.ext" --- > "/directory/path.txt" -<.> ".ext" == "/directory/path.ext" --- > "foo.o" -<.> "c" == "foo.c" -(-<.>) :: FilePath -> String -> FilePath -(-<.>) = replaceExtension - --- | Set the extension of a file, overwriting one if already present, equivalent to '-<.>'. 
--- --- > replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext" --- > replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext" --- > replaceExtension "file.txt" ".bob" == "file.bob" --- > replaceExtension "file.txt" "bob" == "file.bob" --- > replaceExtension "file" ".bob" == "file.bob" --- > replaceExtension "file.txt" "" == "file" --- > replaceExtension "file.fred.bob" "txt" == "file.fred.txt" --- > replaceExtension x y == addExtension (dropExtension x) y -replaceExtension :: FilePath -> String -> FilePath -replaceExtension x y = dropExtension x <.> y - --- | Add an extension, even if there is already one there, equivalent to 'addExtension'. --- --- > "/directory/path" <.> "ext" == "/directory/path.ext" --- > "/directory/path" <.> ".ext" == "/directory/path.ext" -(<.>) :: FilePath -> String -> FilePath -(<.>) = addExtension - --- | Remove last extension, and the \".\" preceding it. --- --- > dropExtension "/directory/path.ext" == "/directory/path" --- > dropExtension x == fst (splitExtension x) -dropExtension :: FilePath -> FilePath -dropExtension = fst . splitExtension - --- | Add an extension, even if there is already one there, equivalent to '<.>'. --- --- > addExtension "/directory/path" "ext" == "/directory/path.ext" --- > addExtension "file.txt" "bib" == "file.txt.bib" --- > addExtension "file." ".bib" == "file..bib" --- > addExtension "file" ".bib" == "file.bib" --- > addExtension "/" "x" == "/.x" --- > addExtension x "" == x --- > Valid x => takeFileName (addExtension (addTrailingPathSeparator x) "ext") == ".ext" --- > Windows: addExtension "\\\\share" ".txt" == "\\\\share\\.txt" -addExtension :: FilePath -> String -> FilePath -addExtension file "" = file -addExtension file xs@(x:_) = joinDrive a res - where - res = if isExtSeparator x then b ++ xs - else b ++ [extSeparator] ++ xs - - (a,b) = splitDrive file - --- | Does the given filename have an extension? 
--- --- > hasExtension "/directory/path.ext" == True --- > hasExtension "/directory/path" == False --- > null (takeExtension x) == not (hasExtension x) -hasExtension :: FilePath -> Bool -hasExtension = any isExtSeparator . takeFileName - - --- | Does the given filename have the specified extension? --- --- > "png" `isExtensionOf` "/directory/file.png" == True --- > ".png" `isExtensionOf` "/directory/file.png" == True --- > ".tar.gz" `isExtensionOf` "bar/foo.tar.gz" == True --- > "ar.gz" `isExtensionOf` "bar/foo.tar.gz" == False --- > "png" `isExtensionOf` "/directory/file.png.jpg" == False --- > "csv/table.csv" `isExtensionOf` "/data/csv/table.csv" == False -isExtensionOf :: String -> FilePath -> Bool -isExtensionOf ext@('.':_) = isSuffixOf ext . takeExtensions -isExtensionOf ext = isSuffixOf ('.':ext) . takeExtensions - --- | Drop the given extension from a FilePath, and the @\".\"@ preceding it. --- Returns 'Nothing' if the FilePath does not have the given extension, or --- 'Just' and the part before the extension if it does. --- --- This function can be more predictable than 'dropExtensions', especially if the filename --- might itself contain @.@ characters. --- --- > stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x" --- > stripExtension "hi.o" "foo.x.hs.o" == Nothing --- > dropExtension x == fromJust (stripExtension (takeExtension x) x) --- > dropExtensions x == fromJust (stripExtension (takeExtensions x) x) --- > stripExtension ".c.d" "a.b.c.d" == Just "a.b" --- > stripExtension ".c.d" "a.b..c.d" == Just "a.b." --- > stripExtension "baz" "foo.bar" == Nothing --- > stripExtension "bar" "foobar" == Nothing --- > stripExtension "" x == Just x -stripExtension :: String -> FilePath -> Maybe FilePath -stripExtension [] path = Just path -stripExtension ext@(x:_) path = stripSuffix dotExt path - where dotExt = if isExtSeparator x then ext else '.':ext - - --- | Split on all extensions. 
--- --- > splitExtensions "/directory/path.ext" == ("/directory/path",".ext") --- > splitExtensions "file.tar.gz" == ("file",".tar.gz") --- > uncurry (++) (splitExtensions x) == x --- > Valid x => uncurry addExtension (splitExtensions x) == x --- > splitExtensions "file.tar.gz" == ("file",".tar.gz") -splitExtensions :: FilePath -> (FilePath, String) -splitExtensions x = (a ++ c, d) - where - (a,b) = splitFileName_ x - (c,d) = break isExtSeparator b - --- | Drop all extensions. --- --- > dropExtensions "/directory/path.ext" == "/directory/path" --- > dropExtensions "file.tar.gz" == "file" --- > not $ hasExtension $ dropExtensions x --- > not $ any isExtSeparator $ takeFileName $ dropExtensions x -dropExtensions :: FilePath -> FilePath -dropExtensions = fst . splitExtensions - --- | Get all extensions. --- --- > takeExtensions "/directory/path.ext" == ".ext" --- > takeExtensions "file.tar.gz" == ".tar.gz" -takeExtensions :: FilePath -> String -takeExtensions = snd . splitExtensions - - --- | Replace all extensions of a file with a new extension. Note --- that 'replaceExtension' and 'addExtension' both work for adding --- multiple extensions, so only required when you need to drop --- all extensions first. --- --- > replaceExtensions "file.fred.bob" "txt" == "file.txt" --- > replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz" -replaceExtensions :: FilePath -> String -> FilePath -replaceExtensions x y = dropExtensions x <.> y - - - ---------------------------------------------------------------------- --- Drive methods - --- | Is the given character a valid drive letter? --- only a-z and A-Z are letters, not isAlpha which is more unicodey -isLetter :: Char -> Bool -isLetter x = isAsciiLower x || isAsciiUpper x - - --- | Split a path into a drive and a path. --- On Posix, \/ is a Drive. 
--- --- > uncurry (++) (splitDrive x) == x --- > Windows: splitDrive "file" == ("","file") --- > Windows: splitDrive "c:/file" == ("c:/","file") --- > Windows: splitDrive "c:\\file" == ("c:\\","file") --- > Windows: splitDrive "\\\\shared\\test" == ("\\\\shared\\","test") --- > Windows: splitDrive "\\\\shared" == ("\\\\shared","") --- > Windows: splitDrive "\\\\?\\UNC\\shared\\file" == ("\\\\?\\UNC\\shared\\","file") --- > Windows: splitDrive "\\\\?\\UNCshared\\file" == ("\\\\?\\","UNCshared\\file") --- > Windows: splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\","file") --- > Windows: splitDrive "/d" == ("","/d") --- > Posix: splitDrive "/test" == ("/","test") --- > Posix: splitDrive "//test" == ("//","test") --- > Posix: splitDrive "test/file" == ("","test/file") --- > Posix: splitDrive "file" == ("","file") -splitDrive :: FilePath -> (FilePath, FilePath) -splitDrive x | isPosix = span (== '/') x -splitDrive x | Just y <- readDriveLetter x = y -splitDrive x | Just y <- readDriveUNC x = y -splitDrive x | Just y <- readDriveShare x = y -splitDrive x = ("",x) - -addSlash :: FilePath -> FilePath -> (FilePath, FilePath) -addSlash a xs = (a++c,d) - where (c,d) = span isPathSeparator xs - --- See [1]. --- "\\?\D:\" or "\\?\UNC\\" -readDriveUNC :: FilePath -> Maybe (FilePath, FilePath) -readDriveUNC (s1:s2:'?':s3:xs) | all isPathSeparator [s1,s2,s3] = - case map toUpper xs of - ('U':'N':'C':s4:_) | isPathSeparator s4 -> - let (a,b) = readDriveShareName (drop 4 xs) - in Just (s1:s2:'?':s3:take 4 xs ++ a, b) - _ -> case readDriveLetter xs of - -- Extended-length path. 
- Just (a,b) -> Just (s1:s2:'?':s3:a,b) - Nothing -> Nothing -readDriveUNC _ = Nothing - -{- c:\ -} -readDriveLetter :: String -> Maybe (FilePath, FilePath) -readDriveLetter (x:':':y:xs) | isLetter x && isPathSeparator y = Just $ addSlash [x,':'] (y:xs) -readDriveLetter (x:':':xs) | isLetter x = Just ([x,':'], xs) -readDriveLetter _ = Nothing - -{- \\sharename\ -} -readDriveShare :: String -> Maybe (FilePath, FilePath) -readDriveShare (s1:s2:xs) | isPathSeparator s1 && isPathSeparator s2 = - Just (s1:s2:a,b) - where (a,b) = readDriveShareName xs -readDriveShare _ = Nothing - -{- assume you have already seen \\ -} -{- share\bob -> "share\", "bob" -} -readDriveShareName :: String -> (FilePath, FilePath) -readDriveShareName name = addSlash a b - where (a,b) = break isPathSeparator name - - - --- | Join a drive and the rest of the path. --- --- > Valid x => uncurry joinDrive (splitDrive x) == x --- > Windows: joinDrive "C:" "foo" == "C:foo" --- > Windows: joinDrive "C:\\" "bar" == "C:\\bar" --- > Windows: joinDrive "\\\\share" "foo" == "\\\\share\\foo" --- > Windows: joinDrive "/:" "foo" == "/:\\foo" -joinDrive :: FilePath -> FilePath -> FilePath -joinDrive = combineAlways - --- | Get the drive from a filepath. --- --- > takeDrive x == fst (splitDrive x) -takeDrive :: FilePath -> FilePath -takeDrive = fst . splitDrive - --- | Delete the drive, if it exists. --- --- > dropDrive x == snd (splitDrive x) -dropDrive :: FilePath -> FilePath -dropDrive = snd . splitDrive - --- | Does a path have a drive. --- --- > not (hasDrive x) == null (takeDrive x) --- > Posix: hasDrive "/foo" == True --- > Windows: hasDrive "C:\\foo" == True --- > Windows: hasDrive "C:foo" == True --- > hasDrive "foo" == False --- > hasDrive "" == False -hasDrive :: FilePath -> Bool -hasDrive = not . null . 
takeDrive - - --- | Is an element a drive --- --- > Posix: isDrive "/" == True --- > Posix: isDrive "/foo" == False --- > Windows: isDrive "C:\\" == True --- > Windows: isDrive "C:\\foo" == False --- > isDrive "" == False -isDrive :: FilePath -> Bool -isDrive x = not (null x) && null (dropDrive x) - - ---------------------------------------------------------------------- --- Operations on a filepath, as a list of directories - --- | Split a filename into directory and file. '' is the inverse. --- The first component will often end with a trailing slash. --- --- > splitFileName "/directory/file.ext" == ("/directory/","file.ext") --- > Valid x => uncurry () (splitFileName x) == x || fst (splitFileName x) == "./" --- > Valid x => isValid (fst (splitFileName x)) --- > splitFileName "file/bob.txt" == ("file/", "bob.txt") --- > splitFileName "file/" == ("file/", "") --- > splitFileName "bob" == ("./", "bob") --- > Posix: splitFileName "/" == ("/","") --- > Windows: splitFileName "c:" == ("c:","") -splitFileName :: FilePath -> (String, String) -splitFileName x = (if null dir then "./" else dir, name) - where - (dir, name) = splitFileName_ x - --- version of splitFileName where, if the FilePath has no directory --- component, the returned directory is "" rather than "./". This --- is used in cases where we are going to combine the returned --- directory to make a valid FilePath, and having a "./" appear would --- look strange and upset simple equality properties. See --- e.g. replaceFileName. -splitFileName_ :: FilePath -> (String, String) -splitFileName_ x = (drv ++ dir, file) - where - (drv,pth) = splitDrive x - (dir,file) = breakEnd isPathSeparator pth - --- | Set the filename. --- --- > replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext" --- > Valid x => replaceFileName x (takeFileName x) == x -replaceFileName :: FilePath -> String -> FilePath -replaceFileName x y = a y where (a,_) = splitFileName_ x - --- | Drop the filename. 
Unlike 'takeDirectory', this function will leave --- a trailing path separator on the directory. --- --- > dropFileName "/directory/file.ext" == "/directory/" --- > dropFileName x == fst (splitFileName x) -dropFileName :: FilePath -> FilePath -dropFileName = fst . splitFileName - - --- | Get the file name. --- --- > takeFileName "/directory/file.ext" == "file.ext" --- > takeFileName "test/" == "" --- > takeFileName x `isSuffixOf` x --- > takeFileName x == snd (splitFileName x) --- > Valid x => takeFileName (replaceFileName x "fred") == "fred" --- > Valid x => takeFileName (x "fred") == "fred" --- > Valid x => isRelative (takeFileName x) -takeFileName :: FilePath -> FilePath -takeFileName = snd . splitFileName - --- | Get the base name, without an extension or path. --- --- > takeBaseName "/directory/file.ext" == "file" --- > takeBaseName "file/test.txt" == "test" --- > takeBaseName "dave.ext" == "dave" --- > takeBaseName "" == "" --- > takeBaseName "test" == "test" --- > takeBaseName (addTrailingPathSeparator x) == "" --- > takeBaseName "file/file.tar.gz" == "file.tar" -takeBaseName :: FilePath -> String -takeBaseName = dropExtension . takeFileName - --- | Set the base name. --- --- > replaceBaseName "/directory/other.ext" "file" == "/directory/file.ext" --- > replaceBaseName "file/test.txt" "bob" == "file/bob.txt" --- > replaceBaseName "fred" "bill" == "bill" --- > replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar" --- > Valid x => replaceBaseName x (takeBaseName x) == x -replaceBaseName :: FilePath -> String -> FilePath -replaceBaseName pth nam = combineAlways a (nam <.> ext) - where - (a,b) = splitFileName_ pth - ext = takeExtension b - --- | Is an item either a directory or the last character a path separator? 
--- --- > hasTrailingPathSeparator "test" == False --- > hasTrailingPathSeparator "test/" == True -hasTrailingPathSeparator :: FilePath -> Bool -hasTrailingPathSeparator "" = False -hasTrailingPathSeparator x = isPathSeparator (last x) - - -hasLeadingPathSeparator :: FilePath -> Bool -hasLeadingPathSeparator "" = False -hasLeadingPathSeparator x = isPathSeparator (head x) - - --- | Add a trailing file path separator if one is not already present. --- --- > hasTrailingPathSeparator (addTrailingPathSeparator x) --- > hasTrailingPathSeparator x ==> addTrailingPathSeparator x == x --- > Posix: addTrailingPathSeparator "test/rest" == "test/rest/" -addTrailingPathSeparator :: FilePath -> FilePath -addTrailingPathSeparator x = if hasTrailingPathSeparator x then x else x ++ [pathSeparator] - - --- | Remove any trailing path separators --- --- > dropTrailingPathSeparator "file/test/" == "file/test" --- > dropTrailingPathSeparator "/" == "/" --- > Windows: dropTrailingPathSeparator "\\" == "\\" --- > Posix: not (hasTrailingPathSeparator (dropTrailingPathSeparator x)) || isDrive x -dropTrailingPathSeparator :: FilePath -> FilePath -dropTrailingPathSeparator x = - if hasTrailingPathSeparator x && not (isDrive x) - then let x' = dropWhileEnd isPathSeparator x - in if null x' then [last x] else x' - else x - - --- | Get the directory name, move up one level. --- --- > takeDirectory "/directory/other.ext" == "/directory" --- > takeDirectory x `isPrefixOf` x || takeDirectory x == "." --- > takeDirectory "foo" == "." --- > takeDirectory "/" == "/" --- > takeDirectory "/foo" == "/" --- > takeDirectory "/foo/bar/baz" == "/foo/bar" --- > takeDirectory "/foo/bar/baz/" == "/foo/bar/baz" --- > takeDirectory "foo/bar/baz" == "foo/bar" --- > Windows: takeDirectory "foo\\bar" == "foo" --- > Windows: takeDirectory "foo\\bar\\\\" == "foo\\bar" --- > Windows: takeDirectory "C:\\" == "C:\\" -takeDirectory :: FilePath -> FilePath -takeDirectory = dropTrailingPathSeparator . 
dropFileName - --- | Set the directory, keeping the filename the same. --- --- > replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext" --- > Valid x => replaceDirectory x (takeDirectory x) `equalFilePath` x -replaceDirectory :: FilePath -> String -> FilePath -replaceDirectory x dir = combineAlways dir (takeFileName x) - - --- | An alias for ''. -combine :: FilePath -> FilePath -> FilePath -combine a b | hasLeadingPathSeparator b || hasDrive b = b - | otherwise = combineAlways a b - --- | Combine two paths, assuming rhs is NOT absolute. -combineAlways :: FilePath -> FilePath -> FilePath -combineAlways a b | null a = b - | null b = a - | hasTrailingPathSeparator a = a ++ b - | otherwise = case a of - [a1,':'] | isWindows && isLetter a1 -> a ++ b - _ -> a ++ [pathSeparator] ++ b - - --- | Combine two paths with a path separator. --- If the second path starts with a path separator or a drive letter, then it returns the second. --- The intention is that @readFile (dir '' file)@ will access the same file as --- @setCurrentDirectory dir; readFile file@. --- --- > Posix: "/directory" "file.ext" == "/directory/file.ext" --- > Windows: "/directory" "file.ext" == "/directory\\file.ext" --- > "directory" "/file.ext" == "/file.ext" --- > Valid x => (takeDirectory x takeFileName x) `equalFilePath` x --- --- Combined: --- --- > Posix: "/" "test" == "/test" --- > Posix: "home" "bob" == "home/bob" --- > Posix: "x:" "foo" == "x:/foo" --- > Windows: "C:\\foo" "bar" == "C:\\foo\\bar" --- > Windows: "home" "bob" == "home\\bob" --- --- Not combined: --- --- > Posix: "home" "/bob" == "/bob" --- > Windows: "home" "C:\\bob" == "C:\\bob" --- --- Not combined (tricky): --- --- On Windows, if a filepath starts with a single slash, it is relative to the --- root of the current drive. In [1], this is (confusingly) referred to as an --- absolute path. --- The current behavior of '' is to never combine these forms. 
--- --- > Windows: "home" "/bob" == "/bob" --- > Windows: "home" "\\bob" == "\\bob" --- > Windows: "C:\\home" "\\bob" == "\\bob" --- --- On Windows, from [1]: "If a file name begins with only a disk designator --- but not the backslash after the colon, it is interpreted as a relative path --- to the current directory on the drive with the specified letter." --- The current behavior of '' is to never combine these forms. --- --- > Windows: "D:\\foo" "C:bar" == "C:bar" --- > Windows: "C:\\foo" "C:bar" == "C:bar" -() :: FilePath -> FilePath -> FilePath -() = combine - - --- | Split a path by the directory separator. --- --- > splitPath "/directory/file.ext" == ["/","directory/","file.ext"] --- > concat (splitPath x) == x --- > splitPath "test//item/" == ["test//","item/"] --- > splitPath "test/item/file" == ["test/","item/","file"] --- > splitPath "" == [] --- > Windows: splitPath "c:\\test\\path" == ["c:\\","test\\","path"] --- > Posix: splitPath "/file/test" == ["/","file/","test"] -splitPath :: FilePath -> [FilePath] -splitPath x = [drive | drive /= ""] ++ f path - where - (drive,path) = splitDrive x - - f "" = [] - f y = (a++c) : f d - where - (a,b) = break isPathSeparator y - (c,d) = span isPathSeparator b - --- | Just as 'splitPath', but don't add the trailing slashes to each element. --- --- > splitDirectories "/directory/file.ext" == ["/","directory","file.ext"] --- > splitDirectories "test/file" == ["test","file"] --- > splitDirectories "/test/file" == ["/","test","file"] --- > Windows: splitDirectories "C:\\test\\file" == ["C:\\", "test", "file"] --- > Valid x => joinPath (splitDirectories x) `equalFilePath` x --- > splitDirectories "" == [] --- > Windows: splitDirectories "C:\\test\\\\\\file" == ["C:\\", "test", "file"] --- > splitDirectories "/test///file" == ["/","test","file"] -splitDirectories :: FilePath -> [FilePath] -splitDirectories = map dropTrailingPathSeparator . splitPath - - --- | Join path elements back together. 
--- --- > joinPath ["/","directory/","file.ext"] == "/directory/file.ext" --- > Valid x => joinPath (splitPath x) == x --- > joinPath [] == "" --- > Posix: joinPath ["test","file","path"] == "test/file/path" -joinPath :: [FilePath] -> FilePath --- Note that this definition on c:\\c:\\, join then split will give c:\\. -joinPath = foldr combine "" - - - - - - ---------------------------------------------------------------------- --- File name manipulators - --- | Equality of two 'FilePath's. --- If you call @System.Directory.canonicalizePath@ --- first this has a much better chance of working. --- Note that this doesn't follow symlinks or DOSNAM~1s. --- --- > x == y ==> equalFilePath x y --- > normalise x == normalise y ==> equalFilePath x y --- > equalFilePath "foo" "foo/" --- > not (equalFilePath "foo" "/foo") --- > Posix: not (equalFilePath "foo" "FOO") --- > Windows: equalFilePath "foo" "FOO" --- > Windows: not (equalFilePath "C:" "C:/") -equalFilePath :: FilePath -> FilePath -> Bool -equalFilePath a b = f a == f b - where - f x | isWindows = dropTrailingPathSeparator $ map toLower $ normalise x - | otherwise = dropTrailingPathSeparator $ normalise x - - --- | Contract a filename, based on a relative path. Note that the resulting path --- will never introduce @..@ paths, as the presence of symlinks means @..\/b@ --- may not reach @a\/b@ if it starts from @a\/c@. For a worked example see --- . --- --- The corresponding @makeAbsolute@ function can be found in --- @System.Directory@. --- --- > makeRelative "/directory" "/directory/file.ext" == "file.ext" --- > Valid x => makeRelative (takeDirectory x) x `equalFilePath` takeFileName x --- > makeRelative x x == "." 
--- > Valid x y => equalFilePath x y || (isRelative x && makeRelative y x == x) || equalFilePath (y makeRelative y x) x --- > Windows: makeRelative "C:\\Home" "c:\\home\\bob" == "bob" --- > Windows: makeRelative "C:\\Home" "c:/home/bob" == "bob" --- > Windows: makeRelative "C:\\Home" "D:\\Home\\Bob" == "D:\\Home\\Bob" --- > Windows: makeRelative "C:\\Home" "C:Home\\Bob" == "C:Home\\Bob" --- > Windows: makeRelative "/Home" "/home/bob" == "bob" --- > Windows: makeRelative "/" "//" == "//" --- > Posix: makeRelative "/Home" "/home/bob" == "/home/bob" --- > Posix: makeRelative "/home/" "/home/bob/foo/bar" == "bob/foo/bar" --- > Posix: makeRelative "/fred" "bob" == "bob" --- > Posix: makeRelative "/file/test" "/file/test/fred" == "fred" --- > Posix: makeRelative "/file/test" "/file/test/fred/" == "fred/" --- > Posix: makeRelative "some/path" "some/path/a/b/c" == "a/b/c" -makeRelative :: FilePath -> FilePath -> FilePath -makeRelative root path - | equalFilePath root path = "." - | takeAbs root /= takeAbs path = path - | otherwise = f (dropAbs root) (dropAbs path) - where - f "" y = dropWhile isPathSeparator y - f x y = let (x1,x2) = g x - (y1,y2) = g y - in if equalFilePath x1 y1 then f x2 y2 else path - - g x = (dropWhile isPathSeparator a, dropWhile isPathSeparator b) - where (a,b) = break isPathSeparator $ dropWhile isPathSeparator x - - -- on windows, need to drop '/' which is kind of absolute, but not a drive - dropAbs x | hasLeadingPathSeparator x && not (hasDrive x) = tail x - dropAbs x = dropDrive x - - takeAbs x | hasLeadingPathSeparator x && not (hasDrive x) = [pathSeparator] - takeAbs x = map (\y -> if isPathSeparator y then pathSeparator else toLower y) $ takeDrive x - --- | Normalise a file --- --- * \/\/ outside of the drive can be made blank --- --- * \/ -> 'pathSeparator' --- --- * .\/ -> \"\" --- --- > Posix: normalise "/file/\\test////" == "/file/\\test/" --- > Posix: normalise "/file/./test" == "/file/test" --- > Posix: normalise 
"/test/file/../bob/fred/" == "/test/file/../bob/fred/" --- > Posix: normalise "../bob/fred/" == "../bob/fred/" --- > Posix: normalise "./bob/fred/" == "bob/fred/" --- > Windows: normalise "c:\\file/bob\\" == "C:\\file\\bob\\" --- > Windows: normalise "c:\\" == "C:\\" --- > Windows: normalise "C:.\\" == "C:" --- > Windows: normalise "\\\\server\\test" == "\\\\server\\test" --- > Windows: normalise "//server/test" == "\\\\server\\test" --- > Windows: normalise "c:/file" == "C:\\file" --- > Windows: normalise "/file" == "\\file" --- > Windows: normalise "\\" == "\\" --- > Windows: normalise "/./" == "\\" --- > normalise "." == "." --- > Posix: normalise "./" == "./" --- > Posix: normalise "./." == "./" --- > Posix: normalise "/./" == "/" --- > Posix: normalise "/" == "/" --- > Posix: normalise "bob/fred/." == "bob/fred/" --- > Posix: normalise "//home" == "/home" -normalise :: FilePath -> FilePath -normalise path = result ++ [pathSeparator | addPathSeparator] - where - (drv,pth) = splitDrive path - result = joinDrive' (normaliseDrive drv) (f pth) - - joinDrive' "" "" = "." - joinDrive' d p = joinDrive d p - - addPathSeparator = isDirPath pth - && not (hasTrailingPathSeparator result) - && not (isRelativeDrive drv) - - isDirPath xs = hasTrailingPathSeparator xs - || not (null xs) && last xs == '.' && hasTrailingPathSeparator (init xs) - - f = joinPath . dropDots . propSep . splitDirectories - - propSep (x:xs) | all isPathSeparator x = [pathSeparator] : xs - | otherwise = x : xs - propSep [] = [] - - dropDots = filter ("." /=) - -normaliseDrive :: FilePath -> FilePath -normaliseDrive "" = "" -normaliseDrive _ | isPosix = [pathSeparator] -normaliseDrive drive = if isJust $ readDriveLetter x2 - then map toUpper x2 - else x2 - where - x2 = map repSlash drive - - repSlash x = if isPathSeparator x then pathSeparator else x - --- Information for validity functions on Windows. See [1]. 
-isBadCharacter :: Char -> Bool -isBadCharacter x = x >= '\0' && x <= '\31' || x `elem` ":*?><|\"" - -badElements :: [FilePath] -badElements = - ["CON","PRN","AUX","NUL","CLOCK$" - ,"COM1","COM2","COM3","COM4","COM5","COM6","COM7","COM8","COM9" - ,"LPT1","LPT2","LPT3","LPT4","LPT5","LPT6","LPT7","LPT8","LPT9"] - - --- | Is a FilePath valid, i.e. could you create a file like it? This function checks for invalid names, --- and invalid characters, but does not check if length limits are exceeded, as these are typically --- filesystem dependent. --- --- > isValid "" == False --- > isValid "\0" == False --- > Posix: isValid "/random_ path:*" == True --- > Posix: isValid x == not (null x) --- > Windows: isValid "c:\\test" == True --- > Windows: isValid "c:\\test:of_test" == False --- > Windows: isValid "test*" == False --- > Windows: isValid "c:\\test\\nul" == False --- > Windows: isValid "c:\\test\\prn.txt" == False --- > Windows: isValid "c:\\nul\\file" == False --- > Windows: isValid "\\\\" == False --- > Windows: isValid "\\\\\\foo" == False --- > Windows: isValid "\\\\?\\D:file" == False --- > Windows: isValid "foo\tbar" == False --- > Windows: isValid "nul .txt" == False --- > Windows: isValid " nul.txt" == True -isValid :: FilePath -> Bool -isValid "" = False -isValid x | '\0' `elem` x = False -isValid _ | isPosix = True -isValid path = - not (any isBadCharacter x2) && - not (any f $ splitDirectories x2) && - not (isJust (readDriveShare x1) && all isPathSeparator x1) && - not (isJust (readDriveUNC x1) && not (hasTrailingPathSeparator x1)) - where - (x1,x2) = splitDrive path - f x = map toUpper (dropWhileEnd (== ' ') $ dropExtensions x) `elem` badElements - - --- | Take a FilePath and make it valid; does not change already valid FilePaths. 
--- --- > isValid (makeValid x) --- > isValid x ==> makeValid x == x --- > makeValid "" == "_" --- > makeValid "file\0name" == "file_name" --- > Windows: makeValid "c:\\already\\/valid" == "c:\\already\\/valid" --- > Windows: makeValid "c:\\test:of_test" == "c:\\test_of_test" --- > Windows: makeValid "test*" == "test_" --- > Windows: makeValid "c:\\test\\nul" == "c:\\test\\nul_" --- > Windows: makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt" --- > Windows: makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt" --- > Windows: makeValid "c:\\nul\\file" == "c:\\nul_\\file" --- > Windows: makeValid "\\\\\\foo" == "\\\\drive" --- > Windows: makeValid "\\\\?\\D:file" == "\\\\?\\D:\\file" --- > Windows: makeValid "nul .txt" == "nul _.txt" -makeValid :: FilePath -> FilePath -makeValid "" = "_" -makeValid path - | isPosix = map (\x -> if x == '\0' then '_' else x) path - | isJust (readDriveShare drv) && all isPathSeparator drv = take 2 drv ++ "drive" - | isJust (readDriveUNC drv) && not (hasTrailingPathSeparator drv) = - makeValid (drv ++ [pathSeparator] ++ pth) - | otherwise = joinDrive drv $ validElements $ validChars pth - where - (drv,pth) = splitDrive path - - validChars = map f - f x = if isBadCharacter x then '_' else x - - validElements x = joinPath $ map g $ splitPath x - g x = h a ++ b - where (a,b) = break isPathSeparator x - h x = if map toUpper (dropWhileEnd (== ' ') a) `elem` badElements then a ++ "_" <.> b else x - where (a,b) = splitExtensions x - - --- | Is a path relative, or is it fixed to the root? 
--- --- > Windows: isRelative "path\\test" == True --- > Windows: isRelative "c:\\test" == False --- > Windows: isRelative "c:test" == True --- > Windows: isRelative "c:\\" == False --- > Windows: isRelative "c:/" == False --- > Windows: isRelative "c:" == True --- > Windows: isRelative "\\\\foo" == False --- > Windows: isRelative "\\\\?\\foo" == False --- > Windows: isRelative "\\\\?\\UNC\\foo" == False --- > Windows: isRelative "/foo" == True --- > Windows: isRelative "\\foo" == True --- > Posix: isRelative "test/path" == True --- > Posix: isRelative "/test" == False --- > Posix: isRelative "/" == False --- --- According to [1]: --- --- * "A UNC name of any format [is never relative]." --- --- * "You cannot use the "\\?\" prefix with a relative path." -isRelative :: FilePath -> Bool -isRelative x = null drive || isRelativeDrive drive - where drive = takeDrive x - - -{- c:foo -} --- From [1]: "If a file name begins with only a disk designator but not the --- backslash after the colon, it is interpreted as a relative path to the --- current directory on the drive with the specified letter." -isRelativeDrive :: String -> Bool -isRelativeDrive x = - maybe False (not . hasTrailingPathSeparator . fst) (readDriveLetter x) - - --- | @not . 'isRelative'@ --- --- > isAbsolute x == not (isRelative x) -isAbsolute :: FilePath -> Bool -isAbsolute = not . isRelative - - ------------------------------------------------------------------------------ --- dropWhileEnd (>2) [1,2,3,4,1,2,3,4] == [1,2,3,4,1,2]) --- Note that Data.List.dropWhileEnd is only available in base >= 4.5. -dropWhileEnd :: (a -> Bool) -> [a] -> [a] -dropWhileEnd p = reverse . dropWhile p . reverse - --- takeWhileEnd (>2) [1,2,3,4,1,2,3,4] == [3,4]) -takeWhileEnd :: (a -> Bool) -> [a] -> [a] -takeWhileEnd p = reverse . takeWhile p . 
reverse - --- spanEnd (>2) [1,2,3,4,1,2,3,4] = ([1,2,3,4,1,2], [3,4]) -spanEnd :: (a -> Bool) -> [a] -> ([a], [a]) -spanEnd p xs = (dropWhileEnd p xs, takeWhileEnd p xs) - --- breakEnd (< 2) [1,2,3,4,1,2,3,4] == ([1,2,3,4,1],[2,3,4]) -breakEnd :: (a -> Bool) -> [a] -> ([a], [a]) -breakEnd p = spanEnd (not . p) - --- | The stripSuffix function drops the given suffix from a list. It returns --- Nothing if the list did not end with the suffix given, or Just the list --- before the suffix, if it does. -stripSuffix :: Eq a => [a] -> [a] -> Maybe [a] -stripSuffix xs ys = reverse <$> stripPrefix (reverse xs) (reverse ys) diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath/Posix.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath/Posix.hs deleted file mode 100644 index 3fbd0ffcb1..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath/Posix.hs +++ /dev/null @@ -1,4 +0,0 @@ -{-# LANGUAGE CPP #-} -#define MODULE_NAME Posix -#define IS_WINDOWS False -#include "Internal.hs" diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath/Windows.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath/Windows.hs deleted file mode 100644 index 3e3e9d672e..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/System/FilePath/Windows.hs +++ /dev/null @@ -1,4 +0,0 @@ -{-# LANGUAGE CPP #-} -#define MODULE_NAME Windows -#define IS_WINDOWS True -#include "Internal.hs" diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/changelog.md b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/changelog.md deleted file mode 100644 index 0ef9259bfc..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/changelog.md +++ /dev/null @@ -1,89 +0,0 @@ -# Changelog for [`filepath` package](http://hackage.haskell.org/package/filepath) - -_Note: below all `FilePath` values are unquoted, 
so `\\` really means two backslashes._ - -## 1.4.2.1 *Jul 2018* - - * Bundled with GHC 8.6.1 - -## 1.4.2 *Jan 2018* - - * Bundled with GHC 8.4.1 - - * Add `isExtensionOf` function. - -## 1.4.1.2 *Feb 2017* - - * Bundled with GHC 8.2.1 - -## 1.4.1.1 *Nov 2016* - - * Bundled with GHC 8.0.2 - - * Documentation improvements - -## 1.4.1.0 *Dec 2015* - - * Bundled with GHC 8.0.1 - - * Add `replaceExtensions` and `stripExtension` functions. - - * Make `isValid` detect more invalid Windows paths, e.g. `nul .txt` and `foo\nbar`. - - * Improve the documentation. - - * Bug fix: `isValid "\0"` now returns `False`, instead of `True` - -## 1.4.0.0 *Mar 2015* - - * Bundled with GHC 7.10.1 - - * New function: Add `-<.>` as an alias for `replaceExtension`. - - * Semantic change: `joinDrive /foo bar` now returns `/foo/bar`, instead of `/foobar` - - * Semantic change: on Windows, `splitSearchPath File1;\"File 2\"` now returns `[File1,File2]` instead of `[File1,\"File2\"]` - - * Bug fix: on Posix systems, `normalise //home` now returns `/home`, instead of `//home` - - * Bug fix: `normalise /./` now returns `/` on Posix and `\` on Windows, instead of `//` and `\\` - - * Bug fix: `isDrive ""` now returns `False`, instead of `True` - - * Bug fix: on Windows, `dropTrailingPathSeparator /` now returns `/` unchanged, instead of the normalised `\` - - * Bug fix: on Windows, `equalFilePath C:\ C:` now returns `False`, instead of `True` - - * Bug fix: on Windows, `isValid \\\foo` now returns `False`, instead of `True` - - * Bug fix: on Windows, `isValid \\?\D:file` now returns `False`, instead of `True` - - * Bug fix: on Windows, `normalise \` now returns `\` unchanged, instead of `\\` - - * Bug fix: on Windows, `normalise C:.\` now returns `C:`, instead of `C:\\` - - * Bug fix: on Windows, `normalise //server/test` now returns `\\server\test`, instead of `//server/test` unchanged - - * Bug fix: on Windows, `makeRelative / //` now returns `//`, instead of `""` - -## 1.3.0.2 *Mar 2014* - - * 
Bundled with GHC 7.8.1 - - * Update to Cabal 1.10 format - - * Minor Haddock cleanups - -## 1.3.0.1 *Sep 2012* - - * Bundled with GHC 7.6.1 - - * No changes - -## 1.3.0.0 *Feb 2012* - - * Bundled with GHC 7.4.1 - - * Add support for SafeHaskell - - * Bug fix: `normalise /` now returns `/`, instead of `/.` diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/filepath.cabal b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/filepath.cabal deleted file mode 100644 index 28adbb8f9b..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/filepath.cabal +++ /dev/null @@ -1,68 +0,0 @@ -cabal-version: >= 1.18 -name: filepath -version: 1.4.2.1 -x-revision: 1 --- NOTE: Don't forget to update ./changelog.md -license: BSD3 -license-file: LICENSE -author: Neil Mitchell -maintainer: Neil Mitchell -copyright: Neil Mitchell 2005-2018 -bug-reports: https://github.com/haskell/filepath/issues -homepage: https://github.com/haskell/filepath#readme -category: System -build-type: Simple -synopsis: Library for manipulating FilePaths in a cross platform way. -tested-with: GHC==8.6.3, GHC==8.4.4, GHC==8.2.2, GHC==8.0.2, GHC==7.10.3, GHC==7.8.4, GHC==7.6.3, GHC==7.4.2 -description: - This package provides functionality for manipulating @FilePath@ values, and is shipped with both and the . It provides three modules: - . - * "System.FilePath.Posix" manipulates POSIX\/Linux style @FilePath@ values (with @\/@ as the path separator). - . - * "System.FilePath.Windows" manipulates Windows style @FilePath@ values (with either @\\@ or @\/@ as the path separator, and deals with drives). - . - * "System.FilePath" is an alias for the module appropriate to your platform. - . - All three modules provide the same API, and the same documentation (calling out differences in the different variants). 
- -extra-source-files: - System/FilePath/Internal.hs -extra-doc-files: - README.md - changelog.md - -source-repository head - type: git - location: https://github.com/haskell/filepath.git - -library - default-language: Haskell2010 - other-extensions: - CPP - PatternGuards - if impl(GHC >= 7.2) - other-extensions: Safe - - exposed-modules: - System.FilePath - System.FilePath.Posix - System.FilePath.Windows - - build-depends: - base >= 4 && < 4.14 - - ghc-options: -Wall - -test-suite filepath-tests - type: exitcode-stdio-1.0 - default-language: Haskell2010 - main-is: Test.hs - ghc-options: -main-is Test - hs-source-dirs: tests - other-modules: - TestGen - TestUtil - build-depends: - filepath, - base, - QuickCheck >= 2.7 && < 2.13 diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/tests/Test.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/tests/Test.hs deleted file mode 100644 index b9b695b56b..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/tests/Test.hs +++ /dev/null @@ -1,30 +0,0 @@ - -module Test(main) where - -import System.Environment -import TestGen -import Control.Monad -import Data.Maybe -import Test.QuickCheck - - -main :: IO () -main = do - args <- getArgs - let count = case args of i:_ -> read i; _ -> 10000 - putStrLn $ "Testing with " ++ show count ++ " repetitions" - let total = length tests - let showOutput x = show x{output=""} ++ "\n" ++ output x - bad <- fmap catMaybes $ forM (zip [1..] tests) $ \(i,(msg,prop)) -> do - putStrLn $ "Test " ++ show i ++ " of " ++ show total ++ ": " ++ msg - res <- quickCheckWithResult stdArgs{chatty=False, maxSuccess=count} prop - case res of - Success{} -> return Nothing - bad -> do putStrLn $ showOutput bad; putStrLn "TEST FAILURE!"; return $ Just (msg,bad) - if null bad then - putStrLn $ "Success, " ++ show total ++ " tests passed" - else do - putStrLn $ show (length bad) ++ " FAILURES\n" - forM_ (zip [1..] 
bad) $ \(i,(a,b)) -> - putStrLn $ "FAILURE " ++ show i ++ ": " ++ a ++ "\n" ++ showOutput b ++ "\n" - fail $ "FAILURE, failed " ++ show (length bad) ++ " of " ++ show total ++ " tests" diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/tests/TestGen.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/tests/TestGen.hs deleted file mode 100644 index 0d78ac00a1..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/tests/TestGen.hs +++ /dev/null @@ -1,461 +0,0 @@ --- GENERATED CODE: See ../Generate.hs -module TestGen(tests) where -import TestUtil -import qualified System.FilePath.Windows as W -import qualified System.FilePath.Posix as P -{-# ANN module "HLint: ignore" #-} -tests :: [(String, Property)] -tests = - [("W.pathSeparator == '\\\\'", property $ W.pathSeparator == '\\') - ,("P.pathSeparator == '/'", property $ P.pathSeparator == '/') - ,("P.isPathSeparator P.pathSeparator", property $ P.isPathSeparator P.pathSeparator) - ,("W.isPathSeparator W.pathSeparator", property $ W.isPathSeparator W.pathSeparator) - ,("W.pathSeparators == ['\\\\', '/']", property $ W.pathSeparators == ['\\', '/']) - ,("P.pathSeparators == ['/']", property $ P.pathSeparators == ['/']) - ,("P.pathSeparator `elem` P.pathSeparators", property $ P.pathSeparator `elem` P.pathSeparators) - ,("W.pathSeparator `elem` W.pathSeparators", property $ W.pathSeparator `elem` W.pathSeparators) - ,("P.isPathSeparator a == (a `elem` P.pathSeparators)", property $ \a -> P.isPathSeparator a == (a `elem` P.pathSeparators)) - ,("W.isPathSeparator a == (a `elem` W.pathSeparators)", property $ \a -> W.isPathSeparator a == (a `elem` W.pathSeparators)) - ,("W.searchPathSeparator == ';'", property $ W.searchPathSeparator == ';') - ,("P.searchPathSeparator == ':'", property $ P.searchPathSeparator == ':') - ,("P.isSearchPathSeparator a == (a == P.searchPathSeparator)", property $ \a -> P.isSearchPathSeparator a == (a == P.searchPathSeparator)) - 
,("W.isSearchPathSeparator a == (a == W.searchPathSeparator)", property $ \a -> W.isSearchPathSeparator a == (a == W.searchPathSeparator)) - ,("P.extSeparator == '.'", property $ P.extSeparator == '.') - ,("W.extSeparator == '.'", property $ W.extSeparator == '.') - ,("P.isExtSeparator a == (a == P.extSeparator)", property $ \a -> P.isExtSeparator a == (a == P.extSeparator)) - ,("W.isExtSeparator a == (a == W.extSeparator)", property $ \a -> W.isExtSeparator a == (a == W.extSeparator)) - ,("P.splitSearchPath \"File1:File2:File3\" == [\"File1\", \"File2\", \"File3\"]", property $ P.splitSearchPath "File1:File2:File3" == ["File1", "File2", "File3"]) - ,("P.splitSearchPath \"File1::File2:File3\" == [\"File1\", \".\", \"File2\", \"File3\"]", property $ P.splitSearchPath "File1::File2:File3" == ["File1", ".", "File2", "File3"]) - ,("W.splitSearchPath \"File1;File2;File3\" == [\"File1\", \"File2\", \"File3\"]", property $ W.splitSearchPath "File1;File2;File3" == ["File1", "File2", "File3"]) - ,("W.splitSearchPath \"File1;;File2;File3\" == [\"File1\", \"File2\", \"File3\"]", property $ W.splitSearchPath "File1;;File2;File3" == ["File1", "File2", "File3"]) - ,("W.splitSearchPath \"File1;\\\"File2\\\";File3\" == [\"File1\", \"File2\", \"File3\"]", property $ W.splitSearchPath "File1;\"File2\";File3" == ["File1", "File2", "File3"]) - ,("P.splitExtension \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ P.splitExtension "/directory/path.ext" == ("/directory/path", ".ext")) - ,("W.splitExtension \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ W.splitExtension "/directory/path.ext" == ("/directory/path", ".ext")) - ,("uncurry (++) (P.splitExtension x) == x", property $ \(QFilePath x) -> uncurry (++) (P.splitExtension x) == x) - ,("uncurry (++) (W.splitExtension x) == x", property $ \(QFilePath x) -> uncurry (++) (W.splitExtension x) == x) - ,("uncurry P.addExtension (P.splitExtension x) == x", property $ \(QFilePathValidP x) -> 
uncurry P.addExtension (P.splitExtension x) == x) - ,("uncurry W.addExtension (W.splitExtension x) == x", property $ \(QFilePathValidW x) -> uncurry W.addExtension (W.splitExtension x) == x) - ,("P.splitExtension \"file.txt\" == (\"file\", \".txt\")", property $ P.splitExtension "file.txt" == ("file", ".txt")) - ,("W.splitExtension \"file.txt\" == (\"file\", \".txt\")", property $ W.splitExtension "file.txt" == ("file", ".txt")) - ,("P.splitExtension \"file\" == (\"file\", \"\")", property $ P.splitExtension "file" == ("file", "")) - ,("W.splitExtension \"file\" == (\"file\", \"\")", property $ W.splitExtension "file" == ("file", "")) - ,("P.splitExtension \"file/file.txt\" == (\"file/file\", \".txt\")", property $ P.splitExtension "file/file.txt" == ("file/file", ".txt")) - ,("W.splitExtension \"file/file.txt\" == (\"file/file\", \".txt\")", property $ W.splitExtension "file/file.txt" == ("file/file", ".txt")) - ,("P.splitExtension \"file.txt/boris\" == (\"file.txt/boris\", \"\")", property $ P.splitExtension "file.txt/boris" == ("file.txt/boris", "")) - ,("W.splitExtension \"file.txt/boris\" == (\"file.txt/boris\", \"\")", property $ W.splitExtension "file.txt/boris" == ("file.txt/boris", "")) - ,("P.splitExtension \"file.txt/boris.ext\" == (\"file.txt/boris\", \".ext\")", property $ P.splitExtension "file.txt/boris.ext" == ("file.txt/boris", ".ext")) - ,("W.splitExtension \"file.txt/boris.ext\" == (\"file.txt/boris\", \".ext\")", property $ W.splitExtension "file.txt/boris.ext" == ("file.txt/boris", ".ext")) - ,("P.splitExtension \"file/path.txt.bob.fred\" == (\"file/path.txt.bob\", \".fred\")", property $ P.splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob", ".fred")) - ,("W.splitExtension \"file/path.txt.bob.fred\" == (\"file/path.txt.bob\", \".fred\")", property $ W.splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob", ".fred")) - ,("P.splitExtension \"file/path.txt/\" == (\"file/path.txt/\", \"\")", property $ P.splitExtension 
"file/path.txt/" == ("file/path.txt/", "")) - ,("W.splitExtension \"file/path.txt/\" == (\"file/path.txt/\", \"\")", property $ W.splitExtension "file/path.txt/" == ("file/path.txt/", "")) - ,("P.takeExtension \"/directory/path.ext\" == \".ext\"", property $ P.takeExtension "/directory/path.ext" == ".ext") - ,("W.takeExtension \"/directory/path.ext\" == \".ext\"", property $ W.takeExtension "/directory/path.ext" == ".ext") - ,("P.takeExtension x == snd (P.splitExtension x)", property $ \(QFilePath x) -> P.takeExtension x == snd (P.splitExtension x)) - ,("W.takeExtension x == snd (W.splitExtension x)", property $ \(QFilePath x) -> W.takeExtension x == snd (W.splitExtension x)) - ,("P.takeExtension (P.addExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidP x) -> P.takeExtension (P.addExtension x "ext") == ".ext") - ,("W.takeExtension (W.addExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidW x) -> W.takeExtension (W.addExtension x "ext") == ".ext") - ,("P.takeExtension (P.replaceExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidP x) -> P.takeExtension (P.replaceExtension x "ext") == ".ext") - ,("W.takeExtension (W.replaceExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidW x) -> W.takeExtension (W.replaceExtension x "ext") == ".ext") - ,("\"/directory/path.txt\" P.-<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" P.-<.> "ext" == "/directory/path.ext") - ,("\"/directory/path.txt\" W.-<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" W.-<.> "ext" == "/directory/path.ext") - ,("\"/directory/path.txt\" P.-<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" P.-<.> ".ext" == "/directory/path.ext") - ,("\"/directory/path.txt\" W.-<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" W.-<.> ".ext" == "/directory/path.ext") - ,("\"foo.o\" P.-<.> \"c\" == \"foo.c\"", property $ "foo.o" P.-<.> "c" == "foo.c") - ,("\"foo.o\" W.-<.> \"c\" == 
\"foo.c\"", property $ "foo.o" W.-<.> "c" == "foo.c") - ,("P.replaceExtension \"/directory/path.txt\" \"ext\" == \"/directory/path.ext\"", property $ P.replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext") - ,("W.replaceExtension \"/directory/path.txt\" \"ext\" == \"/directory/path.ext\"", property $ W.replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext") - ,("P.replaceExtension \"/directory/path.txt\" \".ext\" == \"/directory/path.ext\"", property $ P.replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext") - ,("W.replaceExtension \"/directory/path.txt\" \".ext\" == \"/directory/path.ext\"", property $ W.replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext") - ,("P.replaceExtension \"file.txt\" \".bob\" == \"file.bob\"", property $ P.replaceExtension "file.txt" ".bob" == "file.bob") - ,("W.replaceExtension \"file.txt\" \".bob\" == \"file.bob\"", property $ W.replaceExtension "file.txt" ".bob" == "file.bob") - ,("P.replaceExtension \"file.txt\" \"bob\" == \"file.bob\"", property $ P.replaceExtension "file.txt" "bob" == "file.bob") - ,("W.replaceExtension \"file.txt\" \"bob\" == \"file.bob\"", property $ W.replaceExtension "file.txt" "bob" == "file.bob") - ,("P.replaceExtension \"file\" \".bob\" == \"file.bob\"", property $ P.replaceExtension "file" ".bob" == "file.bob") - ,("W.replaceExtension \"file\" \".bob\" == \"file.bob\"", property $ W.replaceExtension "file" ".bob" == "file.bob") - ,("P.replaceExtension \"file.txt\" \"\" == \"file\"", property $ P.replaceExtension "file.txt" "" == "file") - ,("W.replaceExtension \"file.txt\" \"\" == \"file\"", property $ W.replaceExtension "file.txt" "" == "file") - ,("P.replaceExtension \"file.fred.bob\" \"txt\" == \"file.fred.txt\"", property $ P.replaceExtension "file.fred.bob" "txt" == "file.fred.txt") - ,("W.replaceExtension \"file.fred.bob\" \"txt\" == \"file.fred.txt\"", property $ W.replaceExtension "file.fred.bob" "txt" == "file.fred.txt") - 
,("P.replaceExtension x y == P.addExtension (P.dropExtension x) y", property $ \(QFilePath x) (QFilePath y) -> P.replaceExtension x y == P.addExtension (P.dropExtension x) y) - ,("W.replaceExtension x y == W.addExtension (W.dropExtension x) y", property $ \(QFilePath x) (QFilePath y) -> W.replaceExtension x y == W.addExtension (W.dropExtension x) y) - ,("\"/directory/path\" P.<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path" P.<.> "ext" == "/directory/path.ext") - ,("\"/directory/path\" W.<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path" W.<.> "ext" == "/directory/path.ext") - ,("\"/directory/path\" P.<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path" P.<.> ".ext" == "/directory/path.ext") - ,("\"/directory/path\" W.<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path" W.<.> ".ext" == "/directory/path.ext") - ,("P.dropExtension \"/directory/path.ext\" == \"/directory/path\"", property $ P.dropExtension "/directory/path.ext" == "/directory/path") - ,("W.dropExtension \"/directory/path.ext\" == \"/directory/path\"", property $ W.dropExtension "/directory/path.ext" == "/directory/path") - ,("P.dropExtension x == fst (P.splitExtension x)", property $ \(QFilePath x) -> P.dropExtension x == fst (P.splitExtension x)) - ,("W.dropExtension x == fst (W.splitExtension x)", property $ \(QFilePath x) -> W.dropExtension x == fst (W.splitExtension x)) - ,("P.addExtension \"/directory/path\" \"ext\" == \"/directory/path.ext\"", property $ P.addExtension "/directory/path" "ext" == "/directory/path.ext") - ,("W.addExtension \"/directory/path\" \"ext\" == \"/directory/path.ext\"", property $ W.addExtension "/directory/path" "ext" == "/directory/path.ext") - ,("P.addExtension \"file.txt\" \"bib\" == \"file.txt.bib\"", property $ P.addExtension "file.txt" "bib" == "file.txt.bib") - ,("W.addExtension \"file.txt\" \"bib\" == \"file.txt.bib\"", property $ W.addExtension "file.txt" "bib" == "file.txt.bib") - 
,("P.addExtension \"file.\" \".bib\" == \"file..bib\"", property $ P.addExtension "file." ".bib" == "file..bib") - ,("W.addExtension \"file.\" \".bib\" == \"file..bib\"", property $ W.addExtension "file." ".bib" == "file..bib") - ,("P.addExtension \"file\" \".bib\" == \"file.bib\"", property $ P.addExtension "file" ".bib" == "file.bib") - ,("W.addExtension \"file\" \".bib\" == \"file.bib\"", property $ W.addExtension "file" ".bib" == "file.bib") - ,("P.addExtension \"/\" \"x\" == \"/.x\"", property $ P.addExtension "/" "x" == "/.x") - ,("W.addExtension \"/\" \"x\" == \"/.x\"", property $ W.addExtension "/" "x" == "/.x") - ,("P.addExtension x \"\" == x", property $ \(QFilePath x) -> P.addExtension x "" == x) - ,("W.addExtension x \"\" == x", property $ \(QFilePath x) -> W.addExtension x "" == x) - ,("P.takeFileName (P.addExtension (P.addTrailingPathSeparator x) \"ext\") == \".ext\"", property $ \(QFilePathValidP x) -> P.takeFileName (P.addExtension (P.addTrailingPathSeparator x) "ext") == ".ext") - ,("W.takeFileName (W.addExtension (W.addTrailingPathSeparator x) \"ext\") == \".ext\"", property $ \(QFilePathValidW x) -> W.takeFileName (W.addExtension (W.addTrailingPathSeparator x) "ext") == ".ext") - ,("W.addExtension \"\\\\\\\\share\" \".txt\" == \"\\\\\\\\share\\\\.txt\"", property $ W.addExtension "\\\\share" ".txt" == "\\\\share\\.txt") - ,("P.hasExtension \"/directory/path.ext\" == True", property $ P.hasExtension "/directory/path.ext" == True) - ,("W.hasExtension \"/directory/path.ext\" == True", property $ W.hasExtension "/directory/path.ext" == True) - ,("P.hasExtension \"/directory/path\" == False", property $ P.hasExtension "/directory/path" == False) - ,("W.hasExtension \"/directory/path\" == False", property $ W.hasExtension "/directory/path" == False) - ,("null (P.takeExtension x) == not (P.hasExtension x)", property $ \(QFilePath x) -> null (P.takeExtension x) == not (P.hasExtension x)) - ,("null (W.takeExtension x) == not (W.hasExtension x)", property 
$ \(QFilePath x) -> null (W.takeExtension x) == not (W.hasExtension x)) - ,("\"png\" `P.isExtensionOf` \"/directory/file.png\" == True", property $ "png" `P.isExtensionOf` "/directory/file.png" == True) - ,("\"png\" `W.isExtensionOf` \"/directory/file.png\" == True", property $ "png" `W.isExtensionOf` "/directory/file.png" == True) - ,("\".png\" `P.isExtensionOf` \"/directory/file.png\" == True", property $ ".png" `P.isExtensionOf` "/directory/file.png" == True) - ,("\".png\" `W.isExtensionOf` \"/directory/file.png\" == True", property $ ".png" `W.isExtensionOf` "/directory/file.png" == True) - ,("\".tar.gz\" `P.isExtensionOf` \"bar/foo.tar.gz\" == True", property $ ".tar.gz" `P.isExtensionOf` "bar/foo.tar.gz" == True) - ,("\".tar.gz\" `W.isExtensionOf` \"bar/foo.tar.gz\" == True", property $ ".tar.gz" `W.isExtensionOf` "bar/foo.tar.gz" == True) - ,("\"ar.gz\" `P.isExtensionOf` \"bar/foo.tar.gz\" == False", property $ "ar.gz" `P.isExtensionOf` "bar/foo.tar.gz" == False) - ,("\"ar.gz\" `W.isExtensionOf` \"bar/foo.tar.gz\" == False", property $ "ar.gz" `W.isExtensionOf` "bar/foo.tar.gz" == False) - ,("\"png\" `P.isExtensionOf` \"/directory/file.png.jpg\" == False", property $ "png" `P.isExtensionOf` "/directory/file.png.jpg" == False) - ,("\"png\" `W.isExtensionOf` \"/directory/file.png.jpg\" == False", property $ "png" `W.isExtensionOf` "/directory/file.png.jpg" == False) - ,("\"csv/table.csv\" `P.isExtensionOf` \"/data/csv/table.csv\" == False", property $ "csv/table.csv" `P.isExtensionOf` "/data/csv/table.csv" == False) - ,("\"csv/table.csv\" `W.isExtensionOf` \"/data/csv/table.csv\" == False", property $ "csv/table.csv" `W.isExtensionOf` "/data/csv/table.csv" == False) - ,("P.stripExtension \"hs.o\" \"foo.x.hs.o\" == Just \"foo.x\"", property $ P.stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x") - ,("W.stripExtension \"hs.o\" \"foo.x.hs.o\" == Just \"foo.x\"", property $ W.stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x") - ,("P.stripExtension \"hi.o\" 
\"foo.x.hs.o\" == Nothing", property $ P.stripExtension "hi.o" "foo.x.hs.o" == Nothing) - ,("W.stripExtension \"hi.o\" \"foo.x.hs.o\" == Nothing", property $ W.stripExtension "hi.o" "foo.x.hs.o" == Nothing) - ,("P.dropExtension x == fromJust (P.stripExtension (P.takeExtension x) x)", property $ \(QFilePath x) -> P.dropExtension x == fromJust (P.stripExtension (P.takeExtension x) x)) - ,("W.dropExtension x == fromJust (W.stripExtension (W.takeExtension x) x)", property $ \(QFilePath x) -> W.dropExtension x == fromJust (W.stripExtension (W.takeExtension x) x)) - ,("P.dropExtensions x == fromJust (P.stripExtension (P.takeExtensions x) x)", property $ \(QFilePath x) -> P.dropExtensions x == fromJust (P.stripExtension (P.takeExtensions x) x)) - ,("W.dropExtensions x == fromJust (W.stripExtension (W.takeExtensions x) x)", property $ \(QFilePath x) -> W.dropExtensions x == fromJust (W.stripExtension (W.takeExtensions x) x)) - ,("P.stripExtension \".c.d\" \"a.b.c.d\" == Just \"a.b\"", property $ P.stripExtension ".c.d" "a.b.c.d" == Just "a.b") - ,("W.stripExtension \".c.d\" \"a.b.c.d\" == Just \"a.b\"", property $ W.stripExtension ".c.d" "a.b.c.d" == Just "a.b") - ,("P.stripExtension \".c.d\" \"a.b..c.d\" == Just \"a.b.\"", property $ P.stripExtension ".c.d" "a.b..c.d" == Just "a.b.") - ,("W.stripExtension \".c.d\" \"a.b..c.d\" == Just \"a.b.\"", property $ W.stripExtension ".c.d" "a.b..c.d" == Just "a.b.") - ,("P.stripExtension \"baz\" \"foo.bar\" == Nothing", property $ P.stripExtension "baz" "foo.bar" == Nothing) - ,("W.stripExtension \"baz\" \"foo.bar\" == Nothing", property $ W.stripExtension "baz" "foo.bar" == Nothing) - ,("P.stripExtension \"bar\" \"foobar\" == Nothing", property $ P.stripExtension "bar" "foobar" == Nothing) - ,("W.stripExtension \"bar\" \"foobar\" == Nothing", property $ W.stripExtension "bar" "foobar" == Nothing) - ,("P.stripExtension \"\" x == Just x", property $ \(QFilePath x) -> P.stripExtension "" x == Just x) - ,("W.stripExtension \"\" x == 
Just x", property $ \(QFilePath x) -> W.stripExtension "" x == Just x) - ,("P.splitExtensions \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ P.splitExtensions "/directory/path.ext" == ("/directory/path", ".ext")) - ,("W.splitExtensions \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ W.splitExtensions "/directory/path.ext" == ("/directory/path", ".ext")) - ,("P.splitExtensions \"file.tar.gz\" == (\"file\", \".tar.gz\")", property $ P.splitExtensions "file.tar.gz" == ("file", ".tar.gz")) - ,("W.splitExtensions \"file.tar.gz\" == (\"file\", \".tar.gz\")", property $ W.splitExtensions "file.tar.gz" == ("file", ".tar.gz")) - ,("uncurry (++) (P.splitExtensions x) == x", property $ \(QFilePath x) -> uncurry (++) (P.splitExtensions x) == x) - ,("uncurry (++) (W.splitExtensions x) == x", property $ \(QFilePath x) -> uncurry (++) (W.splitExtensions x) == x) - ,("uncurry P.addExtension (P.splitExtensions x) == x", property $ \(QFilePathValidP x) -> uncurry P.addExtension (P.splitExtensions x) == x) - ,("uncurry W.addExtension (W.splitExtensions x) == x", property $ \(QFilePathValidW x) -> uncurry W.addExtension (W.splitExtensions x) == x) - ,("P.splitExtensions \"file.tar.gz\" == (\"file\", \".tar.gz\")", property $ P.splitExtensions "file.tar.gz" == ("file", ".tar.gz")) - ,("W.splitExtensions \"file.tar.gz\" == (\"file\", \".tar.gz\")", property $ W.splitExtensions "file.tar.gz" == ("file", ".tar.gz")) - ,("P.dropExtensions \"/directory/path.ext\" == \"/directory/path\"", property $ P.dropExtensions "/directory/path.ext" == "/directory/path") - ,("W.dropExtensions \"/directory/path.ext\" == \"/directory/path\"", property $ W.dropExtensions "/directory/path.ext" == "/directory/path") - ,("P.dropExtensions \"file.tar.gz\" == \"file\"", property $ P.dropExtensions "file.tar.gz" == "file") - ,("W.dropExtensions \"file.tar.gz\" == \"file\"", property $ W.dropExtensions "file.tar.gz" == "file") - ,("not $ P.hasExtension $ 
P.dropExtensions x", property $ \(QFilePath x) -> not $ P.hasExtension $ P.dropExtensions x) - ,("not $ W.hasExtension $ W.dropExtensions x", property $ \(QFilePath x) -> not $ W.hasExtension $ W.dropExtensions x) - ,("not $ any P.isExtSeparator $ P.takeFileName $ P.dropExtensions x", property $ \(QFilePath x) -> not $ any P.isExtSeparator $ P.takeFileName $ P.dropExtensions x) - ,("not $ any W.isExtSeparator $ W.takeFileName $ W.dropExtensions x", property $ \(QFilePath x) -> not $ any W.isExtSeparator $ W.takeFileName $ W.dropExtensions x) - ,("P.takeExtensions \"/directory/path.ext\" == \".ext\"", property $ P.takeExtensions "/directory/path.ext" == ".ext") - ,("W.takeExtensions \"/directory/path.ext\" == \".ext\"", property $ W.takeExtensions "/directory/path.ext" == ".ext") - ,("P.takeExtensions \"file.tar.gz\" == \".tar.gz\"", property $ P.takeExtensions "file.tar.gz" == ".tar.gz") - ,("W.takeExtensions \"file.tar.gz\" == \".tar.gz\"", property $ W.takeExtensions "file.tar.gz" == ".tar.gz") - ,("P.replaceExtensions \"file.fred.bob\" \"txt\" == \"file.txt\"", property $ P.replaceExtensions "file.fred.bob" "txt" == "file.txt") - ,("W.replaceExtensions \"file.fred.bob\" \"txt\" == \"file.txt\"", property $ W.replaceExtensions "file.fred.bob" "txt" == "file.txt") - ,("P.replaceExtensions \"file.fred.bob\" \"tar.gz\" == \"file.tar.gz\"", property $ P.replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz") - ,("W.replaceExtensions \"file.fred.bob\" \"tar.gz\" == \"file.tar.gz\"", property $ W.replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz") - ,("uncurry (++) (P.splitDrive x) == x", property $ \(QFilePath x) -> uncurry (++) (P.splitDrive x) == x) - ,("uncurry (++) (W.splitDrive x) == x", property $ \(QFilePath x) -> uncurry (++) (W.splitDrive x) == x) - ,("W.splitDrive \"file\" == (\"\", \"file\")", property $ W.splitDrive "file" == ("", "file")) - ,("W.splitDrive \"c:/file\" == (\"c:/\", \"file\")", property $ W.splitDrive "c:/file" == ("c:/", 
"file")) - ,("W.splitDrive \"c:\\\\file\" == (\"c:\\\\\", \"file\")", property $ W.splitDrive "c:\\file" == ("c:\\", "file")) - ,("W.splitDrive \"\\\\\\\\shared\\\\test\" == (\"\\\\\\\\shared\\\\\", \"test\")", property $ W.splitDrive "\\\\shared\\test" == ("\\\\shared\\", "test")) - ,("W.splitDrive \"\\\\\\\\shared\" == (\"\\\\\\\\shared\", \"\")", property $ W.splitDrive "\\\\shared" == ("\\\\shared", "")) - ,("W.splitDrive \"\\\\\\\\?\\\\UNC\\\\shared\\\\file\" == (\"\\\\\\\\?\\\\UNC\\\\shared\\\\\", \"file\")", property $ W.splitDrive "\\\\?\\UNC\\shared\\file" == ("\\\\?\\UNC\\shared\\", "file")) - ,("W.splitDrive \"\\\\\\\\?\\\\UNCshared\\\\file\" == (\"\\\\\\\\?\\\\\", \"UNCshared\\\\file\")", property $ W.splitDrive "\\\\?\\UNCshared\\file" == ("\\\\?\\", "UNCshared\\file")) - ,("W.splitDrive \"\\\\\\\\?\\\\d:\\\\file\" == (\"\\\\\\\\?\\\\d:\\\\\", \"file\")", property $ W.splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\", "file")) - ,("W.splitDrive \"/d\" == (\"\", \"/d\")", property $ W.splitDrive "/d" == ("", "/d")) - ,("P.splitDrive \"/test\" == (\"/\", \"test\")", property $ P.splitDrive "/test" == ("/", "test")) - ,("P.splitDrive \"//test\" == (\"//\", \"test\")", property $ P.splitDrive "//test" == ("//", "test")) - ,("P.splitDrive \"test/file\" == (\"\", \"test/file\")", property $ P.splitDrive "test/file" == ("", "test/file")) - ,("P.splitDrive \"file\" == (\"\", \"file\")", property $ P.splitDrive "file" == ("", "file")) - ,("uncurry P.joinDrive (P.splitDrive x) == x", property $ \(QFilePathValidP x) -> uncurry P.joinDrive (P.splitDrive x) == x) - ,("uncurry W.joinDrive (W.splitDrive x) == x", property $ \(QFilePathValidW x) -> uncurry W.joinDrive (W.splitDrive x) == x) - ,("W.joinDrive \"C:\" \"foo\" == \"C:foo\"", property $ W.joinDrive "C:" "foo" == "C:foo") - ,("W.joinDrive \"C:\\\\\" \"bar\" == \"C:\\\\bar\"", property $ W.joinDrive "C:\\" "bar" == "C:\\bar") - ,("W.joinDrive \"\\\\\\\\share\" \"foo\" == \"\\\\\\\\share\\\\foo\"", property $ 
W.joinDrive "\\\\share" "foo" == "\\\\share\\foo") - ,("W.joinDrive \"/:\" \"foo\" == \"/:\\\\foo\"", property $ W.joinDrive "/:" "foo" == "/:\\foo") - ,("P.takeDrive x == fst (P.splitDrive x)", property $ \(QFilePath x) -> P.takeDrive x == fst (P.splitDrive x)) - ,("W.takeDrive x == fst (W.splitDrive x)", property $ \(QFilePath x) -> W.takeDrive x == fst (W.splitDrive x)) - ,("P.dropDrive x == snd (P.splitDrive x)", property $ \(QFilePath x) -> P.dropDrive x == snd (P.splitDrive x)) - ,("W.dropDrive x == snd (W.splitDrive x)", property $ \(QFilePath x) -> W.dropDrive x == snd (W.splitDrive x)) - ,("not (P.hasDrive x) == null (P.takeDrive x)", property $ \(QFilePath x) -> not (P.hasDrive x) == null (P.takeDrive x)) - ,("not (W.hasDrive x) == null (W.takeDrive x)", property $ \(QFilePath x) -> not (W.hasDrive x) == null (W.takeDrive x)) - ,("P.hasDrive \"/foo\" == True", property $ P.hasDrive "/foo" == True) - ,("W.hasDrive \"C:\\\\foo\" == True", property $ W.hasDrive "C:\\foo" == True) - ,("W.hasDrive \"C:foo\" == True", property $ W.hasDrive "C:foo" == True) - ,("P.hasDrive \"foo\" == False", property $ P.hasDrive "foo" == False) - ,("W.hasDrive \"foo\" == False", property $ W.hasDrive "foo" == False) - ,("P.hasDrive \"\" == False", property $ P.hasDrive "" == False) - ,("W.hasDrive \"\" == False", property $ W.hasDrive "" == False) - ,("P.isDrive \"/\" == True", property $ P.isDrive "/" == True) - ,("P.isDrive \"/foo\" == False", property $ P.isDrive "/foo" == False) - ,("W.isDrive \"C:\\\\\" == True", property $ W.isDrive "C:\\" == True) - ,("W.isDrive \"C:\\\\foo\" == False", property $ W.isDrive "C:\\foo" == False) - ,("P.isDrive \"\" == False", property $ P.isDrive "" == False) - ,("W.isDrive \"\" == False", property $ W.isDrive "" == False) - ,("P.splitFileName \"/directory/file.ext\" == (\"/directory/\", \"file.ext\")", property $ P.splitFileName "/directory/file.ext" == ("/directory/", "file.ext")) - ,("W.splitFileName \"/directory/file.ext\" == 
(\"/directory/\", \"file.ext\")", property $ W.splitFileName "/directory/file.ext" == ("/directory/", "file.ext")) - ,("uncurry (P.) (P.splitFileName x) == x || fst (P.splitFileName x) == \"./\"", property $ \(QFilePathValidP x) -> uncurry (P.) (P.splitFileName x) == x || fst (P.splitFileName x) == "./") - ,("uncurry (W.) (W.splitFileName x) == x || fst (W.splitFileName x) == \"./\"", property $ \(QFilePathValidW x) -> uncurry (W.) (W.splitFileName x) == x || fst (W.splitFileName x) == "./") - ,("P.isValid (fst (P.splitFileName x))", property $ \(QFilePathValidP x) -> P.isValid (fst (P.splitFileName x))) - ,("W.isValid (fst (W.splitFileName x))", property $ \(QFilePathValidW x) -> W.isValid (fst (W.splitFileName x))) - ,("P.splitFileName \"file/bob.txt\" == (\"file/\", \"bob.txt\")", property $ P.splitFileName "file/bob.txt" == ("file/", "bob.txt")) - ,("W.splitFileName \"file/bob.txt\" == (\"file/\", \"bob.txt\")", property $ W.splitFileName "file/bob.txt" == ("file/", "bob.txt")) - ,("P.splitFileName \"file/\" == (\"file/\", \"\")", property $ P.splitFileName "file/" == ("file/", "")) - ,("W.splitFileName \"file/\" == (\"file/\", \"\")", property $ W.splitFileName "file/" == ("file/", "")) - ,("P.splitFileName \"bob\" == (\"./\", \"bob\")", property $ P.splitFileName "bob" == ("./", "bob")) - ,("W.splitFileName \"bob\" == (\"./\", \"bob\")", property $ W.splitFileName "bob" == ("./", "bob")) - ,("P.splitFileName \"/\" == (\"/\", \"\")", property $ P.splitFileName "/" == ("/", "")) - ,("W.splitFileName \"c:\" == (\"c:\", \"\")", property $ W.splitFileName "c:" == ("c:", "")) - ,("P.replaceFileName \"/directory/other.txt\" \"file.ext\" == \"/directory/file.ext\"", property $ P.replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext") - ,("W.replaceFileName \"/directory/other.txt\" \"file.ext\" == \"/directory/file.ext\"", property $ W.replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext") - ,("P.replaceFileName x 
(P.takeFileName x) == x", property $ \(QFilePathValidP x) -> P.replaceFileName x (P.takeFileName x) == x) - ,("W.replaceFileName x (W.takeFileName x) == x", property $ \(QFilePathValidW x) -> W.replaceFileName x (W.takeFileName x) == x) - ,("P.dropFileName \"/directory/file.ext\" == \"/directory/\"", property $ P.dropFileName "/directory/file.ext" == "/directory/") - ,("W.dropFileName \"/directory/file.ext\" == \"/directory/\"", property $ W.dropFileName "/directory/file.ext" == "/directory/") - ,("P.dropFileName x == fst (P.splitFileName x)", property $ \(QFilePath x) -> P.dropFileName x == fst (P.splitFileName x)) - ,("W.dropFileName x == fst (W.splitFileName x)", property $ \(QFilePath x) -> W.dropFileName x == fst (W.splitFileName x)) - ,("P.takeFileName \"/directory/file.ext\" == \"file.ext\"", property $ P.takeFileName "/directory/file.ext" == "file.ext") - ,("W.takeFileName \"/directory/file.ext\" == \"file.ext\"", property $ W.takeFileName "/directory/file.ext" == "file.ext") - ,("P.takeFileName \"test/\" == \"\"", property $ P.takeFileName "test/" == "") - ,("W.takeFileName \"test/\" == \"\"", property $ W.takeFileName "test/" == "") - ,("P.takeFileName x `isSuffixOf` x", property $ \(QFilePath x) -> P.takeFileName x `isSuffixOf` x) - ,("W.takeFileName x `isSuffixOf` x", property $ \(QFilePath x) -> W.takeFileName x `isSuffixOf` x) - ,("P.takeFileName x == snd (P.splitFileName x)", property $ \(QFilePath x) -> P.takeFileName x == snd (P.splitFileName x)) - ,("W.takeFileName x == snd (W.splitFileName x)", property $ \(QFilePath x) -> W.takeFileName x == snd (W.splitFileName x)) - ,("P.takeFileName (P.replaceFileName x \"fred\") == \"fred\"", property $ \(QFilePathValidP x) -> P.takeFileName (P.replaceFileName x "fred") == "fred") - ,("W.takeFileName (W.replaceFileName x \"fred\") == \"fred\"", property $ \(QFilePathValidW x) -> W.takeFileName (W.replaceFileName x "fred") == "fred") - ,("P.takeFileName (x P. 
\"fred\") == \"fred\"", property $ \(QFilePathValidP x) -> P.takeFileName (x P. "fred") == "fred") - ,("W.takeFileName (x W. \"fred\") == \"fred\"", property $ \(QFilePathValidW x) -> W.takeFileName (x W. "fred") == "fred") - ,("P.isRelative (P.takeFileName x)", property $ \(QFilePathValidP x) -> P.isRelative (P.takeFileName x)) - ,("W.isRelative (W.takeFileName x)", property $ \(QFilePathValidW x) -> W.isRelative (W.takeFileName x)) - ,("P.takeBaseName \"/directory/file.ext\" == \"file\"", property $ P.takeBaseName "/directory/file.ext" == "file") - ,("W.takeBaseName \"/directory/file.ext\" == \"file\"", property $ W.takeBaseName "/directory/file.ext" == "file") - ,("P.takeBaseName \"file/test.txt\" == \"test\"", property $ P.takeBaseName "file/test.txt" == "test") - ,("W.takeBaseName \"file/test.txt\" == \"test\"", property $ W.takeBaseName "file/test.txt" == "test") - ,("P.takeBaseName \"dave.ext\" == \"dave\"", property $ P.takeBaseName "dave.ext" == "dave") - ,("W.takeBaseName \"dave.ext\" == \"dave\"", property $ W.takeBaseName "dave.ext" == "dave") - ,("P.takeBaseName \"\" == \"\"", property $ P.takeBaseName "" == "") - ,("W.takeBaseName \"\" == \"\"", property $ W.takeBaseName "" == "") - ,("P.takeBaseName \"test\" == \"test\"", property $ P.takeBaseName "test" == "test") - ,("W.takeBaseName \"test\" == \"test\"", property $ W.takeBaseName "test" == "test") - ,("P.takeBaseName (P.addTrailingPathSeparator x) == \"\"", property $ \(QFilePath x) -> P.takeBaseName (P.addTrailingPathSeparator x) == "") - ,("W.takeBaseName (W.addTrailingPathSeparator x) == \"\"", property $ \(QFilePath x) -> W.takeBaseName (W.addTrailingPathSeparator x) == "") - ,("P.takeBaseName \"file/file.tar.gz\" == \"file.tar\"", property $ P.takeBaseName "file/file.tar.gz" == "file.tar") - ,("W.takeBaseName \"file/file.tar.gz\" == \"file.tar\"", property $ W.takeBaseName "file/file.tar.gz" == "file.tar") - ,("P.replaceBaseName \"/directory/other.ext\" \"file\" == \"/directory/file.ext\"", 
property $ P.replaceBaseName "/directory/other.ext" "file" == "/directory/file.ext") - ,("W.replaceBaseName \"/directory/other.ext\" \"file\" == \"/directory/file.ext\"", property $ W.replaceBaseName "/directory/other.ext" "file" == "/directory/file.ext") - ,("P.replaceBaseName \"file/test.txt\" \"bob\" == \"file/bob.txt\"", property $ P.replaceBaseName "file/test.txt" "bob" == "file/bob.txt") - ,("W.replaceBaseName \"file/test.txt\" \"bob\" == \"file/bob.txt\"", property $ W.replaceBaseName "file/test.txt" "bob" == "file/bob.txt") - ,("P.replaceBaseName \"fred\" \"bill\" == \"bill\"", property $ P.replaceBaseName "fred" "bill" == "bill") - ,("W.replaceBaseName \"fred\" \"bill\" == \"bill\"", property $ W.replaceBaseName "fred" "bill" == "bill") - ,("P.replaceBaseName \"/dave/fred/bob.gz.tar\" \"new\" == \"/dave/fred/new.tar\"", property $ P.replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar") - ,("W.replaceBaseName \"/dave/fred/bob.gz.tar\" \"new\" == \"/dave/fred/new.tar\"", property $ W.replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar") - ,("P.replaceBaseName x (P.takeBaseName x) == x", property $ \(QFilePathValidP x) -> P.replaceBaseName x (P.takeBaseName x) == x) - ,("W.replaceBaseName x (W.takeBaseName x) == x", property $ \(QFilePathValidW x) -> W.replaceBaseName x (W.takeBaseName x) == x) - ,("P.hasTrailingPathSeparator \"test\" == False", property $ P.hasTrailingPathSeparator "test" == False) - ,("W.hasTrailingPathSeparator \"test\" == False", property $ W.hasTrailingPathSeparator "test" == False) - ,("P.hasTrailingPathSeparator \"test/\" == True", property $ P.hasTrailingPathSeparator "test/" == True) - ,("W.hasTrailingPathSeparator \"test/\" == True", property $ W.hasTrailingPathSeparator "test/" == True) - ,("P.hasTrailingPathSeparator (P.addTrailingPathSeparator x)", property $ \(QFilePath x) -> P.hasTrailingPathSeparator (P.addTrailingPathSeparator x)) - ,("W.hasTrailingPathSeparator (W.addTrailingPathSeparator 
x)", property $ \(QFilePath x) -> W.hasTrailingPathSeparator (W.addTrailingPathSeparator x)) - ,("P.hasTrailingPathSeparator x ==> P.addTrailingPathSeparator x == x", property $ \(QFilePath x) -> P.hasTrailingPathSeparator x ==> P.addTrailingPathSeparator x == x) - ,("W.hasTrailingPathSeparator x ==> W.addTrailingPathSeparator x == x", property $ \(QFilePath x) -> W.hasTrailingPathSeparator x ==> W.addTrailingPathSeparator x == x) - ,("P.addTrailingPathSeparator \"test/rest\" == \"test/rest/\"", property $ P.addTrailingPathSeparator "test/rest" == "test/rest/") - ,("P.dropTrailingPathSeparator \"file/test/\" == \"file/test\"", property $ P.dropTrailingPathSeparator "file/test/" == "file/test") - ,("W.dropTrailingPathSeparator \"file/test/\" == \"file/test\"", property $ W.dropTrailingPathSeparator "file/test/" == "file/test") - ,("P.dropTrailingPathSeparator \"/\" == \"/\"", property $ P.dropTrailingPathSeparator "/" == "/") - ,("W.dropTrailingPathSeparator \"/\" == \"/\"", property $ W.dropTrailingPathSeparator "/" == "/") - ,("W.dropTrailingPathSeparator \"\\\\\" == \"\\\\\"", property $ W.dropTrailingPathSeparator "\\" == "\\") - ,("not (P.hasTrailingPathSeparator (P.dropTrailingPathSeparator x)) || P.isDrive x", property $ \(QFilePath x) -> not (P.hasTrailingPathSeparator (P.dropTrailingPathSeparator x)) || P.isDrive x) - ,("P.takeDirectory \"/directory/other.ext\" == \"/directory\"", property $ P.takeDirectory "/directory/other.ext" == "/directory") - ,("W.takeDirectory \"/directory/other.ext\" == \"/directory\"", property $ W.takeDirectory "/directory/other.ext" == "/directory") - ,("P.takeDirectory x `isPrefixOf` x || P.takeDirectory x == \".\"", property $ \(QFilePath x) -> P.takeDirectory x `isPrefixOf` x || P.takeDirectory x == ".") - ,("W.takeDirectory x `isPrefixOf` x || W.takeDirectory x == \".\"", property $ \(QFilePath x) -> W.takeDirectory x `isPrefixOf` x || W.takeDirectory x == ".") - ,("P.takeDirectory \"foo\" == \".\"", property $ 
P.takeDirectory "foo" == ".") - ,("W.takeDirectory \"foo\" == \".\"", property $ W.takeDirectory "foo" == ".") - ,("P.takeDirectory \"/\" == \"/\"", property $ P.takeDirectory "/" == "/") - ,("W.takeDirectory \"/\" == \"/\"", property $ W.takeDirectory "/" == "/") - ,("P.takeDirectory \"/foo\" == \"/\"", property $ P.takeDirectory "/foo" == "/") - ,("W.takeDirectory \"/foo\" == \"/\"", property $ W.takeDirectory "/foo" == "/") - ,("P.takeDirectory \"/foo/bar/baz\" == \"/foo/bar\"", property $ P.takeDirectory "/foo/bar/baz" == "/foo/bar") - ,("W.takeDirectory \"/foo/bar/baz\" == \"/foo/bar\"", property $ W.takeDirectory "/foo/bar/baz" == "/foo/bar") - ,("P.takeDirectory \"/foo/bar/baz/\" == \"/foo/bar/baz\"", property $ P.takeDirectory "/foo/bar/baz/" == "/foo/bar/baz") - ,("W.takeDirectory \"/foo/bar/baz/\" == \"/foo/bar/baz\"", property $ W.takeDirectory "/foo/bar/baz/" == "/foo/bar/baz") - ,("P.takeDirectory \"foo/bar/baz\" == \"foo/bar\"", property $ P.takeDirectory "foo/bar/baz" == "foo/bar") - ,("W.takeDirectory \"foo/bar/baz\" == \"foo/bar\"", property $ W.takeDirectory "foo/bar/baz" == "foo/bar") - ,("W.takeDirectory \"foo\\\\bar\" == \"foo\"", property $ W.takeDirectory "foo\\bar" == "foo") - ,("W.takeDirectory \"foo\\\\bar\\\\\\\\\" == \"foo\\\\bar\"", property $ W.takeDirectory "foo\\bar\\\\" == "foo\\bar") - ,("W.takeDirectory \"C:\\\\\" == \"C:\\\\\"", property $ W.takeDirectory "C:\\" == "C:\\") - ,("P.replaceDirectory \"root/file.ext\" \"/directory/\" == \"/directory/file.ext\"", property $ P.replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext") - ,("W.replaceDirectory \"root/file.ext\" \"/directory/\" == \"/directory/file.ext\"", property $ W.replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext") - ,("P.replaceDirectory x (P.takeDirectory x) `P.equalFilePath` x", property $ \(QFilePathValidP x) -> P.replaceDirectory x (P.takeDirectory x) `P.equalFilePath` x) - ,("W.replaceDirectory x (W.takeDirectory x) 
`W.equalFilePath` x", property $ \(QFilePathValidW x) -> W.replaceDirectory x (W.takeDirectory x) `W.equalFilePath` x) - ,("\"/directory\" P. \"file.ext\" == \"/directory/file.ext\"", property $ "/directory" P. "file.ext" == "/directory/file.ext") - ,("\"/directory\" W. \"file.ext\" == \"/directory\\\\file.ext\"", property $ "/directory" W. "file.ext" == "/directory\\file.ext") - ,("\"directory\" P. \"/file.ext\" == \"/file.ext\"", property $ "directory" P. "/file.ext" == "/file.ext") - ,("\"directory\" W. \"/file.ext\" == \"/file.ext\"", property $ "directory" W. "/file.ext" == "/file.ext") - ,("(P.takeDirectory x P. P.takeFileName x) `P.equalFilePath` x", property $ \(QFilePathValidP x) -> (P.takeDirectory x P. P.takeFileName x) `P.equalFilePath` x) - ,("(W.takeDirectory x W. W.takeFileName x) `W.equalFilePath` x", property $ \(QFilePathValidW x) -> (W.takeDirectory x W. W.takeFileName x) `W.equalFilePath` x) - ,("\"/\" P. \"test\" == \"/test\"", property $ "/" P. "test" == "/test") - ,("\"home\" P. \"bob\" == \"home/bob\"", property $ "home" P. "bob" == "home/bob") - ,("\"x:\" P. \"foo\" == \"x:/foo\"", property $ "x:" P. "foo" == "x:/foo") - ,("\"C:\\\\foo\" W. \"bar\" == \"C:\\\\foo\\\\bar\"", property $ "C:\\foo" W. "bar" == "C:\\foo\\bar") - ,("\"home\" W. \"bob\" == \"home\\\\bob\"", property $ "home" W. "bob" == "home\\bob") - ,("\"home\" P. \"/bob\" == \"/bob\"", property $ "home" P. "/bob" == "/bob") - ,("\"home\" W. \"C:\\\\bob\" == \"C:\\\\bob\"", property $ "home" W. "C:\\bob" == "C:\\bob") - ,("\"home\" W. \"/bob\" == \"/bob\"", property $ "home" W. "/bob" == "/bob") - ,("\"home\" W. \"\\\\bob\" == \"\\\\bob\"", property $ "home" W. "\\bob" == "\\bob") - ,("\"C:\\\\home\" W. \"\\\\bob\" == \"\\\\bob\"", property $ "C:\\home" W. "\\bob" == "\\bob") - ,("\"D:\\\\foo\" W. \"C:bar\" == \"C:bar\"", property $ "D:\\foo" W. "C:bar" == "C:bar") - ,("\"C:\\\\foo\" W. \"C:bar\" == \"C:bar\"", property $ "C:\\foo" W. 
"C:bar" == "C:bar") - ,("P.splitPath \"/directory/file.ext\" == [\"/\", \"directory/\", \"file.ext\"]", property $ P.splitPath "/directory/file.ext" == ["/", "directory/", "file.ext"]) - ,("W.splitPath \"/directory/file.ext\" == [\"/\", \"directory/\", \"file.ext\"]", property $ W.splitPath "/directory/file.ext" == ["/", "directory/", "file.ext"]) - ,("concat (P.splitPath x) == x", property $ \(QFilePath x) -> concat (P.splitPath x) == x) - ,("concat (W.splitPath x) == x", property $ \(QFilePath x) -> concat (W.splitPath x) == x) - ,("P.splitPath \"test//item/\" == [\"test//\", \"item/\"]", property $ P.splitPath "test//item/" == ["test//", "item/"]) - ,("W.splitPath \"test//item/\" == [\"test//\", \"item/\"]", property $ W.splitPath "test//item/" == ["test//", "item/"]) - ,("P.splitPath \"test/item/file\" == [\"test/\", \"item/\", \"file\"]", property $ P.splitPath "test/item/file" == ["test/", "item/", "file"]) - ,("W.splitPath \"test/item/file\" == [\"test/\", \"item/\", \"file\"]", property $ W.splitPath "test/item/file" == ["test/", "item/", "file"]) - ,("P.splitPath \"\" == []", property $ P.splitPath "" == []) - ,("W.splitPath \"\" == []", property $ W.splitPath "" == []) - ,("W.splitPath \"c:\\\\test\\\\path\" == [\"c:\\\\\", \"test\\\\\", \"path\"]", property $ W.splitPath "c:\\test\\path" == ["c:\\", "test\\", "path"]) - ,("P.splitPath \"/file/test\" == [\"/\", \"file/\", \"test\"]", property $ P.splitPath "/file/test" == ["/", "file/", "test"]) - ,("P.splitDirectories \"/directory/file.ext\" == [\"/\", \"directory\", \"file.ext\"]", property $ P.splitDirectories "/directory/file.ext" == ["/", "directory", "file.ext"]) - ,("W.splitDirectories \"/directory/file.ext\" == [\"/\", \"directory\", \"file.ext\"]", property $ W.splitDirectories "/directory/file.ext" == ["/", "directory", "file.ext"]) - ,("P.splitDirectories \"test/file\" == [\"test\", \"file\"]", property $ P.splitDirectories "test/file" == ["test", "file"]) - ,("W.splitDirectories \"test/file\" 
== [\"test\", \"file\"]", property $ W.splitDirectories "test/file" == ["test", "file"]) - ,("P.splitDirectories \"/test/file\" == [\"/\", \"test\", \"file\"]", property $ P.splitDirectories "/test/file" == ["/", "test", "file"]) - ,("W.splitDirectories \"/test/file\" == [\"/\", \"test\", \"file\"]", property $ W.splitDirectories "/test/file" == ["/", "test", "file"]) - ,("W.splitDirectories \"C:\\\\test\\\\file\" == [\"C:\\\\\", \"test\", \"file\"]", property $ W.splitDirectories "C:\\test\\file" == ["C:\\", "test", "file"]) - ,("P.joinPath (P.splitDirectories x) `P.equalFilePath` x", property $ \(QFilePathValidP x) -> P.joinPath (P.splitDirectories x) `P.equalFilePath` x) - ,("W.joinPath (W.splitDirectories x) `W.equalFilePath` x", property $ \(QFilePathValidW x) -> W.joinPath (W.splitDirectories x) `W.equalFilePath` x) - ,("P.splitDirectories \"\" == []", property $ P.splitDirectories "" == []) - ,("W.splitDirectories \"\" == []", property $ W.splitDirectories "" == []) - ,("W.splitDirectories \"C:\\\\test\\\\\\\\\\\\file\" == [\"C:\\\\\", \"test\", \"file\"]", property $ W.splitDirectories "C:\\test\\\\\\file" == ["C:\\", "test", "file"]) - ,("P.splitDirectories \"/test///file\" == [\"/\", \"test\", \"file\"]", property $ P.splitDirectories "/test///file" == ["/", "test", "file"]) - ,("W.splitDirectories \"/test///file\" == [\"/\", \"test\", \"file\"]", property $ W.splitDirectories "/test///file" == ["/", "test", "file"]) - ,("P.joinPath [\"/\", \"directory/\", \"file.ext\"] == \"/directory/file.ext\"", property $ P.joinPath ["/", "directory/", "file.ext"] == "/directory/file.ext") - ,("W.joinPath [\"/\", \"directory/\", \"file.ext\"] == \"/directory/file.ext\"", property $ W.joinPath ["/", "directory/", "file.ext"] == "/directory/file.ext") - ,("P.joinPath (P.splitPath x) == x", property $ \(QFilePathValidP x) -> P.joinPath (P.splitPath x) == x) - ,("W.joinPath (W.splitPath x) == x", property $ \(QFilePathValidW x) -> W.joinPath (W.splitPath x) == x) - 
,("P.joinPath [] == \"\"", property $ P.joinPath [] == "") - ,("W.joinPath [] == \"\"", property $ W.joinPath [] == "") - ,("P.joinPath [\"test\", \"file\", \"path\"] == \"test/file/path\"", property $ P.joinPath ["test", "file", "path"] == "test/file/path") - ,("x == y ==> P.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> x == y ==> P.equalFilePath x y) - ,("x == y ==> W.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> x == y ==> W.equalFilePath x y) - ,("P.normalise x == P.normalise y ==> P.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> P.normalise x == P.normalise y ==> P.equalFilePath x y) - ,("W.normalise x == W.normalise y ==> W.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> W.normalise x == W.normalise y ==> W.equalFilePath x y) - ,("P.equalFilePath \"foo\" \"foo/\"", property $ P.equalFilePath "foo" "foo/") - ,("W.equalFilePath \"foo\" \"foo/\"", property $ W.equalFilePath "foo" "foo/") - ,("not (P.equalFilePath \"foo\" \"/foo\")", property $ not (P.equalFilePath "foo" "/foo")) - ,("not (W.equalFilePath \"foo\" \"/foo\")", property $ not (W.equalFilePath "foo" "/foo")) - ,("not (P.equalFilePath \"foo\" \"FOO\")", property $ not (P.equalFilePath "foo" "FOO")) - ,("W.equalFilePath \"foo\" \"FOO\"", property $ W.equalFilePath "foo" "FOO") - ,("not (W.equalFilePath \"C:\" \"C:/\")", property $ not (W.equalFilePath "C:" "C:/")) - ,("P.makeRelative \"/directory\" \"/directory/file.ext\" == \"file.ext\"", property $ P.makeRelative "/directory" "/directory/file.ext" == "file.ext") - ,("W.makeRelative \"/directory\" \"/directory/file.ext\" == \"file.ext\"", property $ W.makeRelative "/directory" "/directory/file.ext" == "file.ext") - ,("P.makeRelative (P.takeDirectory x) x `P.equalFilePath` P.takeFileName x", property $ \(QFilePathValidP x) -> P.makeRelative (P.takeDirectory x) x `P.equalFilePath` P.takeFileName x) - ,("W.makeRelative (W.takeDirectory x) x `W.equalFilePath` W.takeFileName x", property $ 
\(QFilePathValidW x) -> W.makeRelative (W.takeDirectory x) x `W.equalFilePath` W.takeFileName x) - ,("P.makeRelative x x == \".\"", property $ \(QFilePath x) -> P.makeRelative x x == ".") - ,("W.makeRelative x x == \".\"", property $ \(QFilePath x) -> W.makeRelative x x == ".") - ,("P.equalFilePath x y || (P.isRelative x && P.makeRelative y x == x) || P.equalFilePath (y P. P.makeRelative y x) x", property $ \(QFilePathValidP x) (QFilePathValidP y) -> P.equalFilePath x y || (P.isRelative x && P.makeRelative y x == x) || P.equalFilePath (y P. P.makeRelative y x) x) - ,("W.equalFilePath x y || (W.isRelative x && W.makeRelative y x == x) || W.equalFilePath (y W. W.makeRelative y x) x", property $ \(QFilePathValidW x) (QFilePathValidW y) -> W.equalFilePath x y || (W.isRelative x && W.makeRelative y x == x) || W.equalFilePath (y W. W.makeRelative y x) x) - ,("W.makeRelative \"C:\\\\Home\" \"c:\\\\home\\\\bob\" == \"bob\"", property $ W.makeRelative "C:\\Home" "c:\\home\\bob" == "bob") - ,("W.makeRelative \"C:\\\\Home\" \"c:/home/bob\" == \"bob\"", property $ W.makeRelative "C:\\Home" "c:/home/bob" == "bob") - ,("W.makeRelative \"C:\\\\Home\" \"D:\\\\Home\\\\Bob\" == \"D:\\\\Home\\\\Bob\"", property $ W.makeRelative "C:\\Home" "D:\\Home\\Bob" == "D:\\Home\\Bob") - ,("W.makeRelative \"C:\\\\Home\" \"C:Home\\\\Bob\" == \"C:Home\\\\Bob\"", property $ W.makeRelative "C:\\Home" "C:Home\\Bob" == "C:Home\\Bob") - ,("W.makeRelative \"/Home\" \"/home/bob\" == \"bob\"", property $ W.makeRelative "/Home" "/home/bob" == "bob") - ,("W.makeRelative \"/\" \"//\" == \"//\"", property $ W.makeRelative "/" "//" == "//") - ,("P.makeRelative \"/Home\" \"/home/bob\" == \"/home/bob\"", property $ P.makeRelative "/Home" "/home/bob" == "/home/bob") - ,("P.makeRelative \"/home/\" \"/home/bob/foo/bar\" == \"bob/foo/bar\"", property $ P.makeRelative "/home/" "/home/bob/foo/bar" == "bob/foo/bar") - ,("P.makeRelative \"/fred\" \"bob\" == \"bob\"", property $ P.makeRelative "/fred" "bob" == "bob") - 
,("P.makeRelative \"/file/test\" \"/file/test/fred\" == \"fred\"", property $ P.makeRelative "/file/test" "/file/test/fred" == "fred") - ,("P.makeRelative \"/file/test\" \"/file/test/fred/\" == \"fred/\"", property $ P.makeRelative "/file/test" "/file/test/fred/" == "fred/") - ,("P.makeRelative \"some/path\" \"some/path/a/b/c\" == \"a/b/c\"", property $ P.makeRelative "some/path" "some/path/a/b/c" == "a/b/c") - ,("P.normalise \"/file/\\\\test////\" == \"/file/\\\\test/\"", property $ P.normalise "/file/\\test////" == "/file/\\test/") - ,("P.normalise \"/file/./test\" == \"/file/test\"", property $ P.normalise "/file/./test" == "/file/test") - ,("P.normalise \"/test/file/../bob/fred/\" == \"/test/file/../bob/fred/\"", property $ P.normalise "/test/file/../bob/fred/" == "/test/file/../bob/fred/") - ,("P.normalise \"../bob/fred/\" == \"../bob/fred/\"", property $ P.normalise "../bob/fred/" == "../bob/fred/") - ,("P.normalise \"./bob/fred/\" == \"bob/fred/\"", property $ P.normalise "./bob/fred/" == "bob/fred/") - ,("W.normalise \"c:\\\\file/bob\\\\\" == \"C:\\\\file\\\\bob\\\\\"", property $ W.normalise "c:\\file/bob\\" == "C:\\file\\bob\\") - ,("W.normalise \"c:\\\\\" == \"C:\\\\\"", property $ W.normalise "c:\\" == "C:\\") - ,("W.normalise \"C:.\\\\\" == \"C:\"", property $ W.normalise "C:.\\" == "C:") - ,("W.normalise \"\\\\\\\\server\\\\test\" == \"\\\\\\\\server\\\\test\"", property $ W.normalise "\\\\server\\test" == "\\\\server\\test") - ,("W.normalise \"//server/test\" == \"\\\\\\\\server\\\\test\"", property $ W.normalise "//server/test" == "\\\\server\\test") - ,("W.normalise \"c:/file\" == \"C:\\\\file\"", property $ W.normalise "c:/file" == "C:\\file") - ,("W.normalise \"/file\" == \"\\\\file\"", property $ W.normalise "/file" == "\\file") - ,("W.normalise \"\\\\\" == \"\\\\\"", property $ W.normalise "\\" == "\\") - ,("W.normalise \"/./\" == \"\\\\\"", property $ W.normalise "/./" == "\\") - ,("P.normalise \".\" == \".\"", property $ P.normalise "." 
== ".") - ,("W.normalise \".\" == \".\"", property $ W.normalise "." == ".") - ,("P.normalise \"./\" == \"./\"", property $ P.normalise "./" == "./") - ,("P.normalise \"./.\" == \"./\"", property $ P.normalise "./." == "./") - ,("P.normalise \"/./\" == \"/\"", property $ P.normalise "/./" == "/") - ,("P.normalise \"/\" == \"/\"", property $ P.normalise "/" == "/") - ,("P.normalise \"bob/fred/.\" == \"bob/fred/\"", property $ P.normalise "bob/fred/." == "bob/fred/") - ,("P.normalise \"//home\" == \"/home\"", property $ P.normalise "//home" == "/home") - ,("P.isValid \"\" == False", property $ P.isValid "" == False) - ,("W.isValid \"\" == False", property $ W.isValid "" == False) - ,("P.isValid \"\\0\" == False", property $ P.isValid "\0" == False) - ,("W.isValid \"\\0\" == False", property $ W.isValid "\0" == False) - ,("P.isValid \"/random_ path:*\" == True", property $ P.isValid "/random_ path:*" == True) - ,("P.isValid x == not (null x)", property $ \(QFilePath x) -> P.isValid x == not (null x)) - ,("W.isValid \"c:\\\\test\" == True", property $ W.isValid "c:\\test" == True) - ,("W.isValid \"c:\\\\test:of_test\" == False", property $ W.isValid "c:\\test:of_test" == False) - ,("W.isValid \"test*\" == False", property $ W.isValid "test*" == False) - ,("W.isValid \"c:\\\\test\\\\nul\" == False", property $ W.isValid "c:\\test\\nul" == False) - ,("W.isValid \"c:\\\\test\\\\prn.txt\" == False", property $ W.isValid "c:\\test\\prn.txt" == False) - ,("W.isValid \"c:\\\\nul\\\\file\" == False", property $ W.isValid "c:\\nul\\file" == False) - ,("W.isValid \"\\\\\\\\\" == False", property $ W.isValid "\\\\" == False) - ,("W.isValid \"\\\\\\\\\\\\foo\" == False", property $ W.isValid "\\\\\\foo" == False) - ,("W.isValid \"\\\\\\\\?\\\\D:file\" == False", property $ W.isValid "\\\\?\\D:file" == False) - ,("W.isValid \"foo\\tbar\" == False", property $ W.isValid "foo\tbar" == False) - ,("W.isValid \"nul .txt\" == False", property $ W.isValid "nul .txt" == False) - 
,("W.isValid \" nul.txt\" == True", property $ W.isValid " nul.txt" == True) - ,("P.isValid (P.makeValid x)", property $ \(QFilePath x) -> P.isValid (P.makeValid x)) - ,("W.isValid (W.makeValid x)", property $ \(QFilePath x) -> W.isValid (W.makeValid x)) - ,("P.isValid x ==> P.makeValid x == x", property $ \(QFilePath x) -> P.isValid x ==> P.makeValid x == x) - ,("W.isValid x ==> W.makeValid x == x", property $ \(QFilePath x) -> W.isValid x ==> W.makeValid x == x) - ,("P.makeValid \"\" == \"_\"", property $ P.makeValid "" == "_") - ,("W.makeValid \"\" == \"_\"", property $ W.makeValid "" == "_") - ,("P.makeValid \"file\\0name\" == \"file_name\"", property $ P.makeValid "file\0name" == "file_name") - ,("W.makeValid \"file\\0name\" == \"file_name\"", property $ W.makeValid "file\0name" == "file_name") - ,("W.makeValid \"c:\\\\already\\\\/valid\" == \"c:\\\\already\\\\/valid\"", property $ W.makeValid "c:\\already\\/valid" == "c:\\already\\/valid") - ,("W.makeValid \"c:\\\\test:of_test\" == \"c:\\\\test_of_test\"", property $ W.makeValid "c:\\test:of_test" == "c:\\test_of_test") - ,("W.makeValid \"test*\" == \"test_\"", property $ W.makeValid "test*" == "test_") - ,("W.makeValid \"c:\\\\test\\\\nul\" == \"c:\\\\test\\\\nul_\"", property $ W.makeValid "c:\\test\\nul" == "c:\\test\\nul_") - ,("W.makeValid \"c:\\\\test\\\\prn.txt\" == \"c:\\\\test\\\\prn_.txt\"", property $ W.makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt") - ,("W.makeValid \"c:\\\\test/prn.txt\" == \"c:\\\\test/prn_.txt\"", property $ W.makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt") - ,("W.makeValid \"c:\\\\nul\\\\file\" == \"c:\\\\nul_\\\\file\"", property $ W.makeValid "c:\\nul\\file" == "c:\\nul_\\file") - ,("W.makeValid \"\\\\\\\\\\\\foo\" == \"\\\\\\\\drive\"", property $ W.makeValid "\\\\\\foo" == "\\\\drive") - ,("W.makeValid \"\\\\\\\\?\\\\D:file\" == \"\\\\\\\\?\\\\D:\\\\file\"", property $ W.makeValid "\\\\?\\D:file" == "\\\\?\\D:\\file") - ,("W.makeValid \"nul .txt\" == \"nul 
_.txt\"", property $ W.makeValid "nul .txt" == "nul _.txt") - ,("W.isRelative \"path\\\\test\" == True", property $ W.isRelative "path\\test" == True) - ,("W.isRelative \"c:\\\\test\" == False", property $ W.isRelative "c:\\test" == False) - ,("W.isRelative \"c:test\" == True", property $ W.isRelative "c:test" == True) - ,("W.isRelative \"c:\\\\\" == False", property $ W.isRelative "c:\\" == False) - ,("W.isRelative \"c:/\" == False", property $ W.isRelative "c:/" == False) - ,("W.isRelative \"c:\" == True", property $ W.isRelative "c:" == True) - ,("W.isRelative \"\\\\\\\\foo\" == False", property $ W.isRelative "\\\\foo" == False) - ,("W.isRelative \"\\\\\\\\?\\\\foo\" == False", property $ W.isRelative "\\\\?\\foo" == False) - ,("W.isRelative \"\\\\\\\\?\\\\UNC\\\\foo\" == False", property $ W.isRelative "\\\\?\\UNC\\foo" == False) - ,("W.isRelative \"/foo\" == True", property $ W.isRelative "/foo" == True) - ,("W.isRelative \"\\\\foo\" == True", property $ W.isRelative "\\foo" == True) - ,("P.isRelative \"test/path\" == True", property $ P.isRelative "test/path" == True) - ,("P.isRelative \"/test\" == False", property $ P.isRelative "/test" == False) - ,("P.isRelative \"/\" == False", property $ P.isRelative "/" == False) - ,("P.isAbsolute x == not (P.isRelative x)", property $ \(QFilePath x) -> P.isAbsolute x == not (P.isRelative x)) - ,("W.isAbsolute x == not (W.isRelative x)", property $ \(QFilePath x) -> W.isAbsolute x == not (W.isRelative x)) - ] diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/tests/TestUtil.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/tests/TestUtil.hs deleted file mode 100644 index b237acd99e..0000000000 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.2.1/tests/TestUtil.hs +++ /dev/null @@ -1,52 +0,0 @@ - -module TestUtil( - (==>), QFilePath(..), QFilePathValidW(..), QFilePathValidP(..), - module Test.QuickCheck, - module Data.List, - module Data.Maybe - ) where - -import 
Test.QuickCheck hiding ((==>)) -import Data.List -import Data.Maybe -import Control.Monad -import qualified System.FilePath.Windows as W -import qualified System.FilePath.Posix as P - -infixr 0 ==> -a ==> b = not a || b - - -newtype QFilePathValidW = QFilePathValidW FilePath deriving Show - -instance Arbitrary QFilePathValidW where - arbitrary = fmap (QFilePathValidW . W.makeValid) arbitraryFilePath - shrink (QFilePathValidW x) = shrinkValid QFilePathValidW W.makeValid x - -newtype QFilePathValidP = QFilePathValidP FilePath deriving Show - -instance Arbitrary QFilePathValidP where - arbitrary = fmap (QFilePathValidP . P.makeValid) arbitraryFilePath - shrink (QFilePathValidP x) = shrinkValid QFilePathValidP P.makeValid x - -newtype QFilePath = QFilePath FilePath deriving Show - -instance Arbitrary QFilePath where - arbitrary = fmap QFilePath arbitraryFilePath - shrink (QFilePath x) = shrinkValid QFilePath id x - - --- | Generate an arbitrary FilePath use a few special (interesting) characters. -arbitraryFilePath :: Gen FilePath -arbitraryFilePath = sized $ \n -> do - k <- choose (0,n) - replicateM k $ elements "?./:\\a ;_" - --- | Shrink, but also apply a validity function. Try and make shorter, or use more --- @a@ (since @a@ is pretty dull), but make sure you terminate even after valid. -shrinkValid :: (FilePath -> a) -> (FilePath -> FilePath) -> FilePath -> [a] -shrinkValid wrap valid o = - [ wrap y - | y <- map valid $ shrinkList (\x -> ['a' | x /= 'a']) o - , length y < length o || (length y == length o && countA y > countA o)] - where countA = length . 
filter (== 'a') diff --git a/test/integration/tests/mutable-deps/files/files.cabal b/test/integration/tests/mutable-deps/files/files.cabal deleted file mode 100644 index cdd7a98a9a..0000000000 --- a/test/integration/tests/mutable-deps/files/files.cabal +++ /dev/null @@ -1,17 +0,0 @@ -name: files -version: 1.0.0 -build-type: Simple -cabal-version: >= 1.10 - -library - hs-source-dirs: src - exposed-modules: Files - build-depends: base - , filemanip - default-language: Haskell2010 - -executable test-exe - hs-source-dirs: app - main-is: Main.hs - build-depends: base, files - default-language: Haskell2010 \ No newline at end of file diff --git a/test/integration/tests/mutable-deps/files/src/Files.hs b/test/integration/tests/mutable-deps/files/src/Files.hs deleted file mode 100644 index 5e3452f0b5..0000000000 --- a/test/integration/tests/mutable-deps/files/src/Files.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Files where - -import System.FilePath.Glob - -allCFiles :: IO [FilePath] -allCFiles = namesMatching "*.c" diff --git a/test/integration/tests/mutable-deps/files/stack.yaml b/test/integration/tests/mutable-deps/files/stack.yaml deleted file mode 100644 index 69cc67d4a1..0000000000 --- a/test/integration/tests/mutable-deps/files/stack.yaml +++ /dev/null @@ -1,6 +0,0 @@ -resolver: lts-14.27 -packages: -- . 
-extra-deps: -- ./filepath-1.4.2.1 -- directory-1.3.6.0 diff --git a/test/integration/tests/nice-resolver-names/Main.hs b/test/integration/tests/nice-resolver-names/Main.hs deleted file mode 100644 index ad28abf98c..0000000000 --- a/test/integration/tests/nice-resolver-names/Main.hs +++ /dev/null @@ -1,18 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -import StackTest -import Control.Exception (throwIO) -import Data.Maybe (mapMaybe) -import Data.Foldable (for_) -import Data.List (stripPrefix) - -main :: IO () -main = do - for_ ["lts-14.27", "nightly-2018-01-01"] $ \snapshot -> do - stack ["init", "--force", "--resolver", snapshot] - str <- readFile "stack.yaml" - case mapMaybe (stripPrefix "resolver: ") $ lines str of - [x] -> - if filter (/= '\r') x == snapshot - then pure () - else error $ "Mismatch: " ++ show (snapshot, x) - _ -> error $ "Wrong number of resolvers: " ++ show str diff --git a/test/integration/tests/nice-resolver-names/files/.gitignore b/test/integration/tests/nice-resolver-names/files/.gitignore deleted file mode 100644 index 66f418505d..0000000000 --- a/test/integration/tests/nice-resolver-names/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -stack.yaml -unimportant.cabal diff --git a/test/integration/tests/nice-resolver-names/files/package.yaml b/test/integration/tests/nice-resolver-names/files/package.yaml deleted file mode 100644 index 4b60802c43..0000000000 --- a/test/integration/tests/nice-resolver-names/files/package.yaml +++ /dev/null @@ -1,3 +0,0 @@ -name: unimportant -version: 0 -library: {} diff --git a/test/integration/tests/no-rerun-tests/Main.hs b/test/integration/tests/no-rerun-tests/Main.hs deleted file mode 100644 index 39d125ad1a..0000000000 --- a/test/integration/tests/no-rerun-tests/Main.hs +++ /dev/null @@ -1,13 +0,0 @@ -import StackTest -import System.Directory -import Control.Monad - -main :: IO () -main = do - stack ["test"] - exists1 <- doesFileExist "foo" - unless exists1 $ error "exists1 should be True" - removeFile "foo" 
- stack ["test", "--no-rerun-tests"] - exists2 <- doesFileExist "foo" - when exists2 $ error "exists2 should be False" diff --git a/test/integration/tests/no-rerun-tests/files/.gitignore b/test/integration/tests/no-rerun-tests/files/.gitignore deleted file mode 100644 index 04c6af385a..0000000000 --- a/test/integration/tests/no-rerun-tests/files/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -.stack-work/ -files.cabal -*~ -foo diff --git a/test/integration/tests/no-rerun-tests/files/package.yaml b/test/integration/tests/no-rerun-tests/files/package.yaml deleted file mode 100644 index f51d5ebe15..0000000000 --- a/test/integration/tests/no-rerun-tests/files/package.yaml +++ /dev/null @@ -1,12 +0,0 @@ -name: files - -dependencies: -- base >= 4.7 && < 5 - -library: - source-dirs: src - -tests: - test: - main: Spec.hs - source-dirs: test diff --git a/test/integration/tests/no-rerun-tests/files/stack.yaml b/test/integration/tests/no-rerun-tests/files/stack.yaml deleted file mode 100644 index 14f23e9aa6..0000000000 --- a/test/integration/tests/no-rerun-tests/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.6.5 diff --git a/test/integration/tests/no-rerun-tests/files/test/Spec.hs b/test/integration/tests/no-rerun-tests/files/test/Spec.hs deleted file mode 100644 index d030d26e0c..0000000000 --- a/test/integration/tests/no-rerun-tests/files/test/Spec.hs +++ /dev/null @@ -1,2 +0,0 @@ -main :: IO () -main = writeFile "foo" "" diff --git a/test/integration/tests/override-compiler/Main.hs b/test/integration/tests/override-compiler/Main.hs deleted file mode 100644 index 3e78436ed4..0000000000 --- a/test/integration/tests/override-compiler/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -import StackTest -import Control.Monad (unless) - -main :: IO () -main = stackCheckStdout ["exec", "--", "ghc", "--numeric-version"] $ \ver -> - -- get rid of the newline character - unless (concat (lines ver) == "8.6.5") $ error $ "Invalid version: " ++ show ver diff --git 
a/test/integration/tests/override-compiler/files/stack.yaml b/test/integration/tests/override-compiler/files/stack.yaml deleted file mode 100644 index 9af4ffcf4e..0000000000 --- a/test/integration/tests/override-compiler/files/stack.yaml +++ /dev/null @@ -1,3 +0,0 @@ -resolver: lts-13.10 -compiler: ghc-8.6.5 -packages: [] diff --git a/test/integration/tests/proper-rebuilds/Main.hs b/test/integration/tests/proper-rebuilds/Main.hs deleted file mode 100644 index 643f575e9a..0000000000 --- a/test/integration/tests/proper-rebuilds/Main.hs +++ /dev/null @@ -1,21 +0,0 @@ -import Control.Monad (unless, when) -import Data.List (isInfixOf) -import StackTest -import System.Directory - -main :: IO () -main = do - let expectRecompilation stderr = - unless ("> build" `isInfixOf` stderr) $ - error "package recompilation was expected" - expectNoRecompilation stderr = - when ("> build" `isInfixOf` stderr) $ - error "package recompilation was not expected" - copyFile "src/Lib.hs.v1" "src/Lib.hs" - stackCheckStderr ["build"] expectRecompilation - stackCheckStderr ["build" , "--profile"] expectRecompilation - stackCheckStderr ["build" , "--profile"] expectNoRecompilation - -- changing source file to trigger recompilation - copyFile "src/Lib.hs.v2" "src/Lib.hs" - stackCheckStderr ["build" , "--profile"] expectRecompilation - stackCheckStderr ["build"] expectRecompilation diff --git a/test/integration/tests/proper-rebuilds/files/.gitignore b/test/integration/tests/proper-rebuilds/files/.gitignore deleted file mode 100644 index 25dffca60e..0000000000 --- a/test/integration/tests/proper-rebuilds/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -src/Lib.hs diff --git a/test/integration/tests/proper-rebuilds/files/app/Main.hs b/test/integration/tests/proper-rebuilds/files/app/Main.hs deleted file mode 100644 index a2fa21e3ac..0000000000 --- a/test/integration/tests/proper-rebuilds/files/app/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Main where - -import Lib - -main = do - putStrLn $ "Sample 
strings: " ++ show someStrings diff --git a/test/integration/tests/proper-rebuilds/files/files.cabal b/test/integration/tests/proper-rebuilds/files/files.cabal deleted file mode 100644 index b04858a5fd..0000000000 --- a/test/integration/tests/proper-rebuilds/files/files.cabal +++ /dev/null @@ -1,17 +0,0 @@ -name: files -version: 1.0.0 -build-type: Simple -cabal-version: >= 1.10 - -library - hs-source-dirs: src - exposed-modules: Lib - build-depends: base - default-language: Haskell2010 - -executable test-exe - hs-source-dirs: app - main-is: Main.hs - ghc-options: -rtsopts - build-depends: base, files - default-language: Haskell2010 \ No newline at end of file diff --git a/test/integration/tests/proper-rebuilds/files/src/Lib.hs.v1 b/test/integration/tests/proper-rebuilds/files/src/Lib.hs.v1 deleted file mode 100644 index fc0ad60719..0000000000 --- a/test/integration/tests/proper-rebuilds/files/src/Lib.hs.v1 +++ /dev/null @@ -1,4 +0,0 @@ -module Lib where - -someStrings :: [String] -someStrings = ["Hello", "world!"] diff --git a/test/integration/tests/proper-rebuilds/files/src/Lib.hs.v2 b/test/integration/tests/proper-rebuilds/files/src/Lib.hs.v2 deleted file mode 100644 index 59c5f8c548..0000000000 --- a/test/integration/tests/proper-rebuilds/files/src/Lib.hs.v2 +++ /dev/null @@ -1,4 +0,0 @@ -module Lib where - -someStrings :: [String] -someStrings = ["Hello", "other", "world!"] diff --git a/test/integration/tests/proper-rebuilds/files/stack.yaml b/test/integration/tests/proper-rebuilds/files/stack.yaml deleted file mode 100644 index 14f23e9aa6..0000000000 --- a/test/integration/tests/proper-rebuilds/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.6.5 diff --git a/test/integration/tests/relative-script-snapshots/Main.hs b/test/integration/tests/relative-script-snapshots/Main.hs deleted file mode 100644 index 0a4044c6af..0000000000 --- a/test/integration/tests/relative-script-snapshots/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: 
IO () -main = stack ["subdir/script.hs"] diff --git a/test/integration/tests/relative-script-snapshots/files/subdir/script.hs b/test/integration/tests/relative-script-snapshots/files/subdir/script.hs deleted file mode 100644 index 2858fcedcf..0000000000 --- a/test/integration/tests/relative-script-snapshots/files/subdir/script.hs +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env stack --- stack --resolver snapshot.yaml script -import Acme.Missiles - -main :: IO () -main = launchMissiles diff --git a/test/integration/tests/relative-script-snapshots/files/subdir/snapshot.yaml b/test/integration/tests/relative-script-snapshots/files/subdir/snapshot.yaml deleted file mode 100644 index a67c7f6af0..0000000000 --- a/test/integration/tests/relative-script-snapshots/files/subdir/snapshot.yaml +++ /dev/null @@ -1,5 +0,0 @@ -resolver: ghc-8.6.5 -name: snapshot -packages: -- acme-missiles-0.3@rev:0 -- stm-2.5.0.0@rev:0 diff --git a/test/integration/tests/sanity/Main.hs b/test/integration/tests/sanity/Main.hs deleted file mode 100644 index 4315b3d938..0000000000 --- a/test/integration/tests/sanity/Main.hs +++ /dev/null @@ -1,25 +0,0 @@ -import StackTest -import Control.Monad (unless) -import System.Directory (doesFileExist) - -main :: IO () -main = do - stack ["--version"] - stack ["--help"] - removeDirIgnore "acme-missiles-0.2" - removeDirIgnore "acme-missiles-0.3" - stack ["unpack", "acme-missiles-0.2"] - stack ["unpack", "acme-missiles"] - stackErr ["command-does-not-exist"] - stackErr ["unpack", "invalid-package-name-"] - - -- When running outside of IntegrationSpec.hs, this will use the - -- stack.yaml from Stack itself - exists <- doesFileExist "../../../../../stack.yaml" - unless exists $ stackErr ["build"] - - doesNotExist "stack.yaml" - - if isWindows - then stack [defaultResolverArg, "exec", "./foo.bat"] - else stack [defaultResolverArg, "exec", "./foo.sh"] diff --git a/test/integration/tests/script-extra-dep/Main.hs b/test/integration/tests/script-extra-dep/Main.hs 
deleted file mode 100644 index c2e9c88b99..0000000000 --- a/test/integration/tests/script-extra-dep/Main.hs +++ /dev/null @@ -1,4 +0,0 @@ -import StackTest - -main :: IO () -main = stack ["script.hs"] diff --git a/test/integration/tests/script-extra-dep/files/script.hs b/test/integration/tests/script-extra-dep/files/script.hs deleted file mode 100644 index d0ec73965b..0000000000 --- a/test/integration/tests/script-extra-dep/files/script.hs +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env stack --- stack --resolver ghc-8.6.5 script --extra-dep acme-missiles-0.3@rev:0 --extra-dep stm-2.5.0.0@rev:0 -import Acme.Missiles - -main :: IO () -main = launchMissiles diff --git a/test/integration/tests/skip-unreachable-dirs/Main.hs b/test/integration/tests/skip-unreachable-dirs/Main.hs deleted file mode 100644 index 686071bc2d..0000000000 --- a/test/integration/tests/skip-unreachable-dirs/Main.hs +++ /dev/null @@ -1,11 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables #-} -import StackTest -import System.Directory -import Control.Exception (catch, IOException) - -main :: IO () -main = do - removeFileIgnore "stack.yaml" - createDirectory "unreachabledir" `catch` \(e :: IOException) -> pure () - setPermissions "unreachabledir" emptyPermissions - stack ["init"] diff --git a/test/integration/tests/skip-unreachable-dirs/files/.gitignore b/test/integration/tests/skip-unreachable-dirs/files/.gitignore deleted file mode 100644 index 684dbffa96..0000000000 --- a/test/integration/tests/skip-unreachable-dirs/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -stack.yaml diff --git a/test/integration/tests/skip-unreachable-dirs/files/foo.cabal b/test/integration/tests/skip-unreachable-dirs/files/foo.cabal deleted file mode 100644 index 625239b18d..0000000000 --- a/test/integration/tests/skip-unreachable-dirs/files/foo.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: foo -version: 0.0.0 -synopsis: foo -build-type: Simple -cabal-version: >=1.10 - -library - exposed-modules: Foo - build-depends: base - 
default-language: Haskell2010 diff --git a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/Main.hs b/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/Main.hs deleted file mode 100644 index d4efcc403c..0000000000 --- a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/Main.hs +++ /dev/null @@ -1,6 +0,0 @@ -import StackTest - -main :: IO () -main = do - stackErr ["build", "--stack-yaml", "as-extra-dep.yaml", "--dry-run"] - stack ["build", "--stack-yaml", "as-snapshot.yaml", "--dry-run"] diff --git a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/.gitignore b/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/.gitignore deleted file mode 100644 index 6a9f1098c7..0000000000 --- a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/unimportant.cabal diff --git a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/README.md b/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/README.md deleted file mode 100644 index 5952e88967..0000000000 --- a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/README.md +++ /dev/null @@ -1,18 +0,0 @@ -The test case here is weird enough to warrant an explanation. What we -_really_ want to test is whether building the lts-3.12 snapshot's -semigroupoids package with rev-1 works. See -https://github.com/fpco/stackage/issues/3185. However, that test -requires that we use an older GHC, and as Manny commented: - -> Having integration tests with old resolvers will cause them to fail -> on Linux distributions with GCC with PIE enabled by default (which -> is the latest versions of most distributions now), since older GHC -> versions don't support it. I'm not sure what we should do about -> this, since it obviously does make sense to be able to test against -> old snapshots sometimes. 
- -So I'm instead testing a totally different case here which repros the -same issue. If we use a custom snapshot with incompatible `stm` and -`async` versions, we want Stack to trust the build plan and allow a -`--dry-run` to succeed. But if we do this via `extra-deps`, we want it -to fail. diff --git a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/as-extra-dep.yaml b/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/as-extra-dep.yaml deleted file mode 100644 index 0fc3d73038..0000000000 --- a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/as-extra-dep.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: ghc-8.6.5 -extra-deps: -- async-2.1.1.1 -- stm-2.1.2.2 diff --git a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/as-snapshot.yaml b/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/as-snapshot.yaml deleted file mode 100644 index 6de0480e0e..0000000000 --- a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/as-snapshot.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: snapshot.yaml diff --git a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/package.yaml b/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/package.yaml deleted file mode 100644 index f403f2761f..0000000000 --- a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/package.yaml +++ /dev/null @@ -1,7 +0,0 @@ -name: unimportant - -dependencies: -- base -- async - -library: {} diff --git a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/snapshot.yaml b/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/snapshot.yaml deleted file mode 100644 index a2ec7e4f07..0000000000 --- a/test/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/snapshot.yaml +++ /dev/null @@ -1,5 +0,0 @@ -resolver: ghc-8.6.5 -name: hackage-revisions-are-annoying -packages: -- async-2.1.1.1 -- stm-2.1.2.2 diff --git 
a/test/integration/tests/upload/Main.hs b/test/integration/tests/upload/Main.hs deleted file mode 100644 index 2f1d67c276..0000000000 --- a/test/integration/tests/upload/Main.hs +++ /dev/null @@ -1,34 +0,0 @@ -import Control.Concurrent - -import StackTest - -import System.Directory (createDirectoryIfMissing, - getCurrentDirectory) -import System.Environment (getEnv, setEnv) -import System.FilePath (()) -import System.Process - -main :: IO () -main = - withFakeHackage $ do - stackRoot <- getEnv "STACK_ROOT" - -- Ensure there are credentials available for uploading - createDirectoryIfMissing True (stackRoot "upload") - writeFile - (stackRoot "upload" "credentials.json") - "{\"username\":\"fake\",\"password\":\"fake\"}" - stack ["upload", "."] - --- | Start a fake Hackage server to test the upload -withFakeHackage :: IO a -> IO a -withFakeHackage act = do - stackEnv <- stackExe - -- Build the dependencies for the fake server - stack $ withNetworkArgs ++ ["FakeHackageStart.hs"] - -- Start the fake server - withCreateProcess (proc stackEnv $ withNetworkArgs ++ ["FakeHackage.hs"]) $ \_ _ _ _ -> do - -- Wait for the fake server to start accepting requests - threadDelay 2000000 - act - where - withNetworkArgs = ["runghc", "--package", "network"] diff --git a/test/integration/tests/upload/files/FakeHackage.hs b/test/integration/tests/upload/files/FakeHackage.hs deleted file mode 100644 index eef4b9349b..0000000000 --- a/test/integration/tests/upload/files/FakeHackage.hs +++ /dev/null @@ -1,32 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - -import Control.Concurrent -import Control.Monad -import Network.Socket hiding (recv) -import Network.Socket.ByteString (recv, sendAll) -import System.Exit -import System.IO - --- | Fake server that always responds with HTTP OK -main = - withSocketsDo $ do - _ <- forkIO serve - -- Exit after a delay to ensure the process doesn't linger around - threadDelay 10000000 - exitSuccess - -serve :: IO () -serve = do - let hints = defaultHints 
{addrFlags = [AI_PASSIVE], addrSocketType = Stream} - (addr:_) <- getAddrInfo (Just hints) Nothing (Just "12415") - sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) - setSocketOption sock ReuseAddr 1 - bind sock (addrAddress addr) - listen sock 10 - forever $ do - (conn, _) <- accept sock - _ <- recv conn 1024 - sendAll - conn - "HTTP/1.1 200 OK\r\nContent-Length: 1\r\nContent-Type: text/plain\r\n\r\na" - shutdown conn ShutdownSend diff --git a/test/integration/tests/upload/files/Foo.hs b/test/integration/tests/upload/files/Foo.hs deleted file mode 100644 index efbf93bbde..0000000000 --- a/test/integration/tests/upload/files/Foo.hs +++ /dev/null @@ -1 +0,0 @@ -module Foo where diff --git a/test/integration/tests/upload/files/stack.yaml b/test/integration/tests/upload/files/stack.yaml deleted file mode 100644 index b7044f37cd..0000000000 --- a/test/integration/tests/upload/files/stack.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resolver: lts-14.27 -packages: - - . -hackage-base-url: http://localhost:12415/ diff --git a/test/integration/tests/upload/files/uploadtest.cabal b/test/integration/tests/upload/files/uploadtest.cabal deleted file mode 100644 index fbf051d525..0000000000 --- a/test/integration/tests/upload/files/uploadtest.cabal +++ /dev/null @@ -1,13 +0,0 @@ -build-type: Simple -name: uploadtest -version: 0.0.0 -category: test -maintainer: uploadtest@example.com -cabal-version: 1.20 -synopsis: Upload test -description: Upload test description -license: BSD3 -library - build-depends: base >= 4.7 && < 5 - exposed-modules: Foo - default-language: Haskell2010 diff --git a/test/integration/tests/variables-in-template-file-names/Main.hs b/test/integration/tests/variables-in-template-file-names/Main.hs deleted file mode 100644 index e2572ef0e0..0000000000 --- a/test/integration/tests/variables-in-template-file-names/Main.hs +++ /dev/null @@ -1,10 +0,0 @@ -import StackTest -import System.Directory -import Control.Monad (unless) - -main :: IO () 
-main = do - removeDirIgnore "somename" - stack ["new", "somename", "./template.hsfiles"] - exists <- doesFileExist "somename/somename.cabal" - unless exists $ error "does not exist" diff --git a/test/integration/tests/variables-in-template-file-names/files/.gitignore b/test/integration/tests/variables-in-template-file-names/files/.gitignore deleted file mode 100644 index 6d57d66dd6..0000000000 --- a/test/integration/tests/variables-in-template-file-names/files/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/somename/ diff --git a/test/integration/tests/watched-files/Main.hs b/test/integration/tests/watched-files/Main.hs deleted file mode 100644 index d683c93a0b..0000000000 --- a/test/integration/tests/watched-files/Main.hs +++ /dev/null @@ -1,14 +0,0 @@ -import StackTest -import Data.Foldable (for_) -import Control.Monad (unless) - -main :: IO () -main = for_ (words "foo bar baz bin") $ \x -> do - writeFile "some-text-file.txt" x - stackCheckStdout ["run"] $ \y -> - unless (x == y) $ error $ concat - [ "Expected: " - , show x - , "\nActual: " - , show y - ] diff --git a/test/integration/tests/watched-files/files/.gitignore b/test/integration/tests/watched-files/files/.gitignore deleted file mode 100644 index d09ee0af48..0000000000 --- a/test/integration/tests/watched-files/files/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -some-text-file.txt -*.cabal diff --git a/test/integration/tests/watched-files/files/Main.hs b/test/integration/tests/watched-files/files/Main.hs deleted file mode 100644 index a462eafd73..0000000000 --- a/test/integration/tests/watched-files/files/Main.hs +++ /dev/null @@ -1,9 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} -module Main where - -import Data.FileEmbed -import qualified Data.ByteString as B -import System.IO (stdout) - -main :: IO () -main = B.hPut stdout $(embedFile "some-text-file.txt") diff --git a/test/integration/tests/watched-files/files/package.yaml b/test/integration/tests/watched-files/files/package.yaml deleted file mode 100644 index 
6611f13029..0000000000 --- a/test/integration/tests/watched-files/files/package.yaml +++ /dev/null @@ -1,9 +0,0 @@ -name: watched-files -dependencies: -- base -- bytestring -- file-embed - -executables: - watched: - main: Main.hs diff --git a/test/integration/tests/watched-files/files/stack.yaml b/test/integration/tests/watched-files/files/stack.yaml deleted file mode 100644 index 785b1469c7..0000000000 --- a/test/integration/tests/watched-files/files/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: lts-14.27 diff --git a/test/integration/.gitignore b/tests/integration/.gitignore similarity index 100% rename from test/integration/.gitignore rename to tests/integration/.gitignore diff --git a/tests/integration/IntegrationSpec.hs b/tests/integration/IntegrationSpec.hs new file mode 100644 index 0000000000..a1a9a93add --- /dev/null +++ b/tests/integration/IntegrationSpec.hs @@ -0,0 +1,288 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +import Conduit + ( (.|), connect, filterC, filterMC, foldMapC, mapM_C + , runConduit, runConduitRes, runResourceT, sourceDirectory + , sourceDirectoryDeep, stderrC, withSourceFile + ) +import Data.List ( stripPrefix ) +import Options.Generic + ( ParseField, ParseRecord (..), defaultModifiers + , fieldNameModifier, firstLetter, getRecord + , parseRecordWithModifiers, shortNameModifier + ) +import RIO +import RIO.Char ( toLower ) +import RIO.Directory + ( canonicalizePath, copyFile, createDirectoryIfMissing + , doesFileExist, getAppUserDataDirectory + ) +import RIO.FilePath + ( (), (<.>), isPathSeparator, takeDirectory + , takeExtensions, takeFileName + ) +import RIO.List ( isInfixOf, partition ) +import qualified RIO.Map as Map +import RIO.Process + ( HasProcessContext (..), closed, findExecutable, proc + , runProcess, runProcess_, setStderr, setStdin, setStdout + , useHandleOpen, withModifyEnvVars, withWorkingDir + ) +import qualified RIO.Set as Set +import qualified RIO.Text as T +import RIO.Time ( 
defaultTimeLocale, diffUTCTime, formatTime, getCurrentTime ) +import System.Environment ( getExecutablePath, lookupEnv ) +import System.Info ( os ) +import System.PosixCompat.Files ( createSymbolicLink ) + +-- This code does not use a test framework so that we get direct +-- control of how the output is displayed. + +main :: IO () +main = runSimpleApp $ do + logInfo "Initiating Stack integration test running" + + options <- getRecord "Stack integration tests" + startTime <- getCurrentTime + results <- runApp options $ do + logInfo "Running with the following environment" + proc "env" [] runProcess_ + tests <- asks appTestDirs + let count = Set.size tests + loop !idx rest !accum = + case rest of + [] -> pure accum + next:rest' -> do + logInfo $ "Running integration test " + <> display idx + <> "/" + <> display count + <> ": " + <> fromString (takeFileName next) + res <- test next + loop (idx + 1) rest' (res <> accum) + + loop (1 :: Int) (Set.toList tests) mempty + finalTime <- getCurrentTime + let (successes, failures) = partition ((== ExitSuccess) . snd) + $ Map.toList results + let timeDiff = diffUTCTime finalTime startTime + let timeDiffStr = formatTime defaultTimeLocale "%H:%M:%S - total %s seconds" timeDiff + logInfo $ "Integration tests ran in : " <> fromString timeDiffStr + unless (null successes) $ do + logInfo "Successful tests:" + for_ successes $ \(x, _) -> logInfo $ "- " <> display x + logInfo "" + + if null failures + then logInfo "No failures!" + else do + logInfo "Failed tests:" + for_ failures $ \(x, ec) -> logInfo $ "- " <> display x <> " - " <> displayShow ec + exitFailure + + +data Options = Options + { optSpeed :: Maybe Speed + , optMatch :: Maybe String + , optNot :: [String] + } + deriving Generic + +instance ParseRecord Options where + parseRecord = parseRecordWithModifiers modifiers + where + optName = map toLower . drop 3 + modifiers = defaultModifiers { fieldNameModifier = optName + , shortNameModifier = firstLetter . 
optName + } + +data Speed = Fast | Normal | Superslow + deriving (Read, Generic) + +instance ParseField Speed + +exeExt :: String +exeExt = if isWindows then ".exe" else "" + +isWindows :: Bool +isWindows = os == "mingw32" + +isLinux :: Bool +isLinux = os == "linux" + +runApp :: Options -> RIO App a -> RIO SimpleApp a +runApp options inner = do + let speed = fromMaybe Normal $ optSpeed options + simpleApp <- ask + runghc <- findExecutable "runghc" >>= either throwIO pure + srcDir <- canonicalizePath "" + testsRoot <- canonicalizePath $ srcDir "tests/integration" + libdir <- canonicalizePath $ testsRoot "lib" + myPath <- liftIO getExecutablePath + + stack <- canonicalizePath $ takeDirectory myPath "stack" ++ exeExt + logInfo $ "Using Stack located at " <> fromString stack + proc stack ["--version"] runProcess_ + logInfo $ "Using runghc located at " <> fromString runghc + proc runghc ["--version"] runProcess_ + + let matchTest = case (optMatch options, optNot options) of + (Just str, _) -> (str `isInfixOf`) + (_, []) -> const True + (_, nl) -> \a -> all (\b -> not $ b `isInfixOf` a) nl + testDirs + <- runConduitRes + $ sourceDirectory (testsRoot "tests") + .| filterMC (liftIO . hasTest) + .| filterC matchTest + .| foldMapC Set.singleton + + let modifyEnvCommon + = Map.insert "SRC_DIR" (fromString srcDir) + . Map.insert "STACK_EXE" (fromString stack) + . Map.delete "GHC_PACKAGE_PATH" + . Map.insert "STACK_TEST_SPEED" + (case speed of + Superslow -> "SUPERSLOW" + _ -> "NORMAL") + . Map.fromList + . map (first T.toUpper) + . 
Map.toList + + case speed of + Fast -> do + let app = App + { appSimpleApp = simpleApp + , appRunghc = runghc + , appLibDir = libdir + , appSetupHome = id + , appTestDirs = testDirs + } + runRIO app $ withModifyEnvVars modifyEnvCommon inner + _ -> do + origStackRoot <- liftIO (lookupEnv "STACK_ROOT") >>= \case + Nothing -> getAppUserDataDirectory "stack" + Just x -> pure x + + logInfo "Initializing/updating the original Pantry store" + proc stack ["update"] runProcess_ + + pantryRoot <- canonicalizePath $ origStackRoot "pantry" + let modifyEnv + = Map.insert "PANTRY_ROOT" (fromString pantryRoot) + . modifyEnvCommon + + app = App + { appSimpleApp = simpleApp + , appRunghc = runghc + , appLibDir = libdir + , appSetupHome = \inner' -> withSystemTempDirectory "home" $ \newHome -> do + let newStackRoot = newHome ".stack" + createDirectoryIfMissing True newStackRoot + let modifyEnv' + = Map.insert "HOME" (fromString newHome) + . Map.insert "APPDATA" (fromString newHome) + . Map.insert "STACK_ROOT" (fromString newStackRoot) + writeFileBinary (newStackRoot "config.yaml") "system-ghc: true\ninstall-ghc: false\n" + withModifyEnvVars modifyEnv' inner' + , appTestDirs = testDirs + } + + runRIO app $ withModifyEnvVars modifyEnv inner + + +hasTest :: FilePath -> IO Bool +hasTest dir = doesFileExist $ dir "Main.hs" + +data App = App + { appRunghc :: !FilePath + , appLibDir :: !FilePath + , appSetupHome :: !(forall a. 
RIO App a -> RIO App a) + , appSimpleApp :: !SimpleApp + , appTestDirs :: !(Set FilePath) + } +simpleAppL :: Lens' App SimpleApp +simpleAppL = lens appSimpleApp (\x y -> x { appSimpleApp = y }) +instance HasLogFunc App where + logFuncL = simpleAppL.logFuncL +instance HasProcessContext App where + processContextL = simpleAppL.processContextL + +-- | Call 'appSetupHome' on the inner action +withHome :: RIO App a -> RIO App a +withHome inner = do + app <- ask + appSetupHome app inner + +test :: FilePath -- ^ test dir + -> RIO App (Map Text ExitCode) +test testDir = withDir $ \dir -> withHome $ do + runghc <- asks appRunghc + libDir <- asks appLibDir + let mainFile = testDir "Main.hs" + + copyTree (testDir "files") dir + + withSystemTempFile (name <.> "log") $ \logfp logh -> do + let args = + [ "-clear-package-db" + , "-global-package-db" + , "-i" ++ libDir + , mainFile + ] + args' = if isLinux + then + -- We seek to use lld as the linker on Linux, as it is much faster + -- than the default linker. The executable assumes lld is on the + -- PATH. + "--" : "-optl-fuse-ld=lld" : args + else + args + ec <- withWorkingDir dir $ + withModifyEnvVars (Map.insert "TEST_DIR" $ fromString testDir) $ + proc runghc args' $ + runProcess + . setStdin closed + . setStdout (useHandleOpen logh) + . setStderr (useHandleOpen logh) + hClose logh + + case ec of + ExitSuccess -> logInfo "Success!" 
+ _ -> do + logError "Failure, dumping log\n\n" + withSourceFile logfp $ \src -> + runConduit $ src .| stderrC + logError $ "\n\nEnd of log for " <> fromString name + pure $ Map.singleton (fromString name) ec + where + name = takeFileName testDir + withDir = withSystemTempDirectory ("stack-integration-" ++ name) + +copyTree :: MonadIO m => FilePath -> FilePath -> m () +copyTree src dst = + liftIO $ + runResourceT (sourceDirectoryDeep False src `connect` mapM_C go) + `catch` \(_ :: IOException) -> pure () + where + go srcfp = liftIO $ do + Just suffix <- pure $ stripPrefix src srcfp + let dstfp = dst stripHeadSeparator suffix + createDirectoryIfMissing True $ takeDirectory dstfp + -- copying yaml files so lock files won't get created in + -- the source directory + if takeFileName srcfp /= "package.yaml" && + (takeExtensions srcfp == ".yaml" || takeExtensions srcfp == ".yml") + then + copyFile srcfp dstfp + else + createSymbolicLink srcfp dstfp `catch` \(_ :: IOException) -> + copyFile srcfp dstfp -- for Windows + + stripHeadSeparator :: FilePath -> FilePath + stripHeadSeparator [] = [] + stripHeadSeparator fp@(x:xs) = if isPathSeparator x + then xs + else fp diff --git a/tests/integration/README.md b/tests/integration/README.md new file mode 100644 index 0000000000..42b430d9a5 --- /dev/null +++ b/tests/integration/README.md @@ -0,0 +1,48 @@ +# Stack Integration Tests + +This directory contains a bunch of integration tests for Stack. Each +directory inside the `tests` subdirectory represents a single +test. Each of those directories has: + +* A `Main.hs` file, which provides the script to be run +* A `files `directory, providing the working directory the script will be run + from. (If you have a test that does not require any specific working + directory, there may be no `files` directory.) + +It would be great to expand this file into a full tutorial, but for now, the +easiest way to get started with writing an integration test is to copy an +existing example. 
+ +## Running + +One simple way to run a single test is: + +* Change into the `files` directory +* Run the command `stack runghc -- -i../../../lib ../Main.hs` + +A more thorough way to run the tests is with command: + +~~~text +stack build --flag stack:integration-tests stack --exec stack-integration-test +~~~ + +Note that this command can take a _long_ time. It is also more thorough +than the quick command given above, as it will run each test with a +clean `STACK_ROOT`. + +On Linux, the `stack-integration-test` executable uses the `lld` linker and +expects it to be on the PATH. The integration tests complete significantly +quicker with `lld` than with the `ld.bfd` linker. + +## Helper scripts + +There are two helper scripts in this directory. Note that these may +not always work as anticipated, since some of the tests expect a clean +`STACK_ROOT`, and these scripts do not set that up. + +* `run-sort-tests.sh` will run all of the tests in the `tests` + directory, and move the successful ones into `tests-success`, and + the failing ones into `tests-fail`. It will keep the logs of failing + tests in `logs`. +* `run-single-test.sh` takes a single argument (the name of a test), + and runs just that test. 
diff --git a/tests/integration/lib/StackTest.hs b/tests/integration/lib/StackTest.hs new file mode 100644 index 0000000000..744d6f5c76 --- /dev/null +++ b/tests/integration/lib/StackTest.hs @@ -0,0 +1,324 @@ +module StackTest + ( run' + , run + , runShell + , runWithCwd + , stackExe + , stackSrc + , testDir + , stack' + , stack + , stackCleanFull + , stackIgnoreException + , stackErr + , stackStderr + , stackCheckStderr + , stackErrStderr + , runEx + , runEx' + , stackCheckStdout + , doesNotExist + , doesExist + , doesFileOrDirExist + , copy + , fileContentsMatch + , logInfo + , showProcessArgDebug + , exeExt + , isWindows + , isLinux + , getIsAlpine + , isARM + , isAarch64 + , isMacOSX + , defaultSnapshotArg + , removeFileIgnore + , removeDirIgnore + , withCwd + , withSourceDirectory + , superslow + ) where + +import Control.Monad ( unless, void, when ) +import Control.Exception + ( Exception (..), IOException, bracket_, catch + , throwIO + ) +import GHC.Stack ( HasCallStack ) +import System.Environment ( getEnv, lookupEnv ) +import System.Directory + ( copyFile, doesDirectoryExist, doesFileExist + , getCurrentDirectory, removeDirectoryRecursive, removeFile + , setCurrentDirectory + ) +import System.IO + ( hPutStr, hPutStrLn, stderr + ) +import System.IO.Error + ( isDoesNotExistError ) +import System.Process + ( CreateProcess (..), createProcess, proc + , readCreateProcessWithExitCode, readProcessWithExitCode + , shell, waitForProcess + ) +import System.Exit ( ExitCode (..) 
) +import System.Info ( arch, os ) + +run' :: HasCallStack => FilePath -> [String] -> IO ExitCode +run' cmd args = do + logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args) + (Nothing, Nothing, Nothing, ph) <- createProcess (proc cmd args) + waitForProcess ph + +run :: HasCallStack => FilePath -> [String] -> IO () +run cmd args = do + ec <- run' cmd args + unless (ec == ExitSuccess) $ + error $ "Exited with exit code: " ++ displayException ec + +runShell :: HasCallStack => String -> IO () +runShell cmd = do + logInfo $ "Running: " ++ cmd + (Nothing, Nothing, Nothing, ph) <- createProcess (shell cmd) + ec <- waitForProcess ph + unless (ec == ExitSuccess) $ + error $ "Exited with exit code: " ++ displayException ec + +runWithCwd :: HasCallStack => FilePath -> String -> [String] -> IO String +runWithCwd cwdPath cmd args = do + logInfo $ "Running: " ++ cmd + let cp = proc cmd args + (ec, stdoutStr, _) <- readCreateProcessWithExitCode (cp { cwd = Just cwdPath }) "" + unless (ec == ExitSuccess) $ + error $ "Exited with exit code: " ++ displayException ec + pure stdoutStr + +stackExe :: IO String +stackExe = getEnv "STACK_EXE" + +stackSrc :: IO String +stackSrc = getEnv "SRC_DIR" + +testDir :: IO String +testDir = getEnv "TEST_DIR" + +stack' :: HasCallStack => [String] -> IO ExitCode +stack' args = do + stackEnv <- stackExe + run' stackEnv args + +stack :: HasCallStack => [String] -> IO () +stack args = do + ec <- stack' args + unless (ec == ExitSuccess) $ + error $ "Exited with exit code: " ++ displayException ec + +-- Temporary workaround for Windows to ignore exceptions arising out of Windows +-- when we do stack clean. More info here: +-- https://github.com/commercialhaskell/stack/issues/4936 +stackCleanFull :: HasCallStack => IO () +stackCleanFull = stackIgnoreException ["clean", "--full"] + +-- Temporary workaround for Windows to ignore exceptions arising out of Windows +-- when we do stack clean. 
More info here: +-- https://github.com/commercialhaskell/stack/issues/4936 +stackIgnoreException :: HasCallStack => [String] -> IO () +stackIgnoreException args = + if isWindows + then void (stack' args) `catch` (\(_e :: IOException) -> pure ()) + else stack args + +stackErr :: HasCallStack => [String] -> IO () +stackErr args = do + ec <- stack' args + when (ec == ExitSuccess) $ error "stack was supposed to fail, but didn't" + +stackStderr :: HasCallStack => [String] -> IO (ExitCode, String) +stackStderr args = do + stackExe' <- stackExe + logInfo $ + "Running: " + ++ stackExe' + ++ " " + ++ unwords (map showProcessArgDebug args) + (ec, _, err) <- readProcessWithExitCode stackExe' args "" + hPutStr stderr err + pure (ec, err) + +-- | Run stack with arguments and apply a check to the resulting stderr output +-- if the process succeeded. +stackCheckStderr :: HasCallStack => [String] -> (String -> IO ()) -> IO () +stackCheckStderr args check = do + (ec, err) <- stackStderr args + if ec /= ExitSuccess + then error $ "Exited with exit code: " ++ displayException ec + else check err + +-- | Same as 'stackCheckStderr', but ensures that the Stack process +-- fails. +stackErrStderr :: HasCallStack => [String] -> (String -> IO ()) -> IO () +stackErrStderr args check = do + (ec, err) <- stackStderr args + if ec == ExitSuccess + then error "Stack process succeeded, but it shouldn't" + else check err + +runEx :: HasCallStack => FilePath -> String -> IO (ExitCode, String, String) +runEx cmd args = runEx' cmd $ words args + +runEx' :: HasCallStack => FilePath -> [String] -> IO (ExitCode, String, String) +runEx' cmd args = do + logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args) + (ec, out, err) <- readProcessWithExitCode cmd args "" + putStr out + hPutStr stderr err + pure (ec, out, err) + +-- | Run stack with arguments and apply a check to the resulting stdout output +-- if the process succeeded. 
+-- +-- Take care with newlines; if the output includes a newline character that +-- should not be there, use 'Data.List.Extra.trimEnd' to remove it. +stackCheckStdout :: HasCallStack => [String] -> (String -> IO ()) -> IO () +stackCheckStdout args check = do + stackExe' <- stackExe + (ec, out, _) <- runEx' stackExe' args + if ec /= ExitSuccess + then error $ "Exited with exit code: " ++ displayException ec + else check out + +doesNotExist :: HasCallStack => FilePath -> IO () +doesNotExist fp = do + logInfo $ "doesNotExist " ++ fp + exists <- doesFileOrDirExist fp + case exists of + (Right msg) -> error msg + (Left _) -> pure () + +doesExist :: HasCallStack => FilePath -> IO () +doesExist fp = do + logInfo $ "doesExist " ++ fp + exists <- doesFileOrDirExist fp + case exists of + (Right _) -> pure () + (Left _) -> error "No file or directory exists" + +doesFileOrDirExist :: HasCallStack => FilePath -> IO (Either () String) +doesFileOrDirExist fp = do + isFile <- doesFileExist fp + if isFile + then pure (Right ("File exists: " ++ fp)) + else do + isDir <- doesDirectoryExist fp + if isDir + then pure (Right ("Directory exists: " ++ fp)) + else pure (Left ()) + +copy :: HasCallStack => FilePath -> FilePath -> IO () +copy src dest = do + logInfo ("Copy " ++ show src ++ " to " ++ show dest) + System.Directory.copyFile src dest + +fileContentsMatch :: HasCallStack => FilePath -> FilePath -> IO () +fileContentsMatch f1 f2 = do + doesExist f1 + doesExist f2 + f1Contents <- readFile f1 + f2Contents <- readFile f2 + unless (f1Contents == f2Contents) $ + error ("contents do not match for " ++ show f1 ++ " " ++ show f2) + +logInfo :: String -> IO () +logInfo = hPutStrLn stderr + +-- TODO: use Stack's process running utilities? (better logging) +-- for now just copy+modifying this one from System.Process.Log + +-- | Show a process arg including speechmarks when necessary. Just for +-- debugging purposes, not functionally important. 
+showProcessArgDebug :: String -> String +showProcessArgDebug x + | any special x = show x + | otherwise = x + where + special '"' = True + special ' ' = True + special _ = False + +-- | Extension of executables +exeExt :: String +exeExt = if isWindows then ".exe" else "" + +-- | Is the OS Windows? +isWindows :: Bool +isWindows = os == "mingw32" + +isLinux :: Bool +isLinux = os == "linux" + +-- | Is the OS Alpine Linux? +getIsAlpine :: IO Bool +getIsAlpine = doesFileExist "/etc/alpine-release" + +-- | Is the architecture ARM? +isARM :: Bool +isARM = arch == "arm" + +-- | Is the architecture Aarch64? +isAarch64 :: Bool +isAarch64 = arch == "aarch64" + +-- | Is the OS Mac OS X? +isMacOSX :: Bool +isMacOSX = os == "darwin" + +-- | To avoid problems with GHC version mismatch when a new LTS major +-- version is released, pass this argument to @stack@ when running in +-- a global context. The LTS major version here should match that of +-- the main @stack.yaml@. +-- +defaultSnapshotArg :: String +defaultSnapshotArg = "--snapshot=lts-24.37" + +-- | Remove a file and ignore any warnings about missing files. +removeFileIgnore :: HasCallStack => FilePath -> IO () +removeFileIgnore fp = removeFile fp `catch` \e -> + if isDoesNotExistError e + then pure () + else throwIO e + +-- | Remove a directory and ignore any warnings about missing files. +removeDirIgnore :: HasCallStack => FilePath -> IO () +removeDirIgnore fp = removeDirectoryRecursive fp `catch` \e -> + if isDoesNotExistError e + then pure () + else throwIO e + +-- | Changes to the specified working directory. +withCwd :: HasCallStack => FilePath -> IO () -> IO () +withCwd dir action = do + currentDirectory <- getCurrentDirectory + let enterDir = setCurrentDirectory dir + exitDir = setCurrentDirectory currentDirectory + bracket_ enterDir exitDir action + +-- | Changes working directory to Stack source directory. 
+withSourceDirectory :: HasCallStack => IO () -> IO () +withSourceDirectory action = do + dir <- stackSrc + withCwd dir action + +-- | Mark a test as superslow, only to be run when explicitly requested. +superslow :: HasCallStack => IO () -> IO () +superslow inner = do + mres <- lookupEnv "STACK_TEST_SPEED" + case mres of + Just "NORMAL" -> logInfo "Skipping superslow test" + Just "SUPERSLOW" -> do + logInfo "Running superslow test, hold on to your butts" + inner + Nothing -> do + logInfo "No STACK_TEST_SPEED specified. Executing superslow test, hold \ + \on to your butts" + inner + Just x -> error $ "Invalid value for STACK_TEST_SPEED env var: " ++ show x diff --git a/tests/integration/lib/StackTest/Repl.hs b/tests/integration/lib/StackTest/Repl.hs new file mode 100644 index 0000000000..f08bdfceb5 --- /dev/null +++ b/tests/integration/lib/StackTest/Repl.hs @@ -0,0 +1,135 @@ +{- | +Integration-test helpers & fixtures for testing `stack repl` +-} +module StackTest.Repl + ( Repl + , ReplConnection (..) 
+ , nextPrompt + , replCommand + , replGetChar + , replGetLine + , stackRepl + -- * Reexport + , module StackTest + ) where + +import Control.Exception (SomeException, catch, displayException, finally) +import Control.Monad ((>=>), unless, when) +import Control.Monad.IO.Class (liftIO) +import Control.Monad.Trans (lift) +import Control.Monad.Trans.Reader +import Control.Monad.Trans.State qualified as State +import Data.Maybe (fromMaybe) +import Data.Foldable (toList) +import Data.Sequence as Seq (Seq(Empty), (|>), fromList) +import GHC.Stack (HasCallStack) +import System.Directory (removeFile) +import System.Environment (lookupEnv) +import System.Exit (ExitCode (..), exitFailure) +import System.IO + ( BufferMode (NoBuffering, LineBuffering), Handle, IOMode (ReadMode) + , hClose, hGetChar, hGetContents', hGetLine, hPutStrLn, hSetBuffering + , openTempFile + , withFile + ) +import System.Process + ( CreateProcess (std_err, std_in, std_out) + , StdStream (CreatePipe, UseHandle) + , createProcess, proc, waitForProcess + ) + +import StackTest + +type Repl = ReaderT ReplConnection IO + +data ReplConnection = ReplConnection + { replStdin :: Handle + , replStdout :: Handle + } + +replCommand :: String -> Repl () +replCommand cmd = do + (ReplConnection replStdinHandle _) <- ask + -- echo what we send to the test's stdout + liftIO . putStrLn $ "____> " <> cmd + liftIO $ hPutStrLn replStdinHandle cmd + +replGetChar :: Repl Char +replGetChar = asks replStdout >>= liftIO . hGetChar + +replGetLine :: Repl String +replGetLine = ask >>= liftIO . hGetLine . replStdout + +nextPrompt :: Repl () +nextPrompt = State.evalStateT poll Seq.Empty where + poll = do + c <- lift (asks replStdout) >>= liftIO . hGetChar + State.modify (|> c) + when (c == '\n') $ do + State.get >>= liftIO . putStr . ("ghci> " ++) . 
toList + State.put Seq.Empty + buf <- State.get + unless (buf == Seq.fromList "ghci> ") + poll + +runRepl + :: HasCallStack + => FilePath + -> [String] + -> Repl () + -> IO ExitCode +runRepl cmd args actions = do + (stderrBufPath, stderrBufHandle) <- openTempStderrBufferFile + hSetBuffering stderrBufHandle NoBuffering + + logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args) ++ "\n\ + \ with stderr in " ++ stderrBufPath + + -- launch the GHCi subprocess, grab its FD handles and process handle + (Just rStdin, Just rStdout, Nothing, ph) <- + createProcess (proc cmd args) + { std_in = CreatePipe + , std_out = CreatePipe + , std_err = UseHandle stderrBufHandle + } + hSetBuffering rStdin LineBuffering + hSetBuffering rStdout NoBuffering + + -- run the test script which is to talk to the GHCi subprocess. + runReaderT actions (ReplConnection rStdin rStdout) + -- the nested actions script may fail in arbitrary ways; handle that here, + -- attaching the subprocess stderr as relevant context + `catch` \(e :: SomeException) -> do + putStrLn "==============================" + putStrLn "EXCEPTION in test: " + putStrLn . quote $ displayException e + putStrLn "------[ stderr of repl ]------" + withFile stderrBufPath ReadMode $ hGetContents' >=> putStr . quote + putStrLn "==============================" + `finally` do + hClose stderrBufHandle + removeFile stderrBufPath + + -- once done with the test, signal EOF on stdin for clean termination of ghci + hClose rStdin + -- read out the exit-code + waitForProcess ph + +-- | Roll a bicycle, rather than just `import Path.IO (getTempDir, openTempFile)`, +-- because it's a hassle to use anything beyond base & boot libs here. +openTempStderrBufferFile :: IO (FilePath, Handle) +openTempStderrBufferFile = getTempDir >>= (`openTempFile` "err.log") where + getTempDir | isWindows = fromMaybe "" <$> lookupEnv "TEMP" + | otherwise = pure "/tmp" + +-- | Testing helper to exercise `stack repl`. 
+stackRepl :: HasCallStack => [String] -> Repl () -> IO () +stackRepl args action = do + stackExe' <- stackExe + ec <- runRepl stackExe' ("repl" : "--ghci-options=-ignore-dot-ghci" : args) action + unless (ec == ExitSuccess) $ do + putStrLn $ "repl exited with " <> show ec + exitFailure + +quote :: String -> String +quote = unlines . map ("> " <>) . lines diff --git a/test/integration/run-single-test.sh b/tests/integration/run-single-test.sh similarity index 100% rename from test/integration/run-single-test.sh rename to tests/integration/run-single-test.sh diff --git a/test/integration/run-sort-tests.sh b/tests/integration/run-sort-tests.sh similarity index 100% rename from test/integration/run-sort-tests.sh rename to tests/integration/run-sort-tests.sh diff --git a/tests/integration/tests/111-custom-snapshot/Main.hs b/tests/integration/tests/111-custom-snapshot/Main.hs new file mode 100644 index 0000000000..9c6bfb055b --- /dev/null +++ b/tests/integration/tests/111-custom-snapshot/Main.hs @@ -0,0 +1,9 @@ +-- Stack should build a package when a custom snapshot is specified in the +-- project-level configuration file. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/111 + +import StackTest + +main :: IO () +main = stack ["build"] diff --git a/tests/integration/tests/111-custom-snapshot/files/.gitignore b/tests/integration/tests/111-custom-snapshot/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/111-custom-snapshot/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/111-custom-snapshot/files/my-snapshot.yaml b/tests/integration/tests/111-custom-snapshot/files/my-snapshot.yaml new file mode 100644 index 0000000000..9f6da9150c --- /dev/null +++ b/tests/integration/tests/111-custom-snapshot/files/my-snapshot.yaml @@ -0,0 +1,5 @@ +name: my-snapshot + +compiler: ghc-9.10.3 +packages: +- acme-missiles-0.3 diff --git a/tests/integration/tests/111-custom-snapshot/files/package.yaml b/tests/integration/tests/111-custom-snapshot/files/package.yaml new file mode 100644 index 0000000000..c80a013e52 --- /dev/null +++ b/tests/integration/tests/111-custom-snapshot/files/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- acme-missiles + +library: + source-dirs: src diff --git a/tests/integration/tests/111-custom-snapshot/files/src/Lib.hs b/tests/integration/tests/111-custom-snapshot/files/src/Lib.hs new file mode 100644 index 0000000000..3635ec4a52 --- /dev/null +++ b/tests/integration/tests/111-custom-snapshot/files/src/Lib.hs @@ -0,0 +1,5 @@ +module Lib + ( launchMissiles + ) where + +import Acme.Missiles ( launchMissiles ) diff --git a/tests/integration/tests/111-custom-snapshot/files/stack.yaml b/tests/integration/tests/111-custom-snapshot/files/stack.yaml new file mode 100644 index 0000000000..c00072833e --- /dev/null +++ b/tests/integration/tests/111-custom-snapshot/files/stack.yaml @@ -0,0 +1 @@ +snapshot: my-snapshot.yaml diff --git a/tests/integration/tests/1198-multiple-exes-with-same-name/Main.hs 
b/tests/integration/tests/1198-multiple-exes-with-same-name/Main.hs new file mode 100644 index 0000000000..1dcedba320 --- /dev/null +++ b/tests/integration/tests/1198-multiple-exes-with-same-name/Main.hs @@ -0,0 +1,28 @@ +-- Stack warns when more than one project package has an executable component of +-- the same name. +-- +-- See: https://github.com/commercialhaskell/stack/issues/1198 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stackCheckStderr + ["build", "myPackageA", "myPackageB"] + (expectMessage buildMessage1) + stackCheckStderr + ["build", "myPackageC"] + (expectMessage buildMessage2) + +expectMessage :: String -> String -> IO () +expectMessage msg stderr = + unless (msg `isInfixOf` stderr) + (error $ "Expected a warning: \n" ++ show msg) + +-- Use short message fragment because prettyWarn formatting and colour +buildMessage1 = "Building several executables with the same name:" + +-- Use short message fragment because prettyWarn formatting and colour +buildMessage2 = "Other executables with the same name" diff --git a/tests/integration/tests/1198-multiple-exes-with-same-name/files/.gitignore b/tests/integration/tests/1198-multiple-exes-with-same-name/files/.gitignore new file mode 100644 index 0000000000..c4ea0f5ab0 --- /dev/null +++ b/tests/integration/tests/1198-multiple-exes-with-same-name/files/.gitignore @@ -0,0 +1,3 @@ +myPackageA.cabal +myPackageB.cabal +myPackageC.cabal diff --git a/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageA/app/Main.hs b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageA/app/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageA/app/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageA/package.yaml 
b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageA/package.yaml new file mode 100644 index 0000000000..7a3a634c04 --- /dev/null +++ b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageA/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base + +executables: + myExe: + source-dirs: app + main: Main.hs diff --git a/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageB/app/Main.hs b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageB/app/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageB/app/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageB/package.yaml b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageB/package.yaml new file mode 100644 index 0000000000..9ef40bf495 --- /dev/null +++ b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageB/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base + +executables: + myExe: + source-dirs: app + main: Main.hs diff --git a/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageC/app/Main.hs b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageC/app/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageC/app/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageC/package.yaml b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageC/package.yaml new file mode 100644 index 0000000000..add5c45908 --- /dev/null +++ 
b/tests/integration/tests/1198-multiple-exes-with-same-name/files/myPackageC/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackageC + +dependencies: +- base + +executables: + myExe: + source-dirs: app + main: Main.hs diff --git a/tests/integration/tests/1198-multiple-exes-with-same-name/files/stack.yaml b/tests/integration/tests/1198-multiple-exes-with-same-name/files/stack.yaml new file mode 100644 index 0000000000..14a33de5df --- /dev/null +++ b/tests/integration/tests/1198-multiple-exes-with-same-name/files/stack.yaml @@ -0,0 +1,6 @@ +snapshot: ghc-9.10.3 + +packages: +- myPackageA +- myPackageB +- myPackageC diff --git a/tests/integration/tests/1265-extensible-snapshots/Main.hs b/tests/integration/tests/1265-extensible-snapshots/Main.hs new file mode 100644 index 0000000000..a44bcc205b --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/Main.hs @@ -0,0 +1,28 @@ +-- Stack supports extensible snapshots. +-- +-- See: https://github.com/commercialhaskell/stack/issues/1198 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stack ["build", "myPackageA", "--dry-run"] + -- Should fail because myPackageB depends on acme-box, which has been dropped: + stackErrStderr ["build", "myPackageB", "--dry-run"] (expectMessage acmeBoxNeeded) + stack ["build", "--stack-yaml", "stack-modify-lts.yaml", "myPackageA", "--dry-run"] + stack ["build", "--stack-yaml", "stack-local-snapshot.yaml", "myPackageC", "--dry-run"] + stack ["build", "--stack-yaml", "stack-remote-snapshot.yaml", "myPackageA", "--dry-run"] + -- Should fail because myPackageD depends on zlib, which has been dropped: + stackErrStderr + ["build", "--stack-yaml", "stack-modify-lts.yaml", "myPackageD", "--dry-run"] + (expectMessage zlibNeeded) + where + acmeBoxNeeded = "acme-box needed, but no version" + zlibNeeded = "zlib needed, but no version" + +expectMessage :: String -> String -> IO () +expectMessage msg stderr = do + 
unless (words msg `isInfixOf` words stderr) + (error $ "Expected a warning: \n" ++ show msg) diff --git a/tests/integration/tests/1265-extensible-snapshots/files/.gitignore b/tests/integration/tests/1265-extensible-snapshots/files/.gitignore new file mode 100644 index 0000000000..5087095541 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/.gitignore @@ -0,0 +1,4 @@ +myPackageA.cabal +myPackageB.cabal +myPackageC.cabal +myPackageD.cabal diff --git a/tests/integration/tests/1265-extensible-snapshots/files/myPackageA/package.yaml b/tests/integration/tests/1265-extensible-snapshots/files/myPackageA/package.yaml new file mode 100644 index 0000000000..ec081f26c0 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/myPackageA/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base +- acme-missiles + +library: + source-dirs: src diff --git a/tests/integration/tests/1265-extensible-snapshots/files/myPackageA/src/Lib.hs b/tests/integration/tests/1265-extensible-snapshots/files/myPackageA/src/Lib.hs new file mode 100644 index 0000000000..889210c825 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/myPackageA/src/Lib.hs @@ -0,0 +1,5 @@ +module Lib + ( launchMissiles + ) where + +import Acme.Missiles ( launchMissiles ) diff --git a/tests/integration/tests/1265-extensible-snapshots/files/myPackageB/package.yaml b/tests/integration/tests/1265-extensible-snapshots/files/myPackageB/package.yaml new file mode 100644 index 0000000000..0ef22e6550 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/myPackageB/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base +- acme-box + +library: + source-dirs: src diff --git a/tests/integration/tests/1265-extensible-snapshots/files/myPackageB/src/Lib.hs b/tests/integration/tests/1265-extensible-snapshots/files/myPackageB/src/Lib.hs new file mode 100644 index 
0000000000..af8de0c696 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/myPackageB/src/Lib.hs @@ -0,0 +1,5 @@ +module Lib + ( box + ) where + +import Acme.Box ( box ) diff --git a/tests/integration/tests/1265-extensible-snapshots/files/myPackageC/package.yaml b/tests/integration/tests/1265-extensible-snapshots/files/myPackageC/package.yaml new file mode 100644 index 0000000000..17a516d2c4 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/myPackageC/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackageC + +dependencies: +- base +- myPackage + +library: + source-dirs: src diff --git a/test/integration/tests/3591-cabal-warnings-once/files/src/Lib.hs b/tests/integration/tests/1265-extensible-snapshots/files/myPackageC/src/Lib.hs similarity index 100% rename from test/integration/tests/3591-cabal-warnings-once/files/src/Lib.hs rename to tests/integration/tests/1265-extensible-snapshots/files/myPackageC/src/Lib.hs diff --git a/tests/integration/tests/1265-extensible-snapshots/files/myPackageD/package.yaml b/tests/integration/tests/1265-extensible-snapshots/files/myPackageD/package.yaml new file mode 100644 index 0000000000..766e1dd84a --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/myPackageD/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackageD + +dependencies: +- base +- zlib + +library: + source-dirs: src diff --git a/tests/integration/tests/1265-extensible-snapshots/files/myPackageD/src/Lib.hs b/tests/integration/tests/1265-extensible-snapshots/files/myPackageD/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/myPackageD/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/1265-extensible-snapshots/files/snapshot-2.yaml b/tests/integration/tests/1265-extensible-snapshots/files/snapshot-2.yaml new file mode 100644 index 0000000000..67a1547355 
--- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/snapshot-2.yaml @@ -0,0 +1,7 @@ +name: test-snapshot-2 + +snapshot: ghc-9.10.3 + +packages: +- acme-missiles-0.3 +- acme-box-0.0.0.0 diff --git a/tests/integration/tests/1265-extensible-snapshots/files/snapshot-modify-lts.yaml b/tests/integration/tests/1265-extensible-snapshots/files/snapshot-modify-lts.yaml new file mode 100644 index 0000000000..6f217b4bac --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/snapshot-modify-lts.yaml @@ -0,0 +1,9 @@ +name: snapshot-modify-lts + +snapshot: lts-24.37 + +packages: +- acme-missiles-0.3 + +drop-packages: +- zlib diff --git a/tests/integration/tests/1265-extensible-snapshots/files/snapshot.yaml b/tests/integration/tests/1265-extensible-snapshots/files/snapshot.yaml new file mode 100644 index 0000000000..bd797eccb9 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/snapshot.yaml @@ -0,0 +1,6 @@ +name: test-snapshot + +snapshot: snapshot-2.yaml + +drop-packages: +- acme-box diff --git a/tests/integration/tests/1265-extensible-snapshots/files/snapshots/local-snapshot.yaml b/tests/integration/tests/1265-extensible-snapshots/files/snapshots/local-snapshot.yaml new file mode 100644 index 0000000000..feb04c6e76 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/snapshots/local-snapshot.yaml @@ -0,0 +1,6 @@ +name: local-snapshot + +snapshot: ghc-9.10.3 + +packages: +- archive: myPackage-0.1.0.0.tar.gz diff --git a/tests/integration/tests/1265-extensible-snapshots/files/snapshots/myPackage-0.1.0.0.tar.gz b/tests/integration/tests/1265-extensible-snapshots/files/snapshots/myPackage-0.1.0.0.tar.gz new file mode 100644 index 0000000000..ce1a9e2a00 Binary files /dev/null and b/tests/integration/tests/1265-extensible-snapshots/files/snapshots/myPackage-0.1.0.0.tar.gz differ diff --git a/tests/integration/tests/1265-extensible-snapshots/files/snapshots/remote-snapshot.yaml 
b/tests/integration/tests/1265-extensible-snapshots/files/snapshots/remote-snapshot.yaml new file mode 100644 index 0000000000..d93db09f54 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/snapshots/remote-snapshot.yaml @@ -0,0 +1,4 @@ +name: remote-snapshot +snapshot: ghc-9.10.3 +packages: +- archive: https://s3.amazonaws.com/hackage.fpcomplete.com/package/acme-missiles-0.3.tar.gz diff --git a/tests/integration/tests/1265-extensible-snapshots/files/stack-local-snapshot.yaml b/tests/integration/tests/1265-extensible-snapshots/files/stack-local-snapshot.yaml new file mode 100644 index 0000000000..85d9a380b5 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/stack-local-snapshot.yaml @@ -0,0 +1,4 @@ +snapshot: snapshots/local-snapshot.yaml + +packages: +- myPackageC diff --git a/tests/integration/tests/1265-extensible-snapshots/files/stack-modify-lts.yaml b/tests/integration/tests/1265-extensible-snapshots/files/stack-modify-lts.yaml new file mode 100644 index 0000000000..cc8cca4759 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/stack-modify-lts.yaml @@ -0,0 +1,5 @@ +snapshot: snapshot-modify-lts.yaml + +packages: +- myPackageA +- myPackageD diff --git a/tests/integration/tests/1265-extensible-snapshots/files/stack-remote-snapshot.yaml b/tests/integration/tests/1265-extensible-snapshots/files/stack-remote-snapshot.yaml new file mode 100644 index 0000000000..faa16c1912 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/stack-remote-snapshot.yaml @@ -0,0 +1,4 @@ +snapshot: snapshots/remote-snapshot.yaml + +packages: +- myPackageA diff --git a/tests/integration/tests/1265-extensible-snapshots/files/stack.yaml b/tests/integration/tests/1265-extensible-snapshots/files/stack.yaml new file mode 100644 index 0000000000..105c1ae1d7 --- /dev/null +++ b/tests/integration/tests/1265-extensible-snapshots/files/stack.yaml @@ -0,0 +1,5 @@ +snapshot: snapshot.yaml + +packages: +- 
myPackageA +- myPackageB diff --git a/tests/integration/tests/1336-1337-new-package-names/.gitignore b/tests/integration/tests/1336-1337-new-package-names/.gitignore new file mode 100644 index 0000000000..ed73754e8e --- /dev/null +++ b/tests/integration/tests/1336-1337-new-package-names/.gitignore @@ -0,0 +1,7 @@ +1b3d-a2c4/ +1234-abcd/ +abcd-1234/ +1234-ば日本-4本/ +ば日本-4本/ +אבהץש/ +ΔΘΩϬ/ diff --git a/tests/integration/tests/1336-1337-new-package-names/Main.hs b/tests/integration/tests/1336-1337-new-package-names/Main.hs new file mode 100644 index 0000000000..83d15d9d0d --- /dev/null +++ b/tests/integration/tests/1336-1337-new-package-names/Main.hs @@ -0,0 +1,32 @@ +-- Stack's new command accepts project names that are valid Cabal package names +-- and rejects those that are not without creating a project directory. +-- +-- See: https://github.com/commercialhaskell/stack/issues/1336 +-- https://github.com/commercialhaskell/stack/issues/1337 + +import Control.Monad ( unless, when ) +import StackTest +import System.Directory + ( doesDirectoryExist, removeDirectoryRecursive ) + +main :: IO () +main = do + safeNew "1b3d-a2c4" + doesExist "./1b3d-a2c4/stack.yaml" + doesExist "./1b3d-a2c4/1b3d-a2c4.cabal" + stackErr ["new", "1234-abcd"] + doesNotExist "./1234-abcd" + stackErr ["new", "abcd-1234"] + -- The GitHub windows-latest (Microsoft Windows Server 2025) environment + -- appears to be unable to handle these Unicode code points. 
+ unless isWindows $ do + stackErr ["new", "1234-ば日本-4本"] + safeNew "ば日本-4本" + safeNew "אבהץש" + safeNew "ΔΘΩϬ" + +safeNew :: String -> IO () +safeNew name = do + exists <- doesDirectoryExist name + when exists $ removeDirectoryRecursive name + stack ["new", name] diff --git a/tests/integration/tests/1337-unicode-everywhere/Main.hs b/tests/integration/tests/1337-unicode-everywhere/Main.hs new file mode 100644 index 0000000000..4083a0c34f --- /dev/null +++ b/tests/integration/tests/1337-unicode-everywhere/Main.hs @@ -0,0 +1,14 @@ +-- Stack accepts Unicode code points outside of the Basic Latin Unicode block +-- (ASCII). +-- +-- See: https://github.com/commercialhaskell/stack/issues/1337 + +import Control.Monad ( unless ) +import StackTest + +main :: IO () +-- The GitHub windows-latest (Microsoft Windows Server 2025) environment appears +-- to be unable to handle these Unicode code points. +main = unless isWindows $ do + stack ["build"] + stack ["exec", "以-exe"] diff --git a/tests/integration/tests/1337-unicode-everywhere/files/.gitignore b/tests/integration/tests/1337-unicode-everywhere/files/.gitignore new file mode 100644 index 0000000000..207edb0d40 --- /dev/null +++ b/tests/integration/tests/1337-unicode-everywhere/files/.gitignore @@ -0,0 +1 @@ +以.cabal diff --git a/tests/integration/tests/1337-unicode-everywhere/files/app/Main.hs b/tests/integration/tests/1337-unicode-everywhere/files/app/Main.hs new file mode 100644 index 0000000000..c33747aa40 --- /dev/null +++ b/tests/integration/tests/1337-unicode-everywhere/files/app/Main.hs @@ -0,0 +1,6 @@ +module Main where + +import Пσε ( θυπε ) + +main :: IO () +main = putStrLn θυπε diff --git a/tests/integration/tests/1337-unicode-everywhere/files/package.yaml b/tests/integration/tests/1337-unicode-everywhere/files/package.yaml new file mode 100644 index 0000000000..3211b0f2e7 --- /dev/null +++ b/tests/integration/tests/1337-unicode-everywhere/files/package.yaml @@ -0,0 +1,79 @@ +spec-version: 0.36.0 + +name: 以 
+synopsis: سقوط المدن من ذات. +description: | + 以呂波耳本部止 + 千利奴流乎和加 + 餘多連曽津祢那 + 良牟有為能於久 + 耶万計不己衣天 + 阿佐伎喩女美之 + 恵比毛勢須 + + いろはにほへと + ちりぬるを + わかよたれそ + つねならむ + うゐのおくやま + けふこえて + あさきゆめみし + ゑひもせす + + 永 + + The quick brown fox jumps over the lazy dog + + Victor jagt zwölf Boxkämpfer quer über den großen Sylter Deich + + Δάματρα μέλπω Κόραν τε Κλυμένοι᾽ ἄλοχον + μελιβόαν ὕμνον ἀναγνέων + Αἰολίδ᾽ ἂμ βαρύβρομον ἁρμονίαν + + Ед эож алььтэрюм витюпэраторебуз, фалля пожйдонёюм нэ квуй. Зюаз атоморюм эю + вэл, экз агам магна жкряпшэрит нам. Примич вокынт дэлььякатезшимя эа мэль, + ыам факэтэ пытынтёюм волуптатум ку. Квуым квюаэчтио йн пэр, дольор + форынчйбюж ут еюж. Эжт нонюмэш янвыняры эю. + + Υθ φιμ λιβερ δισερετ κυαεστιο. Νε δυο σονγυε φιθυπερατοριβυς, θε φις αθκυι + σενσεριτ δεφινιεβας, μολλις θαμκυαμ ηας εα. Ιν φιμ εραντ μυσιυς, αλιι δισαντ + σομμοδο νο συμ. Πρι αμετ πορρω σονσεκυυντυρ ατ. Ιδ σεα ηομερω αδιπισι, + ομνεσκυε επισυρει ετ μελ, σεα αφφερθ σωνσεκυαθ θε. + + नीचे खरिदे समस्याओ व्रुद्धि सुना शीघ्र व्याख्या निरपेक्ष शुरुआत असरकारक अविरोधता खरिदे मेमत उसीएक् + असरकारक आंतरकार्यक्षमता केवल करता। असक्षम सामूहिक विवरण हीकम सुनत सदस्य खरिदने उदेशीत + + 引全堀記物質行上初野年謝止質警細物競。委目態政業諸好岡積米真香冒班分団時大一夏。 + 帝同手怠問来視旧記次禁身妨性直権員。州駅都稿頂風著報計個勢意時言進整。 + 作敗約秘都並断旬検面事真区。則局世紙文般百校車社金名室権金練危。 + 水夫動間始旅円典中所場針仁暮中。書稿準提選別推五玲常能船岡味。 + 詩聞誌寧真身性業遠国無軽春臨個鳥相契断者。 + + 紙ウ主転ね加必キト聴4水そない岩者づど会前ラシ総過権ぜ聞快にクげそ庫辺はぴず意掲ひに真価トしざが。体ろ名録ふいに公問築ムミ数基ー原多ぞぎすど済進ハレ見況ヤラエロ館刊キイ構読増禎ヨモメ提信最フ済席ワモキル弘28思弾5健はーろ提館ぱっるけ社除左てがひ。全てれ理時渉ン勝境チ測試ヌエマラ年9億ち込全クテヒカ土止びぎこぐ署進アカモヌ同賞誕傾吹敏こ。 + + سقوط المدن من ذات. ٠٨٠٤ تعديل إبّان عن هذه, لمّ في هامش الدمج. فرنسية الجنوب ولكسمبورغ حتى أم, مع بحث لكون الشمل استطاعوا, فعل بلاده انتهت تم. كما مع واحدة الخاسر, أخذ وإعلان الشمال رجوعهم أن. تعد تم بشكل مشارف الشرقي, هو مئات مهمّات أخذ. + + בקר את הארץ מיתולוגיה. ובמתן העזרה דת מלא, שמו זקוק לחיבור אגרונומיה על. אל זכר התוכן מונחונים ארכיאולוגיה, ויש קצרמרים פסיכולוגיה אל. כתב אם בארגז אחרונים וכמקובל. ספינות ומדעים האטמוספירה ויש מה, צ'ט משחקים הקהילה ארכיאולוגיה על. 
אל כימיה המדינה סדר, זכר של שמות להפוך, ייִדיש לעריכת דת ויש. + + 😀😁😂😃😄😅😆😇😈😉😊😋😌😍😎😏 + ➀➁➂➃➄➅➆➇➈➉ + ♀♁♂♃♄♅♆♇♈♉♊♋♌♍♎♏ + 💩 +homepage: http://invalid/以#readme +author: Fake 💩💩💩💩💩 Name +maintainer: 以呂波耳本部止@invalid +license-file: いろは-LICENSE +category: Δσαντ + +dependencies: +- base + +library: + source-dirs: ארץ + +executables: + 以-exe: + source-dirs: app + main: Main.hs + dependencies: + - 以 diff --git a/tests/integration/tests/1337-unicode-everywhere/files/stack.yaml b/tests/integration/tests/1337-unicode-everywhere/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/1337-unicode-everywhere/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git "a/tests/integration/tests/1337-unicode-everywhere/files/\327\220\327\250\327\245/\320\237\317\203\316\265.hs" "b/tests/integration/tests/1337-unicode-everywhere/files/\327\220\327\250\327\245/\320\237\317\203\316\265.hs" new file mode 100644 index 0000000000..197831ba8e --- /dev/null +++ "b/tests/integration/tests/1337-unicode-everywhere/files/\327\220\327\250\327\245/\320\237\317\203\316\265.hs" @@ -0,0 +1,6 @@ +module Пσε + ( θυπε + ) where + +θυπε :: String +θυπε = "以呂波耳本部止" diff --git "a/tests/integration/tests/1337-unicode-everywhere/files/\343\201\204\343\202\215\343\201\257-LICENSE" "b/tests/integration/tests/1337-unicode-everywhere/files/\343\201\204\343\202\215\343\201\257-LICENSE" new file mode 100644 index 0000000000..3d8104feae --- /dev/null +++ "b/tests/integration/tests/1337-unicode-everywhere/files/\343\201\204\343\202\215\343\201\257-LICENSE" @@ -0,0 +1,30 @@ +Copyright فلان الفلاني (c) 2026 + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Author name here nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/tests/integration/tests/1438-configure-options/Main.hs b/tests/integration/tests/1438-configure-options/Main.hs new file mode 100644 index 0000000000..200dd344b2 --- /dev/null +++ b/tests/integration/tests/1438-configure-options/Main.hs @@ -0,0 +1,27 @@ +-- Stack allows Cabal configuration options to be set. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/1438 +import Control.Monad ( unless ) +import Data.Foldable ( for_ ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stackCleanFull + let stackYamlFiles = + [ "stack-locals.yaml" + , "stack-everything.yaml" + , "stack-targets.yaml" + , "stack-myPackage.yaml" + ] + for_ stackYamlFiles $ \stackYaml -> + stackErrStderr ["build", "--stack-yaml", stackYaml] $ \str -> + unless ("invalid option" `isInfixOf` str) $ + error "Configure option is not present" + + stack ["build", "--stack-yaml", "stack-locals.yaml", "acme-dont"] + stack ["build", "--stack-yaml", "stack-targets.yaml", "acme-dont"] + stackErr ["build", "--stack-yaml", "stack-name.yaml", "acme-dont"] + stackErr ["build", "--stack-yaml", "stack-everything.yaml", "acme-dont"] diff --git a/tests/integration/tests/1438-configure-options/files/.gitignore b/tests/integration/tests/1438-configure-options/files/.gitignore new file mode 100644 index 0000000000..eae186286f --- /dev/null +++ b/tests/integration/tests/1438-configure-options/files/.gitignore @@ -0,0 +1,2 @@ +myPackage.cabal +*.yaml.lock diff --git a/tests/integration/tests/1438-configure-options/files/package.yaml b/tests/integration/tests/1438-configure-options/files/package.yaml new file mode 100644 index 0000000000..a7305a77cf --- /dev/null +++ b/tests/integration/tests/1438-configure-options/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/1438-configure-options/files/src/Lib.hs b/tests/integration/tests/1438-configure-options/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/1438-configure-options/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/test/integration/tests/1438-configure-options/files/stack-everything.yaml 
b/tests/integration/tests/1438-configure-options/files/stack-everything.yaml similarity index 82% rename from test/integration/tests/1438-configure-options/files/stack-everything.yaml rename to tests/integration/tests/1438-configure-options/files/stack-everything.yaml index 5469a72a6b..5346941606 100644 --- a/test/integration/tests/1438-configure-options/files/stack-everything.yaml +++ b/tests/integration/tests/1438-configure-options/files/stack-everything.yaml @@ -1,4 +1,4 @@ -resolver: ghc-8.6.5 +snapshot: ghc-9.10.3 extra-deps: - acme-dont-1.1@rev:0 diff --git a/test/integration/tests/1438-configure-options/files/stack-locals.yaml b/tests/integration/tests/1438-configure-options/files/stack-locals.yaml similarity index 82% rename from test/integration/tests/1438-configure-options/files/stack-locals.yaml rename to tests/integration/tests/1438-configure-options/files/stack-locals.yaml index f8453ec235..cf64377233 100644 --- a/test/integration/tests/1438-configure-options/files/stack-locals.yaml +++ b/tests/integration/tests/1438-configure-options/files/stack-locals.yaml @@ -1,4 +1,4 @@ -resolver: ghc-8.6.5 +snapshot: ghc-9.10.3 extra-deps: - acme-dont-1.1@rev:0 diff --git a/tests/integration/tests/1438-configure-options/files/stack-myPackage.yaml b/tests/integration/tests/1438-configure-options/files/stack-myPackage.yaml new file mode 100644 index 0000000000..2e66b8b9ab --- /dev/null +++ b/tests/integration/tests/1438-configure-options/files/stack-myPackage.yaml @@ -0,0 +1,10 @@ +snapshot: ghc-9.10.3 + +extra-deps: +- acme-dont-1.1@rev:0 + +configure-options: + myPackage: + - this is an invalid option + acme-dont: + - this is an invalid option diff --git a/test/integration/tests/1438-configure-options/files/stack-targets.yaml b/tests/integration/tests/1438-configure-options/files/stack-targets.yaml similarity index 82% rename from test/integration/tests/1438-configure-options/files/stack-targets.yaml rename to 
tests/integration/tests/1438-configure-options/files/stack-targets.yaml index 2f2dcdfbf3..65bd272f28 100644 --- a/test/integration/tests/1438-configure-options/files/stack-targets.yaml +++ b/tests/integration/tests/1438-configure-options/files/stack-targets.yaml @@ -1,4 +1,4 @@ -resolver: ghc-8.6.5 +snapshot: ghc-9.10.3 extra-deps: - acme-dont-1.1@rev:0 diff --git a/tests/integration/tests/1659-skip-component/Main.hs b/tests/integration/tests/1659-skip-component/Main.hs new file mode 100644 index 0000000000..f5282d88d8 --- /dev/null +++ b/tests/integration/tests/1659-skip-component/Main.hs @@ -0,0 +1,10 @@ +-- Stack can be commanded to skip the building of specific package components. +-- +-- See: https://github.com/commercialhaskell/stack/issues/1659 + +import StackTest + +main :: IO () +main = do + stack ["build", "--test", "--bench", "--skip", "test-failing", "--skip", "bench-failing", "--skip", "myExe-failing"] + stack ["build", ":test-failing", ":bench-failing", ":myExe", ":myExe-failing", "--skip", "test-failing", "--skip", "bench-failing", "--skip", "myExe-failing"] diff --git a/tests/integration/tests/1659-skip-component/files/.gitignore b/tests/integration/tests/1659-skip-component/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/1659-skip-component/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/1659-skip-component/files/app-failing/Main.hs b/tests/integration/tests/1659-skip-component/files/app-failing/Main.hs new file mode 100644 index 0000000000..fed6cef32a --- /dev/null +++ b/tests/integration/tests/1659-skip-component/files/app-failing/Main.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE CPP #-} + +module Main where + +main :: IO () +main = pure () + +-- Avoid problems with CPP and HLint +#ifndef __HLINT__ + +#error Not going to compile, sorry + +#endif diff --git a/tests/integration/tests/1659-skip-component/files/app/Main.hs 
b/tests/integration/tests/1659-skip-component/files/app/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/1659-skip-component/files/app/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/1659-skip-component/files/bench/Main.hs b/tests/integration/tests/1659-skip-component/files/bench/Main.hs new file mode 100644 index 0000000000..55ef017a49 --- /dev/null +++ b/tests/integration/tests/1659-skip-component/files/bench/Main.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE CPP #-} + +main :: IO () +main = pure () + +-- Avoid problems with CPP and HLint +#ifndef __HLINT__ + +#error Not going to compile, sorry + +#endif diff --git a/tests/integration/tests/1659-skip-component/files/package.yaml b/tests/integration/tests/1659-skip-component/files/package.yaml new file mode 100644 index 0000000000..0b15e2fbce --- /dev/null +++ b/tests/integration/tests/1659-skip-component/files/package.yaml @@ -0,0 +1,27 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + +executables: + myExe: + source-dirs: app + main: Main.hs + myExe-failing: + source-dirs: app-failing + main: Main.hs + +tests: + test-failing: + source-dirs: test + main: Main.hs + +benchmarks: + bench-failing: + source-dirs: bench + main: Main.hs diff --git a/tests/integration/tests/1659-skip-component/files/src/Lib.hs b/tests/integration/tests/1659-skip-component/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/1659-skip-component/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/1659-skip-component/files/stack.yaml b/tests/integration/tests/1659-skip-component/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/1659-skip-component/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git 
a/tests/integration/tests/1659-skip-component/files/test/Main.hs b/tests/integration/tests/1659-skip-component/files/test/Main.hs new file mode 100644 index 0000000000..55ef017a49 --- /dev/null +++ b/tests/integration/tests/1659-skip-component/files/test/Main.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE CPP #-} + +main :: IO () +main = pure () + +-- Avoid problems with CPP and HLint +#ifndef __HLINT__ + +#error Not going to compile, sorry + +#endif diff --git a/tests/integration/tests/1884-url-to-tarball/Main.hs b/tests/integration/tests/1884-url-to-tarball/Main.hs new file mode 100644 index 0000000000..19e9a55117 --- /dev/null +++ b/tests/integration/tests/1884-url-to-tarball/Main.hs @@ -0,0 +1,8 @@ +-- Stack allows an extra-dep to be specified as a URL for an archive file. +-- +-- See: https://github.com/commercialhaskell/stack/issues/1884 + +import StackTest + +main :: IO () +main = stack ["build", "--dry-run"] diff --git a/tests/integration/tests/1884-url-to-tarball/files/.gitignore b/tests/integration/tests/1884-url-to-tarball/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/1884-url-to-tarball/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/1884-url-to-tarball/files/package.yaml b/tests/integration/tests/1884-url-to-tarball/files/package.yaml new file mode 100644 index 0000000000..a410102517 --- /dev/null +++ b/tests/integration/tests/1884-url-to-tarball/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- acme-missiles + +library: {} diff --git a/tests/integration/tests/1884-url-to-tarball/files/stack.yaml b/tests/integration/tests/1884-url-to-tarball/files/stack.yaml new file mode 100644 index 0000000000..515a3914f1 --- /dev/null +++ b/tests/integration/tests/1884-url-to-tarball/files/stack.yaml @@ -0,0 +1,6 @@ +snapshot: ghc-9.10.3 + +extra-deps: +- location: 
https://hackage.haskell.org/package/acme-missiles-0.3/acme-missiles-0.3.tar.gz + sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b + size: 1442 diff --git a/tests/integration/tests/2195-depend-on-exe/Main.hs b/tests/integration/tests/2195-depend-on-exe/Main.hs new file mode 100644 index 0000000000..e78a9e7902 --- /dev/null +++ b/tests/integration/tests/2195-depend-on-exe/Main.hs @@ -0,0 +1,17 @@ +-- Stack reports an error if a package component depends on a package that has +-- no library component. +-- + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = stackErrStderr + ["build", "myPackageB"] + (expectMessage "package provides no library") + +expectMessage :: String -> String -> IO () +expectMessage msg stderr = + unless (msg `isInfixOf` stderr) + (error $ "Expected a warning: \n" ++ show msg) diff --git a/tests/integration/tests/2195-depend-on-exe/files/.gitignore b/tests/integration/tests/2195-depend-on-exe/files/.gitignore new file mode 100644 index 0000000000..f9a6e152d2 --- /dev/null +++ b/tests/integration/tests/2195-depend-on-exe/files/.gitignore @@ -0,0 +1,2 @@ +myPackageA.cabal +myPackageB.cabal diff --git a/tests/integration/tests/2195-depend-on-exe/files/myPackageA/app/Main.hs b/tests/integration/tests/2195-depend-on-exe/files/myPackageA/app/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/2195-depend-on-exe/files/myPackageA/app/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/2195-depend-on-exe/files/myPackageA/package.yaml b/tests/integration/tests/2195-depend-on-exe/files/myPackageA/package.yaml new file mode 100644 index 0000000000..dc117d34c9 --- /dev/null +++ b/tests/integration/tests/2195-depend-on-exe/files/myPackageA/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base + +executables: + myExeA: + 
source-dirs: app + main: Main.hs diff --git a/tests/integration/tests/2195-depend-on-exe/files/myPackageB/package.yaml b/tests/integration/tests/2195-depend-on-exe/files/myPackageB/package.yaml new file mode 100644 index 0000000000..e5f489eebf --- /dev/null +++ b/tests/integration/tests/2195-depend-on-exe/files/myPackageB/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base +- myPackageA + +library: + source-dirs: src diff --git a/tests/integration/tests/2195-depend-on-exe/files/myPackageB/src/Lib.hs b/tests/integration/tests/2195-depend-on-exe/files/myPackageB/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/2195-depend-on-exe/files/myPackageB/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/2195-depend-on-exe/files/stack.yaml b/tests/integration/tests/2195-depend-on-exe/files/stack.yaml new file mode 100644 index 0000000000..68891a8d7c --- /dev/null +++ b/tests/integration/tests/2195-depend-on-exe/files/stack.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +packages: +- myPackageA +- myPackageB diff --git a/tests/integration/tests/2433-ghc-by-version/Main.hs b/tests/integration/tests/2433-ghc-by-version/Main.hs new file mode 100644 index 0000000000..edec7684ea --- /dev/null +++ b/tests/integration/tests/2433-ghc-by-version/Main.hs @@ -0,0 +1,13 @@ +-- If using a 'system' GHC, Stack uses the specified version of GHC on the PATH +-- and not 'ghc' (without a version) on the PATH. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/2433 + +import Control.Exception ( throwIO ) +import Control.Monad ( unless ) +import StackTest +import System.Process ( rawSystem ) + +main :: IO () +main = unless isWindows $ + rawSystem "bash" ["run.sh"] >>= throwIO diff --git a/test/integration/tests/2433-ghc-by-version/files/.gitignore b/tests/integration/tests/2433-ghc-by-version/files/.gitignore similarity index 100% rename from test/integration/tests/2433-ghc-by-version/files/.gitignore rename to tests/integration/tests/2433-ghc-by-version/files/.gitignore diff --git a/test/integration/tests/2433-ghc-by-version/files/fake-path/ghc b/tests/integration/tests/2433-ghc-by-version/files/fake-path/ghc old mode 100755 new mode 100644 similarity index 100% rename from test/integration/tests/2433-ghc-by-version/files/fake-path/ghc rename to tests/integration/tests/2433-ghc-by-version/files/fake-path/ghc diff --git a/test/integration/tests/2433-ghc-by-version/files/fake-path/ghc-pkg b/tests/integration/tests/2433-ghc-by-version/files/fake-path/ghc-pkg old mode 100755 new mode 100644 similarity index 100% rename from test/integration/tests/2433-ghc-by-version/files/fake-path/ghc-pkg rename to tests/integration/tests/2433-ghc-by-version/files/fake-path/ghc-pkg diff --git a/tests/integration/tests/2433-ghc-by-version/files/run.sh b/tests/integration/tests/2433-ghc-by-version/files/run.sh new file mode 100644 index 0000000000..a7d103d1d3 --- /dev/null +++ b/tests/integration/tests/2433-ghc-by-version/files/run.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +set -exuo pipefail + +export PATH=$(pwd)/fake-path:$("$STACK_EXE" path --snapshot ghc-9.10.3 --compiler-bin):$PATH +export STACK_ROOT=$(pwd)/fake-root + +which ghc + +"$STACK_EXE" --system-ghc --no-install-ghc --snapshot ghc-9.10.3 ghc -- --info +"$STACK_EXE" --system-ghc --no-install-ghc --snapshot ghc-9.10.3 runghc test.hs diff --git a/test/integration/tests/2433-ghc-by-version/files/foo.hs 
b/tests/integration/tests/2433-ghc-by-version/files/test.hs similarity index 100% rename from test/integration/tests/2433-ghc-by-version/files/foo.hs rename to tests/integration/tests/2433-ghc-by-version/files/test.hs diff --git a/tests/integration/tests/2465-init-no-packages/Main.hs b/tests/integration/tests/2465-init-no-packages/Main.hs new file mode 100644 index 0000000000..3000a9dbc7 --- /dev/null +++ b/tests/integration/tests/2465-init-no-packages/Main.hs @@ -0,0 +1,13 @@ +-- Stack can initialise a project directory that contains no project packages. +-- +-- See: https://github.com/commercialhaskell/stack/issues/2465 + +import Control.Monad ( unless ) +import StackTest +import System.Directory ( doesFileExist ) + +main :: IO () +main = do + stack ["--snapshot", "ghc-9.10.3", "init"] + exists <- doesFileExist "stack.yaml" + unless exists $ error "stack.yaml not created!" diff --git a/test/integration/tests/1198-multiple-exes-with-same-name/files/.gitignore b/tests/integration/tests/2465-init-no-packages/files/.gitignore similarity index 100% rename from test/integration/tests/1198-multiple-exes-with-same-name/files/.gitignore rename to tests/integration/tests/2465-init-no-packages/files/.gitignore diff --git a/tests/integration/tests/2643-copy-compiler-tool/Main.hs b/tests/integration/tests/2643-copy-compiler-tool/Main.hs new file mode 100644 index 0000000000..cbb05b4819 --- /dev/null +++ b/tests/integration/tests/2643-copy-compiler-tool/Main.hs @@ -0,0 +1,64 @@ +import StackTest +import System.Directory +import Control.Monad (unless) + +main :: IO () +main = do + -- init + removeFileIgnore "stack.yaml" + removeDirIgnore ".stack-work" + stack ["init", defaultSnapshotArg] + + -- place to throw some exes + removeDirIgnore "binny" + createDirectory "binny" + + -- check assumptions on exec and the build flags and clean + stack ["build", "--flag", "copy-compiler-tool-test:build-baz"] + stack ["exec", "--", "baz-exe" ++ exeExt] + stackErr ["exec", "--", "bar-exe" ++ 
exeExt] + stackCleanFull + -- See #4936. The Windows condition is because `stackCleanFull` may have + -- failed. + unless isWindows $ stackErr ["exec", "--", "baz-exe" ++ exeExt] + + -- install one exe normally + stack ["install", + "--local-bin-path", "./binny", + "--flag", "*:build-foo" + ] + + -- and install two compiler-tools, opposite ways + -- (build or install) + stack ["build", + "--local-bin-path", "./binny", + "--copy-compiler-tool", + "--flag", "*:build-bar" + ] + stack ["install", + "--local-bin-path", "./binny", + "--copy-compiler-tool", + "--flag", "*:build-baz" + ] + + -- nuke the built things that go in .stack-work/, so we can test if + -- the installed ones exist for sure + stackCleanFull + + -- bar and baz were installed as compiler tools, should work fine + stack ["exec", "--", "bar-exe" ++ exeExt] + stack ["exec", "--", "baz-exe" ++ exeExt] + + -- foo was installed as a normal exe (in .binny/, which can't be on PATH), + -- so shouldn't + -- See #4936. The Windows condition is because `stackCleanFull` may have + -- failed. 
+ unless isWindows $ stackErr ["exec", "--", "foo-exe" ++ exeExt] + + -- check existences make sense + doesExist $ "./binny/foo-exe" ++ exeExt + doesNotExist $ "./binny/bar-exe" ++ exeExt + doesNotExist $ "./binny/baz-exe" ++ exeExt + + -- just check that this exists + stack ["path", "--compiler-tools-bin"] diff --git a/test/integration/tests/2643-copy-compiler-tool/files/.gitignore b/tests/integration/tests/2643-copy-compiler-tool/files/.gitignore similarity index 100% rename from test/integration/tests/2643-copy-compiler-tool/files/.gitignore rename to tests/integration/tests/2643-copy-compiler-tool/files/.gitignore diff --git a/test/integration/tests/2643-copy-compiler-tool/files/Bar.hs b/tests/integration/tests/2643-copy-compiler-tool/files/Bar.hs similarity index 100% rename from test/integration/tests/2643-copy-compiler-tool/files/Bar.hs rename to tests/integration/tests/2643-copy-compiler-tool/files/Bar.hs diff --git a/test/integration/tests/2643-copy-compiler-tool/files/Baz.hs b/tests/integration/tests/2643-copy-compiler-tool/files/Baz.hs similarity index 100% rename from test/integration/tests/2643-copy-compiler-tool/files/Baz.hs rename to tests/integration/tests/2643-copy-compiler-tool/files/Baz.hs diff --git a/test/integration/tests/2643-copy-compiler-tool/files/Foo.hs b/tests/integration/tests/2643-copy-compiler-tool/files/Foo.hs similarity index 100% rename from test/integration/tests/2643-copy-compiler-tool/files/Foo.hs rename to tests/integration/tests/2643-copy-compiler-tool/files/Foo.hs diff --git a/test/integration/tests/2643-copy-compiler-tool/files/LICENSE b/tests/integration/tests/2643-copy-compiler-tool/files/LICENSE similarity index 100% rename from test/integration/tests/2643-copy-compiler-tool/files/LICENSE rename to tests/integration/tests/2643-copy-compiler-tool/files/LICENSE diff --git a/test/integration/tests/1337-unicode-everywhere/files/Setup.hs b/tests/integration/tests/2643-copy-compiler-tool/files/Setup.hs similarity index 100% 
rename from test/integration/tests/1337-unicode-everywhere/files/Setup.hs rename to tests/integration/tests/2643-copy-compiler-tool/files/Setup.hs diff --git a/test/integration/tests/2643-copy-compiler-tool/files/copy-compiler-tool-test.cabal b/tests/integration/tests/2643-copy-compiler-tool/files/copy-compiler-tool-test.cabal similarity index 100% rename from test/integration/tests/2643-copy-compiler-tool/files/copy-compiler-tool-test.cabal rename to tests/integration/tests/2643-copy-compiler-tool/files/copy-compiler-tool-test.cabal diff --git a/tests/integration/tests/2781-shadow-bug/Main.hs b/tests/integration/tests/2781-shadow-bug/Main.hs new file mode 100644 index 0000000000..53c215393b --- /dev/null +++ b/tests/integration/tests/2781-shadow-bug/Main.hs @@ -0,0 +1,13 @@ +-- Stack rebuilds a benchmark when an indirect dependency changes. +-- +-- See: https://github.com/commercialhaskell/stack/issues/2781 + +import StackTest +import System.Directory ( createDirectoryIfMissing ) + +main :: IO () +main = do + copy "myPackageB/v1/MyPackageB-v1.hs" "myPackageB/src/MyPackageB.hs" + stack ["bench"] + copy "myPackageB/v2/MyPackageB-v2.hs" "myPackageB/src/MyPackageB.hs" + stack ["bench"] diff --git a/tests/integration/tests/2781-shadow-bug/files/.gitignore b/tests/integration/tests/2781-shadow-bug/files/.gitignore new file mode 100644 index 0000000000..f9a6e152d2 --- /dev/null +++ b/tests/integration/tests/2781-shadow-bug/files/.gitignore @@ -0,0 +1,2 @@ +myPackageA.cabal +myPackageB.cabal diff --git a/test/integration/tests/4706-ignore-ghc-env-files/files/Main.hs b/tests/integration/tests/2781-shadow-bug/files/myPackageA/bench/bench.hs similarity index 100% rename from test/integration/tests/4706-ignore-ghc-env-files/files/Main.hs rename to tests/integration/tests/2781-shadow-bug/files/myPackageA/bench/bench.hs diff --git a/tests/integration/tests/2781-shadow-bug/files/myPackageA/package.yaml b/tests/integration/tests/2781-shadow-bug/files/myPackageA/package.yaml new 
file mode 100644 index 0000000000..eeb7c4fcc3 --- /dev/null +++ b/tests/integration/tests/2781-shadow-bug/files/myPackageA/package.yaml @@ -0,0 +1,18 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base + +library: + source-dirs: src + dependencies: + - myPackageB + +benchmarks: + bench: + source-dirs: bench + main: bench.hs + dependencies: + - myPackageA diff --git a/tests/integration/tests/2781-shadow-bug/files/myPackageA/src/MyPackageA.hs b/tests/integration/tests/2781-shadow-bug/files/myPackageA/src/MyPackageA.hs new file mode 100644 index 0000000000..16b38c3d34 --- /dev/null +++ b/tests/integration/tests/2781-shadow-bug/files/myPackageA/src/MyPackageA.hs @@ -0,0 +1,8 @@ +module MyPackageA + ( funcA + ) where + +import MyPackageB ( funcB ) + +funcA :: IO () +funcA = funcB diff --git a/tests/integration/tests/2781-shadow-bug/files/myPackageB/package.yaml b/tests/integration/tests/2781-shadow-bug/files/myPackageB/package.yaml new file mode 100644 index 0000000000..03b4ec87d3 --- /dev/null +++ b/tests/integration/tests/2781-shadow-bug/files/myPackageB/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/2781-shadow-bug/files/myPackageB/src/MyPackageB.hs b/tests/integration/tests/2781-shadow-bug/files/myPackageB/src/MyPackageB.hs new file mode 100644 index 0000000000..e50e86ed07 --- /dev/null +++ b/tests/integration/tests/2781-shadow-bug/files/myPackageB/src/MyPackageB.hs @@ -0,0 +1 @@ +To be replaced. 
diff --git a/tests/integration/tests/2781-shadow-bug/files/myPackageB/v1/MyPackageB-v1.hs b/tests/integration/tests/2781-shadow-bug/files/myPackageB/v1/MyPackageB-v1.hs new file mode 100644 index 0000000000..38ae9f9985 --- /dev/null +++ b/tests/integration/tests/2781-shadow-bug/files/myPackageB/v1/MyPackageB-v1.hs @@ -0,0 +1,6 @@ +module MyPackageB + ( funcB + ) where + +funcB :: IO () +funcB = putStrLn "version 1" diff --git a/tests/integration/tests/2781-shadow-bug/files/myPackageB/v2/MyPackageB-v2.hs b/tests/integration/tests/2781-shadow-bug/files/myPackageB/v2/MyPackageB-v2.hs new file mode 100644 index 0000000000..a4584de18e --- /dev/null +++ b/tests/integration/tests/2781-shadow-bug/files/myPackageB/v2/MyPackageB-v2.hs @@ -0,0 +1,6 @@ +module MyPackageB + ( funcB + ) where + +funcB :: IO () +funcB = putStrLn "version 2" diff --git a/tests/integration/tests/2781-shadow-bug/files/stack.yaml b/tests/integration/tests/2781-shadow-bug/files/stack.yaml new file mode 100644 index 0000000000..68891a8d7c --- /dev/null +++ b/tests/integration/tests/2781-shadow-bug/files/stack.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +packages: +- myPackageA +- myPackageB diff --git a/tests/integration/tests/2997-ensure-warnings-output/Main.hs b/tests/integration/tests/2997-ensure-warnings-output/Main.hs new file mode 100644 index 0000000000..7b0d615165 --- /dev/null +++ b/tests/integration/tests/2997-ensure-warnings-output/Main.hs @@ -0,0 +1,14 @@ +-- Stack dumps logs with GHC warnings for multi-package projects and +-- non-interleaved output. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/2997 + +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stackCheckStderr ["build", "--no-interleaved-output"] $ \str -> + if "no type signature" `isInfixOf` str + then pure () + else error "Warnings are not being shown" diff --git a/tests/integration/tests/2997-ensure-warnings-output/files/.gitignore b/tests/integration/tests/2997-ensure-warnings-output/files/.gitignore new file mode 100644 index 0000000000..f9a6e152d2 --- /dev/null +++ b/tests/integration/tests/2997-ensure-warnings-output/files/.gitignore @@ -0,0 +1,2 @@ +myPackageA.cabal +myPackageB.cabal diff --git a/tests/integration/tests/2997-ensure-warnings-output/files/myPackageA/package.yaml b/tests/integration/tests/2997-ensure-warnings-output/files/myPackageA/package.yaml new file mode 100644 index 0000000000..5cb84d72e6 --- /dev/null +++ b/tests/integration/tests/2997-ensure-warnings-output/files/myPackageA/package.yaml @@ -0,0 +1,12 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base + +ghc-options: +- -Wall + +library: + source-dirs: src diff --git a/tests/integration/tests/2997-ensure-warnings-output/files/myPackageA/src/Lib.hs b/tests/integration/tests/2997-ensure-warnings-output/files/myPackageA/src/Lib.hs new file mode 100644 index 0000000000..2bcd1d55c1 --- /dev/null +++ b/tests/integration/tests/2997-ensure-warnings-output/files/myPackageA/src/Lib.hs @@ -0,0 +1,3 @@ +module Lib where + +func = () diff --git a/tests/integration/tests/2997-ensure-warnings-output/files/myPackageB/package.yaml b/tests/integration/tests/2997-ensure-warnings-output/files/myPackageB/package.yaml new file mode 100644 index 0000000000..77d87303d7 --- /dev/null +++ b/tests/integration/tests/2997-ensure-warnings-output/files/myPackageB/package.yaml @@ -0,0 +1,12 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base + +ghc-options: +- -Wall + +library: + source-dirs: src diff --git 
a/tests/integration/tests/2997-ensure-warnings-output/files/myPackageB/src/Lib.hs b/tests/integration/tests/2997-ensure-warnings-output/files/myPackageB/src/Lib.hs new file mode 100644 index 0000000000..2bcd1d55c1 --- /dev/null +++ b/tests/integration/tests/2997-ensure-warnings-output/files/myPackageB/src/Lib.hs @@ -0,0 +1,3 @@ +module Lib where + +func = () diff --git a/tests/integration/tests/2997-ensure-warnings-output/files/stack.yaml b/tests/integration/tests/2997-ensure-warnings-output/files/stack.yaml new file mode 100644 index 0000000000..46f3e0977c --- /dev/null +++ b/tests/integration/tests/2997-ensure-warnings-output/files/stack.yaml @@ -0,0 +1,7 @@ +snapshot: ghc-9.10.3 + +packages: +- myPackageA +- myPackageB + +dump-logs: warning diff --git a/tests/integration/tests/32-unlisted-module/Main.hs b/tests/integration/tests/32-unlisted-module/Main.hs new file mode 100644 index 0000000000..bc0feae92f --- /dev/null +++ b/tests/integration/tests/32-unlisted-module/Main.hs @@ -0,0 +1,30 @@ +-- Stack should rebuild a package when files required for building are dirty, +-- even if the files are not listed in the package's Cabal file. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/32 + +import Control.Concurrent ( threadDelay ) +import StackTest + +main :: IO () +main = do + copy "app/Unlisted_OK.hs" "app/Unlisted.hs" + copy "embed_OK.txt" "embed.txt" + stack ["build"] + pause + copy "app/Unlisted_FAIL.hs" "app/Unlisted.hs" + stackErr ["build"] + pause + copy "app/Unlisted_OK.hs" "app/Unlisted.hs" + stack ["build"] + stack ["exec", "fail-if-fail"] + pause + copy "embed_FAIL.txt" "embed.txt" + stack ["build"] + stackErr ["exec", "fail-if-fail"] + pause + copy "embed_OK.txt" "embed.txt" + stack ["build"] + stack ["exec", "fail-if-fail"] + where + pause = threadDelay 1000000 diff --git a/tests/integration/tests/32-unlisted-module/files/.gitignore b/tests/integration/tests/32-unlisted-module/files/.gitignore new file mode 100644 index 0000000000..5983ce17cb --- /dev/null +++ b/tests/integration/tests/32-unlisted-module/files/.gitignore @@ -0,0 +1,3 @@ +embed.txt +app/Unlisted.hs +myPackage.cabal diff --git a/tests/integration/tests/32-unlisted-module/files/app/Main.hs b/tests/integration/tests/32-unlisted-module/files/app/Main.hs new file mode 100644 index 0000000000..6328508c11 --- /dev/null +++ b/tests/integration/tests/32-unlisted-module/files/app/Main.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE OverloadedStrings #-} + +module Main where + +import Control.Monad ( when ) +import qualified Data.ByteString.Char8 as C8 +import Data.FileEmbed ( embedFile ) +import Unlisted ( unlistedFunc ) + +main :: IO () +main = do + unlistedFunc + when ("FAIL" `C8.isPrefixOf` embedded) $ error "embedded contains FAIL" + +embedded = $(embedFile "embed.txt") diff --git a/tests/integration/tests/32-unlisted-module/files/app/Unlisted_FAIL.hs b/tests/integration/tests/32-unlisted-module/files/app/Unlisted_FAIL.hs new file mode 100644 index 0000000000..35b46d7b50 --- /dev/null +++ b/tests/integration/tests/32-unlisted-module/files/app/Unlisted_FAIL.hs @@ -0,0 +1,3 @@ +-- | Version 
of Unlisted with a different export that causes Main to fail to +-- compile. +module Unlisted where diff --git a/tests/integration/tests/32-unlisted-module/files/app/Unlisted_OK.hs b/tests/integration/tests/32-unlisted-module/files/app/Unlisted_OK.hs new file mode 100644 index 0000000000..000f75684c --- /dev/null +++ b/tests/integration/tests/32-unlisted-module/files/app/Unlisted_OK.hs @@ -0,0 +1,5 @@ +-- | Version of Unlisted that does not cause Main to fail to compile. +module Unlisted where + +unlistedFunc :: IO () +unlistedFunc = pure () diff --git a/test/integration/tests/32-unlisted-module/files/embed_FAIL.txt b/tests/integration/tests/32-unlisted-module/files/embed_FAIL.txt similarity index 100% rename from test/integration/tests/32-unlisted-module/files/embed_FAIL.txt rename to tests/integration/tests/32-unlisted-module/files/embed_FAIL.txt diff --git a/test/integration/tests/32-unlisted-module/files/embed_OK.txt b/tests/integration/tests/32-unlisted-module/files/embed_OK.txt similarity index 100% rename from test/integration/tests/32-unlisted-module/files/embed_OK.txt rename to tests/integration/tests/32-unlisted-module/files/embed_OK.txt diff --git a/tests/integration/tests/32-unlisted-module/files/package.yaml b/tests/integration/tests/32-unlisted-module/files/package.yaml new file mode 100644 index 0000000000..bf5bf390a9 --- /dev/null +++ b/tests/integration/tests/32-unlisted-module/files/package.yaml @@ -0,0 +1,14 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- bytestring +- file-embed + +executables: + fail-if-fail: + source-dirs: app + main: Main.hs + other-modules: [] diff --git a/tests/integration/tests/32-unlisted-module/files/stack.yaml b/tests/integration/tests/32-unlisted-module/files/stack.yaml new file mode 100644 index 0000000000..c292f63385 --- /dev/null +++ b/tests/integration/tests/32-unlisted-module/files/stack.yaml @@ -0,0 +1 @@ +snapshot: lts-24.37 diff --git 
a/tests/integration/tests/3315-multi-ghc-options/Main.hs b/tests/integration/tests/3315-multi-ghc-options/Main.hs new file mode 100644 index 0000000000..53fde2b204 --- /dev/null +++ b/tests/integration/tests/3315-multi-ghc-options/Main.hs @@ -0,0 +1,10 @@ +-- Stack allows one or more GHC options to be specified on the command line. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3315 + +import StackTest.Repl + +main :: IO () +main = do + stack ["build", "--ghc-options=-ddump-simpl -ddump-asm -DVARIABLE_A -DVARIABLE_B"] + stackRepl ["--ghc-options=-ddump-simpl -ddump-asm"] (pure ()) diff --git a/tests/integration/tests/3315-multi-ghc-options/files/.gitignore b/tests/integration/tests/3315-multi-ghc-options/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3315-multi-ghc-options/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3315-multi-ghc-options/files/package.yaml b/tests/integration/tests/3315-multi-ghc-options/files/package.yaml new file mode 100644 index 0000000000..a7305a77cf --- /dev/null +++ b/tests/integration/tests/3315-multi-ghc-options/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/3315-multi-ghc-options/files/src/Lib.hs b/tests/integration/tests/3315-multi-ghc-options/files/src/Lib.hs new file mode 100644 index 0000000000..71ccd7f83b --- /dev/null +++ b/tests/integration/tests/3315-multi-ghc-options/files/src/Lib.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE CPP #-} + +module Lib where + +-- Avoid problems with CPP and HLint +#ifndef __HLINT__ + +#ifndef VARIABLE_A +#error VARIABLE_A isn't defined +#endif + +#ifndef VARIABLE_B +#error VARIABLE_B isn't defined +#endif + +#endif diff --git a/tests/integration/tests/3315-multi-ghc-options/files/stack.yaml b/tests/integration/tests/3315-multi-ghc-options/files/stack.yaml new file mode 
100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/3315-multi-ghc-options/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/335-multi-package-flags/Main.hs b/tests/integration/tests/335-multi-package-flags/Main.hs new file mode 100644 index 0000000000..4e6aa05311 --- /dev/null +++ b/tests/integration/tests/335-multi-package-flags/Main.hs @@ -0,0 +1,13 @@ +-- Stack should be able to specify Cabal flags for all packages that have a +-- Cabal flag of the same name. +-- +-- See: https://github.com/commercialhaskell/stack/issues/335 + +import StackTest + +main :: IO () +main = do + stackErr ["build"] + stack ["build", "--flag", "myPackage:necessary"] + stackErr ["build"] + stack ["build", "--flag", "*:necessary"] diff --git a/tests/integration/tests/335-multi-package-flags/files/.gitignore b/tests/integration/tests/335-multi-package-flags/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/335-multi-package-flags/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/335-multi-package-flags/files/package.yaml b/tests/integration/tests/335-multi-package-flags/files/package.yaml new file mode 100644 index 0000000000..973e01a4f6 --- /dev/null +++ b/tests/integration/tests/335-multi-package-flags/files/package.yaml @@ -0,0 +1,18 @@ +spec-version: 0.36.0 + +name: myPackage + +flags: + necessary: + description: The package will not build unless this flag is true. 
+ manual: true + default: false + +dependencies: +- base + +library: + source-dirs: src + when: + - condition: flag(necessary) + cpp-options: -DWORK diff --git a/tests/integration/tests/335-multi-package-flags/files/src/Lib.hs b/tests/integration/tests/335-multi-package-flags/files/src/Lib.hs new file mode 100644 index 0000000000..eccdf60245 --- /dev/null +++ b/tests/integration/tests/335-multi-package-flags/files/src/Lib.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE CPP #-} + +module Lib where + +-- Avoid problems with CPP and HLint +#ifndef __HLINT__ + +#if !WORK +#error Not going to work, sorry +#endif + +#endif diff --git a/tests/integration/tests/335-multi-package-flags/files/stack.yaml b/tests/integration/tests/335-multi-package-flags/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/335-multi-package-flags/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/3390-unbuildable-test/Main.hs b/tests/integration/tests/3390-unbuildable-test/Main.hs new file mode 100644 index 0000000000..50f4eba276 --- /dev/null +++ b/tests/integration/tests/3390-unbuildable-test/Main.hs @@ -0,0 +1,8 @@ +-- Stack ignores test suites that are not buildable. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/3390 + +import StackTest + +main :: IO () +main = stack ["test"] diff --git a/tests/integration/tests/3390-unbuildable-test/files/.gitignore b/tests/integration/tests/3390-unbuildable-test/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3390-unbuildable-test/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3390-unbuildable-test/files/package.yaml b/tests/integration/tests/3390-unbuildable-test/files/package.yaml new file mode 100644 index 0000000000..da5df8ac33 --- /dev/null +++ b/tests/integration/tests/3390-unbuildable-test/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +tests: + myTest: + source-dirs: test + main: Main.hs + buildable: false diff --git a/tests/integration/tests/3390-unbuildable-test/files/stack.yaml b/tests/integration/tests/3390-unbuildable-test/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/3390-unbuildable-test/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/test/integration/tests/3390-unbuildable-test/files/test/Spec.hs b/tests/integration/tests/3390-unbuildable-test/files/test/Main.hs similarity index 100% rename from test/integration/tests/3390-unbuildable-test/files/test/Spec.hs rename to tests/integration/tests/3390-unbuildable-test/files/test/Main.hs diff --git a/tests/integration/tests/3431-precompiled-works/Main.hs b/tests/integration/tests/3431-precompiled-works/Main.hs new file mode 100644 index 0000000000..33d2ef971a --- /dev/null +++ b/tests/integration/tests/3431-precompiled-works/Main.hs @@ -0,0 +1,14 @@ +-- Stack uses pre-compiled immutable packages where it can. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/3431 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stack ["build", "random-1.1", "--stack-yaml", "custom1/stack.yaml"] + stackCheckStderr ["build", "random-1.1", "--stack-yaml", "custom2/stack.yaml"] $ \out -> do + print out + unless ("precompiled" `isInfixOf` out) $ error "Didn't use precompiled!" diff --git a/tests/integration/tests/3431-precompiled-works/files/custom1/custom1.yaml b/tests/integration/tests/3431-precompiled-works/files/custom1/custom1.yaml new file mode 100644 index 0000000000..5223c8b3e3 --- /dev/null +++ b/tests/integration/tests/3431-precompiled-works/files/custom1/custom1.yaml @@ -0,0 +1,4 @@ +name: custom1 +snapshot: ghc-9.10.3 +packages: +- acme-missiles-0.3 diff --git a/tests/integration/tests/3431-precompiled-works/files/custom1/stack.yaml b/tests/integration/tests/3431-precompiled-works/files/custom1/stack.yaml new file mode 100644 index 0000000000..601e57ecbc --- /dev/null +++ b/tests/integration/tests/3431-precompiled-works/files/custom1/stack.yaml @@ -0,0 +1,2 @@ +snapshot: custom1.yaml +packages: [] diff --git a/tests/integration/tests/3431-precompiled-works/files/custom2/custom2.yaml b/tests/integration/tests/3431-precompiled-works/files/custom2/custom2.yaml new file mode 100644 index 0000000000..566739fa3e --- /dev/null +++ b/tests/integration/tests/3431-precompiled-works/files/custom2/custom2.yaml @@ -0,0 +1,4 @@ +name: custom2 +snapshot: ghc-9.10.3 +packages: +- acme-missiles-0.2 diff --git a/tests/integration/tests/3431-precompiled-works/files/custom2/stack.yaml b/tests/integration/tests/3431-precompiled-works/files/custom2/stack.yaml new file mode 100644 index 0000000000..c43f268e81 --- /dev/null +++ b/tests/integration/tests/3431-precompiled-works/files/custom2/stack.yaml @@ -0,0 +1,2 @@ +snapshot: custom2.yaml +packages: [] diff --git a/tests/integration/tests/345-override-bytestring/Main.hs 
b/tests/integration/tests/345-override-bytestring/Main.hs new file mode 100644 index 0000000000..1e1d847024 --- /dev/null +++ b/tests/integration/tests/345-override-bytestring/Main.hs @@ -0,0 +1,9 @@ +-- An extra-dep in a project-level configuration file should be able to shadow a +-- GHC boot package such as bytestring. +-- +-- See: https://github.com/commercialhaskell/stack/issues/345 + +import StackTest + +main :: IO () +main = stack ["build"] diff --git a/tests/integration/tests/345-override-bytestring/files/.gitignore b/tests/integration/tests/345-override-bytestring/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/345-override-bytestring/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/345-override-bytestring/files/package.yaml b/tests/integration/tests/345-override-bytestring/files/package.yaml new file mode 100644 index 0000000000..4259e06ace --- /dev/null +++ b/tests/integration/tests/345-override-bytestring/files/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- bytestring + +library: + source-dirs: src diff --git a/tests/integration/tests/345-override-bytestring/files/src/Lib.hs b/tests/integration/tests/345-override-bytestring/files/src/Lib.hs new file mode 100644 index 0000000000..c06ea6cd1f --- /dev/null +++ b/tests/integration/tests/345-override-bytestring/files/src/Lib.hs @@ -0,0 +1,3 @@ +module Lib where + +import Data.ByteString () diff --git a/tests/integration/tests/345-override-bytestring/files/stack.yaml b/tests/integration/tests/345-override-bytestring/files/stack.yaml new file mode 100644 index 0000000000..8d391e6d16 --- /dev/null +++ b/tests/integration/tests/345-override-bytestring/files/stack.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +extra-deps: +# GHC 9.10.3 comes with bytestring-0.12.2.0 +- bytestring-0.12.1.0 diff --git a/tests/integration/tests/3574-extra-dep-local/Main.hs 
b/tests/integration/tests/3574-extra-dep-local/Main.hs new file mode 100644 index 0000000000..0c73c77b2e --- /dev/null +++ b/tests/integration/tests/3574-extra-dep-local/Main.hs @@ -0,0 +1,9 @@ +-- Stack can target a local extra-dep and distinguishes local extra-deps from +-- local packages, when applying GHC options to local packages. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3574 + +import StackTest + +main :: IO () +main = stack ["build", "myPackage"] diff --git a/tests/integration/tests/3574-extra-dep-local/files/.gitignore b/tests/integration/tests/3574-extra-dep-local/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3574-extra-dep-local/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3574-extra-dep-local/files/myPackage/package.yaml b/tests/integration/tests/3574-extra-dep-local/files/myPackage/package.yaml new file mode 100644 index 0000000000..a7305a77cf --- /dev/null +++ b/tests/integration/tests/3574-extra-dep-local/files/myPackage/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/3574-extra-dep-local/files/myPackage/src/Lib.hs b/tests/integration/tests/3574-extra-dep-local/files/myPackage/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/3574-extra-dep-local/files/myPackage/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/3574-extra-dep-local/files/stack.yaml b/tests/integration/tests/3574-extra-dep-local/files/stack.yaml new file mode 100644 index 0000000000..991c6d6ede --- /dev/null +++ b/tests/integration/tests/3574-extra-dep-local/files/stack.yaml @@ -0,0 +1,9 @@ +snapshot: ghc-9.10.3 + +packages: [] + +extra-deps: +- myPackage + +ghc-options: + $locals: -dummy diff --git a/tests/integration/tests/3591-cabal-warnings-once/Main.hs 
b/tests/integration/tests/3591-cabal-warnings-once/Main.hs new file mode 100644 index 0000000000..c094b33158 --- /dev/null +++ b/tests/integration/tests/3591-cabal-warnings-once/Main.hs @@ -0,0 +1,14 @@ +-- Stack warns about unknown fields in Cabal files, but only once. +-- +-- https://github.com/commercialhaskell/stack/issues/3591 + +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stackCheckStderr ["build", "--dry-run"] $ \str -> + case filter ("unknown-cabal-field-name" `isInfixOf`) (lines str) of + [] -> error "unknown-Cabal-field-name didn't appear once" + [_] -> pure () + _:_:_ -> error "unknown-Cabal-field-name appeared multiple times" diff --git a/tests/integration/tests/3591-cabal-warnings-once/files/.gitignore b/tests/integration/tests/3591-cabal-warnings-once/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3591-cabal-warnings-once/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3591-cabal-warnings-once/files/package.yaml b/tests/integration/tests/3591-cabal-warnings-once/files/package.yaml new file mode 100644 index 0000000000..a3a9b0e627 --- /dev/null +++ b/tests/integration/tests/3591-cabal-warnings-once/files/package.yaml @@ -0,0 +1,12 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + +verbatim: + unknown-Cabal-field-name: makes a warning! 
diff --git a/tests/integration/tests/3591-cabal-warnings-once/files/src/Lib.hs b/tests/integration/tests/3591-cabal-warnings-once/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/3591-cabal-warnings-once/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/3591-cabal-warnings-once/files/stack.yaml b/tests/integration/tests/3591-cabal-warnings-once/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/3591-cabal-warnings-once/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/3631-build-http2/Main.hs b/tests/integration/tests/3631-build-http2/Main.hs new file mode 100644 index 0000000000..56518962a8 --- /dev/null +++ b/tests/integration/tests/3631-build-http2/Main.hs @@ -0,0 +1,11 @@ +-- Stack can build the http2 package. +-- +-- +-- https://github.com/commercialhaskell/stack/issues/3631 + +import StackTest + +main :: IO () +main = do + stack ["build", defaultSnapshotArg, "--dry-run", "http2"] + stack ["build", defaultSnapshotArg, "http2"] diff --git a/tests/integration/tests/365-invalid-success/Main.hs b/tests/integration/tests/365-invalid-success/Main.hs new file mode 100644 index 0000000000..64a1e640a0 --- /dev/null +++ b/tests/integration/tests/365-invalid-success/Main.hs @@ -0,0 +1,17 @@ +-- If Stack fails to build a package once, it should fail to build it +-- (unchanged) a second time. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/365 + +import StackTest + +main :: IO () +main = do + copy "src/Lib_FAIL.hs" "src/Lib.hs" + stackErr ["build"] + stackErr ["build"] + copy "src/Lib_OK.hs" "src/Lib.hs" + stack ["build"] + copy "src/Lib_FAIL.hs" "src/Lib.hs" + stackErr ["build"] + stackErr ["build"] diff --git a/tests/integration/tests/365-invalid-success/files/.gitignore b/tests/integration/tests/365-invalid-success/files/.gitignore new file mode 100644 index 0000000000..bf0c94dd4e --- /dev/null +++ b/tests/integration/tests/365-invalid-success/files/.gitignore @@ -0,0 +1,2 @@ +src/Lib.hs +myPackage.cabal diff --git a/tests/integration/tests/365-invalid-success/files/package.yaml b/tests/integration/tests/365-invalid-success/files/package.yaml new file mode 100644 index 0000000000..bb02298ab5 --- /dev/null +++ b/tests/integration/tests/365-invalid-success/files/package.yaml @@ -0,0 +1,12 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base + +library: + source-dirs: src + exposed-modules: + - Lib + other-modules: [] diff --git a/tests/integration/tests/365-invalid-success/files/src/Lib_FAIL.hs b/tests/integration/tests/365-invalid-success/files/src/Lib_FAIL.hs new file mode 100644 index 0000000000..764550ce44 --- /dev/null +++ b/tests/integration/tests/365-invalid-success/files/src/Lib_FAIL.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE CPP #-} + +-- | Version of Lib that does not compile. +module Lib where + +-- Avoid problems with CPP and HLint +#ifndef __HLINT__ + +#error Not going to compile, sorry + +#endif diff --git a/tests/integration/tests/365-invalid-success/files/src/Lib_OK.hs b/tests/integration/tests/365-invalid-success/files/src/Lib_OK.hs new file mode 100644 index 0000000000..8745d54bd6 --- /dev/null +++ b/tests/integration/tests/365-invalid-success/files/src/Lib_OK.hs @@ -0,0 +1,2 @@ +-- | Version of Lib that does compile. 
+module Lib where diff --git a/tests/integration/tests/365-invalid-success/files/stack.yaml b/tests/integration/tests/365-invalid-success/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/365-invalid-success/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/366-non-root-dir/Main.hs b/tests/integration/tests/366-non-root-dir/Main.hs new file mode 100644 index 0000000000..1a7697a78a --- /dev/null +++ b/tests/integration/tests/366-non-root-dir/Main.hs @@ -0,0 +1,12 @@ +-- Stack builds when commanded from a subdirectory of the project directory. +-- +-- See: https://github.com/commercialhaskell/stack/issues/366 + +import StackTest +import System.Directory ( setCurrentDirectory ) + +main :: IO () +main = do + setCurrentDirectory "app" + stack ["build"] + stack ["exec", "myExe"] diff --git a/tests/integration/tests/366-non-root-dir/files/.gitignore b/tests/integration/tests/366-non-root-dir/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/366-non-root-dir/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/366-non-root-dir/files/app/Main.hs b/tests/integration/tests/366-non-root-dir/files/app/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/366-non-root-dir/files/app/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/366-non-root-dir/files/package.yaml b/tests/integration/tests/366-non-root-dir/files/package.yaml new file mode 100644 index 0000000000..ee1f9f8afa --- /dev/null +++ b/tests/integration/tests/366-non-root-dir/files/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +executables: + myExe: + source-dirs: app + main: Main.hs diff --git a/tests/integration/tests/366-non-root-dir/files/stack.yaml 
b/tests/integration/tests/366-non-root-dir/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/366-non-root-dir/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/3685-config-yaml-for-allow-newer/Main.hs b/tests/integration/tests/3685-config-yaml-for-allow-newer/Main.hs new file mode 100644 index 0000000000..09d8c46d6e --- /dev/null +++ b/tests/integration/tests/3685-config-yaml-for-allow-newer/Main.hs @@ -0,0 +1,21 @@ +-- Stack advises the use of allow-newer in a configuration file if the package +-- versions needed are not the version in the snapshot. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3685 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +planRecommendation :: String +planRecommendation = "To ignore all version constraints" + +main :: IO () +main = + -- intero-0.1.23 chosen because it depends on ghc >=7.8 && <8.2.2. + stackErrStderr ["install", "intero-0.1.23"] (expectMessage planRecommendation) + +expectMessage :: String -> String -> IO () +expectMessage msg stderr = do + unless (words msg `isInfixOf` words stderr) $ + error $ "Expected a recommendation: \n" ++ show msg diff --git a/tests/integration/tests/3685-config-yaml-for-allow-newer/files/stack.yaml b/tests/integration/tests/3685-config-yaml-for-allow-newer/files/stack.yaml new file mode 100644 index 0000000000..776ef68d10 --- /dev/null +++ b/tests/integration/tests/3685-config-yaml-for-allow-newer/files/stack.yaml @@ -0,0 +1,3 @@ +snapshot: lts-24.37 + +packages: [] diff --git a/tests/integration/tests/370-invalid-setup-hs/Main.hs b/tests/integration/tests/370-invalid-setup-hs/Main.hs new file mode 100644 index 0000000000..08d1440472 --- /dev/null +++ b/tests/integration/tests/370-invalid-setup-hs/Main.hs @@ -0,0 +1,8 @@ +-- Stack ignores a package's Setup.hs file when the Cabal build type is Simple. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/370 + +import StackTest + +main :: IO () +main = stack ["build"] diff --git a/tests/integration/tests/370-invalid-setup-hs/files/.gitignore b/tests/integration/tests/370-invalid-setup-hs/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/370-invalid-setup-hs/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/370-invalid-setup-hs/files/Setup.hs b/tests/integration/tests/370-invalid-setup-hs/files/Setup.hs new file mode 100644 index 0000000000..58838f89a6 --- /dev/null +++ b/tests/integration/tests/370-invalid-setup-hs/files/Setup.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE CPP #-} + +-- A bogus Setup.hs file that does not compile + +-- Avoid problems with CPP and HLint +#ifndef __HLINT__ + +#error Not going to compile, sorry + +#endif diff --git a/tests/integration/tests/370-invalid-setup-hs/files/package.yaml b/tests/integration/tests/370-invalid-setup-hs/files/package.yaml new file mode 100644 index 0000000000..a7305a77cf --- /dev/null +++ b/tests/integration/tests/370-invalid-setup-hs/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/370-invalid-setup-hs/files/src/Lib.hs b/tests/integration/tests/370-invalid-setup-hs/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/370-invalid-setup-hs/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/370-invalid-setup-hs/files/stack.yaml b/tests/integration/tests/370-invalid-setup-hs/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/370-invalid-setup-hs/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/3770-no-rerun-tests/Main.hs 
b/tests/integration/tests/3770-no-rerun-tests/Main.hs new file mode 100644 index 0000000000..6a08206e94 --- /dev/null +++ b/tests/integration/tests/3770-no-rerun-tests/Main.hs @@ -0,0 +1,25 @@ +-- | Stack can avoid re-running successful test suites. +-- +-- See: https://github.com/commercialhaskell/stack/pull/3770 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stackCheckStderr ["test"] (expectMessage testSuitePassed) + stackCheckStderr + ["test", "--no-rerun-tests"] + (expectMessage skippedAlreadyPassedTest) + +testSuitePassed :: String +testSuitePassed = "Test suite test passed" + +skippedAlreadyPassedTest :: String +skippedAlreadyPassedTest = "skipping already passed test" + +expectMessage :: String -> String -> IO () +expectMessage msg stderr = do + unless (words msg `isInfixOf` words stderr) $ + error $ "Expected output: \n" ++ show msg diff --git a/tests/integration/tests/3770-no-rerun-tests/files/.gitignore b/tests/integration/tests/3770-no-rerun-tests/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3770-no-rerun-tests/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3770-no-rerun-tests/files/package.yaml b/tests/integration/tests/3770-no-rerun-tests/files/package.yaml new file mode 100644 index 0000000000..9a50489941 --- /dev/null +++ b/tests/integration/tests/3770-no-rerun-tests/files/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +tests: + test: + source-dirs: test + main: Main.hs diff --git a/tests/integration/tests/3770-no-rerun-tests/files/stack.yaml b/tests/integration/tests/3770-no-rerun-tests/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/3770-no-rerun-tests/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git 
a/test/integration/tests/copy-bins-works/files/src/Main.hs b/tests/integration/tests/3770-no-rerun-tests/files/test/Main.hs similarity index 100% rename from test/integration/tests/copy-bins-works/files/src/Main.hs rename to tests/integration/tests/3770-no-rerun-tests/files/test/Main.hs diff --git a/tests/integration/tests/3787-internal-libs-with-no-main-lib/Main.hs b/tests/integration/tests/3787-internal-libs-with-no-main-lib/Main.hs new file mode 100644 index 0000000000..415d810f0a --- /dev/null +++ b/tests/integration/tests/3787-internal-libs-with-no-main-lib/Main.hs @@ -0,0 +1,9 @@ +-- Stack builds a package with a private named sublibrary (an internal library) +-- but no main library. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3787 + +import StackTest + +main :: IO () +main = stack ["build"] diff --git a/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/.gitignore b/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/app/Main.hs b/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/app/Main.hs new file mode 100644 index 0000000000..a392820cc0 --- /dev/null +++ b/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/app/Main.hs @@ -0,0 +1,6 @@ +module Main where + +import Lib ( someFunc ) + +main :: IO () +main = someFunc diff --git a/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/int/Lib.hs b/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/int/Lib.hs new file mode 100644 index 0000000000..35dc3b3347 --- /dev/null +++ b/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/int/Lib.hs @@ -0,0 +1,6 @@ +module Lib + ( someFunc + ) where + +someFunc :: IO () +someFunc = pure () 
diff --git a/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/package.yaml b/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/package.yaml new file mode 100644 index 0000000000..c3b0d2aaf8 --- /dev/null +++ b/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/package.yaml @@ -0,0 +1,17 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +internal-libraries: + internal: + source-dirs: int + +executables: + myExe: + source-dirs: app + main: Main.hs + dependencies: + - internal diff --git a/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/stack.yaml b/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/3787-internal-libs-with-no-main-lib/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/384-local-deps/Main.hs b/tests/integration/tests/384-local-deps/Main.hs new file mode 100644 index 0000000000..bf08823e28 --- /dev/null +++ b/tests/integration/tests/384-local-deps/Main.hs @@ -0,0 +1,11 @@ +-- Stack can initialise a multi-project package where one project package +-- depends on another project package. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/384 + +import StackTest + +main :: IO () +main = do + stack ["init"] + stack ["build"] diff --git a/tests/integration/tests/384-local-deps/files/.gitignore b/tests/integration/tests/384-local-deps/files/.gitignore new file mode 100644 index 0000000000..11a5409872 --- /dev/null +++ b/tests/integration/tests/384-local-deps/files/.gitignore @@ -0,0 +1,3 @@ +stack.yaml +myPackageA.cabal +myPackageB.cabal diff --git a/tests/integration/tests/384-local-deps/files/myPackageA/package.yaml b/tests/integration/tests/384-local-deps/files/myPackageA/package.yaml new file mode 100644 index 0000000000..d26b472ded --- /dev/null +++ b/tests/integration/tests/384-local-deps/files/myPackageA/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/384-local-deps/files/myPackageA/src/Lib.hs b/tests/integration/tests/384-local-deps/files/myPackageA/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/384-local-deps/files/myPackageA/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/384-local-deps/files/myPackageB/package.yaml b/tests/integration/tests/384-local-deps/files/myPackageB/package.yaml new file mode 100644 index 0000000000..e5f489eebf --- /dev/null +++ b/tests/integration/tests/384-local-deps/files/myPackageB/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base +- myPackageA + +library: + source-dirs: src diff --git a/tests/integration/tests/384-local-deps/files/myPackageB/src/Lib.hs b/tests/integration/tests/384-local-deps/files/myPackageB/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/384-local-deps/files/myPackageB/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git 
a/tests/integration/tests/3850-cached-templates-network-errors/Main.hs b/tests/integration/tests/3850-cached-templates-network-errors/Main.hs new file mode 100644 index 0000000000..80c1c706ad --- /dev/null +++ b/tests/integration/tests/3850-cached-templates-network-errors/Main.hs @@ -0,0 +1,40 @@ +-- Stack can used cached Stack project templates. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3850 + +import Control.Exception ( bracket ) +import Control.Monad ( unless, when ) +import Data.List ( isInfixOf ) +import Data.Maybe ( fromMaybe ) +import StackTest +import System.Directory ( removeDirectoryRecursive ) +import System.Environment ( lookupEnv, setEnv ) + +main :: IO () +main = when isLinux $ do + performCachingTest "myProjectA" "myProjectB" templateUrl + performCachingTest "myProjectC" "myProjectD" githubTemplate + where + performCachingTest :: String -> String -> String -> IO () + performCachingTest projectName1 projectName2 template = do + let arguments = ["new", projectName1, template] + bracket + ( lookupEnv "HTTPS_PROXY" ) + ( (setEnv "HTTPS_PROXY") . (fromMaybe "") ) + ( const $ do + stack ["new", projectName1, template] + setEnv "HTTPS_PROXY" "http://sdsgsfgslfgsjflgkjs" -- make https requests fail + stackCheckStderr ["new", projectName2, template] $ \stderr -> + unless ("Using cached local version." 
`isInfixOf` stderr) $ + error "stack didn't load the cached template" + ) + + -- This template has a `stack.yaml` file so `stack new` does not have to + -- `stack init` and therefore the test runs faster + templateUrl :: String + templateUrl = + "https://raw.githubusercontent.com/commercialhaskell/stack-templates/refs/heads/master/tasty-discover.hsfiles" + + -- The same template, cached differently + githubTemplate :: String + githubTemplate = "github:commercialhaskell/tasty-discover.hsfiles" diff --git a/tests/integration/tests/3861-ignore-bounds-in-snapshots/Main.hs b/tests/integration/tests/3861-ignore-bounds-in-snapshots/Main.hs new file mode 100644 index 0000000000..12ce4c6fdd --- /dev/null +++ b/tests/integration/tests/3861-ignore-bounds-in-snapshots/Main.hs @@ -0,0 +1,11 @@ +-- Stack trusts package versions in a snapshot over Cabal file dependency +-- information. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3861 + +import StackTest + +main :: IO () +main = do + stackErr ["build", "--stack-yaml", "stack-bad.yaml"] + stack ["build", "--stack-yaml", "stack-good.yaml"] diff --git a/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/.gitignore b/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/test/integration/tests/3861-ignore-bounds-in-snapshots/files/bad-bounds.tar b/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/bad-bounds.tar similarity index 100% rename from test/integration/tests/3861-ignore-bounds-in-snapshots/files/bad-bounds.tar rename to tests/integration/tests/3861-ignore-bounds-in-snapshots/files/bad-bounds.tar diff --git a/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/package.yaml b/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/package.yaml new file mode 100644 index 
0000000000..ab04bf7f95 --- /dev/null +++ b/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- bad-bounds + +library: {} diff --git a/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/snapshot.yaml b/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/snapshot.yaml new file mode 100644 index 0000000000..17c7ae14e7 --- /dev/null +++ b/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/snapshot.yaml @@ -0,0 +1,8 @@ +snapshot: ghc-9.10.3 +packages: +- ./bad-bounds.tar + +# Include a flag to get a different snapshot hash +flags: + bad-bounds: + unimportant: false diff --git a/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-bad.yaml b/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-bad.yaml new file mode 100644 index 0000000000..3980df2b36 --- /dev/null +++ b/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-bad.yaml @@ -0,0 +1,8 @@ +snapshot: ghc-9.10.3 +extra-deps: +- ./bad-bounds.tar + +# Include a flag to get a different snapshot hash +flags: + bad-bounds: + unimportant: true diff --git a/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-good.yaml b/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-good.yaml new file mode 100644 index 0000000000..1c2b12b0f1 --- /dev/null +++ b/tests/integration/tests/3861-ignore-bounds-in-snapshots/files/stack-good.yaml @@ -0,0 +1 @@ +snapshot: snapshot.yaml diff --git a/tests/integration/tests/3863-purge-command/Main.hs b/tests/integration/tests/3863-purge-command/Main.hs new file mode 100644 index 0000000000..140de74b76 --- /dev/null +++ b/tests/integration/tests/3863-purge-command/Main.hs @@ -0,0 +1,51 @@ +{-# LANGUAGE ViewPatterns #-} + +-- Stack can purge all Stack work directories. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/3863 + +import Control.Monad ( unless ) +import Data.Char ( isSpace ) +import Data.List ( dropWhileEnd ) +import Data.Maybe ( fromMaybe, listToMaybe ) +import StackTest +import System.FilePath ( splitDirectories ) + +trimEnd :: String -> String +trimEnd = dropWhileEnd isSpace + +main :: IO () +main = + -- For these commands, we'll need to know the `dist` directory. + -- This is usually `.stack-work/dist/$compiler-variant/Cabal-xxxx` + stackCheckStdout ["path", "--dist-dir"] $ \(trimEnd -> distDir) -> do + stackCheckStdout ["path", "--local-install-root"] $ \(trimEnd -> localInstallRoot) -> do + -- Usually `.stack-work` + let stackWork = fromMaybe (error "There must be a Stack working directory.") $ + listToMaybe (splitDirectories distDir) + + -- First, clean the .stack-work directory. + -- This is only necessary when running individual tests. + stackIgnoreException ["purge"] + -- See #4936 for details regarding the windows condition + unless isWindows $ doesNotExist stackWork + + -- The dist directory should exist after a build + stack ["build"] + doesExist distDir + doesExist localInstallRoot + doesExist stackWork + + -- The dist directory should not exist after a clean, whereas the + -- .stack-work directory should + stackIgnoreException ["clean"] + -- See #4936 for details regarding the windows condition + unless isWindows $ do + doesNotExist distDir + doesExist localInstallRoot + doesExist stackWork + + -- The .stack-work directory should not exist after a purge + stackIgnoreException ["purge"] + -- See #4936 for details regarding the windows condition + unless isWindows $ doesNotExist stackWork diff --git a/tests/integration/tests/3863-purge-command/files/.gitignore b/tests/integration/tests/3863-purge-command/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3863-purge-command/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git 
a/tests/integration/tests/3863-purge-command/files/package.yaml b/tests/integration/tests/3863-purge-command/files/package.yaml new file mode 100644 index 0000000000..a7305a77cf --- /dev/null +++ b/tests/integration/tests/3863-purge-command/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/3863-purge-command/files/src/Lib.hs b/tests/integration/tests/3863-purge-command/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/3863-purge-command/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/3863-purge-command/files/stack.yaml b/tests/integration/tests/3863-purge-command/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/3863-purge-command/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/3899-dont-rebuild-sublibraries/Main.hs b/tests/integration/tests/3899-dont-rebuild-sublibraries/Main.hs new file mode 100644 index 0000000000..8f76c8d683 --- /dev/null +++ b/tests/integration/tests/3899-dont-rebuild-sublibraries/Main.hs @@ -0,0 +1,18 @@ +-- Stack does not recompile a package with a private named sublibrary (an +-- internal library) on a second build. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3899 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stack ["build"] + res <- compilingModulesLines . snd <$> stackStderr ["build"] + unless (null res) $ fail "Stack recompiled code" + +-- Returns the lines where a module is compiled +compilingModulesLines :: String -> [String] +compilingModulesLines = filter (isInfixOf " Compiling ") . 
lines diff --git a/tests/integration/tests/3899-dont-rebuild-sublibraries/files/.gitignore b/tests/integration/tests/3899-dont-rebuild-sublibraries/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3899-dont-rebuild-sublibraries/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3899-dont-rebuild-sublibraries/files/app/Main.hs b/tests/integration/tests/3899-dont-rebuild-sublibraries/files/app/Main.hs new file mode 100644 index 0000000000..ac9093c87d --- /dev/null +++ b/tests/integration/tests/3899-dont-rebuild-sublibraries/files/app/Main.hs @@ -0,0 +1,6 @@ +module Main where + +import Lib + +main :: IO () +main = pure () diff --git a/test/integration/tests/3899-dont-rebuild-sublibraries/files/src-internal/Internal.hs b/tests/integration/tests/3899-dont-rebuild-sublibraries/files/int/Internal.hs similarity index 100% rename from test/integration/tests/3899-dont-rebuild-sublibraries/files/src-internal/Internal.hs rename to tests/integration/tests/3899-dont-rebuild-sublibraries/files/int/Internal.hs diff --git a/tests/integration/tests/3899-dont-rebuild-sublibraries/files/package.yaml b/tests/integration/tests/3899-dont-rebuild-sublibraries/files/package.yaml new file mode 100644 index 0000000000..d3ca861409 --- /dev/null +++ b/tests/integration/tests/3899-dont-rebuild-sublibraries/files/package.yaml @@ -0,0 +1,22 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + dependencies: + - internal + +internal-libraries: + internal: + source-dirs: int + +executables: + myExe: + source-dirs: app + main: Main.hs + dependencies: + - myPackage diff --git a/test/integration/tests/3899-dont-rebuild-sublibraries/files/src/Lib.hs b/tests/integration/tests/3899-dont-rebuild-sublibraries/files/src/Lib.hs similarity index 100% rename from test/integration/tests/3899-dont-rebuild-sublibraries/files/src/Lib.hs rename to 
tests/integration/tests/3899-dont-rebuild-sublibraries/files/src/Lib.hs diff --git a/tests/integration/tests/3899-dont-rebuild-sublibraries/files/stack.yaml b/tests/integration/tests/3899-dont-rebuild-sublibraries/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/3899-dont-rebuild-sublibraries/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/3926-ghci-with-sublibraries/Main.hs b/tests/integration/tests/3926-ghci-with-sublibraries/Main.hs new file mode 100644 index 0000000000..de824bfca1 --- /dev/null +++ b/tests/integration/tests/3926-ghci-with-sublibraries/Main.hs @@ -0,0 +1,42 @@ +-- Stack does not recompile a package with a private named sublibrary (an +-- internal library) on a second build. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3926 + +import Control.Monad ( unless, when ) +import Control.Monad.IO.Class ( liftIO ) +import Data.List ( isInfixOf, isSuffixOf ) +import StackTest.Repl + +main :: IO () +main = do + copy "src/Lib.v1" "src/Lib.hs" + copy "int/Internal.v1" "int/Internal.hs" + stack ["build"] -- need a build before ghci at the moment, see #4148 + stackRepl [] $ do + nextPrompt + replCommand ":main" + line <- replGetLine + let expected = "Successful!" + when (line /= expected) $ + error $ + "Main module didn't load correctly.\n" + <> "Expected: " <> expected <> "\n" + <> "Actual : " <> line <> "\n" + liftIO $ copy "int/Internal.v2" "int/Internal.hs" + reloadAndTest "checkInternal" "\"OK\"" "Internal library didn't reload." + liftIO $ copy "src/Lib.v2" "src/Lib.hs" + reloadAndTest "checkLib" "\"OK\"" "Main library didn't reload." + +reloadAndTest :: String -> String -> String -> Repl () +reloadAndTest cmd exp err = do + reload + replCommand cmd + line <- replGetLine + liftIO . 
putStrLn $ line + unless (exp `isSuffixOf` line) $ error err + +reload :: Repl () +reload = replCommand ":reload" >> loop + where + loop = replGetLine >>= \line -> unless ("Ok" `isInfixOf` line) loop diff --git a/tests/integration/tests/3926-ghci-with-sublibraries/files/.gitignore b/tests/integration/tests/3926-ghci-with-sublibraries/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3926-ghci-with-sublibraries/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3926-ghci-with-sublibraries/files/app/Main.hs b/tests/integration/tests/3926-ghci-with-sublibraries/files/app/Main.hs new file mode 100644 index 0000000000..7eaf6f21c1 --- /dev/null +++ b/tests/integration/tests/3926-ghci-with-sublibraries/files/app/Main.hs @@ -0,0 +1,6 @@ +module Main where + +import Lib + +main :: IO () +main = putStrLn "Successful!" diff --git a/test/integration/tests/3926-ghci-with-sublibraries/files/src-internal/Internal.v1 b/tests/integration/tests/3926-ghci-with-sublibraries/files/int/Internal.hs similarity index 100% rename from test/integration/tests/3926-ghci-with-sublibraries/files/src-internal/Internal.v1 rename to tests/integration/tests/3926-ghci-with-sublibraries/files/int/Internal.hs diff --git a/tests/integration/tests/3926-ghci-with-sublibraries/files/int/Internal.v1 b/tests/integration/tests/3926-ghci-with-sublibraries/files/int/Internal.v1 new file mode 100644 index 0000000000..d066bb085e --- /dev/null +++ b/tests/integration/tests/3926-ghci-with-sublibraries/files/int/Internal.v1 @@ -0,0 +1 @@ +module Internal where diff --git a/tests/integration/tests/3926-ghci-with-sublibraries/files/int/Internal.v2 b/tests/integration/tests/3926-ghci-with-sublibraries/files/int/Internal.v2 new file mode 100644 index 0000000000..771c9f2365 --- /dev/null +++ b/tests/integration/tests/3926-ghci-with-sublibraries/files/int/Internal.v2 @@ -0,0 +1,6 @@ +module Internal + ( checkInternal + ) where + 
+checkInternal :: String +checkInternal = "OK" diff --git a/tests/integration/tests/3926-ghci-with-sublibraries/files/package.yaml b/tests/integration/tests/3926-ghci-with-sublibraries/files/package.yaml new file mode 100644 index 0000000000..d3ca861409 --- /dev/null +++ b/tests/integration/tests/3926-ghci-with-sublibraries/files/package.yaml @@ -0,0 +1,22 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + dependencies: + - internal + +internal-libraries: + internal: + source-dirs: int + +executables: + myExe: + source-dirs: app + main: Main.hs + dependencies: + - myPackage diff --git a/test/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.v1 b/tests/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.hs similarity index 100% rename from test/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.v1 rename to tests/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.hs diff --git a/tests/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.v1 b/tests/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.v1 new file mode 100644 index 0000000000..1369151610 --- /dev/null +++ b/tests/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.v1 @@ -0,0 +1,3 @@ +module Lib where + +import Internal diff --git a/tests/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.v2 b/tests/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.v2 new file mode 100644 index 0000000000..ef8bb078c5 --- /dev/null +++ b/tests/integration/tests/3926-ghci-with-sublibraries/files/src/Lib.v2 @@ -0,0 +1,8 @@ +module Lib + ( checkLib + ) where + +import Internal + +checkLib :: String +checkLib = "OK" diff --git a/tests/integration/tests/3926-ghci-with-sublibraries/files/stack.yaml b/tests/integration/tests/3926-ghci-with-sublibraries/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ 
b/tests/integration/tests/3926-ghci-with-sublibraries/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/3940-base-upgrade-warning/Main.hs b/tests/integration/tests/3940-base-upgrade-warning/Main.hs new file mode 100644 index 0000000000..78c9521283 --- /dev/null +++ b/tests/integration/tests/3940-base-upgrade-warning/Main.hs @@ -0,0 +1,29 @@ +-- Stack provides appropriate advice if the version of base required is not that +-- specified by the snapshot. Stack warns that base is a wired-in package before +-- GHC 9.12.1. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3940 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +-- Use short message fragment because prettyWarn formatting and colour +unattainableBaseWarning :: String +unattainableBaseWarning = + "Build requires unattainable version of" + +-- Use short message fragment because prettyWarn formatting and colour +noBaseUpgradeWarning :: String +noBaseUpgradeWarning = + "Before GHC 9.12.1, the base package is" + +main :: IO () +main = do + stackErrStderr ["build", "--stack-yaml", "unattainable-base.yaml"] (expectMessage unattainableBaseWarning) + stackErrStderr ["build", "--stack-yaml", "no-base-upgrade.yaml"] (expectMessage noBaseUpgradeWarning) + +expectMessage :: String -> String -> IO () +expectMessage msg stderr = + unless (words msg `isInfixOf` words stderr) $ + error $ "Expected a warning: \n" ++ show msg diff --git a/tests/integration/tests/3940-base-upgrade-warning/files/.gitignore b/tests/integration/tests/3940-base-upgrade-warning/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3940-base-upgrade-warning/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3940-base-upgrade-warning/files/no-base-upgrade.yaml b/tests/integration/tests/3940-base-upgrade-warning/files/no-base-upgrade.yaml new file mode 100644 index 
0000000000..22e1252a91 --- /dev/null +++ b/tests/integration/tests/3940-base-upgrade-warning/files/no-base-upgrade.yaml @@ -0,0 +1,3 @@ +snapshot: ghc-9.10.3 +extra-deps: +- base-4.10.1.0 diff --git a/tests/integration/tests/3940-base-upgrade-warning/files/package.yaml b/tests/integration/tests/3940-base-upgrade-warning/files/package.yaml new file mode 100644 index 0000000000..d734612453 --- /dev/null +++ b/tests/integration/tests/3940-base-upgrade-warning/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base < 4.20 + +library: + source-dirs: src diff --git a/tests/integration/tests/3940-base-upgrade-warning/files/src/Lib.hs b/tests/integration/tests/3940-base-upgrade-warning/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/3940-base-upgrade-warning/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/3940-base-upgrade-warning/files/unattainable-base.yaml b/tests/integration/tests/3940-base-upgrade-warning/files/unattainable-base.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/3940-base-upgrade-warning/files/unattainable-base.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/3942-solver-error-output/Main.hs b/tests/integration/tests/3942-solver-error-output/Main.hs new file mode 100644 index 0000000000..e8369562ab --- /dev/null +++ b/tests/integration/tests/3942-solver-error-output/Main.hs @@ -0,0 +1,21 @@ +-- Stack build fails if a dependency is not available. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3942 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +-- | Stack's error code for failing to construct a build plan. 
+planFailure :: String +planFailure = + "[S-4804]" + +main :: IO () +main = do + stackErrStderr ["./script.hs"] (expectMessage planFailure) + +expectMessage :: String -> String -> IO () +expectMessage msg stderr = do + unless (words msg `isInfixOf` words stderr) $ + error $ "Expected a warning: \n" ++ show msg diff --git a/tests/integration/tests/3942-solver-error-output/files/.gitignore b/tests/integration/tests/3942-solver-error-output/files/.gitignore new file mode 100644 index 0000000000..191b37bd49 --- /dev/null +++ b/tests/integration/tests/3942-solver-error-output/files/.gitignore @@ -0,0 +1,2 @@ +no-deps.cabal +one-deps.cabal diff --git a/tests/integration/tests/3942-solver-error-output/files/no-deps/package.yaml b/tests/integration/tests/3942-solver-error-output/files/no-deps/package.yaml new file mode 100644 index 0000000000..48ede3554f --- /dev/null +++ b/tests/integration/tests/3942-solver-error-output/files/no-deps/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: no-deps +synopsis: A package with no dependencies, other than base + +dependencies: +- base + +library: {} diff --git a/tests/integration/tests/3942-solver-error-output/files/one-deps/package.yaml b/tests/integration/tests/3942-solver-error-output/files/one-deps/package.yaml new file mode 100644 index 0000000000..0cabd80dac --- /dev/null +++ b/tests/integration/tests/3942-solver-error-output/files/one-deps/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: one-deps +synopsis: A package with one dependency (no-deps) other than base + +dependencies: +- base +- no-deps + +library: {} diff --git a/tests/integration/tests/3942-solver-error-output/files/script.hs b/tests/integration/tests/3942-solver-error-output/files/script.hs new file mode 100644 index 0000000000..fde3259536 --- /dev/null +++ b/tests/integration/tests/3942-solver-error-output/files/script.hs @@ -0,0 +1,5 @@ +#!/usr/bin/env stack +-- stack runhaskell --stack-yaml test-stack.yml --package one-deps + +main :: IO 
() +main = pure () diff --git a/tests/integration/tests/3942-solver-error-output/files/test-stack.yml b/tests/integration/tests/3942-solver-error-output/files/test-stack.yml new file mode 100644 index 0000000000..c2035ec9bc --- /dev/null +++ b/tests/integration/tests/3942-solver-error-output/files/test-stack.yml @@ -0,0 +1,6 @@ +snapshot: ghc-9.10.3 + +packages: [] + +extra-deps: +- ./one-deps diff --git a/tests/integration/tests/3959-order-of-flags/Main.hs b/tests/integration/tests/3959-order-of-flags/Main.hs new file mode 100644 index 0000000000..479eae7be6 --- /dev/null +++ b/tests/integration/tests/3959-order-of-flags/Main.hs @@ -0,0 +1,28 @@ +-- Stack allows build flags or options to be specified before or after the build +-- command. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3959 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + checkFlagsBeforeCommand + checkFlagsAfterCommand + +checkFlagsBeforeCommand :: IO () +checkFlagsBeforeCommand = + stackCheckStderr ["--test", "--no-run-tests", "build"] checker + +checkFlagsAfterCommand :: IO () +checkFlagsAfterCommand = + stackCheckStderr ["build", "--test", "--no-run-tests"] checker + +checker :: String -> IO () +checker output = do + let testsAreDisabled = + any (\ln -> "All test running disabled by" `isInfixOf` ln) (lines output) + unless testsAreDisabled $ + fail "Tests should not be run" diff --git a/tests/integration/tests/3959-order-of-flags/files/.gitignore b/tests/integration/tests/3959-order-of-flags/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3959-order-of-flags/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3959-order-of-flags/files/package.yaml b/tests/integration/tests/3959-order-of-flags/files/package.yaml new file mode 100644 index 0000000000..ee1a6463a8 --- /dev/null +++ 
b/tests/integration/tests/3959-order-of-flags/files/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +tests: + test: + main: Main.hs + source-dirs: test diff --git a/tests/integration/tests/3959-order-of-flags/files/stack.yaml b/tests/integration/tests/3959-order-of-flags/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/3959-order-of-flags/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/test/integration/tests/3959-order-of-flags/files/test/Spec.hs b/tests/integration/tests/3959-order-of-flags/files/test/Main.hs similarity index 100% rename from test/integration/tests/3959-order-of-flags/files/test/Spec.hs rename to tests/integration/tests/3959-order-of-flags/files/test/Main.hs diff --git a/tests/integration/tests/397-case-insensitive-flags/Main.hs b/tests/integration/tests/397-case-insensitive-flags/Main.hs new file mode 100644 index 0000000000..38eb51ed26 --- /dev/null +++ b/tests/integration/tests/397-case-insensitive-flags/Main.hs @@ -0,0 +1,11 @@ +-- Cabal flags are case-insensitive and Stack treats them as such. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/397 + +import StackTest + +main :: IO () +main = do + stackErr ["build"] + stack ["build", "--flag", "myPackage:nEcEssAry"] + stack ["build", "--flag", "*:nEcEssAry"] diff --git a/tests/integration/tests/397-case-insensitive-flags/files/.gitignore b/tests/integration/tests/397-case-insensitive-flags/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/397-case-insensitive-flags/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/397-case-insensitive-flags/files/package.yaml b/tests/integration/tests/397-case-insensitive-flags/files/package.yaml new file mode 100644 index 0000000000..973e01a4f6 --- /dev/null +++ b/tests/integration/tests/397-case-insensitive-flags/files/package.yaml @@ -0,0 +1,18 @@ +spec-version: 0.36.0 + +name: myPackage + +flags: + necessary: + description: The package will not build unless this flag is true. + manual: true + default: false + +dependencies: +- base + +library: + source-dirs: src + when: + - condition: flag(necessary) + cpp-options: -DWORK diff --git a/tests/integration/tests/397-case-insensitive-flags/files/src/Lib.hs b/tests/integration/tests/397-case-insensitive-flags/files/src/Lib.hs new file mode 100644 index 0000000000..eccdf60245 --- /dev/null +++ b/tests/integration/tests/397-case-insensitive-flags/files/src/Lib.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE CPP #-} + +module Lib where + +-- Avoid problems with CPP and HLint +#ifndef __HLINT__ + +#if !WORK +#error Not going to work, sorry +#endif + +#endif diff --git a/tests/integration/tests/397-case-insensitive-flags/files/stack.yaml b/tests/integration/tests/397-case-insensitive-flags/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/397-case-insensitive-flags/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git 
a/tests/integration/tests/3996-sublib-not-depended-upon/.gitignore b/tests/integration/tests/3996-sublib-not-depended-upon/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3996-sublib-not-depended-upon/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3996-sublib-not-depended-upon/Main.hs b/tests/integration/tests/3996-sublib-not-depended-upon/Main.hs new file mode 100644 index 0000000000..0ba51d1aea --- /dev/null +++ b/tests/integration/tests/3996-sublib-not-depended-upon/Main.hs @@ -0,0 +1,10 @@ +-- Stack should build a package with a main library and an internal library (a +-- private named sublibrary) even if the main library does not depend on the +-- sublibrary. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3996 + +import StackTest + +main :: IO () +main = stack ["build"] diff --git a/tests/integration/tests/3996-sublib-not-depended-upon/files/int/Internal.hs b/tests/integration/tests/3996-sublib-not-depended-upon/files/int/Internal.hs new file mode 100644 index 0000000000..d066bb085e --- /dev/null +++ b/tests/integration/tests/3996-sublib-not-depended-upon/files/int/Internal.hs @@ -0,0 +1 @@ +module Internal where diff --git a/tests/integration/tests/3996-sublib-not-depended-upon/files/package.yaml b/tests/integration/tests/3996-sublib-not-depended-upon/files/package.yaml new file mode 100644 index 0000000000..ef6e5935e5 --- /dev/null +++ b/tests/integration/tests/3996-sublib-not-depended-upon/files/package.yaml @@ -0,0 +1,13 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + +internal-libraries: + internal: + source-dirs: int diff --git a/tests/integration/tests/3996-sublib-not-depended-upon/files/src/Lib.hs b/tests/integration/tests/3996-sublib-not-depended-upon/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ 
b/tests/integration/tests/3996-sublib-not-depended-upon/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/3996-sublib-not-depended-upon/files/stack.yaml b/tests/integration/tests/3996-sublib-not-depended-upon/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/3996-sublib-not-depended-upon/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/3997-coverage-with-cabal-3/Main.hs b/tests/integration/tests/3997-coverage-with-cabal-3/Main.hs new file mode 100644 index 0000000000..c6f049f66f --- /dev/null +++ b/tests/integration/tests/3997-coverage-with-cabal-3/Main.hs @@ -0,0 +1,13 @@ +-- Stack can create a coverage report for a test suite. +-- +-- See: https://github.com/commercialhaskell/stack/issues/3997 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stackCheckStderr ["test", "--coverage"] $ \out -> do + unless ("The coverage report for myPackage's test-suite test is available at" `isInfixOf` out) $ + fail "Coverage report didn't build" diff --git a/tests/integration/tests/3997-coverage-with-cabal-3/files/.gitignore b/tests/integration/tests/3997-coverage-with-cabal-3/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/3997-coverage-with-cabal-3/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/3997-coverage-with-cabal-3/files/package.yaml b/tests/integration/tests/3997-coverage-with-cabal-3/files/package.yaml new file mode 100644 index 0000000000..3ba4c450a6 --- /dev/null +++ b/tests/integration/tests/3997-coverage-with-cabal-3/files/package.yaml @@ -0,0 +1,16 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + +tests: + test: + source-dirs: test + main: Main.hs + dependencies: + - myPackage diff --git 
a/test/integration/tests/3997-coverage-with-cabal-3/files/src/Lib.hs b/tests/integration/tests/3997-coverage-with-cabal-3/files/src/Lib.hs similarity index 100% rename from test/integration/tests/3997-coverage-with-cabal-3/files/src/Lib.hs rename to tests/integration/tests/3997-coverage-with-cabal-3/files/src/Lib.hs diff --git a/tests/integration/tests/3997-coverage-with-cabal-3/files/stack.yaml b/tests/integration/tests/3997-coverage-with-cabal-3/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/3997-coverage-with-cabal-3/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/test/integration/tests/3997-coverage-with-cabal-3/files/test/Main.hs b/tests/integration/tests/3997-coverage-with-cabal-3/files/test/Main.hs similarity index 100% rename from test/integration/tests/3997-coverage-with-cabal-3/files/test/Main.hs rename to tests/integration/tests/3997-coverage-with-cabal-3/files/test/Main.hs diff --git a/tests/integration/tests/4001-excess-recompilation/Main.hs b/tests/integration/tests/4001-excess-recompilation/Main.hs new file mode 100644 index 0000000000..03a19bcd10 --- /dev/null +++ b/tests/integration/tests/4001-excess-recompilation/Main.hs @@ -0,0 +1,24 @@ +-- Stack does not recompile a package when a test suite or benchmark is dirty +-- but the test suite or benchmark is not a build target. +-- +-- See: https://github.com/commercialhaskell/stack/issues/4001 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + copy "test/Main.v1" "test/Main.hs" + copy "bench/Main.v1" "bench/Main.hs" + stack ["build"] + + copy "test/Main.v2" "test/Main.hs" + copy "bench/Main.v2" "bench/Main.hs" + res <- unregisteringLines . snd <$> stackStderr ["build"] + unless (null res) $ + fail "Stack recompiled when a test or benchmark file was changed, but only \ + \the library was targeted." 
+ +unregisteringLines :: String -> [String] +unregisteringLines = filter (isInfixOf " unregistering ") . lines diff --git a/tests/integration/tests/4001-excess-recompilation/files/.gitignore b/tests/integration/tests/4001-excess-recompilation/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4001-excess-recompilation/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4001-excess-recompilation/files/bench/Main.hs b/tests/integration/tests/4001-excess-recompilation/files/bench/Main.hs new file mode 100644 index 0000000000..6fca540825 --- /dev/null +++ b/tests/integration/tests/4001-excess-recompilation/files/bench/Main.hs @@ -0,0 +1,4 @@ +import Lib + +main :: IO () +main = pure () diff --git a/tests/integration/tests/4001-excess-recompilation/files/bench/Main.v1 b/tests/integration/tests/4001-excess-recompilation/files/bench/Main.v1 new file mode 100644 index 0000000000..a0e060b7ca --- /dev/null +++ b/tests/integration/tests/4001-excess-recompilation/files/bench/Main.v1 @@ -0,0 +1,4 @@ +import Lib + +main :: IO () +main = putStrLn "Version 1" diff --git a/tests/integration/tests/4001-excess-recompilation/files/bench/Main.v2 b/tests/integration/tests/4001-excess-recompilation/files/bench/Main.v2 new file mode 100644 index 0000000000..d1741bc55f --- /dev/null +++ b/tests/integration/tests/4001-excess-recompilation/files/bench/Main.v2 @@ -0,0 +1,4 @@ +import Lib + +main :: IO () +main = putStrLn "Version 2" diff --git a/tests/integration/tests/4001-excess-recompilation/files/package.yaml b/tests/integration/tests/4001-excess-recompilation/files/package.yaml new file mode 100644 index 0000000000..5817077e48 --- /dev/null +++ b/tests/integration/tests/4001-excess-recompilation/files/package.yaml @@ -0,0 +1,23 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + +tests: + test: + source-dirs: test + main: Main.hs + dependencies: + 
- myPackage + +benchmarks: + bench: + source-dirs: bench + main: Main.hs + dependencies: + - myPackage diff --git a/test/integration/tests/4001-excess-recompilation/files/src/Lib.hs b/tests/integration/tests/4001-excess-recompilation/files/src/Lib.hs similarity index 100% rename from test/integration/tests/4001-excess-recompilation/files/src/Lib.hs rename to tests/integration/tests/4001-excess-recompilation/files/src/Lib.hs diff --git a/tests/integration/tests/4001-excess-recompilation/files/stack.yaml b/tests/integration/tests/4001-excess-recompilation/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/4001-excess-recompilation/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/4001-excess-recompilation/files/test/Main.hs b/tests/integration/tests/4001-excess-recompilation/files/test/Main.hs new file mode 100644 index 0000000000..6fca540825 --- /dev/null +++ b/tests/integration/tests/4001-excess-recompilation/files/test/Main.hs @@ -0,0 +1,4 @@ +import Lib + +main :: IO () +main = pure () diff --git a/tests/integration/tests/4001-excess-recompilation/files/test/Main.v1 b/tests/integration/tests/4001-excess-recompilation/files/test/Main.v1 new file mode 100644 index 0000000000..a0e060b7ca --- /dev/null +++ b/tests/integration/tests/4001-excess-recompilation/files/test/Main.v1 @@ -0,0 +1,4 @@ +import Lib + +main :: IO () +main = putStrLn "Version 1" diff --git a/tests/integration/tests/4001-excess-recompilation/files/test/Main.v2 b/tests/integration/tests/4001-excess-recompilation/files/test/Main.v2 new file mode 100644 index 0000000000..d1741bc55f --- /dev/null +++ b/tests/integration/tests/4001-excess-recompilation/files/test/Main.v2 @@ -0,0 +1,4 @@ +import Lib + +main :: IO () +main = putStrLn "Version 2" diff --git a/tests/integration/tests/4044-no-run-tests-config/Main.hs b/tests/integration/tests/4044-no-run-tests-config/Main.hs new file mode 100644 index 
0000000000..afd68b4d03 --- /dev/null +++ b/tests/integration/tests/4044-no-run-tests-config/Main.hs @@ -0,0 +1,10 @@ +-- Stack can be configured not to run test suites. +-- +-- See: https://github.com/commercialhaskell/stack/issues/4044 + +import StackTest + +main :: IO () +main = do + stack ["test"] + stack ["build", "myPackage:test:test"] diff --git a/tests/integration/tests/4044-no-run-tests-config/files/.gitignore b/tests/integration/tests/4044-no-run-tests-config/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4044-no-run-tests-config/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4044-no-run-tests-config/files/package.yaml b/tests/integration/tests/4044-no-run-tests-config/files/package.yaml new file mode 100644 index 0000000000..28ed00191b --- /dev/null +++ b/tests/integration/tests/4044-no-run-tests-config/files/package.yaml @@ -0,0 +1,14 @@ +spec-version: 0.36.0 + +name: myPackage +version: 0.1.0.0 + +dependencies: +- base + +library: {} + +tests: + test: + source-dirs: test + main: Main.hs diff --git a/tests/integration/tests/4044-no-run-tests-config/files/stack.yaml b/tests/integration/tests/4044-no-run-tests-config/files/stack.yaml new file mode 100644 index 0000000000..a2a5183fa0 --- /dev/null +++ b/tests/integration/tests/4044-no-run-tests-config/files/stack.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +build: + test-arguments: + no-run-tests: true diff --git a/tests/integration/tests/4044-no-run-tests-config/files/test/Main.hs b/tests/integration/tests/4044-no-run-tests-config/files/test/Main.hs new file mode 100644 index 0000000000..40565d378d --- /dev/null +++ b/tests/integration/tests/4044-no-run-tests-config/files/test/Main.hs @@ -0,0 +1,2 @@ +main :: IO () +main = error "This test will fail if run." 
diff --git a/test/integration/tests/4085-insufficient-error-message/Dockerfile b/tests/integration/tests/4085-insufficient-error-message/Dockerfile similarity index 100% rename from test/integration/tests/4085-insufficient-error-message/Dockerfile rename to tests/integration/tests/4085-insufficient-error-message/Dockerfile diff --git a/tests/integration/tests/4085-insufficient-error-message/Main.hs b/tests/integration/tests/4085-insufficient-error-message/Main.hs new file mode 100644 index 0000000000..6ec7afb6c5 --- /dev/null +++ b/tests/integration/tests/4085-insufficient-error-message/Main.hs @@ -0,0 +1,100 @@ +-- See: https://github.com/commercialhaskell/stack/issues/4085 + +{-# LANGUAGE OverloadedStrings #-} + +{-- + +import StackTest +import System.Environment (getEnv) +import System.Directory +import System.FilePath +import System.Process +import System.Exit +import Control.Exception.Base (bracket, bracket_) +import Control.Monad (guard, when, unless, msum) +import Control.Concurrent (threadDelay) +import Data.List (isInfixOf, delete, repeat) + +createDockerVolume :: Int -> IO String +createDockerVolume sizeInMB = do + (ec, stdout, stderr) <- runEx "docker" $ "volume create" + ++ " --driver local" + ++ " --opt type=tmpfs" + ++ " --opt device=tmpfs" + ++ " --opt o=size=" ++ show sizeInMB ++ "m" + unless (ec == ExitSuccess) $ error $ "Exited with exit code: " ++ show ec + pure $ delete '\n' stdout + +removeDockerVolume :: Int -> String -> IO () +removeDockerVolume attempts name | attempts <= 0 = + error $ "Can't remove docker volume " ++ name +removeDockerVolume attempts name = do + (ec, _, stderr) <- runEx "docker" $ "volume rm --force " ++ name + let wasRemoved = (ec == ExitSuccess) || isInfixOf "No such volume" stderr + unless wasRemoved $ + threadDelay 3000000 >> -- sometimes docker releases a volume slowly + removeDockerVolume (attempts - 1) name + +withDockerVolume :: Int -> (String -> IO a) -> IO a +withDockerVolume sizeInMB = + bracket 
(createDockerVolume sizeInMB) (removeDockerVolume 5) + +buildDockerImageWithStackSourceInside :: String -> IO () +buildDockerImageWithStackSourceInside tag = withSourceDirectory $ do + dir <- testDir + runShell ("docker build" + ++ " --file " ++ (dir "Dockerfile") + ++ " --tag " ++ tag + ++ " --memory-swap -1" + ++ " .") + removeDanglingImages + +removeDanglingImages :: IO () +removeDanglingImages = + runShell "docker rmi -f $(docker images --quiet --filter 'dangling=true')" + +runDockerContainerWithVolume + :: String + -> String + -> String + -> String + -> IO (ExitCode, String, String) +runDockerContainerWithVolume imageTag volumeName volumeLocation cmd = + runEx "docker" $ "run" + ++ " --rm" + ++ " --workdir " ++ volumeLocation + ++ " --mount type=volume,dst=" ++ volumeLocation ++ ",src=" ++ volumeName + ++ " " ++ imageTag + ++ " " ++ cmd + +validateSrderr :: String -> Bool +validateSrderr = isInfixOf "No space left on device" + +imageTag :: String +imageTag = "4085-fix" + +spaceInMBJustEnoughToFailInTheExactMoment :: Int +spaceInMBJustEnoughToFailInTheExactMoment = 2000 + +main :: IO () +main = do + buildDockerImageWithStackSourceInside imageTag + (ec, _, stderr) <- withDockerVolume + spaceInMBJustEnoughToFailInTheExactMoment + (\volumeName -> + runDockerContainerWithVolume imageTag volumeName "/app" $ + "stack" + ++ " --stack-root " ++ "/app" + ++ " --snapshot nightly-2018-06-05" + ++ " --no-terminal" + ++ " --install-ghc" + ++ " test") + unless (ec /= ExitSuccess) $ + error "stack process succeeded, but it shouldn't" + unless (validateSrderr stderr) $ + error "stderr validation failed" + +// --} + +main :: IO () +main = putStrLn "This test is disabled (see https://github.com/commercialhaskell/stack/issues/4427)." 
diff --git a/tests/integration/tests/4095-utf8-pure-nix/Main.hs b/tests/integration/tests/4095-utf8-pure-nix/Main.hs new file mode 100644 index 0000000000..34ee080b98 --- /dev/null +++ b/tests/integration/tests/4095-utf8-pure-nix/Main.hs @@ -0,0 +1,28 @@ +-- Stack supports Unicode code points in a Nix environment. +-- +-- See: https://github.com/commercialhaskell/stack/issues/4095 + +import Control.Monad ( unless ) +import Data.Maybe ( isJust ) +import StackTest +import System.Environment ( lookupEnv ) + +-- This test requires that Nix is installed and that the NIX_PATH has been set +-- so as to allow the path to be used. +main :: IO () +main + | isWindows = + logInfo "Disabled on Windows as Nix is not currently supported on \ + \Windows." + | isMacOSX = + logInfo "Disabled on macOS as it takes too long to run, since it tries \ + \to build GHC." + | otherwise = do + isInContainer <- getInContainer + unless isInContainer $ do + stack ["build", "--nix-pure"] + stack ["exec", "--nix-pure", "myExe"] + +-- | 'True' if we are currently running inside a Docker container. 
+getInContainer :: IO Bool +getInContainer = isJust <$> lookupEnv "STACK_IN_CONTAINER" diff --git a/tests/integration/tests/4095-utf8-pure-nix/files/.gitignore b/tests/integration/tests/4095-utf8-pure-nix/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4095-utf8-pure-nix/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4095-utf8-pure-nix/files/app/Main.hs b/tests/integration/tests/4095-utf8-pure-nix/files/app/Main.hs new file mode 100644 index 0000000000..3350958478 --- /dev/null +++ b/tests/integration/tests/4095-utf8-pure-nix/files/app/Main.hs @@ -0,0 +1,5 @@ +import System.IO ( stdout ) +import Text.Printf ( hPrintf ) + +main :: IO () +main = hPrintf stdout "平和" diff --git a/tests/integration/tests/4095-utf8-pure-nix/files/package.yaml b/tests/integration/tests/4095-utf8-pure-nix/files/package.yaml new file mode 100644 index 0000000000..ee1f9f8afa --- /dev/null +++ b/tests/integration/tests/4095-utf8-pure-nix/files/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +executables: + myExe: + source-dirs: app + main: Main.hs diff --git a/tests/integration/tests/4095-utf8-pure-nix/files/stack.yaml b/tests/integration/tests/4095-utf8-pure-nix/files/stack.yaml new file mode 100644 index 0000000000..6eed06afb0 --- /dev/null +++ b/tests/integration/tests/4095-utf8-pure-nix/files/stack.yaml @@ -0,0 +1,10 @@ +# As of 2026-03-01, with channel nixos-25.11, available Nix packages +# haskell.compiler.ghc*, based on: +# +# $ nix-instantiate --eval -E "with import <nixpkgs> {}; lib.attrNames haskell.compiler" +# +# are: +# +# 948 (GHC 9.4.8), 967 (GHC 9.6.7), 984 (GHC 9.8.4), 9102 (GHC 9.10.2), +# 9103 (GHC 9.10.3), 9122 (GHC 9.12.2). 
+snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/4101-dependency-tree/Main.hs b/tests/integration/tests/4101-dependency-tree/Main.hs new file mode 100644 index 0000000000..913b2db802 --- /dev/null +++ b/tests/integration/tests/4101-dependency-tree/Main.hs @@ -0,0 +1,55 @@ +-- Stack can report the dependency tree of project packages. +-- +-- See: https://github.com/commercialhaskell/stack/issues/4101 + +import Control.Monad ( unless, when ) +import Data.List ( isPrefixOf ) +import StackTest +import System.Directory ( getCurrentDirectory ) + +main :: IO () +main = unless isWindows $ do + stackCheckStdout ["ls", "dependencies", "tree"] $ \stdOut -> do + let expected = unlines [ "Packages" + , "├─┬ myPackageA 0.0.0" + , "│ ├─┬ base 4.20.2.0" + ] + unless (expected `isPrefixOf` stdOut) $ + error $ unlines [ "Expected:", expected, "Actual:", stdOut ] + + stackCheckStdout ["ls", "dependencies", "tree", "--depth=1"] $ \stdOut -> do + let expected = unlines [ "Packages" + , "├─┬ myPackageA 0.0.0" + , "│ ├── base 4.20.2.0" + , "│ ├── filelock 0.1.1.2" + , "│ ├── mtl 2.3.1" + , "│ └── myPackageB 0.0.0" + , "└─┬ myPackageB 0.0.0" + , " └── base 4.20.2.0" + ] + when (stdOut /= expected) $ + error $ unlines [ "Expected:", expected, "Actual:", stdOut ] + + stackCheckStdout ["ls", "dependencies", "tree", "myPackageB"] $ \stdOut -> do + let expected = unlines [ "Packages" + , "└─┬ myPackageB 0.0.0" + , " └─┬ base 4.20.2.0" + , " ├─┬ ghc-internal 9.1003.0" + , " │ ├─┬ ghc-bignum 1.3" + , " │ │ └─┬ ghc-prim 0.12.0" + , " │ │ └── rts 1.0.2" + , " │ ├─┬ ghc-prim 0.12.0" + , " │ │ └── rts 1.0.2" + , " │ └── rts 1.0.2" + , " └─┬ ghc-prim 0.12.0" + , " └── rts 1.0.2" + ] + when (stdOut /= expected) $ + error $ unlines [ "Expected:", expected, "Actual:", stdOut ] + + stackCheckStdout ["ls", "dependencies", "json"] $ \stdOut -> do + currdir <- getCurrentDirectory + let expected = + "[{\"dependencies\":[\"base\",\"bytestring\",\"filepath\",\"os-string\",\"time\"]" + unless 
(expected `isPrefixOf` stdOut) $ + error $ unlines [ "Expected:", expected, "Actual:", stdOut ] diff --git a/tests/integration/tests/4101-dependency-tree/files/.gitignore b/tests/integration/tests/4101-dependency-tree/files/.gitignore new file mode 100644 index 0000000000..f9a6e152d2 --- /dev/null +++ b/tests/integration/tests/4101-dependency-tree/files/.gitignore @@ -0,0 +1,2 @@ +myPackageA.cabal +myPackageB.cabal diff --git a/tests/integration/tests/4101-dependency-tree/files/myPackageB/package.yaml b/tests/integration/tests/4101-dependency-tree/files/myPackageB/package.yaml new file mode 100644 index 0000000000..f94636c710 --- /dev/null +++ b/tests/integration/tests/4101-dependency-tree/files/myPackageB/package.yaml @@ -0,0 +1,8 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base + +library: {} diff --git a/tests/integration/tests/4101-dependency-tree/files/myPackageB/src/Lib.hs b/tests/integration/tests/4101-dependency-tree/files/myPackageB/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/4101-dependency-tree/files/myPackageB/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/4101-dependency-tree/files/package.yaml b/tests/integration/tests/4101-dependency-tree/files/package.yaml new file mode 100644 index 0000000000..682e620e4c --- /dev/null +++ b/tests/integration/tests/4101-dependency-tree/files/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base +- filelock +- mtl +- myPackageB + +library: {} diff --git a/tests/integration/tests/4101-dependency-tree/files/src/Lib.hs b/tests/integration/tests/4101-dependency-tree/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/4101-dependency-tree/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/4101-dependency-tree/files/stack.yaml 
b/tests/integration/tests/4101-dependency-tree/files/stack.yaml new file mode 100644 index 0000000000..743bc66c45 --- /dev/null +++ b/tests/integration/tests/4101-dependency-tree/files/stack.yaml @@ -0,0 +1,9 @@ +snapshot: lts-24.37 + +packages: +- . +- myPackageB + +extra-deps: +- github: snoyberg/filelock + commit: 4f080496d8bf153fbe26e64d1f52cf73c7db25f6 diff --git a/tests/integration/tests/4105-test-coverage-of-internal-lib/Main.hs b/tests/integration/tests/4105-test-coverage-of-internal-lib/Main.hs new file mode 100644 index 0000000000..3d8d2dc4f7 --- /dev/null +++ b/tests/integration/tests/4105-test-coverage-of-internal-lib/Main.hs @@ -0,0 +1,25 @@ +-- Stack generates coverage reports for main libraries and private named +-- libraries (internal libraries). +-- +-- See: https://github.com/commercialhaskell/stack/issues/4105 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf, isPrefixOf ) +import StackTest + +main :: IO () +main = do + stack ["clean"] + stackCheckStdout ["test", "--coverage", "--color", "never"] check + +check :: String -> IO () +check output = case getCoverageLines output of + _:exprs:_ -> unless ("2/2" `isInfixOf` exprs) testFail + _ -> testFail + where + testFail = fail "Stack didn't generate coverage from both libraries" + +getCoverageLines :: String -> [String] +getCoverageLines = dropWhile (not . isCoverageHeader) . 
lines + where + isCoverageHeader = isPrefixOf "Summary coverage report for " diff --git a/tests/integration/tests/4105-test-coverage-of-internal-lib/files/.gitignore b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4105-test-coverage-of-internal-lib/files/int/Internal.hs b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/int/Internal.hs new file mode 100644 index 0000000000..cd713ec4e2 --- /dev/null +++ b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/int/Internal.hs @@ -0,0 +1,7 @@ +module Internal + ( funcInternal + ) where + +-- | A function of the internal library +funcInternal :: Int -> Int +funcInternal = pred diff --git a/tests/integration/tests/4105-test-coverage-of-internal-lib/files/package.yaml b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/package.yaml new file mode 100644 index 0000000000..18ae9cc898 --- /dev/null +++ b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/package.yaml @@ -0,0 +1,21 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + +internal-libraries: + internal: + source-dirs: int + +tests: + test: + source-dirs: test + main: Main.hs + dependencies: + - internal + - myPackage diff --git a/tests/integration/tests/4105-test-coverage-of-internal-lib/files/src/Lib.hs b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/src/Lib.hs new file mode 100644 index 0000000000..034418635a --- /dev/null +++ b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/src/Lib.hs @@ -0,0 +1,7 @@ +module Lib + ( funcLib + ) where + +-- | A function of the main library +funcLib :: Int -> Int +funcLib = succ diff --git 
a/tests/integration/tests/4105-test-coverage-of-internal-lib/files/stack.yaml b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/stack.yaml new file mode 100644 index 0000000000..c292f63385 --- /dev/null +++ b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/stack.yaml @@ -0,0 +1 @@ +snapshot: lts-24.37 diff --git a/tests/integration/tests/4105-test-coverage-of-internal-lib/files/test/Main.hs b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/test/Main.hs new file mode 100644 index 0000000000..967f9031e2 --- /dev/null +++ b/tests/integration/tests/4105-test-coverage-of-internal-lib/files/test/Main.hs @@ -0,0 +1,6 @@ +import Control.Monad ( when ) + +import Lib ( funcLib ) +import Internal ( funcInternal ) + +main = when (funcLib 41 /= funcInternal 43) $ error "test failed" diff --git a/tests/integration/tests/4181-clean-wo-dl-ghc/Main.hs b/tests/integration/tests/4181-clean-wo-dl-ghc/Main.hs new file mode 100644 index 0000000000..9736da7645 --- /dev/null +++ b/tests/integration/tests/4181-clean-wo-dl-ghc/Main.hs @@ -0,0 +1,13 @@ +-- Stack's clean command should not require the presence of the specified +-- version of GHC. +-- +-- See: https://github.com/commercialhaskell/stack/issues/4181 + +import StackTest + +main :: IO () +main = do + -- `stack clean` should succeed even though there is no ghc available. + -- See the stack.yaml file for how this works. 
+ stackIgnoreException ["clean"] + stackCleanFull diff --git a/tests/integration/tests/4181-clean-wo-dl-ghc/files/.gitignore b/tests/integration/tests/4181-clean-wo-dl-ghc/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4181-clean-wo-dl-ghc/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4181-clean-wo-dl-ghc/files/package.yaml b/tests/integration/tests/4181-clean-wo-dl-ghc/files/package.yaml new file mode 100644 index 0000000000..4b373ba746 --- /dev/null +++ b/tests/integration/tests/4181-clean-wo-dl-ghc/files/package.yaml @@ -0,0 +1,8 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: {} diff --git a/tests/integration/tests/4181-clean-wo-dl-ghc/files/stack.yaml b/tests/integration/tests/4181-clean-wo-dl-ghc/files/stack.yaml new file mode 100644 index 0000000000..7a64fd4244 --- /dev/null +++ b/tests/integration/tests/4181-clean-wo-dl-ghc/files/stack.yaml @@ -0,0 +1,6 @@ +# Update the resolver as necessary +snapshot: ghc-8.22 +# Do not use the system ghc, as ghc must not be available +system-ghc: false +# Do not install any other ghc, as ghc must not be available +install-ghc: false diff --git a/tests/integration/tests/4215-missing-unregister/Main.hs b/tests/integration/tests/4215-missing-unregister/Main.hs new file mode 100644 index 0000000000..4542b9dbaf --- /dev/null +++ b/tests/integration/tests/4215-missing-unregister/Main.hs @@ -0,0 +1,12 @@ +-- Even if the project directory is unchanged and the project package name and +-- version is the same, Stack recognises when a project package is different to +-- a project package that has been built previously. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/4215 + +import StackTest + +main :: IO () +main = do + stack ["--stack-yaml", "stack1.yaml", "build"] + stack ["--stack-yaml", "stack2.yaml", "build"] diff --git a/tests/integration/tests/4215-missing-unregister/files/.gitignore b/tests/integration/tests/4215-missing-unregister/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4215-missing-unregister/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4215-missing-unregister/files/stack1.yaml b/tests/integration/tests/4215-missing-unregister/files/stack1.yaml new file mode 100644 index 0000000000..365c421b18 --- /dev/null +++ b/tests/integration/tests/4215-missing-unregister/files/stack1.yaml @@ -0,0 +1,4 @@ +snapshot: ghc-9.10.3 + +packages: +- v1 diff --git a/tests/integration/tests/4215-missing-unregister/files/stack2.yaml b/tests/integration/tests/4215-missing-unregister/files/stack2.yaml new file mode 100644 index 0000000000..02a50554ec --- /dev/null +++ b/tests/integration/tests/4215-missing-unregister/files/stack2.yaml @@ -0,0 +1,4 @@ +snapshot: ghc-9.10.3 + +packages: +- v2 diff --git a/tests/integration/tests/4215-missing-unregister/files/v1/package.yaml b/tests/integration/tests/4215-missing-unregister/files/v1/package.yaml new file mode 100644 index 0000000000..4b373ba746 --- /dev/null +++ b/tests/integration/tests/4215-missing-unregister/files/v1/package.yaml @@ -0,0 +1,8 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: {} diff --git a/tests/integration/tests/4215-missing-unregister/files/v2/package.yaml b/tests/integration/tests/4215-missing-unregister/files/v2/package.yaml new file mode 100644 index 0000000000..4b373ba746 --- /dev/null +++ b/tests/integration/tests/4215-missing-unregister/files/v2/package.yaml @@ -0,0 +1,8 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: {} diff --git 
a/tests/integration/tests/4270-files-order/Main.hs b/tests/integration/tests/4270-files-order/Main.hs new file mode 100644 index 0000000000..e51067ba08 --- /dev/null +++ b/tests/integration/tests/4270-files-order/Main.hs @@ -0,0 +1,22 @@ +-- Stack's ghci command can load a project with c-sources, if the package +-- description lists the C source files in dependency order. +-- +-- See: https://github.com/commercialhaskell/stack/issues/4270 + +import Control.Monad ( when ) +import StackTest +import StackTest.Repl + +main :: IO () +main = do + stack ["build"] + stackRepl [] $ do + nextPrompt + replCommand "putStrLn greeting" + line <- replGetLine + let expected = "Hello, world!" + when (line /= expected) $ + error $ + "Didn't load correctly.\n" + <> "Expected: " <> expected <> "\n" + <> "Actual : " <> line <> "\n" diff --git a/tests/integration/tests/4270-files-order/files/.gitignore b/tests/integration/tests/4270-files-order/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4270-files-order/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/test/integration/tests/4270-files-order/files/cbits/a_dependent.c b/tests/integration/tests/4270-files-order/files/cbits/a_dependent.c similarity index 100% rename from test/integration/tests/4270-files-order/files/cbits/a_dependent.c rename to tests/integration/tests/4270-files-order/files/cbits/a_dependent.c diff --git a/test/integration/tests/4270-files-order/files/cbits/a_dependent.h b/tests/integration/tests/4270-files-order/files/cbits/a_dependent.h similarity index 100% rename from test/integration/tests/4270-files-order/files/cbits/a_dependent.h rename to tests/integration/tests/4270-files-order/files/cbits/a_dependent.h diff --git a/test/integration/tests/4270-files-order/files/cbits/the_dependency.c b/tests/integration/tests/4270-files-order/files/cbits/the_dependency.c similarity index 100% rename from 
test/integration/tests/4270-files-order/files/cbits/the_dependency.c rename to tests/integration/tests/4270-files-order/files/cbits/the_dependency.c diff --git a/tests/integration/tests/4270-files-order/files/package.yaml b/tests/integration/tests/4270-files-order/files/package.yaml new file mode 100644 index 0000000000..70fc7a38c9 --- /dev/null +++ b/tests/integration/tests/4270-files-order/files/package.yaml @@ -0,0 +1,13 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + include-dirs: cbits + c-sources: + - cbits/the_dependency.c + - cbits/a_dependent.c diff --git a/tests/integration/tests/4270-files-order/files/src/Lib.hs b/tests/integration/tests/4270-files-order/files/src/Lib.hs new file mode 100644 index 0000000000..a1d89f1b4f --- /dev/null +++ b/tests/integration/tests/4270-files-order/files/src/Lib.hs @@ -0,0 +1,11 @@ +module Lib where + +import Foreign.C.String ( CString, peekCString ) +import System.IO.Unsafe ( unsafePerformIO) + +foreign import ccall unsafe "a_dependent.h greeting" + c_greeting :: CString + +{-# NOINLINE greeting #-} +greeting :: String +greeting = unsafePerformIO $ peekCString c_greeting diff --git a/tests/integration/tests/4270-files-order/files/stack.yaml b/tests/integration/tests/4270-files-order/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/4270-files-order/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/4324-dot-includes-boot-packages/Main.hs b/tests/integration/tests/4324-dot-includes-boot-packages/Main.hs new file mode 100644 index 0000000000..20c4bff744 --- /dev/null +++ b/tests/integration/tests/4324-dot-includes-boot-packages/Main.hs @@ -0,0 +1,13 @@ +-- Stack's dot command includes the dependencies of GHC's boot packages. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/4324 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stackCheckStdout ["dot", "--external"] $ \str -> + unless ("\n\"process\" ->" `isInfixOf` str) $ + error "Not showing dependencies of process" diff --git a/tests/integration/tests/4324-dot-includes-boot-packages/files/.gitignore b/tests/integration/tests/4324-dot-includes-boot-packages/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4324-dot-includes-boot-packages/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4324-dot-includes-boot-packages/files/package.yaml b/tests/integration/tests/4324-dot-includes-boot-packages/files/package.yaml new file mode 100644 index 0000000000..ff5d3bfd91 --- /dev/null +++ b/tests/integration/tests/4324-dot-includes-boot-packages/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- process + +library: {} diff --git a/tests/integration/tests/4324-dot-includes-boot-packages/files/stack.yaml b/tests/integration/tests/4324-dot-includes-boot-packages/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/4324-dot-includes-boot-packages/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/4390-dot-no-ghc/Main.hs b/tests/integration/tests/4390-dot-no-ghc/Main.hs new file mode 100644 index 0000000000..9463c22f72 --- /dev/null +++ b/tests/integration/tests/4390-dot-no-ghc/Main.hs @@ -0,0 +1,10 @@ +-- Stack's dot command does not require the presence of the specified GHC. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/4390 + +import StackTest + +main :: IO () +main = do + stack ["ls", "dependencies", "--global-hints"] + stack ["dot", "--global-hints"] diff --git a/tests/integration/tests/4390-dot-no-ghc/files/.gitignore b/tests/integration/tests/4390-dot-no-ghc/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4390-dot-no-ghc/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4390-dot-no-ghc/files/package.yaml b/tests/integration/tests/4390-dot-no-ghc/files/package.yaml new file mode 100644 index 0000000000..4b373ba746 --- /dev/null +++ b/tests/integration/tests/4390-dot-no-ghc/files/package.yaml @@ -0,0 +1,8 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: {} diff --git a/tests/integration/tests/4390-dot-no-ghc/files/stack.yaml b/tests/integration/tests/4390-dot-no-ghc/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/4390-dot-no-ghc/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/4408-init-internal-libs/Main.hs b/tests/integration/tests/4408-init-internal-libs/Main.hs new file mode 100644 index 0000000000..5973d2da18 --- /dev/null +++ b/tests/integration/tests/4408-init-internal-libs/Main.hs @@ -0,0 +1,9 @@ +-- Stack can initialise a project where a project package includes a private +-- named library (an internal library). 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/4408 + +import StackTest + +main :: IO () +main = stack ["init"] diff --git a/tests/integration/tests/4408-init-internal-libs/files/.gitignore b/tests/integration/tests/4408-init-internal-libs/files/.gitignore new file mode 100644 index 0000000000..82bb5452c4 --- /dev/null +++ b/tests/integration/tests/4408-init-internal-libs/files/.gitignore @@ -0,0 +1,2 @@ +myPackage.cabal +stack.yaml diff --git a/tests/integration/tests/4408-init-internal-libs/files/int/Internal.hs b/tests/integration/tests/4408-init-internal-libs/files/int/Internal.hs new file mode 100644 index 0000000000..d066bb085e --- /dev/null +++ b/tests/integration/tests/4408-init-internal-libs/files/int/Internal.hs @@ -0,0 +1 @@ +module Internal where diff --git a/tests/integration/tests/4408-init-internal-libs/files/package.yaml b/tests/integration/tests/4408-init-internal-libs/files/package.yaml new file mode 100644 index 0000000000..ab1ff55ee9 --- /dev/null +++ b/tests/integration/tests/4408-init-internal-libs/files/package.yaml @@ -0,0 +1,15 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + dependencies: + - internal + +internal-libraries: + internal: + source-dirs: int diff --git a/tests/integration/tests/4408-init-internal-libs/files/src/Lib.hs b/tests/integration/tests/4408-init-internal-libs/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/4408-init-internal-libs/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/443-specify-path/Main.hs b/tests/integration/tests/443-specify-path/Main.hs new file mode 100644 index 0000000000..4c01c57e58 --- /dev/null +++ b/tests/integration/tests/443-specify-path/Main.hs @@ -0,0 +1,30 @@ +-- Stack can specify the directory to which built executable binary files are +-- copied. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/443 + +import StackTest +import System.Directory ( createDirectory, getCurrentDirectory ) +import System.FilePath ( (</>) ) + +main :: IO () +main = do + -- Default install + -- A manual test of the default stack install is required + + -- Install in current dir + stack [ "--local-bin-path", ".", "install" ] + doesExist myPackageExe + + -- Install in relative path + createDirectory "bin" + stack [ "--local-bin-path", "./bin", "install" ] + doesExist ("./bin/" <> myPackageExe) + + -- Install in absolute path + tmpDirectory <- fmap (</> "bin-absolute") getCurrentDirectory + createDirectory tmpDirectory + stack [ "--local-bin-path", tmpDirectory, "install" ] + doesExist (tmpDirectory </> myPackageExe) + where + myPackageExe = "myPackage" <> exeExt diff --git a/tests/integration/tests/443-specify-path/files/.gitignore b/tests/integration/tests/443-specify-path/files/.gitignore new file mode 100644 index 0000000000..e28030a9e4 --- /dev/null +++ b/tests/integration/tests/443-specify-path/files/.gitignore @@ -0,0 +1,5 @@ +bin/ +bin-absolute/ +myPackage.cabal +myPackage.exe +myPackage diff --git a/tests/integration/tests/443-specify-path/files/app/Main.hs b/tests/integration/tests/443-specify-path/files/app/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/443-specify-path/files/app/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/443-specify-path/files/package.yaml b/tests/integration/tests/443-specify-path/files/package.yaml new file mode 100644 index 0000000000..113821465d --- /dev/null +++ b/tests/integration/tests/443-specify-path/files/package.yaml @@ -0,0 +1,12 @@ +spec-version: 0.36.0 + +name: myPackage +version: 0.1.0.0 + +dependencies: +- base + +executables: + myPackage: + source-dirs: app + main: Main.hs diff --git a/tests/integration/tests/443-specify-path/files/stack.yaml 
b/tests/integration/tests/443-specify-path/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/443-specify-path/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/444-package-option/Main.hs b/tests/integration/tests/444-package-option/Main.hs new file mode 100644 index 0000000000..a5baaca85f --- /dev/null +++ b/tests/integration/tests/444-package-option/Main.hs @@ -0,0 +1,8 @@ +import StackTest + +main :: IO () +main = do + isAlpine <- getIsAlpine + if isAlpine || isARM + then logInfo "Disabled on Alpine Linux and ARM since it cannot yet install its own GHC." + else stack [defaultSnapshotArg, "--install-ghc", "runghc", "--package", "safe", "Test.hs"] diff --git a/test/integration/tests/444-package-option/files/Test.hs b/tests/integration/tests/444-package-option/files/Test.hs similarity index 100% rename from test/integration/tests/444-package-option/files/Test.hs rename to tests/integration/tests/444-package-option/files/Test.hs diff --git a/tests/integration/tests/4453-detailed/Main.hs b/tests/integration/tests/4453-detailed/Main.hs new file mode 100644 index 0000000000..fe56218f86 --- /dev/null +++ b/tests/integration/tests/4453-detailed/Main.hs @@ -0,0 +1,8 @@ +-- Stack supports tests of type detailed-0.9. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/4453 + +import StackTest + +main :: IO () +main = stack ["test"] diff --git a/tests/integration/tests/4453-detailed/files/.gitignore b/tests/integration/tests/4453-detailed/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4453-detailed/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4453-detailed/files/package.yaml b/tests/integration/tests/4453-detailed/files/package.yaml new file mode 100644 index 0000000000..8ecf25e975 --- /dev/null +++ b/tests/integration/tests/4453-detailed/files/package.yaml @@ -0,0 +1,16 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +tests: + test: + verbatim: + type: detailed-0.9 + test-module: Test + source-dirs: test + other-modules: [] + dependencies: + - Cabal >= 1.20 diff --git a/tests/integration/tests/4453-detailed/files/stack.yaml b/tests/integration/tests/4453-detailed/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/4453-detailed/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/4453-detailed/files/test/Test.hs b/tests/integration/tests/4453-detailed/files/test/Test.hs new file mode 100644 index 0000000000..c0e8be21d2 --- /dev/null +++ b/tests/integration/tests/4453-detailed/files/test/Test.hs @@ -0,0 +1,20 @@ +module Test + ( tests + ) where + +import Distribution.TestSuite + ( Progress (..), Result (..), Test (..), TestInstance (..) 
) + +tests :: IO [Test] +tests = pure [ test "test" Pass ] + +test :: String -> Result -> Test +test name r = Test testInstance + where + testInstance = TestInstance + { run = pure (Finished r) + , name = name + , tags = [] + , options = [] + , setOption = \_ _ -> Right testInstance + } diff --git a/tests/integration/tests/4488-newer-cabal-version/Main.hs b/tests/integration/tests/4488-newer-cabal-version/Main.hs new file mode 100644 index 0000000000..c246a1ee4f --- /dev/null +++ b/tests/integration/tests/4488-newer-cabal-version/Main.hs @@ -0,0 +1,11 @@ +-- Stack can build with a version of Cabal (the library) other than that which +-- is a boot package of the specified GHC version. +-- +-- See: https://github.com/commercialhaskell/stack/issues/4488 + +import StackTest + +main :: IO () +main = do + stackErr ["--stack-yaml", "stack-bad.yaml", "build", "--dry-run"] + stack ["--stack-yaml", "stack-good.yaml", "build", "--dry-run"] diff --git a/tests/integration/tests/4488-newer-cabal-version/files/.gitignore b/tests/integration/tests/4488-newer-cabal-version/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4488-newer-cabal-version/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4488-newer-cabal-version/files/package.yaml b/tests/integration/tests/4488-newer-cabal-version/files/package.yaml new file mode 100644 index 0000000000..f695f01228 --- /dev/null +++ b/tests/integration/tests/4488-newer-cabal-version/files/package.yaml @@ -0,0 +1,12 @@ +spec-version: 0.36.0 + +name: myPackage +custom-setup: + dependencies: + - base + - Cabal == 3.16.1.0 + +dependencies: +- base + +library: {} diff --git a/tests/integration/tests/4488-newer-cabal-version/files/stack-bad.yaml b/tests/integration/tests/4488-newer-cabal-version/files/stack-bad.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ 
b/tests/integration/tests/4488-newer-cabal-version/files/stack-bad.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/4488-newer-cabal-version/files/stack-good.yaml b/tests/integration/tests/4488-newer-cabal-version/files/stack-good.yaml new file mode 100644 index 0000000000..578b1f2a09 --- /dev/null +++ b/tests/integration/tests/4488-newer-cabal-version/files/stack-good.yaml @@ -0,0 +1,6 @@ +snapshot: ghc-9.10.3 + +extra-deps: +- Cabal-3.16.1.0 +- Cabal-syntax-3.16.1.0 +- alex-3.5.4.2 diff --git a/tests/integration/tests/4706-ignore-ghc-env-files/Main.hs b/tests/integration/tests/4706-ignore-ghc-env-files/Main.hs new file mode 100644 index 0000000000..e7e1666c54 --- /dev/null +++ b/tests/integration/tests/4706-ignore-ghc-env-files/Main.hs @@ -0,0 +1,30 @@ +-- Stack ignores GHC_ENVIRONMENT. +-- +-- See: https://github.com/commercialhaskell/stack/issues/4706 + +import Control.Exception ( bracket_ ) +import StackTest +import System.Directory ( canonicalizePath, removeFile ) +import System.Environment ( setEnv ) +import System.Info (arch, os) + +main :: IO () +main = do + let ghcVer = "9.10.3" + fp = concat + [ ".ghc.environment." 
+ , arch + , "-" + , os + , "-" + , ghcVer + ] + bracket_ + (writeFile fp "This is an invalid GHC environment file") + (removeFile fp) + ( do + envFile <- canonicalizePath fp + setEnv "GHC_ENVIRONMENT" envFile + stack ["build"] + stack ["runghc", "Main.hs"] + ) diff --git a/tests/integration/tests/4706-ignore-ghc-env-files/files/.gitignore b/tests/integration/tests/4706-ignore-ghc-env-files/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4706-ignore-ghc-env-files/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4706-ignore-ghc-env-files/files/Main.hs b/tests/integration/tests/4706-ignore-ghc-env-files/files/Main.hs new file mode 100644 index 0000000000..d582e1e36a --- /dev/null +++ b/tests/integration/tests/4706-ignore-ghc-env-files/files/Main.hs @@ -0,0 +1,2 @@ +main :: IO () +main = pure () diff --git a/tests/integration/tests/4706-ignore-ghc-env-files/files/package.yaml b/tests/integration/tests/4706-ignore-ghc-env-files/files/package.yaml new file mode 100644 index 0000000000..4b373ba746 --- /dev/null +++ b/tests/integration/tests/4706-ignore-ghc-env-files/files/package.yaml @@ -0,0 +1,8 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: {} diff --git a/tests/integration/tests/4706-ignore-ghc-env-files/files/stack.yaml b/tests/integration/tests/4706-ignore-ghc-env-files/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/4706-ignore-ghc-env-files/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/4754-rebuild-haddocks/Main.hs b/tests/integration/tests/4754-rebuild-haddocks/Main.hs new file mode 100644 index 0000000000..3d5e0c499e --- /dev/null +++ b/tests/integration/tests/4754-rebuild-haddocks/Main.hs @@ -0,0 +1,13 @@ +-- +-- See: https://github.com/commercialhaskell/stack/issues/4754 + +import StackTest + +main :: IO () +main = do + 
stackCleanFull + stackErr ["haddock"] + stackCleanFull + stackErr ["haddock", "--no-haddock-deps"] + stack ["build"] + stackErr ["haddock", "--no-haddock-deps"] diff --git a/tests/integration/tests/4754-rebuild-haddocks/files/.gitignore b/tests/integration/tests/4754-rebuild-haddocks/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4754-rebuild-haddocks/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4754-rebuild-haddocks/files/package.yaml b/tests/integration/tests/4754-rebuild-haddocks/files/package.yaml new file mode 100644 index 0000000000..3c2901b603 --- /dev/null +++ b/tests/integration/tests/4754-rebuild-haddocks/files/package.yaml @@ -0,0 +1,12 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + ghc-options: + - -Werror + - -Winvalid-haddock diff --git a/tests/integration/tests/4754-rebuild-haddocks/files/src/Lib.hs b/tests/integration/tests/4754-rebuild-haddocks/files/src/Lib.hs new file mode 100644 index 0000000000..4aa20bc752 --- /dev/null +++ b/tests/integration/tests/4754-rebuild-haddocks/files/src/Lib.hs @@ -0,0 +1,5 @@ +module Lib where + +-- | The function below intentionally contains invalid Haddock +func :: () +func = () -- ^ this should fail!!! 
diff --git a/tests/integration/tests/4754-rebuild-haddocks/files/stack.yaml b/tests/integration/tests/4754-rebuild-haddocks/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/4754-rebuild-haddocks/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/4783-doctest-deps/Main.hs b/tests/integration/tests/4783-doctest-deps/Main.hs new file mode 100644 index 0000000000..ccdbd4732b --- /dev/null +++ b/tests/integration/tests/4783-doctest-deps/Main.hs @@ -0,0 +1,9 @@ +-- +-- See: https://github.com/commercialhaskell/stack/issues/4783 + +import StackTest + +main :: IO () +main = do + stack ["build", "myPackageB"] + stack ["test"] diff --git a/tests/integration/tests/4783-doctest-deps/files/.gitignore b/tests/integration/tests/4783-doctest-deps/files/.gitignore new file mode 100644 index 0000000000..f9a6e152d2 --- /dev/null +++ b/tests/integration/tests/4783-doctest-deps/files/.gitignore @@ -0,0 +1,2 @@ +myPackageA.cabal +myPackageB.cabal diff --git a/tests/integration/tests/4783-doctest-deps/files/myPackageB/Acme/Dont.hs b/tests/integration/tests/4783-doctest-deps/files/myPackageB/Acme/Dont.hs new file mode 100644 index 0000000000..dd6e565c6f --- /dev/null +++ b/tests/integration/tests/4783-doctest-deps/files/myPackageB/Acme/Dont.hs @@ -0,0 +1,4 @@ +module Acme.Dont where + +don't :: (Monad m) => m a -> m () +don't _action = return () diff --git a/tests/integration/tests/4783-doctest-deps/files/myPackageB/package.yaml b/tests/integration/tests/4783-doctest-deps/files/myPackageB/package.yaml new file mode 100644 index 0000000000..f94636c710 --- /dev/null +++ b/tests/integration/tests/4783-doctest-deps/files/myPackageB/package.yaml @@ -0,0 +1,8 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base + +library: {} diff --git a/tests/integration/tests/4783-doctest-deps/files/package.yaml b/tests/integration/tests/4783-doctest-deps/files/package.yaml new file mode 
100644 index 0000000000..142bbea6b6 --- /dev/null +++ b/tests/integration/tests/4783-doctest-deps/files/package.yaml @@ -0,0 +1,17 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base + +library: + source-dirs: src + +tests: + doctest: + source-dirs: test + main: Main.hs + dependencies: + - doctest + - acme-dont diff --git a/tests/integration/tests/4783-doctest-deps/files/snapshot.yaml b/tests/integration/tests/4783-doctest-deps/files/snapshot.yaml new file mode 100644 index 0000000000..8637adcee0 --- /dev/null +++ b/tests/integration/tests/4783-doctest-deps/files/snapshot.yaml @@ -0,0 +1,6 @@ +name: mySnapshot + +snapshot: lts-24.37 + +packages: +- acme-dont-1.1@sha256:8264ad3e5113d3e0417b46e71d5a9c0914a1f03b5b81319cc329f1dc0f49b96c,602 diff --git a/tests/integration/tests/4783-doctest-deps/files/src/Lib.hs b/tests/integration/tests/4783-doctest-deps/files/src/Lib.hs new file mode 100644 index 0000000000..c625fb3d19 --- /dev/null +++ b/tests/integration/tests/4783-doctest-deps/files/src/Lib.hs @@ -0,0 +1,8 @@ +module Lib where + +-- | +-- +-- >>> import Acme.Dont +-- >>> don't func +func :: IO () +func = error "func" diff --git a/tests/integration/tests/4783-doctest-deps/files/stack.yaml b/tests/integration/tests/4783-doctest-deps/files/stack.yaml new file mode 100644 index 0000000000..9cfd6e6102 --- /dev/null +++ b/tests/integration/tests/4783-doctest-deps/files/stack.yaml @@ -0,0 +1,5 @@ +snapshot: snapshot.yaml + +extra-deps: +# Include a package with a duplicated module name +- myPackageB diff --git a/tests/integration/tests/4783-doctest-deps/files/test/Main.hs b/tests/integration/tests/4783-doctest-deps/files/test/Main.hs new file mode 100644 index 0000000000..241b37352a --- /dev/null +++ b/tests/integration/tests/4783-doctest-deps/files/test/Main.hs @@ -0,0 +1,4 @@ +import Test.DocTest + +main :: IO () +main = doctest ["src/Lib.hs"] diff --git a/tests/integration/tests/4897-boot-package-pruned/Main.hs 
b/tests/integration/tests/4897-boot-package-pruned/Main.hs new file mode 100644 index 0000000000..8e9c457127 --- /dev/null +++ b/tests/integration/tests/4897-boot-package-pruned/Main.hs @@ -0,0 +1,19 @@ +-- Stack warns when a required GHC boot package has been pruned. +-- +-- See: https://github.com/commercialhaskell/stack/issues/4897 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +planFailure :: String +planFailure = "but this GHC boot package has been pruned" + +main :: IO () +main = do + stackErrStderr ["build"] (expectMessage planFailure) + +expectMessage :: String -> String -> IO () +expectMessage msg stderr = do + unless (words msg `isInfixOf` words stderr) $ + error $ "Expected an error: \n" ++ show msg diff --git a/tests/integration/tests/4897-boot-package-pruned/files/.gitignore b/tests/integration/tests/4897-boot-package-pruned/files/.gitignore new file mode 100644 index 0000000000..59d924c932 --- /dev/null +++ b/tests/integration/tests/4897-boot-package-pruned/files/.gitignore @@ -0,0 +1,2 @@ +directory.cabal +myPackage.cabal diff --git a/tests/integration/tests/4897-boot-package-pruned/files/directory/directory.cabal b/tests/integration/tests/4897-boot-package-pruned/files/directory/directory.cabal new file mode 100644 index 0000000000..003b0b4583 --- /dev/null +++ b/tests/integration/tests/4897-boot-package-pruned/files/directory/directory.cabal @@ -0,0 +1,14 @@ +cabal-version: 1.12 + +-- This file has been generated from package.yaml by hpack version 0.39.1. 
+-- +-- see: https://github.com/sol/hpack + +name: directory +version: 0.0.0 +build-type: Simple + +library + build-depends: + base + default-language: Haskell2010 diff --git a/tests/integration/tests/4897-boot-package-pruned/files/directory/package.yaml b/tests/integration/tests/4897-boot-package-pruned/files/directory/package.yaml new file mode 100644 index 0000000000..f93449661a --- /dev/null +++ b/tests/integration/tests/4897-boot-package-pruned/files/directory/package.yaml @@ -0,0 +1,8 @@ +spec-version: 0.36.0 + +name: directory + +dependencies: +- base + +library: {} diff --git a/tests/integration/tests/4897-boot-package-pruned/files/package.yaml b/tests/integration/tests/4897-boot-package-pruned/files/package.yaml new file mode 100644 index 0000000000..625da91c48 --- /dev/null +++ b/tests/integration/tests/4897-boot-package-pruned/files/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackage +version: 0.1.0.0 + +dependencies: +- base +- directory +- process + +library: {} diff --git a/tests/integration/tests/4897-boot-package-pruned/files/stack.yaml b/tests/integration/tests/4897-boot-package-pruned/files/stack.yaml new file mode 100644 index 0000000000..8a84491306 --- /dev/null +++ b/tests/integration/tests/4897-boot-package-pruned/files/stack.yaml @@ -0,0 +1,4 @@ +snapshot: ghc-9.10.3 + +extra-deps: +- ./directory diff --git a/tests/integration/tests/4938-non-ascii-module-names/Main.hs b/tests/integration/tests/4938-non-ascii-module-names/Main.hs new file mode 100644 index 0000000000..5fe95a532b --- /dev/null +++ b/tests/integration/tests/4938-non-ascii-module-names/Main.hs @@ -0,0 +1,15 @@ +-- Stack accepts module names that include Unicode code points outside of the +-- Basic Latin Unicode block (ASCII). 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/4938 + +import Control.Monad ( unless ) +import StackTest + +main :: IO () +main = do + -- Disabled on Windows due to an error occurred in the integration tests + -- regarding Unicode character. Tried to fix it (https://github.com/commercialhaskell/stack/pull/5162/commits/8f04ad9e4cbaa54370dc5af476e3307a16c84405) + -- but it didn't work + unless isWindows $ + stack ["build"] diff --git a/tests/integration/tests/4938-non-ascii-module-names/files/.gitignore b/tests/integration/tests/4938-non-ascii-module-names/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/4938-non-ascii-module-names/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/4938-non-ascii-module-names/files/package.yaml b/tests/integration/tests/4938-non-ascii-module-names/files/package.yaml new file mode 100644 index 0000000000..a7305a77cf --- /dev/null +++ b/tests/integration/tests/4938-non-ascii-module-names/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src diff --git "a/tests/integration/tests/4938-non-ascii-module-names/files/src/\316\224\316\277\316\272\316\271\316\274\316\256.hs" "b/tests/integration/tests/4938-non-ascii-module-names/files/src/\316\224\316\277\316\272\316\271\316\274\316\256.hs" new file mode 100644 index 0000000000..9e2709d52d --- /dev/null +++ "b/tests/integration/tests/4938-non-ascii-module-names/files/src/\316\224\316\277\316\272\316\271\316\274\316\256.hs" @@ -0,0 +1,6 @@ +module Δοκιμή + ( func + ) where + +func :: IO () +func = pure () diff --git a/tests/integration/tests/4938-non-ascii-module-names/files/stack.yaml b/tests/integration/tests/4938-non-ascii-module-names/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/4938-non-ascii-module-names/files/stack.yaml @@ -0,0 +1 @@ 
+snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/5180-ghc-rts-flags/Main.hs b/tests/integration/tests/5180-ghc-rts-flags/Main.hs new file mode 100644 index 0000000000..abdca100ad --- /dev/null +++ b/tests/integration/tests/5180-ghc-rts-flags/Main.hs @@ -0,0 +1,8 @@ +-- Stack supports GHC RTS options. +-- +-- See: https://github.com/commercialhaskell/stack/issues/5180 + +import StackTest + +main :: IO () +main = stack ["build"] diff --git a/tests/integration/tests/5180-ghc-rts-flags/files/.gitignore b/tests/integration/tests/5180-ghc-rts-flags/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/5180-ghc-rts-flags/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/5180-ghc-rts-flags/files/package.yaml b/tests/integration/tests/5180-ghc-rts-flags/files/package.yaml new file mode 100644 index 0000000000..a7305a77cf --- /dev/null +++ b/tests/integration/tests/5180-ghc-rts-flags/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/5180-ghc-rts-flags/files/src/Lib.hs b/tests/integration/tests/5180-ghc-rts-flags/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/5180-ghc-rts-flags/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/5180-ghc-rts-flags/files/stack.yaml b/tests/integration/tests/5180-ghc-rts-flags/files/stack.yaml new file mode 100644 index 0000000000..6f95611bd9 --- /dev/null +++ b/tests/integration/tests/5180-ghc-rts-flags/files/stack.yaml @@ -0,0 +1,4 @@ +snapshot: ghc-9.10.3 + +ghc-options: + "$locals": -j8 +RTS -s -A128M diff --git a/tests/integration/tests/5272-only-locals/Main.hs b/tests/integration/tests/5272-only-locals/Main.hs new file mode 100644 index 0000000000..0b2d4ebc3a --- /dev/null +++ 
b/tests/integration/tests/5272-only-locals/Main.hs @@ -0,0 +1,14 @@ +-- Stack supports a build target limited to locals. +-- +-- See: https://github.com/commercialhaskell/stack/issues/5272 + +import Control.Monad ( void ) +import StackTest + +main :: IO () +main = do + -- Ensure that the acme-missiles package is not in a package database + void $ stack' ["exec", "ghc-pkg", "unregister", "acme-missiles"] + stackErr ["build", "--only-locals"] + stack ["build", "--only-snapshot"] + stack ["build", "--only-locals"] diff --git a/tests/integration/tests/5272-only-locals/files/.gitignore b/tests/integration/tests/5272-only-locals/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/5272-only-locals/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/5272-only-locals/files/package.yaml b/tests/integration/tests/5272-only-locals/files/package.yaml new file mode 100644 index 0000000000..c80a013e52 --- /dev/null +++ b/tests/integration/tests/5272-only-locals/files/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- acme-missiles + +library: + source-dirs: src diff --git a/tests/integration/tests/5272-only-locals/files/src/Lib.hs b/tests/integration/tests/5272-only-locals/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/5272-only-locals/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/5272-only-locals/files/stack.yaml b/tests/integration/tests/5272-only-locals/files/stack.yaml new file mode 100644 index 0000000000..bb37b901bc --- /dev/null +++ b/tests/integration/tests/5272-only-locals/files/stack.yaml @@ -0,0 +1,4 @@ +snapshot: ghc-9.10.3 + +extra-deps: +- acme-missiles-0.3@rev:0 diff --git a/tests/integration/tests/5680-share-package-across-projects/Main.hs b/tests/integration/tests/5680-share-package-across-projects/Main.hs new file mode 
100644 index 0000000000..b5797bdfb8 --- /dev/null +++ b/tests/integration/tests/5680-share-package-across-projects/Main.hs @@ -0,0 +1,11 @@ +-- Stack supports project packages located outside of the project directory. +-- +-- See: https://github.com/commercialhaskell/stack/issues/5680 + +import StackTest + +main :: IO () +main = do + withCwd "myPackageA" $ stack ["build"] + withCwd "myPackageB" $ stack ["build"] + withCwd "myPackageA" $ stack ["build"] diff --git a/tests/integration/tests/5680-share-package-across-projects/files/.gitignore b/tests/integration/tests/5680-share-package-across-projects/files/.gitignore new file mode 100644 index 0000000000..c4ea0f5ab0 --- /dev/null +++ b/tests/integration/tests/5680-share-package-across-projects/files/.gitignore @@ -0,0 +1,3 @@ +myPackageA.cabal +myPackageB.cabal +myPackageC.cabal diff --git a/tests/integration/tests/5680-share-package-across-projects/files/myPackageA/package.yaml b/tests/integration/tests/5680-share-package-across-projects/files/myPackageA/package.yaml new file mode 100644 index 0000000000..d26b472ded --- /dev/null +++ b/tests/integration/tests/5680-share-package-across-projects/files/myPackageA/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/5680-share-package-across-projects/files/myPackageA/src/Lib.hs b/tests/integration/tests/5680-share-package-across-projects/files/myPackageA/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/5680-share-package-across-projects/files/myPackageA/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/5680-share-package-across-projects/files/myPackageA/stack.yaml b/tests/integration/tests/5680-share-package-across-projects/files/myPackageA/stack.yaml new file mode 100644 index 0000000000..227a659a66 --- /dev/null +++ 
b/tests/integration/tests/5680-share-package-across-projects/files/myPackageA/stack.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +packages: +- . +- ../myPackageC diff --git a/tests/integration/tests/5680-share-package-across-projects/files/myPackageB/package.yaml b/tests/integration/tests/5680-share-package-across-projects/files/myPackageB/package.yaml new file mode 100644 index 0000000000..03b4ec87d3 --- /dev/null +++ b/tests/integration/tests/5680-share-package-across-projects/files/myPackageB/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/5680-share-package-across-projects/files/myPackageB/src/Lib.hs b/tests/integration/tests/5680-share-package-across-projects/files/myPackageB/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/5680-share-package-across-projects/files/myPackageB/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/5680-share-package-across-projects/files/myPackageB/stack.yaml b/tests/integration/tests/5680-share-package-across-projects/files/myPackageB/stack.yaml new file mode 100644 index 0000000000..227a659a66 --- /dev/null +++ b/tests/integration/tests/5680-share-package-across-projects/files/myPackageB/stack.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +packages: +- . 
+- ../myPackageC diff --git a/tests/integration/tests/5680-share-package-across-projects/files/myPackageC/package.yaml b/tests/integration/tests/5680-share-package-across-projects/files/myPackageC/package.yaml new file mode 100644 index 0000000000..4ab5200b73 --- /dev/null +++ b/tests/integration/tests/5680-share-package-across-projects/files/myPackageC/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackageC + +dependencies: +- base + +library: + source-dirs: src diff --git a/test/integration/tests/1659-skip-component/files/src/Lib.hs b/tests/integration/tests/5680-share-package-across-projects/files/myPackageC/src/Lib.hs similarity index 100% rename from test/integration/tests/1659-skip-component/files/src/Lib.hs rename to tests/integration/tests/5680-share-package-across-projects/files/myPackageC/src/Lib.hs diff --git a/tests/integration/tests/5680-share-package-across-projects/files/myPackageC/stack.yaml b/tests/integration/tests/5680-share-package-across-projects/files/myPackageC/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/5680-share-package-across-projects/files/myPackageC/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/6046-missing-sublib-unregister/Main.hs b/tests/integration/tests/6046-missing-sublib-unregister/Main.hs new file mode 100644 index 0000000000..c43d98a66d --- /dev/null +++ b/tests/integration/tests/6046-missing-sublib-unregister/Main.hs @@ -0,0 +1,16 @@ +-- Stack builds a package where the main library depends on a private named +-- library (an internal library). +-- +-- See: https://github.com/commercialhaskell/stack/issues/6046 + +import StackTest + +-- This tests building a package with a library and an internal sub library, +-- where the library depends on the sub library, first version 0.1.0.0 and then +-- version 0.2.0.0. 
+main :: IO () +main = do + copy "package1.yaml" "package.yaml" + stack ["build"] + copy "package2.yaml" "package.yaml" + stack ["build"] diff --git a/tests/integration/tests/6046-missing-sublib-unregister/files/int/Internal.hs b/tests/integration/tests/6046-missing-sublib-unregister/files/int/Internal.hs new file mode 100644 index 0000000000..d066bb085e --- /dev/null +++ b/tests/integration/tests/6046-missing-sublib-unregister/files/int/Internal.hs @@ -0,0 +1 @@ +module Internal where diff --git a/tests/integration/tests/6046-missing-sublib-unregister/files/package1.yaml b/tests/integration/tests/6046-missing-sublib-unregister/files/package1.yaml new file mode 100644 index 0000000000..62c65aee09 --- /dev/null +++ b/tests/integration/tests/6046-missing-sublib-unregister/files/package1.yaml @@ -0,0 +1,16 @@ +spec-version: 0.36.0 + +name: myPackage +version: 0.1.0.0 + +dependencies: +- base + +library: + source-dirs: src + dependencies: + - internal + +internal-libraries: + internal: + source-dirs: int diff --git a/tests/integration/tests/6046-missing-sublib-unregister/files/package2.yaml b/tests/integration/tests/6046-missing-sublib-unregister/files/package2.yaml new file mode 100644 index 0000000000..176e733308 --- /dev/null +++ b/tests/integration/tests/6046-missing-sublib-unregister/files/package2.yaml @@ -0,0 +1,16 @@ +spec-version: 0.36.0 + +name: myPackage +version: 0.2.0.0 + +dependencies: +- base + +library: + source-dirs: src + dependencies: + - internal + +internal-libraries: + internal: + source-dirs: int diff --git a/tests/integration/tests/6046-missing-sublib-unregister/files/src/Lib.hs b/tests/integration/tests/6046-missing-sublib-unregister/files/src/Lib.hs new file mode 100644 index 0000000000..1369151610 --- /dev/null +++ b/tests/integration/tests/6046-missing-sublib-unregister/files/src/Lib.hs @@ -0,0 +1,3 @@ +module Lib where + +import Internal diff --git a/tests/integration/tests/6046-missing-sublib-unregister/files/stack.yaml 
b/tests/integration/tests/6046-missing-sublib-unregister/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/6046-missing-sublib-unregister/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/606-local-version-not-exist/Main.hs b/tests/integration/tests/606-local-version-not-exist/Main.hs new file mode 100644 index 0000000000..02f4066f51 --- /dev/null +++ b/tests/integration/tests/606-local-version-not-exist/Main.hs @@ -0,0 +1,12 @@ +-- Stack reports an error when the target is a project package version, +-- whether the version is correct or not. +-- +-- See: https://github.com/commercialhaskell/stack/issues/606 + +import StackTest + +main :: IO () +main = do + stackErr ["build", "myPackage-1"] + stackErr ["build", "myPackage-0.1.0.0"] + stack ["build", "myPackage"] diff --git a/tests/integration/tests/606-local-version-not-exist/files/.gitignore b/tests/integration/tests/606-local-version-not-exist/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/606-local-version-not-exist/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/606-local-version-not-exist/files/package.yaml b/tests/integration/tests/606-local-version-not-exist/files/package.yaml new file mode 100644 index 0000000000..b20cb3407d --- /dev/null +++ b/tests/integration/tests/606-local-version-not-exist/files/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackage +version: 0.1.0.0 + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/606-local-version-not-exist/files/src/Lib.hs b/tests/integration/tests/606-local-version-not-exist/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/606-local-version-not-exist/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git 
a/tests/integration/tests/606-local-version-not-exist/files/stack.yaml b/tests/integration/tests/606-local-version-not-exist/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/606-local-version-not-exist/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/617-extra-dep-flag/Main.hs b/tests/integration/tests/617-extra-dep-flag/Main.hs new file mode 100644 index 0000000000..eefa091b4f --- /dev/null +++ b/tests/integration/tests/617-extra-dep-flag/Main.hs @@ -0,0 +1,8 @@ +-- Stack can specify a Cabal flag for an extra-dep. +-- +-- See: https://github.com/commercialhaskell/stack/issues/617 + +import StackTest + +main :: IO () +main = stack ["build", "acme-missiles-0.3"] diff --git a/tests/integration/tests/617-extra-dep-flag/files/stack.yaml b/tests/integration/tests/617-extra-dep-flag/files/stack.yaml new file mode 100644 index 0000000000..ae1b6d1fd9 --- /dev/null +++ b/tests/integration/tests/617-extra-dep-flag/files/stack.yaml @@ -0,0 +1,10 @@ +snapshot: ghc-9.10.3 + +packages: [] + +extra-deps: +- text-1.2.0.3 + +flags: + text: + integer-simple: false diff --git a/tests/integration/tests/617-unused-flag-cli/Main.hs b/tests/integration/tests/617-unused-flag-cli/Main.hs new file mode 100644 index 0000000000..0170e4148f --- /dev/null +++ b/tests/integration/tests/617-unused-flag-cli/Main.hs @@ -0,0 +1,13 @@ +-- Stack reports an error when a Cabal flag is specified on the command line for +-- a specific non-existent package or a non-existent Cabal flag is specified for +-- a specific project package. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/617 + +import StackTest + +main :: IO () +main = do + stackErr ["build", "--flag", "noSuchPackage:my-flag"] + stackErr ["build", "--flag", "myPackage:no-such-flag"] + stack ["build", "--flag", "*:no-such-flag"] diff --git a/tests/integration/tests/617-unused-flag-cli/files/.gitignore b/tests/integration/tests/617-unused-flag-cli/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/617-unused-flag-cli/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/617-unused-flag-cli/files/package.yaml b/tests/integration/tests/617-unused-flag-cli/files/package.yaml new file mode 100644 index 0000000000..b20cb3407d --- /dev/null +++ b/tests/integration/tests/617-unused-flag-cli/files/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackage +version: 0.1.0.0 + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/617-unused-flag-cli/files/src/Lib.hs b/tests/integration/tests/617-unused-flag-cli/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/617-unused-flag-cli/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/617-unused-flag-cli/files/stack.yaml b/tests/integration/tests/617-unused-flag-cli/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/617-unused-flag-cli/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/617-unused-flag-name-yaml/Main.hs b/tests/integration/tests/617-unused-flag-name-yaml/Main.hs new file mode 100644 index 0000000000..5fabddaa78 --- /dev/null +++ b/tests/integration/tests/617-unused-flag-name-yaml/Main.hs @@ -0,0 +1,9 @@ +-- Stack reports an error when a non-existent Cabal flag is specified for +-- a project package. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/617 + +import StackTest + +main :: IO () +main = stackErr ["build"] diff --git a/tests/integration/tests/617-unused-flag-name-yaml/files/.gitignore b/tests/integration/tests/617-unused-flag-name-yaml/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/617-unused-flag-name-yaml/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/617-unused-flag-name-yaml/files/package.yaml b/tests/integration/tests/617-unused-flag-name-yaml/files/package.yaml new file mode 100644 index 0000000000..b20cb3407d --- /dev/null +++ b/tests/integration/tests/617-unused-flag-name-yaml/files/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackage +version: 0.1.0.0 + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/617-unused-flag-name-yaml/files/src/Lib.hs b/tests/integration/tests/617-unused-flag-name-yaml/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/617-unused-flag-name-yaml/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/617-unused-flag-name-yaml/files/stack.yaml b/tests/integration/tests/617-unused-flag-name-yaml/files/stack.yaml new file mode 100644 index 0000000000..d7173d4dd8 --- /dev/null +++ b/tests/integration/tests/617-unused-flag-name-yaml/files/stack.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +flags: + myPackage: + no-such-flag: false diff --git a/tests/integration/tests/617-unused-flag-yaml/Main.hs b/tests/integration/tests/617-unused-flag-yaml/Main.hs new file mode 100644 index 0000000000..609efaadd3 --- /dev/null +++ b/tests/integration/tests/617-unused-flag-yaml/Main.hs @@ -0,0 +1,9 @@ +-- Stack reports an error when a Cabal flag is specified for a specific +-- non-existent package. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/617 + +import StackTest + +main :: IO () +main = stackErr ["build"] diff --git a/tests/integration/tests/617-unused-flag-yaml/files/.gitignore b/tests/integration/tests/617-unused-flag-yaml/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/617-unused-flag-yaml/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/617-unused-flag-yaml/files/package.yaml b/tests/integration/tests/617-unused-flag-yaml/files/package.yaml new file mode 100644 index 0000000000..b20cb3407d --- /dev/null +++ b/tests/integration/tests/617-unused-flag-yaml/files/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackage +version: 0.1.0.0 + +dependencies: +- base + +library: + source-dirs: src diff --git a/test/integration/tests/3397-ghc-solver/files/src/Lib.hs b/tests/integration/tests/617-unused-flag-yaml/files/src/Lib.hs similarity index 100% rename from test/integration/tests/3397-ghc-solver/files/src/Lib.hs rename to tests/integration/tests/617-unused-flag-yaml/files/src/Lib.hs diff --git a/tests/integration/tests/617-unused-flag-yaml/files/stack.yaml b/tests/integration/tests/617-unused-flag-yaml/files/stack.yaml new file mode 100644 index 0000000000..ba4e5e5804 --- /dev/null +++ b/tests/integration/tests/617-unused-flag-yaml/files/stack.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +flags: + no-such-package: + my-flag: false diff --git a/tests/integration/tests/620-env-command/Main.hs b/tests/integration/tests/620-env-command/Main.hs new file mode 100644 index 0000000000..94b2c35a5b --- /dev/null +++ b/tests/integration/tests/620-env-command/Main.hs @@ -0,0 +1,12 @@ +-- Stack's config env command produces output that can be evaluated by bash's +-- built-in eval command. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/620 + +import Control.Exception ( throwIO ) +import Control.Monad ( unless ) +import StackTest +import System.Process ( rawSystem ) + +main :: IO () +main = unless isWindows $ rawSystem "bash" ["run.sh"] >>= throwIO diff --git a/test/integration/tests/620-env-command/files/.gitignore b/tests/integration/tests/620-env-command/files/.gitignore similarity index 100% rename from test/integration/tests/620-env-command/files/.gitignore rename to tests/integration/tests/620-env-command/files/.gitignore diff --git a/tests/integration/tests/620-env-command/files/Main.hs b/tests/integration/tests/620-env-command/files/Main.hs new file mode 100644 index 0000000000..d582e1e36a --- /dev/null +++ b/tests/integration/tests/620-env-command/files/Main.hs @@ -0,0 +1,2 @@ +main :: IO () +main = pure () diff --git a/tests/integration/tests/620-env-command/files/run.sh b/tests/integration/tests/620-env-command/files/run.sh new file mode 100644 index 0000000000..134e7e4c5a --- /dev/null +++ b/tests/integration/tests/620-env-command/files/run.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +set -euxo pipefail + +eval `stack --snapshot lts-24.37 config env` +ghc Main.hs diff --git a/tests/integration/tests/6342-say-ghc-version-in-build/.gitignore b/tests/integration/tests/6342-say-ghc-version-in-build/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/6342-say-ghc-version-in-build/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/6342-say-ghc-version-in-build/Main.hs b/tests/integration/tests/6342-say-ghc-version-in-build/Main.hs new file mode 100644 index 0000000000..51385a13ab --- /dev/null +++ b/tests/integration/tests/6342-say-ghc-version-in-build/Main.hs @@ -0,0 +1,25 @@ +-- Stack outputs the GHC version that it is using during a build. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/6342 + +import Control.Monad ( unless ) +import Data.Char ( isSpace ) +import Data.List ( dropWhileEnd, isInfixOf ) +import StackTest + +main :: IO () +main = + -- Query the actual compiler + stackCheckStdout ["query", "compiler", "actual"] $ \compiler -> do + stackCheckStderr ["build"] (expectMessage $ buildWith (trimEnd compiler)) + +buildWith :: String -> String +buildWith compiler = "build (lib) with " <> compiler + +expectMessage :: String -> String -> IO () +expectMessage msg stderr = do + unless (words msg `isInfixOf` words stderr) + (error $ "Expected message: \n" ++ show msg) + +trimEnd :: String -> String +trimEnd = dropWhileEnd isSpace diff --git a/tests/integration/tests/6342-say-ghc-version-in-build/files/package.yaml b/tests/integration/tests/6342-say-ghc-version-in-build/files/package.yaml new file mode 100644 index 0000000000..2e4f2c0854 --- /dev/null +++ b/tests/integration/tests/6342-say-ghc-version-in-build/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base < 5 + +library: + source-dirs: src diff --git a/tests/integration/tests/6342-say-ghc-version-in-build/files/src/Lib.hs b/tests/integration/tests/6342-say-ghc-version-in-build/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/6342-say-ghc-version-in-build/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/6342-say-ghc-version-in-build/files/stack.yaml b/tests/integration/tests/6342-say-ghc-version-in-build/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/6342-say-ghc-version-in-build/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/6372-sdist-unicode-test/.gitignore b/tests/integration/tests/6372-sdist-unicode-test/.gitignore new file mode 100644 index 0000000000..a92d9c3b13 --- /dev/null 
+++ b/tests/integration/tests/6372-sdist-unicode-test/.gitignore @@ -0,0 +1 @@ +test١٢٣.cabal diff --git a/tests/integration/tests/6372-sdist-unicode-test/Main.hs b/tests/integration/tests/6372-sdist-unicode-test/Main.hs new file mode 100644 index 0000000000..0b33987b95 --- /dev/null +++ b/tests/integration/tests/6372-sdist-unicode-test/Main.hs @@ -0,0 +1,23 @@ +-- Stack sdist handles Unicode code points. +-- +-- See: https://github.com/commercialhaskell/stack/issues/6372 + +import Control.Monad ( unless ) +import StackTest + +-- | The test fails at runtime on the Windows Server 2022 GitHub-hosted runner +-- only, at the point of outputting a Unicode character, with: +-- +-- : commitAndReleaseBuffer: invalid argument (cannot encode character '\1633') +-- +-- That appears to be similar to +-- https://gitlab.haskell.org/ghc/ghc/-/issues/8118, however: (1) the locale is +-- set to C.UTF-8 and the active code page is 65001; and +-- (2) `GHC.IO.Encoding.setLocaleEncoding utf8` has no effect. +-- +-- Until the origin of the problem is identified, we disable the test on +-- Windows. 
+ +main :: IO () +main = unless isWindows $ + stack ["sdist", "."] diff --git a/tests/integration/tests/6372-sdist-unicode-test/files/package.yaml b/tests/integration/tests/6372-sdist-unicode-test/files/package.yaml new file mode 100644 index 0000000000..42e9845b9c --- /dev/null +++ b/tests/integration/tests/6372-sdist-unicode-test/files/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: test١٢٣ +description: Test of non-ASCII Unicode code points in file names +license: BSD-3-Clause + +dependencies: +- base < 5 + +library: + source-dirs: src diff --git "a/tests/integration/tests/6372-sdist-unicode-test/files/src/Lib\316\261\316\262\316\263.hs" "b/tests/integration/tests/6372-sdist-unicode-test/files/src/Lib\316\261\316\262\316\263.hs" new file mode 100644 index 0000000000..5fbed3015d --- /dev/null +++ "b/tests/integration/tests/6372-sdist-unicode-test/files/src/Lib\316\261\316\262\316\263.hs" @@ -0,0 +1 @@ +module Libαβγ where diff --git a/tests/integration/tests/6372-sdist-unicode-test/files/stack.yaml b/tests/integration/tests/6372-sdist-unicode-test/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/6372-sdist-unicode-test/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/6451-exe-targets/Main.hs b/tests/integration/tests/6451-exe-targets/Main.hs new file mode 100644 index 0000000000..09b0f66c7e --- /dev/null +++ b/tests/integration/tests/6451-exe-targets/Main.hs @@ -0,0 +1,24 @@ +-- | Stack should build only those executables requested by the program +-- arguments. +-- +-- Issue: https://github.com/commercialhaskell/stack/issues/3229 is no longer +-- applicable. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/6451 + +module Main where + +import Control.Monad ( when ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stackCheckStderr + ["build", ":myExeA"] + (rejectMessage "Installing executable myExeB in") + +rejectMessage :: String -> String -> IO () +rejectMessage msg stderr = + when (msg `isInfixOf` stderr) $ + error $ "Did not expect message here: \n" ++ show msg diff --git a/tests/integration/tests/6451-exe-targets/files/.gitignore b/tests/integration/tests/6451-exe-targets/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/6451-exe-targets/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/6451-exe-targets/files/appA/Main.hs b/tests/integration/tests/6451-exe-targets/files/appA/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/6451-exe-targets/files/appA/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/6451-exe-targets/files/appB/Main.hs b/tests/integration/tests/6451-exe-targets/files/appB/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/6451-exe-targets/files/appB/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/6451-exe-targets/files/package.yaml b/tests/integration/tests/6451-exe-targets/files/package.yaml new file mode 100644 index 0000000000..6338785a63 --- /dev/null +++ b/tests/integration/tests/6451-exe-targets/files/package.yaml @@ -0,0 +1,21 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + +executables: + myExeA: + source-dirs: appA + main: Main.hs + dependencies: + - myPackage + myExeB: + source-dirs: appB + main: Main.hs + dependencies: + - myPackage diff --git 
a/tests/integration/tests/6451-exe-targets/files/src/Lib.hs b/tests/integration/tests/6451-exe-targets/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/6451-exe-targets/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/6451-exe-targets/files/stack.yaml b/tests/integration/tests/6451-exe-targets/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/6451-exe-targets/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/6465-msys-environment/Main.hs b/tests/integration/tests/6465-msys-environment/Main.hs new file mode 100644 index 0000000000..8c64777059 --- /dev/null +++ b/tests/integration/tests/6465-msys-environment/Main.hs @@ -0,0 +1,12 @@ +-- | On Windows, Stack supports different MSYS2 environments. +-- +-- See: https://github.com/commercialhaskell/stack/issues/6465 + +import Control.Monad ( when ) +import StackTest + +main :: IO () +main = when isWindows $ do + stack ["exec", "--", "pacman", "-S", "--noconfirm", "mingw-w64-ucrt-x86_64-gsl"] + stack ["build"] + stack ["exec", "--", "myExe"] diff --git a/tests/integration/tests/6465-msys-environment/files/.gitignore b/tests/integration/tests/6465-msys-environment/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/6465-msys-environment/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/6465-msys-environment/files/app/Main.hs b/tests/integration/tests/6465-msys-environment/files/app/Main.hs new file mode 100644 index 0000000000..76919d9ea3 --- /dev/null +++ b/tests/integration/tests/6465-msys-environment/files/app/Main.hs @@ -0,0 +1,12 @@ +module Main (main) where + +import Foreign.C.Types + +foreign import ccall "gsl/gsl_sf_bessel.h gsl_sf_bessel_J0" + gsl_sf_bessel_J0 :: CDouble -> CDouble + +main :: IO () +main = do + let x = CDouble 
5.0 + result = gsl_sf_bessel_J0 x + putStrLn $ "J0(" ++ show x ++ ") = " ++ show result diff --git a/tests/integration/tests/6465-msys-environment/files/package.yaml b/tests/integration/tests/6465-msys-environment/files/package.yaml new file mode 100644 index 0000000000..d385615dc8 --- /dev/null +++ b/tests/integration/tests/6465-msys-environment/files/package.yaml @@ -0,0 +1,14 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +extra-libraries: +- gsl + +executables: + myExe: + source-dirs: app + main: Main.hs diff --git a/tests/integration/tests/6465-msys-environment/files/stack.yaml b/tests/integration/tests/6465-msys-environment/files/stack.yaml new file mode 100644 index 0000000000..36d650b698 --- /dev/null +++ b/tests/integration/tests/6465-msys-environment/files/stack.yaml @@ -0,0 +1,3 @@ +snapshot: ghc-9.10.3 + +msys-environment: UCRT64 diff --git a/tests/integration/tests/6867-timeout-grace/Main.hs b/tests/integration/tests/6867-timeout-grace/Main.hs new file mode 100644 index 0000000000..e5a6d6868a --- /dev/null +++ b/tests/integration/tests/6867-timeout-grace/Main.hs @@ -0,0 +1,42 @@ +-- | Stack supports a grace period timeout for test suites. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/6867 + +import Control.Monad ( unless ) +import Data.Char ( toLower ) +import Data.IORef ( newIORef, readIORef, writeIORef ) +import Data.List ( isInfixOf ) +import Data.Time.Clock ( diffUTCTime, getCurrentTime) +import StackTest + +main :: IO () +main = do + stack ["test", "--no-run-tests"] -- pre-build to avoid counting build time in the test + + start <- getCurrentTime + errRef <- newIORef "" + stackErrStderr + [ "test" + , "--test-suite-timeout", "1" + , "--test-suite-timeout-grace", "1" + ] + (writeIORef errRef) + end <- getCurrentTime + err <- readIORef errRef + + let errLower = map toLower err + elapsedSecs :: Double + elapsedSecs = realToFrac (diffUTCTime end start) + + logInfo $ "Elapsed time: " ++ show elapsedSecs ++ "s" + + unless ("timed out" `isInfixOf` errLower) $ + error "Expected test-suite timeout message in stderr output." + + if isWindows + then unless (elapsedSecs < 5.0) $ + error $ "Expected timeout+grace run to finish quickly on Windows, took " + ++ show elapsedSecs ++ "s" + else unless (elapsedSecs > 1.5 && elapsedSecs < 5.0) $ + error $ "Expected timeout+grace run to take about timeout+grace on Unix, took " + ++ show elapsedSecs ++ "s" diff --git a/tests/integration/tests/6867-timeout-grace/files/.gitignore b/tests/integration/tests/6867-timeout-grace/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/6867-timeout-grace/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/6867-timeout-grace/files/package.yaml b/tests/integration/tests/6867-timeout-grace/files/package.yaml new file mode 100644 index 0000000000..c5ca426401 --- /dev/null +++ b/tests/integration/tests/6867-timeout-grace/files/package.yaml @@ -0,0 +1,15 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +tests: + test: + main: Main.hs + source-dirs: test + when: + - condition: '!os(windows)' + dependencies: 
+ - unix diff --git a/tests/integration/tests/6867-timeout-grace/files/stack.yaml b/tests/integration/tests/6867-timeout-grace/files/stack.yaml new file mode 100644 index 0000000000..53317c86d3 --- /dev/null +++ b/tests/integration/tests/6867-timeout-grace/files/stack.yaml @@ -0,0 +1,3 @@ +snapshot: lts-24.37 +packages: +- . diff --git a/tests/integration/tests/6867-timeout-grace/files/test/Main.hs b/tests/integration/tests/6867-timeout-grace/files/test/Main.hs new file mode 100644 index 0000000000..cc4bfd8036 --- /dev/null +++ b/tests/integration/tests/6867-timeout-grace/files/test/Main.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE CPP #-} + +module Main + ( main + ) where + +import Control.Concurrent ( threadDelay ) + +#ifndef mingw32_HOST_OS +import System.Posix.Signals + ( Handler (Ignore), installHandler, sigTERM ) +#endif + +main :: IO () +main = do +#ifndef mingw32_HOST_OS + _ <- installHandler sigTERM Ignore Nothing +#endif + threadDelay 6000000 diff --git a/tests/integration/tests/6879-stack-yaml-includes/Main.hs b/tests/integration/tests/6879-stack-yaml-includes/Main.hs new file mode 100644 index 0000000000..60e70406ad --- /dev/null +++ b/tests/integration/tests/6879-stack-yaml-includes/Main.hs @@ -0,0 +1,59 @@ +-- | Stack supports an !include directive in YAML configuration files. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/6879 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest +import System.Directory ( getCurrentDirectory ) +import System.Environment ( setEnv ) +import System.FilePath ( () ) + +main :: IO () +main = do + let + checkFor expected actual = + unless (expected == actual) $ + error ("expected " <> show expected <> "but got: " <> show actual) + + -- Check that includes in stack.yaml files are included + stackCheckStdout + ["--stack-yaml", "stack-including-flags.yaml", "run"] + (checkFor "TEST_FLAG was set\n") + + stackCheckStdout + ["--stack-yaml", "stack-including-flags-with-newline.yaml", "run"] + (checkFor "TEST_FLAG was set\n") + + stackCheckStdout + ["--stack-yaml", "stack-not-including-flags.yaml", "run"] + (checkFor "TEST_FLAG was not set\n") + + -- Check that includes in config.yaml files are included + currentDir <- getCurrentDirectory + setEnv "STACK_CONFIG" (currentDir "config-including-flags.yaml") + stackCheckStdout + ["--stack-yaml", "stack-not-including-flags.yaml", "run"] + (checkFor "TEST_FLAG was set\n") + + -- Check that 'config set' succeeds when the key already exists in a + -- stack.yaml file that uses !include directives + stackCheckStderr + ["--stack-yaml", "stack-including-flags.yaml", "config", "set", "snapshot", "ghc-9.10.3"] + (expectMessage "already") + -- Check that 'config set' succeeds when the key already exists in a + -- stack.yaml file that uses !include directives (with newline variant) + stackCheckStderr + ["--stack-yaml", "stack-including-flags-with-newline.yaml", "config", "set", "snapshot", "ghc-9.10.3"] + (expectMessage "already") + + -- Check that 'config set' raises an error when the key does not exist in a + -- stack.yaml file that uses !include directives + stackErrStderr + ["--stack-yaml", "stack-including-file-with-install-ghc.yaml", "config", "set", "install-ghc", "true"] + (expectMessage "!include") + +expectMessage :: String -> 
String -> IO () +expectMessage msg stderr' = do + unless (msg `isInfixOf` stderr') $ + error $ "Expected stderr to contain " ++ show msg ++ " but got:\n" ++ stderr' diff --git a/tests/integration/tests/6879-stack-yaml-includes/files/.gitignore b/tests/integration/tests/6879-stack-yaml-includes/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/6879-stack-yaml-includes/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/6879-stack-yaml-includes/files/app/Main.hs b/tests/integration/tests/6879-stack-yaml-includes/files/app/Main.hs new file mode 100644 index 0000000000..d412740785 --- /dev/null +++ b/tests/integration/tests/6879-stack-yaml-includes/files/app/Main.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE CPP #-} + +module Main + ( main + ) where + +main :: IO () +main = +#if TEST_FLAG + putStrLn "TEST_FLAG was set" +#else + putStrLn "TEST_FLAG was not set" +#endif diff --git a/tests/integration/tests/6879-stack-yaml-includes/files/config-flags.yaml b/tests/integration/tests/6879-stack-yaml-includes/files/config-flags.yaml new file mode 100644 index 0000000000..222ce796b8 --- /dev/null +++ b/tests/integration/tests/6879-stack-yaml-includes/files/config-flags.yaml @@ -0,0 +1,2 @@ +ghc-options: + "$everything": -DTEST_FLAG diff --git a/tests/integration/tests/6879-stack-yaml-includes/files/config-including-flags.yaml b/tests/integration/tests/6879-stack-yaml-includes/files/config-including-flags.yaml new file mode 100644 index 0000000000..290a0eee8a --- /dev/null +++ b/tests/integration/tests/6879-stack-yaml-includes/files/config-including-flags.yaml @@ -0,0 +1 @@ +<<: !include config-flags.yaml diff --git a/tests/integration/tests/6879-stack-yaml-includes/files/install-ghc.yaml b/tests/integration/tests/6879-stack-yaml-includes/files/install-ghc.yaml new file mode 100644 index 0000000000..fd3953b239 --- /dev/null +++ 
b/tests/integration/tests/6879-stack-yaml-includes/files/install-ghc.yaml @@ -0,0 +1 @@ +install-ghc: true diff --git a/tests/integration/tests/6879-stack-yaml-includes/files/package.yaml b/tests/integration/tests/6879-stack-yaml-includes/files/package.yaml new file mode 100644 index 0000000000..a211bd8344 --- /dev/null +++ b/tests/integration/tests/6879-stack-yaml-includes/files/package.yaml @@ -0,0 +1,20 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +flags: + test-flag: + description: Generate a compiler error for test purposes + default: false + manual: true + +executables: + myExe: + source-dirs: app + main: Main.hs + when: + - condition: flag(test-flag) + cpp-options: -DTEST_FLAG diff --git a/tests/integration/tests/6879-stack-yaml-includes/files/stack-flags.yaml b/tests/integration/tests/6879-stack-yaml-includes/files/stack-flags.yaml new file mode 100644 index 0000000000..81f0ef1634 --- /dev/null +++ b/tests/integration/tests/6879-stack-yaml-includes/files/stack-flags.yaml @@ -0,0 +1,3 @@ +flags: + myPackage: + test-flag: true diff --git a/tests/integration/tests/6879-stack-yaml-includes/files/stack-including-file-with-install-ghc.yaml b/tests/integration/tests/6879-stack-yaml-includes/files/stack-including-file-with-install-ghc.yaml new file mode 100644 index 0000000000..e58b5115e2 --- /dev/null +++ b/tests/integration/tests/6879-stack-yaml-includes/files/stack-including-file-with-install-ghc.yaml @@ -0,0 +1,2 @@ +snapshot: ghc-9.10.3 +<<: !include install-ghc.yaml diff --git a/tests/integration/tests/6879-stack-yaml-includes/files/stack-including-flags-with-newline.yaml b/tests/integration/tests/6879-stack-yaml-includes/files/stack-including-flags-with-newline.yaml new file mode 100644 index 0000000000..4b7216c7c8 --- /dev/null +++ b/tests/integration/tests/6879-stack-yaml-includes/files/stack-including-flags-with-newline.yaml @@ -0,0 +1,3 @@ +snapshot: ghc-9.10.3 +<<: + !include stack-flags.yaml diff --git 
a/tests/integration/tests/6879-stack-yaml-includes/files/stack-including-flags.yaml b/tests/integration/tests/6879-stack-yaml-includes/files/stack-including-flags.yaml new file mode 100644 index 0000000000..6f6a0cd5f3 --- /dev/null +++ b/tests/integration/tests/6879-stack-yaml-includes/files/stack-including-flags.yaml @@ -0,0 +1,2 @@ +snapshot: ghc-9.10.3 +<<: !include stack-flags.yaml diff --git a/tests/integration/tests/6879-stack-yaml-includes/files/stack-not-including-flags.yaml b/tests/integration/tests/6879-stack-yaml-includes/files/stack-not-including-flags.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/6879-stack-yaml-includes/files/stack-not-including-flags.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/6905-cyclic-plan/Main.hs b/tests/integration/tests/6905-cyclic-plan/Main.hs new file mode 100644 index 0000000000..75120e5d06 --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/Main.hs @@ -0,0 +1,27 @@ +-- | The test's project has project packages A and B. +-- +-- In terms of main libraries, the dependencies are (->- is 'depends on'): +-- +-- A ->- B +-- +-- In terms of executables (a test suite): +-- +-- B ->- A +-- +-- As, overall, A ->- B and B ->- A, packages A and B cannot be built +-- 'all-in-one'. +-- +-- This integration test tests: +-- +-- * when A is named myPackageA and B is named myPackageB; and +-- +-- * when A is named myPackageD and B is named myPackageC. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/6905 + +import StackTest + +main :: IO () +main = do + stack ["--stack-yaml", "stack1.yaml", "test"] + stack ["--stack-yaml", "stack2.yaml", "test"] diff --git a/tests/integration/tests/6905-cyclic-plan/files/.gitignore b/tests/integration/tests/6905-cyclic-plan/files/.gitignore new file mode 100644 index 0000000000..5087095541 --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/.gitignore @@ -0,0 +1,4 @@ +myPackageA.cabal +myPackageB.cabal +myPackageC.cabal +myPackageD.cabal diff --git a/tests/integration/tests/6905-cyclic-plan/files/myPackageA/package.yaml b/tests/integration/tests/6905-cyclic-plan/files/myPackageA/package.yaml new file mode 100644 index 0000000000..bb7167768b --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/myPackageA/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base + +library: + source-dirs: src + dependencies: + - myPackageB diff --git a/tests/integration/tests/6905-cyclic-plan/files/myPackageA/src/LibA.hs b/tests/integration/tests/6905-cyclic-plan/files/myPackageA/src/LibA.hs new file mode 100644 index 0000000000..743d4bf8d0 --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/myPackageA/src/LibA.hs @@ -0,0 +1,5 @@ +module LibA + ( funcB + ) where + +import LibB ( funcB ) diff --git a/tests/integration/tests/6905-cyclic-plan/files/myPackageB/package.yaml b/tests/integration/tests/6905-cyclic-plan/files/myPackageB/package.yaml new file mode 100644 index 0000000000..c5d19b86c3 --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/myPackageB/package.yaml @@ -0,0 +1,16 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base + +library: + source-dirs: src + +tests: + test: + source-dirs: test + main: Main.hs + dependencies: + - myPackageA diff --git a/tests/integration/tests/6905-cyclic-plan/files/myPackageB/src/LibB.hs 
b/tests/integration/tests/6905-cyclic-plan/files/myPackageB/src/LibB.hs new file mode 100644 index 0000000000..e714f965fe --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/myPackageB/src/LibB.hs @@ -0,0 +1,6 @@ +module LibB + ( funcB + ) where + +funcB :: IO () +funcB = pure () diff --git a/tests/integration/tests/6905-cyclic-plan/files/myPackageB/test/Main.hs b/tests/integration/tests/6905-cyclic-plan/files/myPackageB/test/Main.hs new file mode 100644 index 0000000000..e7c27dd1be --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/myPackageB/test/Main.hs @@ -0,0 +1,4 @@ +import LibA ( funcB ) + +main :: IO () +main = funcB diff --git a/tests/integration/tests/6905-cyclic-plan/files/myPackageC/package.yaml b/tests/integration/tests/6905-cyclic-plan/files/myPackageC/package.yaml new file mode 100644 index 0000000000..89dfd7f40b --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/myPackageC/package.yaml @@ -0,0 +1,16 @@ +spec-version: 0.36.0 + +name: myPackageC + +dependencies: +- base + +library: + source-dirs: src + +tests: + test: + source-dirs: test + main: Main.hs + dependencies: + - myPackageD diff --git a/tests/integration/tests/6905-cyclic-plan/files/myPackageC/src/LibC.hs b/tests/integration/tests/6905-cyclic-plan/files/myPackageC/src/LibC.hs new file mode 100644 index 0000000000..28474cddd5 --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/myPackageC/src/LibC.hs @@ -0,0 +1,6 @@ +module LibC + ( funcC + ) where + +funcC :: IO () +funcC = pure () diff --git a/tests/integration/tests/6905-cyclic-plan/files/myPackageC/test/Main.hs b/tests/integration/tests/6905-cyclic-plan/files/myPackageC/test/Main.hs new file mode 100644 index 0000000000..c438615556 --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/myPackageC/test/Main.hs @@ -0,0 +1,4 @@ +import LibD ( funcC ) + +main :: IO () +main = funcC diff --git a/tests/integration/tests/6905-cyclic-plan/files/myPackageD/package.yaml 
b/tests/integration/tests/6905-cyclic-plan/files/myPackageD/package.yaml new file mode 100644 index 0000000000..1d913ac5ef --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/myPackageD/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackageD + +dependencies: +- base + +library: + source-dirs: src + dependencies: + - myPackageC diff --git a/tests/integration/tests/6905-cyclic-plan/files/myPackageD/src/LibD.hs b/tests/integration/tests/6905-cyclic-plan/files/myPackageD/src/LibD.hs new file mode 100644 index 0000000000..481d4e7f69 --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/myPackageD/src/LibD.hs @@ -0,0 +1,5 @@ +module LibD + ( funcC + ) where + +import LibC ( funcC ) diff --git a/tests/integration/tests/6905-cyclic-plan/files/stack1.yaml b/tests/integration/tests/6905-cyclic-plan/files/stack1.yaml new file mode 100644 index 0000000000..68891a8d7c --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/stack1.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +packages: +- myPackageA +- myPackageB diff --git a/tests/integration/tests/6905-cyclic-plan/files/stack2.yaml b/tests/integration/tests/6905-cyclic-plan/files/stack2.yaml new file mode 100644 index 0000000000..740f730a48 --- /dev/null +++ b/tests/integration/tests/6905-cyclic-plan/files/stack2.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +packages: +- myPackageC +- myPackageD diff --git a/tests/integration/tests/6905-invalid-cycle/Main.hs b/tests/integration/tests/6905-invalid-cycle/Main.hs new file mode 100644 index 0000000000..34e3959973 --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/Main.hs @@ -0,0 +1,35 @@ +-- | The test's project has project packages A, B, C and D. 
+-- +-- In terms of main libraries, the dependencies are (->- is 'depends on'): +-- +-- A ->- B and C ->- D, D ->- C (a cycle) +-- +-- In terms of executables (a test suite): +-- +-- B ->- A, B ->- C +-- +-- As, overall, A ->- B and B ->- A, packages A and B cannot be built +-- 'all-in-one'. However, if the test suite of B is not being built, A and B can +-- be built. + +-- The test suite of B cannot be built, because C ->- D and D ->- C. +-- +-- See: https://github.com/commercialhaskell/stack/issues/6905 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stack ["build", "myPackageA", "myPackageB"] + stackErrStderr ["test", "myPackageB"] (expectMessage dependencyCycleDetected) + +dependencyCycleDetected :: String +dependencyCycleDetected = + "myPackageC dependency cycle detected: myPackageC, myPackageD, myPackageC" + +expectMessage :: String -> String -> IO () +expectMessage msg stderr = + unless (words msg `isInfixOf` words stderr) + (error $ "Expected a warning: \n" ++ show msg) diff --git a/tests/integration/tests/6905-invalid-cycle/files/.gitignore b/tests/integration/tests/6905-invalid-cycle/files/.gitignore new file mode 100644 index 0000000000..5087095541 --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/files/.gitignore @@ -0,0 +1,4 @@ +myPackageA.cabal +myPackageB.cabal +myPackageC.cabal +myPackageD.cabal diff --git a/tests/integration/tests/6905-invalid-cycle/files/myPackageA/package.yaml b/tests/integration/tests/6905-invalid-cycle/files/myPackageA/package.yaml new file mode 100644 index 0000000000..bb7167768b --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/files/myPackageA/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base + +library: + source-dirs: src + dependencies: + - myPackageB diff --git a/tests/integration/tests/6905-invalid-cycle/files/myPackageA/src/LibA.hs 
b/tests/integration/tests/6905-invalid-cycle/files/myPackageA/src/LibA.hs new file mode 100644 index 0000000000..743d4bf8d0 --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/files/myPackageA/src/LibA.hs @@ -0,0 +1,5 @@ +module LibA + ( funcB + ) where + +import LibB ( funcB ) diff --git a/tests/integration/tests/6905-invalid-cycle/files/myPackageB/package.yaml b/tests/integration/tests/6905-invalid-cycle/files/myPackageB/package.yaml new file mode 100644 index 0000000000..dbdaaca9ee --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/files/myPackageB/package.yaml @@ -0,0 +1,17 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base + +library: + source-dirs: src + +tests: + test: + source-dirs: test + main: Main.hs + dependencies: + - myPackageA + - myPackageC diff --git a/tests/integration/tests/6905-invalid-cycle/files/myPackageB/src/LibB.hs b/tests/integration/tests/6905-invalid-cycle/files/myPackageB/src/LibB.hs new file mode 100644 index 0000000000..e714f965fe --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/files/myPackageB/src/LibB.hs @@ -0,0 +1,6 @@ +module LibB + ( funcB + ) where + +funcB :: IO () +funcB = pure () diff --git a/tests/integration/tests/6905-invalid-cycle/files/myPackageB/test/Main.hs b/tests/integration/tests/6905-invalid-cycle/files/myPackageB/test/Main.hs new file mode 100644 index 0000000000..4443983c52 --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/files/myPackageB/test/Main.hs @@ -0,0 +1,7 @@ +import LibA ( funcB ) +import LibC ( funcC ) + +main :: IO () +main = do + funcB + funcC diff --git a/tests/integration/tests/6905-invalid-cycle/files/myPackageC/package.yaml b/tests/integration/tests/6905-invalid-cycle/files/myPackageC/package.yaml new file mode 100644 index 0000000000..d657e99064 --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/files/myPackageC/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackageC + +dependencies: +- base + 
+library: + source-dirs: src + dependencies: + - myPackageD diff --git a/tests/integration/tests/6905-invalid-cycle/files/myPackageC/src/LibC.hs b/tests/integration/tests/6905-invalid-cycle/files/myPackageC/src/LibC.hs new file mode 100644 index 0000000000..b692cc5dc4 --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/files/myPackageC/src/LibC.hs @@ -0,0 +1,8 @@ +module LibC + ( funcC + ) where + +import LibD ( funcD ) + +funcC :: IO () +funcC = funcD diff --git a/tests/integration/tests/6905-invalid-cycle/files/myPackageD/package.yaml b/tests/integration/tests/6905-invalid-cycle/files/myPackageD/package.yaml new file mode 100644 index 0000000000..1d913ac5ef --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/files/myPackageD/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackageD + +dependencies: +- base + +library: + source-dirs: src + dependencies: + - myPackageC diff --git a/tests/integration/tests/6905-invalid-cycle/files/myPackageD/src/LibD.hs b/tests/integration/tests/6905-invalid-cycle/files/myPackageD/src/LibD.hs new file mode 100644 index 0000000000..33b5940491 --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/files/myPackageD/src/LibD.hs @@ -0,0 +1,8 @@ +module LibD + ( funcD + ) where + +import LibC ( funcC ) + +funcD :: IO () +funcD = funcC diff --git a/tests/integration/tests/6905-invalid-cycle/files/stack.yaml b/tests/integration/tests/6905-invalid-cycle/files/stack.yaml new file mode 100644 index 0000000000..240f775bdc --- /dev/null +++ b/tests/integration/tests/6905-invalid-cycle/files/stack.yaml @@ -0,0 +1,7 @@ +snapshot: ghc-9.10.3 + +packages: +- myPackageA +- myPackageB +- myPackageC +- myPackageD diff --git a/tests/integration/tests/6905-multi-test/Main.hs b/tests/integration/tests/6905-multi-test/Main.hs new file mode 100644 index 0000000000..2e21e47b41 --- /dev/null +++ b/tests/integration/tests/6905-multi-test/Main.hs @@ -0,0 +1,28 @@ +-- | The test's project has project packages A, B and 
C (which has no library). +-- +-- In terms of main libraries, the dependencies are (->- is 'depends on'): +-- +-- A ->- B +-- +-- In terms of executables (including test suites): +-- +-- B ->- A and C ->- A +-- +-- As, overall, A ->- B and B ->- A, packages A and B cannot be built +-- 'all-in-one'. +-- +-- A, B and C are named myPackageA, myPackageB and myPackageC respectively. +-- +-- See: https://github.com/commercialhaskell/stack/issues/6905 + +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stackCheckStderr ["test", "--coverage"] $ \out -> do + unless ("The coverage report for myPackageA's test-suite test1 is available at" `isInfixOf` out) $ + fail "Didn't get expected report for test1" + unless ("[S-6829]" `isInfixOf` out) $ + fail "Didn't get expected empty report for test2" diff --git a/tests/integration/tests/6905-multi-test/files/.gitignore b/tests/integration/tests/6905-multi-test/files/.gitignore new file mode 100644 index 0000000000..03e2c6587b --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/.gitignore @@ -0,0 +1,6 @@ +myPackageA.cabal +myPackageB.cabal +myPackageC.cabal +myPackageD.cabal +myPackageE.cabal +myPackageF.cabal diff --git a/tests/integration/tests/6905-multi-test/files/myPackageA/app/Main.hs b/tests/integration/tests/6905-multi-test/files/myPackageA/app/Main.hs new file mode 100644 index 0000000000..df716cf71e --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageA/app/Main.hs @@ -0,0 +1,6 @@ +module Main where + +import LibA ( funcA ) + +main :: IO () +main = funcA diff --git a/tests/integration/tests/6905-multi-test/files/myPackageA/package.yaml b/tests/integration/tests/6905-multi-test/files/myPackageA/package.yaml new file mode 100644 index 0000000000..d246bd23b8 --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageA/package.yaml @@ -0,0 +1,33 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base + 
+library: + source-dirs: src + dependencies: + - myPackageB + +executables: + myExeA: + source-dirs: app + main: Main.hs + dependencies: + - myPackageA + +tests: + test1: + source-dirs: test1 + main: Main.hs + dependencies: + - myPackageA + test2: + source-dirs: test2 + main: Main.hs + dependencies: + - myPackageA + test3: + source-dirs: test3 + main: Main.hs diff --git a/tests/integration/tests/6905-multi-test/files/myPackageA/src/LibA.hs b/tests/integration/tests/6905-multi-test/files/myPackageA/src/LibA.hs new file mode 100644 index 0000000000..a7981b6f2c --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageA/src/LibA.hs @@ -0,0 +1,9 @@ +module LibA + ( funcA + , funcB + ) where + +import LibB ( funcB ) + +funcA :: IO () +funcA = pure () diff --git a/tests/integration/tests/6905-multi-test/files/myPackageA/test1/Main.hs b/tests/integration/tests/6905-multi-test/files/myPackageA/test1/Main.hs new file mode 100644 index 0000000000..71c96e8fce --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageA/test1/Main.hs @@ -0,0 +1,4 @@ +import LibA ( funcA ) + +main :: IO () +main = funcA diff --git a/tests/integration/tests/6905-multi-test/files/myPackageA/test2/Main.hs b/tests/integration/tests/6905-multi-test/files/myPackageA/test2/Main.hs new file mode 100644 index 0000000000..3ca9d54762 --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageA/test2/Main.hs @@ -0,0 +1,4 @@ +import LibA + +main :: IO () +main = pure () diff --git a/tests/integration/tests/6905-multi-test/files/myPackageA/test3/Main.hs b/tests/integration/tests/6905-multi-test/files/myPackageA/test3/Main.hs new file mode 100644 index 0000000000..d582e1e36a --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageA/test3/Main.hs @@ -0,0 +1,2 @@ +main :: IO () +main = pure () diff --git a/tests/integration/tests/6905-multi-test/files/myPackageB/package.yaml 
b/tests/integration/tests/6905-multi-test/files/myPackageB/package.yaml new file mode 100644 index 0000000000..c5d19b86c3 --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageB/package.yaml @@ -0,0 +1,16 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base + +library: + source-dirs: src + +tests: + test: + source-dirs: test + main: Main.hs + dependencies: + - myPackageA diff --git a/tests/integration/tests/6905-multi-test/files/myPackageB/src/LibB.hs b/tests/integration/tests/6905-multi-test/files/myPackageB/src/LibB.hs new file mode 100644 index 0000000000..e714f965fe --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageB/src/LibB.hs @@ -0,0 +1,6 @@ +module LibB + ( funcB + ) where + +funcB :: IO () +funcB = pure () diff --git a/tests/integration/tests/6905-multi-test/files/myPackageB/test/Main.hs b/tests/integration/tests/6905-multi-test/files/myPackageB/test/Main.hs new file mode 100644 index 0000000000..e7c27dd1be --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageB/test/Main.hs @@ -0,0 +1,4 @@ +import LibA ( funcB ) + +main :: IO () +main = funcB diff --git a/tests/integration/tests/6905-multi-test/files/myPackageC/app/Main.hs b/tests/integration/tests/6905-multi-test/files/myPackageC/app/Main.hs new file mode 100644 index 0000000000..ca55d965a4 --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageC/app/Main.hs @@ -0,0 +1,4 @@ +import LibC ( funcC ) + +main :: IO () +main = funcC diff --git a/tests/integration/tests/6905-multi-test/files/myPackageC/package.yaml b/tests/integration/tests/6905-multi-test/files/myPackageC/package.yaml new file mode 100644 index 0000000000..629f8cba45 --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageC/package.yaml @@ -0,0 +1,22 @@ +spec-version: 0.36.0 + +name: myPackageC + +dependencies: +- base + +executables: + myExeC: + source-dirs: + - app + - src + main: Main.hs + +tests: + test: + source-dirs: 
+ - src + - test + main: Main.hs + dependencies: + - myPackageA diff --git a/tests/integration/tests/6905-multi-test/files/myPackageC/src/LibC.hs b/tests/integration/tests/6905-multi-test/files/myPackageC/src/LibC.hs new file mode 100644 index 0000000000..6289d8843d --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageC/src/LibC.hs @@ -0,0 +1,6 @@ +module LibC + ( funcC + ) where + +funcC :: IO () +funcC = pure () diff --git a/tests/integration/tests/6905-multi-test/files/myPackageC/test/Main.hs b/tests/integration/tests/6905-multi-test/files/myPackageC/test/Main.hs new file mode 100644 index 0000000000..0535c37e44 --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/myPackageC/test/Main.hs @@ -0,0 +1,7 @@ +import LibA ( funcA ) +import LibC ( funcC ) + +main :: IO () +main = do + funcA + funcC diff --git a/tests/integration/tests/6905-multi-test/files/stack.yaml b/tests/integration/tests/6905-multi-test/files/stack.yaml new file mode 100644 index 0000000000..14a33de5df --- /dev/null +++ b/tests/integration/tests/6905-multi-test/files/stack.yaml @@ -0,0 +1,6 @@ +snapshot: ghc-9.10.3 + +packages: +- myPackageA +- myPackageB +- myPackageC diff --git a/tests/integration/tests/717-sdist-test/Main.hs b/tests/integration/tests/717-sdist-test/Main.hs new file mode 100644 index 0000000000..fdd3691cee --- /dev/null +++ b/tests/integration/tests/717-sdist-test/Main.hs @@ -0,0 +1,26 @@ +-- Stack's test of an archive file produced by sdist should fail if the package +-- description did not list all files used by the package itself. 
+-- +-- https://github.com/commercialhaskell/stack/issues/717 + +import StackTest + +main :: IO () +main = do + -- Verify building works: + stack ["build"] + -- Keep old behavior: + stack ["sdist"] + -- Successful sdist with --test-tarball: + stack ["sdist", "working-package-with-th", "--test-tarball"] + -- Fails because package contains TH which depends on files which are not put + -- into sdist tarball: + stackErr ["sdist", "failing-package-with-th", "--test-tarball"] + -- Same, but inside a subdir: + stackErr ["sdist", "subdirs/failing-in-subdir", "--test-tarball"] + -- Depends on failing-package-with-th and failing-in-subdir - these would fail + -- if they were the target of sdist, but since they are just dependencies, the + -- operation should succeed: + stack ["sdist", "subdirs/dependent-on-failing-packages", "--test-tarball"] + -- Fails because a test depends on files which are not put into sdist tarball: + stackErr ["sdist", "failing-package-with-test", "--test-tarball"] diff --git a/tests/integration/tests/717-sdist-test/files/.gitignore b/tests/integration/tests/717-sdist-test/files/.gitignore new file mode 100644 index 0000000000..1b68608ab5 --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/.gitignore @@ -0,0 +1,5 @@ +dependent-on-failing-packages.cabal +failing-in-subdir.cabal +failing-package-with-th.cabal +failing-package-with-test.cabal +working-package-with-th.cabal diff --git a/test/integration/tests/1659-skip-component/files/LICENSE b/tests/integration/tests/717-sdist-test/files/failing-package-with-test/LICENSE similarity index 100% rename from test/integration/tests/1659-skip-component/files/LICENSE rename to tests/integration/tests/717-sdist-test/files/failing-package-with-test/LICENSE diff --git a/tests/integration/tests/717-sdist-test/files/failing-package-with-test/README.md b/tests/integration/tests/717-sdist-test/files/failing-package-with-test/README.md new file mode 100644 index 0000000000..d432cbeb7e --- /dev/null +++ 
b/tests/integration/tests/717-sdist-test/files/failing-package-with-test/README.md @@ -0,0 +1 @@ +# failing-package-with-test diff --git a/tests/integration/tests/717-sdist-test/files/failing-package-with-test/files/file.txt b/tests/integration/tests/717-sdist-test/files/failing-package-with-test/files/file.txt new file mode 100644 index 0000000000..d95f3ad14d --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/failing-package-with-test/files/file.txt @@ -0,0 +1 @@ +content diff --git a/tests/integration/tests/717-sdist-test/files/failing-package-with-test/package.yaml b/tests/integration/tests/717-sdist-test/files/failing-package-with-test/package.yaml new file mode 100644 index 0000000000..5d83a066f5 --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/failing-package-with-test/package.yaml @@ -0,0 +1,13 @@ +spec-version: 0.36.0 + +name: failing-package-with-test +version: 0.1.0.0 +description: A package with a test suite that is incomplete if sdist. + +dependencies: +- base < 5 + +tests: + my-test: + source-dirs: test + main: Test.hs diff --git a/tests/integration/tests/717-sdist-test/files/failing-package-with-test/test/Test.hs b/tests/integration/tests/717-sdist-test/files/failing-package-with-test/test/Test.hs new file mode 100644 index 0000000000..7372c2c72c --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/failing-package-with-test/test/Test.hs @@ -0,0 +1,4 @@ +import Data.Functor ( void ) + +main :: IO () +main = void $ readFile "files/file.txt" diff --git a/test/integration/tests/717-sdist-test/files/package-with-failing-test/LICENSE b/tests/integration/tests/717-sdist-test/files/failing-package-with-th/LICENSE similarity index 100% rename from test/integration/tests/717-sdist-test/files/package-with-failing-test/LICENSE rename to tests/integration/tests/717-sdist-test/files/failing-package-with-th/LICENSE diff --git a/tests/integration/tests/717-sdist-test/files/failing-package-with-th/README.md 
b/tests/integration/tests/717-sdist-test/files/failing-package-with-th/README.md new file mode 100644 index 0000000000..6c7f4896ea --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/failing-package-with-th/README.md @@ -0,0 +1 @@ +# failing-package-with-th diff --git a/tests/integration/tests/717-sdist-test/files/failing-package-with-th/files/file.txt b/tests/integration/tests/717-sdist-test/files/failing-package-with-th/files/file.txt new file mode 100644 index 0000000000..d95f3ad14d --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/failing-package-with-th/files/file.txt @@ -0,0 +1 @@ +content diff --git a/tests/integration/tests/717-sdist-test/files/failing-package-with-th/package.yaml b/tests/integration/tests/717-sdist-test/files/failing-package-with-th/package.yaml new file mode 100644 index 0000000000..f9a9959604 --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/failing-package-with-th/package.yaml @@ -0,0 +1,15 @@ +spec-version: 0.36.0 + +name: failing-package-with-th +version: 0.1.0.0 +description: A package with Template Haskell that is incomplete if sdist. 
+ +extra-source-files: +- README.md + +dependencies: +- base < 5 +- template-haskell + +library: + source-dirs: src diff --git a/tests/integration/tests/717-sdist-test/files/failing-package-with-th/src/Lib.hs b/tests/integration/tests/717-sdist-test/files/failing-package-with-th/src/Lib.hs new file mode 100644 index 0000000000..0e37b70732 --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/failing-package-with-th/src/Lib.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE TemplateHaskell #-} +module Lib + ( someFunc + ) where + +import TH ( thFunc ) +import Language.Haskell.TH () + +someFunc :: IO () +someFunc = print $(thFunc) diff --git a/tests/integration/tests/717-sdist-test/files/failing-package-with-th/src/TH.hs b/tests/integration/tests/717-sdist-test/files/failing-package-with-th/src/TH.hs new file mode 100644 index 0000000000..a83a2014e7 --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/failing-package-with-th/src/TH.hs @@ -0,0 +1,10 @@ +module TH + ( thFunc + ) where + +import Language.Haskell.TH ( Exp (..), Lit (..), Q, runIO ) + +thFunc :: Q Exp +thFunc = runIO $ do + readFile "files/file.txt" + pure $ LitE (IntegerL 5) diff --git a/tests/integration/tests/717-sdist-test/files/stack.yaml b/tests/integration/tests/717-sdist-test/files/stack.yaml new file mode 100644 index 0000000000..196e557d7b --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/stack.yaml @@ -0,0 +1,8 @@ +snapshot: ghc-9.10.3 + +packages: +- failing-package-with-test +- failing-package-with-th +- subdirs/dependent-on-failing-packages +- subdirs/failing-in-subdir +- working-package-with-th diff --git a/test/integration/tests/717-sdist-test/files/package-with-th/LICENSE b/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/LICENSE similarity index 100% rename from test/integration/tests/717-sdist-test/files/package-with-th/LICENSE rename to tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/LICENSE diff 
--git a/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/README.md b/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/README.md new file mode 100644 index 0000000000..387b7303cf --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/README.md @@ -0,0 +1 @@ +# dependent-on-failing-packages diff --git a/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/files/file.txt b/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/files/file.txt new file mode 100644 index 0000000000..d95f3ad14d --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/files/file.txt @@ -0,0 +1 @@ +content diff --git a/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/package.yaml b/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/package.yaml new file mode 100644 index 0000000000..62ab378b6e --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/package.yaml @@ -0,0 +1,17 @@ +spec-version: 0.36.0 + +name: dependent-on-failing-packages +version: 0.1.0.0 +description: A package that can be sdist that depends on packages that cannot + +extra-source-files: +- README.md + +dependencies: +- base < 5 +- template-haskell +- failing-package-with-th +- failing-in-subdir + +library: + source-dirs: src diff --git a/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/src/LibD.hs b/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/src/LibD.hs new file mode 100644 index 0000000000..5967ffc1dc --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/src/LibD.hs @@ -0,0 +1,11 @@ +module LibD + ( someFuncD + ) where + +import Lib ( someFunc ) +import LibC ( someFuncC ) + 
+someFuncD :: IO () +someFuncD = do + someFunc + someFuncC diff --git a/test/integration/tests/717-sdist-test/files/package-with-working-th/LICENSE b/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/LICENSE similarity index 100% rename from test/integration/tests/717-sdist-test/files/package-with-working-th/LICENSE rename to tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/LICENSE diff --git a/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/README.md b/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/README.md new file mode 100644 index 0000000000..0533304f7f --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/README.md @@ -0,0 +1 @@ +# failing-in-subdir diff --git a/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/files/file.txt b/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/files/file.txt new file mode 100644 index 0000000000..d95f3ad14d --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/files/file.txt @@ -0,0 +1 @@ +content diff --git a/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/package.yaml b/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/package.yaml new file mode 100644 index 0000000000..0109b121c9 --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/package.yaml @@ -0,0 +1,15 @@ +spec-version: 0.36.0 + +name: failing-in-subdir +version: 0.1.0.0 +description: A package with Template Haskell that is incomplete if sdist. 
+ +extra-source-files: +- README.md + +dependencies: +- base < 5 +- template-haskell + +library: + source-dirs: src diff --git a/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/LibC.hs b/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/LibC.hs new file mode 100644 index 0000000000..bce50b4df4 --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/LibC.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE TemplateHaskell #-} + +module LibC + ( someFuncC + ) where + +import THInSubdir ( thFuncC ) + +someFuncC :: IO () +someFuncC = print $(thFuncC) diff --git a/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/THInSubdir.hs b/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/THInSubdir.hs new file mode 100644 index 0000000000..803150f5ca --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/subdirs/failing-in-subdir/src/THInSubdir.hs @@ -0,0 +1,10 @@ +module THInSubdir + ( thFuncC + ) where + +import Language.Haskell.TH ( Exp (..), Lit (..), Q, runIO ) + +thFuncC :: Q Exp +thFuncC = runIO $ do + readFile "files/file.txt" + pure $ LitE (IntegerL 5) diff --git a/test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/LICENSE b/tests/integration/tests/717-sdist-test/files/working-package-with-th/LICENSE similarity index 100% rename from test/integration/tests/717-sdist-test/files/subdirs/dependent-on-failing-packages/LICENSE rename to tests/integration/tests/717-sdist-test/files/working-package-with-th/LICENSE diff --git a/tests/integration/tests/717-sdist-test/files/working-package-with-th/README.md b/tests/integration/tests/717-sdist-test/files/working-package-with-th/README.md new file mode 100644 index 0000000000..c98b2279e2 --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/working-package-with-th/README.md @@ -0,0 +1 @@ +# working-package-with-th diff --git 
a/tests/integration/tests/717-sdist-test/files/working-package-with-th/package.yaml b/tests/integration/tests/717-sdist-test/files/working-package-with-th/package.yaml new file mode 100644 index 0000000000..660aa8f803 --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/working-package-with-th/package.yaml @@ -0,0 +1,15 @@ +spec-version: 0.36.0 + +name: working-package-with-th +version: 0.1.0.0 +description: A package with Template Haskell that can be sdist. + +extra-source-files: +- README.md + +dependencies: +- base < 5 +- template-haskell + +library: + source-dirs: src diff --git a/tests/integration/tests/717-sdist-test/files/working-package-with-th/src/Lib.hs b/tests/integration/tests/717-sdist-test/files/working-package-with-th/src/Lib.hs new file mode 100644 index 0000000000..dfdbfc9d08 --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/working-package-with-th/src/Lib.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE TemplateHaskell #-} + +module Lib + ( someFunc + ) where + +import TH ( thFunc ) + +someFunc :: IO () +someFunc = print $(thFunc) diff --git a/tests/integration/tests/717-sdist-test/files/working-package-with-th/src/TH.hs b/tests/integration/tests/717-sdist-test/files/working-package-with-th/src/TH.hs new file mode 100644 index 0000000000..06925fbf18 --- /dev/null +++ b/tests/integration/tests/717-sdist-test/files/working-package-with-th/src/TH.hs @@ -0,0 +1,8 @@ +module TH + ( thFunc + ) where + +import Language.Haskell.TH ( Exp (..), Lit (..), Q ) + +thFunc :: Q Exp +thFunc = pure $ LitE (IntegerL 5) diff --git a/tests/integration/tests/763-buildable-false/Main.hs b/tests/integration/tests/763-buildable-false/Main.hs new file mode 100644 index 0000000000..c8e7b28d40 --- /dev/null +++ b/tests/integration/tests/763-buildable-false/Main.hs @@ -0,0 +1,12 @@ +-- Stack reports an error if commanded to build a specific component of a +-- package that is not buildable. 
+-- +-- https://github.com/commercialhaskell/stack/issues/763 + +import StackTest + +main :: IO () +main = do + stack ["build"] + stack ["build", ":myPackage", "--flag", "myPackage:buildable"] + stackErr ["build", ":myPackage"] diff --git a/tests/integration/tests/763-buildable-false/files/.gitignore b/tests/integration/tests/763-buildable-false/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/763-buildable-false/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/763-buildable-false/files/app/Main.hs b/tests/integration/tests/763-buildable-false/files/app/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/763-buildable-false/files/app/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/763-buildable-false/files/package.yaml b/tests/integration/tests/763-buildable-false/files/package.yaml new file mode 100644 index 0000000000..ef84df9c46 --- /dev/null +++ b/tests/integration/tests/763-buildable-false/files/package.yaml @@ -0,0 +1,24 @@ +spec-version: 0.36.0 + +name: myPackage +version: 0.1.0.0 + +flags: + buildable: + description: Make component buildable. 
+ default: false + manual: true + +dependencies: +- base + +executables: + myPackage: + source-dirs: app + main: Main.hs + when: + - condition: flag(buildable) + then: + buildable: true + else: + buildable: false diff --git a/tests/integration/tests/763-buildable-false/files/stack.yaml b/tests/integration/tests/763-buildable-false/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/763-buildable-false/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/796-ghc-options/Main.hs b/tests/integration/tests/796-ghc-options/Main.hs new file mode 100644 index 0000000000..0b5bb1cfdb --- /dev/null +++ b/tests/integration/tests/796-ghc-options/Main.hs @@ -0,0 +1,12 @@ +-- Stack can set environment variables in a project-level configuration file or +-- at the command line and cause the package to be rebuilt. +-- +-- See: https://github.com/commercialhaskell/stack/issues/796 + +import StackTest + +main :: IO () +main = do + stack ["build"] + stackErr ["build", "--ghc-options=-DVARIABLE_C"] + stack ["build", "--ghc-options=-DVARIABLE_D"] diff --git a/tests/integration/tests/796-ghc-options/files/.gitignore b/tests/integration/tests/796-ghc-options/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/796-ghc-options/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/796-ghc-options/files/package.yaml b/tests/integration/tests/796-ghc-options/files/package.yaml new file mode 100644 index 0000000000..b20cb3407d --- /dev/null +++ b/tests/integration/tests/796-ghc-options/files/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackage +version: 0.1.0.0 + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/796-ghc-options/files/src/Lib.hs b/tests/integration/tests/796-ghc-options/files/src/Lib.hs new file mode 100644 index 0000000000..5a5e96c7f0 
--- /dev/null +++ b/tests/integration/tests/796-ghc-options/files/src/Lib.hs @@ -0,0 +1,20 @@ +{-# LANGUAGE CPP #-} + +module Lib where + +-- Avoid problems with CPP and HLint +#ifndef __HLINT__ + +#ifndef VARIABLE_A +#error VARIABLE_A isn't defined +#endif + +#ifndef VARIABLE_B +#error VARIABLE_B isn't defined +#endif + +#ifdef VARIABLE_C +#error VARIABLE_C is defined +#endif + +#endif diff --git a/tests/integration/tests/796-ghc-options/files/stack.yaml b/tests/integration/tests/796-ghc-options/files/stack.yaml new file mode 100644 index 0000000000..3c2c041e3d --- /dev/null +++ b/tests/integration/tests/796-ghc-options/files/stack.yaml @@ -0,0 +1,7 @@ +snapshot: ghc-9.10.3 + +ghc-options: + "*": -DVARIABLE_A + myPackage: -DVARIABLE_B + +rebuild-ghc-options: true diff --git a/tests/integration/tests/allow-newer-specific-packages/Main.hs b/tests/integration/tests/allow-newer-specific-packages/Main.hs new file mode 100644 index 0000000000..af0d7407fe --- /dev/null +++ b/tests/integration/tests/allow-newer-specific-packages/Main.hs @@ -0,0 +1,7 @@ +-- | Stack allows allow-newer to be applied to the dependencies of specified +-- packages. 
+ +import StackTest + +main :: IO () +main = stack ["build"] diff --git a/tests/integration/tests/allow-newer-specific-packages/files/.gitignore b/tests/integration/tests/allow-newer-specific-packages/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/allow-newer-specific-packages/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/allow-newer-specific-packages/files/package.yaml b/tests/integration/tests/allow-newer-specific-packages/files/package.yaml new file mode 100644 index 0000000000..9104b9bf64 --- /dev/null +++ b/tests/integration/tests/allow-newer-specific-packages/files/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- acme-missiles < 0.3 + +library: + source-dirs: src diff --git a/tests/integration/tests/allow-newer-specific-packages/files/src/Lib.hs b/tests/integration/tests/allow-newer-specific-packages/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/allow-newer-specific-packages/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/allow-newer-specific-packages/files/stack.yaml b/tests/integration/tests/allow-newer-specific-packages/files/stack.yaml new file mode 100644 index 0000000000..ebf6dd48f5 --- /dev/null +++ b/tests/integration/tests/allow-newer-specific-packages/files/stack.yaml @@ -0,0 +1,9 @@ +snapshot: ghc-9.10.3 + +extra-deps: +- acme-missiles-0.3 + +allow-newer: true + +allow-newer-deps: +- myPackage diff --git a/tests/integration/tests/basic-install/Main.hs b/tests/integration/tests/basic-install/Main.hs new file mode 100644 index 0000000000..659cccdb75 --- /dev/null +++ b/tests/integration/tests/basic-install/Main.hs @@ -0,0 +1,8 @@ +-- | Stack will build packages in the package index in the absence of a Stack +-- project-level configuration file (by referring to the configuration file in +-- the 
global-project directory in the Stack root). + +import StackTest + +main :: IO () +main = stack ["build", "acme-missiles-0.3"] diff --git a/tests/integration/tests/build-ghc/Main.hs b/tests/integration/tests/build-ghc/Main.hs new file mode 100644 index 0000000000..2d533a4c18 --- /dev/null +++ b/tests/integration/tests/build-ghc/Main.hs @@ -0,0 +1,28 @@ +import StackTest +import System.Directory (withCurrentDirectory) + +main :: IO () +main = superslow $ do + -- cleanup previous failing test... + removeDirIgnore "tmpPackage" + + stack ["new", "--snapshot=lts-13.11", "tmpPackage"] + + -- use a commit which is known to succeed with hadrian binary-dist + let commitId = "be0dde8e3c27ca56477d1d1801bb77621f3618e1" + flavour = "quick" + + withCurrentDirectory "tmpPackage" $ do + appendFile "stack.yaml" $ unlines + [ "compiler-repository: https://gitlab.haskell.org/ghc/ghc.git" + , "compiler: ghc-git-" ++ commitId ++ "-" ++ flavour + ] + + -- Setup the package + stack ["setup"] + + -- build it with the built GHC + stack ["build"] + + -- cleanup + removeDirIgnore "tmpPackage" diff --git a/tests/integration/tests/cabal-non-buildable-bug/Main.hs b/tests/integration/tests/cabal-non-buildable-bug/Main.hs new file mode 100644 index 0000000000..ceaa053290 --- /dev/null +++ b/tests/integration/tests/cabal-non-buildable-bug/Main.hs @@ -0,0 +1,9 @@ +-- | When building, Stack ignores the components of packages that are +-- not-buildable. 
+ +import StackTest + +main :: IO () +main = do + stack ["build", "--dry-run"] + stack ["build"] diff --git a/tests/integration/tests/cabal-non-buildable-bug/files/.gitignore b/tests/integration/tests/cabal-non-buildable-bug/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/cabal-non-buildable-bug/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/cabal-non-buildable-bug/files/package.yaml b/tests/integration/tests/cabal-non-buildable-bug/files/package.yaml new file mode 100644 index 0000000000..47630ba295 --- /dev/null +++ b/tests/integration/tests/cabal-non-buildable-bug/files/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +executables: + myExe: + main: Main.hs # Does not exist + buildable: false diff --git a/tests/integration/tests/cabal-non-buildable-bug/files/stack.yaml b/tests/integration/tests/cabal-non-buildable-bug/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/cabal-non-buildable-bug/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/cabal-sublibrary-dependency/Main.hs b/tests/integration/tests/cabal-sublibrary-dependency/Main.hs new file mode 100644 index 0000000000..4f9ba45ac1 --- /dev/null +++ b/tests/integration/tests/cabal-sublibrary-dependency/Main.hs @@ -0,0 +1,10 @@ +-- | Stack can build a project package that depends on the public named library +-- (a sublibrary) of another project package. However, the latter package must +-- also have a main library (which may be a Stack bug). 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/6896 + +import StackTest + +main :: IO () +main = stack ["build", ":myExe"] diff --git a/tests/integration/tests/cabal-sublibrary-dependency/files/.gitignore b/tests/integration/tests/cabal-sublibrary-dependency/files/.gitignore new file mode 100644 index 0000000000..f9a6e152d2 --- /dev/null +++ b/tests/integration/tests/cabal-sublibrary-dependency/files/.gitignore @@ -0,0 +1,2 @@ +myPackageA.cabal +myPackageB.cabal diff --git a/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageA/app/Main.hs b/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageA/app/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageA/app/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageA/package.yaml b/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageA/package.yaml new file mode 100644 index 0000000000..c7b310119d --- /dev/null +++ b/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageA/package.yaml @@ -0,0 +1,12 @@ +spec-version: 0.36.0 + +name: myPackageA + +dependencies: +- base +- myPackageB:myPackageB-sub + +executables: + myExe: + source-dirs: app + main: Main.hs diff --git a/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageB/package.yaml b/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageB/package.yaml new file mode 100644 index 0000000000..9591417520 --- /dev/null +++ b/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageB/package.yaml @@ -0,0 +1,13 @@ +spec-version: 0.36.0 + +name: myPackageB + +dependencies: +- base + +library: {} + +internal-libraries: + myPackageB-sub: + visibility: public + source-dirs: src diff --git a/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageB/src/Lib.hs 
b/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageB/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/cabal-sublibrary-dependency/files/myPackageB/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/cabal-sublibrary-dependency/files/stack.yaml b/tests/integration/tests/cabal-sublibrary-dependency/files/stack.yaml new file mode 100644 index 0000000000..68891a8d7c --- /dev/null +++ b/tests/integration/tests/cabal-sublibrary-dependency/files/stack.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +packages: +- myPackageA +- myPackageB diff --git a/tests/integration/tests/copy-bins-works/Main.hs b/tests/integration/tests/copy-bins-works/Main.hs new file mode 100644 index 0000000000..d22b31fe8e --- /dev/null +++ b/tests/integration/tests/copy-bins-works/Main.hs @@ -0,0 +1,15 @@ +-- | Stack's build command supports the copy-bins flag. + +import StackTest +import System.Directory ( createDirectoryIfMissing ) + +main :: IO () +main = do + createDirectoryIfMissing True "bin1" + stack ["build", "--copy-bins", "--local-bin-path", "bin1"] + doesExist ("bin1/" <> myPackageExe) + createDirectoryIfMissing True "bin2" + stack ["--stack-yaml", "stack-copy-bins.yaml", "build", "--local-bin-path", "bin2"] + doesExist ("bin2/" <> myPackageExe) + where + myPackageExe = "myExe" <> exeExt diff --git a/tests/integration/tests/copy-bins-works/files/.gitignore b/tests/integration/tests/copy-bins-works/files/.gitignore new file mode 100644 index 0000000000..fc53374c2f --- /dev/null +++ b/tests/integration/tests/copy-bins-works/files/.gitignore @@ -0,0 +1,3 @@ +myPackage.cabal +bin1/ +bin2/ diff --git a/tests/integration/tests/copy-bins-works/files/app/Main.hs b/tests/integration/tests/copy-bins-works/files/app/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/copy-bins-works/files/app/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () 
+main = pure () diff --git a/tests/integration/tests/copy-bins-works/files/package.yaml b/tests/integration/tests/copy-bins-works/files/package.yaml new file mode 100644 index 0000000000..ee1f9f8afa --- /dev/null +++ b/tests/integration/tests/copy-bins-works/files/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +executables: + myExe: + source-dirs: app + main: Main.hs diff --git a/tests/integration/tests/copy-bins-works/files/stack-copy-bins.yaml b/tests/integration/tests/copy-bins-works/files/stack-copy-bins.yaml new file mode 100644 index 0000000000..4ca93380fd --- /dev/null +++ b/tests/integration/tests/copy-bins-works/files/stack-copy-bins.yaml @@ -0,0 +1,4 @@ +snapshot: ghc-9.10.3 + +build: + copy-bins: true diff --git a/tests/integration/tests/copy-bins-works/files/stack.yaml b/tests/integration/tests/copy-bins-works/files/stack.yaml new file mode 100644 index 0000000000..abe6c1f573 --- /dev/null +++ b/tests/integration/tests/copy-bins-works/files/stack.yaml @@ -0,0 +1,4 @@ +snapshot: ghc-9.10.3 + +build: + copy-bins: false diff --git a/test/integration/tests/1336-1337-new-package-names/.gitignore b/tests/integration/tests/cyclic-test-deps/.gitignore similarity index 100% rename from test/integration/tests/1336-1337-new-package-names/.gitignore rename to tests/integration/tests/cyclic-test-deps/.gitignore diff --git a/tests/integration/tests/cyclic-test-deps/Main.hs b/tests/integration/tests/cyclic-test-deps/Main.hs new file mode 100644 index 0000000000..80dab5b758 --- /dev/null +++ b/tests/integration/tests/cyclic-test-deps/Main.hs @@ -0,0 +1,10 @@ +import StackTest + +main :: IO () +main = do + removeDirIgnore "text-2.1.2" + stack ["unpack", "text-2.1.2"] + stack ["unpack", "QuickCheck-2.15.0.1"] + removeFileIgnore "stack.yaml" + stack ["init", defaultSnapshotArg] + stack ["test", "--dry-run"] diff --git a/tests/integration/tests/drop-packages/Main.hs b/tests/integration/tests/drop-packages/Main.hs new file 
mode 100644 index 0000000000..69700c9823 --- /dev/null +++ b/tests/integration/tests/drop-packages/Main.hs @@ -0,0 +1,6 @@ +-- | Stack can drop packages from a snapshot. + +import StackTest + +main :: IO () +main = stackErr ["build"] diff --git a/tests/integration/tests/drop-packages/files/.gitignore b/tests/integration/tests/drop-packages/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/drop-packages/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/drop-packages/files/package.yaml b/tests/integration/tests/drop-packages/files/package.yaml new file mode 100644 index 0000000000..e4d2cd7656 --- /dev/null +++ b/tests/integration/tests/drop-packages/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- unliftio-core + +library: {} diff --git a/tests/integration/tests/drop-packages/files/stack.yaml b/tests/integration/tests/drop-packages/files/stack.yaml new file mode 100644 index 0000000000..86308b6f48 --- /dev/null +++ b/tests/integration/tests/drop-packages/files/stack.yaml @@ -0,0 +1,4 @@ +snapshot: lts-24.37 + +drop-packages: +- unliftio-core diff --git a/tests/integration/tests/duplicate-package-ids/Main.hs b/tests/integration/tests/duplicate-package-ids/Main.hs new file mode 100644 index 0000000000..38ca390b7f --- /dev/null +++ b/tests/integration/tests/duplicate-package-ids/Main.hs @@ -0,0 +1,10 @@ +-- | Stack distinguises between a package in the package index and a project +-- package, even if they have the same name and version. 
+ +import StackTest + +main :: IO () +main = do + stack ["--stack-yaml", "stack1.yaml", "build", "acme-missiles"] + stack ["unpack", "acme-missiles-0.3"] + stack ["--stack-yaml", "stack2.yaml", "build"] diff --git a/tests/integration/tests/duplicate-package-ids/files/.gitignore b/tests/integration/tests/duplicate-package-ids/files/.gitignore new file mode 100644 index 0000000000..2707bad1cb --- /dev/null +++ b/tests/integration/tests/duplicate-package-ids/files/.gitignore @@ -0,0 +1 @@ +acme-missiles-0.3/ diff --git a/tests/integration/tests/duplicate-package-ids/files/mySnapshot.yaml b/tests/integration/tests/duplicate-package-ids/files/mySnapshot.yaml new file mode 100644 index 0000000000..09bb7df5ab --- /dev/null +++ b/tests/integration/tests/duplicate-package-ids/files/mySnapshot.yaml @@ -0,0 +1,6 @@ +name: mySnapshot + +snapshot: ghc-9.10.3 + +packages: +- acme-missiles-0.3 diff --git a/tests/integration/tests/duplicate-package-ids/files/stack1.yaml b/tests/integration/tests/duplicate-package-ids/files/stack1.yaml new file mode 100644 index 0000000000..1412b971c3 --- /dev/null +++ b/tests/integration/tests/duplicate-package-ids/files/stack1.yaml @@ -0,0 +1,3 @@ +snapshot: mySnapshot.yaml + +packages: [] diff --git a/tests/integration/tests/duplicate-package-ids/files/stack2.yaml b/tests/integration/tests/duplicate-package-ids/files/stack2.yaml new file mode 100644 index 0000000000..1b7948f75a --- /dev/null +++ b/tests/integration/tests/duplicate-package-ids/files/stack2.yaml @@ -0,0 +1,4 @@ +snapshot: mySnapshot.yaml + +packages: +- acme-missiles-0.3 diff --git a/tests/integration/tests/ghc-install-hooks/Main.hs b/tests/integration/tests/ghc-install-hooks/Main.hs new file mode 100644 index 0000000000..f4013f1eb3 --- /dev/null +++ b/tests/integration/tests/ghc-install-hooks/Main.hs @@ -0,0 +1,8 @@ +-- | Stack supports GHC installation customisation shell scripts. 
+ +import Control.Exception ( throwIO ) +import StackTest +import System.Process ( rawSystem ) + +main :: IO () +main = rawSystem "sh" ["run.sh"] >>= throwIO diff --git a/tests/integration/tests/ghc-install-hooks/files/.gitignore b/tests/integration/tests/ghc-install-hooks/files/.gitignore new file mode 100644 index 0000000000..17906d378e --- /dev/null +++ b/tests/integration/tests/ghc-install-hooks/files/.gitignore @@ -0,0 +1 @@ +/fake-root/ diff --git a/tests/integration/tests/ghc-install-hooks/files/Test.hs b/tests/integration/tests/ghc-install-hooks/files/Test.hs new file mode 100644 index 0000000000..623c600c18 --- /dev/null +++ b/tests/integration/tests/ghc-install-hooks/files/Test.hs @@ -0,0 +1 @@ +main = putStrLn "Looks like everything is working!" diff --git a/tests/integration/tests/ghc-install-hooks/files/run.sh b/tests/integration/tests/ghc-install-hooks/files/run.sh new file mode 100644 index 0000000000..beaf269b9e --- /dev/null +++ b/tests/integration/tests/ghc-install-hooks/files/run.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +set -exu + +stack_bin=$("$STACK_EXE" path --snapshot ghc-9.10.3 --compiler-bin) + +export STACK_ROOT=$(pwd)/fake-root + +mkdir -p "${STACK_ROOT}"/hooks + +echo "echo '${stack_bin}/ghc'" > "${STACK_ROOT}"/hooks/ghc-install.sh +chmod +x "${STACK_ROOT}"/hooks/ghc-install.sh + +"$STACK_EXE" --no-install-ghc --snapshot ghc-9.10.3 ghc -- --info +"$STACK_EXE" --no-install-ghc --snapshot ghc-9.10.3 runghc Test.hs diff --git a/tests/integration/tests/git-submodules/Main.hs b/tests/integration/tests/git-submodules/Main.hs new file mode 100644 index 0000000000..196abe7e72 --- /dev/null +++ b/tests/integration/tests/git-submodules/Main.hs @@ -0,0 +1,83 @@ +import Control.Monad ( when ) +import Data.List ( filter ) +import StackTest +import System.Directory + ( createDirectoryIfMissing, getCurrentDirectory + , withCurrentDirectory + ) +import System.Exit ( exitFailure ) +import System.FilePath ( () ) +import System.IO ( IOMode (..), 
hPutStrLn, withFile ) + +main :: IO () +main = when isLinux $ do + runShell "git config --global protocol.file.allow always" + + let + gitInit = do + runShell "git init ." + runShell "git config user.name Test" + runShell "git config user.email test@test.com" + runShell "git config commit.gpgsign false" + + let withEmptyDir name inner = do + removeDirIgnore name + createDirectoryIfMissing True name + withCurrentDirectory name inner + + withEmptyDir "tmpSubSubRepo" $ do + gitInit + stack ["new", "pkg ", defaultSnapshotArg] + runShell "git add pkg" + runShell "git commit -m SubSubCommit" + + withEmptyDir "tmpSubRepo" $ do + gitInit + runShell "git submodule add ../tmpSubSubRepo sub" + runShell "git commit -a -m SubCommit" + + withEmptyDir "tmpRepo" $ do + gitInit + runShell "git submodule add ../tmpSubRepo sub" + runShell "git commit -a -m Commit" + + removeDirIgnore "tmpPackage" + stack ["new", defaultSnapshotArg, "tmpPackage"] + + curDir <- getCurrentDirectory + let tmpRepoDir = curDir "tmpRepo" + gitHead <- runWithCwd tmpRepoDir "git" ["rev-parse", "HEAD"] + let gitHeadCommit = stripNewline gitHead + + withCurrentDirectory "tmpPackage" $ do + -- add git dependency on repo with recursive submodules + writeToStackFile (tmpRepoDir, gitHeadCommit) + -- Setup the package + stack ["setup"] + + -- cleanup + removeDirIgnore "tmpRepo" + removeDirIgnore "tmpSubRepo" + removeDirIgnore "tmpSubSubRepo" + removeDirIgnore "tmpPackage" + +writeToStackFile :: (String, String) -> IO () +writeToStackFile (tmpRepoDir, gitCommit) = do + curDir <- getCurrentDirectory + let stackFile = curDir "stack.yaml" + let line1 = "extra-deps:" + line2 = "- git: " ++ tmpRepoDir + line3 = " commit: " ++ gitCommit + line4 = " subdir: sub/sub/pkg" + withFile stackFile AppendMode (\handle -> do + hPutStrLn handle line1 + hPutStrLn handle line2 + hPutStrLn handle line3 + hPutStrLn handle line4 + ) + +newline :: Char +newline = '\n' + +stripNewline :: String -> String +stripNewline = filter (/= newline) 
diff --git a/test/integration/tests/git-submodules/files/.gitignore b/tests/integration/tests/git-submodules/files/.gitignore similarity index 100% rename from test/integration/tests/git-submodules/files/.gitignore rename to tests/integration/tests/git-submodules/files/.gitignore diff --git a/tests/integration/tests/haddock-options/Main.hs b/tests/integration/tests/haddock-options/Main.hs new file mode 100644 index 0000000000..5cf2f7f412 --- /dev/null +++ b/tests/integration/tests/haddock-options/Main.hs @@ -0,0 +1,22 @@ +import Control.Monad ( unless ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + -- VARIABLE_B is defined here and VARIABLE_A in stack.yaml file + stackCheckStderr ["haddock", "--haddock-arguments", "--optghc=-DVARIABLE_B"] $ \s -> + unless (errorMsg `isInfixOf` s) $ + error "VARIABLE_A and VARIABLE_B not both defined" + stack ["clean"] + -- Works just fine, test #3099 while at it. + stack ["haddock", "--no-haddock-hyperlink-source"] + stack ["clean"] + -- Fails to work because we have bad argument + stackErr ["haddock", "--haddock-arguments", "--stack_it_badhaddockargument"] + +-- The error message differs by operating system +errorMsg :: String +errorMsg = if isLinux + then "error: #error VARIABLE_A and VARIABLE_B is defined" + else "error: VARIABLE_A and VARIABLE_B is defined" diff --git a/tests/integration/tests/haddock-options/files/.gitignore b/tests/integration/tests/haddock-options/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/haddock-options/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/haddock-options/files/package.yaml b/tests/integration/tests/haddock-options/files/package.yaml new file mode 100644 index 0000000000..a7305a77cf --- /dev/null +++ b/tests/integration/tests/haddock-options/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + 
source-dirs: src diff --git a/tests/integration/tests/haddock-options/files/src/Lib.hs b/tests/integration/tests/haddock-options/files/src/Lib.hs new file mode 100644 index 0000000000..4683fbf37e --- /dev/null +++ b/tests/integration/tests/haddock-options/files/src/Lib.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE CPP #-} + +module Lib + ( someFunc + ) where + +someFunc :: IO () +someFunc = putStrLn "someFunc" + +#if defined(VARIABLE_A) && defined(VARIABLE_B) + +#error VARIABLE_A and VARIABLE_B is defined + +#endif diff --git a/tests/integration/tests/haddock-options/files/stack.yaml b/tests/integration/tests/haddock-options/files/stack.yaml new file mode 100644 index 0000000000..69b8c2eeb2 --- /dev/null +++ b/tests/integration/tests/haddock-options/files/stack.yaml @@ -0,0 +1,6 @@ +snapshot: ghc-9.10.3 + +build: + haddock-arguments: + haddock-args: + - --optghc=-DVARIABLE_A diff --git a/tests/integration/tests/hpack-repo/Main.hs b/tests/integration/tests/hpack-repo/Main.hs new file mode 100644 index 0000000000..58ef93df19 --- /dev/null +++ b/tests/integration/tests/hpack-repo/Main.hs @@ -0,0 +1,9 @@ +-- Stack supports immutable dependency packages that are described only by a +-- package.yaml file. However, this work flow is deprecated. 
+-- +-- See: https://github.com/commercialhaskell/stack/issues/5210 + +import StackTest + +main :: IO () +main = stack ["build"] diff --git a/tests/integration/tests/hpack-repo/files/.gitignore b/tests/integration/tests/hpack-repo/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/hpack-repo/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/hpack-repo/files/package.yaml b/tests/integration/tests/hpack-repo/files/package.yaml new file mode 100644 index 0000000000..498b32af5d --- /dev/null +++ b/tests/integration/tests/hpack-repo/files/package.yaml @@ -0,0 +1,10 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- validity + +library: + source-dirs: src diff --git a/tests/integration/tests/hpack-repo/files/src/Lib.hs b/tests/integration/tests/hpack-repo/files/src/Lib.hs new file mode 100644 index 0000000000..0512f4633e --- /dev/null +++ b/tests/integration/tests/hpack-repo/files/src/Lib.hs @@ -0,0 +1,3 @@ +module Lib where + +import Data.Validity diff --git a/tests/integration/tests/hpack-repo/files/stack.yaml b/tests/integration/tests/hpack-repo/files/stack.yaml new file mode 100644 index 0000000000..a3f1d38792 --- /dev/null +++ b/tests/integration/tests/hpack-repo/files/stack.yaml @@ -0,0 +1,8 @@ +snapshot: lts-24.37 + +# At this commit, the validity package is described only by a package.yaml file +extra-deps: +- git: https://github.com/NorfairKing/validity.git + commit: d128cc30bc886e31ea7f8161fb7708c08b162937 + subdirs: + - validity diff --git a/tests/integration/tests/init-omit-packages/Main.hs b/tests/integration/tests/init-omit-packages/Main.hs new file mode 100644 index 0000000000..f43e79bb4c --- /dev/null +++ b/tests/integration/tests/init-omit-packages/Main.hs @@ -0,0 +1,14 @@ +-- | Stack's init command provides an --omit-packages flag to avoid the problem +-- of bad project packages. 
+ +import Control.Monad ( unless ) +import StackTest +import System.IO ( readFile ) + +main :: IO () +main = do + stackErr ["init", "--snapshot", "lts-24.37"] + stack ["init", "--snapshot", "lts-24.37", "--omit-packages"] + contents <- lines <$> readFile "stack.yaml" + unless ("#- bad" `elem` contents) $ + error "commented out 'bad' package was expected" diff --git a/tests/integration/tests/init-omit-packages/files/.gitignore b/tests/integration/tests/init-omit-packages/files/.gitignore new file mode 100644 index 0000000000..74744b2dd6 --- /dev/null +++ b/tests/integration/tests/init-omit-packages/files/.gitignore @@ -0,0 +1,3 @@ +stack.yaml +good.cabal +bad.cabal diff --git a/tests/integration/tests/init-omit-packages/files/bad/Bad.hs b/tests/integration/tests/init-omit-packages/files/bad/Bad.hs new file mode 100644 index 0000000000..fcd40f5dfd --- /dev/null +++ b/tests/integration/tests/init-omit-packages/files/bad/Bad.hs @@ -0,0 +1,4 @@ +module Bad where + +bad :: Int +bad = error "Something bad here" diff --git a/tests/integration/tests/init-omit-packages/files/bad/package.yaml b/tests/integration/tests/init-omit-packages/files/bad/package.yaml new file mode 100644 index 0000000000..0875dcdae1 --- /dev/null +++ b/tests/integration/tests/init-omit-packages/files/bad/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: bad + +dependencies: +- base +- non-existent-package == 666.0 + +library: {} diff --git a/test/integration/tests/init-omit-packages/files/good/Good.hs b/tests/integration/tests/init-omit-packages/files/good/Good.hs similarity index 100% rename from test/integration/tests/init-omit-packages/files/good/Good.hs rename to tests/integration/tests/init-omit-packages/files/good/Good.hs diff --git a/tests/integration/tests/init-omit-packages/files/good/package.yaml b/tests/integration/tests/init-omit-packages/files/good/package.yaml new file mode 100644 index 0000000000..9b93276a13 --- /dev/null +++ 
b/tests/integration/tests/init-omit-packages/files/good/package.yaml @@ -0,0 +1,8 @@ +spec-version: 0.36.0 + +name: good + +dependencies: +- base + +library: {} diff --git a/tests/integration/tests/internal-libraries/Main.hs b/tests/integration/tests/internal-libraries/Main.hs new file mode 100644 index 0000000000..c8b3b9c6be --- /dev/null +++ b/tests/integration/tests/internal-libraries/Main.hs @@ -0,0 +1,7 @@ +-- | Stack supports private named libraries (internal libraries) and foreign +-- libraries. + +import StackTest + +main :: IO () +main = stack ["build"] diff --git a/tests/integration/tests/internal-libraries/files/.gitignore b/tests/integration/tests/internal-libraries/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/internal-libraries/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/internal-libraries/files/app/Main.hs b/tests/integration/tests/internal-libraries/files/app/Main.hs new file mode 100644 index 0000000000..89ad4b3e08 --- /dev/null +++ b/tests/integration/tests/internal-libraries/files/app/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = pure () diff --git a/tests/integration/tests/internal-libraries/files/int/Internal.hs b/tests/integration/tests/internal-libraries/files/int/Internal.hs new file mode 100644 index 0000000000..d066bb085e --- /dev/null +++ b/tests/integration/tests/internal-libraries/files/int/Internal.hs @@ -0,0 +1 @@ +module Internal where diff --git a/tests/integration/tests/internal-libraries/files/package.yaml b/tests/integration/tests/internal-libraries/files/package.yaml new file mode 100644 index 0000000000..f991171fd9 --- /dev/null +++ b/tests/integration/tests/internal-libraries/files/package.yaml @@ -0,0 +1,40 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + +internal-libraries: + internal: + source-dirs: int + dependencies: + - myPackage + - stm + 
+executables: + myExe: + source-dirs: app + main: Main.hs + dependencies: + - internal + - myPackage + +verbatim: | + foreign-library foreign + type: native-shared + other-modules: + Foreign + build-depends: + base + , myPackage + , internal + , mtl + hs-source-dirs: + src-foreign + default-language: Haskell2010 + if os(Windows) + options: standalone diff --git a/tests/integration/tests/internal-libraries/files/src-foreign/Foreign.hs b/tests/integration/tests/internal-libraries/files/src-foreign/Foreign.hs new file mode 100644 index 0000000000..8c00a37599 --- /dev/null +++ b/tests/integration/tests/internal-libraries/files/src-foreign/Foreign.hs @@ -0,0 +1 @@ +module Foreign where diff --git a/tests/integration/tests/internal-libraries/files/src/Lib.hs b/tests/integration/tests/internal-libraries/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/internal-libraries/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/internal-libraries/files/stack.yaml b/tests/integration/tests/internal-libraries/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/internal-libraries/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/lock-files/Main.hs b/tests/integration/tests/lock-files/Main.hs new file mode 100644 index 0000000000..84131e34f0 --- /dev/null +++ b/tests/integration/tests/lock-files/Main.hs @@ -0,0 +1,16 @@ +-- | Stack creates lock files. 
+ +import Control.Monad ( unless, when ) +import Data.List ( isInfixOf ) +import StackTest + +main :: IO () +main = do + stack ["--stack-yaml", "stack1.yaml", "build"] + lock1 <- readFile "stack1.yaml.lock" + unless ("acme-box" `isInfixOf` lock1) $ + error "Package acme-box wasn't found in Stack lock file" + stack ["--stack-yaml", "stack2.yaml", "build"] + lock2 <- readFile "stack2.yaml.lock" + when ("acme-box" `isInfixOf` lock2) $ + error "Package acme-box shouldn't be in Stack lock file anymore" diff --git a/tests/integration/tests/lock-files/files/.gitignore b/tests/integration/tests/lock-files/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/lock-files/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/lock-files/files/package.yaml b/tests/integration/tests/lock-files/files/package.yaml new file mode 100644 index 0000000000..a7305a77cf --- /dev/null +++ b/tests/integration/tests/lock-files/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src diff --git a/tests/integration/tests/lock-files/files/src/Lib.hs b/tests/integration/tests/lock-files/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/lock-files/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/lock-files/files/stack1.yaml b/tests/integration/tests/lock-files/files/stack1.yaml new file mode 100644 index 0000000000..e317b1aefe --- /dev/null +++ b/tests/integration/tests/lock-files/files/stack1.yaml @@ -0,0 +1,5 @@ +snapshot: ghc-9.10.3 + +extra-deps: +- acme-missiles-0.3 +- acme-box-0.0.0.0 diff --git a/tests/integration/tests/lock-files/files/stack2.yaml b/tests/integration/tests/lock-files/files/stack2.yaml new file mode 100644 index 0000000000..37425098dd --- /dev/null +++ b/tests/integration/tests/lock-files/files/stack2.yaml @@ -0,0 
+1,4 @@ +snapshot: ghc-9.10.3 + +extra-deps: +- acme-missiles-0.3 diff --git a/tests/integration/tests/module-added-multiple-times/Main.hs b/tests/integration/tests/module-added-multiple-times/Main.hs new file mode 100644 index 0000000000..1cb5324fff --- /dev/null +++ b/tests/integration/tests/module-added-multiple-times/Main.hs @@ -0,0 +1,16 @@ +-- | Stack can load a package into GHC's repl. + +import Control.Monad ( when ) +import StackTest.Repl + +main :: IO () +main = stackRepl [] $ do + nextPrompt + replCommand ":main" + line <- replGetLine + let expected = "OK" + when (line /= expected) $ + error $ + "Main module didn't load correctly.\n" + <> "Expected: " <> expected <> "\n" + <> "Actual : " <> line <> "\n" diff --git a/tests/integration/tests/module-added-multiple-times/files/.gitignore b/tests/integration/tests/module-added-multiple-times/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/module-added-multiple-times/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/module-added-multiple-times/files/app/Main.hs b/tests/integration/tests/module-added-multiple-times/files/app/Main.hs new file mode 100644 index 0000000000..d09b7c23cd --- /dev/null +++ b/tests/integration/tests/module-added-multiple-times/files/app/Main.hs @@ -0,0 +1,6 @@ +module Main where + +import Lib.Lib ( func ) + +main :: IO () +main = putStrLn func diff --git a/tests/integration/tests/module-added-multiple-times/files/package.yaml b/tests/integration/tests/module-added-multiple-times/files/package.yaml new file mode 100644 index 0000000000..bea19d444e --- /dev/null +++ b/tests/integration/tests/module-added-multiple-times/files/package.yaml @@ -0,0 +1,16 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + +executables: + myExe: + source-dirs: app + main: Main.hs + dependencies: + - myPackage diff --git 
a/tests/integration/tests/module-added-multiple-times/files/src/Lib/Lib.hs b/tests/integration/tests/module-added-multiple-times/files/src/Lib/Lib.hs new file mode 100644 index 0000000000..3c09853b27 --- /dev/null +++ b/tests/integration/tests/module-added-multiple-times/files/src/Lib/Lib.hs @@ -0,0 +1,6 @@ +module Lib.Lib + ( func + ) where + +func :: String +func = "OK" diff --git a/tests/integration/tests/module-added-multiple-times/files/stack.yaml b/tests/integration/tests/module-added-multiple-times/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/module-added-multiple-times/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/mutable-deps/Main.hs b/tests/integration/tests/mutable-deps/Main.hs new file mode 100644 index 0000000000..2106578801 --- /dev/null +++ b/tests/integration/tests/mutable-deps/Main.hs @@ -0,0 +1,33 @@ +import Control.Monad ( forM_, unless, when ) +import Data.List ( isInfixOf, stripPrefix ) +import StackTest + +-- The package 'files' depends directly on filemanip, which depends directly on +-- packages directory, filepath and unix-compat. Package directory also depends +-- directly on filepath. Package unix-compat depends directly on directory, +-- filepath, time, unix and Win32. +-- The stack.yaml file, however, identifies filepath-1.4.100.4 as a local +-- package. Consequently, filepath is a mutable package and the packages +-- that depend on it should also be treated as mutable packages. 
+ +main :: IO () +main = unless isWindows $ do -- dependency issues on Windows + let isBuild package line = + case stripPrefix package line of + Just x -> "> build" `isInfixOf` line + Nothing -> False + expectRecompilation pkgs stderr = forM_ pkgs $ \p -> + unless (any (isBuild p) $ lines stderr) $ + error $ "package " ++ show p ++ " recompilation was expected" + expectNoRecompilation pkgs stderr = forM_ pkgs $ \p -> + when (any (isBuild p) $ lines stderr) $ + error $ "package " ++ show p ++ " recompilation was not expected" + mutablePackages = [ "filepath" + , "directory" + , "filemanip" + , "myPackage" + ] + stackCheckStderr ["build"] $ expectRecompilation mutablePackages + stackCheckStderr ["build" , "--profile"] $ expectRecompilation mutablePackages + stackCheckStderr ["build"] $ expectNoRecompilation mutablePackages + stackCheckStderr ["build" , "--profile"] $ expectNoRecompilation mutablePackages diff --git a/tests/integration/tests/mutable-deps/files/.gitignore b/tests/integration/tests/mutable-deps/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/mutable-deps/files/app/Main.hs b/tests/integration/tests/mutable-deps/files/app/Main.hs new file mode 100644 index 0000000000..ecf5062557 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/app/Main.hs @@ -0,0 +1,7 @@ +module Main where + +import Lib + +main = do + cFiles <- allCFiles + putStrLn $ "C files:" ++ show cFiles diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/Generate.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/Generate.hs new file mode 100644 index 0000000000..6b132101ab --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/Generate.hs @@ -0,0 +1,205 @@ +{-# LANGUAGE CPP, RecordWildCards, ViewPatterns #-} + +module Generate(main) where + +import Control.Exception 
+import Control.Monad +import Data.Semigroup +import Data.Char +import Data.List +import System.Directory +import System.IO + + +main :: IO () +main = do + src <- readFile "System/FilePath/Internal.hs" + let tests = map renderTest $ concatMap parseTest $ lines src + writeFileBinaryChanged "tests/filepath-tests/TestGen.hs" $ unlines $ + ["-- GENERATED CODE: See ../Generate.hs" +#ifndef GHC_MAKE + , "{-# LANGUAGE OverloadedStrings #-}" + , "{-# LANGUAGE ViewPatterns #-}" +#endif + , "{-# LANGUAGE CPP #-}" + , "{-# OPTIONS_GHC -Wno-name-shadowing #-}" + , "{-# OPTIONS_GHC -Wno-orphans #-}" + ,"module TestGen(tests) where" + ,"import TestUtil" + ,"#if !MIN_VERSION_base(4,11,0)" + ,"import Data.Semigroup" + ,"#endif" + ,"import Prelude as P" + ,"import Data.String" + ,"import GHC.IO.Encoding.Failure ( CodingFailureMode(..) )" + ,"import GHC.IO.Encoding.UTF16 ( mkUTF16le )" + ,"import GHC.IO.Encoding.UTF8 ( mkUTF8 )" + ,"import System.OsString.Internal.Types" + ,"import System.OsString.Encoding.Internal" + ,"import qualified Data.Char as C" + ,"import qualified System.OsString.Data.ByteString.Short as SBS" + ,"import qualified System.OsString.Data.ByteString.Short.Word16 as SBS16" + ,"import qualified System.FilePath.Windows as W" + ,"import qualified System.FilePath.Posix as P" +#ifdef GHC_MAKE + ,"import qualified System.OsPath.Windows.Internal as AFP_W" + ,"import qualified System.OsPath.Posix.Internal as AFP_P" +#else + ,"import qualified System.OsPath.Windows as AFP_W" + ,"import qualified System.OsPath.Posix as AFP_P" +#endif + ,"instance IsString WindowsString where fromString = WS . either (error . show) id . encodeWithTE (mkUTF16le TransliterateCodingFailure)" + ,"instance IsString PosixString where fromString = PS . either (error . show) id . encodeWithTE (mkUTF8 TransliterateCodingFailure)" + ,"#if defined(mingw32_HOST_OS) || defined(__MINGW32__)" + ,"instance IsString OsString where fromString = OsString . WS . either (error . show) id . 
encodeWithTE (mkUTF16le TransliterateCodingFailure)" + ,"#else" + ,"instance IsString OsString where fromString = OsString . PS . either (error . show) id . encodeWithTE (mkUTF8 TransliterateCodingFailure)" + ,"#endif" + ,"tests :: [(String, Property)]" + ,"tests ="] ++ + [" " ++ c ++ "(" ++ show t1 ++ ", " ++ t2 ++ ")" | (c,(t1,t2)) <- zip ("[":repeat ",") tests] ++ + [" ]"] + + + +data PW = P -- legacy posix + | W -- legacy windows + | AFP_P -- abstract-filepath posix + | AFP_W -- abstract-filepath windows + deriving Show + +data Test = Test + {testPlatform :: PW + ,testVars :: [(String,String)] -- generator constructor, variable + ,testBody :: [String] + } + + +parseTest :: String -> [Test] +parseTest (stripPrefix "-- > " -> Just x) = platform $ toLexemes x + where + platform ("Windows":":":x) = [valid W x, valid AFP_W x] + platform ("Posix" :":":x) = [valid P x, valid AFP_P x] + platform x = [valid P x, valid W x, valid AFP_P x, valid AFP_W x] + + valid p ("Valid":x) = free p a $ drop 1 b + where (a,b) = break (== "=>") x + valid p x = free p [] x + + free p val x = Test p [(ctor v, v) | v <- vars] x + where vars = nub $ sort [v | v@[c] <- x, isAlpha c] + ctor v | v < "x" = "" + | v `elem` val = "QFilePathValid" ++ show p + | otherwise = case p of + AFP_P -> if v == "z" then "QFilePathsAFP_P" else "QFilePathAFP_P" + AFP_W -> if v == "z" then "QFilePathsAFP_W" else "QFilePathAFP_W" + _ -> if v == "z" then "" else "QFilePath" +parseTest _ = [] + + +toLexemes :: String -> [String] +toLexemes x = case lex x of + [("","")] -> [] + [(x,y)] -> x : toLexemes y + y -> error $ "Generate.toLexemes, " ++ show x ++ " -> " ++ show y + + +fromLexemes :: [String] -> String +fromLexemes = unwords . 
f + where + f ("`":x:"`":xs) = ("`" ++ x ++ "`") : f xs + f (x:y:xs) | x `elem` ["[","("] || y `elem` [",",")","]"] = f $ (x ++ y) : xs + f (x:xs) = x : f xs + f [] = [] + + +renderTest :: Test -> (String, String) +renderTest Test{..} = (body, code) + where + code = "property $ " ++ if null testVars then body else "\\" ++ unwords vars ++ " -> " ++ body + vars = [if null ctor then v else "(" ++ ctor ++ " " ++ v ++ ")" | (ctor,v) <- testVars] + + body = fromLexemes $ map (qualify testPlatform) testBody + + + +qualify :: PW -> String -> String +qualify pw str + | str `elem` fpops || (all isAlpha str && length str > 1 && str `notElem` prelude) + = if str `elem` bs then qualifyBS str else show pw ++ "." ++ str + | otherwise = encode str + where + bs = ["null", "concat", "isPrefixOf", "isSuffixOf", "any"] + prelude = ["elem","uncurry","snd","fst","not","if","then","else" + ,"True","False","Just","Nothing","fromJust","foldr"] + fpops = ["","<.>","-<.>"] +#ifdef GHC_MAKE + encode v + | isString' v = case pw of + AFP_P -> "(encodeUtf8 " <> v <> ")" + AFP_W -> "(encodeUtf16LE " <> v <> ")" + _ -> v + | isChar' v = case pw of + AFP_P -> "(fromIntegral . C.ord $ " <> v <> ")" + AFP_W -> "(fromIntegral . C.ord $ " <> v <> ")" + _ -> v + | otherwise = v + isString' xs@('"':_:_) = last xs == '"' + isString' _ = False + isChar' xs@('\'':_:_) = last xs == '\'' + isChar' _ = False + qualifyBS v = case pw of + AFP_P -> "SBS." <> v + AFP_W -> "SBS16." <> v + _ -> v +#else + encode v + | isString' v = case pw of + AFP_P -> "(" <> v <> ")" + AFP_W -> "(" <> v <> ")" + _ -> v + | isChar' v = case pw of + AFP_P -> "(PW . fromIntegral . C.ord $ " <> v <> ")" + AFP_W -> "(WW . fromIntegral . C.ord $ " <> v <> ")" + _ -> v + | otherwise = v + isString' xs@('"':_:_) = last xs == '"' + isString' _ = False + isChar' xs@('\'':_:_) = last xs == '\'' + isChar' _ = False + qualifyBS v = case pw of + AFP_P + | v == "concat" -> "(PS . SBS." <> v <> " . 
fmap getPosixString)" + | v == "any" -> "(\\f (getPosixString -> x) -> SBS." <> v <> " (f . PW) x)" + | v == "isPrefixOf" -> "(\\(getPosixString -> x) (getPosixString -> y) -> SBS." <> v <> " x y)" + | v == "isSuffixOf" -> "(\\(getPosixString -> x) (getPosixString -> y) -> SBS." <> v <> " x y)" + | otherwise -> "(SBS." <> v <> " . getPosixString)" + AFP_W + | v == "concat" -> "(WS . SBS16." <> v <> " . fmap getWindowsString)" + | v == "any" -> "(\\f (getWindowsString -> x) -> SBS16." <> v <> " (f . WW) x)" + | v == "isPrefixOf" -> "(\\(getWindowsString -> x) (getWindowsString -> y) -> SBS16." <> v <> " x y)" + | v == "isSuffixOf" -> "(\\(getWindowsString -> x) (getWindowsString -> y) -> SBS16." <> v <> " x y)" + | otherwise -> "(SBS16." <> v <> " . getWindowsString)" + _ -> v +#endif + + + +--------------------------------------------------------------------- +-- UTILITIES + +writeFileBinary :: FilePath -> String -> IO () +writeFileBinary file x = withBinaryFile file WriteMode $ \h -> hPutStr h x + +readFileBinary' :: FilePath -> IO String +readFileBinary' file = withBinaryFile file ReadMode $ \h -> do + s <- hGetContents h + evaluate $ length s + pure s + +writeFileBinaryChanged :: FilePath -> String -> IO () +writeFileBinaryChanged file x = do + b <- doesFileExist file + old <- if b then fmap Just $ readFileBinary' file else pure Nothing + when (Just x /= old) $ + writeFileBinary file x diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/HACKING.md b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/HACKING.md new file mode 100644 index 0000000000..45a6837c13 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/HACKING.md @@ -0,0 +1,19 @@ +# Hacking + +Most of the code is in `System/FilePath/Internal.hs` which is `cpphs`'d into both `System/FilePath/Posix.hs` +and `System/FilePath/Windows.hs` via `make cpp` and commited to the repo. 
This Internal module is a bit weird +in that it isn't really a Haskell module, but is more an include file. + +The library has extensive doc tests. Anything starting with `-- >` is transformed into a doc test as a predicate +that must evaluate to `True`. These tests follow a few rules: + +* Tests prefixed with `Windows:` or `Posix:` are only tested against that specific + implementation - otherwise tests are run against both implementations. +* Any single letter variable, e.g. `x`, is considered universal quantification, and is checked with `QuickCheck`. +* If `Valid x =>` appears at the start of a doc test, that means the property + will only be tested with `x` passing the `isValid` predicate. + +The tests can be generated by `make gen` in the root of the repo, and will be placed in `tests/filepath-tests/TestGen.hs`. +The `TestGen.hs` file is checked into the repo, and the CI scripts check that `TestGen.hs` is in sync with +what would be generated a fresh - if you don't regenerate `TestGen.hs` the CI will fail. + diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/LICENSE b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/LICENSE new file mode 100644 index 0000000000..5fc319a5b2 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/LICENSE @@ -0,0 +1,30 @@ +Copyright Neil Mitchell 2005-2020. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. 
+ + * Neither the name of Neil Mitchell nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/Makefile b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/Makefile new file mode 100644 index 0000000000..a18a9155d1 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/Makefile @@ -0,0 +1,7 @@ +all: gen + +gen: + runhaskell Generate.hs + + +.PHONY: all gen diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/README.md b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/README.md new file mode 100644 index 0000000000..5ec5e51f5c --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/README.md @@ -0,0 +1,47 @@ +# FilePath [![Hackage version](https://img.shields.io/hackage/v/filepath.svg?label=Hackage)](https://hackage.haskell.org/package/filepath) + +The `filepath` package provides functionality for manipulating `FilePath` values, and is shipped with [GHC](https://www.haskell.org/ghc/). +It provides two variants for filepaths: + +1. 
legacy filepaths: `type FilePath = String` +2. operating system abstracted filepaths (`OsPath`): internally unpinned `ShortByteString` (platform-dependent encoding) + +It is recommended to use `OsPath` when possible, because it is more correct. + +For each variant there are three main modules: + +* `System.FilePath.Posix` / `System.OsPath.Posix` manipulates POSIX\/Linux style `FilePath` values (with `/` as the path separator). +* `System.FilePath.Windows` / `System.OsPath.Windows` manipulates Windows style `FilePath` values (with either `\` or `/` as the path separator, and deals with drives). +* `System.FilePath` / `System.OsPath` for dealing with current platform-specific filepaths + +All three modules provide the same API, and the same documentation (calling out differences in the different variants). + +`System.OsString` is like `System.OsPath`, but more general purpose. Refer to the documentation of +those modules for more information. + +### What is a `FilePath`? + +In Haskell, the legacy definition (used in `base` and Prelude) is `type FilePath = String`, +where a Haskell `String` is a list of Unicode code points. + +The new definition is (simplified) `newtype OsPath = AFP ShortByteString`, where +`ShortByteString` is an unpinned byte array and follows syscall conventions, preserving the encoding. + +On unix, filenames don't have a predefined encoding as per the +[POSIX specification](https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_170) +and are passed as `char[]` to syscalls. + +On windows (at least the API used by `Win32`) filepaths are UTF-16LE strings. + +You are encouraged to use `OsPath` whenever possible, because it is more correct. + +Also note that this is a low-level library and it makes no attempt at providing a more +type safe variant for filepaths (e.g. by distinguishing between absolute and relative +paths) and ensures no invariants (such as filepath validity). 
+ +For such libraries, check out the following: + +* [hpath](https://hackage.haskell.org/package/hpath) +* [path](https://hackage.haskell.org/package/path) +* [paths](https://hackage.haskell.org/package/paths) +* [strong-path](https://hackage.haskell.org/package/strong-path) diff --git a/test/integration/tests/1659-skip-component/files/Setup.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/Setup.hs similarity index 100% rename from test/integration/tests/1659-skip-component/files/Setup.hs rename to tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/Setup.hs diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath.hs new file mode 100644 index 0000000000..b760a319b3 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath.hs @@ -0,0 +1,147 @@ +{-# LANGUAGE CPP #-} +#if __GLASGOW_HASKELL__ >= 704 +{-# LANGUAGE Safe #-} +#endif +{- | +Module : System.FilePath +Copyright : (c) Neil Mitchell 2005-2014 +License : BSD3 + +Maintainer : ndmitchell@gmail.com +Stability : stable +Portability : portable + +A library for 'FilePath' manipulations, using Posix or Windows filepaths +depending on the platform. + +Both "System.FilePath.Posix" and "System.FilePath.Windows" provide the +same interface. + +Given the example 'FilePath': @\/directory\/file.ext@ + +We can use the following functions to extract pieces. + +* 'takeFileName' gives @\"file.ext\"@ + +* 'takeDirectory' gives @\"\/directory\"@ + +* 'takeExtension' gives @\".ext\"@ + +* 'dropExtension' gives @\"\/directory\/file\"@ + +* 'takeBaseName' gives @\"file\"@ + +And we could have built an equivalent path with the following expressions: + +* @\"\/directory\" '' \"file.ext\"@. + +* @\"\/directory\/file" '<.>' \"ext\"@. + +* @\"\/directory\/file.txt" '-<.>' \"ext\"@. 
+ +Each function in this module is documented with several examples, +which are also used as tests. + +Here are a few examples of using the @filepath@ functions together: + +/Example 1:/ Find the possible locations of a Haskell module @Test@ imported from module @Main@: + +@['replaceFileName' path_to_main \"Test\" '<.>' ext | ext <- [\"hs\",\"lhs\"] ]@ + +/Example 2:/ Download a file from @url@ and save it to disk: + +@do let file = 'makeValid' url + System.Directory.createDirectoryIfMissing True ('takeDirectory' file)@ + +/Example 3:/ Compile a Haskell file, putting the @.hi@ file under @interface@: + +@'takeDirectory' file '' \"interface\" '' ('takeFileName' file '-<.>' \"hi\")@ + +References: +[1] (Microsoft MSDN) +-} + + +#if defined(mingw32_HOST_OS) || defined(__MINGW32__) +module System.FilePath( + -- * Separator predicates + FilePath, + pathSeparator, pathSeparators, isPathSeparator, + searchPathSeparator, isSearchPathSeparator, + extSeparator, isExtSeparator, + + -- * @$PATH@ methods + splitSearchPath, getSearchPath, + + -- * Extension functions + splitExtension, + takeExtension, replaceExtension, (-<.>), dropExtension, addExtension, hasExtension, (<.>), + splitExtensions, dropExtensions, takeExtensions, replaceExtensions, isExtensionOf, + stripExtension, + + -- * Filename\/directory functions + splitFileName, + takeFileName, replaceFileName, dropFileName, + takeBaseName, replaceBaseName, + takeDirectory, replaceDirectory, + combine, (), + splitPath, joinPath, splitDirectories, + + -- * Drive functions + splitDrive, joinDrive, + takeDrive, hasDrive, dropDrive, isDrive, + + -- * Trailing slash functions + hasTrailingPathSeparator, + addTrailingPathSeparator, + dropTrailingPathSeparator, + + -- * File name manipulations + normalise, equalFilePath, + makeRelative, + isRelative, isAbsolute, + isValid, makeValid +) where +import System.FilePath.Windows +#else +module System.FilePath( + -- * Separator predicates + FilePath, + pathSeparator, pathSeparators, 
isPathSeparator, + searchPathSeparator, isSearchPathSeparator, + extSeparator, isExtSeparator, + + -- * @$PATH@ methods + splitSearchPath, getSearchPath, + + -- * Extension functions + splitExtension, + takeExtension, replaceExtension, (-<.>), dropExtension, addExtension, hasExtension, (<.>), + splitExtensions, dropExtensions, takeExtensions, replaceExtensions, isExtensionOf, + stripExtension, + + -- * Filename\/directory functions + splitFileName, + takeFileName, replaceFileName, dropFileName, + takeBaseName, replaceBaseName, + takeDirectory, replaceDirectory, + combine, (), + splitPath, joinPath, splitDirectories, + + -- * Drive functions + splitDrive, joinDrive, + takeDrive, hasDrive, dropDrive, isDrive, + + -- * Trailing slash functions + hasTrailingPathSeparator, + addTrailingPathSeparator, + dropTrailingPathSeparator, + + -- * File name manipulations + normalise, equalFilePath, + makeRelative, + isRelative, isAbsolute, + isValid, makeValid +) where +import System.FilePath.Posix +#endif diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath/Internal.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath/Internal.hs new file mode 100644 index 0000000000..0c92b3e0af --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath/Internal.hs @@ -0,0 +1,1350 @@ +{-# LANGUAGE PatternGuards #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE MultiWayIf #-} + +-- This template expects CPP definitions for: +-- MODULE_NAME = Posix | Windows +-- IS_WINDOWS = False | True + +-- | +-- Module : System.FilePath.MODULE_NAME +-- Copyright : (c) Neil Mitchell 2005-2014 +-- License : BSD3 +-- +-- Maintainer : ndmitchell@gmail.com +-- Stability : stable +-- Portability : portable +-- +-- A library for 'FilePath' manipulations, using MODULE_NAME style paths on +-- all platforms. Importing "System.FilePath" is usually better. 
+-- +-- Given the example 'FilePath': @\/directory\/file.ext@ +-- +-- We can use the following functions to extract pieces. +-- +-- * 'takeFileName' gives @\"file.ext\"@ +-- +-- * 'takeDirectory' gives @\"\/directory\"@ +-- +-- * 'takeExtension' gives @\".ext\"@ +-- +-- * 'dropExtension' gives @\"\/directory\/file\"@ +-- +-- * 'takeBaseName' gives @\"file\"@ +-- +-- And we could have built an equivalent path with the following expressions: +-- +-- * @\"\/directory\" '' \"file.ext\"@. +-- +-- * @\"\/directory\/file" '<.>' \"ext\"@. +-- +-- * @\"\/directory\/file.txt" '-<.>' \"ext\"@. +-- +-- Each function in this module is documented with several examples, +-- which are also used as tests. +-- +-- Here are a few examples of using the @filepath@ functions together: +-- +-- /Example 1:/ Find the possible locations of a Haskell module @Test@ imported from module @Main@: +-- +-- @['replaceFileName' path_to_main \"Test\" '<.>' ext | ext <- [\"hs\",\"lhs\"] ]@ +-- +-- /Example 2:/ Download a file from @url@ and save it to disk: +-- +-- @do let file = 'makeValid' url +-- System.Directory.createDirectoryIfMissing True ('takeDirectory' file)@ +-- +-- /Example 3:/ Compile a Haskell file, putting the @.hi@ file under @interface@: +-- +-- @'takeDirectory' file '' \"interface\" '' ('takeFileName' file '-<.>' \"hi\")@ +-- +-- References: +-- [1] (Microsoft MSDN) +#ifndef OS_PATH +module System.FilePath.MODULE_NAME +#else +module System.OsPath.MODULE_NAME.Internal +#endif + ( + -- * Separator predicates +#ifndef OS_PATH + FilePath, +#endif + pathSeparator, pathSeparators, isPathSeparator, + searchPathSeparator, isSearchPathSeparator, + extSeparator, isExtSeparator, + + -- * @$PATH@ methods + splitSearchPath, +#ifndef OS_PATH + getSearchPath, +#endif + + -- * Extension functions + splitExtension, + takeExtension, replaceExtension, (-<.>), dropExtension, addExtension, hasExtension, (<.>), + splitExtensions, dropExtensions, takeExtensions, replaceExtensions, isExtensionOf, + 
stripExtension, + + -- * Filename\/directory functions + splitFileName, + takeFileName, replaceFileName, dropFileName, + takeBaseName, replaceBaseName, + takeDirectory, replaceDirectory, + combine, (), + splitPath, joinPath, splitDirectories, + + -- * Drive functions + splitDrive, joinDrive, + takeDrive, hasDrive, dropDrive, isDrive, + + -- * Trailing slash functions + hasTrailingPathSeparator, + addTrailingPathSeparator, + dropTrailingPathSeparator, + + -- * File name manipulations + normalise, equalFilePath, + makeRelative, + isRelative, isAbsolute, + isValid, makeValid + ) + where + +{- HLINT ignore "Use fewer imports" -} +import Prelude (Char, Bool(..), Maybe(..), (.), (&&), (<=), not, fst, maybe, (||), (==), ($), otherwise, fmap, mempty, (>=), (/=), (++), snd) +import Data.Bifunctor (first) +import Data.Semigroup ((<>)) +import qualified Prelude as P +import Data.Maybe(fromMaybe, isJust) +import qualified Data.List as L + +#ifndef OS_PATH +import Data.String (fromString) +import System.Environment(getEnv) +import Prelude (String, map, FilePath, Eq, IO, id, last, init, reverse, dropWhile, null, break, take, all, elem, any, span) +import Data.Char(toLower, toUpper, isAsciiLower, isAsciiUpper) +import Data.List(stripPrefix, isSuffixOf, uncons, dropWhileEnd) +#define CHAR Char +#define STRING String +#define FILEPATH FilePath +#else +import Prelude (fromIntegral, return, IO, Either(..)) +import Control.Exception ( catch, displayException, evaluate, fromException, toException, throwIO, Exception, SomeAsyncException(..), SomeException ) +import Control.DeepSeq (force) +import GHC.IO (unsafePerformIO) +import qualified Data.Char as C +#ifdef WINDOWS +import GHC.IO.Encoding.Failure ( CodingFailureMode(..) 
) +import GHC.IO.Encoding.UTF16 ( mkUTF16le ) +import qualified GHC.Foreign as GHC +import Data.Word ( Word16 ) +import System.OsString.Data.ByteString.Short.Word16 +import System.OsString.Data.ByteString.Short ( packCStringLen ) +#define CHAR Word16 +#define STRING ShortByteString +#define FILEPATH ShortByteString +#else +import GHC.IO.Encoding.Failure ( CodingFailureMode(..) ) +import qualified GHC.Foreign as GHC +import GHC.IO.Encoding.UTF8 ( mkUTF8 ) +import Data.Word ( Word8 ) +import System.OsString.Data.ByteString.Short +#define CHAR Word8 +#define STRING ShortByteString +#define FILEPATH ShortByteString +#endif +#endif + + +infixr 7 <.>, -<.> +infixr 5 </> + + + + + +--------------------------------------------------------------------- +-- Platform Abstraction Methods (private) + +-- | Is the operating system Unix or Linux like +isPosix :: Bool +isPosix = not isWindows + +-- | Is the operating system Windows like +isWindows :: Bool +isWindows = IS_WINDOWS + + +--------------------------------------------------------------------- +-- The basic functions + +-- | The character that separates directories. In the case where more than +-- one character is possible, 'pathSeparator' is the \'ideal\' one. +-- +-- > Windows: pathSeparator == '\\' +-- > Posix: pathSeparator == '/' +-- > isPathSeparator pathSeparator +pathSeparator :: CHAR +pathSeparator = if isWindows then _backslash else _slash + +-- | The list of all possible separators. +-- +-- > Windows: pathSeparators == ['\\', '/'] +-- > Posix: pathSeparators == ['/'] +-- > pathSeparator `elem` pathSeparators +pathSeparators :: [CHAR] +pathSeparators = if isWindows then [_backslash, _slash] else [_slash] + +-- | Rather than using @(== 'pathSeparator')@, use this. Test if something +-- is a path separator.
+-- +-- > isPathSeparator a == (a `elem` pathSeparators) +isPathSeparator :: CHAR -> Bool +isPathSeparator c + | c == _slash = True + | c == _backslash = isWindows + | otherwise = False + + +-- | The character that is used to separate the entries in the $PATH environment variable. +-- +-- > Windows: searchPathSeparator == ';' +-- > Posix: searchPathSeparator == ':' +searchPathSeparator :: CHAR +searchPathSeparator = if isWindows then _semicolon else _colon + +-- | Is the character a file separator? +-- +-- > isSearchPathSeparator a == (a == searchPathSeparator) +isSearchPathSeparator :: CHAR -> Bool +isSearchPathSeparator = (== searchPathSeparator) + + +-- | File extension character +-- +-- > extSeparator == '.' +extSeparator :: CHAR +extSeparator = _period + +-- | Is the character an extension character? +-- +-- > isExtSeparator a == (a == extSeparator) +isExtSeparator :: CHAR -> Bool +isExtSeparator = (== extSeparator) + + +--------------------------------------------------------------------- +-- Path methods (environment $PATH) + +-- | Take a string, split it on the 'searchPathSeparator' character. +-- Blank items are ignored on Windows, and converted to @.@ on Posix. +-- On Windows path elements are stripped of quotes. 
+-- +-- Follows the recommendations in +-- +-- +-- > Posix: splitSearchPath "File1:File2:File3" == ["File1","File2","File3"] +-- > Posix: splitSearchPath "File1::File2:File3" == ["File1",".","File2","File3"] +-- > Windows: splitSearchPath "File1;File2;File3" == ["File1","File2","File3"] +-- > Windows: splitSearchPath "File1;;File2;File3" == ["File1","File2","File3"] +-- > Windows: splitSearchPath "File1;\"File2\";File3" == ["File1","File2","File3"] +splitSearchPath :: STRING -> [FILEPATH] +splitSearchPath = f + where + f xs = let (pre, post) = break isSearchPathSeparator xs + in case uncons post of + Nothing -> g pre + Just (_, t) -> g pre ++ f t + + g x = case uncons x of + Nothing -> [singleton _period | isPosix] + Just (h, t) + | h == _quotedbl + , (Just _) <- uncons t -- >= 2 + , isWindows + , (Just (i, l)) <- unsnoc t + , l == _quotedbl -> [i] + | otherwise -> [x] + + +-- TODO for AFPP +#ifndef OS_PATH +-- | Get a list of 'FILEPATH's in the $PATH variable. +getSearchPath :: IO [FILEPATH] +getSearchPath = fmap splitSearchPath (getEnv "PATH") +#endif + + +--------------------------------------------------------------------- +-- Extension methods + +-- | Split on the extension. 'addExtension' is the inverse. +-- +-- > splitExtension "/directory/path.ext" == ("/directory/path",".ext") +-- > uncurry (<>) (splitExtension x) == x +-- > Valid x => uncurry addExtension (splitExtension x) == x +-- > splitExtension "file.txt" == ("file",".txt") +-- > splitExtension "file" == ("file","") +-- > splitExtension "file/file.txt" == ("file/file",".txt") +-- > splitExtension "file.txt/boris" == ("file.txt/boris","") +-- > splitExtension "file.txt/boris.ext" == ("file.txt/boris",".ext") +-- > splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob",".fred") +-- > splitExtension "file/path.txt/" == ("file/path.txt/","") + +-- A naive implementation would be to use @splitFileName_@ first, +-- then break filename into basename and extension, then recombine dir and basename. 
+-- This is way too expensive, see @splitFileName_@ comment for discussion. +-- +-- Instead we speculatively split on the extension separator first, then check +-- whether results are well-formed. +splitExtension :: FILEPATH -> (STRING, STRING) +splitExtension x + -- Imagine x = "no-dots", then nameDot = "" + | null nameDot = (x, mempty) + -- Imagine x = "\\shared.with.dots\no-dots" + | isWindows && null (dropDrive nameDot) = (x, mempty) + -- Imagine x = "dir.with.dots/no-dots" + | any isPathSeparator ext = (x, mempty) + | otherwise = (init nameDot, extSeparator `cons` ext) + where + (nameDot, ext) = breakEnd isExtSeparator x + +-- | Get the extension of a file, returns @\"\"@ for no extension, @.ext@ otherwise. +-- +-- > takeExtension "/directory/path.ext" == ".ext" +-- > takeExtension x == snd (splitExtension x) +-- > Valid x => takeExtension (addExtension x "ext") == ".ext" +-- > Valid x => takeExtension (replaceExtension x "ext") == ".ext" +takeExtension :: FILEPATH -> STRING +takeExtension = snd . splitExtension + +-- | Remove the current extension and add another, equivalent to 'replaceExtension'. +-- +-- > "/directory/path.txt" -<.> "ext" == "/directory/path.ext" +-- > "/directory/path.txt" -<.> ".ext" == "/directory/path.ext" +-- > "foo.o" -<.> "c" == "foo.c" +(-<.>) :: FILEPATH -> STRING -> FILEPATH +(-<.>) = replaceExtension + +-- | Set the extension of a file, overwriting one if already present, equivalent to '-<.>'. 
+-- +-- > replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext" +-- > replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext" +-- > replaceExtension "file.txt" ".bob" == "file.bob" +-- > replaceExtension "file.txt" "bob" == "file.bob" +-- > replaceExtension "file" ".bob" == "file.bob" +-- > replaceExtension "file.txt" "" == "file" +-- > replaceExtension "file.fred.bob" "txt" == "file.fred.txt" +-- > replaceExtension x y == addExtension (dropExtension x) y +replaceExtension :: FILEPATH -> STRING -> FILEPATH +replaceExtension x y = dropExtension x <.> y + +-- | Add an extension, even if there is already one there, equivalent to 'addExtension'. +-- +-- > "/directory/path" <.> "ext" == "/directory/path.ext" +-- > "/directory/path" <.> ".ext" == "/directory/path.ext" +(<.>) :: FILEPATH -> STRING -> FILEPATH +(<.>) = addExtension + +-- | Remove last extension, and the \".\" preceding it. +-- +-- > dropExtension "/directory/path.ext" == "/directory/path" +-- > dropExtension x == fst (splitExtension x) +dropExtension :: FILEPATH -> FILEPATH +dropExtension = fst . splitExtension + +-- | Add an extension, even if there is already one there, equivalent to '<.>'. +-- +-- > addExtension "/directory/path" "ext" == "/directory/path.ext" +-- > addExtension "file.txt" "bib" == "file.txt.bib" +-- > addExtension "file." ".bib" == "file..bib" +-- > addExtension "file" ".bib" == "file.bib" +-- > addExtension "/" "x" == "/.x" +-- > addExtension x "" == x +-- > Valid x => takeFileName (addExtension (addTrailingPathSeparator x) "ext") == ".ext" +-- > Windows: addExtension "\\\\share" ".txt" == "\\\\share\\.txt" +addExtension :: FILEPATH -> STRING -> FILEPATH +addExtension file xs = case uncons xs of + Nothing -> file + Just (x, _) -> joinDrive a res + where + res = if isExtSeparator x then b <> xs + else b <> (extSeparator `cons` xs) + + (a,b) = splitDrive file + +-- | Does the given filename have an extension? 
+-- +-- > hasExtension "/directory/path.ext" == True +-- > hasExtension "/directory/path" == False +-- > null (takeExtension x) == not (hasExtension x) +hasExtension :: FILEPATH -> Bool +hasExtension = any isExtSeparator . takeFileName + + +-- | Does the given filename have the specified extension? +-- +-- > "png" `isExtensionOf` "/directory/file.png" == True +-- > ".png" `isExtensionOf` "/directory/file.png" == True +-- > ".tar.gz" `isExtensionOf` "bar/foo.tar.gz" == True +-- > "ar.gz" `isExtensionOf` "bar/foo.tar.gz" == False +-- > "png" `isExtensionOf` "/directory/file.png.jpg" == False +-- > "csv/table.csv" `isExtensionOf` "/data/csv/table.csv" == False +isExtensionOf :: STRING -> FILEPATH -> Bool +isExtensionOf ext = \fp -> case uncons ext of + Just (x, _) + | x == _period -> isSuffixOf ext . takeExtensions $ fp + _ -> isSuffixOf (_period `cons` ext) . takeExtensions $ fp + +-- | Drop the given extension from a FILEPATH, and the @\".\"@ preceding it. +-- Returns 'Nothing' if the FILEPATH does not have the given extension, or +-- 'Just' and the part before the extension if it does. +-- +-- This function can be more predictable than 'dropExtensions', especially if the filename +-- might itself contain @.@ characters. +-- +-- > stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x" +-- > stripExtension "hi.o" "foo.x.hs.o" == Nothing +-- > dropExtension x == fromJust (stripExtension (takeExtension x) x) +-- > dropExtensions x == fromJust (stripExtension (takeExtensions x) x) +-- > stripExtension ".c.d" "a.b.c.d" == Just "a.b" +-- > stripExtension ".c.d" "a.b..c.d" == Just "a.b." 
+-- > stripExtension "baz" "foo.bar" == Nothing +-- > stripExtension "bar" "foobar" == Nothing +-- > stripExtension "" x == Just x +stripExtension :: STRING -> FILEPATH -> Maybe FILEPATH +stripExtension ext path = case uncons ext of + Just (x, _) -> let dotExt = if isExtSeparator x then ext else _period `cons` ext + in stripSuffix dotExt path + Nothing -> Just path + + +-- | Split on all extensions. +-- +-- > splitExtensions "/directory/path.ext" == ("/directory/path",".ext") +-- > splitExtensions "file.tar.gz" == ("file",".tar.gz") +-- > uncurry (<>) (splitExtensions x) == x +-- > Valid x => uncurry addExtension (splitExtensions x) == x +splitExtensions :: FILEPATH -> (FILEPATH, STRING) +splitExtensions x = (a <> c, d) + where + (a,b) = splitFileName_ x + (c,d) = break isExtSeparator b + +-- | Drop all extensions. +-- +-- > dropExtensions "/directory/path.ext" == "/directory/path" +-- > dropExtensions "file.tar.gz" == "file" +-- > not $ hasExtension $ dropExtensions x +-- > not $ any isExtSeparator $ takeFileName $ dropExtensions x +dropExtensions :: FILEPATH -> FILEPATH +dropExtensions = fst . splitExtensions + +-- | Get all extensions. +-- +-- > takeExtensions "/directory/path.ext" == ".ext" +-- > takeExtensions "file.tar.gz" == ".tar.gz" +takeExtensions :: FILEPATH -> STRING +takeExtensions = snd . splitExtensions + + +-- | Replace all extensions of a file with a new extension. Note +-- that 'replaceExtension' and 'addExtension' both work for adding +-- multiple extensions, so only required when you need to drop +-- all extensions first. +-- +-- > replaceExtensions "file.fred.bob" "txt" == "file.txt" +-- > replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz" +replaceExtensions :: FILEPATH -> STRING -> FILEPATH +replaceExtensions x y = dropExtensions x <.> y + + + +--------------------------------------------------------------------- +-- Drive methods + +-- | Is the given character a valid drive letter? 
+-- only a-z and A-Z are letters, not isAlpha which is more unicodey +isLetter :: CHAR -> Bool +isLetter x = isAsciiLower x || isAsciiUpper x + + +-- | Split a path into a drive and a path. +-- On Posix, \/ is a Drive. +-- +-- > uncurry (<>) (splitDrive x) == x +-- > Windows: splitDrive "file" == ("","file") +-- > Windows: splitDrive "c:/file" == ("c:/","file") +-- > Windows: splitDrive "c:\\file" == ("c:\\","file") +-- > Windows: splitDrive "\\\\shared\\test" == ("\\\\shared\\","test") +-- > Windows: splitDrive "\\\\shared" == ("\\\\shared","") +-- > Windows: splitDrive "\\\\?\\UNC\\shared\\file" == ("\\\\?\\UNC\\shared\\","file") +-- > Windows: splitDrive "\\\\?\\UNCshared\\file" == ("\\\\?\\","UNCshared\\file") +-- > Windows: splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\","file") +-- > Windows: splitDrive "/d" == ("","/d") +-- > Posix: splitDrive "/test" == ("/","test") +-- > Posix: splitDrive "//test" == ("//","test") +-- > Posix: splitDrive "test/file" == ("","test/file") +-- > Posix: splitDrive "file" == ("","file") +splitDrive :: FILEPATH -> (FILEPATH, FILEPATH) +splitDrive x | isPosix = span (== _slash) x +splitDrive x | Just y <- readDriveLetter x = y +splitDrive x | Just y <- readDriveUNC x = y +splitDrive x | Just y <- readDriveShare x = y +splitDrive x = (mempty, x) + +addSlash :: FILEPATH -> FILEPATH -> (FILEPATH, FILEPATH) +addSlash a xs = (a <> c, d) + where (c, d) = span isPathSeparator xs + +-- See [1]. +-- "\\?\D:\" or "\\?\UNC\\" +readDriveUNC :: FILEPATH -> Maybe (FILEPATH, FILEPATH) +readDriveUNC bs = case unpack bs of + (s1:s2:q:s3:xs) + | q == _question && L.all isPathSeparator [s1,s2,s3] -> + case L.map toUpper xs of + (u:n:c:s4:_) + | u == _U && n == _N && c == _C && isPathSeparator s4 -> + let (a,b) = readDriveShareName (pack (L.drop 4 xs)) + in Just (pack (s1:s2:_question:s3:L.take 4 xs) <> a, b) + _ -> case readDriveLetter (pack xs) of + -- Extended-length path. 
+ Just (a,b) -> Just (pack [s1,s2,_question,s3] <> a, b) + Nothing -> Nothing + _ -> Nothing + +{- c:\ -} +readDriveLetter :: STRING -> Maybe (FILEPATH, FILEPATH) +readDriveLetter bs = case uncons2 bs of + Nothing -> Nothing + Just (x, c, ys) + | isLetter x, c == _colon -> Just $ case uncons ys of + Just (y, _) + | isPathSeparator y -> addSlash (pack [x,_colon]) ys + _ -> (pack [x,_colon], ys) + | otherwise -> Nothing + +{- \\sharename\ -} +readDriveShare :: STRING -> Maybe (FILEPATH, FILEPATH) +readDriveShare bs = case unpack bs of + (s1:s2:xs) | isPathSeparator s1 && isPathSeparator s2 -> + let (a, b) = readDriveShareName (pack xs) + in Just (s1 `cons` (s2 `cons` a), b) + _ -> Nothing + +{- assume you have already seen \\ -} +{- share\bob -> "share\", "bob" -} +readDriveShareName :: STRING -> (FILEPATH, FILEPATH) +readDriveShareName name = addSlash a b + where (a,b) = break isPathSeparator name + + + +-- | Join a drive and the rest of the path. +-- +-- > Valid x => uncurry joinDrive (splitDrive x) == x +-- > Windows: joinDrive "C:" "foo" == "C:foo" +-- > Windows: joinDrive "C:\\" "bar" == "C:\\bar" +-- > Windows: joinDrive "\\\\share" "foo" == "\\\\share\\foo" +-- > Windows: joinDrive "/:" "foo" == "/:\\foo" +joinDrive :: FILEPATH -> FILEPATH -> FILEPATH +joinDrive = combineAlways + +-- | Get the drive from a filepath. +-- +-- > takeDrive x == fst (splitDrive x) +takeDrive :: FILEPATH -> FILEPATH +takeDrive = fst . splitDrive + +-- | Delete the drive, if it exists. +-- +-- > dropDrive x == snd (splitDrive x) +dropDrive :: FILEPATH -> FILEPATH +dropDrive = snd . splitDrive + +-- | Does a path have a drive. +-- +-- > not (hasDrive x) == null (takeDrive x) +-- > Posix: hasDrive "/foo" == True +-- > Windows: hasDrive "C:\\foo" == True +-- > Windows: hasDrive "C:foo" == True +-- > hasDrive "foo" == False +-- > hasDrive "" == False +hasDrive :: FILEPATH -> Bool +hasDrive = not . null . 
takeDrive + + +-- | Is an element a drive +-- +-- > Posix: isDrive "/" == True +-- > Posix: isDrive "/foo" == False +-- > Windows: isDrive "C:\\" == True +-- > Windows: isDrive "C:\\foo" == False +-- > isDrive "" == False +isDrive :: FILEPATH -> Bool +isDrive x = not (null x) && null (dropDrive x) + + +--------------------------------------------------------------------- +-- Operations on a filepath, as a list of directories + +-- | Split a filename into directory and file. '' is the inverse. +-- The first component will often end with a trailing slash. +-- +-- > splitFileName "/directory/file.ext" == ("/directory/","file.ext") +-- > Valid x => uncurry () (splitFileName x) == x || fst (splitFileName x) == "./" +-- > Valid x => isValid (fst (splitFileName x)) +-- > splitFileName "file/bob.txt" == ("file/", "bob.txt") +-- > splitFileName "file/" == ("file/", "") +-- > splitFileName "bob" == ("./", "bob") +-- > Posix: splitFileName "/" == ("/","") +-- > Windows: splitFileName "c:" == ("c:","") +-- > Windows: splitFileName "\\\\?\\A:\\fred" == ("\\\\?\\A:\\","fred") +-- > Windows: splitFileName "\\\\?\\A:" == ("\\\\?\\A:","") +splitFileName :: FILEPATH -> (STRING, STRING) +splitFileName x = if null path + then (dotSlash, file) + else (path, file) + where + (path, file) = splitFileName_ x + dotSlash = _period `cons` singleton _slash + +-- version of splitFileName where, if the FILEPATH has no directory +-- component, the returned directory is "" rather than "./". This +-- is used in cases where we are going to combine the returned +-- directory to make a valid FILEPATH, and having a "./" appear would +-- look strange and upset simple equality properties. See +-- e.g. replaceFileName. 
+-- +-- A naive implementation is +-- +-- splitFileName_ fp = (drv <> dir, file) +-- where +-- (drv, pth) = splitDrive fp +-- (dir, file) = breakEnd isPathSeparator pth +-- +-- but it is undesirable for two reasons: +-- * splitDrive is very slow on Windows, +-- * we unconditionally allocate 5 FilePath objects where only 2 would normally suffice. +-- +-- In the implementation below we first speculatively split the input by the last path +-- separator. In the vast majority of cases this is already the answer, except +-- two exceptional cases explained below. +-- +splitFileName_ :: FILEPATH -> (STRING, STRING) +splitFileName_ fp + -- If dirSlash is empty, @fp@ is either a genuine filename without any dir, + -- or just a Windows drive name without slash like "c:". + -- Run readDriveLetter to figure out. + | isWindows + , null dirSlash + = fromMaybe (mempty, fp) (readDriveLetter fp) + -- Another Windows quirk is that @fp@ could have been a shared drive "\\share" + -- or UNC location "\\?\UNC\foo", where path separator is a part of the drive name. + -- We can test this by trying dropDrive and falling back to splitDrive. + | isWindows + = case uncons2 dirSlash of + Just (s1, s2, bs') + | isPathSeparator s1 + -- If bs' is empty, then s2 as the last character of dirSlash must be a path separator, + -- so we are in the middle of shared drive. + -- Otherwise, since s1 is a path separator, we might be in the middle of UNC path. 
+ , null bs' || maybe False isIncompleteUNC (readDriveUNC dirSlash) + -> (fp, mempty) + -- This handles inputs like "//?/A:" and "//?/A:foo" + | isPathSeparator s1 + , isPathSeparator s2 + , Just (s3, s4, bs'') <- uncons2 bs' + , s3 == _question + , isPathSeparator s4 + , null bs'' + , Just (drive, rest) <- readDriveLetter file + -> (dirSlash <> drive, rest) + _ -> (dirSlash, file) + | otherwise + = (dirSlash, file) + where + (dirSlash, file) = breakEnd isPathSeparator fp + dropExcessTrailingPathSeparators x + | hasTrailingPathSeparator x + , let x' = dropWhileEnd isPathSeparator x + , otherwise = if | null x' -> singleton (last x) + | otherwise -> addTrailingPathSeparator x' + | otherwise = x + + -- an "incomplete" UNC is one without a path (but potentially a drive) + isIncompleteUNC (pref, suff) = null suff && not (hasPenultimateColon pref) + + -- e.g. @//?/a:/@ or @//?/a://@, but not @//?/a:@ + hasPenultimateColon pref + | hasTrailingPathSeparator pref + = maybe False (maybe False ((== _colon) . snd) . unsnoc . fst) . unsnoc . dropExcessTrailingPathSeparators $ pref + | otherwise = False + +-- | Set the filename. +-- +-- > replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext" +-- > Valid x => replaceFileName x (takeFileName x) == x +replaceFileName :: FILEPATH -> STRING -> FILEPATH +replaceFileName x y = a y where (a,_) = splitFileName_ x + +-- | Drop the filename. Unlike 'takeDirectory', this function will leave +-- a trailing path separator on the directory. +-- +-- > dropFileName "/directory/file.ext" == "/directory/" +-- > dropFileName x == fst (splitFileName x) +-- > isPrefixOf (takeDrive x) (dropFileName x) +dropFileName :: FILEPATH -> FILEPATH +dropFileName = fst . splitFileName + + +-- | Get the file name. 
+-- +-- > takeFileName "/directory/file.ext" == "file.ext" +-- > takeFileName "test/" == "" +-- > isSuffixOf (takeFileName x) x +-- > takeFileName x == snd (splitFileName x) +-- > Valid x => takeFileName (replaceFileName x "fred") == "fred" +-- > Valid x => takeFileName (x "fred") == "fred" +-- > Valid x => isRelative (takeFileName x) +takeFileName :: FILEPATH -> FILEPATH +takeFileName = snd . splitFileName + +-- | Get the base name, without an extension or path. +-- +-- > takeBaseName "/directory/file.ext" == "file" +-- > takeBaseName "file/test.txt" == "test" +-- > takeBaseName "dave.ext" == "dave" +-- > takeBaseName "" == "" +-- > takeBaseName "test" == "test" +-- > takeBaseName (addTrailingPathSeparator x) == "" +-- > takeBaseName "file/file.tar.gz" == "file.tar" +takeBaseName :: FILEPATH -> STRING +takeBaseName = dropExtension . takeFileName + +-- | Set the base name. +-- +-- > replaceBaseName "/directory/other.ext" "file" == "/directory/file.ext" +-- > replaceBaseName "file/test.txt" "bob" == "file/bob.txt" +-- > replaceBaseName "fred" "bill" == "bill" +-- > replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar" +-- > Valid x => replaceBaseName x (takeBaseName x) == x +replaceBaseName :: FILEPATH -> STRING -> FILEPATH +replaceBaseName pth nam = combineAlways a (nam <.> ext) + where + (a,b) = splitFileName_ pth + ext = takeExtension b + +-- | Is an item either a directory or the last character a path separator? +-- +-- > hasTrailingPathSeparator "test" == False +-- > hasTrailingPathSeparator "test/" == True +hasTrailingPathSeparator :: FILEPATH -> Bool +hasTrailingPathSeparator x + | null x = False + | otherwise = isPathSeparator $ last x + + +hasLeadingPathSeparator :: FILEPATH -> Bool +hasLeadingPathSeparator = maybe False (isPathSeparator . fst) . uncons + + +-- | Add a trailing file path separator if one is not already present. 
+-- +-- > hasTrailingPathSeparator (addTrailingPathSeparator x) +-- > hasTrailingPathSeparator x ==> addTrailingPathSeparator x == x +-- > Posix: addTrailingPathSeparator "test/rest" == "test/rest/" +addTrailingPathSeparator :: FILEPATH -> FILEPATH +addTrailingPathSeparator x = if hasTrailingPathSeparator x then x else x <> singleton pathSeparator + + +-- | Remove any trailing path separators +-- +-- > dropTrailingPathSeparator "file/test/" == "file/test" +-- > dropTrailingPathSeparator "/" == "/" +-- > Windows: dropTrailingPathSeparator "\\" == "\\" +-- > Posix: not (hasTrailingPathSeparator (dropTrailingPathSeparator x)) || isDrive x +dropTrailingPathSeparator :: FILEPATH -> FILEPATH +dropTrailingPathSeparator x = + if hasTrailingPathSeparator x && not (isDrive x) + then let x' = dropWhileEnd isPathSeparator x + in if null x' then singleton (last x) else x' + else x + + +-- | Get the directory name, move up one level. +-- +-- > takeDirectory "/directory/other.ext" == "/directory" +-- > isPrefixOf (takeDirectory x) x || takeDirectory x == "." +-- > takeDirectory "foo" == "." +-- > takeDirectory "/" == "/" +-- > takeDirectory "/foo" == "/" +-- > takeDirectory "/foo/bar/baz" == "/foo/bar" +-- > takeDirectory "/foo/bar/baz/" == "/foo/bar/baz" +-- > takeDirectory "foo/bar/baz" == "foo/bar" +-- > Windows: takeDirectory "foo\\bar" == "foo" +-- > Windows: takeDirectory "foo\\bar\\\\" == "foo\\bar" +-- > Windows: takeDirectory "C:\\" == "C:\\" +takeDirectory :: FILEPATH -> FILEPATH +takeDirectory = dropTrailingPathSeparator . dropFileName + +-- | Set the directory, keeping the filename the same. +-- +-- > replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext" +-- > Valid x => replaceDirectory x (takeDirectory x) `equalFilePath` x +replaceDirectory :: FILEPATH -> STRING -> FILEPATH +replaceDirectory x dir = combineAlways dir (takeFileName x) + + +-- | An alias for ''. 
+combine :: FILEPATH -> FILEPATH -> FILEPATH +combine a b | hasLeadingPathSeparator b || hasDrive b = b + | otherwise = combineAlways a b + +-- | Combine two paths, assuming rhs is NOT absolute. +combineAlways :: FILEPATH -> FILEPATH -> FILEPATH +combineAlways a b | null a = b + | null b = a + | hasTrailingPathSeparator a = a <> b + | otherwise = case unpack a of + [a1, a2] | isWindows + , isLetter a1 + , a2 == _colon -> a <> b + _ -> a <> (pathSeparator `cons` b) + + +-- | Combine two paths with a path separator. +-- If the second path starts with a path separator or a drive letter, then it returns the second. +-- The intention is that @readFile (dir '</>' file)@ will access the same file as +-- @setCurrentDirectory dir; readFile file@. +-- +-- > Posix: "/directory" </> "file.ext" == "/directory/file.ext" +-- > Windows: "/directory" </> "file.ext" == "/directory\\file.ext" +-- > "directory" </> "/file.ext" == "/file.ext" +-- > Valid x => (takeDirectory x </> takeFileName x) `equalFilePath` x +-- +-- Combined: +-- +-- > Posix: "/" </> "test" == "/test" +-- > Posix: "home" </> "bob" == "home/bob" +-- > Posix: "x:" </> "foo" == "x:/foo" +-- > Windows: "C:\\foo" </> "bar" == "C:\\foo\\bar" +-- > Windows: "home" </> "bob" == "home\\bob" +-- +-- Not combined: +-- +-- > Posix: "home" </> "/bob" == "/bob" +-- > Windows: "home" </> "C:\\bob" == "C:\\bob" +-- +-- Not combined (tricky): +-- +-- On Windows, if a filepath starts with a single slash, it is relative to the +-- root of the current drive. In [1], this is (confusingly) referred to as an +-- absolute path. +-- The current behavior of '</>' is to never combine these forms. +-- +-- > Windows: "home" </> "/bob" == "/bob" +-- > Windows: "home" </> "\\bob" == "\\bob" +-- > Windows: "C:\\home" </> "\\bob" == "\\bob" +-- +-- On Windows, from [1]: "If a file name begins with only a disk designator +-- but not the backslash after the colon, it is interpreted as a relative path +-- to the current directory on the drive with the specified letter."
+-- The current behavior of '</>' is to never combine these forms. +-- +-- > Windows: "D:\\foo" </> "C:bar" == "C:bar" +-- > Windows: "C:\\foo" </> "C:bar" == "C:bar" +(</>) :: FILEPATH -> FILEPATH -> FILEPATH +(</>) = combine + + +-- | Split a path by the directory separator. +-- +-- > splitPath "/directory/file.ext" == ["/","directory/","file.ext"] +-- > concat (splitPath x) == x +-- > splitPath "test//item/" == ["test//","item/"] +-- > splitPath "test/item/file" == ["test/","item/","file"] +-- > splitPath "" == [] +-- > Windows: splitPath "c:\\test\\path" == ["c:\\","test\\","path"] +-- > Posix: splitPath "/file/test" == ["/","file/","test"] +splitPath :: FILEPATH -> [FILEPATH] +splitPath x = [drive | not (null drive)] ++ f path + where + (drive, path) = splitDrive x + + f y + | null y = [] + | otherwise = (a <> c) : f d + where + (a,b) = break isPathSeparator y + (c,d) = span isPathSeparator b + +-- | Just as 'splitPath', but don't add the trailing slashes to each element. +-- +-- > splitDirectories "/directory/file.ext" == ["/","directory","file.ext"] +-- > splitDirectories "test/file" == ["test","file"] +-- > splitDirectories "/test/file" == ["/","test","file"] +-- > Windows: splitDirectories "C:\\test\\file" == ["C:\\", "test", "file"] +-- > Valid x => joinPath (splitDirectories x) `equalFilePath` x +-- > splitDirectories "" == [] +-- > Windows: splitDirectories "C:\\test\\\\\\file" == ["C:\\", "test", "file"] +-- > splitDirectories "/test///file" == ["/","test","file"] +splitDirectories :: FILEPATH -> [FILEPATH] +splitDirectories = L.map dropTrailingPathSeparator . splitPath + + +-- | Join path elements back together. +-- +-- > joinPath z == foldr (</>) "" z +-- > joinPath ["/","directory/","file.ext"] == "/directory/file.ext" +-- > Valid x => joinPath (splitPath x) == x +-- > joinPath [] == "" +-- > Posix: joinPath ["test","file","path"] == "test/file/path" +joinPath :: [FILEPATH] -> FILEPATH +-- Note that this definition on c:\\c:\\, join then split will give c:\\.
+joinPath = P.foldr combine mempty + + + + + + +--------------------------------------------------------------------- +-- File name manipulators + +-- | Equality of two 'FILEPATH's. +-- If you call @System.Directory.canonicalizePath@ +-- first this has a much better chance of working. +-- Note that this doesn't follow symlinks or DOSNAM~1s. +-- +-- Similar to 'normalise', this does not expand @".."@, because of symlinks. +-- +-- > x == y ==> equalFilePath x y +-- > normalise x == normalise y ==> equalFilePath x y +-- > equalFilePath "foo" "foo/" +-- > not (equalFilePath "/a/../c" "/c") +-- > not (equalFilePath "foo" "/foo") +-- > Posix: not (equalFilePath "foo" "FOO") +-- > Windows: equalFilePath "foo" "FOO" +-- > Windows: not (equalFilePath "C:" "C:/") +equalFilePath :: FILEPATH -> FILEPATH -> Bool +equalFilePath a b = f a == f b + where + f x | isWindows = dropTrailingPathSeparator $ map toLower $ normalise x + | otherwise = dropTrailingPathSeparator $ normalise x + + +-- | Contract a filename, based on a relative path. Note that the resulting path +-- will never introduce @..@ paths, as the presence of symlinks means @..\/b@ +-- may not reach @a\/b@ if it starts from @a\/c@. For a worked example see +-- . +-- +-- The corresponding @makeAbsolute@ function can be found in +-- @System.Directory@. +-- +-- > makeRelative "/directory" "/directory/file.ext" == "file.ext" +-- > Valid x => makeRelative (takeDirectory x) x `equalFilePath` takeFileName x +-- > makeRelative x x == "." 
+-- > Valid x y => equalFilePath x y || (isRelative x && makeRelative y x == x) || equalFilePath (y makeRelative y x) x +-- > Windows: makeRelative "C:\\Home" "c:\\home\\bob" == "bob" +-- > Windows: makeRelative "C:\\Home" "c:/home/bob" == "bob" +-- > Windows: makeRelative "C:\\Home" "D:\\Home\\Bob" == "D:\\Home\\Bob" +-- > Windows: makeRelative "C:\\Home" "C:Home\\Bob" == "C:Home\\Bob" +-- > Windows: makeRelative "/Home" "/home/bob" == "bob" +-- > Windows: makeRelative "/" "//" == "//" +-- > Posix: makeRelative "/Home" "/home/bob" == "/home/bob" +-- > Posix: makeRelative "/home/" "/home/bob/foo/bar" == "bob/foo/bar" +-- > Posix: makeRelative "/fred" "bob" == "bob" +-- > Posix: makeRelative "/file/test" "/file/test/fred" == "fred" +-- > Posix: makeRelative "/file/test" "/file/test/fred/" == "fred/" +-- > Posix: makeRelative "some/path" "some/path/a/b/c" == "a/b/c" +makeRelative :: FILEPATH -> FILEPATH -> FILEPATH +makeRelative root path + | equalFilePath root path = singleton _period + | takeAbs root /= takeAbs path = path + | otherwise = f (dropAbs root) (dropAbs path) + where + f x y + | null x = dropWhile isPathSeparator y + | otherwise = let (x1,x2) = g x + (y1,y2) = g y + in if equalFilePath x1 y1 then f x2 y2 else path + g x = (dropWhile isPathSeparator a, dropWhile isPathSeparator b) + where (a, b) = break isPathSeparator $ dropWhile isPathSeparator x + + -- on windows, need to drop '/' which is kind of absolute, but not a drive + dropAbs x + | Just (hd, tl) <- uncons x + , isPathSeparator hd + , not (hasDrive x) + = tl + | otherwise + = dropDrive x + + takeAbs x + | Just (hd, _) <- uncons x + , isPathSeparator hd + , not (hasDrive x) + = singleton pathSeparator + | otherwise + = map (\y -> if isPathSeparator y then pathSeparator else toLower y) $ takeDrive x + +-- | Normalise a file +-- +-- * \/\/ outside of the drive can be made blank +-- +-- * \/ -> 'pathSeparator' +-- +-- * .\/ -> \"\" +-- +-- Does not remove @".."@, because of symlinks. 
+-- +-- > Posix: normalise "/file/\\test////" == "/file/\\test/" +-- > Posix: normalise "/file/./test" == "/file/test" +-- > Posix: normalise "/test/file/../bob/fred/" == "/test/file/../bob/fred/" +-- > Posix: normalise "../bob/fred/" == "../bob/fred/" +-- > Posix: normalise "/a/../c" == "/a/../c" +-- > Posix: normalise "./bob/fred/" == "bob/fred/" +-- > Windows: normalise "c:\\file/bob\\" == "C:\\file\\bob\\" +-- > Windows: normalise "c:\\" == "C:\\" +-- > Windows: normalise "c:\\\\\\\\" == "C:\\" +-- > Windows: normalise "C:.\\" == "C:" +-- > Windows: normalise "\\\\server\\test" == "\\\\server\\test" +-- > Windows: normalise "//server/test" == "\\\\server\\test" +-- > Windows: normalise "c:/file" == "C:\\file" +-- > Windows: normalise "/file" == "\\file" +-- > Windows: normalise "\\" == "\\" +-- > Windows: normalise "/./" == "\\" +-- > normalise "." == "." +-- > Posix: normalise "./" == "./" +-- > Posix: normalise "./." == "./" +-- > Posix: normalise "/./" == "/" +-- > Posix: normalise "/" == "/" +-- > Posix: normalise "bob/fred/." == "bob/fred/" +-- > Posix: normalise "//home" == "/home" +normalise :: FILEPATH -> FILEPATH +normalise filepath = + result <> + (if addPathSeparator + then singleton pathSeparator + else mempty) + where + (drv,pth) = splitDrive filepath + + result = joinDrive' (normaliseDrive drv) (f pth) + + joinDrive' d p + = if null d && null p + then singleton _period + else joinDrive d p + + addPathSeparator = isDirPath pth + && not (hasTrailingPathSeparator result) + && not (isRelativeDrive drv) + + isDirPath xs = hasTrailingPathSeparator xs + || not (null xs) && last xs == _period + && hasTrailingPathSeparator (init xs) + + f = joinPath . dropDots . propSep . 
splitDirectories + + propSep (x:xs) + | all isPathSeparator x = singleton pathSeparator : xs + | otherwise = x : xs + propSep [] = [] + + dropDots = L.filter (singleton _period /=) + +normaliseDrive :: FILEPATH -> FILEPATH +normaliseDrive bs + | null bs = mempty + | isPosix = pack [pathSeparator] + | Just (drv, _) <- readDriveLetter x2 + = case unpack drv of + (x:_:[]) -> pack [toUpper x, _colon] + (x:_) -> pack [toUpper x, _colon, pathSeparator] + _ -> P.error "impossible" + | otherwise = x2 + where + x2 = map repSlash bs + repSlash x = if isPathSeparator x then pathSeparator else x + +-- Information for validity functions on Windows. See [1]. +isBadCHARacter :: CHAR -> Bool +isBadCHARacter x = x >= _nul && x <= _US + || x `L.elem` + [ _less + , _greater + , _colon + , _quotedbl + , _bar + , _question + , _asterisk + ] + +badElements :: [FILEPATH] +badElements = fmap fromString + ["CON","PRN","AUX","NUL","CLOCK$" + ,"COM1","COM2","COM3","COM4","COM5","COM6","COM7","COM8","COM9" + ,"LPT1","LPT2","LPT3","LPT4","LPT5","LPT6","LPT7","LPT8","LPT9"] + + +-- | Is a FILEPATH valid, i.e. could you create a file like it? This function checks for invalid names, +-- and invalid characters, but does not check if length limits are exceeded, as these are typically +-- filesystem dependent. 
+-- +-- > isValid "" == False +-- > isValid "\0" == False +-- > Posix: isValid "/random_ path:*" == True +-- > Posix: isValid x == not (null x) +-- > Windows: isValid "c:\\test" == True +-- > Windows: isValid "c:\\test:of_test" == False +-- > Windows: isValid "test*" == False +-- > Windows: isValid "c:\\test\\nul" == False +-- > Windows: isValid "c:\\test\\prn.txt" == False +-- > Windows: isValid "c:\\nul\\file" == False +-- > Windows: isValid "\\\\" == False +-- > Windows: isValid "\\\\\\foo" == False +-- > Windows: isValid "\\\\?\\D:file" == False +-- > Windows: isValid "foo\tbar" == False +-- > Windows: isValid "nul .txt" == False +-- > Windows: isValid " nul.txt" == True +isValid :: FILEPATH -> Bool +isValid path + | null path = False + | _nul `elem` path = False + | isPosix = True + | otherwise = + not (any isBadCHARacter x2) && + not (L.any f $ splitDirectories x2) && + not (isJust (readDriveShare x1) && all isPathSeparator x1) && + not (isJust (readDriveUNC x1) && not (hasTrailingPathSeparator x1)) + where + (x1,x2) = splitDrive path + f x = map toUpper (dropWhileEnd (== _space) $ dropExtensions x) `L.elem` badElements + + +-- | Take a FILEPATH and make it valid; does not change already valid FILEPATHs. 
+-- +-- > isValid (makeValid x) +-- > isValid x ==> makeValid x == x +-- > makeValid "" == "_" +-- > makeValid "file\0name" == "file_name" +-- > Windows: makeValid "c:\\already\\/valid" == "c:\\already\\/valid" +-- > Windows: makeValid "c:\\test:of_test" == "c:\\test_of_test" +-- > Windows: makeValid "test*" == "test_" +-- > Windows: makeValid "c:\\test\\nul" == "c:\\test\\nul_" +-- > Windows: makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt" +-- > Windows: makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt" +-- > Windows: makeValid "c:\\nul\\file" == "c:\\nul_\\file" +-- > Windows: makeValid "\\\\\\foo" == "\\\\drive" +-- > Windows: makeValid "\\\\?\\D:file" == "\\\\?\\D:\\file" +-- > Windows: makeValid "nul .txt" == "nul _.txt" +makeValid :: FILEPATH -> FILEPATH +makeValid path + | null path = singleton _underscore + | isPosix = map (\x -> if x == _nul then _underscore else x) path + | isJust (readDriveShare drv) && all isPathSeparator drv = take 2 drv <> fromString "drive" + | isJust (readDriveUNC drv) && not (hasTrailingPathSeparator drv) = + makeValid (drv <> (pathSeparator `cons` pth)) + | otherwise = joinDrive drv $ validElements $ validCHARs pth + + where + (drv,pth) = splitDrive path + + validCHARs = map f + f x = if isBadCHARacter x then _underscore else x + + validElements = joinPath . fmap g . splitPath + g x = h a <> b + where (a,b) = break isPathSeparator x + h x = if map toUpper (dropWhileEnd (== _space) a) `L.elem` badElements then snoc a _underscore <.> b else x + where (a,b) = splitExtensions x + + +-- | Is a path relative, or is it fixed to the root? 
+-- +-- > Windows: isRelative "path\\test" == True +-- > Windows: isRelative "c:\\test" == False +-- > Windows: isRelative "c:test" == True +-- > Windows: isRelative "c:\\" == False +-- > Windows: isRelative "c:/" == False +-- > Windows: isRelative "c:" == True +-- > Windows: isRelative "\\\\foo" == False +-- > Windows: isRelative "\\\\?\\foo" == False +-- > Windows: isRelative "\\\\?\\UNC\\foo" == False +-- > Windows: isRelative "/foo" == True +-- > Windows: isRelative "\\foo" == True +-- > Posix: isRelative "test/path" == True +-- > Posix: isRelative "/test" == False +-- > Posix: isRelative "/" == False +-- +-- According to [1]: +-- +-- * "A UNC name of any format [is never relative]." +-- +-- * "You cannot use the "\\?\" prefix with a relative path." +isRelative :: FILEPATH -> Bool +isRelative x = null drive || isRelativeDrive drive + where drive = takeDrive x + + +{- c:foo -} +-- From [1]: "If a file name begins with only a disk designator but not the +-- backslash after the colon, it is interpreted as a relative path to the +-- current directory on the drive with the specified letter." +isRelativeDrive :: STRING -> Bool +isRelativeDrive x = + maybe False (not . hasTrailingPathSeparator . fst) (readDriveLetter x) + + +-- | @not . 'isRelative'@ +-- +-- > isAbsolute x == not (isRelative x) +isAbsolute :: FILEPATH -> Bool +isAbsolute = not . isRelative + +#ifndef OS_PATH + +----------------------------------------------------------------------------- +-- spanEnd (>2) [1,2,3,4,1,2,3,4] = ([1,2,3,4,1,2], [3,4]) +spanEnd :: (a -> Bool) -> [a] -> ([a], [a]) +spanEnd p = L.foldr (\x (pref, suff) -> if null pref && p x then (pref, x : suff) else (x : pref, suff)) ([], []) + +-- breakEnd (< 2) [1,2,3,4,1,2,3,4] == ([1,2,3,4,1],[2,3,4]) +breakEnd :: (a -> Bool) -> [a] -> ([a], [a]) +breakEnd p = spanEnd (not . p) + +-- | The stripSuffix function drops the given suffix from a list. 
It returns +-- Nothing if the list did not end with the suffix given, or Just the list +-- before the suffix, if it does. +stripSuffix :: Eq a => [a] -> [a] -> Maybe [a] +stripSuffix xs ys = reverse P.<$> stripPrefix (reverse xs) (reverse ys) + +cons :: a -> [a] -> [a] +cons = (:) + +unsnoc :: [a] -> Maybe ([a], a) +unsnoc = L.foldr (\x -> Just . maybe ([], x) (first (x :))) Nothing + +uncons2 :: [a] -> Maybe (a, a, [a]) +uncons2 [] = Nothing +uncons2 [_] = Nothing +uncons2 (x : y : zs) = Just (x, y, zs) + +_period, _quotedbl, _backslash, _slash, _question, _U, _N, _C, _colon, _semicolon, _US, _less, _greater, _bar, _asterisk, _nul, _space, _underscore :: Char +_period = '.' +_quotedbl = '"' +_slash = '/' +_backslash = '\\' +_question = '?' +_colon = ':' +_semicolon = ';' +_U = 'U' +_N = 'N' +_C = 'C' +_US = '\US' +_less = '<' +_greater = '>' +_bar = '|' +_asterisk = '*' +_nul = '\NUL' +_space = ' ' +_underscore = '_' + +singleton :: Char -> String +singleton c = [c] + +pack :: String -> String +pack = id + + +unpack :: String -> String +unpack = id + + +snoc :: String -> Char -> String +{- HLINT ignore "Redundant lambda" -} +snoc str = \c -> str <> [c] + +#else +-- | Like 'try', but rethrows async exceptions. +trySafe :: Exception e => IO a -> IO (Either e a) +trySafe ioA = catch action eHandler + where + action = do + v <- ioA + return (Right v) + eHandler e + | isAsyncException e = throwIO e + | otherwise = return (Left e) + +isAsyncException :: Exception e => e -> Bool +isAsyncException e = + case fromException (toException e) of + Just (SomeAsyncException _) -> True + Nothing -> False +#ifdef WINDOWS +fromString :: P.String -> STRING +fromString str = P.either (P.error . P.show) P.id $ unsafePerformIO $ do + r <- trySafe @SomeException $ GHC.withCStringLen (mkUTF16le ErrorOnCodingFailure) str $ \cstr -> packCStringLen cstr + evaluate $ force $ first displayException r +#else +fromString :: P.String -> STRING +fromString str = P.either (P.error . 
P.show) P.id $ unsafePerformIO $ do + r <- trySafe @SomeException $ GHC.withCStringLen (mkUTF8 ErrorOnCodingFailure) str $ \cstr -> packCStringLen cstr + evaluate $ force $ first displayException r +#endif + +_a, _z, _A, _Z, _period, _quotedbl, _backslash, _slash, _question, _U, _N, _C, _colon, _semicolon, _US, _less, _greater, _bar, _asterisk, _nul, _space, _underscore :: CHAR +_a = 0x61 +_z = 0x7a +_A = 0x41 +_Z = 0x5a +_period = 0x2e +_quotedbl = 0x22 +_slash = 0x2f +_backslash = 0x5c +_question = 0x3f +_colon = 0x3a +_semicolon = 0x3b +_U = 0x55 +_N = 0x4e +_C = 0x43 +_US = 0x1f +_less = 0x3c +_greater = 0x3e +_bar = 0x7c +_asterisk = 0x2a +_nul = 0x00 +_space = 0x20 +_underscore = 0x5f + +isAsciiUpper :: CHAR -> Bool +isAsciiUpper w = _A <= w && w <= _Z + +isAsciiLower :: CHAR -> Bool +isAsciiLower w = _a <= w && w <= _z + +---------------------------------------------------------------- + +toUpper :: CHAR -> CHAR +-- charToWord16 should be safe here, since C.toUpper doesn't go beyond Word16 maxbound +toUpper = charToWord . C.toUpper . wordToChar + +toLower :: CHAR -> CHAR +-- charToWord16 should be safe here, since C.toLower doesn't go beyond Word16 maxbound +toLower = charToWord . C.toLower . wordToChar + + +-- | Total conversion to char. +wordToChar :: CHAR -> Char +wordToChar = C.chr . fromIntegral + +-- | This is unsafe and clamps at Word16 maxbound. +charToWord :: Char -> CHAR +charToWord = fromIntegral . 
C.ord + +#endif diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath/Posix.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath/Posix.hs new file mode 100644 index 0000000000..d07171f59d --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath/Posix.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE CPP #-} + +#undef WINDOWS +#define IS_WINDOWS False +#define MODULE_NAME Posix + +#include "Internal.hs" diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath/Windows.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath/Windows.hs new file mode 100644 index 0000000000..a53580cbaf --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/FilePath/Windows.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE CPP #-} + +#undef POSIX +#define WINDOWS +#define IS_WINDOWS True +#define MODULE_NAME Windows + +#include "Internal.hs" diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath.hs new file mode 100644 index 0000000000..ffbfd380aa --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath.hs @@ -0,0 +1,63 @@ +{-# LANGUAGE CPP #-} + +#define FILEPATH_NAME OsPath +#define OSSTRING_NAME OsString +#define WORD_NAME OsChar + +-- | +-- Module : System.OsPath +-- Copyright : © 2021 Julian Ospald +-- License : MIT +-- +-- Maintainer : Julian Ospald +-- Stability : experimental +-- Portability : portable +-- +-- An implementation of the , +-- which aims to supersede @type FilePath = String@ for various reasons: +-- +-- 1. it is more efficient and avoids memory fragmentation (uses unpinned 'ShortByteString' under the hood) +-- 2. it is more type-safe (newtype over 'ShortByteString') +-- 3. 
avoids round-tripping issues by not converting to String (which is not total and loses the encoding) +-- 4. abstracts over unix and windows while keeping the original bytes +-- +-- It is important to know that filenames\/filepaths have different representations across platforms: +-- +-- - On /Windows/, filepaths are expected to be encoded as UTF16-LE , but +-- may also include invalid surrogate pairs, in which case UCS-2 can be used. They are passed as @wchar_t*@ to syscalls. +-- 'OsPath' only maintains the wide character invariant. +-- - On /Unix/, filepaths don't have a predefined encoding (although they +-- are often interpreted as UTF8) as per the +-- +-- and are passed as @char[]@ to syscalls. 'OsPath' maintains no invariant +-- here. +-- +-- Apart from encoding, filepaths have additional restrictions per platform: +-- +-- - On /Windows/ the may apply +-- - On /Unix/, only @NUL@ bytes are disallowed as per the +-- +-- Use 'isValid' to check for these restrictions ('OsPath' doesn't +-- maintain this invariant). +-- +-- Also note that these restrictions are +-- not exhaustive and further filesystem specific restrictions may apply on +-- all platforms. This library makes no attempt at satisfying these. +-- Library users may need to account for that, depending +-- on what filesystems they want to support. +-- +-- It is advised to follow these principles when dealing with filepaths\/filenames: +-- +-- 1. Avoid interpreting filenames that the OS returns, unless absolutely necessary. +-- For example, the filepath separator is usually a predefined 'Word8'/'Word16', regardless of encoding. +-- So even if we need to split filepaths, it might still not be necessary to understand the encoding +-- of the filename. +-- 2. When interpreting OS returned filenames consider that these might not be UTF8 on /unix/ +-- or at worst don't have an ASCII compatible encoding. 
There are 3 available strategies for decoding/encoding: +-- a) pick the best UTF (UTF-8 on unix, UTF-16LE on windows), b) decode with an explicitly defined 'TextEncoding', +-- c) mimic the behavior of the @base@ library (permissive UTF16 on windows, current filesystem encoding on unix). +-- 3. Avoid comparing @String@ based filepaths, because filenames of different encodings +-- may have the same @String@ representation, although they're not the same byte-wise. + + +#include "OsPath/Common.hs" diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath.hs-boot b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath.hs-boot new file mode 100644 index 0000000000..85a7c8c3c0 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath.hs-boot @@ -0,0 +1,6 @@ +module System.OsPath where + +import System.OsPath.Types + ( OsPath ) + +isValid :: OsPath -> Bool diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Common.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Common.hs new file mode 100644 index 0000000000..5058b0e195 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Common.hs @@ -0,0 +1,1469 @@ +{-# LANGUAGE TypeApplications #-} +-- This template expects CPP definitions for: +-- +-- WINDOWS defined? = no | yes | no +-- POSIX defined? = yes | no | no +-- +-- FILEPATH_NAME = PosixPath | WindowsPath | OsPath +-- OSSTRING_NAME = PosixString | WindowsString | OsString +-- WORD_NAME = PosixChar | WindowsChar | OsChar + +-- For (native) abstract file paths we document both platforms, so people can +-- understand how their code is compiled no matter what. But for the +-- platform-specific types we only want to document the behavior on that +-- platform.
+#if defined(WINDOWS) +#define WINDOWS_DOC +#elif defined(POSIX) +#define POSIX_DOC +#endif + +#ifdef WINDOWS +module System.OsPath.Windows +#elif defined(POSIX) +module System.OsPath.Posix +#else +module System.OsPath +#endif + ( + -- * Types +#ifdef WINDOWS + WindowsString + , WindowsChar + , WindowsPath +#elif defined(POSIX) + PosixString + , PosixChar + , PosixPath +#else + OsPath + , OsString + , OsChar +#endif + -- * Filepath construction + , PS.encodeUtf + , PS.unsafeEncodeUtf + , PS.encodeWith + , encodeFS +#if defined(WINDOWS) || defined(POSIX) + , pstr +#else + , osp +#endif + , PS.pack + + -- * Filepath deconstruction + , PS.decodeUtf + , PS.decodeWith + , decodeFS + , PS.unpack + + -- * Word construction + , unsafeFromChar + + -- * Word deconstruction + , toChar + + -- * Separator predicates + , pathSeparator + , pathSeparators + , isPathSeparator + , searchPathSeparator + , isSearchPathSeparator + , extSeparator + , isExtSeparator + + -- * $PATH methods + , splitSearchPath, + + -- * Extension functions + splitExtension, + takeExtension, replaceExtension, (-<.>), dropExtension, addExtension, hasExtension, (<.>), + splitExtensions, dropExtensions, takeExtensions, replaceExtensions, isExtensionOf, + stripExtension, + + -- * Filename\/directory functions + splitFileName, + takeFileName, replaceFileName, dropFileName, + takeBaseName, replaceBaseName, + takeDirectory, replaceDirectory, + combine, (), + splitPath, joinPath, splitDirectories, + + -- * Drive functions + splitDrive, joinDrive, + takeDrive, hasDrive, dropDrive, isDrive, + + -- * Trailing slash functions + hasTrailingPathSeparator, + addTrailingPathSeparator, + dropTrailingPathSeparator, + + -- * File name manipulations + normalise, equalFilePath, + makeRelative, + isRelative, isAbsolute, + isValid, makeValid + ) +where + + +#ifdef WINDOWS +import System.OsPath.Types +import System.OsString.Windows as PS + ( unsafeFromChar + , toChar + , decodeUtf + , decodeWith + , pack + , encodeUtf + , 
unsafeEncodeUtf + , encodeWith + , unpack + ) +import Data.Bifunctor ( bimap ) +import qualified System.OsPath.Windows.Internal as C +import GHC.IO.Encoding.UTF16 ( mkUTF16le ) +import Language.Haskell.TH.Quote + ( QuasiQuoter (..) ) +import Language.Haskell.TH.Syntax + ( Lift (..), lift ) +import GHC.IO.Encoding.Failure ( CodingFailureMode(..) ) +import Control.Monad ( when ) + +#elif defined(POSIX) +import GHC.IO.Encoding.Failure ( CodingFailureMode(..) ) +import Control.Monad ( when ) +import Language.Haskell.TH.Quote + ( QuasiQuoter (..) ) +import Language.Haskell.TH.Syntax + ( Lift (..), lift ) + +import GHC.IO.Encoding.UTF8 ( mkUTF8 ) +import System.OsPath.Types +import System.OsString.Posix as PS + ( unsafeFromChar + , toChar + , decodeUtf + , decodeWith + , pack + , encodeUtf + , unsafeEncodeUtf + , encodeWith + , unpack + ) +import Data.Bifunctor ( bimap ) +import qualified System.OsPath.Posix.Internal as C + +#else + +import System.OsPath.Internal as PS + ( osp + , decodeUtf + , decodeWith + , pack + , encodeUtf + , unsafeEncodeUtf + , encodeWith + , unpack + ) +import System.OsPath.Types + ( OsPath ) +import System.OsString ( unsafeFromChar, toChar ) + +#if defined(mingw32_HOST_OS) || defined(__MINGW32__) +import qualified System.OsPath.Windows as C +#else +import qualified System.OsPath.Posix as C +#endif + +import Data.Bifunctor + ( bimap ) +#endif +import System.OsString.Internal.Types +import System.OsString.Encoding.Internal + + +------------------------ +-- Separator predicates + + +#ifdef WINDOWS_DOC +-- | The character that separates directories. In the case where more than +-- one character is possible, 'pathSeparator' is the \'ideal\' one. +-- +-- > pathSeparator == '\\' +#elif defined(POSIX_DOC) +-- | The character that separates directories. In the case where more than +-- one character is possible, 'pathSeparator' is the \'ideal\' one. +-- +-- > pathSeparator == '/' +#else +-- | The character that separates directories.
In the case where more than +-- one character is possible, 'pathSeparator' is the \'ideal\' one. +-- +-- > Windows: pathSeparator == '\\' +-- > Posix: pathSeparator == '/' +#endif +pathSeparator :: WORD_NAME +pathSeparator = WORD_NAME C.pathSeparator + +#ifdef WINDOWS_DOC +-- | The list of all possible separators. +-- +-- > pathSeparators == ['\\', '/'] +-- > pathSeparator `elem` pathSeparators +#elif defined(POSIX_DOC) +-- | The list of all possible separators. +-- +-- > pathSeparators == ['/'] +-- > pathSeparator `elem` pathSeparators +#else +-- | The list of all possible separators. +-- +-- > Windows: pathSeparators == ['\\', '/'] +-- > Posix: pathSeparators == ['/'] +-- > pathSeparator `elem` pathSeparators +#endif +pathSeparators :: [WORD_NAME] +pathSeparators = WORD_NAME <$> C.pathSeparators + +-- | Rather than using @(== 'pathSeparator')@, use this. Test if something +-- is a path separator. +-- +-- > isPathSeparator a == (a `elem` pathSeparators) +isPathSeparator :: WORD_NAME -> Bool +isPathSeparator (WORD_NAME w) = C.isPathSeparator w + +#ifdef WINDOWS_DOC +-- | The character that is used to separate the entries in the $PATH environment variable. +-- +-- > searchPathSeparator == ';' +#elif defined(POSIX_DOC) +-- | The character that is used to separate the entries in the $PATH environment variable. +-- +-- > searchPathSeparator == ':' +#else +-- | The character that is used to separate the entries in the $PATH environment variable. +-- +-- > Posix: searchPathSeparator == ':' +-- > Windows: searchPathSeparator == ';' +#endif +searchPathSeparator :: WORD_NAME +searchPathSeparator = WORD_NAME C.searchPathSeparator + +-- | Is the character a file separator? +-- +-- > isSearchPathSeparator a == (a == searchPathSeparator) +isSearchPathSeparator :: WORD_NAME -> Bool +isSearchPathSeparator (WORD_NAME w) = C.isSearchPathSeparator w + + +-- | File extension character +-- +-- > extSeparator == '.'
+extSeparator :: WORD_NAME +extSeparator = WORD_NAME C.extSeparator + + +-- | Is the character an extension character? +-- +-- > isExtSeparator a == (a == extSeparator) +isExtSeparator :: WORD_NAME -> Bool +isExtSeparator (WORD_NAME w) = C.isExtSeparator w + + +--------------------------------------------------------------------- +-- Path methods (environment $PATH) + +#ifdef WINDOWS_DOC +-- | Take a string, split it on the 'searchPathSeparator' character. +-- +-- Blank items are ignored and path elements are stripped of quotes. +-- +-- > splitSearchPath "File1;File2;File3" == ["File1","File2","File3"] +-- > splitSearchPath "File1;;File2;File3" == ["File1","File2","File3"] +-- > splitSearchPath "File1;\"File2\";File3" == ["File1","File2","File3"] +#elif defined(POSIX_DOC) +-- | Take a string, split it on the 'searchPathSeparator' character. +-- +-- Blank items are converted to @.@ on , and quotes are not +-- treated specially. +-- +-- Follows the recommendations in +-- +-- +-- > splitSearchPath "File1:File2:File3" == ["File1","File2","File3"] +-- > splitSearchPath "File1::File2:File3" == ["File1",".","File2","File3"] +#else +-- | Take a string, split it on the 'searchPathSeparator' character. +-- +-- On Windows, blank items are ignored on Windows, and path elements are +-- stripped of quotes. +-- +-- On Posix, blank items are converted to @.@ on Posix, and quotes are not +-- treated specially. +-- +-- Follows the recommendations in +-- +-- +-- > Windows: splitSearchPath "File1;File2;File3" == ["File1","File2","File3"] +-- > Windows: splitSearchPath "File1;;File2;File3" == ["File1","File2","File3"] +-- > Windows: splitSearchPath "File1;\"File2\";File3" == ["File1","File2","File3"] +-- > Posix: splitSearchPath "File1:File2:File3" == ["File1","File2","File3"] +-- > Posix: splitSearchPath "File1::File2:File3" == ["File1",".","File2","File3"] +#endif +splitSearchPath :: OSSTRING_NAME -> [FILEPATH_NAME] +splitSearchPath (OSSTRING_NAME x) = fmap OSSTRING_NAME . 
C.splitSearchPath $ x + + + +------------------------ +-- Extension functions + +-- | Split on the extension. 'addExtension' is the inverse. +-- +-- > splitExtension "/directory/path.ext" == ("/directory/path",".ext") +-- > uncurry (<>) (splitExtension x) == x +-- > Valid x => uncurry addExtension (splitExtension x) == x +-- > splitExtension "file.txt" == ("file",".txt") +-- > splitExtension "file" == ("file","") +-- > splitExtension "file/file.txt" == ("file/file",".txt") +-- > splitExtension "file.txt/boris" == ("file.txt/boris","") +-- > splitExtension "file.txt/boris.ext" == ("file.txt/boris",".ext") +-- > splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob",".fred") +-- > splitExtension "file/path.txt/" == ("file/path.txt/","") +splitExtension :: FILEPATH_NAME -> (FILEPATH_NAME, OSSTRING_NAME) +splitExtension (OSSTRING_NAME x) = bimap OSSTRING_NAME OSSTRING_NAME $ C.splitExtension x + + +-- | Get the extension of a file, returns @\"\"@ for no extension, @.ext@ otherwise. +-- +-- > takeExtension "/directory/path.ext" == ".ext" +-- > takeExtension x == snd (splitExtension x) +-- > Valid x => takeExtension (addExtension x "ext") == ".ext" +-- > Valid x => takeExtension (replaceExtension x "ext") == ".ext" +takeExtension :: FILEPATH_NAME -> OSSTRING_NAME +takeExtension (OSSTRING_NAME x) = OSSTRING_NAME $ C.takeExtension x + + +-- | Remove the current extension and add another, equivalent to 'replaceExtension'. +-- +-- > "/directory/path.txt" -<.> "ext" == "/directory/path.ext" +-- > "/directory/path.txt" -<.> ".ext" == "/directory/path.ext" +-- > "foo.o" -<.> "c" == "foo.c" +(-<.>) :: FILEPATH_NAME -> OSSTRING_NAME -> FILEPATH_NAME +(-<.>) = replaceExtension + +-- | Set the extension of a file, overwriting one if already present, equivalent to '-<.>'. 
+-- +-- > replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext" +-- > replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext" +-- > replaceExtension "file.txt" ".bob" == "file.bob" +-- > replaceExtension "file.txt" "bob" == "file.bob" +-- > replaceExtension "file" ".bob" == "file.bob" +-- > replaceExtension "file.txt" "" == "file" +-- > replaceExtension "file.fred.bob" "txt" == "file.fred.txt" +-- > replaceExtension x y == addExtension (dropExtension x) y +replaceExtension :: FILEPATH_NAME -> OSSTRING_NAME -> FILEPATH_NAME +replaceExtension (OSSTRING_NAME path) (OSSTRING_NAME ext) = OSSTRING_NAME (C.replaceExtension path ext) + + +-- | Add an extension, even if there is already one there, equivalent to 'addExtension'. +-- +-- > "/directory/path" <.> "ext" == "/directory/path.ext" +-- > "/directory/path" <.> ".ext" == "/directory/path.ext" +(<.>) :: FILEPATH_NAME -> OSSTRING_NAME -> FILEPATH_NAME +(<.>) = addExtension + +-- | Remove last extension, and the \".\" preceding it. +-- +-- > dropExtension "/directory/path.ext" == "/directory/path" +-- > dropExtension x == fst (splitExtension x) +dropExtension :: FILEPATH_NAME -> FILEPATH_NAME +dropExtension (OSSTRING_NAME x) = OSSTRING_NAME $ C.dropExtension x + + +-- | Add an extension, even if there is already one there, equivalent to '<.>'. +-- +-- > addExtension "/directory/path" "ext" == "/directory/path.ext" +-- > addExtension "file.txt" "bib" == "file.txt.bib" +-- > addExtension "file." ".bib" == "file..bib" +-- > addExtension "file" ".bib" == "file.bib" +-- > addExtension "/" "x" == "/.x" +-- > addExtension x "" == x +-- > Valid x => takeFileName (addExtension (addTrailingPathSeparator x) "ext") == ".ext" +-- > Windows: addExtension "\\\\share" ".txt" == "\\\\share\\.txt" + +#ifdef WINDOWS_DOC +-- | Add an extension, even if there is already one there, equivalent to '<.>'. 
+-- +-- > addExtension "/directory/path" "ext" == "/directory/path.ext" +-- > addExtension "file.txt" "bib" == "file.txt.bib" +-- > addExtension "file." ".bib" == "file..bib" +-- > addExtension "file" ".bib" == "file.bib" +-- > addExtension "/" "x" == "/.x" +-- > addExtension x "" == x +-- > Valid x => takeFileName (addExtension (addTrailingPathSeparator x) "ext") == ".ext" +-- > addExtension "\\\\share" ".txt" == "\\\\share\\.txt" +#elif defined(POSIX_DOC) +-- | Add an extension, even if there is already one there, equivalent to '<.>'. +-- +-- > addExtension "/directory/path" "ext" == "/directory/path.ext" +-- > addExtension "file.txt" "bib" == "file.txt.bib" +-- > addExtension "file." ".bib" == "file..bib" +-- > addExtension "file" ".bib" == "file.bib" +-- > addExtension "/" "x" == "/.x" +-- > addExtension x "" == x +-- > Valid x => takeFileName (addExtension (addTrailingPathSeparator x) "ext") == ".ext" +#else +-- | Add an extension, even if there is already one there, equivalent to '<.>'. +-- +-- > addExtension "/directory/path" "ext" == "/directory/path.ext" +-- > addExtension "file.txt" "bib" == "file.txt.bib" +-- > addExtension "file." ".bib" == "file..bib" +-- > addExtension "file" ".bib" == "file.bib" +-- > addExtension "/" "x" == "/.x" +-- > addExtension x "" == x +-- > Valid x => takeFileName (addExtension (addTrailingPathSeparator x) "ext") == ".ext" +-- > Windows: addExtension "\\\\share" ".txt" == "\\\\share\\.txt" +#endif +addExtension :: FILEPATH_NAME -> OSSTRING_NAME -> FILEPATH_NAME +addExtension (OSSTRING_NAME bs) (OSSTRING_NAME ext) = OSSTRING_NAME $ C.addExtension bs ext + + +-- | Does the given filename have an extension? +-- +-- > hasExtension "/directory/path.ext" == True +-- > hasExtension "/directory/path" == False +-- > null (takeExtension x) == not (hasExtension x) +hasExtension :: FILEPATH_NAME -> Bool +hasExtension (OSSTRING_NAME x) = C.hasExtension x + +-- | Does the given filename have the specified extension? 
+-- +-- > "png" `isExtensionOf` "/directory/file.png" == True +-- > ".png" `isExtensionOf` "/directory/file.png" == True +-- > ".tar.gz" `isExtensionOf` "bar/foo.tar.gz" == True +-- > "ar.gz" `isExtensionOf` "bar/foo.tar.gz" == False +-- > "png" `isExtensionOf` "/directory/file.png.jpg" == False +-- > "csv/table.csv" `isExtensionOf` "/data/csv/table.csv" == False +isExtensionOf :: OSSTRING_NAME -> FILEPATH_NAME -> Bool +isExtensionOf (OSSTRING_NAME x) (OSSTRING_NAME y) = C.isExtensionOf x y + +-- | Drop the given extension from a filepath, and the @\".\"@ preceding it. +-- Returns 'Nothing' if the filepath does not have the given extension, or +-- 'Just' and the part before the extension if it does. +-- +-- This function can be more predictable than 'dropExtensions', especially if the filename +-- might itself contain @.@ characters. +-- +-- > stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x" +-- > stripExtension "hi.o" "foo.x.hs.o" == Nothing +-- > dropExtension x == fromJust (stripExtension (takeExtension x) x) +-- > dropExtensions x == fromJust (stripExtension (takeExtensions x) x) +-- > stripExtension ".c.d" "a.b.c.d" == Just "a.b" +-- > stripExtension ".c.d" "a.b..c.d" == Just "a.b." +-- > stripExtension "baz" "foo.bar" == Nothing +-- > stripExtension "bar" "foobar" == Nothing +-- > stripExtension "" x == Just x +stripExtension :: OSSTRING_NAME -> FILEPATH_NAME -> Maybe FILEPATH_NAME +stripExtension (OSSTRING_NAME bs) (OSSTRING_NAME x) = OSSTRING_NAME <$> C.stripExtension bs x + +-- | Split on all extensions. 
+-- +-- > splitExtensions "/directory/path.ext" == ("/directory/path",".ext") +-- > splitExtensions "file.tar.gz" == ("file",".tar.gz") +-- > uncurry (<>) (splitExtensions x) == x +-- > Valid x => uncurry addExtension (splitExtensions x) == x +-- > splitExtensions "file.tar.gz" == ("file",".tar.gz") +splitExtensions :: FILEPATH_NAME -> (FILEPATH_NAME, OSSTRING_NAME) +splitExtensions (OSSTRING_NAME x) = bimap OSSTRING_NAME OSSTRING_NAME $ C.splitExtensions x + + +-- | Drop all extensions. +-- +-- > dropExtensions "/directory/path.ext" == "/directory/path" +-- > dropExtensions "file.tar.gz" == "file" +-- > not $ hasExtension $ dropExtensions x +-- > not $ any isExtSeparator $ takeFileName $ dropExtensions x +dropExtensions :: FILEPATH_NAME -> FILEPATH_NAME +dropExtensions (OSSTRING_NAME x) = OSSTRING_NAME $ C.dropExtensions x + + +-- | Get all extensions. +-- +-- > takeExtensions "/directory/path.ext" == ".ext" +-- > takeExtensions "file.tar.gz" == ".tar.gz" +takeExtensions :: FILEPATH_NAME -> OSSTRING_NAME +takeExtensions (OSSTRING_NAME x) = OSSTRING_NAME $ C.takeExtensions x + +-- | Replace all extensions of a file with a new extension. Note +-- that 'replaceExtension' and 'addExtension' both work for adding +-- multiple extensions, so only required when you need to drop +-- all extensions first. +-- +-- > replaceExtensions "file.fred.bob" "txt" == "file.txt" +-- > replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz" +replaceExtensions :: FILEPATH_NAME -> OSSTRING_NAME -> FILEPATH_NAME +replaceExtensions (OSSTRING_NAME x) (OSSTRING_NAME y) = OSSTRING_NAME $ C.replaceExtensions x y + + +------------------------ +-- Drive functions + + +#ifdef WINDOWS_DOC +-- | Split a path into a drive and a path. 
+-- +-- > uncurry (<>) (splitDrive x) == x +-- > splitDrive "file" == ("","file") +-- > splitDrive "c:/file" == ("c:/","file") +-- > splitDrive "c:\\file" == ("c:\\","file") +-- > splitDrive "\\\\shared\\test" == ("\\\\shared\\","test") +-- > splitDrive "\\\\shared" == ("\\\\shared","") +-- > splitDrive "\\\\?\\UNC\\shared\\file" == ("\\\\?\\UNC\\shared\\","file") +-- > splitDrive "\\\\?\\UNCshared\\file" == ("\\\\?\\","UNCshared\\file") +-- > splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\","file") +-- > splitDrive "/d" == ("","/d") +#elif defined(POSIX_DOC) +-- | Split a path into a drive and a path. +-- \/ is a Drive. +-- +-- > uncurry (<>) (splitDrive x) == x +-- > splitDrive "/test" == ("/","test") +-- > splitDrive "//test" == ("//","test") +-- > splitDrive "test/file" == ("","test/file") +-- > splitDrive "file" == ("","file") +#else +-- | Split a path into a drive and a path. +-- On Posix, \/ is a Drive. +-- +-- > uncurry (<>) (splitDrive x) == x +-- > Windows: splitDrive "file" == ("","file") +-- > Windows: splitDrive "c:/file" == ("c:/","file") +-- > Windows: splitDrive "c:\\file" == ("c:\\","file") +-- > Windows: splitDrive "\\\\shared\\test" == ("\\\\shared\\","test") +-- > Windows: splitDrive "\\\\shared" == ("\\\\shared","") +-- > Windows: splitDrive "\\\\?\\UNC\\shared\\file" == ("\\\\?\\UNC\\shared\\","file") +-- > Windows: splitDrive "\\\\?\\UNCshared\\file" == ("\\\\?\\","UNCshared\\file") +-- > Windows: splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\","file") +-- > Windows: splitDrive "/d" == ("","/d") +-- > Posix: splitDrive "/test" == ("/","test") +-- > Posix: splitDrive "//test" == ("//","test") +-- > Posix: splitDrive "test/file" == ("","test/file") +-- > Posix: splitDrive "file" == ("","file") +#endif +splitDrive :: FILEPATH_NAME -> (FILEPATH_NAME, FILEPATH_NAME) +splitDrive (OSSTRING_NAME p) = bimap OSSTRING_NAME OSSTRING_NAME $ C.splitDrive p + + +-- | Join a drive and the rest of the path. 
+-- +-- > Valid x => uncurry joinDrive (splitDrive x) == x +-- > Windows: joinDrive "C:" "foo" == "C:foo" +-- > Windows: joinDrive "C:\\" "bar" == "C:\\bar" +-- > Windows: joinDrive "\\\\share" "foo" == "\\\\share\\foo" +-- > Windows: joinDrive "/:" "foo" == "/:\\foo" + +#ifdef WINDOWS_DOC +-- | Join a drive and the rest of the path. +-- +-- > Valid x => uncurry joinDrive (splitDrive x) == x +-- > joinDrive "C:" "foo" == "C:foo" +-- > joinDrive "C:\\" "bar" == "C:\\bar" +-- > joinDrive "\\\\share" "foo" == "\\\\share\\foo" +-- > joinDrive "/:" "foo" == "/:\\foo" +#elif defined(POSIX_DOC) +-- | Join a drive and the rest of the path. +-- +-- > Valid x => uncurry joinDrive (splitDrive x) == x +#else +-- | Join a drive and the rest of the path. +-- +-- > Valid x => uncurry joinDrive (splitDrive x) == x +-- > Windows: joinDrive "C:" "foo" == "C:foo" +-- > Windows: joinDrive "C:\\" "bar" == "C:\\bar" +-- > Windows: joinDrive "\\\\share" "foo" == "\\\\share\\foo" +-- > Windows: joinDrive "/:" "foo" == "/:\\foo" +#endif +joinDrive :: FILEPATH_NAME -> FILEPATH_NAME -> FILEPATH_NAME +joinDrive (OSSTRING_NAME a) (OSSTRING_NAME b) = OSSTRING_NAME $ C.joinDrive a b + + +-- | Get the drive from a filepath. +-- +-- > takeDrive x == fst (splitDrive x) +takeDrive :: FILEPATH_NAME -> FILEPATH_NAME +takeDrive (OSSTRING_NAME x) = OSSTRING_NAME $ C.takeDrive x + + +-- | Delete the drive, if it exists. +-- +-- > dropDrive x == snd (splitDrive x) +dropDrive :: FILEPATH_NAME -> FILEPATH_NAME +dropDrive (OSSTRING_NAME x) = OSSTRING_NAME $ C.dropDrive x + + +#ifdef WINDOWS_DOC +-- | Does a path have a drive. +-- +-- > not (hasDrive x) == null (takeDrive x) +-- > hasDrive "C:\\foo" == True +-- > hasDrive "C:foo" == True +-- > hasDrive "foo" == False +-- > hasDrive "" == False +-- +#elif defined(POSIX_DOC) +-- | Does a path have a drive. 
+-- +-- > not (hasDrive x) == null (takeDrive x) +-- > hasDrive "/foo" == True +-- > hasDrive "foo" == False +-- > hasDrive "" == False +-- +#else +-- | Does a path have a drive. +-- +-- > not (hasDrive x) == null (takeDrive x) +-- > Posix: hasDrive "/foo" == True +-- > Windows: hasDrive "C:\\foo" == True +-- > Windows: hasDrive "C:foo" == True +-- > hasDrive "foo" == False +-- > hasDrive "" == False +-- +#endif +hasDrive :: FILEPATH_NAME -> Bool +hasDrive (OSSTRING_NAME x) = C.hasDrive x + + +#ifdef WINDOWS_DOC +-- | Is an element a drive +-- +-- > isDrive "C:\\" == True +-- > isDrive "C:\\foo" == False +-- > isDrive "" == False +#elif defined(POSIX_DOC) +-- | Is an element a drive +-- +-- > isDrive "/" == True +-- > isDrive "/foo" == False +-- > isDrive "" == False +#else +-- | Is an element a drive +-- +-- > Posix: isDrive "/" == True +-- > Posix: isDrive "/foo" == False +-- > Windows: isDrive "C:\\" == True +-- > Windows: isDrive "C:\\foo" == False +-- > isDrive "" == False +#endif +isDrive :: FILEPATH_NAME -> Bool +isDrive (OSSTRING_NAME x) = C.isDrive x + + +--------------------------------------------------------------------- +-- Operations on a filepath, as a list of directories + +#ifdef WINDOWS_DOC +-- | Split a filename into directory and file. '</>' is the inverse. +-- The first component will often end with a trailing slash. +-- +-- > splitFileName "/directory/file.ext" == ("/directory/","file.ext") +-- > Valid x => uncurry (</>) (splitFileName x) == x || fst (splitFileName x) == "./" +-- > Valid x => isValid (fst (splitFileName x)) +-- > splitFileName "file/bob.txt" == ("file/", "bob.txt") +-- > splitFileName "file/" == ("file/", "") +-- > splitFileName "bob" == ("./", "bob") +-- > splitFileName "c:" == ("c:","") +#elif defined(POSIX_DOC) +-- | Split a filename into directory and file. '</>' is the inverse. +-- The first component will often end with a trailing slash.
+-- +-- > splitFileName "/directory/file.ext" == ("/directory/","file.ext") +-- > Valid x => uncurry (</>) (splitFileName x) == x || fst (splitFileName x) == "./" +-- > Valid x => isValid (fst (splitFileName x)) +-- > splitFileName "file/bob.txt" == ("file/", "bob.txt") +-- > splitFileName "file/" == ("file/", "") +-- > splitFileName "bob" == ("./", "bob") +-- > splitFileName "/" == ("/","") +#else +-- | Split a filename into directory and file. '</>' is the inverse. +-- The first component will often end with a trailing slash. +-- +-- > splitFileName "/directory/file.ext" == ("/directory/","file.ext") +-- > Valid x => uncurry (</>) (splitFileName x) == x || fst (splitFileName x) == "./" +-- > Valid x => isValid (fst (splitFileName x)) +-- > splitFileName "file/bob.txt" == ("file/", "bob.txt") +-- > splitFileName "file/" == ("file/", "") +-- > splitFileName "bob" == ("./", "bob") +-- > Posix: splitFileName "/" == ("/","") +-- > Windows: splitFileName "c:" == ("c:","") +#endif +splitFileName :: FILEPATH_NAME -> (FILEPATH_NAME, FILEPATH_NAME) +splitFileName (OSSTRING_NAME x) = bimap OSSTRING_NAME OSSTRING_NAME $ C.splitFileName x + + +-- | Set the filename. +-- +-- > replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext" +-- > Valid x => replaceFileName x (takeFileName x) == x +replaceFileName :: FILEPATH_NAME -> OSSTRING_NAME -> FILEPATH_NAME +replaceFileName (OSSTRING_NAME x) (OSSTRING_NAME y) = OSSTRING_NAME $ C.replaceFileName x y + + +-- | Drop the filename. Unlike 'takeDirectory', this function will leave +-- a trailing path separator on the directory. +-- +-- > dropFileName "/directory/file.ext" == "/directory/" +-- > dropFileName x == fst (splitFileName x) +dropFileName :: FILEPATH_NAME -> FILEPATH_NAME +dropFileName (OSSTRING_NAME x) = OSSTRING_NAME $ C.dropFileName x + + +-- | Get the file name.
+-- +-- > takeFileName "/directory/file.ext" == "file.ext" +-- > takeFileName "test/" == "" +-- > takeFileName x `isSuffixOf` x +-- > takeFileName x == snd (splitFileName x) +-- > Valid x => takeFileName (replaceFileName x "fred") == "fred" +-- > Valid x => takeFileName (x </> "fred") == "fred" +-- > Valid x => isRelative (takeFileName x) +takeFileName :: FILEPATH_NAME -> FILEPATH_NAME +takeFileName (OSSTRING_NAME x) = OSSTRING_NAME $ C.takeFileName x + + +-- | Get the base name, without an extension or path. +-- +-- > takeBaseName "/directory/file.ext" == "file" +-- > takeBaseName "file/test.txt" == "test" +-- > takeBaseName "dave.ext" == "dave" +-- > takeBaseName "" == "" +-- > takeBaseName "test" == "test" +-- > takeBaseName (addTrailingPathSeparator x) == "" +-- > takeBaseName "file/file.tar.gz" == "file.tar" +takeBaseName :: FILEPATH_NAME -> FILEPATH_NAME +takeBaseName (OSSTRING_NAME x) = OSSTRING_NAME $ C.takeBaseName x + + +-- | Set the base name. +-- +-- > replaceBaseName "/directory/other.ext" "file" == "/directory/file.ext" +-- > replaceBaseName "file/test.txt" "bob" == "file/bob.txt" +-- > replaceBaseName "fred" "bill" == "bill" +-- > replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar" +-- > Valid x => replaceBaseName x (takeBaseName x) == x +replaceBaseName :: FILEPATH_NAME -> OSSTRING_NAME -> FILEPATH_NAME +replaceBaseName (OSSTRING_NAME path) (OSSTRING_NAME name) = OSSTRING_NAME $ C.replaceBaseName path name + + +-- | Is an item either a directory or the last character a path separator? +-- +-- > hasTrailingPathSeparator "test" == False +-- > hasTrailingPathSeparator "test/" == True +hasTrailingPathSeparator :: FILEPATH_NAME -> Bool +hasTrailingPathSeparator (OSSTRING_NAME x) = C.hasTrailingPathSeparator x + + +#ifdef WINDOWS_DOC +-- | Add a trailing file path separator if one is not already present.
+-- +-- > hasTrailingPathSeparator (addTrailingPathSeparator x) +-- > hasTrailingPathSeparator x ==> addTrailingPathSeparator x == x +#elif defined(POSIX_DOC) +-- | Add a trailing file path separator if one is not already present. +-- +-- > hasTrailingPathSeparator (addTrailingPathSeparator x) +-- > hasTrailingPathSeparator x ==> addTrailingPathSeparator x == x +-- > addTrailingPathSeparator "test/rest" == "test/rest/" +#else +-- | Add a trailing file path separator if one is not already present. +-- +-- > hasTrailingPathSeparator (addTrailingPathSeparator x) +-- > hasTrailingPathSeparator x ==> addTrailingPathSeparator x == x +-- > Posix: addTrailingPathSeparator "test/rest" == "test/rest/" +#endif +addTrailingPathSeparator :: FILEPATH_NAME -> FILEPATH_NAME +addTrailingPathSeparator (OSSTRING_NAME bs) = OSSTRING_NAME $ C.addTrailingPathSeparator bs + + +#ifdef WINDOWS_DOC +-- | Remove any trailing path separators +-- +-- > dropTrailingPathSeparator "file/test/" == "file/test" +-- > dropTrailingPathSeparator "/" == "/" +-- > dropTrailingPathSeparator "\\" == "\\" +#elif defined(POSIX_DOC) +-- | Remove any trailing path separators +-- +-- > dropTrailingPathSeparator "file/test/" == "file/test" +-- > dropTrailingPathSeparator "/" == "/" +-- > not (hasTrailingPathSeparator (dropTrailingPathSeparator x)) || isDrive x +#else +-- | Remove any trailing path separators +-- +-- > dropTrailingPathSeparator "file/test/" == "file/test" +-- > dropTrailingPathSeparator "/" == "/" +-- > Windows: dropTrailingPathSeparator "\\" == "\\" +-- > Posix: not (hasTrailingPathSeparator (dropTrailingPathSeparator x)) || isDrive x +#endif +dropTrailingPathSeparator :: FILEPATH_NAME -> FILEPATH_NAME +dropTrailingPathSeparator (OSSTRING_NAME x) = OSSTRING_NAME $ C.dropTrailingPathSeparator x + + +#ifdef WINDOWS_DOC +-- | Get the directory name, move up one level. +-- +-- > takeDirectory "/directory/other.ext" == "/directory" +-- > takeDirectory x `isPrefixOf` x || takeDirectory x == "." 
+-- > takeDirectory "foo" == "." +-- > takeDirectory "/" == "/" +-- > takeDirectory "/foo" == "/" +-- > takeDirectory "/foo/bar/baz" == "/foo/bar" +-- > takeDirectory "/foo/bar/baz/" == "/foo/bar/baz" +-- > takeDirectory "foo/bar/baz" == "foo/bar" +-- > takeDirectory "foo\\bar" == "foo" +-- > takeDirectory "foo\\bar\\\\" == "foo\\bar" +-- > takeDirectory "C:\\" == "C:\\" +#elif defined(POSIX_DOC) +-- | Get the directory name, move up one level. +-- +-- > takeDirectory "/directory/other.ext" == "/directory" +-- > takeDirectory x `isPrefixOf` x || takeDirectory x == "." +-- > takeDirectory "foo" == "." +-- > takeDirectory "/" == "/" +-- > takeDirectory "/foo" == "/" +-- > takeDirectory "/foo/bar/baz" == "/foo/bar" +-- > takeDirectory "/foo/bar/baz/" == "/foo/bar/baz" +-- > takeDirectory "foo/bar/baz" == "foo/bar" +#else +-- | Get the directory name, move up one level. +-- +-- > takeDirectory "/directory/other.ext" == "/directory" +-- > takeDirectory x `isPrefixOf` x || takeDirectory x == "." +-- > takeDirectory "foo" == "." +-- > takeDirectory "/" == "/" +-- > takeDirectory "/foo" == "/" +-- > takeDirectory "/foo/bar/baz" == "/foo/bar" +-- > takeDirectory "/foo/bar/baz/" == "/foo/bar/baz" +-- > takeDirectory "foo/bar/baz" == "foo/bar" +-- > Windows: takeDirectory "foo\\bar" == "foo" +-- > Windows: takeDirectory "foo\\bar\\\\" == "foo\\bar" +-- > Windows: takeDirectory "C:\\" == "C:\\" +#endif +takeDirectory :: FILEPATH_NAME -> FILEPATH_NAME +takeDirectory (OSSTRING_NAME x) = OSSTRING_NAME $ C.takeDirectory x + + +-- | Set the directory, keeping the filename the same. +-- +-- > replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext" +-- > Valid x => replaceDirectory x (takeDirectory x) `equalFilePath` x +replaceDirectory :: FILEPATH_NAME -> FILEPATH_NAME -> FILEPATH_NAME +replaceDirectory (OSSTRING_NAME file) (OSSTRING_NAME dir) = OSSTRING_NAME $ C.replaceDirectory file dir + + +-- | An alias for '</>'.
+combine :: FILEPATH_NAME -> FILEPATH_NAME -> FILEPATH_NAME +combine (OSSTRING_NAME a) (OSSTRING_NAME b) = OSSTRING_NAME $ C.combine a b + +#ifdef WINDOWS_DOC +-- | Combine two paths with a path separator. +-- If the second path starts with a path separator or a drive letter, then it returns the second. +-- The intention is that @readFile (dir '</>' file)@ will access the same file as +-- @setCurrentDirectory dir; readFile file@. +-- +-- > "/directory" </> "file.ext" == "/directory\\file.ext" +-- > "directory" </> "/file.ext" == "/file.ext" +-- > Valid x => (takeDirectory x </> takeFileName x) `equalFilePath` x +-- +-- Combined: +-- +-- > "C:\\foo" </> "bar" == "C:\\foo\\bar" +-- > "home" </> "bob" == "home\\bob" +-- +-- Not combined: +-- +-- > "home" </> "C:\\bob" == "C:\\bob" +-- +-- Not combined (tricky): +-- +-- If a filepath starts with a single slash, it is relative to the +-- root of the current drive. In [1], this is (confusingly) referred to as an +-- absolute path. +-- The current behavior of '</>' is to never combine these forms. +-- +-- > "home" </> "/bob" == "/bob" +-- > "home" </> "\\bob" == "\\bob" +-- > "C:\\home" </> "\\bob" == "\\bob" +-- +-- From [1]: "If a file name begins with only a disk designator +-- but not the backslash after the colon, it is interpreted as a relative path +-- to the current directory on the drive with the specified letter." +-- The current behavior of '</>' is to never combine these forms. +-- +-- > "D:\\foo" </> "C:bar" == "C:bar" +-- > "C:\\foo" </> "C:bar" == "C:bar" +#elif defined(POSIX_DOC) +-- | Combine two paths with a path separator. +-- If the second path starts with a path separator or a drive letter, then it returns the second. +-- The intention is that @readFile (dir '</>' file)@ will access the same file as +-- @setCurrentDirectory dir; readFile file@.
+-- +-- > "/directory" </> "file.ext" == "/directory/file.ext" +-- > Valid x => (takeDirectory x </> takeFileName x) `equalFilePath` x +-- +-- Combined: +-- +-- > "/" </> "test" == "/test" +-- > "home" </> "bob" == "home/bob" +-- > "x:" </> "foo" == "x:/foo" +-- +-- Not combined: +-- +-- > "home" </> "/bob" == "/bob" +#else +-- | Combine two paths with a path separator. +-- If the second path starts with a path separator or a drive letter, then it returns the second. +-- The intention is that @readFile (dir '</>' file)@ will access the same file as +-- @setCurrentDirectory dir; readFile file@. +-- +-- > Posix: "/directory" </> "file.ext" == "/directory/file.ext" +-- > Windows: "/directory" </> "file.ext" == "/directory\\file.ext" +-- > "directory" </> "/file.ext" == "/file.ext" +-- > Valid x => (takeDirectory x </> takeFileName x) `equalFilePath` x +-- +-- Combined: +-- +-- > Posix: "/" </> "test" == "/test" +-- > Posix: "home" </> "bob" == "home/bob" +-- > Posix: "x:" </> "foo" == "x:/foo" +-- > Windows: "C:\\foo" </> "bar" == "C:\\foo\\bar" +-- > Windows: "home" </> "bob" == "home\\bob" +-- +-- Not combined: +-- +-- > Posix: "home" </> "/bob" == "/bob" +-- > Windows: "home" </> "C:\\bob" == "C:\\bob" +-- +-- Not combined (tricky): +-- +-- On Windows, if a filepath starts with a single slash, it is relative to the +-- root of the current drive. In [1], this is (confusingly) referred to as an +-- absolute path. +-- The current behavior of '</>' is to never combine these forms. +-- +-- > Windows: "home" </> "/bob" == "/bob" +-- > Windows: "home" </> "\\bob" == "\\bob" +-- > Windows: "C:\\home" </> "\\bob" == "\\bob" +-- +-- On Windows, from [1]: "If a file name begins with only a disk designator +-- but not the backslash after the colon, it is interpreted as a relative path +-- to the current directory on the drive with the specified letter." +-- The current behavior of '</>' is to never combine these forms.
+-- +-- > Windows: "D:\\foo" </> "C:bar" == "C:bar" +-- > Windows: "C:\\foo" </> "C:bar" == "C:bar" +#endif +(</>) :: FILEPATH_NAME -> FILEPATH_NAME -> FILEPATH_NAME +(</>) = combine + + +#ifdef WINDOWS_DOC +-- | Split a path by the directory separator. +-- +-- > splitPath "/directory/file.ext" == ["/","directory/","file.ext"] +-- > concat (splitPath x) == x +-- > splitPath "test//item/" == ["test//","item/"] +-- > splitPath "test/item/file" == ["test/","item/","file"] +-- > splitPath "" == [] +-- > splitPath "c:\\test\\path" == ["c:\\","test\\","path"] +#elif defined(POSIX_DOC) +-- | Split a path by the directory separator. +-- +-- > splitPath "/directory/file.ext" == ["/","directory/","file.ext"] +-- > concat (splitPath x) == x +-- > splitPath "test//item/" == ["test//","item/"] +-- > splitPath "test/item/file" == ["test/","item/","file"] +-- > splitPath "" == [] +-- > splitPath "/file/test" == ["/","file/","test"] +#else +-- | Split a path by the directory separator. +-- +-- > splitPath "/directory/file.ext" == ["/","directory/","file.ext"] +-- > concat (splitPath x) == x +-- > splitPath "test//item/" == ["test//","item/"] +-- > splitPath "test/item/file" == ["test/","item/","file"] +-- > splitPath "" == [] +-- > Windows: splitPath "c:\\test\\path" == ["c:\\","test\\","path"] +-- > Posix: splitPath "/file/test" == ["/","file/","test"] +#endif +splitPath :: FILEPATH_NAME -> [FILEPATH_NAME] +splitPath (OSSTRING_NAME bs) = OSSTRING_NAME <$> C.splitPath bs + +#ifdef WINDOWS_DOC +-- | Just as 'splitPath', but don't add the trailing slashes to each element.
+-- +-- > splitDirectories "/directory/file.ext" == ["/","directory","file.ext"] +-- > splitDirectories "test/file" == ["test","file"] +-- > splitDirectories "/test/file" == ["/","test","file"] +-- > splitDirectories "C:\\test\\file" == ["C:\\", "test", "file"] +-- > Valid x => joinPath (splitDirectories x) `equalFilePath` x +-- > splitDirectories "" == [] +-- > splitDirectories "C:\\test\\\\\\file" == ["C:\\", "test", "file"] +-- > splitDirectories "/test///file" == ["/","test","file"] +#elif defined(POSIX_DOC) +-- | Just as 'splitPath', but don't add the trailing slashes to each element. +-- +-- > splitDirectories "/directory/file.ext" == ["/","directory","file.ext"] +-- > splitDirectories "test/file" == ["test","file"] +-- > splitDirectories "/test/file" == ["/","test","file"] +-- > Valid x => joinPath (splitDirectories x) `equalFilePath` x +-- > splitDirectories "" == [] +-- > splitDirectories "/test///file" == ["/","test","file"] +#else +-- | Just as 'splitPath', but don't add the trailing slashes to each element. +-- +-- > splitDirectories "/directory/file.ext" == ["/","directory","file.ext"] +-- > splitDirectories "test/file" == ["test","file"] +-- > splitDirectories "/test/file" == ["/","test","file"] +-- > Windows: splitDirectories "C:\\test\\file" == ["C:\\", "test", "file"] +-- > Valid x => joinPath (splitDirectories x) `equalFilePath` x +-- > splitDirectories "" == [] +-- > Windows: splitDirectories "C:\\test\\\\\\file" == ["C:\\", "test", "file"] +-- > splitDirectories "/test///file" == ["/","test","file"] +#endif +splitDirectories :: FILEPATH_NAME -> [FILEPATH_NAME] +splitDirectories (OSSTRING_NAME x) = OSSTRING_NAME <$> C.splitDirectories x + +#ifdef WINDOWS_DOC +-- | Join path elements back together. +-- +-- > joinPath z == foldr (</>) "" z +-- > joinPath ["/","directory/","file.ext"] == "/directory/file.ext" +-- > Valid x => joinPath (splitPath x) == x +-- > joinPath [] == "" +#elif defined(POSIX_DOC) +-- | Join path elements back together.
+-- +-- > joinPath z == foldr (</>) "" z +-- > joinPath ["/","directory/","file.ext"] == "/directory/file.ext" +-- > Valid x => joinPath (splitPath x) == x +-- > joinPath [] == "" +-- > joinPath ["test","file","path"] == "test/file/path" +#else +-- | Join path elements back together. +-- +-- > joinPath z == foldr (</>) "" z +-- > joinPath ["/","directory/","file.ext"] == "/directory/file.ext" +-- > Valid x => joinPath (splitPath x) == x +-- > joinPath [] == "" +-- > Posix: joinPath ["test","file","path"] == "test/file/path" +#endif +joinPath :: [FILEPATH_NAME] -> FILEPATH_NAME +joinPath = foldr (</>) (OSSTRING_NAME mempty) + + + + + + + + + +------------------------ +-- File name manipulations + + +#ifdef WINDOWS_DOC +-- | Equality of two filepaths. +-- If you call @System.Directory.canonicalizePath@ +-- first this has a much better chance of working. +-- Note that this doesn't follow symlinks or DOSNAM~1s. +-- +-- Similar to 'normalise', this does not expand @".."@, because of symlinks. +-- +-- > x == y ==> equalFilePath x y +-- > normalise x == normalise y ==> equalFilePath x y +-- > equalFilePath "foo" "foo/" +-- > not (equalFilePath "/a/../c" "/c") +-- > not (equalFilePath "foo" "/foo") +-- > equalFilePath "foo" "FOO" +-- > not (equalFilePath "C:" "C:/") +#elif defined(POSIX_DOC) +-- | Equality of two filepaths. +-- If you call @System.Directory.canonicalizePath@ +-- first this has a much better chance of working. +-- Note that this doesn't follow symlinks or DOSNAM~1s. +-- +-- Similar to 'normalise', this does not expand @".."@, because of symlinks. +-- +-- > x == y ==> equalFilePath x y +-- > normalise x == normalise y ==> equalFilePath x y +-- > equalFilePath "foo" "foo/" +-- > not (equalFilePath "/a/../c" "/c") +-- > not (equalFilePath "foo" "/foo") +-- > not (equalFilePath "foo" "FOO") +#else +-- | Equality of two filepaths. +-- If you call @System.Directory.canonicalizePath@ +-- first this has a much better chance of working.
+-- Note that this doesn't follow symlinks or DOSNAM~1s. +-- +-- Similar to 'normalise', this does not expand @".."@, because of symlinks. +-- +-- > x == y ==> equalFilePath x y +-- > normalise x == normalise y ==> equalFilePath x y +-- > equalFilePath "foo" "foo/" +-- > not (equalFilePath "/a/../c" "/c") +-- > not (equalFilePath "foo" "/foo") +-- > Posix: not (equalFilePath "foo" "FOO") +-- > Windows: equalFilePath "foo" "FOO" +-- > Windows: not (equalFilePath "C:" "C:/") +#endif +equalFilePath :: FILEPATH_NAME -> FILEPATH_NAME -> Bool +equalFilePath (OSSTRING_NAME p1) (OSSTRING_NAME p2) = C.equalFilePath p1 p2 + +#ifdef WINDOWS_DOC +-- | Contract a filename, based on a relative path. Note that the resulting path +-- will never introduce @..@ paths, as the presence of symlinks means @..\/b@ +-- may not reach @a\/b@ if it starts from @a\/c@. For a worked example see +-- <http://neilmitchell.blogspot.co.uk/2015/10/filepaths-are-subtle-symlinks-are-hard.html this blog post>. +-- +-- The corresponding @makeAbsolute@ function can be found in +-- @System.Directory@. +-- +-- > makeRelative "/directory" "/directory/file.ext" == "file.ext" +-- > Valid x => makeRelative (takeDirectory x) x `equalFilePath` takeFileName x +-- > makeRelative x x == "." +-- > Valid x y => equalFilePath x y || (isRelative x && makeRelative y x == x) || equalFilePath (y </> makeRelative y x) x +-- > makeRelative "C:\\Home" "c:\\home\\bob" == "bob" +-- > makeRelative "C:\\Home" "c:/home/bob" == "bob" +-- > makeRelative "C:\\Home" "D:\\Home\\Bob" == "D:\\Home\\Bob" +-- > makeRelative "C:\\Home" "C:Home\\Bob" == "C:Home\\Bob" +-- > makeRelative "/Home" "/home/bob" == "bob" +-- > makeRelative "/" "//" == "//" +#elif defined(POSIX_DOC) +-- | Contract a filename, based on a relative path. Note that the resulting path +-- will never introduce @..@ paths, as the presence of symlinks means @..\/b@ +-- may not reach @a\/b@ if it starts from @a\/c@. For a worked example see +-- <http://neilmitchell.blogspot.co.uk/2015/10/filepaths-are-subtle-symlinks-are-hard.html this blog post>. +-- +-- The corresponding @makeAbsolute@ function can be found in +-- @System.Directory@.
+-- +-- > makeRelative "/directory" "/directory/file.ext" == "file.ext" +-- > Valid x => makeRelative (takeDirectory x) x `equalFilePath` takeFileName x +-- > makeRelative x x == "." +-- > Valid x y => equalFilePath x y || (isRelative x && makeRelative y x == x) || equalFilePath (y </> makeRelative y x) x +-- > makeRelative "/Home" "/home/bob" == "/home/bob" +-- > makeRelative "/home/" "/home/bob/foo/bar" == "bob/foo/bar" +-- > makeRelative "/fred" "bob" == "bob" +-- > makeRelative "/file/test" "/file/test/fred" == "fred" +-- > makeRelative "/file/test" "/file/test/fred/" == "fred/" +-- > makeRelative "some/path" "some/path/a/b/c" == "a/b/c" +#else +-- | Contract a filename, based on a relative path. Note that the resulting path +-- will never introduce @..@ paths, as the presence of symlinks means @..\/b@ +-- may not reach @a\/b@ if it starts from @a\/c@. For a worked example see +-- <http://neilmitchell.blogspot.co.uk/2015/10/filepaths-are-subtle-symlinks-are-hard.html this blog post>. +-- +-- The corresponding @makeAbsolute@ function can be found in +-- @System.Directory@. +-- +-- > makeRelative "/directory" "/directory/file.ext" == "file.ext" +-- > Valid x => makeRelative (takeDirectory x) x `equalFilePath` takeFileName x +-- > makeRelative x x == "."
+-- > Valid x y => equalFilePath x y || (isRelative x && makeRelative y x == x) || equalFilePath (y </> makeRelative y x) x +-- > Windows: makeRelative "C:\\Home" "c:\\home\\bob" == "bob" +-- > Windows: makeRelative "C:\\Home" "c:/home/bob" == "bob" +-- > Windows: makeRelative "C:\\Home" "D:\\Home\\Bob" == "D:\\Home\\Bob" +-- > Windows: makeRelative "C:\\Home" "C:Home\\Bob" == "C:Home\\Bob" +-- > Windows: makeRelative "/Home" "/home/bob" == "bob" +-- > Windows: makeRelative "/" "//" == "//" +-- > Posix: makeRelative "/Home" "/home/bob" == "/home/bob" +-- > Posix: makeRelative "/home/" "/home/bob/foo/bar" == "bob/foo/bar" +-- > Posix: makeRelative "/fred" "bob" == "bob" +-- > Posix: makeRelative "/file/test" "/file/test/fred" == "fred" +-- > Posix: makeRelative "/file/test" "/file/test/fred/" == "fred/" +-- > Posix: makeRelative "some/path" "some/path/a/b/c" == "a/b/c" +#endif +makeRelative :: FILEPATH_NAME -> FILEPATH_NAME -> FILEPATH_NAME +makeRelative (OSSTRING_NAME root) (OSSTRING_NAME path) = OSSTRING_NAME $ C.makeRelative root path + +#ifdef WINDOWS_DOC +-- | Normalise a file +-- +-- * \/\/ outside of the drive can be made blank +-- +-- * \/ -> 'pathSeparator' +-- +-- * .\/ -> \"\" +-- +-- Does not remove @".."@, because of symlinks. +-- +-- > normalise "c:\\file/bob\\" == "C:\\file\\bob\\" +-- > normalise "c:\\" == "C:\\" +-- > normalise "C:.\\" == "C:" +-- > normalise "\\\\server\\test" == "\\\\server\\test" +-- > normalise "//server/test" == "\\\\server\\test" +-- > normalise "c:/file" == "C:\\file" +-- > normalise "/file" == "\\file" +-- > normalise "\\" == "\\" +-- > normalise "/./" == "\\" +-- > normalise "." == "." +#elif defined(POSIX_DOC) +-- | Normalise a file +-- +-- * \/\/ outside of the drive can be made blank +-- +-- * \/ -> 'pathSeparator' +-- +-- * .\/ -> \"\" +-- +-- Does not remove @".."@, because of symlinks.
+-- +-- > normalise "/file/\\test////" == "/file/\\test/" +-- > normalise "/file/./test" == "/file/test" +-- > normalise "/test/file/../bob/fred/" == "/test/file/../bob/fred/" +-- > normalise "../bob/fred/" == "../bob/fred/" +-- > normalise "/a/../c" == "/a/../c" +-- > normalise "./bob/fred/" == "bob/fred/" +-- > normalise "." == "." +-- > normalise "./" == "./" +-- > normalise "./." == "./" +-- > normalise "/./" == "/" +-- > normalise "/" == "/" +-- > normalise "bob/fred/." == "bob/fred/" +-- > normalise "//home" == "/home" +#else +-- | Normalise a file +-- +-- * \/\/ outside of the drive can be made blank +-- +-- * \/ -> 'pathSeparator' +-- +-- * .\/ -> \"\" +-- +-- Does not remove @".."@, because of symlinks. +-- +-- > Posix: normalise "/file/\\test////" == "/file/\\test/" +-- > Posix: normalise "/file/./test" == "/file/test" +-- > Posix: normalise "/test/file/../bob/fred/" == "/test/file/../bob/fred/" +-- > Posix: normalise "../bob/fred/" == "../bob/fred/" +-- > Posix: normalise "/a/../c" == "/a/../c" +-- > Posix: normalise "./bob/fred/" == "bob/fred/" +-- > Windows: normalise "c:\\file/bob\\" == "C:\\file\\bob\\" +-- > Windows: normalise "c:\\" == "C:\\" +-- > Windows: normalise "C:.\\" == "C:" +-- > Windows: normalise "\\\\server\\test" == "\\\\server\\test" +-- > Windows: normalise "//server/test" == "\\\\server\\test" +-- > Windows: normalise "c:/file" == "C:\\file" +-- > Windows: normalise "/file" == "\\file" +-- > Windows: normalise "\\" == "\\" +-- > Windows: normalise "/./" == "\\" +-- > normalise "." == "." +-- > Posix: normalise "./" == "./" +-- > Posix: normalise "./." == "./" +-- > Posix: normalise "/./" == "/" +-- > Posix: normalise "/" == "/" +-- > Posix: normalise "bob/fred/." == "bob/fred/" +-- > Posix: normalise "//home" == "/home" +#endif +normalise :: FILEPATH_NAME -> FILEPATH_NAME +normalise (OSSTRING_NAME filepath) = OSSTRING_NAME $ C.normalise filepath + + +#ifdef WINDOWS_DOC +-- | Is a filepath valid, i.e. could you create a file like it? 
This function checks for invalid names, +-- and invalid characters, but does not check if length limits are exceeded, as these are typically +-- filesystem dependent. +-- +-- > isValid "" == False +-- > isValid "\0" == False +-- > isValid "c:\\test" == True +-- > isValid "c:\\test:of_test" == False +-- > isValid "test*" == False +-- > isValid "c:\\test\\nul" == False +-- > isValid "c:\\test\\prn.txt" == False +-- > isValid "c:\\nul\\file" == False +-- > isValid "\\\\" == False +-- > isValid "\\\\\\foo" == False +-- > isValid "\\\\?\\D:file" == False +-- > isValid "foo\tbar" == False +-- > isValid "nul .txt" == False +-- > isValid " nul.txt" == True +#elif defined(POSIX_DOC) +-- | Is a filepath valid, i.e. could you create a file like it? This function checks for invalid names, +-- and invalid characters, but does not check if length limits are exceeded, as these are typically +-- filesystem dependent. +-- +-- > isValid "" == False +-- > isValid "\0" == False +-- > isValid "/random_ path:*" == True +-- > isValid x == not (null x) +#else +-- | Is a filepath valid, i.e. could you create a file like it? This function checks for invalid names, +-- and invalid characters, but does not check if length limits are exceeded, as these are typically +-- filesystem dependent. 
+-- +-- > isValid "" == False +-- > isValid "\0" == False +-- > Posix: isValid "/random_ path:*" == True +-- > Posix: isValid x == not (null x) +-- > Windows: isValid "c:\\test" == True +-- > Windows: isValid "c:\\test:of_test" == False +-- > Windows: isValid "test*" == False +-- > Windows: isValid "c:\\test\\nul" == False +-- > Windows: isValid "c:\\test\\prn.txt" == False +-- > Windows: isValid "c:\\nul\\file" == False +-- > Windows: isValid "\\\\" == False +-- > Windows: isValid "\\\\\\foo" == False +-- > Windows: isValid "\\\\?\\D:file" == False +-- > Windows: isValid "foo\tbar" == False +-- > Windows: isValid "nul .txt" == False +-- > Windows: isValid " nul.txt" == True +#endif +isValid :: FILEPATH_NAME -> Bool +isValid (OSSTRING_NAME filepath) = C.isValid filepath + + +#ifdef WINDOWS_DOC +-- | Take a filepath and make it valid; does not change already valid filepaths. +-- +-- > isValid (makeValid x) +-- > isValid x ==> makeValid x == x +-- > makeValid "" == "_" +-- > makeValid "file\0name" == "file_name" +-- > makeValid "c:\\already\\/valid" == "c:\\already\\/valid" +-- > makeValid "c:\\test:of_test" == "c:\\test_of_test" +-- > makeValid "test*" == "test_" +-- > makeValid "c:\\test\\nul" == "c:\\test\\nul_" +-- > makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt" +-- > makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt" +-- > makeValid "c:\\nul\\file" == "c:\\nul_\\file" +-- > makeValid "\\\\\\foo" == "\\\\drive" +-- > makeValid "\\\\?\\D:file" == "\\\\?\\D:\\file" +-- > makeValid "nul .txt" == "nul _.txt" +#elif defined(POSIX_DOC) +-- | Take a filepath and make it valid; does not change already valid filepaths. +-- +-- > isValid (makeValid x) +-- > isValid x ==> makeValid x == x +-- > makeValid "" == "_" +-- > makeValid "file\0name" == "file_name" +#else +-- | Take a filepath and make it valid; does not change already valid filepaths. 
+-- +-- > isValid (makeValid x) +-- > isValid x ==> makeValid x == x +-- > makeValid "" == "_" +-- > makeValid "file\0name" == "file_name" +-- > Windows: makeValid "c:\\already\\/valid" == "c:\\already\\/valid" +-- > Windows: makeValid "c:\\test:of_test" == "c:\\test_of_test" +-- > Windows: makeValid "test*" == "test_" +-- > Windows: makeValid "c:\\test\\nul" == "c:\\test\\nul_" +-- > Windows: makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt" +-- > Windows: makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt" +-- > Windows: makeValid "c:\\nul\\file" == "c:\\nul_\\file" +-- > Windows: makeValid "\\\\\\foo" == "\\\\drive" +-- > Windows: makeValid "\\\\?\\D:file" == "\\\\?\\D:\\file" +-- > Windows: makeValid "nul .txt" == "nul _.txt" +#endif +makeValid :: FILEPATH_NAME -> FILEPATH_NAME +makeValid (OSSTRING_NAME path) = OSSTRING_NAME $ C.makeValid path + + +#ifdef WINDOWS_DOC +-- | Is a path relative, or is it fixed to the root? +-- +-- > isRelative "path\\test" == True +-- > isRelative "c:\\test" == False +-- > isRelative "c:test" == True +-- > isRelative "c:\\" == False +-- > isRelative "c:/" == False +-- > isRelative "c:" == True +-- > isRelative "\\\\foo" == False +-- > isRelative "\\\\?\\foo" == False +-- > isRelative "\\\\?\\UNC\\foo" == False +-- > isRelative "/foo" == True +-- > isRelative "\\foo" == True +-- +-- According to [1]: +-- +-- * "A UNC name of any format [is never relative]." +-- +-- * "You cannot use the "\\?\" prefix with a relative path." +#elif defined(POSIX_DOC) +-- | Is a path relative, or is it fixed to the root? +-- +-- > isRelative "test/path" == True +-- > isRelative "/test" == False +-- > isRelative "/" == False +#else +-- | Is a path relative, or is it fixed to the root? 
+-- +-- > Windows: isRelative "path\\test" == True +-- > Windows: isRelative "c:\\test" == False +-- > Windows: isRelative "c:test" == True +-- > Windows: isRelative "c:\\" == False +-- > Windows: isRelative "c:/" == False +-- > Windows: isRelative "c:" == True +-- > Windows: isRelative "\\\\foo" == False +-- > Windows: isRelative "\\\\?\\foo" == False +-- > Windows: isRelative "\\\\?\\UNC\\foo" == False +-- > Windows: isRelative "/foo" == True +-- > Windows: isRelative "\\foo" == True +-- > Posix: isRelative "test/path" == True +-- > Posix: isRelative "/test" == False +-- > Posix: isRelative "/" == False +-- +-- According to [1]: +-- +-- * "A UNC name of any format [is never relative]." +-- +-- * "You cannot use the "\\?\" prefix with a relative path." +#endif +isRelative :: FILEPATH_NAME -> Bool +isRelative (OSSTRING_NAME x) = C.isRelative x + + +-- | @not . 'isRelative'@ +-- +-- > isAbsolute x == not (isRelative x) +isAbsolute :: FILEPATH_NAME -> Bool +isAbsolute (OSSTRING_NAME x) = C.isAbsolute x + + +-- things not defined in os-string + +#ifdef WINDOWS +encodeFS :: String -> IO WindowsPath +encodeFS = fmap WindowsString . encodeWithBaseWindows + +decodeFS :: WindowsPath -> IO String +decodeFS (WindowsString x) = decodeWithBaseWindows x +#elif defined(POSIX) +encodeFS :: String -> IO PosixPath +encodeFS = fmap PosixString . encodeWithBasePosix + +decodeFS :: PosixPath -> IO String +decodeFS (PosixString x) = decodeWithBasePosix x +#else +encodeFS :: String -> IO OsPath +#if defined(mingw32_HOST_OS) || defined(__MINGW32__) +encodeFS = fmap (OsString . WindowsString) . encodeWithBaseWindows +#else +encodeFS = fmap (OsString . PosixString) . 
encodeWithBasePosix +#endif + +decodeFS :: OsPath -> IO String +#if defined(mingw32_HOST_OS) || defined(__MINGW32__) +decodeFS (OsString (WindowsString x)) = decodeWithBaseWindows x +#else +decodeFS (OsString (PosixString x)) = decodeWithBasePosix x +#endif + +#endif + diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Encoding.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Encoding.hs new file mode 100644 index 0000000000..6281ef38b8 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Encoding.hs @@ -0,0 +1,31 @@ +module System.OsPath.Encoding + ( + -- * Types + EncodingException(..) + , showEncodingException + + -- * UCS-2 + , ucs2le + , mkUcs2le + , ucs2le_DF + , ucs2le_EF + , ucs2le_decode + , ucs2le_encode + + -- * UTF-16LE_b + , utf16le_b + , mkUTF16le_b + , utf16le_b_DF + , utf16le_b_EF + , utf16le_b_decode + , utf16le_b_encode + + -- * base encoding + , encodeWithBasePosix + , decodeWithBasePosix + , encodeWithBaseWindows + , decodeWithBaseWindows + ) + where + +import System.OsString.Encoding.Internal diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Internal.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Internal.hs new file mode 100644 index 0000000000..bc1d5a9bfc --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Internal.hs @@ -0,0 +1,176 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE UnliftedFFITypes #-} +{-# LANGUAGE TemplateHaskellQuotes #-} +{-# LANGUAGE ViewPatterns #-} -- needed to quote a view pattern + +module System.OsPath.Internal where + +import {-# SOURCE #-} System.OsPath + ( isValid ) +import System.OsPath.Types +import qualified System.OsString.Internal as OS + +import Control.Monad.Catch + ( MonadThrow ) +import Data.ByteString + ( ByteString ) +import Language.Haskell.TH.Quote + ( QuasiQuoter (..) 
) +import Language.Haskell.TH.Syntax + ( Lift (..), lift ) +import GHC.IO.Encoding.Failure ( CodingFailureMode(..) ) + +import System.OsString.Internal.Types +import System.OsPath.Encoding +import Control.Monad (when) +import System.IO + ( TextEncoding ) + +#if defined(mingw32_HOST_OS) || defined(__MINGW32__) +import qualified System.OsPath.Windows as PF +import GHC.IO.Encoding.UTF16 ( mkUTF16le ) +#else +import qualified System.OsPath.Posix as PF +import GHC.IO.Encoding.UTF8 ( mkUTF8 ) +#endif +import GHC.Stack (HasCallStack) + + + +-- | Partial unicode friendly encoding. +-- +-- On windows this encodes as UTF16-LE (strictly), which is a pretty good guess. +-- On unix this encodes as UTF8 (strictly), which is a good guess. +-- +-- Throws an 'EncodingException' if encoding fails. If the input does not +-- contain surrogate chars, you can use 'unsafeEncodeUtf'. +encodeUtf :: MonadThrow m => FilePath -> m OsPath +encodeUtf = OS.encodeUtf + +-- | Unsafe unicode friendly encoding. +-- +-- Like 'encodeUtf', except it crashes when the input contains +-- surrogate chars. For sanitized input, this can be useful. +unsafeEncodeUtf :: HasCallStack => String -> OsString +unsafeEncodeUtf = OS.unsafeEncodeUtf + +-- | Encode a 'FilePath' with the specified encoding. +-- +-- Note: on windows, we expect a "wide char" encoding (e.g. UCS-2 or UTF-16). Anything +-- that works with @Word16@ boundaries. Picking an incompatible encoding may crash +-- filepath operations. +encodeWith :: TextEncoding -- ^ unix text encoding + -> TextEncoding -- ^ windows text encoding (wide char) + -> FilePath + -> Either EncodingException OsPath +encodeWith = OS.encodeWith + +-- | Like 'encodeUtf', except this mimics the behavior of the base library when doing filesystem +-- operations, which is: +-- +-- 1. on unix, uses shady PEP 383 style encoding (based on the current locale, +-- but PEP 383 only works properly on UTF-8 encodings, so good luck) +-- 2. 
on windows does permissive UTF-16 encoding, where coding errors generate +-- Chars in the surrogate range +-- +-- Looking up the locale requires IO. If you're not worried about calls +-- to 'setFileSystemEncoding', then 'unsafePerformIO' may be feasible (make sure +-- to deeply evaluate the result to catch exceptions). +encodeFS :: FilePath -> IO OsPath +encodeFS = OS.encodeFS + + +-- | Partial unicode friendly decoding. +-- +-- On windows this decodes as UTF16-LE (strictly), which is a pretty good guess. +-- On unix this decodes as UTF8 (strictly), which is a good guess. +-- +-- Throws a 'EncodingException' if decoding fails. +decodeUtf :: MonadThrow m => OsPath -> m FilePath +decodeUtf = OS.decodeUtf + +-- | Decode an 'OsPath' with the specified encoding. +decodeWith :: TextEncoding -- ^ unix text encoding + -> TextEncoding -- ^ windows text encoding + -> OsPath + -> Either EncodingException FilePath +decodeWith = OS.decodeWith + +-- | Like 'decodeUtf', except this mimics the behavior of the base library when doing filesystem +-- operations, which is: +-- +-- 1. on unix, uses shady PEP 383 style encoding (based on the current locale, +-- but PEP 383 only works properly on UTF-8 encodings, so good luck) +-- 2. on windows does permissive UTF-16 encoding, where coding errors generate +-- Chars in the surrogate range +-- +-- Looking up the locale requires IO. If you're not worried about calls +-- to 'setFileSystemEncoding', then 'unsafePerformIO' may be feasible (make sure +-- to deeply evaluate the result to catch exceptions). +decodeFS :: OsPath -> IO FilePath +decodeFS = OS.decodeFS + + +-- | Constructs an @OsPath@ from a ByteString. +-- +-- On windows, this ensures valid UCS-2LE, on unix it is passed unchanged/unchecked. +-- +-- Throws 'EncodingException' on invalid UCS-2LE on windows (although unlikely). +fromBytes :: MonadThrow m + => ByteString + -> m OsPath +fromBytes = OS.fromBytes + + + +-- | QuasiQuote an 'OsPath'. 
This accepts Unicode characters +-- and encodes as UTF-8 on unix and UTF-16LE on windows. Runs 'isValid' +-- on the input. If used as a pattern, requires turning on the @ViewPatterns@ +-- extension. +osp :: QuasiQuoter +osp = QuasiQuoter +#if defined(mingw32_HOST_OS) || defined(__MINGW32__) + { quoteExp = \s -> do + osp' <- either (fail . show) (pure . OsString) . PF.encodeWith (mkUTF16le ErrorOnCodingFailure) $ s + when (not $ isValid osp') $ fail ("filepath not valid: " ++ show osp') + lift osp' + , quotePat = \s -> do + osp' <- either (fail . show) (pure . OsString) . PF.encodeWith (mkUTF16le ErrorOnCodingFailure) $ s + when (not $ isValid osp') $ fail ("filepath not valid: " ++ show osp') + [p|((==) osp' -> True)|] + , quoteType = \_ -> + fail "illegal QuasiQuote (allowed as expression or pattern only, used as a type)" + , quoteDec = \_ -> + fail "illegal QuasiQuote (allowed as expression or pattern only, used as a declaration)" + } +#else + { quoteExp = \s -> do + osp' <- either (fail . show) (pure . OsString) . PF.encodeWith (mkUTF8 ErrorOnCodingFailure) $ s + when (not $ isValid osp') $ fail ("filepath not valid: " ++ show osp') + lift osp' + , quotePat = \s -> do + osp' <- either (fail . show) (pure . OsString) . PF.encodeWith (mkUTF8 ErrorOnCodingFailure) $ s + when (not $ isValid osp') $ fail ("filepath not valid: " ++ show osp') + [p|((==) osp' -> True)|] + , quoteType = \_ -> + fail "illegal QuasiQuote (allowed as expression or pattern only, used as a type)" + , quoteDec = \_ -> + fail "illegal QuasiQuote (allowed as expression or pattern only, used as a declaration)" + } +#endif + + +-- | Unpack an 'OsPath' to a list of 'OsChar'. +unpack :: OsPath -> [OsChar] +unpack = OS.unpack + + +-- | Pack a list of 'OsChar' to an 'OsPath'. +-- +-- Note that using this in conjunction with 'unsafeFromChar' to +-- convert from @[Char]@ to 'OsPath' is probably not what +-- you want, because it will truncate unicode code points. 
+pack :: [OsChar] -> OsPath +pack = OS.pack + diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Posix.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Posix.hs new file mode 100644 index 0000000000..fc62c86bbe --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Posix.hs @@ -0,0 +1,31 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE TemplateHaskellQuotes #-} +{-# LANGUAGE ViewPatterns #-} + +#undef WINDOWS +#define POSIX +#define IS_WINDOWS False +#define FILEPATH_NAME PosixPath +#define OSSTRING_NAME PosixString +#define WORD_NAME PosixChar + +#include "Common.hs" + +-- | QuasiQuote a 'PosixPath'. This accepts Unicode characters +-- and encodes as UTF-8. Runs 'isValid' on the input. +pstr :: QuasiQuoter +pstr = + QuasiQuoter + { quoteExp = \s -> do + ps <- either (fail . show) pure $ encodeWith (mkUTF8 ErrorOnCodingFailure) s + when (not $ isValid ps) $ fail ("filepath not valid: " ++ show ps) + lift ps + , quotePat = \s -> do + osp' <- either (fail . show) pure . 
encodeWith (mkUTF8 ErrorOnCodingFailure) $ s + when (not $ isValid osp') $ fail ("filepath not valid: " ++ show osp') + [p|((==) osp' -> True)|] + , quoteType = \_ -> + fail "illegal QuasiQuote (allowed as expression or pattern only, used as a type)" + , quoteDec = \_ -> + fail "illegal QuasiQuote (allowed as expression or pattern only, used as a declaration)" + } diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Posix/Internal.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Posix/Internal.hs new file mode 100644 index 0000000000..312f789b83 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Posix/Internal.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE CPP #-} + +#undef WINDOWS +#define OS_PATH +#define IS_WINDOWS False +#define MODULE_NAME Posix + +#include "../../FilePath/Internal.hs" diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Types.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Types.hs new file mode 100644 index 0000000000..6bf1b774ce --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Types.hs @@ -0,0 +1,42 @@ +{-# LANGUAGE CPP #-} + +module System.OsPath.Types + ( + -- * FilePath types + OsPath + , WindowsPath + , PosixPath + , PlatformPath + + -- * OsString reexports + , WindowsString + , PosixString + , WindowsChar + , PosixChar + , OsString + , OsChar + ) +where + +import System.OsString.Internal.Types + + +-- | Filepaths are @wchar_t*@ data on windows as passed to syscalls. +type WindowsPath = WindowsString + +-- | Filepaths are @char[]@ data on unix as passed to syscalls. +type PosixPath = PosixString + +#if defined(mingw32_HOST_OS) || defined(__MINGW32__) +-- | Ifdef around current platform (either 'WindowsPath' or 'PosixPath'). +type PlatformPath = WindowsPath +#else +-- | Ifdef around current platform (either 'WindowsPath' or 'PosixPath'). 
+type PlatformPath = PosixPath +#endif + + +-- | Type representing filenames\/pathnames. +-- +-- This type doesn't add any guarantees over 'OsString'. +type OsPath = OsString diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Windows.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Windows.hs new file mode 100644 index 0000000000..ab9efc03f5 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Windows.hs @@ -0,0 +1,32 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE TemplateHaskellQuotes #-} +{-# LANGUAGE ViewPatterns #-} + +#undef POSIX +#define IS_WINDOWS True +#define WINDOWS +#define FILEPATH_NAME WindowsPath +#define OSSTRING_NAME WindowsString +#define WORD_NAME WindowsChar + +#include "Common.hs" + + +-- | QuasiQuote a 'WindowsPath'. This accepts Unicode characters +-- and encodes as UTF-16LE. Runs 'isValid' on the input. +pstr :: QuasiQuoter +pstr = + QuasiQuoter + { quoteExp = \s -> do + ps <- either (fail . show) pure $ encodeWith (mkUTF16le ErrorOnCodingFailure) s + when (not $ isValid ps) $ fail ("filepath not valid: " ++ show ps) + lift ps + , quotePat = \s -> do + osp' <- either (fail . show) pure . 
encodeWith (mkUTF16le ErrorOnCodingFailure) $ s + when (not $ isValid osp') $ fail ("filepath not valid: " ++ show osp') + [p|((==) osp' -> True)|] + , quoteType = \_ -> + fail "illegal QuasiQuote (allowed as expression or pattern only, used as a type)" + , quoteDec = \_ -> + fail "illegal QuasiQuote (allowed as expression or pattern only, used as a declaration)" + } diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Windows/Internal.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Windows/Internal.hs new file mode 100644 index 0000000000..a32d812403 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/System/OsPath/Windows/Internal.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE CPP #-} + +#undef POSIX +#define WINDOWS +#define OS_PATH +#define IS_WINDOWS True +#define MODULE_NAME Windows + +#include "../../FilePath/Internal.hs" diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/bench/BenchFilePath.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/bench/BenchFilePath.hs new file mode 100644 index 0000000000..5319f1c0d1 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/bench/BenchFilePath.hs @@ -0,0 +1,245 @@ +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE QuasiQuotes #-} + +module Main where + +import System.OsPath.Types +import System.OsPath.Encoding ( ucs2le ) +import qualified System.OsString.Internal.Types as OST +import qualified Data.ByteString.Short as SBS + +import Test.Tasty.Bench + +import qualified System.FilePath.Posix as PF +import qualified System.FilePath.Posix as WF +import qualified System.OsString.Posix as OSP +import qualified System.OsString.Windows as WSP +import qualified System.OsPath.Posix as APF +import qualified System.OsPath.Windows as AWF + +main :: IO () +main = defaultMain + [ bgroup "filepath (string)" $ map (uncurry bench) + [("splitExtension (posix)" , nf 
PF.splitExtension posixPath) + ,("splitExtension (windows)" , nf WF.splitExtension windowsPath) + ,("takeExtension (posix)" , nf PF.takeExtension posixPath) + ,("takeExtension (windows)" , nf WF.takeExtension windowsPath) + ,("replaceExtension (posix)" , nf (PF.replaceExtension ".lol") posixPath) + ,("replaceExtension (windows)" , nf (WF.replaceExtension ".lol") windowsPath) + ,("dropExtension (posix)" , nf PF.dropExtension posixPath) + ,("dropExtension (windows)" , nf WF.dropExtension windowsPath) + ,("addExtension (posix)" , nf (PF.addExtension ".lol") posixPath) + ,("addExtension (windows)" , nf (WF.addExtension ".lol") windowsPath) + ,("hasExtension (posix)" , nf PF.hasExtension posixPath) + ,("hasExtension (windows)" , nf WF.hasExtension windowsPath) + ,("splitExtensions (posix)" , nf PF.splitExtensions posixPath) + ,("splitExtensions (windows)" , nf WF.splitExtensions windowsPath) + ,("dropExtensions (posix)" , nf PF.dropExtensions posixPath) + ,("dropExtensions (windows)" , nf WF.dropExtensions windowsPath) + ,("takeExtensions (posix)" , nf PF.takeExtensions posixPath) + ,("takeExtensions (windows)" , nf WF.takeExtensions windowsPath) + ,("replaceExtensions (posix)" , nf (PF.replaceExtensions ".lol") posixPath) + ,("replaceExtensions (windows)" , nf (WF.replaceExtensions ".lol") windowsPath) + ,("isExtensionOf (posix)" , nf (PF.isExtensionOf ".lol") posixPath) + ,("isExtensionOf (windows)" , nf (WF.isExtensionOf ".lol") windowsPath) + ,("stripExtension (posix)" , nf (PF.stripExtension ".lol") posixPath) + ,("stripExtension (windows)" , nf (WF.stripExtension ".lol") windowsPath) + + ,("splitFileName (posix)" , nf PF.splitFileName posixPath) + ,("splitFileName (windows)" , nf WF.splitFileName windowsPath) + ,("takeFileName (posix)" , nf PF.takeFileName posixPath) + ,("takeFileName (windows)" , nf WF.takeFileName windowsPath) + ,("replaceFileName (posix)" , nf (PF.replaceFileName "lol") posixPath) + ,("replaceFileName (windows)" , nf (WF.replaceFileName "lol") 
windowsPath) + ,("dropFileName (posix)" , nf PF.dropFileName posixPath) + ,("dropFileName (windows)" , nf WF.dropFileName windowsPath) + ,("takeBaseName (posix)" , nf PF.takeBaseName posixPath) + ,("takeBaseName (windows)" , nf WF.takeBaseName windowsPath) + ,("replaceBaseName (posix)" , nf (PF.replaceBaseName "lol") posixPath) + ,("replaceBaseName (windows)" , nf (WF.replaceBaseName "lol") windowsPath) + ,("takeDirectory (posix)" , nf PF.takeDirectory posixPath) + ,("takeDirectory (windows)" , nf WF.takeDirectory windowsPath) + ,("replaceDirectory (posix)" , nf (PF.replaceDirectory "lol") posixPath) + ,("replaceDirectory (windows)" , nf (WF.replaceDirectory "lol") windowsPath) + ,("combine (posix)" , nf (PF.combine "lol") posixPath) + ,("combine (windows)" , nf (WF.combine "lol") windowsPath) + ,("splitPath (posix)" , nf PF.splitPath posixPath) + ,("splitPath (windows)" , nf WF.splitPath windowsPath) + ,("joinPath (posix)" , nf PF.joinPath (PF.splitPath posixPath)) + ,("joinPath (windows)" , nf WF.joinPath (WF.splitPath windowsPath)) + ,("splitDirectories (posix)" , nf PF.splitDirectories posixPath) + ,("splitDirectories (windows)" , nf WF.splitDirectories windowsPath) + + ,("splitDrive (posix)" , nf PF.splitDrive posixPath) + ,("splitDrive (windows)" , nf WF.splitDrive windowsPath) + ,("joinDrive (posix)" , nf (PF.joinDrive "/") posixPath) + ,("joinDrive (windows)" , nf (WF.joinDrive "C:\\") windowsPath) + ,("takeDrive (posix)" , nf PF.takeDrive posixPath) + ,("takeDrive (windows)" , nf WF.takeDrive windowsPath) + ,("hasDrive (posix)" , nf PF.hasDrive posixPath) + ,("hasDrive (windows)" , nf WF.hasDrive windowsPath) + ,("dropDrive (posix)" , nf PF.dropDrive posixPath) + ,("dropDrive (windows)" , nf WF.dropDrive windowsPath) + ,("isDrive (posix)" , nf PF.isDrive posixPath) + ,("isDrive (windows)" , nf WF.isDrive windowsPath) + + ,("hasTrailingPathSeparator (posix)" , nf PF.hasTrailingPathSeparator posixPath) + ,("hasTrailingPathSeparator (windows)" , nf 
WF.hasTrailingPathSeparator windowsPath) + ,("addTrailingPathSeparator (posix)" , nf PF.addTrailingPathSeparator posixPath) + ,("addTrailingPathSeparator (windows)" , nf WF.addTrailingPathSeparator windowsPath) + ,("dropTrailingPathSeparator (posix)" , nf PF.addTrailingPathSeparator posixPath) + ,("dropTrailingPathSeparator (windows)" , nf WF.addTrailingPathSeparator windowsPath) + + ,("normalise (posix)" , nf PF.normalise posixPath) + ,("normalise (windows)" , nf WF.normalise windowsPath) + ,("equalFilePath (posix)" , nf (PF.equalFilePath "abc/def/zs") posixPath) + ,("equalFilePath (windows)" , nf (WF.equalFilePath "abc/def/zs") windowsPath) + ,("makeRelative (posix)" , nf (PF.makeRelative "abc/def/zs") posixPath) + ,("makeRelative (windows)" , nf (WF.makeRelative "abc/def/zs") windowsPath) + ,("isRelative (posix)" , nf PF.isRelative posixPath) + ,("isRelative (windows)" , nf WF.isRelative windowsPath) + ,("isAbsolute (posix)" , nf PF.isAbsolute posixPath) + ,("isAbsolute (windows)" , nf WF.isAbsolute windowsPath) + ,("isValid (posix)" , nf PF.isValid posixPath) + ,("isValid (windows)" , nf WF.isValid windowsPath) + ,("makeValid (posix)" , nf PF.makeValid posixPath) + ,("makeValid (windows)" , nf WF.makeValid windowsPath) + + ,("splitSearchPath (posix)" , nf PF.splitSearchPath posixSearchPath) + ,("splitSearchPath (windows)" , nf WF.splitSearchPath windowsSearchPath) + ] + + , bgroup "filepath (AFPP)" $ map (uncurry bench) + [ ("splitExtension (posix)" , nf APF.splitExtension posixPathAFPP) + , ("splitExtension (windows)" , nf AWF.splitExtension windowsPathAFPP) + , ("takeExtension (posix)" , nf APF.takeExtension posixPathAFPP) + , ("takeExtension (windows)" , nf AWF.takeExtension windowsPathAFPP) + , ("replaceExtension (posix)" , nf (APF.replaceExtension [OSP.pstr|.lol|]) posixPathAFPP) + , ("replaceExtension (windows)" , nf (AWF.replaceExtension [WSP.pstr|.lol|]) windowsPathAFPP) + , ("dropExtension (posix)" , nf APF.dropExtension posixPathAFPP) + , 
("dropExtension (windows)" , nf AWF.dropExtension windowsPathAFPP) + , ("addExtension (posix)" , nf (APF.addExtension [OSP.pstr|.lol|]) posixPathAFPP) + , ("addExtension (windows)" , nf (AWF.addExtension [WSP.pstr|.lol|]) windowsPathAFPP) + , ("hasExtension (posix)" , nf APF.hasExtension posixPathAFPP) + , ("hasExtension (windows)" , nf AWF.hasExtension windowsPathAFPP) + , ("splitExtensions (posix)" , nf APF.splitExtensions posixPathAFPP) + , ("splitExtensions (windows)" , nf AWF.splitExtensions windowsPathAFPP) + , ("dropExtensions (posix)" , nf APF.dropExtensions posixPathAFPP) + , ("dropExtensions (windows)" , nf AWF.dropExtensions windowsPathAFPP) + , ("takeExtensions (posix)" , nf APF.takeExtensions posixPathAFPP) + , ("takeExtensions (windows)" , nf AWF.takeExtensions windowsPathAFPP) + , ("replaceExtensions (posix)" , nf (APF.replaceExtensions [OSP.pstr|.lol|]) posixPathAFPP) + , ("replaceExtensions (windows)" , nf (AWF.replaceExtensions [WSP.pstr|.lol|]) windowsPathAFPP) + , ("isExtensionOf (posix)" , nf (APF.isExtensionOf [OSP.pstr|.lol|]) posixPathAFPP) + , ("isExtensionOf (windows)" , nf (AWF.isExtensionOf [WSP.pstr|.lol|]) windowsPathAFPP) + , ("stripExtension (posix)" , nf (APF.stripExtension [OSP.pstr|.lol|]) posixPathAFPP) + , ("stripExtension (windows)" , nf (AWF.stripExtension [WSP.pstr|.lol|]) windowsPathAFPP) + + , ("splitFileName (posix)" , nf APF.splitFileName posixPathAFPP) + , ("splitFileName (windows)" , nf AWF.splitFileName windowsPathAFPP) + , ("takeFileName (posix)" , nf APF.takeFileName posixPathAFPP) + , ("takeFileName (windows)" , nf AWF.takeFileName windowsPathAFPP) + , ("replaceFileName (posix)" , nf (APF.replaceFileName [OSP.pstr|lol|]) posixPathAFPP) + , ("replaceFileName (windows)" , nf (AWF.replaceFileName [WSP.pstr|lol|]) windowsPathAFPP) + , ("dropFileName (posix)" , nf APF.dropFileName posixPathAFPP) + , ("dropFileName (windows)" , nf AWF.dropFileName windowsPathAFPP) + , ("takeBaseName (posix)" , nf APF.takeBaseName 
posixPathAFPP) + , ("takeBaseName (windows)" , nf AWF.takeBaseName windowsPathAFPP) + , ("replaceBaseName (posix)" , nf (APF.replaceBaseName [OSP.pstr|lol|]) posixPathAFPP) + , ("replaceBaseName (windows)" , nf (AWF.replaceBaseName [WSP.pstr|lol|]) windowsPathAFPP) + , ("takeDirectory (posix)" , nf APF.takeDirectory posixPathAFPP) + , ("takeDirectory (windows)" , nf AWF.takeDirectory windowsPathAFPP) + , ("replaceDirectory (posix)" , nf (APF.replaceDirectory [OSP.pstr|lol|]) posixPathAFPP) + , ("replaceDirectory (windows)" , nf (AWF.replaceDirectory [WSP.pstr|lol|]) windowsPathAFPP) + , ("combine (posix)" , nf (APF.combine [OSP.pstr|lol|]) posixPathAFPP) + , ("combine (windows)" , nf (AWF.combine [WSP.pstr|lol|]) windowsPathAFPP) + , ("splitPath (posix)" , nf APF.splitPath posixPathAFPP) + , ("splitPath (windows)" , nf AWF.splitPath windowsPathAFPP) + , ("joinPath (posix)" , nf APF.joinPath (APF.splitPath posixPathAFPP)) + , ("joinPath (windows)" , nf AWF.joinPath (AWF.splitPath windowsPathAFPP)) + , ("splitDirectories (posix)" , nf APF.splitDirectories posixPathAFPP) + , ("splitDirectories (windows)" , nf AWF.splitDirectories windowsPathAFPP) + + , ("splitDrive (posix)" , nf APF.splitDrive posixPathAFPP) + , ("splitDrive (windows)" , nf AWF.splitDrive windowsPathAFPP) + , ("joinDrive (posix)" , nf (APF.joinDrive [OSP.pstr|/|]) posixPathAFPP) + , ("joinDrive (windows)" , nf (AWF.joinDrive [WSP.pstr|C:\|]) windowsPathAFPP) + , ("takeDrive (posix)" , nf APF.takeDrive posixPathAFPP) + , ("takeDrive (windows)" , nf AWF.takeDrive windowsPathAFPP) + , ("hasDrive (posix)" , nf APF.hasDrive posixPathAFPP) + , ("hasDrive (windows)" , nf AWF.hasDrive windowsPathAFPP) + , ("dropDrive (posix)" , nf APF.dropDrive posixPathAFPP) + , ("dropDrive (windows)" , nf AWF.dropDrive windowsPathAFPP) + , ("isDrive (posix)" , nf APF.isDrive posixPathAFPP) + , ("isDrive (windows)" , nf AWF.isDrive windowsPathAFPP) + + , ("hasTrailingPathSeparator (posix)" , nf APF.hasTrailingPathSeparator 
posixPathAFPP) + , ("hasTrailingPathSeparator (windows)" , nf AWF.hasTrailingPathSeparator windowsPathAFPP) + , ("addTrailingPathSeparator (posix)" , nf APF.addTrailingPathSeparator posixPathAFPP) + , ("addTrailingPathSeparator (windows)" , nf AWF.addTrailingPathSeparator windowsPathAFPP) + , ("dropTrailingPathSeparator (posix)" , nf APF.addTrailingPathSeparator posixPathAFPP) + , ("dropTrailingPathSeparator (windows)" , nf AWF.addTrailingPathSeparator windowsPathAFPP) + + , ("normalise (posix)" , nf APF.normalise posixPathAFPP) + , ("normalise (windows)" , nf AWF.normalise windowsPathAFPP) + , ("equalFilePath (posix)" , nf (APF.equalFilePath [OSP.pstr|abc/def/zs|]) posixPathAFPP) + , ("equalFilePath (windows)" , nf (AWF.equalFilePath [WSP.pstr|abc/def/zs|]) windowsPathAFPP) + , ("makeRelative (posix)" , nf (APF.makeRelative [OSP.pstr|abc/def/zs|]) posixPathAFPP) + , ("makeRelative (windows)" , nf (AWF.makeRelative [WSP.pstr|abc/def/zs|]) windowsPathAFPP) + , ("isRelative (posix)" , nf APF.isRelative posixPathAFPP) + , ("isRelative (windows)" , nf AWF.isRelative windowsPathAFPP) + , ("isAbsolute (posix)" , nf APF.isAbsolute posixPathAFPP) + , ("isAbsolute (windows)" , nf AWF.isAbsolute windowsPathAFPP) + , ("isValid (posix)" , nf APF.isValid posixPathAFPP) + , ("isValid (windows)" , nf AWF.isValid windowsPathAFPP) + , ("makeValid (posix)" , nf APF.makeValid posixPathAFPP) + , ("makeValid (windows)" , nf AWF.makeValid windowsPathAFPP) + + , ("splitSearchPath (posix)" , nf APF.splitSearchPath posixSearchPathAFPP) + , ("splitSearchPath (windows)" , nf AWF.splitSearchPath windowsSearchPathAFPP) + ] + + , bgroup "encoding/decoding" $ map (uncurry bench) + [ ("decodeUtf (posix)" , nf (APF.decodeUtf @Maybe) posixPathAFPP) + , ("decodeUtf (windows)" , nf (AWF.decodeUtf @Maybe) windowsPathAFPP) + , ("decodeWith (windows)" , nf (AWF.decodeWith ucs2le) windowsPathAFPP) + + , ("encodeUtf (posix)" , nf (APF.encodeUtf @Maybe) posixPath) + , ("encodeUtf (windows)" , nf 
(AWF.encodeUtf @Maybe) windowsPath) + , ("encodeWith (windows)" , nf (AWF.encodeWith ucs2le) windowsPath) + + , ("unpack PlatformString (posix)" , nf APF.unpack posixPathAFPP) + , ("unpack PlatformString (windows)" , nf AWF.unpack windowsPathAFPP) + , ("pack PlatformString (posix)" , nf APF.pack (APF.unpack posixPathAFPP)) + , ("pack PlatformString (windows)" , nf AWF.pack (AWF.unpack windowsPathAFPP)) + + , ("fromBytes (posix)" , nf (OSP.fromBytes @Maybe) (SBS.fromShort . OST.getPosixString $ posixPathAFPP)) + , ("fromBytes (windows)" , nf (WSP.fromBytes @Maybe) (SBS.fromShort . OST.getWindowsString $ windowsPathAFPP)) + ] + ] + + +posixPath :: FilePath +posixPath = "/foo/bar/bath/baz/baz/tz/fooooooooooooooo/laaaaaaaaaaaaaaa/baaaaaaaaaaaaar/zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz/zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz/kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk/kkkkkkkkkkkkkkkkkk/h/h/h/a/s/r/a/h/gt/r/r/r/s/s.txt" + +windowsPath :: FilePath +windowsPath = "C:\\foo\\bar\\bath\\baz\\baz\\tz\\fooooooooooooooo\\laaaaaaaaaaaaaaa\\baaaaaaaaaaaaar\\zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz\\zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz\\kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk\\kkkkkkkkkkkkkkkkkk\\h\\h\\h\\a\\s\\r\\a\\h\\gt\\r\\r\\r\\s\\s.txt" + +posixPathAFPP :: PosixPath +posixPathAFPP = [OSP.pstr|/foo/bar/bath/baz/baz/tz/fooooooooooooooo/laaaaaaaaaaaaaaa/baaaaaaaaaaaaar/zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz/zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz/kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk/kkkkkkkkkkkkkkkkkk/h/h/h/a/s/r/a/h/gt/r/r/r/s/s.txt|] + +windowsPathAFPP :: WindowsPath +windowsPathAFPP = 
[WSP.pstr|C:\\foo\\bar\\bath\\baz\\baz\\tz\\fooooooooooooooo\\laaaaaaaaaaaaaaa\\baaaaaaaaaaaaar\\zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz\\zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz\\kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk\\kkkkkkkkkkkkkkkkkk\\h\\h\\h\\a\\s\\r\\a\\h\\gt\\r\\r\\r\\s\\s.txt|] + +posixSearchPath :: FilePath +posixSearchPath = ":foo:bar:bath:baz:baz:tz:fooooooooooooooo:laaaaaaaaaaaaaaa:baaaaaaaaaaaaar:zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz:zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz:kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk:kkkkkkkkkkkkkkkkkk:h:h:h:a:s:r:a:h:gt:r:r:r:s:s.txt" + +windowsSearchPath :: FilePath +windowsSearchPath = "foo;bar;bath;baz;baz;tz;fooooooooooooooo;laaaaaaaaaaaaaaa;baaaaaaaaaaaaar;zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz;zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz;kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk;kkkkkkkkkkkkkkkkkk;h;h;h;a;s;r;a;h;gt;r;r;r;s;s.txt" + +posixSearchPathAFPP :: PosixString +posixSearchPathAFPP = [OSP.pstr|:foo:bar:bath:baz:baz:tz:fooooooooooooooo:laaaaaaaaaaaaaaa:baaaaaaaaaaaaar:zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz:zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz:kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk:kkkkkkkkkkkkkkkkkk:h:h:h:a:s:r:a:h:gt:r:r:r:s:s.txt|] + +windowsSearchPathAFPP :: WindowsString +windowsSearchPathAFPP = [WSP.pstr|foo;bar;bath;baz;baz;tz;fooooooooooooooo;laaaaaaaaaaaaaaa;baaaaaaaaaaaaar;zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz;zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz;kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk;kkkkkkkkkkkkkkkkkk;h;h;h;a;s;r;a;h;gt;r;r;r;s;s.txt|] diff --git 
a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/changelog.md b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/changelog.md new file mode 100644 index 0000000000..08202f3e0f --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/changelog.md @@ -0,0 +1,158 @@ +# Changelog for [`filepath` package](http://hackage.haskell.org/package/filepath) + +_Note: below all `FilePath` values are unquoted, so `\\` really means two backslashes._ + +## 1.5.4.0 *Nov 2024* + +* Don't catch async exceptions in internal functions wrt https://github.com/haskell/os-string/issues/22 + +## 1.5.3.0 *Jun 2024* + +* Adjust for `encodeFS`/`decodedFS` deprecation in os-string + +## 1.5.2.0 *Jan 2024* + +* Fix a bug in `[splitFileName](https://github.com/haskell/filepath/issues/219)` +* make `osp :: QuasiQuoter` valid as a pattern wrt [#210](https://github.com/haskell/filepath/pull/210) +* Add `unsafeEncodeUtf` from os-string + +## 1.5.0.0 *Nov 2023* + +* remove `OsString` modules + +## 1.4.200.0 *Nov 2023* + +* deprecate `OsString` modules + +## 1.4.100.4 *Jul 2023* + +* Fix isInfixOf and breakSubString in Word16, wrt [#195](https://github.com/haskell/filepath/issues/195) + +## 1.4.100.3 *Feb 2023* + +* Fix a regression in `splitFileName` wrt [#189](https://github.com/haskell/filepath/pull/189) + +## 1.4.100.2 *Feb 2023* + +* Speed up `splitFileName`, `splitExtension`, `readDriveLetter` and various other helpers (up to 20x faster) by @Bodigrim + +## 1.4.100.1 *Feb 2023* + +* Fix regression in `System.FilePath.Windows.normalise` wrt [#187](https://github.com/haskell/filepath/issues/187) +* Fix tests on GHC 9.4.4 +* Avoid head and tail + +## 1.4.100.0 *July 2022* + +Implementation of the [Abstract FilePath Proposal](https://gitlab.haskell.org/ghc/ghc/-/wikis/proposal/abstract-file-path) +in user-space as a separate type. 
+ + Introduction to the new API is explained [in this blog post](https://hasufell.github.io/posts/2022-06-29-fixing-haskell-filepaths.html). + + ## 1.4.2.2 *Dec 2021* + + This release is purely a documentation release, fixing the broken haddock links. + + ### Affected users + + This release affects users who apply downstream patches to `System.FilePath.Internal`, + since `System.FilePath.Posix` and `System.FilePath.Windows` are now generated via `make cpp` + during development. + + To make your patch apply, either apply it to `System.FilePath.Posix` and `System.FilePath.Windows` + instead or run `make cpp` after applying your patch. + + ### Changes + + * Document relation between `joinPath` and `(</>)` wrt [#82](https://github.com/haskell/filepath/issues/82), [#86](https://github.com/haskell/filepath/issues/86) + * Clarify that `normalise` does not remove `..` wrt [#86](https://github.com/haskell/filepath/issues/86) + * Make clear that `equalFilePath` does not expand `..` wrt [#87](https://github.com/haskell/filepath/issues/87) + * Fix haddock source links by manually cpping wrt [#81](https://github.com/haskell/filepath/issues/81) + * Make export list in `System.FilePath` explicit to get haddocks on the landing module + + + ## 1.4.2.1 *Jul 2018* + + * Bundled with GHC 8.6.1 + + ## 1.4.2 *Jan 2018* + + * Bundled with GHC 8.4.1 + + * Add `isExtensionOf` function. + + ## 1.4.1.2 *Feb 2017* + + * Bundled with GHC 8.2.1 + + ## 1.4.1.1 *Nov 2016* + + * Bundled with GHC 8.0.2 + + * Documentation improvements + + ## 1.4.1.0 *Dec 2015* + + * Bundled with GHC 8.0.1 + + * Add `replaceExtensions` and `stripExtension` functions. + + * Make `isValid` detect more invalid Windows paths, e.g. `nul .txt` and `foo\nbar`. + + * Improve the documentation. + + * Bug fix: `isValid "\0"` now returns `False`, instead of `True` + + ## 1.4.0.0 *Mar 2015* + + * Bundled with GHC 7.10.1 + + * New function: Add `-<.>` as an alias for `replaceExtension`. 
+ + * Semantic change: `joinDrive /foo bar` now returns `/foo/bar`, instead of `/foobar` + + * Semantic change: on Windows, `splitSearchPath File1;\"File 2\"` now returns `[File1,File2]` instead of `[File1,\"File2\"]` + + * Bug fix: on Posix systems, `normalise //home` now returns `/home`, instead of `//home` + + * Bug fix: `normalise /./` now returns `/` on Posix and `\` on Windows, instead of `//` and `\\` + + * Bug fix: `isDrive ""` now returns `False`, instead of `True` + + * Bug fix: on Windows, `dropTrailingPathSeparator /` now returns `/` unchanged, instead of the normalised `\` + + * Bug fix: on Windows, `equalFilePath C:\ C:` now returns `False`, instead of `True` + + * Bug fix: on Windows, `isValid \\\foo` now returns `False`, instead of `True` + + * Bug fix: on Windows, `isValid \\?\D:file` now returns `False`, instead of `True` + + * Bug fix: on Windows, `normalise \` now returns `\` unchanged, instead of `\\` + + * Bug fix: on Windows, `normalise C:.\` now returns `C:`, instead of `C:\\` + + * Bug fix: on Windows, `normalise //server/test` now returns `\\server\test`, instead of `//server/test` unchanged + + * Bug fix: on Windows, `makeRelative / //` now returns `//`, instead of `""` + +## 1.3.0.2 *Mar 2014* + + * Bundled with GHC 7.8.1 + + * Update to Cabal 1.10 format + + * Minor Haddock cleanups + +## 1.3.0.1 *Sep 2012* + + * Bundled with GHC 7.6.1 + + * No changes + +## 1.3.0.0 *Feb 2012* + + * Bundled with GHC 7.4.1 + + * Add support for SafeHaskell + + * Bug fix: `normalise /` now returns `/`, instead of `/.` diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/filepath.cabal b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/filepath.cabal new file mode 100644 index 0000000000..82f423aba7 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/filepath.cabal @@ -0,0 +1,184 @@ +cabal-version: 2.2 +name: filepath +version: 1.5.4.0 + +-- NOTE: Don't forget to update ./changelog.md +license: 
BSD-3-Clause +license-file: LICENSE +author: Neil Mitchell +maintainer: Julian Ospald +copyright: Neil Mitchell 2005-2020, Julian Ospald 2021-2022 +bug-reports: https://github.com/haskell/filepath/issues +homepage: + https://github.com/haskell/filepath/blob/master/README.md + +category: System +build-type: Simple +synopsis: Library for manipulating FilePaths in a cross platform way. +tested-with: + GHC ==8.6.5 + || ==8.8.4 + || ==8.10.7 + || ==9.0.2 + || ==9.2.8 + || ==9.4.8 + || ==9.6.3 + || ==9.8.1 + +description: + This package provides functionality for manipulating @FilePath@ values, and is shipped with . It provides two variants for filepaths: + . + 1. legacy filepaths: @type FilePath = String@ + . + 2. operating system abstracted filepaths (@OsPath@): internally unpinned @ShortByteString@ (platform-dependent encoding) + . + It is recommended to use @OsPath@ when possible, because it is more correct. + . + For each variant there are three main modules: + . + * "System.FilePath.Posix" / "System.OsPath.Posix" manipulates POSIX\/Linux style @FilePath@ values (with @\/@ as the path separator). + . + * "System.FilePath.Windows" / "System.OsPath.Windows" manipulates Windows style @FilePath@ values (with either @\\@ or @\/@ as the path separator, and deals with drives). + . + * "System.FilePath" / "System.OsPath" for dealing with current platform-specific filepaths + . + For more powerful string manipulation of @OsPath@, you can use the (@OsPath@ is a type synonym for @OsString@). + . + An introduction into the new API can be found in this + . + Code examples for the new API can be found . 
+ +extra-source-files: + Generate.hs + Makefile + System/FilePath/Internal.hs + System/OsPath/Common.hs + +extra-doc-files: + changelog.md + HACKING.md + README.md + +flag cpphs + description: Use cpphs (fixes haddock source links) + default: False + manual: True + +source-repository head + type: git + location: https://github.com/haskell/filepath + +library + exposed-modules: + System.FilePath + System.FilePath.Posix + System.FilePath.Windows + System.OsPath + System.OsPath.Encoding + System.OsPath.Internal + System.OsPath.Posix + System.OsPath.Posix.Internal + System.OsPath.Types + System.OsPath.Windows + System.OsPath.Windows.Internal + + other-extensions: + CPP + PatternGuards + + if impl(ghc >=7.2) + other-extensions: Safe + + default-language: Haskell2010 + build-depends: + , base >=4.12.0.0 && <4.22 + , bytestring >=0.11.3.0 + , deepseq + , exceptions + , template-haskell + , os-string >=2.0.1 + + ghc-options: -Wall + + if flag(cpphs) + ghc-options: -pgmPcpphs -optP--cpp + build-tool-depends: cpphs:cpphs -any + +test-suite filepath-tests + type: exitcode-stdio-1.0 + main-is: Test.hs + hs-source-dirs: tests tests/filepath-tests + other-modules: + TestGen + TestUtil + + build-depends: + , base + , bytestring >=0.11.3.0 + , filepath + , os-string >=2.0.1 + , tasty + , tasty-quickcheck + + default-language: Haskell2010 + ghc-options: -Wall + +test-suite filepath-equivalent-tests + default-language: Haskell2010 + ghc-options: -Wall + type: exitcode-stdio-1.0 + main-is: TestEquiv.hs + hs-source-dirs: tests tests/filepath-equivalent-tests + other-modules: + Legacy.System.FilePath + Legacy.System.FilePath.Posix + Legacy.System.FilePath.Windows + TestUtil + Gen + + build-depends: + , base + , bytestring >=0.11.3.0 + , filepath + , generic-random + , generic-deriving + , os-string >=2.0.1 + , tasty + , tasty-quickcheck + +test-suite abstract-filepath + default-language: Haskell2010 + ghc-options: -Wall + type: exitcode-stdio-1.0 + main-is: Test.hs + hs-source-dirs: 
tests tests/abstract-filepath + other-modules: + Arbitrary + OsPathSpec + TestUtil + + build-depends: + , base + , bytestring >=0.11.3.0 + , deepseq + , filepath + , os-string >=2.0.1 + , quickcheck-classes-base ^>=0.6.2 + , tasty + , tasty-quickcheck + +benchmark bench-filepath + default-language: Haskell2010 + ghc-options: -Wall + type: exitcode-stdio-1.0 + main-is: BenchFilePath.hs + hs-source-dirs: bench + build-depends: + , base + , bytestring >=0.11.3.0 + , deepseq + , filepath + , os-string >=2.0.1 + , tasty-bench + + ghc-options: -with-rtsopts=-A32m diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/TestUtil.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/TestUtil.hs new file mode 100644 index 0000000000..f238f10eb4 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/TestUtil.hs @@ -0,0 +1,159 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE TypeApplications #-} +{-# OPTIONS_GHC -Wno-orphans #-} + +module TestUtil( + module TestUtil, + module Test.Tasty.QuickCheck, + module Data.List, + module Data.Maybe + ) where + +import Test.Tasty.QuickCheck hiding ((==>)) +import Data.ByteString.Short (ShortByteString) +import Data.List +import Data.Maybe +import Control.Monad +import qualified System.FilePath.Windows as W +import qualified System.FilePath.Posix as P +#ifdef GHC_MAKE +import qualified System.OsPath.Windows.Internal as AFP_W +import qualified System.OsPath.Posix.Internal as AFP_P +#else +import qualified System.OsPath.Windows as AFP_W +import qualified System.OsPath.Posix as AFP_P +import System.OsPath.Types +#endif +import System.OsString.Internal.Types +import System.OsString.Encoding.Internal +import GHC.IO.Encoding.UTF16 ( mkUTF16le ) +import GHC.IO.Encoding.UTF8 ( mkUTF8 ) +import GHC.IO.Encoding.Failure + + +infixr 0 ==> +(==>) :: Bool -> Bool -> Bool +a ==> b = not a || b + + +newtype QFilePathValidW = QFilePathValidW FilePath deriving Show + +instance Arbitrary 
QFilePathValidW where + arbitrary = fmap (QFilePathValidW . W.makeValid) arbitraryFilePath + shrink (QFilePathValidW x) = shrinkValid QFilePathValidW W.makeValid x + +newtype QFilePathValidP = QFilePathValidP FilePath deriving Show + +instance Arbitrary QFilePathValidP where + arbitrary = fmap (QFilePathValidP . P.makeValid) arbitraryFilePath + shrink (QFilePathValidP x) = shrinkValid QFilePathValidP P.makeValid x + +newtype QFilePath = QFilePath FilePath deriving Show + +instance Arbitrary QFilePath where + arbitrary = fmap QFilePath arbitraryFilePath + shrink (QFilePath x) = shrinkValid QFilePath id x + + +-- | Generate an arbitrary FilePath use a few special (interesting) characters. +arbitraryFilePath :: Gen FilePath +arbitraryFilePath = sized $ \n -> do + k <- choose (0,n) + replicateM k $ elements "?./:\\a ;_" + +-- | Shrink, but also apply a validity function. Try and make shorter, or use more +-- @a@ (since @a@ is pretty dull), but make sure you terminate even after valid. +shrinkValid :: (FilePath -> a) -> (FilePath -> FilePath) -> FilePath -> [a] +shrinkValid wrap valid o = + [ wrap y + | y <- map valid $ shrinkList (\x -> ['a' | x /= 'a']) o + , length y < length o || (length y == length o && countA y > countA o)] + where countA = length . filter (== 'a') + +encodeUtf16LE :: String -> ShortByteString +encodeUtf16LE = either (error . show) id . encodeWithTE (mkUTF16le TransliterateCodingFailure) + +encodeUtf8 :: String -> ShortByteString +encodeUtf8 = either (error . show) id . encodeWithTE (mkUTF8 TransliterateCodingFailure) + +decodeUtf16LE :: ShortByteString -> String +decodeUtf16LE = either (error . show) id . decodeWithTE (mkUTF16le TransliterateCodingFailure) + +decodeUtf8 :: ShortByteString -> String +decodeUtf8 = either (error . show) id . 
decodeWithTE (mkUTF8 TransliterateCodingFailure) + +#ifdef GHC_MAKE +newtype QFilePathValidAFP_W = QFilePathValidAFP_W ShortByteString deriving Show + +instance Arbitrary QFilePathValidAFP_W where + arbitrary = fmap (QFilePathValidAFP_W . AFP_W.makeValid . encodeUtf16LE) arbitraryFilePath + shrink (QFilePathValidAFP_W x) = shrinkValid (QFilePathValidAFP_W . encodeUtf16LE) (decodeUtf16LE . AFP_W.makeValid . encodeUtf16LE) (decodeUtf16LE x) + +newtype QFilePathValidAFP_P = QFilePathValidAFP_P ShortByteString deriving Show + +instance Arbitrary QFilePathValidAFP_P where + arbitrary = fmap (QFilePathValidAFP_P . AFP_P.makeValid . encodeUtf8) arbitraryFilePath + shrink (QFilePathValidAFP_P x) = shrinkValid (QFilePathValidAFP_P . encodeUtf8) (decodeUtf8 . AFP_P.makeValid . encodeUtf8) (decodeUtf8 x) + +newtype QFilePathAFP_W = QFilePathAFP_W ShortByteString deriving Show +newtype QFilePathAFP_P = QFilePathAFP_P ShortByteString deriving Show + +instance Arbitrary QFilePathAFP_W where + arbitrary = fmap (QFilePathAFP_W . encodeUtf16LE) arbitraryFilePath + shrink (QFilePathAFP_W x) = shrinkValid (QFilePathAFP_W . encodeUtf16LE) id (decodeUtf16LE x) + +instance Arbitrary QFilePathAFP_P where + arbitrary = fmap (QFilePathAFP_P . encodeUtf8) arbitraryFilePath + shrink (QFilePathAFP_P x) = shrinkValid (QFilePathAFP_P . encodeUtf8) id (decodeUtf8 x) + +newtype QFilePathsAFP_W = QFilePathsAFP_W [ShortByteString] deriving Show +newtype QFilePathsAFP_P = QFilePathsAFP_P [ShortByteString] deriving Show + +instance Arbitrary QFilePathsAFP_W where + arbitrary = fmap (QFilePathsAFP_W . fmap encodeUtf16LE) (listOf arbitraryFilePath) + +instance Arbitrary QFilePathsAFP_P where + arbitrary = fmap (QFilePathsAFP_P . fmap encodeUtf8) (listOf arbitraryFilePath) + +#else + + +newtype QFilePathValidAFP_W = QFilePathValidAFP_W WindowsPath deriving Show + +instance Arbitrary QFilePathValidAFP_W where + arbitrary = fmap (QFilePathValidAFP_W . AFP_W.makeValid . WS . 
encodeUtf16LE) arbitraryFilePath + shrink (QFilePathValidAFP_W x) = shrinkValid (QFilePathValidAFP_W . WS . encodeUtf16LE) (decodeUtf16LE . getWindowsString . AFP_W.makeValid . WS . encodeUtf16LE) (decodeUtf16LE . getWindowsString $ x) + +newtype QFilePathValidAFP_P = QFilePathValidAFP_P PosixPath deriving Show + +instance Arbitrary QFilePathValidAFP_P where + arbitrary = fmap (QFilePathValidAFP_P . AFP_P.makeValid . PS . encodeUtf8) arbitraryFilePath + shrink (QFilePathValidAFP_P x) = shrinkValid (QFilePathValidAFP_P . PS . encodeUtf8) (decodeUtf8 . getPosixString . AFP_P.makeValid . PS . encodeUtf8) (decodeUtf8 . getPosixString $ x) + +newtype QFilePathAFP_W = QFilePathAFP_W WindowsPath deriving Show +newtype QFilePathAFP_P = QFilePathAFP_P PosixPath deriving Show + +instance Arbitrary QFilePathAFP_W where + arbitrary = fmap (QFilePathAFP_W . WS . encodeUtf16LE) arbitraryFilePath + shrink (QFilePathAFP_W x) = shrinkValid (QFilePathAFP_W . WS . encodeUtf16LE) id (decodeUtf16LE . getWindowsString $ x) + +instance Arbitrary QFilePathAFP_P where + arbitrary = fmap (QFilePathAFP_P . PS . encodeUtf8) arbitraryFilePath + shrink (QFilePathAFP_P x) = shrinkValid (QFilePathAFP_P . PS . encodeUtf8) id (decodeUtf8 . getPosixString $ x) + +newtype QFilePathsAFP_W = QFilePathsAFP_W [WindowsPath] deriving Show +newtype QFilePathsAFP_P = QFilePathsAFP_P [PosixPath] deriving Show + +instance Arbitrary QFilePathsAFP_W where + arbitrary = fmap (QFilePathsAFP_W . fmap (WS . encodeUtf16LE)) (listOf arbitraryFilePath) + +instance Arbitrary QFilePathsAFP_P where + arbitrary = fmap (QFilePathsAFP_P . fmap (PS . 
encodeUtf8)) (listOf arbitraryFilePath) + +instance Arbitrary WindowsChar where + arbitrary = WW <$> arbitrary + +instance Arbitrary PosixChar where + arbitrary = PW <$> arbitrary +#endif + diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/abstract-filepath/Arbitrary.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/abstract-filepath/Arbitrary.hs new file mode 100644 index 0000000000..5753523413 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/abstract-filepath/Arbitrary.hs @@ -0,0 +1,69 @@ +{-# OPTIONS_GHC -Wno-orphans #-} + +module Arbitrary where + +import Data.Char +import Data.Maybe +import System.OsString +import System.OsString.Internal.Types +import qualified System.OsString.Posix as Posix +import qualified System.OsString.Windows as Windows +import Data.ByteString ( ByteString ) +import qualified Data.ByteString as ByteString +import Test.Tasty.QuickCheck + + +instance Arbitrary OsString where + arbitrary = fmap fromJust $ encodeUtf <$> listOf filepathChar + +instance Arbitrary PosixString where + arbitrary = fmap fromJust $ Posix.encodeUtf <$> listOf filepathChar + +instance Arbitrary WindowsString where + arbitrary = fmap fromJust $ Windows.encodeUtf <$> listOf filepathChar + + +newtype NonNullString = NonNullString { nonNullString :: String } + deriving Show + +instance Arbitrary NonNullString where + arbitrary = NonNullString <$> listOf filepathChar + +filepathChar :: Gen Char +filepathChar = arbitraryUnicodeChar `suchThat` (\c -> not (isNull c) && isValidUnicode c) + where + isNull = (== '\NUL') + isValidUnicode c = case generalCategory c of + Surrogate -> False + NotAssigned -> False + _ -> True + + +newtype NonNullAsciiString = NonNullAsciiString { nonNullAsciiString :: String } + deriving Show + +instance Arbitrary NonNullAsciiString where + arbitrary = NonNullAsciiString <$> listOf filepathAsciiChar + +filepathAsciiChar :: Gen Char +filepathAsciiChar = 
arbitraryASCIIChar `suchThat` (\c -> not (isNull c)) + where + isNull = (== '\NUL') + +newtype NonNullSurrogateString = NonNullSurrogateString { nonNullSurrogateString :: String } + deriving Show + +instance Arbitrary NonNullSurrogateString where + arbitrary = NonNullSurrogateString <$> listOf filepathWithSurrogates + +filepathWithSurrogates :: Gen Char +filepathWithSurrogates = + frequency + [(3, arbitraryASCIIChar), + (1, arbitraryUnicodeChar), + (1, arbitraryBoundedEnum) + ] + + +instance Arbitrary ByteString where arbitrary = ByteString.pack <$> arbitrary +instance CoArbitrary ByteString where coarbitrary = coarbitrary . ByteString.unpack diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/abstract-filepath/OsPathSpec.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/abstract-filepath/OsPathSpec.hs new file mode 100644 index 0000000000..95b96423d8 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/abstract-filepath/OsPathSpec.hs @@ -0,0 +1,261 @@ +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE QuasiQuotes #-} + +module OsPathSpec where + +import Data.Maybe + +import System.OsPath as OSP +import System.OsString.Internal.Types +import System.OsPath.Posix as Posix +import System.OsPath.Windows as Windows +import System.OsPath.Encoding +import qualified System.OsString.Internal.Types as OS +import System.OsString.Data.ByteString.Short ( toShort ) +import System.OsString.Posix as PosixS hiding (map) +import System.OsString.Windows as WindowsS hiding (map) + +import Control.Exception +import Data.ByteString ( ByteString ) +import qualified Data.ByteString as BS +import qualified Test.QuickCheck.Classes.Base as QC +import GHC.IO.Encoding.UTF8 ( mkUTF8 ) +import GHC.IO.Encoding.UTF16 ( mkUTF16le ) +import GHC.IO.Encoding ( setFileSystemEncoding ) +import GHC.IO.Encoding.Failure ( CodingFailureMode(..) 
) +import Control.DeepSeq +import Data.Bifunctor ( first ) +import qualified Data.ByteString.Char8 as C +import qualified System.OsString.Data.ByteString.Short.Word16 as BS16 +import qualified System.OsString.Data.ByteString.Short as SBS +import Data.Char ( ord ) +import Data.Proxy ( Proxy(..) ) +import Test.Tasty +import Test.Tasty.QuickCheck + +import Arbitrary + + +fromRight :: b -> Either a b -> b +fromRight _ (Right b) = b +fromRight b _ = b + + +tests :: TestTree +tests = testGroup "Abstract filepath" [ + testGroup "filepaths" + [ testProperties "OSP" + [ ("pack . unpack == id", + property $ \ws@(OsString _) -> + OSP.pack (OSP.unpack ws) === ws + ), + ("encodeUtf . decodeUtf == id", + property $ \(NonNullString str) -> (OSP.decodeUtf . fromJust . OSP.encodeUtf) str == Just str) + ], + testProperties "Windows" + [ ("pack . unpack == id (Windows)", + property $ \ws@(WindowsString _) -> + Windows.pack (Windows.unpack ws) === ws + ) + , ("decodeUtf . encodeUtf == id", + property $ \(NonNullString str) -> (Windows.decodeUtf . fromJust . Windows.encodeUtf) str == Just str) + , ("encodeWith ucs2le . decodeWith ucs2le == id", + property $ \(padEven -> bs) -> (Windows.encodeWith ucs2le . (\(Right r) -> r) . Windows.decodeWith ucs2le . OS.WS . toShort) bs + === Right (OS.WS . toShort $ bs)) + , ("decodeFS . encodeFS == id (Windows)", + property $ \(NonNullString str) -> ioProperty $ do + r1 <- Windows.encodeFS str + r2 <- try @SomeException $ Windows.decodeFS r1 + r3 <- evaluate $ force $ first displayException r2 + pure (r3 === Right str) + ) + , ("fromPlatformString* functions are equivalent under ASCII", + property $ \(WindowsString . BS16.pack . map (fromIntegral . ord) . 
nonNullAsciiString -> str) -> ioProperty $ do + r1 <- Windows.decodeFS str + r2 <- Windows.decodeUtf str + (Right r3) <- pure $ Windows.decodeWith (mkUTF16le TransliterateCodingFailure) str + (Right r4) <- pure $ Windows.decodeWith (mkUTF16le RoundtripFailure) str + (Right r5) <- pure $ Windows.decodeWith (mkUTF16le ErrorOnCodingFailure) str + pure ( r1 === r2 + .&&. r1 === r3 + .&&. r1 === r4 + .&&. r1 === r5 + ) + ) + , ("toPlatformString* functions are equivalent under ASCII", + property $ \(NonNullAsciiString str) -> ioProperty $ do + r1 <- Windows.encodeFS str + r2 <- Windows.encodeUtf str + (Right r3) <- pure $ Windows.encodeWith (mkUTF16le TransliterateCodingFailure) str + (Right r4) <- pure $ Windows.encodeWith (mkUTF16le RoundtripFailure) str + (Right r5) <- pure $ Windows.encodeWith (mkUTF16le ErrorOnCodingFailure) str + pure ( r1 === r2 + .&&. r1 === r3 + .&&. r1 === r4 + .&&. r1 === r5 + ) + ) + , ("Unit test toPlatformString*", + property $ ioProperty $ do + let str = "ABcK_(ツ123_&**" + let expected = WindowsString $ BS16.pack [0x0041,0x0042,0x0063,0x004b,0x005f,0x0028,0x30c4,0x0031,0x0032,0x0033,0x005f,0x0026,0x002a,0x002a] + r1 <- Windows.encodeFS str + r2 <- Windows.encodeUtf str + (Right r3) <- pure $ Windows.encodeWith (mkUTF16le TransliterateCodingFailure) str + (Right r4) <- pure $ Windows.encodeWith (mkUTF16le RoundtripFailure) str + (Right r5) <- pure $ Windows.encodeWith (mkUTF16le ErrorOnCodingFailure) str + pure ( r1 === expected + .&&. r2 === expected + .&&. r3 === expected + .&&. r4 === expected + .&&. 
r5 === expected + ) + ) + , ("Unit test fromPlatformString*", + property $ ioProperty $ do + let bs = WindowsString $ BS16.pack [0x0041,0x0042,0x0063,0x004b,0x005f,0x0028,0x30c4,0x0031,0x0032,0x0033,0x005f,0x0026,0x002a,0x002a] + let expected = "ABcK_(ツ123_&**" + r1 <- Windows.decodeFS bs + r2 <- Windows.decodeUtf bs + (Right r3) <- pure $ Windows.decodeWith (mkUTF16le TransliterateCodingFailure) bs + (Right r4) <- pure $ Windows.decodeWith (mkUTF16le RoundtripFailure) bs + (Right r5) <- pure $ Windows.decodeWith (mkUTF16le ErrorOnCodingFailure) bs + pure ( r1 === expected + .&&. r2 === expected + .&&. r3 === expected + .&&. r4 === expected + .&&. r5 === expected + ) + ) + ] + , testProperties "Posix" + [ ("decodeUtf . encodeUtf == id", + property $ \(NonNullString str) -> (Posix.decodeUtf . fromJust . Posix.encodeUtf) str == Just str) + , ("encodeWith ucs2le . decodeWith ucs2le == id (Posix)", + property $ \(padEven -> bs) -> (Posix.encodeWith ucs2le . (\(Right r) -> r) . Posix.decodeWith ucs2le . OS.PS . toShort) bs === Right (OS.PS . toShort $ bs)) + , ("decodeFS . encodeFS == id", + property $ \(NonNullString str) -> ioProperty $ do + setFileSystemEncoding (mkUTF8 TransliterateCodingFailure) + r1 <- Posix.encodeFS str + r2 <- try @SomeException $ Posix.decodeFS r1 + r3 <- evaluate $ force $ first displayException r2 + pure (r3 === Right str) + ) + , ("fromPlatformString* functions are equivalent under ASCII", + property $ \(PosixString . SBS.toShort . C.pack . nonNullAsciiString -> str) -> ioProperty $ do + r1 <- Posix.decodeFS str + r2 <- Posix.decodeUtf str + (Right r3) <- pure $ Posix.decodeWith (mkUTF8 TransliterateCodingFailure) str + (Right r4) <- pure $ Posix.decodeWith (mkUTF8 RoundtripFailure) str + (Right r5) <- pure $ Posix.decodeWith (mkUTF8 ErrorOnCodingFailure) str + pure ( r1 === r2 + .&&. r1 === r3 + .&&. r1 === r4 + .&&. 
r1 === r5 + ) + ) + , ("toPlatformString* functions are equivalent under ASCII", + property $ \(NonNullAsciiString str) -> ioProperty $ do + r1 <- Posix.encodeFS str + r2 <- Posix.encodeUtf str + (Right r3) <- pure $ Posix.encodeWith (mkUTF8 TransliterateCodingFailure) str + (Right r4) <- pure $ Posix.encodeWith (mkUTF8 RoundtripFailure) str + (Right r5) <- pure $ Posix.encodeWith (mkUTF8 ErrorOnCodingFailure) str + pure ( r1 === r2 + .&&. r1 === r3 + .&&. r1 === r4 + .&&. r1 === r5 + ) + ) + , ("Unit test toPlatformString*", + property $ ioProperty $ do + let str = "ABcK_(ツ123_&**" + let expected = PosixString $ SBS.pack [0x41,0x42,0x63,0x4b,0x5f,0x28,0xe3,0x83,0x84,0x31,0x32,0x33,0x5f,0x26,0x2a,0x2a] + r1 <- Posix.encodeFS str + r2 <- Posix.encodeUtf str + (Right r3) <- pure $ Posix.encodeWith (mkUTF8 TransliterateCodingFailure) str + (Right r4) <- pure $ Posix.encodeWith (mkUTF8 RoundtripFailure) str + (Right r5) <- pure $ Posix.encodeWith (mkUTF8 ErrorOnCodingFailure) str + pure ( r1 === expected + .&&. r2 === expected + .&&. r3 === expected + .&&. r4 === expected + .&&. r5 === expected + ) + ) + , ("Unit test fromPlatformString*", + property $ ioProperty $ do + let bs = PosixString $ SBS.pack [0x41,0x42,0x63,0x4b,0x5f,0x28,0xe3,0x83,0x84,0x31,0x32,0x33,0x5f,0x26,0x2a,0x2a] + let expected = "ABcK_(ツ123_&**" + r1 <- Posix.decodeFS bs + r2 <- Posix.decodeUtf bs + (Right r3) <- pure $ Posix.decodeWith (mkUTF8 TransliterateCodingFailure) bs + (Right r4) <- pure $ Posix.decodeWith (mkUTF8 RoundtripFailure) bs + (Right r5) <- pure $ Posix.decodeWith (mkUTF8 ErrorOnCodingFailure) bs + pure ( r1 === expected + .&&. r2 === expected + .&&. r3 === expected + .&&. r4 === expected + .&&. r5 === expected + ) + ) + , ("pack . 
unpack == id (Posix)", + property $ \ws@(PosixString _) -> + Posix.pack (Posix.unpack ws) === ws + ) + ] + ], + testGroup "QuasiQuoter" + [ testProperties "windows" + [ ("QuasiQuoter (WindowsPath)", + property $ do + let bs = WindowsString $ BS16.pack [0x0041,0x0042,0x0063,0x004b,0x005f] + let expected = [Windows.pstr|ABcK_|] + bs === expected + ) + , ("QuasiQuoter (WindowsString)", + property $ do + let bs = WindowsString $ BS16.pack [0x0041,0x0042,0x0063,0x004b,0x005f,0x0028,0x30c4,0x0031,0x0032,0x0033,0x005f,0x0026,0x002a,0x002a] + let expected = [WindowsS.pstr|ABcK_(ツ123_&**|] + bs === expected + ) + ], + testProperties "posix" + [ ("QuasiQuoter (PosixPath)", + property $ do + let bs = PosixString $ SBS.pack [0x41,0x42,0x63,0x4b,0x5f] + let expected = [Posix.pstr|ABcK_|] + bs === expected + ) + , ("QuasiQuoter (PosixString)", + property $ do + let bs = PosixString $ SBS.pack [0x41,0x42,0x63,0x4b,0x5f,0x28,0xe3,0x83,0x84,0x31,0x32,0x33,0x5f,0x26,0x2a,0x2a] + let expected = [PosixS.pstr|ABcK_(ツ123_&**|] + bs === expected + ) + ] + ], + testProperties "Type laws" + (QC.lawsProperties (QC.ordLaws (Proxy @OsPath)) + ++ QC.lawsProperties (QC.monoidLaws (Proxy @OsPath)) + + ++ QC.lawsProperties (QC.ordLaws (Proxy @OsString)) + ++ QC.lawsProperties (QC.monoidLaws (Proxy @OsString)) + + ++ QC.lawsProperties (QC.ordLaws (Proxy @WindowsString)) + ++ QC.lawsProperties (QC.monoidLaws (Proxy @WindowsString)) + + ++ QC.lawsProperties (QC.ordLaws (Proxy @PosixString)) + ++ QC.lawsProperties (QC.monoidLaws (Proxy @PosixString)) + + ++ QC.lawsProperties (QC.ordLaws (Proxy @PlatformString)) + ++ QC.lawsProperties (QC.monoidLaws (Proxy @PlatformString))) + ] + + +padEven :: ByteString -> ByteString +padEven bs + | even (BS.length bs) = bs + | otherwise = bs `BS.append` BS.pack [70] diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/abstract-filepath/Test.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/abstract-filepath/Test.hs new 
file mode 100644 index 0000000000..31698b6b84 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/abstract-filepath/Test.hs @@ -0,0 +1,7 @@ +module Main (main) where + +import qualified OsPathSpec +import Test.Tasty + +main :: IO () +main = defaultMain OsPathSpec.tests diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Gen.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Gen.hs new file mode 100644 index 0000000000..97aa358ae0 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Gen.hs @@ -0,0 +1,202 @@ +{-# LANGUAGE OverlappingInstances #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DerivingVia, TypeOperators #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DataKinds #-} + +module Gen where + +import System.FilePath +import Data.List.NonEmpty (NonEmpty(..)) +import GHC.Generics +import Generic.Random +import Generics.Deriving.Show +import Prelude as P +import Test.Tasty.QuickCheck hiding ((==>)) + +import qualified Data.List.NonEmpty as NE + + +class AltShow a where + altShow :: a -> String + +instance {-# OVERLAPPABLE #-} Show a => AltShow a where + altShow = show + +instance {-# OVERLAPS #-} AltShow String where + altShow = id + +instance {-# OVERLAPPABLE #-} AltShow a => AltShow (Maybe a) where + altShow Nothing = "" + altShow (Just a) = altShow a + + +newtype WindowsFilePaths = WindowsFilePaths { unWindowsFilePaths :: [WindowsFilePath] } + deriving (Show, Eq, Ord, Generic) + +-- filepath = namespace *"\" namespace-tail +-- / UNC +-- / [ disk ] *"\" relative-path +-- / disk *"\" +data WindowsFilePath = NS NameSpace [Separator] NSTail + | UNC UNCShare + | N (Maybe Char) [Separator] (Maybe RelFilePath) + -- ^ This differs from the grammar, because we allow 
+ -- empty paths + | PotentiallyInvalid FilePath + -- ^ this branch is added purely for the tests + deriving (GShow, Eq, Ord, Generic) + deriving Arbitrary via (GenericArbitraryRec '[6, 2, 2, 1] `AndShrinking` WindowsFilePath) + +instance Show WindowsFilePath where + show wf = gshow wf ++ " (" ++ altShow wf ++ ")" + +instance AltShow WindowsFilePath where + altShow (NS ns seps nstail) = altShow ns ++ altShow seps ++ altShow nstail + altShow (UNC unc) = altShow unc + altShow (N mdisk seps mfrp) = maybe [] (:[]) mdisk ++ (altShow seps ++ altShow mfrp) + altShow (PotentiallyInvalid fp) = fp + + +-- namespace-tail = ( disk 1*"\" relative-path ; C:foo\bar is not valid +-- ; namespaced paths are all absolute +-- / disk *"\" +-- / relative-path +-- ) +data NSTail = NST1 Char (NonEmpty Separator) RelFilePath + | NST2 Char [Separator] + | NST3 RelFilePath + deriving (GShow, Show, Eq, Ord, Generic) + deriving Arbitrary via (GenericArbitraryRec '[1, 1, 1] `AndShrinking` NSTail) + +instance AltShow NSTail where + altShow (NST1 disk seps relfp) = disk:':':(altShow seps ++ altShow relfp) + altShow (NST2 disk seps) = disk:':':altShow seps + altShow (NST3 relfp) = altShow relfp + + +-- UNC = "\\" 1*pchar "\" 1*pchar [ 1*"\" [ relative-path ] ] +data UNCShare = UNCShare Separator Separator + NonEmptyString + (NonEmpty Separator) + NonEmptyString + (Maybe (NonEmpty Separator, Maybe RelFilePath)) + deriving (GShow, Show, Eq, Ord, Generic) + deriving Arbitrary via (GenericArbitraryRec '[1] `AndShrinking` UNCShare) + +instance AltShow UNCShare where + altShow (UNCShare sep1 sep2 fp1 seps fp2 mrfp) = altShow sep1 ++ altShow sep2 ++ altShow fp1 ++ altShow seps ++ altShow fp2 ++ maybe "" (\(a, b) -> altShow a ++ maybe "" altShow b) mrfp + +newtype NonEmptyString = NonEmptyString (NonEmpty Char) + deriving (GShow, Show, Eq, Ord, Generic) + deriving Arbitrary via (GenericArbitraryRec '[1] `AndShrinking` NonEmptyString) + +instance Semigroup NonEmptyString where + (<>) (NonEmptyString ne) 
(NonEmptyString ne') = NonEmptyString (ne <> ne') + +instance AltShow NonEmptyString where + altShow (NonEmptyString ns) = NE.toList ns + + +-- | Windows API Namespaces +-- +-- https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#namespaces +-- https://support.microsoft.com/en-us/topic/70b92942-a643-2f2d-2ac6-aad8acad49fb +-- https://superuser.com/a/1096784/854039 +-- https://reverseengineering.stackexchange.com/a/15178 +-- https://stackoverflow.com/a/25099634 +-- +-- namespace = file-namespace / device-namespace / nt-namespace +-- file-namespace = "\" "\" "?" "\" +-- device-namespace = "\" "\" "." "\" +-- nt-namespace = "\" "?" "?" "\" +data NameSpace = FileNameSpace + | DeviceNameSpace + | NTNameSpace + deriving (GShow, Show, Eq, Ord, Generic) + deriving Arbitrary via (GenericArbitraryRec '[3, 1, 1] `AndShrinking` NameSpace) + +instance AltShow NameSpace where + altShow FileNameSpace = "\\\\?\\" + altShow DeviceNameSpace = "\\\\.\\" + altShow NTNameSpace = "\\??\\" + + +data Separator = UnixSep + | WindowsSep + deriving (GShow, Show, Eq, Ord, Generic) + deriving Arbitrary via (GenericArbitraryRec '[1, 1] `AndShrinking` Separator) + +instance AltShow Separator where + altShow UnixSep = "/" + altShow WindowsSep = "\\" + +instance {-# OVERLAPS #-} AltShow (NonEmpty Separator) where + altShow ne = mconcat $ NE.toList (altShow <$> ne) + +instance {-# OVERLAPS #-} AltShow [Separator] where + altShow [] = "" + altShow ne = altShow (NE.fromList ne) + +-- relative-path = 1*(path-name 1*"\") [ file-name ] / file-name +data RelFilePath = Rel1 (NonEmpty (NonEmptyString, NonEmpty Separator)) (Maybe FileName) + | Rel2 FileName + deriving (GShow, Show, Eq, Ord, Generic) + deriving Arbitrary via (GenericArbitraryRec '[2, 1] `AndShrinking` RelFilePath) + +instance AltShow RelFilePath where + altShow (Rel1 ns mf) = mconcat (NE.toList $ fmap (\(a, b) -> altShow a ++ altShow b) ns) ++ altShow mf + altShow (Rel2 fn) = altShow fn + +-- file-name = 1*pchar [ stream ] 
+data FileName = FileName NonEmptyString (Maybe DataStream) + deriving (GShow, Show, Eq, Ord, Generic) + +instance Arbitrary FileName where + -- make sure that half of the filenames include a dot '.' + -- so that we can deal with extensions + arbitrary = do + ns <- arbitrary + ds <- arbitrary + i <- chooseInt (0, 100) + if i >= 50 + then do + ns' <- arbitrary + pure $ FileName (ns <> NonEmptyString ('.':|[]) <> ns') ds + else pure $ FileName ns ds + shrink = genericShrink + + +instance Arbitrary (Maybe DataStream) where + arbitrary = genericArbitraryRec (1 % 1 % ()) + shrink = genericShrink + +instance AltShow FileName where + altShow (FileName ns ds) = altShow ns ++ altShow ds + +-- stream = ":" 1*schar [ ":" 1*schar ] / ":" ":" 1*schar +data DataStream = DS1 NonEmptyString (Maybe NonEmptyString) + | DS2 NonEmptyString -- ::datatype + deriving (GShow, Show, Eq, Ord, Generic) + deriving Arbitrary via (GenericArbitraryRec '[1, 1] `AndShrinking` DataStream) + +instance AltShow DataStream where + altShow (DS1 ns Nothing) = ":" ++ altShow ns + altShow (DS1 ns (Just ns2)) = ":" ++ altShow ns ++ ":" ++ altShow ns2 + altShow (DS2 ns) = "::" ++ altShow ns + +instance Arbitrary WindowsFilePaths where + arbitrary = WindowsFilePaths <$> listOf' arbitrary + shrink = genericShrink + +instance Arbitrary [Separator] where + arbitrary = listOf' arbitrary + shrink = genericShrink + +instance Arbitrary a => Arbitrary (NonEmpty a) where + arbitrary = NE.fromList <$> listOf1' arbitrary + shrink = genericShrink + diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Legacy/System/FilePath.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Legacy/System/FilePath.hs new file mode 100644 index 0000000000..0297b0e72e --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Legacy/System/FilePath.hs @@ -0,0 +1,147 @@ +{-# LANGUAGE CPP #-} 
+#if __GLASGOW_HASKELL__ >= 704 +{-# LANGUAGE Safe #-} +#endif +{- | +Module : System.FilePath +Copyright : (c) Neil Mitchell 2005-2014 +License : BSD3 + +Maintainer : ndmitchell@gmail.com +Stability : stable +Portability : portable + +A library for 'FilePath' manipulations, using Posix or Windows filepaths +depending on the platform. + +Both "System.FilePath.Posix" and "System.FilePath.Windows" provide the +same interface. + +Given the example 'FilePath': @\/directory\/file.ext@ + +We can use the following functions to extract pieces. + +* 'takeFileName' gives @\"file.ext\"@ + +* 'takeDirectory' gives @\"\/directory\"@ + +* 'takeExtension' gives @\".ext\"@ + +* 'dropExtension' gives @\"\/directory\/file\"@ + +* 'takeBaseName' gives @\"file\"@ + +And we could have built an equivalent path with the following expressions: + +* @\"\/directory\" '' \"file.ext\"@. + +* @\"\/directory\/file" '<.>' \"ext\"@. + +* @\"\/directory\/file.txt" '-<.>' \"ext\"@. + +Each function in this module is documented with several examples, +which are also used as tests. 
+ +Here are a few examples of using the @filepath@ functions together: + +/Example 1:/ Find the possible locations of a Haskell module @Test@ imported from module @Main@: + +@['replaceFileName' path_to_main \"Test\" '<.>' ext | ext <- [\"hs\",\"lhs\"] ]@ + +/Example 2:/ Download a file from @url@ and save it to disk: + +@do let file = 'makeValid' url + System.Directory.createDirectoryIfMissing True ('takeDirectory' file)@ + +/Example 3:/ Compile a Haskell file, putting the @.hi@ file under @interface@: + +@'takeDirectory' file '' \"interface\" '' ('takeFileName' file '-<.>' \"hi\")@ + +References: +[1] (Microsoft MSDN) +-} + + +#if defined(mingw32_HOST_OS) || defined(__MINGW32__) +module Legacy.System.FilePath( + -- * Separator predicates + FilePath, + pathSeparator, pathSeparators, isPathSeparator, + searchPathSeparator, isSearchPathSeparator, + extSeparator, isExtSeparator, + + -- * @$PATH@ methods + splitSearchPath, getSearchPath, + + -- * Extension functions + splitExtension, + takeExtension, replaceExtension, (-<.>), dropExtension, addExtension, hasExtension, (<.>), + splitExtensions, dropExtensions, takeExtensions, replaceExtensions, isExtensionOf, + stripExtension, + + -- * Filename\/directory functions + splitFileName, + takeFileName, replaceFileName, dropFileName, + takeBaseName, replaceBaseName, + takeDirectory, replaceDirectory, + combine, (), + splitPath, joinPath, splitDirectories, + + -- * Drive functions + splitDrive, joinDrive, + takeDrive, hasDrive, dropDrive, isDrive, + + -- * Trailing slash functions + hasTrailingPathSeparator, + addTrailingPathSeparator, + dropTrailingPathSeparator, + + -- * File name manipulations + normalise, equalFilePath, + makeRelative, + isRelative, isAbsolute, + isValid, makeValid +) where +import Legacy.System.FilePath.Windows +#else +module Legacy.System.FilePath( + -- * Separator predicates + FilePath, + pathSeparator, pathSeparators, isPathSeparator, + searchPathSeparator, isSearchPathSeparator, + extSeparator, 
isExtSeparator, + + -- * @$PATH@ methods + splitSearchPath, getSearchPath, + + -- * Extension functions + splitExtension, + takeExtension, replaceExtension, (-<.>), dropExtension, addExtension, hasExtension, (<.>), + splitExtensions, dropExtensions, takeExtensions, replaceExtensions, isExtensionOf, + stripExtension, + + -- * Filename\/directory functions + splitFileName, + takeFileName, replaceFileName, dropFileName, + takeBaseName, replaceBaseName, + takeDirectory, replaceDirectory, + combine, (), + splitPath, joinPath, splitDirectories, + + -- * Drive functions + splitDrive, joinDrive, + takeDrive, hasDrive, dropDrive, isDrive, + + -- * Trailing slash functions + hasTrailingPathSeparator, + addTrailingPathSeparator, + dropTrailingPathSeparator, + + -- * File name manipulations + normalise, equalFilePath, + makeRelative, + isRelative, isAbsolute, + isValid, makeValid +) where +import Legacy.System.FilePath.Posix +#endif diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Legacy/System/FilePath/Posix.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Legacy/System/FilePath/Posix.hs new file mode 100644 index 0000000000..cbf561b856 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Legacy/System/FilePath/Posix.hs @@ -0,0 +1,1048 @@ + + + +{-# LANGUAGE PatternGuards #-} + +-- This template expects CPP definitions for: +-- MODULE_NAME = Posix | Windows +-- IS_WINDOWS = False | True + +-- | +-- Module : System.FilePath.MODULE_NAME +-- Copyright : (c) Neil Mitchell 2005-2014 +-- License : BSD3 +-- +-- Maintainer : ndmitchell@gmail.com +-- Stability : stable +-- Portability : portable +-- +-- A library for 'FilePath' manipulations, using MODULE_NAME style paths on +-- all platforms. Importing "System.FilePath" is usually better. 
+-- +-- Given the example 'FilePath': @\/directory\/file.ext@ +-- +-- We can use the following functions to extract pieces. +-- +-- * 'takeFileName' gives @\"file.ext\"@ +-- +-- * 'takeDirectory' gives @\"\/directory\"@ +-- +-- * 'takeExtension' gives @\".ext\"@ +-- +-- * 'dropExtension' gives @\"\/directory\/file\"@ +-- +-- * 'takeBaseName' gives @\"file\"@ +-- +-- And we could have built an equivalent path with the following expressions: +-- +-- * @\"\/directory\" '' \"file.ext\"@. +-- +-- * @\"\/directory\/file" '<.>' \"ext\"@. +-- +-- * @\"\/directory\/file.txt" '-<.>' \"ext\"@. +-- +-- Each function in this module is documented with several examples, +-- which are also used as tests. +-- +-- Here are a few examples of using the @filepath@ functions together: +-- +-- /Example 1:/ Find the possible locations of a Haskell module @Test@ imported from module @Main@: +-- +-- @['replaceFileName' path_to_main \"Test\" '<.>' ext | ext <- [\"hs\",\"lhs\"] ]@ +-- +-- /Example 2:/ Download a file from @url@ and save it to disk: +-- +-- @do let file = 'makeValid' url +-- System.Directory.createDirectoryIfMissing True ('takeDirectory' file)@ +-- +-- /Example 3:/ Compile a Haskell file, putting the @.hi@ file under @interface@: +-- +-- @'takeDirectory' file '' \"interface\" '' ('takeFileName' file '-<.>' \"hi\")@ +-- +-- References: +-- [1] (Microsoft MSDN) +module Legacy.System.FilePath.Posix + ( + -- * Separator predicates + FilePath, + pathSeparator, pathSeparators, isPathSeparator, + searchPathSeparator, isSearchPathSeparator, + extSeparator, isExtSeparator, + + -- * @$PATH@ methods + splitSearchPath, getSearchPath, + + -- * Extension functions + splitExtension, + takeExtension, replaceExtension, (-<.>), dropExtension, addExtension, hasExtension, (<.>), + splitExtensions, dropExtensions, takeExtensions, replaceExtensions, isExtensionOf, + stripExtension, + + -- * Filename\/directory functions + splitFileName, + takeFileName, replaceFileName, dropFileName, + takeBaseName, 
replaceBaseName, + takeDirectory, replaceDirectory, + combine, (), + splitPath, joinPath, splitDirectories, + + -- * Drive functions + splitDrive, joinDrive, + takeDrive, hasDrive, dropDrive, isDrive, + + -- * Trailing slash functions + hasTrailingPathSeparator, + addTrailingPathSeparator, + dropTrailingPathSeparator, + + -- * File name manipulations + normalise, equalFilePath, + makeRelative, + isRelative, isAbsolute, + isValid, makeValid + ) + where + +import Data.Char(toLower, toUpper, isAsciiLower, isAsciiUpper) +import Data.Maybe(isJust) +import Data.List(stripPrefix, isSuffixOf) + +import System.Environment(getEnv) + + +infixr 7 <.>, -<.> +infixr 5 + + + + + +--------------------------------------------------------------------- +-- Platform Abstraction Methods (private) + +-- | Is the operating system Unix or Linux like +isPosix :: Bool +isPosix = not isWindows + +-- | Is the operating system Windows like +isWindows :: Bool +isWindows = False + + +--------------------------------------------------------------------- +-- The basic functions + +-- | The character that separates directories. In the case where more than +-- one character is possible, 'pathSeparator' is the \'ideal\' one. +-- +-- > Windows: pathSeparator == '\\' +-- > Posix: pathSeparator == '/' +-- > isPathSeparator pathSeparator +pathSeparator :: Char +pathSeparator = if isWindows then '\\' else '/' + +-- | The list of all possible separators. +-- +-- > Windows: pathSeparators == ['\\', '/'] +-- > Posix: pathSeparators == ['/'] +-- > pathSeparator `elem` pathSeparators +pathSeparators :: [Char] +pathSeparators = if isWindows then "\\/" else "/" + +-- | Rather than using @(== 'pathSeparator')@, use this. Test if something +-- is a path separator. 
+-- +-- > isPathSeparator a == (a `elem` pathSeparators) +isPathSeparator :: Char -> Bool +isPathSeparator '/' = True +isPathSeparator '\\' = isWindows +isPathSeparator _ = False + + +-- | The character that is used to separate the entries in the $PATH environment variable. +-- +-- > Windows: searchPathSeparator == ';' +-- > Posix: searchPathSeparator == ':' +searchPathSeparator :: Char +searchPathSeparator = if isWindows then ';' else ':' + +-- | Is the character a file separator? +-- +-- > isSearchPathSeparator a == (a == searchPathSeparator) +isSearchPathSeparator :: Char -> Bool +isSearchPathSeparator = (== searchPathSeparator) + + +-- | File extension character +-- +-- > extSeparator == '.' +extSeparator :: Char +extSeparator = '.' + +-- | Is the character an extension character? +-- +-- > isExtSeparator a == (a == extSeparator) +isExtSeparator :: Char -> Bool +isExtSeparator = (== extSeparator) + + +--------------------------------------------------------------------- +-- Path methods (environment $PATH) + +-- | Take a string, split it on the 'searchPathSeparator' character. +-- Blank items are ignored on Windows, and converted to @.@ on Posix. +-- On Windows path elements are stripped of quotes. +-- +-- Follows the recommendations in +-- +-- +-- > Posix: splitSearchPath "File1:File2:File3" == ["File1","File2","File3"] +-- > Posix: splitSearchPath "File1::File2:File3" == ["File1",".","File2","File3"] +-- > Windows: splitSearchPath "File1;File2;File3" == ["File1","File2","File3"] +-- > Windows: splitSearchPath "File1;;File2;File3" == ["File1","File2","File3"] +-- > Windows: splitSearchPath "File1;\"File2\";File3" == ["File1","File2","File3"] +splitSearchPath :: String -> [FilePath] +splitSearchPath = f + where + f xs = case break isSearchPathSeparator xs of + (pre, [] ) -> g pre + (pre, _:post) -> g pre ++ f post + + g "" = ["." 
| isPosix] + g ('\"':x@(_:_)) | isWindows && last x == '\"' = [init x] + g x = [x] + + +-- | Get a list of 'FilePath's in the $PATH variable. +getSearchPath :: IO [FilePath] +getSearchPath = fmap splitSearchPath (getEnv "PATH") + + +--------------------------------------------------------------------- +-- Extension methods + +-- | Split on the extension. 'addExtension' is the inverse. +-- +-- > splitExtension "/directory/path.ext" == ("/directory/path",".ext") +-- > uncurry (++) (splitExtension x) == x +-- > Valid x => uncurry addExtension (splitExtension x) == x +-- > splitExtension "file.txt" == ("file",".txt") +-- > splitExtension "file" == ("file","") +-- > splitExtension "file/file.txt" == ("file/file",".txt") +-- > splitExtension "file.txt/boris" == ("file.txt/boris","") +-- > splitExtension "file.txt/boris.ext" == ("file.txt/boris",".ext") +-- > splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob",".fred") +-- > splitExtension "file/path.txt/" == ("file/path.txt/","") +splitExtension :: FilePath -> (String, String) +splitExtension x = case nameDot of + "" -> (x,"") + _ -> (dir ++ init nameDot, extSeparator : ext) + where + (dir,file) = splitFileName_ x + (nameDot,ext) = breakEnd isExtSeparator file + +-- | Get the extension of a file, returns @\"\"@ for no extension, @.ext@ otherwise. +-- +-- > takeExtension "/directory/path.ext" == ".ext" +-- > takeExtension x == snd (splitExtension x) +-- > Valid x => takeExtension (addExtension x "ext") == ".ext" +-- > Valid x => takeExtension (replaceExtension x "ext") == ".ext" +takeExtension :: FilePath -> String +takeExtension = snd . splitExtension + +-- | Remove the current extension and add another, equivalent to 'replaceExtension'. 
+-- +-- > "/directory/path.txt" -<.> "ext" == "/directory/path.ext" +-- > "/directory/path.txt" -<.> ".ext" == "/directory/path.ext" +-- > "foo.o" -<.> "c" == "foo.c" +(-<.>) :: FilePath -> String -> FilePath +(-<.>) = replaceExtension + +-- | Set the extension of a file, overwriting one if already present, equivalent to '-<.>'. +-- +-- > replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext" +-- > replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext" +-- > replaceExtension "file.txt" ".bob" == "file.bob" +-- > replaceExtension "file.txt" "bob" == "file.bob" +-- > replaceExtension "file" ".bob" == "file.bob" +-- > replaceExtension "file.txt" "" == "file" +-- > replaceExtension "file.fred.bob" "txt" == "file.fred.txt" +-- > replaceExtension x y == addExtension (dropExtension x) y +replaceExtension :: FilePath -> String -> FilePath +replaceExtension x y = dropExtension x <.> y + +-- | Add an extension, even if there is already one there, equivalent to 'addExtension'. +-- +-- > "/directory/path" <.> "ext" == "/directory/path.ext" +-- > "/directory/path" <.> ".ext" == "/directory/path.ext" +(<.>) :: FilePath -> String -> FilePath +(<.>) = addExtension + +-- | Remove last extension, and the \".\" preceding it. +-- +-- > dropExtension "/directory/path.ext" == "/directory/path" +-- > dropExtension x == fst (splitExtension x) +dropExtension :: FilePath -> FilePath +dropExtension = fst . splitExtension + +-- | Add an extension, even if there is already one there, equivalent to '<.>'. +-- +-- > addExtension "/directory/path" "ext" == "/directory/path.ext" +-- > addExtension "file.txt" "bib" == "file.txt.bib" +-- > addExtension "file." 
".bib" == "file..bib" +-- > addExtension "file" ".bib" == "file.bib" +-- > addExtension "/" "x" == "/.x" +-- > addExtension x "" == x +-- > Valid x => takeFileName (addExtension (addTrailingPathSeparator x) "ext") == ".ext" +-- > Windows: addExtension "\\\\share" ".txt" == "\\\\share\\.txt" +addExtension :: FilePath -> String -> FilePath +addExtension file "" = file +addExtension file xs@(x:_) = joinDrive a res + where + res = if isExtSeparator x then b ++ xs + else b ++ [extSeparator] ++ xs + + (a,b) = splitDrive file + +-- | Does the given filename have an extension? +-- +-- > hasExtension "/directory/path.ext" == True +-- > hasExtension "/directory/path" == False +-- > null (takeExtension x) == not (hasExtension x) +hasExtension :: FilePath -> Bool +hasExtension = any isExtSeparator . takeFileName + + +-- | Does the given filename have the specified extension? +-- +-- > "png" `isExtensionOf` "/directory/file.png" == True +-- > ".png" `isExtensionOf` "/directory/file.png" == True +-- > ".tar.gz" `isExtensionOf` "bar/foo.tar.gz" == True +-- > "ar.gz" `isExtensionOf` "bar/foo.tar.gz" == False +-- > "png" `isExtensionOf` "/directory/file.png.jpg" == False +-- > "csv/table.csv" `isExtensionOf` "/data/csv/table.csv" == False +isExtensionOf :: String -> FilePath -> Bool +isExtensionOf ext@('.':_) = isSuffixOf ext . takeExtensions +isExtensionOf ext = isSuffixOf ('.':ext) . takeExtensions + +-- | Drop the given extension from a FilePath, and the @\".\"@ preceding it. +-- Returns 'Nothing' if the FilePath does not have the given extension, or +-- 'Just' and the part before the extension if it does. +-- +-- This function can be more predictable than 'dropExtensions', especially if the filename +-- might itself contain @.@ characters. 
+-- +-- > stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x" +-- > stripExtension "hi.o" "foo.x.hs.o" == Nothing +-- > dropExtension x == fromJust (stripExtension (takeExtension x) x) +-- > dropExtensions x == fromJust (stripExtension (takeExtensions x) x) +-- > stripExtension ".c.d" "a.b.c.d" == Just "a.b" +-- > stripExtension ".c.d" "a.b..c.d" == Just "a.b." +-- > stripExtension "baz" "foo.bar" == Nothing +-- > stripExtension "bar" "foobar" == Nothing +-- > stripExtension "" x == Just x +stripExtension :: String -> FilePath -> Maybe FilePath +stripExtension [] path = Just path +stripExtension ext@(x:_) path = stripSuffix dotExt path + where dotExt = if isExtSeparator x then ext else '.':ext + + +-- | Split on all extensions. +-- +-- > splitExtensions "/directory/path.ext" == ("/directory/path",".ext") +-- > splitExtensions "file.tar.gz" == ("file",".tar.gz") +-- > uncurry (++) (splitExtensions x) == x +-- > Valid x => uncurry addExtension (splitExtensions x) == x +-- > splitExtensions "file.tar.gz" == ("file",".tar.gz") +splitExtensions :: FilePath -> (FilePath, String) +splitExtensions x = (a ++ c, d) + where + (a,b) = splitFileName_ x + (c,d) = break isExtSeparator b + +-- | Drop all extensions. +-- +-- > dropExtensions "/directory/path.ext" == "/directory/path" +-- > dropExtensions "file.tar.gz" == "file" +-- > not $ hasExtension $ dropExtensions x +-- > not $ any isExtSeparator $ takeFileName $ dropExtensions x +dropExtensions :: FilePath -> FilePath +dropExtensions = fst . splitExtensions + +-- | Get all extensions. +-- +-- > takeExtensions "/directory/path.ext" == ".ext" +-- > takeExtensions "file.tar.gz" == ".tar.gz" +takeExtensions :: FilePath -> String +takeExtensions = snd . splitExtensions + + +-- | Replace all extensions of a file with a new extension. Note +-- that 'replaceExtension' and 'addExtension' both work for adding +-- multiple extensions, so only required when you need to drop +-- all extensions first. 
+-- +-- > replaceExtensions "file.fred.bob" "txt" == "file.txt" +-- > replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz" +replaceExtensions :: FilePath -> String -> FilePath +replaceExtensions x y = dropExtensions x <.> y + + + +--------------------------------------------------------------------- +-- Drive methods + +-- | Is the given character a valid drive letter? +-- only a-z and A-Z are letters, not isAlpha which is more unicodey +isLetter :: Char -> Bool +isLetter x = isAsciiLower x || isAsciiUpper x + + +-- | Split a path into a drive and a path. +-- On Posix, \/ is a Drive. +-- +-- > uncurry (++) (splitDrive x) == x +-- > Windows: splitDrive "file" == ("","file") +-- > Windows: splitDrive "c:/file" == ("c:/","file") +-- > Windows: splitDrive "c:\\file" == ("c:\\","file") +-- > Windows: splitDrive "\\\\shared\\test" == ("\\\\shared\\","test") +-- > Windows: splitDrive "\\\\shared" == ("\\\\shared","") +-- > Windows: splitDrive "\\\\?\\UNC\\shared\\file" == ("\\\\?\\UNC\\shared\\","file") +-- > Windows: splitDrive "\\\\?\\UNCshared\\file" == ("\\\\?\\","UNCshared\\file") +-- > Windows: splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\","file") +-- > Windows: splitDrive "/d" == ("","/d") +-- > Posix: splitDrive "/test" == ("/","test") +-- > Posix: splitDrive "//test" == ("//","test") +-- > Posix: splitDrive "test/file" == ("","test/file") +-- > Posix: splitDrive "file" == ("","file") +splitDrive :: FilePath -> (FilePath, FilePath) +splitDrive x | isPosix = span (== '/') x +splitDrive x | Just y <- readDriveLetter x = y +splitDrive x | Just y <- readDriveUNC x = y +splitDrive x | Just y <- readDriveShare x = y +splitDrive x = ("",x) + +addSlash :: FilePath -> FilePath -> (FilePath, FilePath) +addSlash a xs = (a++c,d) + where (c,d) = span isPathSeparator xs + +-- See [1]. 
+-- "\\?\D:\" or "\\?\UNC\\" +readDriveUNC :: FilePath -> Maybe (FilePath, FilePath) +readDriveUNC (s1:s2:'?':s3:xs) | all isPathSeparator [s1,s2,s3] = + case map toUpper xs of + ('U':'N':'C':s4:_) | isPathSeparator s4 -> + let (a,b) = readDriveShareName (drop 4 xs) + in Just (s1:s2:'?':s3:take 4 xs ++ a, b) + _ -> case readDriveLetter xs of + -- Extended-length path. + Just (a,b) -> Just (s1:s2:'?':s3:a,b) + Nothing -> Nothing +readDriveUNC _ = Nothing + +{- c:\ -} +readDriveLetter :: String -> Maybe (FilePath, FilePath) +readDriveLetter (x:':':y:xs) | isLetter x && isPathSeparator y = Just $ addSlash [x,':'] (y:xs) +readDriveLetter (x:':':xs) | isLetter x = Just ([x,':'], xs) +readDriveLetter _ = Nothing + +{- \\sharename\ -} +readDriveShare :: String -> Maybe (FilePath, FilePath) +readDriveShare (s1:s2:xs) | isPathSeparator s1 && isPathSeparator s2 = + Just (s1:s2:a,b) + where (a,b) = readDriveShareName xs +readDriveShare _ = Nothing + +{- assume you have already seen \\ -} +{- share\bob -> "share\", "bob" -} +readDriveShareName :: String -> (FilePath, FilePath) +readDriveShareName name = addSlash a b + where (a,b) = break isPathSeparator name + + + +-- | Join a drive and the rest of the path. +-- +-- > Valid x => uncurry joinDrive (splitDrive x) == x +-- > Windows: joinDrive "C:" "foo" == "C:foo" +-- > Windows: joinDrive "C:\\" "bar" == "C:\\bar" +-- > Windows: joinDrive "\\\\share" "foo" == "\\\\share\\foo" +-- > Windows: joinDrive "/:" "foo" == "/:\\foo" +joinDrive :: FilePath -> FilePath -> FilePath +joinDrive = combineAlways + +-- | Get the drive from a filepath. +-- +-- > takeDrive x == fst (splitDrive x) +takeDrive :: FilePath -> FilePath +takeDrive = fst . splitDrive + +-- | Delete the drive, if it exists. +-- +-- > dropDrive x == snd (splitDrive x) +dropDrive :: FilePath -> FilePath +dropDrive = snd . splitDrive + +-- | Does a path have a drive. 
+-- +-- > not (hasDrive x) == null (takeDrive x) +-- > Posix: hasDrive "/foo" == True +-- > Windows: hasDrive "C:\\foo" == True +-- > Windows: hasDrive "C:foo" == True +-- > hasDrive "foo" == False +-- > hasDrive "" == False +hasDrive :: FilePath -> Bool +hasDrive = not . null . takeDrive + + +-- | Is an element a drive +-- +-- > Posix: isDrive "/" == True +-- > Posix: isDrive "/foo" == False +-- > Windows: isDrive "C:\\" == True +-- > Windows: isDrive "C:\\foo" == False +-- > isDrive "" == False +isDrive :: FilePath -> Bool +isDrive x = not (null x) && null (dropDrive x) + + +--------------------------------------------------------------------- +-- Operations on a filepath, as a list of directories + +-- | Split a filename into directory and file. '' is the inverse. +-- The first component will often end with a trailing slash. +-- +-- > splitFileName "/directory/file.ext" == ("/directory/","file.ext") +-- > Valid x => uncurry () (splitFileName x) == x || fst (splitFileName x) == "./" +-- > Valid x => isValid (fst (splitFileName x)) +-- > splitFileName "file/bob.txt" == ("file/", "bob.txt") +-- > splitFileName "file/" == ("file/", "") +-- > splitFileName "bob" == ("./", "bob") +-- > Posix: splitFileName "/" == ("/","") +-- > Windows: splitFileName "c:" == ("c:","") +splitFileName :: FilePath -> (String, String) +splitFileName x = (if null dir then "./" else dir, name) + where + (dir, name) = splitFileName_ x + +-- version of splitFileName where, if the FilePath has no directory +-- component, the returned directory is "" rather than "./". This +-- is used in cases where we are going to combine the returned +-- directory to make a valid FilePath, and having a "./" appear would +-- look strange and upset simple equality properties. See +-- e.g. replaceFileName. +splitFileName_ :: FilePath -> (String, String) +splitFileName_ x = (drv ++ dir, file) + where + (drv,pth) = splitDrive x + (dir,file) = breakEnd isPathSeparator pth + +-- | Set the filename. 
+-- +-- > replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext" +-- > Valid x => replaceFileName x (takeFileName x) == x +replaceFileName :: FilePath -> String -> FilePath +replaceFileName x y = a y where (a,_) = splitFileName_ x + +-- | Drop the filename. Unlike 'takeDirectory', this function will leave +-- a trailing path separator on the directory. +-- +-- > dropFileName "/directory/file.ext" == "/directory/" +-- > dropFileName x == fst (splitFileName x) +dropFileName :: FilePath -> FilePath +dropFileName = fst . splitFileName + + +-- | Get the file name. +-- +-- > takeFileName "/directory/file.ext" == "file.ext" +-- > takeFileName "test/" == "" +-- > takeFileName x `isSuffixOf` x +-- > takeFileName x == snd (splitFileName x) +-- > Valid x => takeFileName (replaceFileName x "fred") == "fred" +-- > Valid x => takeFileName (x "fred") == "fred" +-- > Valid x => isRelative (takeFileName x) +takeFileName :: FilePath -> FilePath +takeFileName = snd . splitFileName + +-- | Get the base name, without an extension or path. +-- +-- > takeBaseName "/directory/file.ext" == "file" +-- > takeBaseName "file/test.txt" == "test" +-- > takeBaseName "dave.ext" == "dave" +-- > takeBaseName "" == "" +-- > takeBaseName "test" == "test" +-- > takeBaseName (addTrailingPathSeparator x) == "" +-- > takeBaseName "file/file.tar.gz" == "file.tar" +takeBaseName :: FilePath -> String +takeBaseName = dropExtension . takeFileName + +-- | Set the base name. 
+-- +-- > replaceBaseName "/directory/other.ext" "file" == "/directory/file.ext" +-- > replaceBaseName "file/test.txt" "bob" == "file/bob.txt" +-- > replaceBaseName "fred" "bill" == "bill" +-- > replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar" +-- > Valid x => replaceBaseName x (takeBaseName x) == x +replaceBaseName :: FilePath -> String -> FilePath +replaceBaseName pth nam = combineAlways a (nam <.> ext) + where + (a,b) = splitFileName_ pth + ext = takeExtension b + +-- | Is an item either a directory or the last character a path separator? +-- +-- > hasTrailingPathSeparator "test" == False +-- > hasTrailingPathSeparator "test/" == True +hasTrailingPathSeparator :: FilePath -> Bool +hasTrailingPathSeparator "" = False +hasTrailingPathSeparator x = isPathSeparator (last x) + + +hasLeadingPathSeparator :: FilePath -> Bool +hasLeadingPathSeparator "" = False +hasLeadingPathSeparator x = isPathSeparator (head x) + + +-- | Add a trailing file path separator if one is not already present. +-- +-- > hasTrailingPathSeparator (addTrailingPathSeparator x) +-- > hasTrailingPathSeparator x ==> addTrailingPathSeparator x == x +-- > Posix: addTrailingPathSeparator "test/rest" == "test/rest/" +addTrailingPathSeparator :: FilePath -> FilePath +addTrailingPathSeparator x = if hasTrailingPathSeparator x then x else x ++ [pathSeparator] + + +-- | Remove any trailing path separators +-- +-- > dropTrailingPathSeparator "file/test/" == "file/test" +-- > dropTrailingPathSeparator "/" == "/" +-- > Windows: dropTrailingPathSeparator "\\" == "\\" +-- > Posix: not (hasTrailingPathSeparator (dropTrailingPathSeparator x)) || isDrive x +dropTrailingPathSeparator :: FilePath -> FilePath +dropTrailingPathSeparator x = + if hasTrailingPathSeparator x && not (isDrive x) + then let x' = dropWhileEnd isPathSeparator x + in if null x' then [last x] else x' + else x + + +-- | Get the directory name, move up one level. 
+-- +-- > takeDirectory "/directory/other.ext" == "/directory" +-- > takeDirectory x `isPrefixOf` x || takeDirectory x == "." +-- > takeDirectory "foo" == "." +-- > takeDirectory "/" == "/" +-- > takeDirectory "/foo" == "/" +-- > takeDirectory "/foo/bar/baz" == "/foo/bar" +-- > takeDirectory "/foo/bar/baz/" == "/foo/bar/baz" +-- > takeDirectory "foo/bar/baz" == "foo/bar" +-- > Windows: takeDirectory "foo\\bar" == "foo" +-- > Windows: takeDirectory "foo\\bar\\\\" == "foo\\bar" +-- > Windows: takeDirectory "C:\\" == "C:\\" +takeDirectory :: FilePath -> FilePath +takeDirectory = dropTrailingPathSeparator . dropFileName + +-- | Set the directory, keeping the filename the same. +-- +-- > replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext" +-- > Valid x => replaceDirectory x (takeDirectory x) `equalFilePath` x +replaceDirectory :: FilePath -> String -> FilePath +replaceDirectory x dir = combineAlways dir (takeFileName x) + + +-- | An alias for ''. +combine :: FilePath -> FilePath -> FilePath +combine a b | hasLeadingPathSeparator b || hasDrive b = b + | otherwise = combineAlways a b + +-- | Combine two paths, assuming rhs is NOT absolute. +combineAlways :: FilePath -> FilePath -> FilePath +combineAlways a b | null a = b + | null b = a + | hasTrailingPathSeparator a = a ++ b + | otherwise = case a of + [a1,':'] | isWindows && isLetter a1 -> a ++ b + _ -> a ++ [pathSeparator] ++ b + + +-- | Combine two paths with a path separator. +-- If the second path starts with a path separator or a drive letter, then it returns the second. +-- The intention is that @readFile (dir '' file)@ will access the same file as +-- @setCurrentDirectory dir; readFile file@. 
+-- +-- > Posix: "/directory" "file.ext" == "/directory/file.ext" +-- > Windows: "/directory" "file.ext" == "/directory\\file.ext" +-- > "directory" "/file.ext" == "/file.ext" +-- > Valid x => (takeDirectory x takeFileName x) `equalFilePath` x +-- +-- Combined: +-- +-- > Posix: "/" "test" == "/test" +-- > Posix: "home" "bob" == "home/bob" +-- > Posix: "x:" "foo" == "x:/foo" +-- > Windows: "C:\\foo" "bar" == "C:\\foo\\bar" +-- > Windows: "home" "bob" == "home\\bob" +-- +-- Not combined: +-- +-- > Posix: "home" "/bob" == "/bob" +-- > Windows: "home" "C:\\bob" == "C:\\bob" +-- +-- Not combined (tricky): +-- +-- On Windows, if a filepath starts with a single slash, it is relative to the +-- root of the current drive. In [1], this is (confusingly) referred to as an +-- absolute path. +-- The current behavior of '' is to never combine these forms. +-- +-- > Windows: "home" "/bob" == "/bob" +-- > Windows: "home" "\\bob" == "\\bob" +-- > Windows: "C:\\home" "\\bob" == "\\bob" +-- +-- On Windows, from [1]: "If a file name begins with only a disk designator +-- but not the backslash after the colon, it is interpreted as a relative path +-- to the current directory on the drive with the specified letter." +-- The current behavior of '' is to never combine these forms. +-- +-- > Windows: "D:\\foo" "C:bar" == "C:bar" +-- > Windows: "C:\\foo" "C:bar" == "C:bar" +() :: FilePath -> FilePath -> FilePath +() = combine + + +-- | Split a path by the directory separator. 
+-- +-- > splitPath "/directory/file.ext" == ["/","directory/","file.ext"] +-- > concat (splitPath x) == x +-- > splitPath "test//item/" == ["test//","item/"] +-- > splitPath "test/item/file" == ["test/","item/","file"] +-- > splitPath "" == [] +-- > Windows: splitPath "c:\\test\\path" == ["c:\\","test\\","path"] +-- > Posix: splitPath "/file/test" == ["/","file/","test"] +splitPath :: FilePath -> [FilePath] +splitPath x = [drive | drive /= ""] ++ f path + where + (drive,path) = splitDrive x + + f "" = [] + f y = (a++c) : f d + where + (a,b) = break isPathSeparator y + (c,d) = span isPathSeparator b + +-- | Just as 'splitPath', but don't add the trailing slashes to each element. +-- +-- > splitDirectories "/directory/file.ext" == ["/","directory","file.ext"] +-- > splitDirectories "test/file" == ["test","file"] +-- > splitDirectories "/test/file" == ["/","test","file"] +-- > Windows: splitDirectories "C:\\test\\file" == ["C:\\", "test", "file"] +-- > Valid x => joinPath (splitDirectories x) `equalFilePath` x +-- > splitDirectories "" == [] +-- > Windows: splitDirectories "C:\\test\\\\\\file" == ["C:\\", "test", "file"] +-- > splitDirectories "/test///file" == ["/","test","file"] +splitDirectories :: FilePath -> [FilePath] +splitDirectories = map dropTrailingPathSeparator . splitPath + + +-- | Join path elements back together. +-- +-- > joinPath a == foldr () "" a +-- > joinPath ["/","directory/","file.ext"] == "/directory/file.ext" +-- > Valid x => joinPath (splitPath x) == x +-- > joinPath [] == "" +-- > Posix: joinPath ["test","file","path"] == "test/file/path" +joinPath :: [FilePath] -> FilePath +-- Note that this definition on c:\\c:\\, join then split will give c:\\. +joinPath = foldr combine "" + + + + + + +--------------------------------------------------------------------- +-- File name manipulators + +-- | Equality of two 'FilePath's. +-- If you call @System.Directory.canonicalizePath@ +-- first this has a much better chance of working. 
+-- Note that this doesn't follow symlinks or DOSNAM~1s. +-- +-- Similar to 'normalise', this does not expand @".."@, because of symlinks. +-- +-- > x == y ==> equalFilePath x y +-- > normalise x == normalise y ==> equalFilePath x y +-- > equalFilePath "foo" "foo/" +-- > not (equalFilePath "/a/../c" "/c") +-- > not (equalFilePath "foo" "/foo") +-- > Posix: not (equalFilePath "foo" "FOO") +-- > Windows: equalFilePath "foo" "FOO" +-- > Windows: not (equalFilePath "C:" "C:/") +equalFilePath :: FilePath -> FilePath -> Bool +equalFilePath a b = f a == f b + where + f x | isWindows = dropTrailingPathSeparator $ map toLower $ normalise x + | otherwise = dropTrailingPathSeparator $ normalise x + + +-- | Contract a filename, based on a relative path. Note that the resulting path +-- will never introduce @..@ paths, as the presence of symlinks means @..\/b@ +-- may not reach @a\/b@ if it starts from @a\/c@. For a worked example see +-- . +-- +-- The corresponding @makeAbsolute@ function can be found in +-- @System.Directory@. +-- +-- > makeRelative "/directory" "/directory/file.ext" == "file.ext" +-- > Valid x => makeRelative (takeDirectory x) x `equalFilePath` takeFileName x +-- > makeRelative x x == "." 
+-- > Valid x y => equalFilePath x y || (isRelative x && makeRelative y x == x) || equalFilePath (y makeRelative y x) x +-- > Windows: makeRelative "C:\\Home" "c:\\home\\bob" == "bob" +-- > Windows: makeRelative "C:\\Home" "c:/home/bob" == "bob" +-- > Windows: makeRelative "C:\\Home" "D:\\Home\\Bob" == "D:\\Home\\Bob" +-- > Windows: makeRelative "C:\\Home" "C:Home\\Bob" == "C:Home\\Bob" +-- > Windows: makeRelative "/Home" "/home/bob" == "bob" +-- > Windows: makeRelative "/" "//" == "//" +-- > Posix: makeRelative "/Home" "/home/bob" == "/home/bob" +-- > Posix: makeRelative "/home/" "/home/bob/foo/bar" == "bob/foo/bar" +-- > Posix: makeRelative "/fred" "bob" == "bob" +-- > Posix: makeRelative "/file/test" "/file/test/fred" == "fred" +-- > Posix: makeRelative "/file/test" "/file/test/fred/" == "fred/" +-- > Posix: makeRelative "some/path" "some/path/a/b/c" == "a/b/c" +makeRelative :: FilePath -> FilePath -> FilePath +makeRelative root path + | equalFilePath root path = "." + | takeAbs root /= takeAbs path = path + | otherwise = f (dropAbs root) (dropAbs path) + where + f "" y = dropWhile isPathSeparator y + f x y = let (x1,x2) = g x + (y1,y2) = g y + in if equalFilePath x1 y1 then f x2 y2 else path + + g x = (dropWhile isPathSeparator a, dropWhile isPathSeparator b) + where (a,b) = break isPathSeparator $ dropWhile isPathSeparator x + + -- on windows, need to drop '/' which is kind of absolute, but not a drive + dropAbs x | hasLeadingPathSeparator x && not (hasDrive x) = tail x + dropAbs x = dropDrive x + + takeAbs x | hasLeadingPathSeparator x && not (hasDrive x) = [pathSeparator] + takeAbs x = map (\y -> if isPathSeparator y then pathSeparator else toLower y) $ takeDrive x + +-- | Normalise a file +-- +-- * \/\/ outside of the drive can be made blank +-- +-- * \/ -> 'pathSeparator' +-- +-- * .\/ -> \"\" +-- +-- Does not remove @".."@, because of symlinks. 
+-- +-- > Posix: normalise "/file/\\test////" == "/file/\\test/" +-- > Posix: normalise "/file/./test" == "/file/test" +-- > Posix: normalise "/test/file/../bob/fred/" == "/test/file/../bob/fred/" +-- > Posix: normalise "../bob/fred/" == "../bob/fred/" +-- > Posix: normalise "/a/../c" == "/a/../c" +-- > Posix: normalise "./bob/fred/" == "bob/fred/" +-- > Windows: normalise "c:\\file/bob\\" == "C:\\file\\bob\\" +-- > Windows: normalise "c:\\" == "C:\\" +-- > Windows: normalise "C:.\\" == "C:" +-- > Windows: normalise "\\\\server\\test" == "\\\\server\\test" +-- > Windows: normalise "//server/test" == "\\\\server\\test" +-- > Windows: normalise "c:/file" == "C:\\file" +-- > Windows: normalise "/file" == "\\file" +-- > Windows: normalise "\\" == "\\" +-- > Windows: normalise "/./" == "\\" +-- > normalise "." == "." +-- > Posix: normalise "./" == "./" +-- > Posix: normalise "./." == "./" +-- > Posix: normalise "/./" == "/" +-- > Posix: normalise "/" == "/" +-- > Posix: normalise "bob/fred/." == "bob/fred/" +-- > Posix: normalise "//home" == "/home" +normalise :: FilePath -> FilePath +normalise path = result ++ [pathSeparator | addPathSeparator] + where + (drv,pth) = splitDrive path + result = joinDrive' (normaliseDrive drv) (f pth) + + joinDrive' "" "" = "." + joinDrive' d p = joinDrive d p + + addPathSeparator = isDirPath pth + && not (hasTrailingPathSeparator result) + && not (isRelativeDrive drv) + + isDirPath xs = hasTrailingPathSeparator xs + || not (null xs) && last xs == '.' && hasTrailingPathSeparator (init xs) + + f = joinPath . dropDots . propSep . splitDirectories + + propSep (x:xs) | all isPathSeparator x = [pathSeparator] : xs + | otherwise = x : xs + propSep [] = [] + + dropDots = filter ("." 
/=) + +normaliseDrive :: FilePath -> FilePath +normaliseDrive "" = "" +normaliseDrive _ | isPosix = [pathSeparator] +normaliseDrive drive = if isJust $ readDriveLetter x2 + then map toUpper x2 + else x2 + where + x2 = map repSlash drive + + repSlash x = if isPathSeparator x then pathSeparator else x + +-- Information for validity functions on Windows. See [1]. +isBadCharacter :: Char -> Bool +isBadCharacter x = x >= '\0' && x <= '\31' || x `elem` ":*?><|\"" + +badElements :: [FilePath] +badElements = + ["CON","PRN","AUX","NUL","CLOCK$" + ,"COM1","COM2","COM3","COM4","COM5","COM6","COM7","COM8","COM9" + ,"LPT1","LPT2","LPT3","LPT4","LPT5","LPT6","LPT7","LPT8","LPT9"] + + +-- | Is a FilePath valid, i.e. could you create a file like it? This function checks for invalid names, +-- and invalid characters, but does not check if length limits are exceeded, as these are typically +-- filesystem dependent. +-- +-- > isValid "" == False +-- > isValid "\0" == False +-- > Posix: isValid "/random_ path:*" == True +-- > Posix: isValid x == not (null x) +-- > Windows: isValid "c:\\test" == True +-- > Windows: isValid "c:\\test:of_test" == False +-- > Windows: isValid "test*" == False +-- > Windows: isValid "c:\\test\\nul" == False +-- > Windows: isValid "c:\\test\\prn.txt" == False +-- > Windows: isValid "c:\\nul\\file" == False +-- > Windows: isValid "\\\\" == False +-- > Windows: isValid "\\\\\\foo" == False +-- > Windows: isValid "\\\\?\\D:file" == False +-- > Windows: isValid "foo\tbar" == False +-- > Windows: isValid "nul .txt" == False +-- > Windows: isValid " nul.txt" == True +isValid :: FilePath -> Bool +isValid "" = False +isValid x | '\0' `elem` x = False +isValid _ | isPosix = True +isValid path = + not (any isBadCharacter x2) && + not (any f $ splitDirectories x2) && + not (isJust (readDriveShare x1) && all isPathSeparator x1) && + not (isJust (readDriveUNC x1) && not (hasTrailingPathSeparator x1)) + where + (x1,x2) = splitDrive path + f x = map toUpper (dropWhileEnd 
(== ' ') $ dropExtensions x) `elem` badElements + + +-- | Take a FilePath and make it valid; does not change already valid FilePaths. +-- +-- > isValid (makeValid x) +-- > isValid x ==> makeValid x == x +-- > makeValid "" == "_" +-- > makeValid "file\0name" == "file_name" +-- > Windows: makeValid "c:\\already\\/valid" == "c:\\already\\/valid" +-- > Windows: makeValid "c:\\test:of_test" == "c:\\test_of_test" +-- > Windows: makeValid "test*" == "test_" +-- > Windows: makeValid "c:\\test\\nul" == "c:\\test\\nul_" +-- > Windows: makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt" +-- > Windows: makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt" +-- > Windows: makeValid "c:\\nul\\file" == "c:\\nul_\\file" +-- > Windows: makeValid "\\\\\\foo" == "\\\\drive" +-- > Windows: makeValid "\\\\?\\D:file" == "\\\\?\\D:\\file" +-- > Windows: makeValid "nul .txt" == "nul _.txt" +makeValid :: FilePath -> FilePath +makeValid "" = "_" +makeValid path + | isPosix = map (\x -> if x == '\0' then '_' else x) path + | isJust (readDriveShare drv) && all isPathSeparator drv = take 2 drv ++ "drive" + | isJust (readDriveUNC drv) && not (hasTrailingPathSeparator drv) = + makeValid (drv ++ [pathSeparator] ++ pth) + | otherwise = joinDrive drv $ validElements $ validChars pth + where + (drv,pth) = splitDrive path + + validChars = map f + f x = if isBadCharacter x then '_' else x + + validElements x = joinPath $ map g $ splitPath x + g x = h a ++ b + where (a,b) = break isPathSeparator x + h x = if map toUpper (dropWhileEnd (== ' ') a) `elem` badElements then a ++ "_" <.> b else x + where (a,b) = splitExtensions x + + +-- | Is a path relative, or is it fixed to the root? 
+-- +-- > Windows: isRelative "path\\test" == True +-- > Windows: isRelative "c:\\test" == False +-- > Windows: isRelative "c:test" == True +-- > Windows: isRelative "c:\\" == False +-- > Windows: isRelative "c:/" == False +-- > Windows: isRelative "c:" == True +-- > Windows: isRelative "\\\\foo" == False +-- > Windows: isRelative "\\\\?\\foo" == False +-- > Windows: isRelative "\\\\?\\UNC\\foo" == False +-- > Windows: isRelative "/foo" == True +-- > Windows: isRelative "\\foo" == True +-- > Posix: isRelative "test/path" == True +-- > Posix: isRelative "/test" == False +-- > Posix: isRelative "/" == False +-- +-- According to [1]: +-- +-- * "A UNC name of any format [is never relative]." +-- +-- * "You cannot use the "\\?\" prefix with a relative path." +isRelative :: FilePath -> Bool +isRelative x = null drive || isRelativeDrive drive + where drive = takeDrive x + + +{- c:foo -} +-- From [1]: "If a file name begins with only a disk designator but not the +-- backslash after the colon, it is interpreted as a relative path to the +-- current directory on the drive with the specified letter." +isRelativeDrive :: String -> Bool +isRelativeDrive x = + maybe False (not . hasTrailingPathSeparator . fst) (readDriveLetter x) + + +-- | @not . 'isRelative'@ +-- +-- > isAbsolute x == not (isRelative x) +isAbsolute :: FilePath -> Bool +isAbsolute = not . isRelative + + +----------------------------------------------------------------------------- +-- dropWhileEnd (>2) [1,2,3,4,1,2,3,4] == [1,2,3,4,1,2]) +-- Note that Data.List.dropWhileEnd is only available in base >= 4.5. +dropWhileEnd :: (a -> Bool) -> [a] -> [a] +dropWhileEnd p = reverse . dropWhile p . reverse + +-- takeWhileEnd (>2) [1,2,3,4,1,2,3,4] == [3,4]) +takeWhileEnd :: (a -> Bool) -> [a] -> [a] +takeWhileEnd p = reverse . takeWhile p . 
reverse + +-- spanEnd (>2) [1,2,3,4,1,2,3,4] = ([1,2,3,4,1,2], [3,4]) +spanEnd :: (a -> Bool) -> [a] -> ([a], [a]) +spanEnd p xs = (dropWhileEnd p xs, takeWhileEnd p xs) + +-- breakEnd (< 2) [1,2,3,4,1,2,3,4] == ([1,2,3,4,1],[2,3,4]) +breakEnd :: (a -> Bool) -> [a] -> ([a], [a]) +breakEnd p = spanEnd (not . p) + +-- | The stripSuffix function drops the given suffix from a list. It returns +-- Nothing if the list did not end with the suffix given, or Just the list +-- before the suffix, if it does. +stripSuffix :: Eq a => [a] -> [a] -> Maybe [a] +stripSuffix xs ys = fmap reverse $ stripPrefix (reverse xs) (reverse ys) diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Legacy/System/FilePath/Windows.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Legacy/System/FilePath/Windows.hs new file mode 100644 index 0000000000..0b8f3277ec --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/Legacy/System/FilePath/Windows.hs @@ -0,0 +1,1048 @@ + + + +{-# LANGUAGE PatternGuards #-} + +-- This template expects CPP definitions for: +-- MODULE_NAME = Posix | Windows +-- IS_WINDOWS = False | True + +-- | +-- Module : System.FilePath.MODULE_NAME +-- Copyright : (c) Neil Mitchell 2005-2014 +-- License : BSD3 +-- +-- Maintainer : ndmitchell@gmail.com +-- Stability : stable +-- Portability : portable +-- +-- A library for 'FilePath' manipulations, using MODULE_NAME style paths on +-- all platforms. Importing "System.FilePath" is usually better. +-- +-- Given the example 'FilePath': @\/directory\/file.ext@ +-- +-- We can use the following functions to extract pieces. 
+-- +-- * 'takeFileName' gives @\"file.ext\"@ +-- +-- * 'takeDirectory' gives @\"\/directory\"@ +-- +-- * 'takeExtension' gives @\".ext\"@ +-- +-- * 'dropExtension' gives @\"\/directory\/file\"@ +-- +-- * 'takeBaseName' gives @\"file\"@ +-- +-- And we could have built an equivalent path with the following expressions: +-- +-- * @\"\/directory\" '' \"file.ext\"@. +-- +-- * @\"\/directory\/file" '<.>' \"ext\"@. +-- +-- * @\"\/directory\/file.txt" '-<.>' \"ext\"@. +-- +-- Each function in this module is documented with several examples, +-- which are also used as tests. +-- +-- Here are a few examples of using the @filepath@ functions together: +-- +-- /Example 1:/ Find the possible locations of a Haskell module @Test@ imported from module @Main@: +-- +-- @['replaceFileName' path_to_main \"Test\" '<.>' ext | ext <- [\"hs\",\"lhs\"] ]@ +-- +-- /Example 2:/ Download a file from @url@ and save it to disk: +-- +-- @do let file = 'makeValid' url +-- System.Directory.createDirectoryIfMissing True ('takeDirectory' file)@ +-- +-- /Example 3:/ Compile a Haskell file, putting the @.hi@ file under @interface@: +-- +-- @'takeDirectory' file '' \"interface\" '' ('takeFileName' file '-<.>' \"hi\")@ +-- +-- References: +-- [1] (Microsoft MSDN) +module Legacy.System.FilePath.Windows + ( + -- * Separator predicates + FilePath, + pathSeparator, pathSeparators, isPathSeparator, + searchPathSeparator, isSearchPathSeparator, + extSeparator, isExtSeparator, + + -- * @$PATH@ methods + splitSearchPath, getSearchPath, + + -- * Extension functions + splitExtension, + takeExtension, replaceExtension, (-<.>), dropExtension, addExtension, hasExtension, (<.>), + splitExtensions, dropExtensions, takeExtensions, replaceExtensions, isExtensionOf, + stripExtension, + + -- * Filename\/directory functions + splitFileName, + takeFileName, replaceFileName, dropFileName, + takeBaseName, replaceBaseName, + takeDirectory, replaceDirectory, + combine, (), + splitPath, joinPath, splitDirectories, + + -- * 
Drive functions + splitDrive, joinDrive, + takeDrive, hasDrive, dropDrive, isDrive, + + -- * Trailing slash functions + hasTrailingPathSeparator, + addTrailingPathSeparator, + dropTrailingPathSeparator, + + -- * File name manipulations + normalise, equalFilePath, + makeRelative, + isRelative, isAbsolute, + isValid, makeValid + ) + where + +import Data.Char(toLower, toUpper, isAsciiLower, isAsciiUpper) +import Data.Maybe(isJust) +import Data.List(stripPrefix, isSuffixOf) + +import System.Environment(getEnv) + + +infixr 7 <.>, -<.> +infixr 5 + + + + + +--------------------------------------------------------------------- +-- Platform Abstraction Methods (private) + +-- | Is the operating system Unix or Linux like +isPosix :: Bool +isPosix = not isWindows + +-- | Is the operating system Windows like +isWindows :: Bool +isWindows = True + + +--------------------------------------------------------------------- +-- The basic functions + +-- | The character that separates directories. In the case where more than +-- one character is possible, 'pathSeparator' is the \'ideal\' one. +-- +-- > Windows: pathSeparator == '\\' +-- > Posix: pathSeparator == '/' +-- > isPathSeparator pathSeparator +pathSeparator :: Char +pathSeparator = if isWindows then '\\' else '/' + +-- | The list of all possible separators. +-- +-- > Windows: pathSeparators == ['\\', '/'] +-- > Posix: pathSeparators == ['/'] +-- > pathSeparator `elem` pathSeparators +pathSeparators :: [Char] +pathSeparators = if isWindows then "\\/" else "/" + +-- | Rather than using @(== 'pathSeparator')@, use this. Test if something +-- is a path separator. +-- +-- > isPathSeparator a == (a `elem` pathSeparators) +isPathSeparator :: Char -> Bool +isPathSeparator '/' = True +isPathSeparator '\\' = isWindows +isPathSeparator _ = False + + +-- | The character that is used to separate the entries in the $PATH environment variable. 
+-- +-- > Windows: searchPathSeparator == ';' +-- > Posix: searchPathSeparator == ':' +searchPathSeparator :: Char +searchPathSeparator = if isWindows then ';' else ':' + +-- | Is the character a file separator? +-- +-- > isSearchPathSeparator a == (a == searchPathSeparator) +isSearchPathSeparator :: Char -> Bool +isSearchPathSeparator = (== searchPathSeparator) + + +-- | File extension character +-- +-- > extSeparator == '.' +extSeparator :: Char +extSeparator = '.' + +-- | Is the character an extension character? +-- +-- > isExtSeparator a == (a == extSeparator) +isExtSeparator :: Char -> Bool +isExtSeparator = (== extSeparator) + + +--------------------------------------------------------------------- +-- Path methods (environment $PATH) + +-- | Take a string, split it on the 'searchPathSeparator' character. +-- Blank items are ignored on Windows, and converted to @.@ on Posix. +-- On Windows path elements are stripped of quotes. +-- +-- Follows the recommendations in +-- +-- +-- > Posix: splitSearchPath "File1:File2:File3" == ["File1","File2","File3"] +-- > Posix: splitSearchPath "File1::File2:File3" == ["File1",".","File2","File3"] +-- > Windows: splitSearchPath "File1;File2;File3" == ["File1","File2","File3"] +-- > Windows: splitSearchPath "File1;;File2;File3" == ["File1","File2","File3"] +-- > Windows: splitSearchPath "File1;\"File2\";File3" == ["File1","File2","File3"] +splitSearchPath :: String -> [FilePath] +splitSearchPath = f + where + f xs = case break isSearchPathSeparator xs of + (pre, [] ) -> g pre + (pre, _:post) -> g pre ++ f post + + g "" = ["." | isPosix] + g ('\"':x@(_:_)) | isWindows && last x == '\"' = [init x] + g x = [x] + + +-- | Get a list of 'FilePath's in the $PATH variable. +getSearchPath :: IO [FilePath] +getSearchPath = fmap splitSearchPath (getEnv "PATH") + + +--------------------------------------------------------------------- +-- Extension methods + +-- | Split on the extension. 'addExtension' is the inverse. 
+-- +-- > splitExtension "/directory/path.ext" == ("/directory/path",".ext") +-- > uncurry (++) (splitExtension x) == x +-- > Valid x => uncurry addExtension (splitExtension x) == x +-- > splitExtension "file.txt" == ("file",".txt") +-- > splitExtension "file" == ("file","") +-- > splitExtension "file/file.txt" == ("file/file",".txt") +-- > splitExtension "file.txt/boris" == ("file.txt/boris","") +-- > splitExtension "file.txt/boris.ext" == ("file.txt/boris",".ext") +-- > splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob",".fred") +-- > splitExtension "file/path.txt/" == ("file/path.txt/","") +splitExtension :: FilePath -> (String, String) +splitExtension x = case nameDot of + "" -> (x,"") + _ -> (dir ++ init nameDot, extSeparator : ext) + where + (dir,file) = splitFileName_ x + (nameDot,ext) = breakEnd isExtSeparator file + +-- | Get the extension of a file, returns @\"\"@ for no extension, @.ext@ otherwise. +-- +-- > takeExtension "/directory/path.ext" == ".ext" +-- > takeExtension x == snd (splitExtension x) +-- > Valid x => takeExtension (addExtension x "ext") == ".ext" +-- > Valid x => takeExtension (replaceExtension x "ext") == ".ext" +takeExtension :: FilePath -> String +takeExtension = snd . splitExtension + +-- | Remove the current extension and add another, equivalent to 'replaceExtension'. +-- +-- > "/directory/path.txt" -<.> "ext" == "/directory/path.ext" +-- > "/directory/path.txt" -<.> ".ext" == "/directory/path.ext" +-- > "foo.o" -<.> "c" == "foo.c" +(-<.>) :: FilePath -> String -> FilePath +(-<.>) = replaceExtension + +-- | Set the extension of a file, overwriting one if already present, equivalent to '-<.>'. 
+-- +-- > replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext" +-- > replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext" +-- > replaceExtension "file.txt" ".bob" == "file.bob" +-- > replaceExtension "file.txt" "bob" == "file.bob" +-- > replaceExtension "file" ".bob" == "file.bob" +-- > replaceExtension "file.txt" "" == "file" +-- > replaceExtension "file.fred.bob" "txt" == "file.fred.txt" +-- > replaceExtension x y == addExtension (dropExtension x) y +replaceExtension :: FilePath -> String -> FilePath +replaceExtension x y = dropExtension x <.> y + +-- | Add an extension, even if there is already one there, equivalent to 'addExtension'. +-- +-- > "/directory/path" <.> "ext" == "/directory/path.ext" +-- > "/directory/path" <.> ".ext" == "/directory/path.ext" +(<.>) :: FilePath -> String -> FilePath +(<.>) = addExtension + +-- | Remove last extension, and the \".\" preceding it. +-- +-- > dropExtension "/directory/path.ext" == "/directory/path" +-- > dropExtension x == fst (splitExtension x) +dropExtension :: FilePath -> FilePath +dropExtension = fst . splitExtension + +-- | Add an extension, even if there is already one there, equivalent to '<.>'. +-- +-- > addExtension "/directory/path" "ext" == "/directory/path.ext" +-- > addExtension "file.txt" "bib" == "file.txt.bib" +-- > addExtension "file." ".bib" == "file..bib" +-- > addExtension "file" ".bib" == "file.bib" +-- > addExtension "/" "x" == "/.x" +-- > addExtension x "" == x +-- > Valid x => takeFileName (addExtension (addTrailingPathSeparator x) "ext") == ".ext" +-- > Windows: addExtension "\\\\share" ".txt" == "\\\\share\\.txt" +addExtension :: FilePath -> String -> FilePath +addExtension file "" = file +addExtension file xs@(x:_) = joinDrive a res + where + res = if isExtSeparator x then b ++ xs + else b ++ [extSeparator] ++ xs + + (a,b) = splitDrive file + +-- | Does the given filename have an extension? 
+-- +-- > hasExtension "/directory/path.ext" == True +-- > hasExtension "/directory/path" == False +-- > null (takeExtension x) == not (hasExtension x) +hasExtension :: FilePath -> Bool +hasExtension = any isExtSeparator . takeFileName + + +-- | Does the given filename have the specified extension? +-- +-- > "png" `isExtensionOf` "/directory/file.png" == True +-- > ".png" `isExtensionOf` "/directory/file.png" == True +-- > ".tar.gz" `isExtensionOf` "bar/foo.tar.gz" == True +-- > "ar.gz" `isExtensionOf` "bar/foo.tar.gz" == False +-- > "png" `isExtensionOf` "/directory/file.png.jpg" == False +-- > "csv/table.csv" `isExtensionOf` "/data/csv/table.csv" == False +isExtensionOf :: String -> FilePath -> Bool +isExtensionOf ext@('.':_) = isSuffixOf ext . takeExtensions +isExtensionOf ext = isSuffixOf ('.':ext) . takeExtensions + +-- | Drop the given extension from a FilePath, and the @\".\"@ preceding it. +-- Returns 'Nothing' if the FilePath does not have the given extension, or +-- 'Just' and the part before the extension if it does. +-- +-- This function can be more predictable than 'dropExtensions', especially if the filename +-- might itself contain @.@ characters. +-- +-- > stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x" +-- > stripExtension "hi.o" "foo.x.hs.o" == Nothing +-- > dropExtension x == fromJust (stripExtension (takeExtension x) x) +-- > dropExtensions x == fromJust (stripExtension (takeExtensions x) x) +-- > stripExtension ".c.d" "a.b.c.d" == Just "a.b" +-- > stripExtension ".c.d" "a.b..c.d" == Just "a.b." +-- > stripExtension "baz" "foo.bar" == Nothing +-- > stripExtension "bar" "foobar" == Nothing +-- > stripExtension "" x == Just x +stripExtension :: String -> FilePath -> Maybe FilePath +stripExtension [] path = Just path +stripExtension ext@(x:_) path = stripSuffix dotExt path + where dotExt = if isExtSeparator x then ext else '.':ext + + +-- | Split on all extensions. 
+-- +-- > splitExtensions "/directory/path.ext" == ("/directory/path",".ext") +-- > splitExtensions "file.tar.gz" == ("file",".tar.gz") +-- > uncurry (++) (splitExtensions x) == x +-- > Valid x => uncurry addExtension (splitExtensions x) == x +-- > splitExtensions "file.tar.gz" == ("file",".tar.gz") +splitExtensions :: FilePath -> (FilePath, String) +splitExtensions x = (a ++ c, d) + where + (a,b) = splitFileName_ x + (c,d) = break isExtSeparator b + +-- | Drop all extensions. +-- +-- > dropExtensions "/directory/path.ext" == "/directory/path" +-- > dropExtensions "file.tar.gz" == "file" +-- > not $ hasExtension $ dropExtensions x +-- > not $ any isExtSeparator $ takeFileName $ dropExtensions x +dropExtensions :: FilePath -> FilePath +dropExtensions = fst . splitExtensions + +-- | Get all extensions. +-- +-- > takeExtensions "/directory/path.ext" == ".ext" +-- > takeExtensions "file.tar.gz" == ".tar.gz" +takeExtensions :: FilePath -> String +takeExtensions = snd . splitExtensions + + +-- | Replace all extensions of a file with a new extension. Note +-- that 'replaceExtension' and 'addExtension' both work for adding +-- multiple extensions, so only required when you need to drop +-- all extensions first. +-- +-- > replaceExtensions "file.fred.bob" "txt" == "file.txt" +-- > replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz" +replaceExtensions :: FilePath -> String -> FilePath +replaceExtensions x y = dropExtensions x <.> y + + + +--------------------------------------------------------------------- +-- Drive methods + +-- | Is the given character a valid drive letter? +-- only a-z and A-Z are letters, not isAlpha which is more unicodey +isLetter :: Char -> Bool +isLetter x = isAsciiLower x || isAsciiUpper x + + +-- | Split a path into a drive and a path. +-- On Posix, \/ is a Drive. 
+-- +-- > uncurry (++) (splitDrive x) == x +-- > Windows: splitDrive "file" == ("","file") +-- > Windows: splitDrive "c:/file" == ("c:/","file") +-- > Windows: splitDrive "c:\\file" == ("c:\\","file") +-- > Windows: splitDrive "\\\\shared\\test" == ("\\\\shared\\","test") +-- > Windows: splitDrive "\\\\shared" == ("\\\\shared","") +-- > Windows: splitDrive "\\\\?\\UNC\\shared\\file" == ("\\\\?\\UNC\\shared\\","file") +-- > Windows: splitDrive "\\\\?\\UNCshared\\file" == ("\\\\?\\","UNCshared\\file") +-- > Windows: splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\","file") +-- > Windows: splitDrive "/d" == ("","/d") +-- > Posix: splitDrive "/test" == ("/","test") +-- > Posix: splitDrive "//test" == ("//","test") +-- > Posix: splitDrive "test/file" == ("","test/file") +-- > Posix: splitDrive "file" == ("","file") +splitDrive :: FilePath -> (FilePath, FilePath) +splitDrive x | isPosix = span (== '/') x +splitDrive x | Just y <- readDriveLetter x = y +splitDrive x | Just y <- readDriveUNC x = y +splitDrive x | Just y <- readDriveShare x = y +splitDrive x = ("",x) + +addSlash :: FilePath -> FilePath -> (FilePath, FilePath) +addSlash a xs = (a++c,d) + where (c,d) = span isPathSeparator xs + +-- See [1]. +-- "\\?\D:\" or "\\?\UNC\\" +readDriveUNC :: FilePath -> Maybe (FilePath, FilePath) +readDriveUNC (s1:s2:'?':s3:xs) | all isPathSeparator [s1,s2,s3] = + case map toUpper xs of + ('U':'N':'C':s4:_) | isPathSeparator s4 -> + let (a,b) = readDriveShareName (drop 4 xs) + in Just (s1:s2:'?':s3:take 4 xs ++ a, b) + _ -> case readDriveLetter xs of + -- Extended-length path. 
+ Just (a,b) -> Just (s1:s2:'?':s3:a,b) + Nothing -> Nothing +readDriveUNC _ = Nothing + +{- c:\ -} +readDriveLetter :: String -> Maybe (FilePath, FilePath) +readDriveLetter (x:':':y:xs) | isLetter x && isPathSeparator y = Just $ addSlash [x,':'] (y:xs) +readDriveLetter (x:':':xs) | isLetter x = Just ([x,':'], xs) +readDriveLetter _ = Nothing + +{- \\sharename\ -} +readDriveShare :: String -> Maybe (FilePath, FilePath) +readDriveShare (s1:s2:xs) | isPathSeparator s1 && isPathSeparator s2 = + Just (s1:s2:a,b) + where (a,b) = readDriveShareName xs +readDriveShare _ = Nothing + +{- assume you have already seen \\ -} +{- share\bob -> "share\", "bob" -} +readDriveShareName :: String -> (FilePath, FilePath) +readDriveShareName name = addSlash a b + where (a,b) = break isPathSeparator name + + + +-- | Join a drive and the rest of the path. +-- +-- > Valid x => uncurry joinDrive (splitDrive x) == x +-- > Windows: joinDrive "C:" "foo" == "C:foo" +-- > Windows: joinDrive "C:\\" "bar" == "C:\\bar" +-- > Windows: joinDrive "\\\\share" "foo" == "\\\\share\\foo" +-- > Windows: joinDrive "/:" "foo" == "/:\\foo" +joinDrive :: FilePath -> FilePath -> FilePath +joinDrive = combineAlways + +-- | Get the drive from a filepath. +-- +-- > takeDrive x == fst (splitDrive x) +takeDrive :: FilePath -> FilePath +takeDrive = fst . splitDrive + +-- | Delete the drive, if it exists. +-- +-- > dropDrive x == snd (splitDrive x) +dropDrive :: FilePath -> FilePath +dropDrive = snd . splitDrive + +-- | Does a path have a drive. +-- +-- > not (hasDrive x) == null (takeDrive x) +-- > Posix: hasDrive "/foo" == True +-- > Windows: hasDrive "C:\\foo" == True +-- > Windows: hasDrive "C:foo" == True +-- > hasDrive "foo" == False +-- > hasDrive "" == False +hasDrive :: FilePath -> Bool +hasDrive = not . null . 
takeDrive + + +-- | Is an element a drive +-- +-- > Posix: isDrive "/" == True +-- > Posix: isDrive "/foo" == False +-- > Windows: isDrive "C:\\" == True +-- > Windows: isDrive "C:\\foo" == False +-- > isDrive "" == False +isDrive :: FilePath -> Bool +isDrive x = not (null x) && null (dropDrive x) + + +--------------------------------------------------------------------- +-- Operations on a filepath, as a list of directories + +-- | Split a filename into directory and file. '' is the inverse. +-- The first component will often end with a trailing slash. +-- +-- > splitFileName "/directory/file.ext" == ("/directory/","file.ext") +-- > Valid x => uncurry () (splitFileName x) == x || fst (splitFileName x) == "./" +-- > Valid x => isValid (fst (splitFileName x)) +-- > splitFileName "file/bob.txt" == ("file/", "bob.txt") +-- > splitFileName "file/" == ("file/", "") +-- > splitFileName "bob" == ("./", "bob") +-- > Posix: splitFileName "/" == ("/","") +-- > Windows: splitFileName "c:" == ("c:","") +splitFileName :: FilePath -> (String, String) +splitFileName x = (if null dir then "./" else dir, name) + where + (dir, name) = splitFileName_ x + +-- version of splitFileName where, if the FilePath has no directory +-- component, the returned directory is "" rather than "./". This +-- is used in cases where we are going to combine the returned +-- directory to make a valid FilePath, and having a "./" appear would +-- look strange and upset simple equality properties. See +-- e.g. replaceFileName. +splitFileName_ :: FilePath -> (String, String) +splitFileName_ x = (drv ++ dir, file) + where + (drv,pth) = splitDrive x + (dir,file) = breakEnd isPathSeparator pth + +-- | Set the filename. +-- +-- > replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext" +-- > Valid x => replaceFileName x (takeFileName x) == x +replaceFileName :: FilePath -> String -> FilePath +replaceFileName x y = a y where (a,_) = splitFileName_ x + +-- | Drop the filename. 
Unlike 'takeDirectory', this function will leave +-- a trailing path separator on the directory. +-- +-- > dropFileName "/directory/file.ext" == "/directory/" +-- > dropFileName x == fst (splitFileName x) +dropFileName :: FilePath -> FilePath +dropFileName = fst . splitFileName + + +-- | Get the file name. +-- +-- > takeFileName "/directory/file.ext" == "file.ext" +-- > takeFileName "test/" == "" +-- > takeFileName x `isSuffixOf` x +-- > takeFileName x == snd (splitFileName x) +-- > Valid x => takeFileName (replaceFileName x "fred") == "fred" +-- > Valid x => takeFileName (x "fred") == "fred" +-- > Valid x => isRelative (takeFileName x) +takeFileName :: FilePath -> FilePath +takeFileName = snd . splitFileName + +-- | Get the base name, without an extension or path. +-- +-- > takeBaseName "/directory/file.ext" == "file" +-- > takeBaseName "file/test.txt" == "test" +-- > takeBaseName "dave.ext" == "dave" +-- > takeBaseName "" == "" +-- > takeBaseName "test" == "test" +-- > takeBaseName (addTrailingPathSeparator x) == "" +-- > takeBaseName "file/file.tar.gz" == "file.tar" +takeBaseName :: FilePath -> String +takeBaseName = dropExtension . takeFileName + +-- | Set the base name. +-- +-- > replaceBaseName "/directory/other.ext" "file" == "/directory/file.ext" +-- > replaceBaseName "file/test.txt" "bob" == "file/bob.txt" +-- > replaceBaseName "fred" "bill" == "bill" +-- > replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar" +-- > Valid x => replaceBaseName x (takeBaseName x) == x +replaceBaseName :: FilePath -> String -> FilePath +replaceBaseName pth nam = combineAlways a (nam <.> ext) + where + (a,b) = splitFileName_ pth + ext = takeExtension b + +-- | Is an item either a directory or the last character a path separator? 
+-- +-- > hasTrailingPathSeparator "test" == False +-- > hasTrailingPathSeparator "test/" == True +hasTrailingPathSeparator :: FilePath -> Bool +hasTrailingPathSeparator "" = False +hasTrailingPathSeparator x = isPathSeparator (last x) + + +hasLeadingPathSeparator :: FilePath -> Bool +hasLeadingPathSeparator "" = False +hasLeadingPathSeparator x = isPathSeparator (head x) + + +-- | Add a trailing file path separator if one is not already present. +-- +-- > hasTrailingPathSeparator (addTrailingPathSeparator x) +-- > hasTrailingPathSeparator x ==> addTrailingPathSeparator x == x +-- > Posix: addTrailingPathSeparator "test/rest" == "test/rest/" +addTrailingPathSeparator :: FilePath -> FilePath +addTrailingPathSeparator x = if hasTrailingPathSeparator x then x else x ++ [pathSeparator] + + +-- | Remove any trailing path separators +-- +-- > dropTrailingPathSeparator "file/test/" == "file/test" +-- > dropTrailingPathSeparator "/" == "/" +-- > Windows: dropTrailingPathSeparator "\\" == "\\" +-- > Posix: not (hasTrailingPathSeparator (dropTrailingPathSeparator x)) || isDrive x +dropTrailingPathSeparator :: FilePath -> FilePath +dropTrailingPathSeparator x = + if hasTrailingPathSeparator x && not (isDrive x) + then let x' = dropWhileEnd isPathSeparator x + in if null x' then [last x] else x' + else x + + +-- | Get the directory name, move up one level. +-- +-- > takeDirectory "/directory/other.ext" == "/directory" +-- > takeDirectory x `isPrefixOf` x || takeDirectory x == "." +-- > takeDirectory "foo" == "." +-- > takeDirectory "/" == "/" +-- > takeDirectory "/foo" == "/" +-- > takeDirectory "/foo/bar/baz" == "/foo/bar" +-- > takeDirectory "/foo/bar/baz/" == "/foo/bar/baz" +-- > takeDirectory "foo/bar/baz" == "foo/bar" +-- > Windows: takeDirectory "foo\\bar" == "foo" +-- > Windows: takeDirectory "foo\\bar\\\\" == "foo\\bar" +-- > Windows: takeDirectory "C:\\" == "C:\\" +takeDirectory :: FilePath -> FilePath +takeDirectory = dropTrailingPathSeparator . 
dropFileName + +-- | Set the directory, keeping the filename the same. +-- +-- > replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext" +-- > Valid x => replaceDirectory x (takeDirectory x) `equalFilePath` x +replaceDirectory :: FilePath -> String -> FilePath +replaceDirectory x dir = combineAlways dir (takeFileName x) + + +-- | An alias for ''. +combine :: FilePath -> FilePath -> FilePath +combine a b | hasLeadingPathSeparator b || hasDrive b = b + | otherwise = combineAlways a b + +-- | Combine two paths, assuming rhs is NOT absolute. +combineAlways :: FilePath -> FilePath -> FilePath +combineAlways a b | null a = b + | null b = a + | hasTrailingPathSeparator a = a ++ b + | otherwise = case a of + [a1,':'] | isWindows && isLetter a1 -> a ++ b + _ -> a ++ [pathSeparator] ++ b + + +-- | Combine two paths with a path separator. +-- If the second path starts with a path separator or a drive letter, then it returns the second. +-- The intention is that @readFile (dir '' file)@ will access the same file as +-- @setCurrentDirectory dir; readFile file@. +-- +-- > Posix: "/directory" "file.ext" == "/directory/file.ext" +-- > Windows: "/directory" "file.ext" == "/directory\\file.ext" +-- > "directory" "/file.ext" == "/file.ext" +-- > Valid x => (takeDirectory x takeFileName x) `equalFilePath` x +-- +-- Combined: +-- +-- > Posix: "/" "test" == "/test" +-- > Posix: "home" "bob" == "home/bob" +-- > Posix: "x:" "foo" == "x:/foo" +-- > Windows: "C:\\foo" "bar" == "C:\\foo\\bar" +-- > Windows: "home" "bob" == "home\\bob" +-- +-- Not combined: +-- +-- > Posix: "home" "/bob" == "/bob" +-- > Windows: "home" "C:\\bob" == "C:\\bob" +-- +-- Not combined (tricky): +-- +-- On Windows, if a filepath starts with a single slash, it is relative to the +-- root of the current drive. In [1], this is (confusingly) referred to as an +-- absolute path. +-- The current behavior of '' is to never combine these forms. 
+-- +-- > Windows: "home" "/bob" == "/bob" +-- > Windows: "home" "\\bob" == "\\bob" +-- > Windows: "C:\\home" "\\bob" == "\\bob" +-- +-- On Windows, from [1]: "If a file name begins with only a disk designator +-- but not the backslash after the colon, it is interpreted as a relative path +-- to the current directory on the drive with the specified letter." +-- The current behavior of '' is to never combine these forms. +-- +-- > Windows: "D:\\foo" "C:bar" == "C:bar" +-- > Windows: "C:\\foo" "C:bar" == "C:bar" +() :: FilePath -> FilePath -> FilePath +() = combine + + +-- | Split a path by the directory separator. +-- +-- > splitPath "/directory/file.ext" == ["/","directory/","file.ext"] +-- > concat (splitPath x) == x +-- > splitPath "test//item/" == ["test//","item/"] +-- > splitPath "test/item/file" == ["test/","item/","file"] +-- > splitPath "" == [] +-- > Windows: splitPath "c:\\test\\path" == ["c:\\","test\\","path"] +-- > Posix: splitPath "/file/test" == ["/","file/","test"] +splitPath :: FilePath -> [FilePath] +splitPath x = [drive | drive /= ""] ++ f path + where + (drive,path) = splitDrive x + + f "" = [] + f y = (a++c) : f d + where + (a,b) = break isPathSeparator y + (c,d) = span isPathSeparator b + +-- | Just as 'splitPath', but don't add the trailing slashes to each element. +-- +-- > splitDirectories "/directory/file.ext" == ["/","directory","file.ext"] +-- > splitDirectories "test/file" == ["test","file"] +-- > splitDirectories "/test/file" == ["/","test","file"] +-- > Windows: splitDirectories "C:\\test\\file" == ["C:\\", "test", "file"] +-- > Valid x => joinPath (splitDirectories x) `equalFilePath` x +-- > splitDirectories "" == [] +-- > Windows: splitDirectories "C:\\test\\\\\\file" == ["C:\\", "test", "file"] +-- > splitDirectories "/test///file" == ["/","test","file"] +splitDirectories :: FilePath -> [FilePath] +splitDirectories = map dropTrailingPathSeparator . splitPath + + +-- | Join path elements back together. 
+-- +-- > joinPath a == foldr () "" a +-- > joinPath ["/","directory/","file.ext"] == "/directory/file.ext" +-- > Valid x => joinPath (splitPath x) == x +-- > joinPath [] == "" +-- > Posix: joinPath ["test","file","path"] == "test/file/path" +joinPath :: [FilePath] -> FilePath +-- Note that this definition on c:\\c:\\, join then split will give c:\\. +joinPath = foldr combine "" + + + + + + +--------------------------------------------------------------------- +-- File name manipulators + +-- | Equality of two 'FilePath's. +-- If you call @System.Directory.canonicalizePath@ +-- first this has a much better chance of working. +-- Note that this doesn't follow symlinks or DOSNAM~1s. +-- +-- Similar to 'normalise', this does not expand @".."@, because of symlinks. +-- +-- > x == y ==> equalFilePath x y +-- > normalise x == normalise y ==> equalFilePath x y +-- > equalFilePath "foo" "foo/" +-- > not (equalFilePath "/a/../c" "/c") +-- > not (equalFilePath "foo" "/foo") +-- > Posix: not (equalFilePath "foo" "FOO") +-- > Windows: equalFilePath "foo" "FOO" +-- > Windows: not (equalFilePath "C:" "C:/") +equalFilePath :: FilePath -> FilePath -> Bool +equalFilePath a b = f a == f b + where + f x | isWindows = dropTrailingPathSeparator $ map toLower $ normalise x + | otherwise = dropTrailingPathSeparator $ normalise x + + +-- | Contract a filename, based on a relative path. Note that the resulting path +-- will never introduce @..@ paths, as the presence of symlinks means @..\/b@ +-- may not reach @a\/b@ if it starts from @a\/c@. For a worked example see +-- . +-- +-- The corresponding @makeAbsolute@ function can be found in +-- @System.Directory@. +-- +-- > makeRelative "/directory" "/directory/file.ext" == "file.ext" +-- > Valid x => makeRelative (takeDirectory x) x `equalFilePath` takeFileName x +-- > makeRelative x x == "." 
+-- > Valid x y => equalFilePath x y || (isRelative x && makeRelative y x == x) || equalFilePath (y makeRelative y x) x +-- > Windows: makeRelative "C:\\Home" "c:\\home\\bob" == "bob" +-- > Windows: makeRelative "C:\\Home" "c:/home/bob" == "bob" +-- > Windows: makeRelative "C:\\Home" "D:\\Home\\Bob" == "D:\\Home\\Bob" +-- > Windows: makeRelative "C:\\Home" "C:Home\\Bob" == "C:Home\\Bob" +-- > Windows: makeRelative "/Home" "/home/bob" == "bob" +-- > Windows: makeRelative "/" "//" == "//" +-- > Posix: makeRelative "/Home" "/home/bob" == "/home/bob" +-- > Posix: makeRelative "/home/" "/home/bob/foo/bar" == "bob/foo/bar" +-- > Posix: makeRelative "/fred" "bob" == "bob" +-- > Posix: makeRelative "/file/test" "/file/test/fred" == "fred" +-- > Posix: makeRelative "/file/test" "/file/test/fred/" == "fred/" +-- > Posix: makeRelative "some/path" "some/path/a/b/c" == "a/b/c" +makeRelative :: FilePath -> FilePath -> FilePath +makeRelative root path + | equalFilePath root path = "." + | takeAbs root /= takeAbs path = path + | otherwise = f (dropAbs root) (dropAbs path) + where + f "" y = dropWhile isPathSeparator y + f x y = let (x1,x2) = g x + (y1,y2) = g y + in if equalFilePath x1 y1 then f x2 y2 else path + + g x = (dropWhile isPathSeparator a, dropWhile isPathSeparator b) + where (a,b) = break isPathSeparator $ dropWhile isPathSeparator x + + -- on windows, need to drop '/' which is kind of absolute, but not a drive + dropAbs x | hasLeadingPathSeparator x && not (hasDrive x) = tail x + dropAbs x = dropDrive x + + takeAbs x | hasLeadingPathSeparator x && not (hasDrive x) = [pathSeparator] + takeAbs x = map (\y -> if isPathSeparator y then pathSeparator else toLower y) $ takeDrive x + +-- | Normalise a file +-- +-- * \/\/ outside of the drive can be made blank +-- +-- * \/ -> 'pathSeparator' +-- +-- * .\/ -> \"\" +-- +-- Does not remove @".."@, because of symlinks. 
+-- +-- > Posix: normalise "/file/\\test////" == "/file/\\test/" +-- > Posix: normalise "/file/./test" == "/file/test" +-- > Posix: normalise "/test/file/../bob/fred/" == "/test/file/../bob/fred/" +-- > Posix: normalise "../bob/fred/" == "../bob/fred/" +-- > Posix: normalise "/a/../c" == "/a/../c" +-- > Posix: normalise "./bob/fred/" == "bob/fred/" +-- > Windows: normalise "c:\\file/bob\\" == "C:\\file\\bob\\" +-- > Windows: normalise "c:\\" == "C:\\" +-- > Windows: normalise "C:.\\" == "C:" +-- > Windows: normalise "\\\\server\\test" == "\\\\server\\test" +-- > Windows: normalise "//server/test" == "\\\\server\\test" +-- > Windows: normalise "c:/file" == "C:\\file" +-- > Windows: normalise "/file" == "\\file" +-- > Windows: normalise "\\" == "\\" +-- > Windows: normalise "/./" == "\\" +-- > normalise "." == "." +-- > Posix: normalise "./" == "./" +-- > Posix: normalise "./." == "./" +-- > Posix: normalise "/./" == "/" +-- > Posix: normalise "/" == "/" +-- > Posix: normalise "bob/fred/." == "bob/fred/" +-- > Posix: normalise "//home" == "/home" +normalise :: FilePath -> FilePath +normalise path = result ++ [pathSeparator | addPathSeparator] + where + (drv,pth) = splitDrive path + result = joinDrive' (normaliseDrive drv) (f pth) + + joinDrive' "" "" = "." + joinDrive' d p = joinDrive d p + + addPathSeparator = isDirPath pth + && not (hasTrailingPathSeparator result) + && not (isRelativeDrive drv) + + isDirPath xs = hasTrailingPathSeparator xs + || not (null xs) && last xs == '.' && hasTrailingPathSeparator (init xs) + + f = joinPath . dropDots . propSep . splitDirectories + + propSep (x:xs) | all isPathSeparator x = [pathSeparator] : xs + | otherwise = x : xs + propSep [] = [] + + dropDots = filter ("." 
/=) + +normaliseDrive :: FilePath -> FilePath +normaliseDrive "" = "" +normaliseDrive _ | isPosix = [pathSeparator] +normaliseDrive drive = if isJust $ readDriveLetter x2 + then map toUpper x2 + else x2 + where + x2 = map repSlash drive + + repSlash x = if isPathSeparator x then pathSeparator else x + +-- Information for validity functions on Windows. See [1]. +isBadCharacter :: Char -> Bool +isBadCharacter x = x >= '\0' && x <= '\31' || x `elem` ":*?><|\"" + +badElements :: [FilePath] +badElements = + ["CON","PRN","AUX","NUL","CLOCK$" + ,"COM1","COM2","COM3","COM4","COM5","COM6","COM7","COM8","COM9" + ,"LPT1","LPT2","LPT3","LPT4","LPT5","LPT6","LPT7","LPT8","LPT9"] + + +-- | Is a FilePath valid, i.e. could you create a file like it? This function checks for invalid names, +-- and invalid characters, but does not check if length limits are exceeded, as these are typically +-- filesystem dependent. +-- +-- > isValid "" == False +-- > isValid "\0" == False +-- > Posix: isValid "/random_ path:*" == True +-- > Posix: isValid x == not (null x) +-- > Windows: isValid "c:\\test" == True +-- > Windows: isValid "c:\\test:of_test" == False +-- > Windows: isValid "test*" == False +-- > Windows: isValid "c:\\test\\nul" == False +-- > Windows: isValid "c:\\test\\prn.txt" == False +-- > Windows: isValid "c:\\nul\\file" == False +-- > Windows: isValid "\\\\" == False +-- > Windows: isValid "\\\\\\foo" == False +-- > Windows: isValid "\\\\?\\D:file" == False +-- > Windows: isValid "foo\tbar" == False +-- > Windows: isValid "nul .txt" == False +-- > Windows: isValid " nul.txt" == True +isValid :: FilePath -> Bool +isValid "" = False +isValid x | '\0' `elem` x = False +isValid _ | isPosix = True +isValid path = + not (any isBadCharacter x2) && + not (any f $ splitDirectories x2) && + not (isJust (readDriveShare x1) && all isPathSeparator x1) && + not (isJust (readDriveUNC x1) && not (hasTrailingPathSeparator x1)) + where + (x1,x2) = splitDrive path + f x = map toUpper (dropWhileEnd 
(== ' ') $ dropExtensions x) `elem` badElements + + +-- | Take a FilePath and make it valid; does not change already valid FilePaths. +-- +-- > isValid (makeValid x) +-- > isValid x ==> makeValid x == x +-- > makeValid "" == "_" +-- > makeValid "file\0name" == "file_name" +-- > Windows: makeValid "c:\\already\\/valid" == "c:\\already\\/valid" +-- > Windows: makeValid "c:\\test:of_test" == "c:\\test_of_test" +-- > Windows: makeValid "test*" == "test_" +-- > Windows: makeValid "c:\\test\\nul" == "c:\\test\\nul_" +-- > Windows: makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt" +-- > Windows: makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt" +-- > Windows: makeValid "c:\\nul\\file" == "c:\\nul_\\file" +-- > Windows: makeValid "\\\\\\foo" == "\\\\drive" +-- > Windows: makeValid "\\\\?\\D:file" == "\\\\?\\D:\\file" +-- > Windows: makeValid "nul .txt" == "nul _.txt" +makeValid :: FilePath -> FilePath +makeValid "" = "_" +makeValid path + | isPosix = map (\x -> if x == '\0' then '_' else x) path + | isJust (readDriveShare drv) && all isPathSeparator drv = take 2 drv ++ "drive" + | isJust (readDriveUNC drv) && not (hasTrailingPathSeparator drv) = + makeValid (drv ++ [pathSeparator] ++ pth) + | otherwise = joinDrive drv $ validElements $ validChars pth + where + (drv,pth) = splitDrive path + + validChars = map f + f x = if isBadCharacter x then '_' else x + + validElements x = joinPath $ map g $ splitPath x + g x = h a ++ b + where (a,b) = break isPathSeparator x + h x = if map toUpper (dropWhileEnd (== ' ') a) `elem` badElements then a ++ "_" <.> b else x + where (a,b) = splitExtensions x + + +-- | Is a path relative, or is it fixed to the root? 
+-- +-- > Windows: isRelative "path\\test" == True +-- > Windows: isRelative "c:\\test" == False +-- > Windows: isRelative "c:test" == True +-- > Windows: isRelative "c:\\" == False +-- > Windows: isRelative "c:/" == False +-- > Windows: isRelative "c:" == True +-- > Windows: isRelative "\\\\foo" == False +-- > Windows: isRelative "\\\\?\\foo" == False +-- > Windows: isRelative "\\\\?\\UNC\\foo" == False +-- > Windows: isRelative "/foo" == True +-- > Windows: isRelative "\\foo" == True +-- > Posix: isRelative "test/path" == True +-- > Posix: isRelative "/test" == False +-- > Posix: isRelative "/" == False +-- +-- According to [1]: +-- +-- * "A UNC name of any format [is never relative]." +-- +-- * "You cannot use the "\\?\" prefix with a relative path." +isRelative :: FilePath -> Bool +isRelative x = null drive || isRelativeDrive drive + where drive = takeDrive x + + +{- c:foo -} +-- From [1]: "If a file name begins with only a disk designator but not the +-- backslash after the colon, it is interpreted as a relative path to the +-- current directory on the drive with the specified letter." +isRelativeDrive :: String -> Bool +isRelativeDrive x = + maybe False (not . hasTrailingPathSeparator . fst) (readDriveLetter x) + + +-- | @not . 'isRelative'@ +-- +-- > isAbsolute x == not (isRelative x) +isAbsolute :: FilePath -> Bool +isAbsolute = not . isRelative + + +----------------------------------------------------------------------------- +-- dropWhileEnd (>2) [1,2,3,4,1,2,3,4] == [1,2,3,4,1,2]) +-- Note that Data.List.dropWhileEnd is only available in base >= 4.5. +dropWhileEnd :: (a -> Bool) -> [a] -> [a] +dropWhileEnd p = reverse . dropWhile p . reverse + +-- takeWhileEnd (>2) [1,2,3,4,1,2,3,4] == [3,4]) +takeWhileEnd :: (a -> Bool) -> [a] -> [a] +takeWhileEnd p = reverse . takeWhile p . 
reverse + +-- spanEnd (>2) [1,2,3,4,1,2,3,4] = ([1,2,3,4,1,2], [3,4]) +spanEnd :: (a -> Bool) -> [a] -> ([a], [a]) +spanEnd p xs = (dropWhileEnd p xs, takeWhileEnd p xs) + +-- breakEnd (< 2) [1,2,3,4,1,2,3,4] == ([1,2,3,4,1],[2,3,4]) +breakEnd :: (a -> Bool) -> [a] -> ([a], [a]) +breakEnd p = spanEnd (not . p) + +-- | The stripSuffix function drops the given suffix from a list. It returns +-- Nothing if the list did not end with the suffix given, or Just the list +-- before the suffix, if it does. +stripSuffix :: Eq a => [a] -> [a] -> Maybe [a] +stripSuffix xs ys = fmap reverse $ stripPrefix (reverse xs) (reverse ys) diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/TestEquiv.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/TestEquiv.hs new file mode 100644 index 0000000000..339ce1ace6 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-equivalent-tests/TestEquiv.hs @@ -0,0 +1,433 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverlappingInstances #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE TypeApplications #-} + +module Main where + +import Test.Tasty +import Test.Tasty.QuickCheck hiding ((==>)) +import TestUtil +import Prelude as P +import Data.Char (isAsciiLower, isAsciiUpper) +import Gen + +import qualified System.FilePath.Windows as W +import qualified System.FilePath.Posix as P +import qualified Legacy.System.FilePath.Windows as LW +import qualified Legacy.System.FilePath.Posix as LP + + +main :: IO () +main = defaultMain equivalentTests + + +equivalentTests :: TestTree +equivalentTests = testGroup "equivalence" + [ testProperties "windows" + [ + ( "pathSeparator" + , property $ W.pathSeparator == LW.pathSeparator + ) + , + ( "pathSeparators" + , property $ W.pathSeparators == LW.pathSeparators + ) + , + ( 
"isPathSeparator" + , property $ \p -> W.isPathSeparator p == LW.isPathSeparator p + ) + , + ( "searchPathSeparator" + , property $ W.searchPathSeparator == LW.searchPathSeparator + ) + , + ( "isSearchPathSeparator" + , property $ \p -> W.isSearchPathSeparator p == LW.isSearchPathSeparator p + ) + , + ( "extSeparator" + , property $ W.extSeparator == LW.extSeparator + ) + , + ( "isExtSeparator" + , property $ \p -> W.isExtSeparator p == LW.isExtSeparator p + ) + , + ( "splitSearchPath" + , property $ \(xs :: WindowsFilePaths) + -> let p = (intercalate ";" (altShow <$> unWindowsFilePaths xs)) + in W.splitSearchPath p == LW.splitSearchPath p + ) + , + ( "splitExtension" + , property $ \(altShow @WindowsFilePath -> p) -> W.splitExtension p == LW.splitExtension p + ) + , + ( "takeExtension" + , property $ \(altShow @WindowsFilePath -> p) -> W.takeExtension p == LW.takeExtension p + ) + , + ( "replaceExtension" + , property $ \(altShow @WindowsFilePath -> p) s -> W.replaceExtension p s == LW.replaceExtension p s + ) + , + ( "dropExtension" + , property $ \(altShow @WindowsFilePath -> p) -> W.dropExtension p == LW.dropExtension p + ) + , + ( "addExtension" + , property $ \(altShow @WindowsFilePath -> p) s -> W.addExtension p s == LW.addExtension p s + ) + , + ( "hasExtension" + , property $ \(altShow @WindowsFilePath -> p) -> W.hasExtension p == LW.hasExtension p + ) + , + ( "splitExtensions" + , property $ \(altShow @WindowsFilePath -> p) -> W.splitExtensions p == LW.splitExtensions p + ) + , + ( "dropExtensions" + , property $ \(altShow @WindowsFilePath -> p) -> W.dropExtensions p == LW.dropExtensions p + ) + , + ( "takeExtensions" + , property $ \p -> W.takeExtensions p == LW.takeExtensions p + ) + , + ( "replaceExtensions" + , property $ \(altShow @WindowsFilePath -> p) s -> W.replaceExtensions p s == LW.replaceExtensions p s + ) + , + ( "isExtensionOf" + , property $ \(altShow @WindowsFilePath -> p) s -> W.isExtensionOf p s == LW.isExtensionOf p s + ) + , + ( 
"stripExtension" + , property $ \(altShow @WindowsFilePath -> p) s -> W.stripExtension p s == LW.stripExtension p s + ) + , + ( "splitFileName" + , property $ \(altShow @WindowsFilePath -> p) -> W.splitFileName p == LW.splitFileName p + ) + , + ( "takeFileName" + , property $ \(altShow @WindowsFilePath -> p) -> W.takeFileName p == LW.takeFileName p + ) + , + ( "replaceFileName" + , property $ \(altShow @WindowsFilePath -> p) s -> W.replaceFileName p s == LW.replaceFileName p s + ) + , + ( "dropFileName" + , property $ \(altShow @WindowsFilePath -> p) -> W.dropFileName p == LW.dropFileName p + ) + , + ( "takeBaseName" + , property $ \(altShow @WindowsFilePath -> p) -> W.takeBaseName p == LW.takeBaseName p + ) + , + ( "replaceBaseName" + , property $ \(altShow @WindowsFilePath -> p) s -> W.replaceBaseName p s == LW.replaceBaseName p s + ) + , + ( "takeDirectory" + , property $ \(altShow @WindowsFilePath -> p) -> W.takeDirectory p == LW.takeDirectory p + ) + , + ( "replaceDirectory" + , property $ \(altShow @WindowsFilePath -> p) s -> W.replaceDirectory p s == LW.replaceDirectory p s + ) + , + ( "combine" + , property $ \(altShow @WindowsFilePath -> p) s -> W.combine p s == LW.combine p s + ) + , + ( "splitPath" + , property $ \(altShow @WindowsFilePath -> p) -> W.splitPath p == LW.splitPath p + ) + , + ( "joinPath" + , property $ \(xs :: WindowsFilePaths) -> + let p = altShow <$> unWindowsFilePaths xs + in W.joinPath p == LW.joinPath p + ) + , + ( "splitDirectories" + , property $ \(altShow @WindowsFilePath -> p) -> W.splitDirectories p == LW.splitDirectories p + ) + , + ( "splitDrive" + , property $ \(altShow @WindowsFilePath -> p) -> W.splitDrive p == LW.splitDrive p + ) + , + ( "joinDrive" + , property $ \(altShow @WindowsFilePath -> p) s -> W.joinDrive p s == LW.joinDrive p s + ) + , + ( "takeDrive" + , property $ \(altShow @WindowsFilePath -> p) -> W.takeDrive p == LW.takeDrive p + ) + , + ( "hasDrive" + , property $ \(altShow @WindowsFilePath -> p) -> 
W.hasDrive p == LW.hasDrive p + ) + , + ( "dropDrive" + , property $ \(altShow @WindowsFilePath -> p) -> W.dropDrive p == LW.dropDrive p + ) + , + ( "isDrive" + , property $ \(altShow @WindowsFilePath -> p) -> W.isDrive p == LW.isDrive p + ) + , + ( "hasTrailingPathSeparator" + , property $ \(altShow @WindowsFilePath -> p) -> W.hasTrailingPathSeparator p == LW.hasTrailingPathSeparator p + ) + , + ( "addTrailingPathSeparator" + , property $ \(altShow @WindowsFilePath -> p) -> W.addTrailingPathSeparator p == LW.addTrailingPathSeparator p + ) + , + ( "dropTrailingPathSeparator" + , property $ \(altShow @WindowsFilePath -> p) -> W.dropTrailingPathSeparator p == LW.dropTrailingPathSeparator p + ) + , + ( "normalise" + , property $ \(altShow @WindowsFilePath -> p) -> case p of + (l:':':rs) + -- new filepath normalises "a:////////" to "A:\\" + -- see https://github.com/haskell/filepath/commit/cb4890aa03a5ee61f16f7a08dd2d964fffffb385 + | isAsciiLower l || isAsciiUpper l + , let (seps, path) = span LW.isPathSeparator rs + , length seps > 1 -> let np = l : ':' : LW.pathSeparator : path in W.normalise np == LW.normalise np + _ -> W.normalise p == LW.normalise p + ) + , + ( "equalFilePath" + , property $ \p s -> W.equalFilePath p s == LW.equalFilePath p s + ) + , + ( "makeRelative" + , property $ \p s -> W.makeRelative p s == LW.makeRelative p s + ) + , + ( "isRelative" + , property $ \p -> W.isRelative p == LW.isRelative p + ) + , + ( "isAbsolute" + , property $ \p -> W.isAbsolute p == LW.isAbsolute p + ) + , + ( "isValid" + , property $ \p -> W.isValid p == LW.isValid p + ) + , + ( "makeValid" + , property $ \p -> W.makeValid p == LW.makeValid p + ) + ], + testProperties "posix" $ [ + ( "pathSeparator" + , property $ P.pathSeparator == LP.pathSeparator + ) + , + ( "pathSeparators" + , property $ P.pathSeparators == LP.pathSeparators + ) + , + ( "isPathSeparator" + , property $ \p -> P.isPathSeparator p == LP.isPathSeparator p + ) + , + ( "searchPathSeparator" + , property $ 
P.searchPathSeparator == LP.searchPathSeparator + ) + , + ( "isSearchPathSeparator" + , property $ \p -> P.isSearchPathSeparator p == LP.isSearchPathSeparator p + ) + , + ( "extSeparator" + , property $ P.extSeparator == LP.extSeparator + ) + , + ( "isExtSeparator" + , property $ \p -> P.isExtSeparator p == LP.isExtSeparator p + ) + , + ( "splitSearchPath" + , property $ \p -> P.splitSearchPath p == LP.splitSearchPath p + ) + , + ( "splitExtension" + , property $ \p -> P.splitExtension p == LP.splitExtension p + ) + , + ( "takeExtension" + , property $ \p -> P.takeExtension p == LP.takeExtension p + ) + , + ( "replaceExtension" + , property $ \p s -> P.replaceExtension p s == LP.replaceExtension p s + ) + , + ( "dropExtension" + , property $ \p -> P.dropExtension p == LP.dropExtension p + ) + , + ( "addExtension" + , property $ \p s -> P.addExtension p s == LP.addExtension p s + ) + , + ( "hasExtension" + , property $ \p -> P.hasExtension p == LP.hasExtension p + ) + , + ( "splitExtensions" + , property $ \p -> P.splitExtensions p == LP.splitExtensions p + ) + , + ( "dropExtensions" + , property $ \p -> P.dropExtensions p == LP.dropExtensions p + ) + , + ( "takeExtensions" + , property $ \p -> P.takeExtensions p == LP.takeExtensions p + ) + , + ( "replaceExtensions" + , property $ \p s -> P.replaceExtensions p s == LP.replaceExtensions p s + ) + , + ( "isExtensionOf" + , property $ \p s -> P.isExtensionOf p s == LP.isExtensionOf p s + ) + , + ( "stripExtension" + , property $ \p s -> P.stripExtension p s == LP.stripExtension p s + ) + , + ( "splitFileName" + , property $ \p -> P.splitFileName p == LP.splitFileName p + ) + , + ( "takeFileName" + , property $ \p -> P.takeFileName p == LP.takeFileName p + ) + , + ( "replaceFileName" + , property $ \p s -> P.replaceFileName p s == LP.replaceFileName p s + ) + , + ( "dropFileName" + , property $ \p -> P.dropFileName p == LP.dropFileName p + ) + , + ( "takeBaseName" + , property $ \p -> P.takeBaseName p == 
LP.takeBaseName p + ) + , + ( "replaceBaseName" + , property $ \p s -> P.replaceBaseName p s == LP.replaceBaseName p s + ) + , + ( "takeDirectory" + , property $ \p -> P.takeDirectory p == LP.takeDirectory p + ) + , + ( "replaceDirectory" + , property $ \p s -> P.replaceDirectory p s == LP.replaceDirectory p s + ) + , + ( "combine" + , property $ \p s -> P.combine p s == LP.combine p s + ) + , + ( "splitPath" + , property $ \p -> P.splitPath p == LP.splitPath p + ) + , + ( "joinPath" + , property $ \p -> P.joinPath p == LP.joinPath p + ) + , + ( "splitDirectories" + , property $ \p -> P.splitDirectories p == LP.splitDirectories p + ) + , + ( "splitDirectories" + , property $ \p -> P.splitDirectories p == LP.splitDirectories p + ) + , + ( "splitDrive" + , property $ \p -> P.splitDrive p == LP.splitDrive p + ) + , + ( "joinDrive" + , property $ \p s -> P.joinDrive p s == LP.joinDrive p s + ) + , + ( "takeDrive" + , property $ \p -> P.takeDrive p == LP.takeDrive p + ) + , + ( "hasDrive" + , property $ \p -> P.hasDrive p == LP.hasDrive p + ) + , + ( "dropDrive" + , property $ \p -> P.dropDrive p == LP.dropDrive p + ) + , + ( "isDrive" + , property $ \p -> P.isDrive p == LP.isDrive p + ) + , + ( "hasTrailingPathSeparator" + , property $ \p -> P.hasTrailingPathSeparator p == LP.hasTrailingPathSeparator p + ) + , + ( "addTrailingPathSeparator" + , property $ \p -> P.addTrailingPathSeparator p == LP.addTrailingPathSeparator p + ) + , + ( "dropTrailingPathSeparator" + , property $ \p -> P.dropTrailingPathSeparator p == LP.dropTrailingPathSeparator p + ) + , + ( "normalise" + , property $ \p -> P.normalise p == LP.normalise p + ) + , + ( "equalFilePath" + , property $ \p s -> P.equalFilePath p s == LP.equalFilePath p s + ) + , + ( "makeRelative" + , property $ \p s -> P.makeRelative p s == LP.makeRelative p s + ) + , + ( "isRelative" + , property $ \p -> P.isRelative p == LP.isRelative p + ) + , + ( "isAbsolute" + , property $ \p -> P.isAbsolute p == LP.isAbsolute p + ) + , 
+ ( "isValid" + , property $ \p -> P.isValid p == LP.isValid p + ) + , + ( "makeValid" + , property $ \p -> P.makeValid p == LP.makeValid p + ) + ] + ] + diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-tests/Test.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-tests/Test.hs new file mode 100644 index 0000000000..cdcffd27bb --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-tests/Test.hs @@ -0,0 +1,9 @@ +module Main where + +import TestGen (tests) +import Test.Tasty +import Test.Tasty.QuickCheck + +main :: IO () +main = defaultMain $ testProperties "doctests" tests + diff --git a/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-tests/TestGen.hs b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-tests/TestGen.hs new file mode 100644 index 0000000000..9d5c822c77 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/filepath-1.5.4.0/tests/filepath-tests/TestGen.hs @@ -0,0 +1,955 @@ +-- GENERATED CODE: See ../Generate.hs +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -Wno-name-shadowing #-} +{-# OPTIONS_GHC -Wno-orphans #-} +module TestGen(tests) where +import TestUtil +#if !MIN_VERSION_base(4,11,0) +import Data.Semigroup +#endif +import Prelude as P +import Data.String +import GHC.IO.Encoding.Failure ( CodingFailureMode(..) 
) +import GHC.IO.Encoding.UTF16 ( mkUTF16le ) +import GHC.IO.Encoding.UTF8 ( mkUTF8 ) +import System.OsString.Internal.Types +import System.OsString.Encoding.Internal +import qualified Data.Char as C +import qualified System.OsString.Data.ByteString.Short as SBS +import qualified System.OsString.Data.ByteString.Short.Word16 as SBS16 +import qualified System.FilePath.Windows as W +import qualified System.FilePath.Posix as P +import qualified System.OsPath.Windows as AFP_W +import qualified System.OsPath.Posix as AFP_P +instance IsString WindowsString where fromString = WS . either (error . show) id . encodeWithTE (mkUTF16le TransliterateCodingFailure) +instance IsString PosixString where fromString = PS . either (error . show) id . encodeWithTE (mkUTF8 TransliterateCodingFailure) +#if defined(mingw32_HOST_OS) || defined(__MINGW32__) +instance IsString OsString where fromString = OsString . WS . either (error . show) id . encodeWithTE (mkUTF16le TransliterateCodingFailure) +#else +instance IsString OsString where fromString = OsString . PS . either (error . show) id . encodeWithTE (mkUTF8 TransliterateCodingFailure) +#endif +tests :: [(String, Property)] +tests = + [("W.pathSeparator == '\\\\'", property $ W.pathSeparator == '\\') + ,("AFP_W.pathSeparator == (WW . fromIntegral . C.ord $ '\\\\')", property $ AFP_W.pathSeparator == (WW . fromIntegral . C.ord $ '\\')) + ,("P.pathSeparator == '/'", property $ P.pathSeparator == '/') + ,("AFP_P.pathSeparator == (PW . fromIntegral . C.ord $ '/')", property $ AFP_P.pathSeparator == (PW . fromIntegral . 
C.ord $ '/')) + ,("P.isPathSeparator P.pathSeparator", property $ P.isPathSeparator P.pathSeparator) + ,("W.isPathSeparator W.pathSeparator", property $ W.isPathSeparator W.pathSeparator) + ,("AFP_P.isPathSeparator AFP_P.pathSeparator", property $ AFP_P.isPathSeparator AFP_P.pathSeparator) + ,("AFP_W.isPathSeparator AFP_W.pathSeparator", property $ AFP_W.isPathSeparator AFP_W.pathSeparator) + ,("W.pathSeparators == ['\\\\', '/']", property $ W.pathSeparators == ['\\', '/']) + ,("AFP_W.pathSeparators == [(WW . fromIntegral . C.ord $ '\\\\'), (WW . fromIntegral . C.ord $ '/')]", property $ AFP_W.pathSeparators == [(WW . fromIntegral . C.ord $ '\\'), (WW . fromIntegral . C.ord $ '/')]) + ,("P.pathSeparators == ['/']", property $ P.pathSeparators == ['/']) + ,("AFP_P.pathSeparators == [(PW . fromIntegral . C.ord $ '/')]", property $ AFP_P.pathSeparators == [(PW . fromIntegral . C.ord $ '/')]) + ,("P.pathSeparator `elem` P.pathSeparators", property $ P.pathSeparator `elem` P.pathSeparators) + ,("W.pathSeparator `elem` W.pathSeparators", property $ W.pathSeparator `elem` W.pathSeparators) + ,("AFP_P.pathSeparator `elem` AFP_P.pathSeparators", property $ AFP_P.pathSeparator `elem` AFP_P.pathSeparators) + ,("AFP_W.pathSeparator `elem` AFP_W.pathSeparators", property $ AFP_W.pathSeparator `elem` AFP_W.pathSeparators) + ,("P.isPathSeparator a == (a `elem` P.pathSeparators)", property $ \a -> P.isPathSeparator a == (a `elem` P.pathSeparators)) + ,("W.isPathSeparator a == (a `elem` W.pathSeparators)", property $ \a -> W.isPathSeparator a == (a `elem` W.pathSeparators)) + ,("AFP_P.isPathSeparator a == (a `elem` AFP_P.pathSeparators)", property $ \a -> AFP_P.isPathSeparator a == (a `elem` AFP_P.pathSeparators)) + ,("AFP_W.isPathSeparator a == (a `elem` AFP_W.pathSeparators)", property $ \a -> AFP_W.isPathSeparator a == (a `elem` AFP_W.pathSeparators)) + ,("W.searchPathSeparator == ';'", property $ W.searchPathSeparator == ';') + ,("AFP_W.searchPathSeparator == (WW . 
fromIntegral . C.ord $ ';')", property $ AFP_W.searchPathSeparator == (WW . fromIntegral . C.ord $ ';')) + ,("P.searchPathSeparator == ':'", property $ P.searchPathSeparator == ':') + ,("AFP_P.searchPathSeparator == (PW . fromIntegral . C.ord $ ':')", property $ AFP_P.searchPathSeparator == (PW . fromIntegral . C.ord $ ':')) + ,("P.isSearchPathSeparator a == (a == P.searchPathSeparator)", property $ \a -> P.isSearchPathSeparator a == (a == P.searchPathSeparator)) + ,("W.isSearchPathSeparator a == (a == W.searchPathSeparator)", property $ \a -> W.isSearchPathSeparator a == (a == W.searchPathSeparator)) + ,("AFP_P.isSearchPathSeparator a == (a == AFP_P.searchPathSeparator)", property $ \a -> AFP_P.isSearchPathSeparator a == (a == AFP_P.searchPathSeparator)) + ,("AFP_W.isSearchPathSeparator a == (a == AFP_W.searchPathSeparator)", property $ \a -> AFP_W.isSearchPathSeparator a == (a == AFP_W.searchPathSeparator)) + ,("P.extSeparator == '.'", property $ P.extSeparator == '.') + ,("W.extSeparator == '.'", property $ W.extSeparator == '.') + ,("AFP_P.extSeparator == (PW . fromIntegral . C.ord $ '.')", property $ AFP_P.extSeparator == (PW . fromIntegral . C.ord $ '.')) + ,("AFP_W.extSeparator == (WW . fromIntegral . C.ord $ '.')", property $ AFP_W.extSeparator == (WW . fromIntegral . 
C.ord $ '.')) + ,("P.isExtSeparator a == (a == P.extSeparator)", property $ \a -> P.isExtSeparator a == (a == P.extSeparator)) + ,("W.isExtSeparator a == (a == W.extSeparator)", property $ \a -> W.isExtSeparator a == (a == W.extSeparator)) + ,("AFP_P.isExtSeparator a == (a == AFP_P.extSeparator)", property $ \a -> AFP_P.isExtSeparator a == (a == AFP_P.extSeparator)) + ,("AFP_W.isExtSeparator a == (a == AFP_W.extSeparator)", property $ \a -> AFP_W.isExtSeparator a == (a == AFP_W.extSeparator)) + ,("P.splitSearchPath \"File1:File2:File3\" == [\"File1\", \"File2\", \"File3\"]", property $ P.splitSearchPath "File1:File2:File3" == ["File1", "File2", "File3"]) + ,("AFP_P.splitSearchPath (\"File1:File2:File3\") == [(\"File1\"), (\"File2\"), (\"File3\")]", property $ AFP_P.splitSearchPath ("File1:File2:File3") == [("File1"), ("File2"), ("File3")]) + ,("P.splitSearchPath \"File1::File2:File3\" == [\"File1\", \".\", \"File2\", \"File3\"]", property $ P.splitSearchPath "File1::File2:File3" == ["File1", ".", "File2", "File3"]) + ,("AFP_P.splitSearchPath (\"File1::File2:File3\") == [(\"File1\"), (\".\"), (\"File2\"), (\"File3\")]", property $ AFP_P.splitSearchPath ("File1::File2:File3") == [("File1"), ("."), ("File2"), ("File3")]) + ,("W.splitSearchPath \"File1;File2;File3\" == [\"File1\", \"File2\", \"File3\"]", property $ W.splitSearchPath "File1;File2;File3" == ["File1", "File2", "File3"]) + ,("AFP_W.splitSearchPath (\"File1;File2;File3\") == [(\"File1\"), (\"File2\"), (\"File3\")]", property $ AFP_W.splitSearchPath ("File1;File2;File3") == [("File1"), ("File2"), ("File3")]) + ,("W.splitSearchPath \"File1;;File2;File3\" == [\"File1\", \"File2\", \"File3\"]", property $ W.splitSearchPath "File1;;File2;File3" == ["File1", "File2", "File3"]) + ,("AFP_W.splitSearchPath (\"File1;;File2;File3\") == [(\"File1\"), (\"File2\"), (\"File3\")]", property $ AFP_W.splitSearchPath ("File1;;File2;File3") == [("File1"), ("File2"), ("File3")]) + ,("W.splitSearchPath 
\"File1;\\\"File2\\\";File3\" == [\"File1\", \"File2\", \"File3\"]", property $ W.splitSearchPath "File1;\"File2\";File3" == ["File1", "File2", "File3"]) + ,("AFP_W.splitSearchPath (\"File1;\\\"File2\\\";File3\") == [(\"File1\"), (\"File2\"), (\"File3\")]", property $ AFP_W.splitSearchPath ("File1;\"File2\";File3") == [("File1"), ("File2"), ("File3")]) + ,("P.splitExtension \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ P.splitExtension "/directory/path.ext" == ("/directory/path", ".ext")) + ,("W.splitExtension \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ W.splitExtension "/directory/path.ext" == ("/directory/path", ".ext")) + ,("AFP_P.splitExtension (\"/directory/path.ext\") == ((\"/directory/path\"), (\".ext\"))", property $ AFP_P.splitExtension ("/directory/path.ext") == (("/directory/path"), (".ext"))) + ,("AFP_W.splitExtension (\"/directory/path.ext\") == ((\"/directory/path\"), (\".ext\"))", property $ AFP_W.splitExtension ("/directory/path.ext") == (("/directory/path"), (".ext"))) + ,("uncurry (<>) (P.splitExtension x) == x", property $ \(QFilePath x) -> uncurry (<>) (P.splitExtension x) == x) + ,("uncurry (<>) (W.splitExtension x) == x", property $ \(QFilePath x) -> uncurry (<>) (W.splitExtension x) == x) + ,("uncurry (<>) (AFP_P.splitExtension x) == x", property $ \(QFilePathAFP_P x) -> uncurry (<>) (AFP_P.splitExtension x) == x) + ,("uncurry (<>) (AFP_W.splitExtension x) == x", property $ \(QFilePathAFP_W x) -> uncurry (<>) (AFP_W.splitExtension x) == x) + ,("uncurry P.addExtension (P.splitExtension x) == x", property $ \(QFilePathValidP x) -> uncurry P.addExtension (P.splitExtension x) == x) + ,("uncurry W.addExtension (W.splitExtension x) == x", property $ \(QFilePathValidW x) -> uncurry W.addExtension (W.splitExtension x) == x) + ,("uncurry AFP_P.addExtension (AFP_P.splitExtension x) == x", property $ \(QFilePathValidAFP_P x) -> uncurry AFP_P.addExtension (AFP_P.splitExtension x) == x) + ,("uncurry 
AFP_W.addExtension (AFP_W.splitExtension x) == x", property $ \(QFilePathValidAFP_W x) -> uncurry AFP_W.addExtension (AFP_W.splitExtension x) == x) + ,("P.splitExtension \"file.txt\" == (\"file\", \".txt\")", property $ P.splitExtension "file.txt" == ("file", ".txt")) + ,("W.splitExtension \"file.txt\" == (\"file\", \".txt\")", property $ W.splitExtension "file.txt" == ("file", ".txt")) + ,("AFP_P.splitExtension (\"file.txt\") == ((\"file\"), (\".txt\"))", property $ AFP_P.splitExtension ("file.txt") == (("file"), (".txt"))) + ,("AFP_W.splitExtension (\"file.txt\") == ((\"file\"), (\".txt\"))", property $ AFP_W.splitExtension ("file.txt") == (("file"), (".txt"))) + ,("P.splitExtension \"file\" == (\"file\", \"\")", property $ P.splitExtension "file" == ("file", "")) + ,("W.splitExtension \"file\" == (\"file\", \"\")", property $ W.splitExtension "file" == ("file", "")) + ,("AFP_P.splitExtension (\"file\") == ((\"file\"), (\"\"))", property $ AFP_P.splitExtension ("file") == (("file"), (""))) + ,("AFP_W.splitExtension (\"file\") == ((\"file\"), (\"\"))", property $ AFP_W.splitExtension ("file") == (("file"), (""))) + ,("P.splitExtension \"file/file.txt\" == (\"file/file\", \".txt\")", property $ P.splitExtension "file/file.txt" == ("file/file", ".txt")) + ,("W.splitExtension \"file/file.txt\" == (\"file/file\", \".txt\")", property $ W.splitExtension "file/file.txt" == ("file/file", ".txt")) + ,("AFP_P.splitExtension (\"file/file.txt\") == ((\"file/file\"), (\".txt\"))", property $ AFP_P.splitExtension ("file/file.txt") == (("file/file"), (".txt"))) + ,("AFP_W.splitExtension (\"file/file.txt\") == ((\"file/file\"), (\".txt\"))", property $ AFP_W.splitExtension ("file/file.txt") == (("file/file"), (".txt"))) + ,("P.splitExtension \"file.txt/boris\" == (\"file.txt/boris\", \"\")", property $ P.splitExtension "file.txt/boris" == ("file.txt/boris", "")) + ,("W.splitExtension \"file.txt/boris\" == (\"file.txt/boris\", \"\")", property $ W.splitExtension "file.txt/boris" 
== ("file.txt/boris", "")) + ,("AFP_P.splitExtension (\"file.txt/boris\") == ((\"file.txt/boris\"), (\"\"))", property $ AFP_P.splitExtension ("file.txt/boris") == (("file.txt/boris"), (""))) + ,("AFP_W.splitExtension (\"file.txt/boris\") == ((\"file.txt/boris\"), (\"\"))", property $ AFP_W.splitExtension ("file.txt/boris") == (("file.txt/boris"), (""))) + ,("P.splitExtension \"file.txt/boris.ext\" == (\"file.txt/boris\", \".ext\")", property $ P.splitExtension "file.txt/boris.ext" == ("file.txt/boris", ".ext")) + ,("W.splitExtension \"file.txt/boris.ext\" == (\"file.txt/boris\", \".ext\")", property $ W.splitExtension "file.txt/boris.ext" == ("file.txt/boris", ".ext")) + ,("AFP_P.splitExtension (\"file.txt/boris.ext\") == ((\"file.txt/boris\"), (\".ext\"))", property $ AFP_P.splitExtension ("file.txt/boris.ext") == (("file.txt/boris"), (".ext"))) + ,("AFP_W.splitExtension (\"file.txt/boris.ext\") == ((\"file.txt/boris\"), (\".ext\"))", property $ AFP_W.splitExtension ("file.txt/boris.ext") == (("file.txt/boris"), (".ext"))) + ,("P.splitExtension \"file/path.txt.bob.fred\" == (\"file/path.txt.bob\", \".fred\")", property $ P.splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob", ".fred")) + ,("W.splitExtension \"file/path.txt.bob.fred\" == (\"file/path.txt.bob\", \".fred\")", property $ W.splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob", ".fred")) + ,("AFP_P.splitExtension (\"file/path.txt.bob.fred\") == ((\"file/path.txt.bob\"), (\".fred\"))", property $ AFP_P.splitExtension ("file/path.txt.bob.fred") == (("file/path.txt.bob"), (".fred"))) + ,("AFP_W.splitExtension (\"file/path.txt.bob.fred\") == ((\"file/path.txt.bob\"), (\".fred\"))", property $ AFP_W.splitExtension ("file/path.txt.bob.fred") == (("file/path.txt.bob"), (".fred"))) + ,("P.splitExtension \"file/path.txt/\" == (\"file/path.txt/\", \"\")", property $ P.splitExtension "file/path.txt/" == ("file/path.txt/", "")) + ,("W.splitExtension \"file/path.txt/\" == (\"file/path.txt/\", 
\"\")", property $ W.splitExtension "file/path.txt/" == ("file/path.txt/", "")) + ,("AFP_P.splitExtension (\"file/path.txt/\") == ((\"file/path.txt/\"), (\"\"))", property $ AFP_P.splitExtension ("file/path.txt/") == (("file/path.txt/"), (""))) + ,("AFP_W.splitExtension (\"file/path.txt/\") == ((\"file/path.txt/\"), (\"\"))", property $ AFP_W.splitExtension ("file/path.txt/") == (("file/path.txt/"), (""))) + ,("P.takeExtension \"/directory/path.ext\" == \".ext\"", property $ P.takeExtension "/directory/path.ext" == ".ext") + ,("W.takeExtension \"/directory/path.ext\" == \".ext\"", property $ W.takeExtension "/directory/path.ext" == ".ext") + ,("AFP_P.takeExtension (\"/directory/path.ext\") == (\".ext\")", property $ AFP_P.takeExtension ("/directory/path.ext") == (".ext")) + ,("AFP_W.takeExtension (\"/directory/path.ext\") == (\".ext\")", property $ AFP_W.takeExtension ("/directory/path.ext") == (".ext")) + ,("P.takeExtension x == snd (P.splitExtension x)", property $ \(QFilePath x) -> P.takeExtension x == snd (P.splitExtension x)) + ,("W.takeExtension x == snd (W.splitExtension x)", property $ \(QFilePath x) -> W.takeExtension x == snd (W.splitExtension x)) + ,("AFP_P.takeExtension x == snd (AFP_P.splitExtension x)", property $ \(QFilePathAFP_P x) -> AFP_P.takeExtension x == snd (AFP_P.splitExtension x)) + ,("AFP_W.takeExtension x == snd (AFP_W.splitExtension x)", property $ \(QFilePathAFP_W x) -> AFP_W.takeExtension x == snd (AFP_W.splitExtension x)) + ,("P.takeExtension (P.addExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidP x) -> P.takeExtension (P.addExtension x "ext") == ".ext") + ,("W.takeExtension (W.addExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidW x) -> W.takeExtension (W.addExtension x "ext") == ".ext") + ,("AFP_P.takeExtension (AFP_P.addExtension x (\"ext\")) == (\".ext\")", property $ \(QFilePathValidAFP_P x) -> AFP_P.takeExtension (AFP_P.addExtension x ("ext")) == (".ext")) + ,("AFP_W.takeExtension (AFP_W.addExtension x 
(\"ext\")) == (\".ext\")", property $ \(QFilePathValidAFP_W x) -> AFP_W.takeExtension (AFP_W.addExtension x ("ext")) == (".ext")) + ,("P.takeExtension (P.replaceExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidP x) -> P.takeExtension (P.replaceExtension x "ext") == ".ext") + ,("W.takeExtension (W.replaceExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidW x) -> W.takeExtension (W.replaceExtension x "ext") == ".ext") + ,("AFP_P.takeExtension (AFP_P.replaceExtension x (\"ext\")) == (\".ext\")", property $ \(QFilePathValidAFP_P x) -> AFP_P.takeExtension (AFP_P.replaceExtension x ("ext")) == (".ext")) + ,("AFP_W.takeExtension (AFP_W.replaceExtension x (\"ext\")) == (\".ext\")", property $ \(QFilePathValidAFP_W x) -> AFP_W.takeExtension (AFP_W.replaceExtension x ("ext")) == (".ext")) + ,("\"/directory/path.txt\" P.-<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" P.-<.> "ext" == "/directory/path.ext") + ,("\"/directory/path.txt\" W.-<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" W.-<.> "ext" == "/directory/path.ext") + ,("(\"/directory/path.txt\") AFP_P.-<.> (\"ext\") == (\"/directory/path.ext\")", property $ ("/directory/path.txt") AFP_P.-<.> ("ext") == ("/directory/path.ext")) + ,("(\"/directory/path.txt\") AFP_W.-<.> (\"ext\") == (\"/directory/path.ext\")", property $ ("/directory/path.txt") AFP_W.-<.> ("ext") == ("/directory/path.ext")) + ,("\"/directory/path.txt\" P.-<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" P.-<.> ".ext" == "/directory/path.ext") + ,("\"/directory/path.txt\" W.-<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" W.-<.> ".ext" == "/directory/path.ext") + ,("(\"/directory/path.txt\") AFP_P.-<.> (\".ext\") == (\"/directory/path.ext\")", property $ ("/directory/path.txt") AFP_P.-<.> (".ext") == ("/directory/path.ext")) + ,("(\"/directory/path.txt\") AFP_W.-<.> (\".ext\") == (\"/directory/path.ext\")", property $ 
("/directory/path.txt") AFP_W.-<.> (".ext") == ("/directory/path.ext")) + ,("\"foo.o\" P.-<.> \"c\" == \"foo.c\"", property $ "foo.o" P.-<.> "c" == "foo.c") + ,("\"foo.o\" W.-<.> \"c\" == \"foo.c\"", property $ "foo.o" W.-<.> "c" == "foo.c") + ,("(\"foo.o\") AFP_P.-<.> (\"c\") == (\"foo.c\")", property $ ("foo.o") AFP_P.-<.> ("c") == ("foo.c")) + ,("(\"foo.o\") AFP_W.-<.> (\"c\") == (\"foo.c\")", property $ ("foo.o") AFP_W.-<.> ("c") == ("foo.c")) + ,("P.replaceExtension \"/directory/path.txt\" \"ext\" == \"/directory/path.ext\"", property $ P.replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext") + ,("W.replaceExtension \"/directory/path.txt\" \"ext\" == \"/directory/path.ext\"", property $ W.replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext") + ,("AFP_P.replaceExtension (\"/directory/path.txt\") (\"ext\") == (\"/directory/path.ext\")", property $ AFP_P.replaceExtension ("/directory/path.txt") ("ext") == ("/directory/path.ext")) + ,("AFP_W.replaceExtension (\"/directory/path.txt\") (\"ext\") == (\"/directory/path.ext\")", property $ AFP_W.replaceExtension ("/directory/path.txt") ("ext") == ("/directory/path.ext")) + ,("P.replaceExtension \"/directory/path.txt\" \".ext\" == \"/directory/path.ext\"", property $ P.replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext") + ,("W.replaceExtension \"/directory/path.txt\" \".ext\" == \"/directory/path.ext\"", property $ W.replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext") + ,("AFP_P.replaceExtension (\"/directory/path.txt\") (\".ext\") == (\"/directory/path.ext\")", property $ AFP_P.replaceExtension ("/directory/path.txt") (".ext") == ("/directory/path.ext")) + ,("AFP_W.replaceExtension (\"/directory/path.txt\") (\".ext\") == (\"/directory/path.ext\")", property $ AFP_W.replaceExtension ("/directory/path.txt") (".ext") == ("/directory/path.ext")) + ,("P.replaceExtension \"file.txt\" \".bob\" == \"file.bob\"", property $ P.replaceExtension "file.txt" 
".bob" == "file.bob") + ,("W.replaceExtension \"file.txt\" \".bob\" == \"file.bob\"", property $ W.replaceExtension "file.txt" ".bob" == "file.bob") + ,("AFP_P.replaceExtension (\"file.txt\") (\".bob\") == (\"file.bob\")", property $ AFP_P.replaceExtension ("file.txt") (".bob") == ("file.bob")) + ,("AFP_W.replaceExtension (\"file.txt\") (\".bob\") == (\"file.bob\")", property $ AFP_W.replaceExtension ("file.txt") (".bob") == ("file.bob")) + ,("P.replaceExtension \"file.txt\" \"bob\" == \"file.bob\"", property $ P.replaceExtension "file.txt" "bob" == "file.bob") + ,("W.replaceExtension \"file.txt\" \"bob\" == \"file.bob\"", property $ W.replaceExtension "file.txt" "bob" == "file.bob") + ,("AFP_P.replaceExtension (\"file.txt\") (\"bob\") == (\"file.bob\")", property $ AFP_P.replaceExtension ("file.txt") ("bob") == ("file.bob")) + ,("AFP_W.replaceExtension (\"file.txt\") (\"bob\") == (\"file.bob\")", property $ AFP_W.replaceExtension ("file.txt") ("bob") == ("file.bob")) + ,("P.replaceExtension \"file\" \".bob\" == \"file.bob\"", property $ P.replaceExtension "file" ".bob" == "file.bob") + ,("W.replaceExtension \"file\" \".bob\" == \"file.bob\"", property $ W.replaceExtension "file" ".bob" == "file.bob") + ,("AFP_P.replaceExtension (\"file\") (\".bob\") == (\"file.bob\")", property $ AFP_P.replaceExtension ("file") (".bob") == ("file.bob")) + ,("AFP_W.replaceExtension (\"file\") (\".bob\") == (\"file.bob\")", property $ AFP_W.replaceExtension ("file") (".bob") == ("file.bob")) + ,("P.replaceExtension \"file.txt\" \"\" == \"file\"", property $ P.replaceExtension "file.txt" "" == "file") + ,("W.replaceExtension \"file.txt\" \"\" == \"file\"", property $ W.replaceExtension "file.txt" "" == "file") + ,("AFP_P.replaceExtension (\"file.txt\") (\"\") == (\"file\")", property $ AFP_P.replaceExtension ("file.txt") ("") == ("file")) + ,("AFP_W.replaceExtension (\"file.txt\") (\"\") == (\"file\")", property $ AFP_W.replaceExtension ("file.txt") ("") == ("file")) + 
,("P.replaceExtension \"file.fred.bob\" \"txt\" == \"file.fred.txt\"", property $ P.replaceExtension "file.fred.bob" "txt" == "file.fred.txt") + ,("W.replaceExtension \"file.fred.bob\" \"txt\" == \"file.fred.txt\"", property $ W.replaceExtension "file.fred.bob" "txt" == "file.fred.txt") + ,("AFP_P.replaceExtension (\"file.fred.bob\") (\"txt\") == (\"file.fred.txt\")", property $ AFP_P.replaceExtension ("file.fred.bob") ("txt") == ("file.fred.txt")) + ,("AFP_W.replaceExtension (\"file.fred.bob\") (\"txt\") == (\"file.fred.txt\")", property $ AFP_W.replaceExtension ("file.fred.bob") ("txt") == ("file.fred.txt")) + ,("P.replaceExtension x y == P.addExtension (P.dropExtension x) y", property $ \(QFilePath x) (QFilePath y) -> P.replaceExtension x y == P.addExtension (P.dropExtension x) y) + ,("W.replaceExtension x y == W.addExtension (W.dropExtension x) y", property $ \(QFilePath x) (QFilePath y) -> W.replaceExtension x y == W.addExtension (W.dropExtension x) y) + ,("AFP_P.replaceExtension x y == AFP_P.addExtension (AFP_P.dropExtension x) y", property $ \(QFilePathAFP_P x) (QFilePathAFP_P y) -> AFP_P.replaceExtension x y == AFP_P.addExtension (AFP_P.dropExtension x) y) + ,("AFP_W.replaceExtension x y == AFP_W.addExtension (AFP_W.dropExtension x) y", property $ \(QFilePathAFP_W x) (QFilePathAFP_W y) -> AFP_W.replaceExtension x y == AFP_W.addExtension (AFP_W.dropExtension x) y) + ,("\"/directory/path\" P.<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path" P.<.> "ext" == "/directory/path.ext") + ,("\"/directory/path\" W.<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path" W.<.> "ext" == "/directory/path.ext") + ,("(\"/directory/path\") AFP_P.<.> (\"ext\") == (\"/directory/path.ext\")", property $ ("/directory/path") AFP_P.<.> ("ext") == ("/directory/path.ext")) + ,("(\"/directory/path\") AFP_W.<.> (\"ext\") == (\"/directory/path.ext\")", property $ ("/directory/path") AFP_W.<.> ("ext") == ("/directory/path.ext")) + ,("\"/directory/path\" 
P.<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path" P.<.> ".ext" == "/directory/path.ext") + ,("\"/directory/path\" W.<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path" W.<.> ".ext" == "/directory/path.ext") + ,("(\"/directory/path\") AFP_P.<.> (\".ext\") == (\"/directory/path.ext\")", property $ ("/directory/path") AFP_P.<.> (".ext") == ("/directory/path.ext")) + ,("(\"/directory/path\") AFP_W.<.> (\".ext\") == (\"/directory/path.ext\")", property $ ("/directory/path") AFP_W.<.> (".ext") == ("/directory/path.ext")) + ,("P.dropExtension \"/directory/path.ext\" == \"/directory/path\"", property $ P.dropExtension "/directory/path.ext" == "/directory/path") + ,("W.dropExtension \"/directory/path.ext\" == \"/directory/path\"", property $ W.dropExtension "/directory/path.ext" == "/directory/path") + ,("AFP_P.dropExtension (\"/directory/path.ext\") == (\"/directory/path\")", property $ AFP_P.dropExtension ("/directory/path.ext") == ("/directory/path")) + ,("AFP_W.dropExtension (\"/directory/path.ext\") == (\"/directory/path\")", property $ AFP_W.dropExtension ("/directory/path.ext") == ("/directory/path")) + ,("P.dropExtension x == fst (P.splitExtension x)", property $ \(QFilePath x) -> P.dropExtension x == fst (P.splitExtension x)) + ,("W.dropExtension x == fst (W.splitExtension x)", property $ \(QFilePath x) -> W.dropExtension x == fst (W.splitExtension x)) + ,("AFP_P.dropExtension x == fst (AFP_P.splitExtension x)", property $ \(QFilePathAFP_P x) -> AFP_P.dropExtension x == fst (AFP_P.splitExtension x)) + ,("AFP_W.dropExtension x == fst (AFP_W.splitExtension x)", property $ \(QFilePathAFP_W x) -> AFP_W.dropExtension x == fst (AFP_W.splitExtension x)) + ,("P.addExtension \"/directory/path\" \"ext\" == \"/directory/path.ext\"", property $ P.addExtension "/directory/path" "ext" == "/directory/path.ext") + ,("W.addExtension \"/directory/path\" \"ext\" == \"/directory/path.ext\"", property $ W.addExtension "/directory/path" "ext" == 
"/directory/path.ext") + ,("AFP_P.addExtension (\"/directory/path\") (\"ext\") == (\"/directory/path.ext\")", property $ AFP_P.addExtension ("/directory/path") ("ext") == ("/directory/path.ext")) + ,("AFP_W.addExtension (\"/directory/path\") (\"ext\") == (\"/directory/path.ext\")", property $ AFP_W.addExtension ("/directory/path") ("ext") == ("/directory/path.ext")) + ,("P.addExtension \"file.txt\" \"bib\" == \"file.txt.bib\"", property $ P.addExtension "file.txt" "bib" == "file.txt.bib") + ,("W.addExtension \"file.txt\" \"bib\" == \"file.txt.bib\"", property $ W.addExtension "file.txt" "bib" == "file.txt.bib") + ,("AFP_P.addExtension (\"file.txt\") (\"bib\") == (\"file.txt.bib\")", property $ AFP_P.addExtension ("file.txt") ("bib") == ("file.txt.bib")) + ,("AFP_W.addExtension (\"file.txt\") (\"bib\") == (\"file.txt.bib\")", property $ AFP_W.addExtension ("file.txt") ("bib") == ("file.txt.bib")) + ,("P.addExtension \"file.\" \".bib\" == \"file..bib\"", property $ P.addExtension "file." ".bib" == "file..bib") + ,("W.addExtension \"file.\" \".bib\" == \"file..bib\"", property $ W.addExtension "file." 
".bib" == "file..bib") + ,("AFP_P.addExtension (\"file.\") (\".bib\") == (\"file..bib\")", property $ AFP_P.addExtension ("file.") (".bib") == ("file..bib")) + ,("AFP_W.addExtension (\"file.\") (\".bib\") == (\"file..bib\")", property $ AFP_W.addExtension ("file.") (".bib") == ("file..bib")) + ,("P.addExtension \"file\" \".bib\" == \"file.bib\"", property $ P.addExtension "file" ".bib" == "file.bib") + ,("W.addExtension \"file\" \".bib\" == \"file.bib\"", property $ W.addExtension "file" ".bib" == "file.bib") + ,("AFP_P.addExtension (\"file\") (\".bib\") == (\"file.bib\")", property $ AFP_P.addExtension ("file") (".bib") == ("file.bib")) + ,("AFP_W.addExtension (\"file\") (\".bib\") == (\"file.bib\")", property $ AFP_W.addExtension ("file") (".bib") == ("file.bib")) + ,("P.addExtension \"/\" \"x\" == \"/.x\"", property $ P.addExtension "/" "x" == "/.x") + ,("W.addExtension \"/\" \"x\" == \"/.x\"", property $ W.addExtension "/" "x" == "/.x") + ,("AFP_P.addExtension (\"/\") (\"x\") == (\"/.x\")", property $ AFP_P.addExtension ("/") ("x") == ("/.x")) + ,("AFP_W.addExtension (\"/\") (\"x\") == (\"/.x\")", property $ AFP_W.addExtension ("/") ("x") == ("/.x")) + ,("P.addExtension x \"\" == x", property $ \(QFilePath x) -> P.addExtension x "" == x) + ,("W.addExtension x \"\" == x", property $ \(QFilePath x) -> W.addExtension x "" == x) + ,("AFP_P.addExtension x (\"\") == x", property $ \(QFilePathAFP_P x) -> AFP_P.addExtension x ("") == x) + ,("AFP_W.addExtension x (\"\") == x", property $ \(QFilePathAFP_W x) -> AFP_W.addExtension x ("") == x) + ,("P.takeFileName (P.addExtension (P.addTrailingPathSeparator x) \"ext\") == \".ext\"", property $ \(QFilePathValidP x) -> P.takeFileName (P.addExtension (P.addTrailingPathSeparator x) "ext") == ".ext") + ,("W.takeFileName (W.addExtension (W.addTrailingPathSeparator x) \"ext\") == \".ext\"", property $ \(QFilePathValidW x) -> W.takeFileName (W.addExtension (W.addTrailingPathSeparator x) "ext") == ".ext") + ,("AFP_P.takeFileName 
(AFP_P.addExtension (AFP_P.addTrailingPathSeparator x) (\"ext\")) == (\".ext\")", property $ \(QFilePathValidAFP_P x) -> AFP_P.takeFileName (AFP_P.addExtension (AFP_P.addTrailingPathSeparator x) ("ext")) == (".ext")) + ,("AFP_W.takeFileName (AFP_W.addExtension (AFP_W.addTrailingPathSeparator x) (\"ext\")) == (\".ext\")", property $ \(QFilePathValidAFP_W x) -> AFP_W.takeFileName (AFP_W.addExtension (AFP_W.addTrailingPathSeparator x) ("ext")) == (".ext")) + ,("W.addExtension \"\\\\\\\\share\" \".txt\" == \"\\\\\\\\share\\\\.txt\"", property $ W.addExtension "\\\\share" ".txt" == "\\\\share\\.txt") + ,("AFP_W.addExtension (\"\\\\\\\\share\") (\".txt\") == (\"\\\\\\\\share\\\\.txt\")", property $ AFP_W.addExtension ("\\\\share") (".txt") == ("\\\\share\\.txt")) + ,("P.hasExtension \"/directory/path.ext\" == True", property $ P.hasExtension "/directory/path.ext" == True) + ,("W.hasExtension \"/directory/path.ext\" == True", property $ W.hasExtension "/directory/path.ext" == True) + ,("AFP_P.hasExtension (\"/directory/path.ext\") == True", property $ AFP_P.hasExtension ("/directory/path.ext") == True) + ,("AFP_W.hasExtension (\"/directory/path.ext\") == True", property $ AFP_W.hasExtension ("/directory/path.ext") == True) + ,("P.hasExtension \"/directory/path\" == False", property $ P.hasExtension "/directory/path" == False) + ,("W.hasExtension \"/directory/path\" == False", property $ W.hasExtension "/directory/path" == False) + ,("AFP_P.hasExtension (\"/directory/path\") == False", property $ AFP_P.hasExtension ("/directory/path") == False) + ,("AFP_W.hasExtension (\"/directory/path\") == False", property $ AFP_W.hasExtension ("/directory/path") == False) + ,("null (P.takeExtension x) == not (P.hasExtension x)", property $ \(QFilePath x) -> null (P.takeExtension x) == not (P.hasExtension x)) + ,("null (W.takeExtension x) == not (W.hasExtension x)", property $ \(QFilePath x) -> null (W.takeExtension x) == not (W.hasExtension x)) + ,("(SBS.null . 
getPosixString) (AFP_P.takeExtension x) == not (AFP_P.hasExtension x)", property $ \(QFilePathAFP_P x) -> (SBS.null . getPosixString) (AFP_P.takeExtension x) == not (AFP_P.hasExtension x)) + ,("(SBS16.null . getWindowsString) (AFP_W.takeExtension x) == not (AFP_W.hasExtension x)", property $ \(QFilePathAFP_W x) -> (SBS16.null . getWindowsString) (AFP_W.takeExtension x) == not (AFP_W.hasExtension x)) + ,("\"png\" `P.isExtensionOf` \"/directory/file.png\" == True", property $ "png" `P.isExtensionOf` "/directory/file.png" == True) + ,("\"png\" `W.isExtensionOf` \"/directory/file.png\" == True", property $ "png" `W.isExtensionOf` "/directory/file.png" == True) + ,("(\"png\") `AFP_P.isExtensionOf` (\"/directory/file.png\") == True", property $ ("png") `AFP_P.isExtensionOf` ("/directory/file.png") == True) + ,("(\"png\") `AFP_W.isExtensionOf` (\"/directory/file.png\") == True", property $ ("png") `AFP_W.isExtensionOf` ("/directory/file.png") == True) + ,("\".png\" `P.isExtensionOf` \"/directory/file.png\" == True", property $ ".png" `P.isExtensionOf` "/directory/file.png" == True) + ,("\".png\" `W.isExtensionOf` \"/directory/file.png\" == True", property $ ".png" `W.isExtensionOf` "/directory/file.png" == True) + ,("(\".png\") `AFP_P.isExtensionOf` (\"/directory/file.png\") == True", property $ (".png") `AFP_P.isExtensionOf` ("/directory/file.png") == True) + ,("(\".png\") `AFP_W.isExtensionOf` (\"/directory/file.png\") == True", property $ (".png") `AFP_W.isExtensionOf` ("/directory/file.png") == True) + ,("\".tar.gz\" `P.isExtensionOf` \"bar/foo.tar.gz\" == True", property $ ".tar.gz" `P.isExtensionOf` "bar/foo.tar.gz" == True) + ,("\".tar.gz\" `W.isExtensionOf` \"bar/foo.tar.gz\" == True", property $ ".tar.gz" `W.isExtensionOf` "bar/foo.tar.gz" == True) + ,("(\".tar.gz\") `AFP_P.isExtensionOf` (\"bar/foo.tar.gz\") == True", property $ (".tar.gz") `AFP_P.isExtensionOf` ("bar/foo.tar.gz") == True) + ,("(\".tar.gz\") `AFP_W.isExtensionOf` (\"bar/foo.tar.gz\") == True", 
property $ (".tar.gz") `AFP_W.isExtensionOf` ("bar/foo.tar.gz") == True) + ,("\"ar.gz\" `P.isExtensionOf` \"bar/foo.tar.gz\" == False", property $ "ar.gz" `P.isExtensionOf` "bar/foo.tar.gz" == False) + ,("\"ar.gz\" `W.isExtensionOf` \"bar/foo.tar.gz\" == False", property $ "ar.gz" `W.isExtensionOf` "bar/foo.tar.gz" == False) + ,("(\"ar.gz\") `AFP_P.isExtensionOf` (\"bar/foo.tar.gz\") == False", property $ ("ar.gz") `AFP_P.isExtensionOf` ("bar/foo.tar.gz") == False) + ,("(\"ar.gz\") `AFP_W.isExtensionOf` (\"bar/foo.tar.gz\") == False", property $ ("ar.gz") `AFP_W.isExtensionOf` ("bar/foo.tar.gz") == False) + ,("\"png\" `P.isExtensionOf` \"/directory/file.png.jpg\" == False", property $ "png" `P.isExtensionOf` "/directory/file.png.jpg" == False) + ,("\"png\" `W.isExtensionOf` \"/directory/file.png.jpg\" == False", property $ "png" `W.isExtensionOf` "/directory/file.png.jpg" == False) + ,("(\"png\") `AFP_P.isExtensionOf` (\"/directory/file.png.jpg\") == False", property $ ("png") `AFP_P.isExtensionOf` ("/directory/file.png.jpg") == False) + ,("(\"png\") `AFP_W.isExtensionOf` (\"/directory/file.png.jpg\") == False", property $ ("png") `AFP_W.isExtensionOf` ("/directory/file.png.jpg") == False) + ,("\"csv/table.csv\" `P.isExtensionOf` \"/data/csv/table.csv\" == False", property $ "csv/table.csv" `P.isExtensionOf` "/data/csv/table.csv" == False) + ,("\"csv/table.csv\" `W.isExtensionOf` \"/data/csv/table.csv\" == False", property $ "csv/table.csv" `W.isExtensionOf` "/data/csv/table.csv" == False) + ,("(\"csv/table.csv\") `AFP_P.isExtensionOf` (\"/data/csv/table.csv\") == False", property $ ("csv/table.csv") `AFP_P.isExtensionOf` ("/data/csv/table.csv") == False) + ,("(\"csv/table.csv\") `AFP_W.isExtensionOf` (\"/data/csv/table.csv\") == False", property $ ("csv/table.csv") `AFP_W.isExtensionOf` ("/data/csv/table.csv") == False) + ,("P.stripExtension \"hs.o\" \"foo.x.hs.o\" == Just \"foo.x\"", property $ P.stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x") + 
,("W.stripExtension \"hs.o\" \"foo.x.hs.o\" == Just \"foo.x\"", property $ W.stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x") + ,("AFP_P.stripExtension (\"hs.o\") (\"foo.x.hs.o\") == Just (\"foo.x\")", property $ AFP_P.stripExtension ("hs.o") ("foo.x.hs.o") == Just ("foo.x")) + ,("AFP_W.stripExtension (\"hs.o\") (\"foo.x.hs.o\") == Just (\"foo.x\")", property $ AFP_W.stripExtension ("hs.o") ("foo.x.hs.o") == Just ("foo.x")) + ,("P.stripExtension \"hi.o\" \"foo.x.hs.o\" == Nothing", property $ P.stripExtension "hi.o" "foo.x.hs.o" == Nothing) + ,("W.stripExtension \"hi.o\" \"foo.x.hs.o\" == Nothing", property $ W.stripExtension "hi.o" "foo.x.hs.o" == Nothing) + ,("AFP_P.stripExtension (\"hi.o\") (\"foo.x.hs.o\") == Nothing", property $ AFP_P.stripExtension ("hi.o") ("foo.x.hs.o") == Nothing) + ,("AFP_W.stripExtension (\"hi.o\") (\"foo.x.hs.o\") == Nothing", property $ AFP_W.stripExtension ("hi.o") ("foo.x.hs.o") == Nothing) + ,("P.dropExtension x == fromJust (P.stripExtension (P.takeExtension x) x)", property $ \(QFilePath x) -> P.dropExtension x == fromJust (P.stripExtension (P.takeExtension x) x)) + ,("W.dropExtension x == fromJust (W.stripExtension (W.takeExtension x) x)", property $ \(QFilePath x) -> W.dropExtension x == fromJust (W.stripExtension (W.takeExtension x) x)) + ,("AFP_P.dropExtension x == fromJust (AFP_P.stripExtension (AFP_P.takeExtension x) x)", property $ \(QFilePathAFP_P x) -> AFP_P.dropExtension x == fromJust (AFP_P.stripExtension (AFP_P.takeExtension x) x)) + ,("AFP_W.dropExtension x == fromJust (AFP_W.stripExtension (AFP_W.takeExtension x) x)", property $ \(QFilePathAFP_W x) -> AFP_W.dropExtension x == fromJust (AFP_W.stripExtension (AFP_W.takeExtension x) x)) + ,("P.dropExtensions x == fromJust (P.stripExtension (P.takeExtensions x) x)", property $ \(QFilePath x) -> P.dropExtensions x == fromJust (P.stripExtension (P.takeExtensions x) x)) + ,("W.dropExtensions x == fromJust (W.stripExtension (W.takeExtensions x) x)", property $ \(QFilePath 
x) -> W.dropExtensions x == fromJust (W.stripExtension (W.takeExtensions x) x)) + ,("AFP_P.dropExtensions x == fromJust (AFP_P.stripExtension (AFP_P.takeExtensions x) x)", property $ \(QFilePathAFP_P x) -> AFP_P.dropExtensions x == fromJust (AFP_P.stripExtension (AFP_P.takeExtensions x) x)) + ,("AFP_W.dropExtensions x == fromJust (AFP_W.stripExtension (AFP_W.takeExtensions x) x)", property $ \(QFilePathAFP_W x) -> AFP_W.dropExtensions x == fromJust (AFP_W.stripExtension (AFP_W.takeExtensions x) x)) + ,("P.stripExtension \".c.d\" \"a.b.c.d\" == Just \"a.b\"", property $ P.stripExtension ".c.d" "a.b.c.d" == Just "a.b") + ,("W.stripExtension \".c.d\" \"a.b.c.d\" == Just \"a.b\"", property $ W.stripExtension ".c.d" "a.b.c.d" == Just "a.b") + ,("AFP_P.stripExtension (\".c.d\") (\"a.b.c.d\") == Just (\"a.b\")", property $ AFP_P.stripExtension (".c.d") ("a.b.c.d") == Just ("a.b")) + ,("AFP_W.stripExtension (\".c.d\") (\"a.b.c.d\") == Just (\"a.b\")", property $ AFP_W.stripExtension (".c.d") ("a.b.c.d") == Just ("a.b")) + ,("P.stripExtension \".c.d\" \"a.b..c.d\" == Just \"a.b.\"", property $ P.stripExtension ".c.d" "a.b..c.d" == Just "a.b.") + ,("W.stripExtension \".c.d\" \"a.b..c.d\" == Just \"a.b.\"", property $ W.stripExtension ".c.d" "a.b..c.d" == Just "a.b.") + ,("AFP_P.stripExtension (\".c.d\") (\"a.b..c.d\") == Just (\"a.b.\")", property $ AFP_P.stripExtension (".c.d") ("a.b..c.d") == Just ("a.b.")) + ,("AFP_W.stripExtension (\".c.d\") (\"a.b..c.d\") == Just (\"a.b.\")", property $ AFP_W.stripExtension (".c.d") ("a.b..c.d") == Just ("a.b.")) + ,("P.stripExtension \"baz\" \"foo.bar\" == Nothing", property $ P.stripExtension "baz" "foo.bar" == Nothing) + ,("W.stripExtension \"baz\" \"foo.bar\" == Nothing", property $ W.stripExtension "baz" "foo.bar" == Nothing) + ,("AFP_P.stripExtension (\"baz\") (\"foo.bar\") == Nothing", property $ AFP_P.stripExtension ("baz") ("foo.bar") == Nothing) + ,("AFP_W.stripExtension (\"baz\") (\"foo.bar\") == Nothing", property $ 
AFP_W.stripExtension ("baz") ("foo.bar") == Nothing) + ,("P.stripExtension \"bar\" \"foobar\" == Nothing", property $ P.stripExtension "bar" "foobar" == Nothing) + ,("W.stripExtension \"bar\" \"foobar\" == Nothing", property $ W.stripExtension "bar" "foobar" == Nothing) + ,("AFP_P.stripExtension (\"bar\") (\"foobar\") == Nothing", property $ AFP_P.stripExtension ("bar") ("foobar") == Nothing) + ,("AFP_W.stripExtension (\"bar\") (\"foobar\") == Nothing", property $ AFP_W.stripExtension ("bar") ("foobar") == Nothing) + ,("P.stripExtension \"\" x == Just x", property $ \(QFilePath x) -> P.stripExtension "" x == Just x) + ,("W.stripExtension \"\" x == Just x", property $ \(QFilePath x) -> W.stripExtension "" x == Just x) + ,("AFP_P.stripExtension (\"\") x == Just x", property $ \(QFilePathAFP_P x) -> AFP_P.stripExtension ("") x == Just x) + ,("AFP_W.stripExtension (\"\") x == Just x", property $ \(QFilePathAFP_W x) -> AFP_W.stripExtension ("") x == Just x) + ,("P.splitExtensions \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ P.splitExtensions "/directory/path.ext" == ("/directory/path", ".ext")) + ,("W.splitExtensions \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ W.splitExtensions "/directory/path.ext" == ("/directory/path", ".ext")) + ,("AFP_P.splitExtensions (\"/directory/path.ext\") == ((\"/directory/path\"), (\".ext\"))", property $ AFP_P.splitExtensions ("/directory/path.ext") == (("/directory/path"), (".ext"))) + ,("AFP_W.splitExtensions (\"/directory/path.ext\") == ((\"/directory/path\"), (\".ext\"))", property $ AFP_W.splitExtensions ("/directory/path.ext") == (("/directory/path"), (".ext"))) + ,("P.splitExtensions \"file.tar.gz\" == (\"file\", \".tar.gz\")", property $ P.splitExtensions "file.tar.gz" == ("file", ".tar.gz")) + ,("W.splitExtensions \"file.tar.gz\" == (\"file\", \".tar.gz\")", property $ W.splitExtensions "file.tar.gz" == ("file", ".tar.gz")) + ,("AFP_P.splitExtensions (\"file.tar.gz\") == 
((\"file\"), (\".tar.gz\"))", property $ AFP_P.splitExtensions ("file.tar.gz") == (("file"), (".tar.gz"))) + ,("AFP_W.splitExtensions (\"file.tar.gz\") == ((\"file\"), (\".tar.gz\"))", property $ AFP_W.splitExtensions ("file.tar.gz") == (("file"), (".tar.gz"))) + ,("uncurry (<>) (P.splitExtensions x) == x", property $ \(QFilePath x) -> uncurry (<>) (P.splitExtensions x) == x) + ,("uncurry (<>) (W.splitExtensions x) == x", property $ \(QFilePath x) -> uncurry (<>) (W.splitExtensions x) == x) + ,("uncurry (<>) (AFP_P.splitExtensions x) == x", property $ \(QFilePathAFP_P x) -> uncurry (<>) (AFP_P.splitExtensions x) == x) + ,("uncurry (<>) (AFP_W.splitExtensions x) == x", property $ \(QFilePathAFP_W x) -> uncurry (<>) (AFP_W.splitExtensions x) == x) + ,("uncurry P.addExtension (P.splitExtensions x) == x", property $ \(QFilePathValidP x) -> uncurry P.addExtension (P.splitExtensions x) == x) + ,("uncurry W.addExtension (W.splitExtensions x) == x", property $ \(QFilePathValidW x) -> uncurry W.addExtension (W.splitExtensions x) == x) + ,("uncurry AFP_P.addExtension (AFP_P.splitExtensions x) == x", property $ \(QFilePathValidAFP_P x) -> uncurry AFP_P.addExtension (AFP_P.splitExtensions x) == x) + ,("uncurry AFP_W.addExtension (AFP_W.splitExtensions x) == x", property $ \(QFilePathValidAFP_W x) -> uncurry AFP_W.addExtension (AFP_W.splitExtensions x) == x) + ,("P.dropExtensions \"/directory/path.ext\" == \"/directory/path\"", property $ P.dropExtensions "/directory/path.ext" == "/directory/path") + ,("W.dropExtensions \"/directory/path.ext\" == \"/directory/path\"", property $ W.dropExtensions "/directory/path.ext" == "/directory/path") + ,("AFP_P.dropExtensions (\"/directory/path.ext\") == (\"/directory/path\")", property $ AFP_P.dropExtensions ("/directory/path.ext") == ("/directory/path")) + ,("AFP_W.dropExtensions (\"/directory/path.ext\") == (\"/directory/path\")", property $ AFP_W.dropExtensions ("/directory/path.ext") == ("/directory/path")) + ,("P.dropExtensions 
\"file.tar.gz\" == \"file\"", property $ P.dropExtensions "file.tar.gz" == "file") + ,("W.dropExtensions \"file.tar.gz\" == \"file\"", property $ W.dropExtensions "file.tar.gz" == "file") + ,("AFP_P.dropExtensions (\"file.tar.gz\") == (\"file\")", property $ AFP_P.dropExtensions ("file.tar.gz") == ("file")) + ,("AFP_W.dropExtensions (\"file.tar.gz\") == (\"file\")", property $ AFP_W.dropExtensions ("file.tar.gz") == ("file")) + ,("not $ P.hasExtension $ P.dropExtensions x", property $ \(QFilePath x) -> not $ P.hasExtension $ P.dropExtensions x) + ,("not $ W.hasExtension $ W.dropExtensions x", property $ \(QFilePath x) -> not $ W.hasExtension $ W.dropExtensions x) + ,("not $ AFP_P.hasExtension $ AFP_P.dropExtensions x", property $ \(QFilePathAFP_P x) -> not $ AFP_P.hasExtension $ AFP_P.dropExtensions x) + ,("not $ AFP_W.hasExtension $ AFP_W.dropExtensions x", property $ \(QFilePathAFP_W x) -> not $ AFP_W.hasExtension $ AFP_W.dropExtensions x) + ,("not $ any P.isExtSeparator $ P.takeFileName $ P.dropExtensions x", property $ \(QFilePath x) -> not $ any P.isExtSeparator $ P.takeFileName $ P.dropExtensions x) + ,("not $ any W.isExtSeparator $ W.takeFileName $ W.dropExtensions x", property $ \(QFilePath x) -> not $ any W.isExtSeparator $ W.takeFileName $ W.dropExtensions x) + ,("not $ (\\f (getPosixString -> x) -> SBS.any (f . PW) x) AFP_P.isExtSeparator $ AFP_P.takeFileName $ AFP_P.dropExtensions x", property $ \(QFilePathAFP_P x) -> not $ (\f (getPosixString -> x) -> SBS.any (f . PW) x) AFP_P.isExtSeparator $ AFP_P.takeFileName $ AFP_P.dropExtensions x) + ,("not $ (\\f (getWindowsString -> x) -> SBS16.any (f . WW) x) AFP_W.isExtSeparator $ AFP_W.takeFileName $ AFP_W.dropExtensions x", property $ \(QFilePathAFP_W x) -> not $ (\f (getWindowsString -> x) -> SBS16.any (f . 
WW) x) AFP_W.isExtSeparator $ AFP_W.takeFileName $ AFP_W.dropExtensions x) + ,("P.takeExtensions \"/directory/path.ext\" == \".ext\"", property $ P.takeExtensions "/directory/path.ext" == ".ext") + ,("W.takeExtensions \"/directory/path.ext\" == \".ext\"", property $ W.takeExtensions "/directory/path.ext" == ".ext") + ,("AFP_P.takeExtensions (\"/directory/path.ext\") == (\".ext\")", property $ AFP_P.takeExtensions ("/directory/path.ext") == (".ext")) + ,("AFP_W.takeExtensions (\"/directory/path.ext\") == (\".ext\")", property $ AFP_W.takeExtensions ("/directory/path.ext") == (".ext")) + ,("P.takeExtensions \"file.tar.gz\" == \".tar.gz\"", property $ P.takeExtensions "file.tar.gz" == ".tar.gz") + ,("W.takeExtensions \"file.tar.gz\" == \".tar.gz\"", property $ W.takeExtensions "file.tar.gz" == ".tar.gz") + ,("AFP_P.takeExtensions (\"file.tar.gz\") == (\".tar.gz\")", property $ AFP_P.takeExtensions ("file.tar.gz") == (".tar.gz")) + ,("AFP_W.takeExtensions (\"file.tar.gz\") == (\".tar.gz\")", property $ AFP_W.takeExtensions ("file.tar.gz") == (".tar.gz")) + ,("P.replaceExtensions \"file.fred.bob\" \"txt\" == \"file.txt\"", property $ P.replaceExtensions "file.fred.bob" "txt" == "file.txt") + ,("W.replaceExtensions \"file.fred.bob\" \"txt\" == \"file.txt\"", property $ W.replaceExtensions "file.fred.bob" "txt" == "file.txt") + ,("AFP_P.replaceExtensions (\"file.fred.bob\") (\"txt\") == (\"file.txt\")", property $ AFP_P.replaceExtensions ("file.fred.bob") ("txt") == ("file.txt")) + ,("AFP_W.replaceExtensions (\"file.fred.bob\") (\"txt\") == (\"file.txt\")", property $ AFP_W.replaceExtensions ("file.fred.bob") ("txt") == ("file.txt")) + ,("P.replaceExtensions \"file.fred.bob\" \"tar.gz\" == \"file.tar.gz\"", property $ P.replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz") + ,("W.replaceExtensions \"file.fred.bob\" \"tar.gz\" == \"file.tar.gz\"", property $ W.replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz") + ,("AFP_P.replaceExtensions 
(\"file.fred.bob\") (\"tar.gz\") == (\"file.tar.gz\")", property $ AFP_P.replaceExtensions ("file.fred.bob") ("tar.gz") == ("file.tar.gz")) + ,("AFP_W.replaceExtensions (\"file.fred.bob\") (\"tar.gz\") == (\"file.tar.gz\")", property $ AFP_W.replaceExtensions ("file.fred.bob") ("tar.gz") == ("file.tar.gz")) + ,("uncurry (<>) (P.splitDrive x) == x", property $ \(QFilePath x) -> uncurry (<>) (P.splitDrive x) == x) + ,("uncurry (<>) (W.splitDrive x) == x", property $ \(QFilePath x) -> uncurry (<>) (W.splitDrive x) == x) + ,("uncurry (<>) (AFP_P.splitDrive x) == x", property $ \(QFilePathAFP_P x) -> uncurry (<>) (AFP_P.splitDrive x) == x) + ,("uncurry (<>) (AFP_W.splitDrive x) == x", property $ \(QFilePathAFP_W x) -> uncurry (<>) (AFP_W.splitDrive x) == x) + ,("W.splitDrive \"file\" == (\"\", \"file\")", property $ W.splitDrive "file" == ("", "file")) + ,("AFP_W.splitDrive (\"file\") == ((\"\"), (\"file\"))", property $ AFP_W.splitDrive ("file") == ((""), ("file"))) + ,("W.splitDrive \"c:/file\" == (\"c:/\", \"file\")", property $ W.splitDrive "c:/file" == ("c:/", "file")) + ,("AFP_W.splitDrive (\"c:/file\") == ((\"c:/\"), (\"file\"))", property $ AFP_W.splitDrive ("c:/file") == (("c:/"), ("file"))) + ,("W.splitDrive \"c:\\\\file\" == (\"c:\\\\\", \"file\")", property $ W.splitDrive "c:\\file" == ("c:\\", "file")) + ,("AFP_W.splitDrive (\"c:\\\\file\") == ((\"c:\\\\\"), (\"file\"))", property $ AFP_W.splitDrive ("c:\\file") == (("c:\\"), ("file"))) + ,("W.splitDrive \"\\\\\\\\shared\\\\test\" == (\"\\\\\\\\shared\\\\\", \"test\")", property $ W.splitDrive "\\\\shared\\test" == ("\\\\shared\\", "test")) + ,("AFP_W.splitDrive (\"\\\\\\\\shared\\\\test\") == ((\"\\\\\\\\shared\\\\\"), (\"test\"))", property $ AFP_W.splitDrive ("\\\\shared\\test") == (("\\\\shared\\"), ("test"))) + ,("W.splitDrive \"\\\\\\\\shared\" == (\"\\\\\\\\shared\", \"\")", property $ W.splitDrive "\\\\shared" == ("\\\\shared", "")) + ,("AFP_W.splitDrive (\"\\\\\\\\shared\") == 
((\"\\\\\\\\shared\"), (\"\"))", property $ AFP_W.splitDrive ("\\\\shared") == (("\\\\shared"), (""))) + ,("W.splitDrive \"\\\\\\\\?\\\\UNC\\\\shared\\\\file\" == (\"\\\\\\\\?\\\\UNC\\\\shared\\\\\", \"file\")", property $ W.splitDrive "\\\\?\\UNC\\shared\\file" == ("\\\\?\\UNC\\shared\\", "file")) + ,("AFP_W.splitDrive (\"\\\\\\\\?\\\\UNC\\\\shared\\\\file\") == ((\"\\\\\\\\?\\\\UNC\\\\shared\\\\\"), (\"file\"))", property $ AFP_W.splitDrive ("\\\\?\\UNC\\shared\\file") == (("\\\\?\\UNC\\shared\\"), ("file"))) + ,("W.splitDrive \"\\\\\\\\?\\\\UNCshared\\\\file\" == (\"\\\\\\\\?\\\\\", \"UNCshared\\\\file\")", property $ W.splitDrive "\\\\?\\UNCshared\\file" == ("\\\\?\\", "UNCshared\\file")) + ,("AFP_W.splitDrive (\"\\\\\\\\?\\\\UNCshared\\\\file\") == ((\"\\\\\\\\?\\\\\"), (\"UNCshared\\\\file\"))", property $ AFP_W.splitDrive ("\\\\?\\UNCshared\\file") == (("\\\\?\\"), ("UNCshared\\file"))) + ,("W.splitDrive \"\\\\\\\\?\\\\d:\\\\file\" == (\"\\\\\\\\?\\\\d:\\\\\", \"file\")", property $ W.splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\", "file")) + ,("AFP_W.splitDrive (\"\\\\\\\\?\\\\d:\\\\file\") == ((\"\\\\\\\\?\\\\d:\\\\\"), (\"file\"))", property $ AFP_W.splitDrive ("\\\\?\\d:\\file") == (("\\\\?\\d:\\"), ("file"))) + ,("W.splitDrive \"/d\" == (\"\", \"/d\")", property $ W.splitDrive "/d" == ("", "/d")) + ,("AFP_W.splitDrive (\"/d\") == ((\"\"), (\"/d\"))", property $ AFP_W.splitDrive ("/d") == ((""), ("/d"))) + ,("P.splitDrive \"/test\" == (\"/\", \"test\")", property $ P.splitDrive "/test" == ("/", "test")) + ,("AFP_P.splitDrive (\"/test\") == ((\"/\"), (\"test\"))", property $ AFP_P.splitDrive ("/test") == (("/"), ("test"))) + ,("P.splitDrive \"//test\" == (\"//\", \"test\")", property $ P.splitDrive "//test" == ("//", "test")) + ,("AFP_P.splitDrive (\"//test\") == ((\"//\"), (\"test\"))", property $ AFP_P.splitDrive ("//test") == (("//"), ("test"))) + ,("P.splitDrive \"test/file\" == (\"\", \"test/file\")", property $ P.splitDrive "test/file" == ("", 
"test/file")) + ,("AFP_P.splitDrive (\"test/file\") == ((\"\"), (\"test/file\"))", property $ AFP_P.splitDrive ("test/file") == ((""), ("test/file"))) + ,("P.splitDrive \"file\" == (\"\", \"file\")", property $ P.splitDrive "file" == ("", "file")) + ,("AFP_P.splitDrive (\"file\") == ((\"\"), (\"file\"))", property $ AFP_P.splitDrive ("file") == ((""), ("file"))) + ,("uncurry P.joinDrive (P.splitDrive x) == x", property $ \(QFilePathValidP x) -> uncurry P.joinDrive (P.splitDrive x) == x) + ,("uncurry W.joinDrive (W.splitDrive x) == x", property $ \(QFilePathValidW x) -> uncurry W.joinDrive (W.splitDrive x) == x) + ,("uncurry AFP_P.joinDrive (AFP_P.splitDrive x) == x", property $ \(QFilePathValidAFP_P x) -> uncurry AFP_P.joinDrive (AFP_P.splitDrive x) == x) + ,("uncurry AFP_W.joinDrive (AFP_W.splitDrive x) == x", property $ \(QFilePathValidAFP_W x) -> uncurry AFP_W.joinDrive (AFP_W.splitDrive x) == x) + ,("W.joinDrive \"C:\" \"foo\" == \"C:foo\"", property $ W.joinDrive "C:" "foo" == "C:foo") + ,("AFP_W.joinDrive (\"C:\") (\"foo\") == (\"C:foo\")", property $ AFP_W.joinDrive ("C:") ("foo") == ("C:foo")) + ,("W.joinDrive \"C:\\\\\" \"bar\" == \"C:\\\\bar\"", property $ W.joinDrive "C:\\" "bar" == "C:\\bar") + ,("AFP_W.joinDrive (\"C:\\\\\") (\"bar\") == (\"C:\\\\bar\")", property $ AFP_W.joinDrive ("C:\\") ("bar") == ("C:\\bar")) + ,("W.joinDrive \"\\\\\\\\share\" \"foo\" == \"\\\\\\\\share\\\\foo\"", property $ W.joinDrive "\\\\share" "foo" == "\\\\share\\foo") + ,("AFP_W.joinDrive (\"\\\\\\\\share\") (\"foo\") == (\"\\\\\\\\share\\\\foo\")", property $ AFP_W.joinDrive ("\\\\share") ("foo") == ("\\\\share\\foo")) + ,("W.joinDrive \"/:\" \"foo\" == \"/:\\\\foo\"", property $ W.joinDrive "/:" "foo" == "/:\\foo") + ,("AFP_W.joinDrive (\"/:\") (\"foo\") == (\"/:\\\\foo\")", property $ AFP_W.joinDrive ("/:") ("foo") == ("/:\\foo")) + ,("P.takeDrive x == fst (P.splitDrive x)", property $ \(QFilePath x) -> P.takeDrive x == fst (P.splitDrive x)) + ,("W.takeDrive x == fst 
(W.splitDrive x)", property $ \(QFilePath x) -> W.takeDrive x == fst (W.splitDrive x)) + ,("AFP_P.takeDrive x == fst (AFP_P.splitDrive x)", property $ \(QFilePathAFP_P x) -> AFP_P.takeDrive x == fst (AFP_P.splitDrive x)) + ,("AFP_W.takeDrive x == fst (AFP_W.splitDrive x)", property $ \(QFilePathAFP_W x) -> AFP_W.takeDrive x == fst (AFP_W.splitDrive x)) + ,("P.dropDrive x == snd (P.splitDrive x)", property $ \(QFilePath x) -> P.dropDrive x == snd (P.splitDrive x)) + ,("W.dropDrive x == snd (W.splitDrive x)", property $ \(QFilePath x) -> W.dropDrive x == snd (W.splitDrive x)) + ,("AFP_P.dropDrive x == snd (AFP_P.splitDrive x)", property $ \(QFilePathAFP_P x) -> AFP_P.dropDrive x == snd (AFP_P.splitDrive x)) + ,("AFP_W.dropDrive x == snd (AFP_W.splitDrive x)", property $ \(QFilePathAFP_W x) -> AFP_W.dropDrive x == snd (AFP_W.splitDrive x)) + ,("not (P.hasDrive x) == null (P.takeDrive x)", property $ \(QFilePath x) -> not (P.hasDrive x) == null (P.takeDrive x)) + ,("not (W.hasDrive x) == null (W.takeDrive x)", property $ \(QFilePath x) -> not (W.hasDrive x) == null (W.takeDrive x)) + ,("not (AFP_P.hasDrive x) == (SBS.null . getPosixString) (AFP_P.takeDrive x)", property $ \(QFilePathAFP_P x) -> not (AFP_P.hasDrive x) == (SBS.null . getPosixString) (AFP_P.takeDrive x)) + ,("not (AFP_W.hasDrive x) == (SBS16.null . getWindowsString) (AFP_W.takeDrive x)", property $ \(QFilePathAFP_W x) -> not (AFP_W.hasDrive x) == (SBS16.null . 
getWindowsString) (AFP_W.takeDrive x)) + ,("P.hasDrive \"/foo\" == True", property $ P.hasDrive "/foo" == True) + ,("AFP_P.hasDrive (\"/foo\") == True", property $ AFP_P.hasDrive ("/foo") == True) + ,("W.hasDrive \"C:\\\\foo\" == True", property $ W.hasDrive "C:\\foo" == True) + ,("AFP_W.hasDrive (\"C:\\\\foo\") == True", property $ AFP_W.hasDrive ("C:\\foo") == True) + ,("W.hasDrive \"C:foo\" == True", property $ W.hasDrive "C:foo" == True) + ,("AFP_W.hasDrive (\"C:foo\") == True", property $ AFP_W.hasDrive ("C:foo") == True) + ,("P.hasDrive \"foo\" == False", property $ P.hasDrive "foo" == False) + ,("W.hasDrive \"foo\" == False", property $ W.hasDrive "foo" == False) + ,("AFP_P.hasDrive (\"foo\") == False", property $ AFP_P.hasDrive ("foo") == False) + ,("AFP_W.hasDrive (\"foo\") == False", property $ AFP_W.hasDrive ("foo") == False) + ,("P.hasDrive \"\" == False", property $ P.hasDrive "" == False) + ,("W.hasDrive \"\" == False", property $ W.hasDrive "" == False) + ,("AFP_P.hasDrive (\"\") == False", property $ AFP_P.hasDrive ("") == False) + ,("AFP_W.hasDrive (\"\") == False", property $ AFP_W.hasDrive ("") == False) + ,("P.isDrive \"/\" == True", property $ P.isDrive "/" == True) + ,("AFP_P.isDrive (\"/\") == True", property $ AFP_P.isDrive ("/") == True) + ,("P.isDrive \"/foo\" == False", property $ P.isDrive "/foo" == False) + ,("AFP_P.isDrive (\"/foo\") == False", property $ AFP_P.isDrive ("/foo") == False) + ,("W.isDrive \"C:\\\\\" == True", property $ W.isDrive "C:\\" == True) + ,("AFP_W.isDrive (\"C:\\\\\") == True", property $ AFP_W.isDrive ("C:\\") == True) + ,("W.isDrive \"C:\\\\foo\" == False", property $ W.isDrive "C:\\foo" == False) + ,("AFP_W.isDrive (\"C:\\\\foo\") == False", property $ AFP_W.isDrive ("C:\\foo") == False) + ,("P.isDrive \"\" == False", property $ P.isDrive "" == False) + ,("W.isDrive \"\" == False", property $ W.isDrive "" == False) + ,("AFP_P.isDrive (\"\") == False", property $ AFP_P.isDrive ("") == False) + ,("AFP_W.isDrive 
(\"\") == False", property $ AFP_W.isDrive ("") == False) + ,("P.splitFileName \"/directory/file.ext\" == (\"/directory/\", \"file.ext\")", property $ P.splitFileName "/directory/file.ext" == ("/directory/", "file.ext")) + ,("W.splitFileName \"/directory/file.ext\" == (\"/directory/\", \"file.ext\")", property $ W.splitFileName "/directory/file.ext" == ("/directory/", "file.ext")) + ,("AFP_P.splitFileName (\"/directory/file.ext\") == ((\"/directory/\"), (\"file.ext\"))", property $ AFP_P.splitFileName ("/directory/file.ext") == (("/directory/"), ("file.ext"))) + ,("AFP_W.splitFileName (\"/directory/file.ext\") == ((\"/directory/\"), (\"file.ext\"))", property $ AFP_W.splitFileName ("/directory/file.ext") == (("/directory/"), ("file.ext"))) + ,("uncurry (P.</>) (P.splitFileName x) == x || fst (P.splitFileName x) == \"./\"", property $ \(QFilePathValidP x) -> uncurry (P.</>) (P.splitFileName x) == x || fst (P.splitFileName x) == "./") + ,("uncurry (W.</>) (W.splitFileName x) == x || fst (W.splitFileName x) == \"./\"", property $ \(QFilePathValidW x) -> uncurry (W.</>) (W.splitFileName x) == x || fst (W.splitFileName x) == "./") + ,("uncurry (AFP_P.</>) (AFP_P.splitFileName x) == x || fst (AFP_P.splitFileName x) == (\"./\")", property $ \(QFilePathValidAFP_P x) -> uncurry (AFP_P.</>) (AFP_P.splitFileName x) == x || fst (AFP_P.splitFileName x) == ("./")) + ,("uncurry (AFP_W.</>) (AFP_W.splitFileName x) == x || fst (AFP_W.splitFileName x) == (\"./\")", property $ \(QFilePathValidAFP_W x) -> uncurry (AFP_W.</>) 
(AFP_W.splitFileName x) == x || fst (AFP_W.splitFileName x) == ("./")) + ,("P.isValid (fst (P.splitFileName x))", property $ \(QFilePathValidP x) -> P.isValid (fst (P.splitFileName x))) + ,("W.isValid (fst (W.splitFileName x))", property $ \(QFilePathValidW x) -> W.isValid (fst (W.splitFileName x))) + ,("AFP_P.isValid (fst (AFP_P.splitFileName x))", property $ \(QFilePathValidAFP_P x) -> AFP_P.isValid (fst (AFP_P.splitFileName x))) + ,("AFP_W.isValid (fst (AFP_W.splitFileName x))", property $ \(QFilePathValidAFP_W x) -> AFP_W.isValid (fst (AFP_W.splitFileName x))) + ,("P.splitFileName \"file/bob.txt\" == (\"file/\", \"bob.txt\")", property $ P.splitFileName "file/bob.txt" == ("file/", "bob.txt")) + ,("W.splitFileName \"file/bob.txt\" == (\"file/\", \"bob.txt\")", property $ W.splitFileName "file/bob.txt" == ("file/", "bob.txt")) + ,("AFP_P.splitFileName (\"file/bob.txt\") == ((\"file/\"), (\"bob.txt\"))", property $ AFP_P.splitFileName ("file/bob.txt") == (("file/"), ("bob.txt"))) + ,("AFP_W.splitFileName (\"file/bob.txt\") == ((\"file/\"), (\"bob.txt\"))", property $ AFP_W.splitFileName ("file/bob.txt") == (("file/"), ("bob.txt"))) + ,("P.splitFileName \"file/\" == (\"file/\", \"\")", property $ P.splitFileName "file/" == ("file/", "")) + ,("W.splitFileName \"file/\" == (\"file/\", \"\")", property $ W.splitFileName "file/" == ("file/", "")) + ,("AFP_P.splitFileName (\"file/\") == ((\"file/\"), (\"\"))", property $ AFP_P.splitFileName ("file/") == (("file/"), (""))) + ,("AFP_W.splitFileName (\"file/\") == ((\"file/\"), (\"\"))", property $ AFP_W.splitFileName ("file/") == (("file/"), (""))) + ,("P.splitFileName \"bob\" == (\"./\", \"bob\")", property $ P.splitFileName "bob" == ("./", "bob")) + ,("W.splitFileName \"bob\" == (\"./\", \"bob\")", property $ W.splitFileName "bob" == ("./", "bob")) + ,("AFP_P.splitFileName (\"bob\") == ((\"./\"), (\"bob\"))", property $ AFP_P.splitFileName ("bob") == (("./"), ("bob"))) + ,("AFP_W.splitFileName (\"bob\") == ((\"./\"), 
(\"bob\"))", property $ AFP_W.splitFileName ("bob") == (("./"), ("bob"))) + ,("P.splitFileName \"/\" == (\"/\", \"\")", property $ P.splitFileName "/" == ("/", "")) + ,("AFP_P.splitFileName (\"/\") == ((\"/\"), (\"\"))", property $ AFP_P.splitFileName ("/") == (("/"), (""))) + ,("W.splitFileName \"c:\" == (\"c:\", \"\")", property $ W.splitFileName "c:" == ("c:", "")) + ,("AFP_W.splitFileName (\"c:\") == ((\"c:\"), (\"\"))", property $ AFP_W.splitFileName ("c:") == (("c:"), (""))) + ,("W.splitFileName \"\\\\\\\\?\\\\A:\\\\fred\" == (\"\\\\\\\\?\\\\A:\\\\\", \"fred\")", property $ W.splitFileName "\\\\?\\A:\\fred" == ("\\\\?\\A:\\", "fred")) + ,("AFP_W.splitFileName (\"\\\\\\\\?\\\\A:\\\\fred\") == ((\"\\\\\\\\?\\\\A:\\\\\"), (\"fred\"))", property $ AFP_W.splitFileName ("\\\\?\\A:\\fred") == (("\\\\?\\A:\\"), ("fred"))) + ,("W.splitFileName \"\\\\\\\\?\\\\A:\" == (\"\\\\\\\\?\\\\A:\", \"\")", property $ W.splitFileName "\\\\?\\A:" == ("\\\\?\\A:", "")) + ,("AFP_W.splitFileName (\"\\\\\\\\?\\\\A:\") == ((\"\\\\\\\\?\\\\A:\"), (\"\"))", property $ AFP_W.splitFileName ("\\\\?\\A:") == (("\\\\?\\A:"), (""))) + ,("P.replaceFileName \"/directory/other.txt\" \"file.ext\" == \"/directory/file.ext\"", property $ P.replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext") + ,("W.replaceFileName \"/directory/other.txt\" \"file.ext\" == \"/directory/file.ext\"", property $ W.replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext") + ,("AFP_P.replaceFileName (\"/directory/other.txt\") (\"file.ext\") == (\"/directory/file.ext\")", property $ AFP_P.replaceFileName ("/directory/other.txt") ("file.ext") == ("/directory/file.ext")) + ,("AFP_W.replaceFileName (\"/directory/other.txt\") (\"file.ext\") == (\"/directory/file.ext\")", property $ AFP_W.replaceFileName ("/directory/other.txt") ("file.ext") == ("/directory/file.ext")) + ,("P.replaceFileName x (P.takeFileName x) == x", property $ \(QFilePathValidP x) -> P.replaceFileName x (P.takeFileName 
x) == x) + ,("W.replaceFileName x (W.takeFileName x) == x", property $ \(QFilePathValidW x) -> W.replaceFileName x (W.takeFileName x) == x) + ,("AFP_P.replaceFileName x (AFP_P.takeFileName x) == x", property $ \(QFilePathValidAFP_P x) -> AFP_P.replaceFileName x (AFP_P.takeFileName x) == x) + ,("AFP_W.replaceFileName x (AFP_W.takeFileName x) == x", property $ \(QFilePathValidAFP_W x) -> AFP_W.replaceFileName x (AFP_W.takeFileName x) == x) + ,("P.dropFileName \"/directory/file.ext\" == \"/directory/\"", property $ P.dropFileName "/directory/file.ext" == "/directory/") + ,("W.dropFileName \"/directory/file.ext\" == \"/directory/\"", property $ W.dropFileName "/directory/file.ext" == "/directory/") + ,("AFP_P.dropFileName (\"/directory/file.ext\") == (\"/directory/\")", property $ AFP_P.dropFileName ("/directory/file.ext") == ("/directory/")) + ,("AFP_W.dropFileName (\"/directory/file.ext\") == (\"/directory/\")", property $ AFP_W.dropFileName ("/directory/file.ext") == ("/directory/")) + ,("P.dropFileName x == fst (P.splitFileName x)", property $ \(QFilePath x) -> P.dropFileName x == fst (P.splitFileName x)) + ,("W.dropFileName x == fst (W.splitFileName x)", property $ \(QFilePath x) -> W.dropFileName x == fst (W.splitFileName x)) + ,("AFP_P.dropFileName x == fst (AFP_P.splitFileName x)", property $ \(QFilePathAFP_P x) -> AFP_P.dropFileName x == fst (AFP_P.splitFileName x)) + ,("AFP_W.dropFileName x == fst (AFP_W.splitFileName x)", property $ \(QFilePathAFP_W x) -> AFP_W.dropFileName x == fst (AFP_W.splitFileName x)) + ,("isPrefixOf (P.takeDrive x) (P.dropFileName x)", property $ \(QFilePath x) -> isPrefixOf (P.takeDrive x) (P.dropFileName x)) + ,("isPrefixOf (W.takeDrive x) (W.dropFileName x)", property $ \(QFilePath x) -> isPrefixOf (W.takeDrive x) (W.dropFileName x)) + ,("(\\(getPosixString -> x) (getPosixString -> y) -> SBS.isPrefixOf x y) (AFP_P.takeDrive x) (AFP_P.dropFileName x)", property $ \(QFilePathAFP_P x) -> (\(getPosixString -> x) (getPosixString -> y) 
-> SBS.isPrefixOf x y) (AFP_P.takeDrive x) (AFP_P.dropFileName x)) + ,("(\\(getWindowsString -> x) (getWindowsString -> y) -> SBS16.isPrefixOf x y) (AFP_W.takeDrive x) (AFP_W.dropFileName x)", property $ \(QFilePathAFP_W x) -> (\(getWindowsString -> x) (getWindowsString -> y) -> SBS16.isPrefixOf x y) (AFP_W.takeDrive x) (AFP_W.dropFileName x)) + ,("P.takeFileName \"/directory/file.ext\" == \"file.ext\"", property $ P.takeFileName "/directory/file.ext" == "file.ext") + ,("W.takeFileName \"/directory/file.ext\" == \"file.ext\"", property $ W.takeFileName "/directory/file.ext" == "file.ext") + ,("AFP_P.takeFileName (\"/directory/file.ext\") == (\"file.ext\")", property $ AFP_P.takeFileName ("/directory/file.ext") == ("file.ext")) + ,("AFP_W.takeFileName (\"/directory/file.ext\") == (\"file.ext\")", property $ AFP_W.takeFileName ("/directory/file.ext") == ("file.ext")) + ,("P.takeFileName \"test/\" == \"\"", property $ P.takeFileName "test/" == "") + ,("W.takeFileName \"test/\" == \"\"", property $ W.takeFileName "test/" == "") + ,("AFP_P.takeFileName (\"test/\") == (\"\")", property $ AFP_P.takeFileName ("test/") == ("")) + ,("AFP_W.takeFileName (\"test/\") == (\"\")", property $ AFP_W.takeFileName ("test/") == ("")) + ,("isSuffixOf (P.takeFileName x) x", property $ \(QFilePath x) -> isSuffixOf (P.takeFileName x) x) + ,("isSuffixOf (W.takeFileName x) x", property $ \(QFilePath x) -> isSuffixOf (W.takeFileName x) x) + ,("(\\(getPosixString -> x) (getPosixString -> y) -> SBS.isSuffixOf x y) (AFP_P.takeFileName x) x", property $ \(QFilePathAFP_P x) -> (\(getPosixString -> x) (getPosixString -> y) -> SBS.isSuffixOf x y) (AFP_P.takeFileName x) x) + ,("(\\(getWindowsString -> x) (getWindowsString -> y) -> SBS16.isSuffixOf x y) (AFP_W.takeFileName x) x", property $ \(QFilePathAFP_W x) -> (\(getWindowsString -> x) (getWindowsString -> y) -> SBS16.isSuffixOf x y) (AFP_W.takeFileName x) x) + ,("P.takeFileName x == snd (P.splitFileName x)", property $ \(QFilePath x) -> 
P.takeFileName x == snd (P.splitFileName x)) + ,("W.takeFileName x == snd (W.splitFileName x)", property $ \(QFilePath x) -> W.takeFileName x == snd (W.splitFileName x)) + ,("AFP_P.takeFileName x == snd (AFP_P.splitFileName x)", property $ \(QFilePathAFP_P x) -> AFP_P.takeFileName x == snd (AFP_P.splitFileName x)) + ,("AFP_W.takeFileName x == snd (AFP_W.splitFileName x)", property $ \(QFilePathAFP_W x) -> AFP_W.takeFileName x == snd (AFP_W.splitFileName x)) + ,("P.takeFileName (P.replaceFileName x \"fred\") == \"fred\"", property $ \(QFilePathValidP x) -> P.takeFileName (P.replaceFileName x "fred") == "fred") + ,("W.takeFileName (W.replaceFileName x \"fred\") == \"fred\"", property $ \(QFilePathValidW x) -> W.takeFileName (W.replaceFileName x "fred") == "fred") + ,("AFP_P.takeFileName (AFP_P.replaceFileName x (\"fred\")) == (\"fred\")", property $ \(QFilePathValidAFP_P x) -> AFP_P.takeFileName (AFP_P.replaceFileName x ("fred")) == ("fred")) + ,("AFP_W.takeFileName (AFP_W.replaceFileName x (\"fred\")) == (\"fred\")", property $ \(QFilePathValidAFP_W x) -> AFP_W.takeFileName (AFP_W.replaceFileName x ("fred")) == ("fred")) + ,("P.takeFileName (x P. \"fred\") == \"fred\"", property $ \(QFilePathValidP x) -> P.takeFileName (x P. "fred") == "fred") + ,("W.takeFileName (x W. \"fred\") == \"fred\"", property $ \(QFilePathValidW x) -> W.takeFileName (x W. "fred") == "fred") + ,("AFP_P.takeFileName (x AFP_P. (\"fred\")) == (\"fred\")", property $ \(QFilePathValidAFP_P x) -> AFP_P.takeFileName (x AFP_P. ("fred")) == ("fred")) + ,("AFP_W.takeFileName (x AFP_W. (\"fred\")) == (\"fred\")", property $ \(QFilePathValidAFP_W x) -> AFP_W.takeFileName (x AFP_W. 
("fred")) == ("fred")) + ,("P.isRelative (P.takeFileName x)", property $ \(QFilePathValidP x) -> P.isRelative (P.takeFileName x)) + ,("W.isRelative (W.takeFileName x)", property $ \(QFilePathValidW x) -> W.isRelative (W.takeFileName x)) + ,("AFP_P.isRelative (AFP_P.takeFileName x)", property $ \(QFilePathValidAFP_P x) -> AFP_P.isRelative (AFP_P.takeFileName x)) + ,("AFP_W.isRelative (AFP_W.takeFileName x)", property $ \(QFilePathValidAFP_W x) -> AFP_W.isRelative (AFP_W.takeFileName x)) + ,("P.takeBaseName \"/directory/file.ext\" == \"file\"", property $ P.takeBaseName "/directory/file.ext" == "file") + ,("W.takeBaseName \"/directory/file.ext\" == \"file\"", property $ W.takeBaseName "/directory/file.ext" == "file") + ,("AFP_P.takeBaseName (\"/directory/file.ext\") == (\"file\")", property $ AFP_P.takeBaseName ("/directory/file.ext") == ("file")) + ,("AFP_W.takeBaseName (\"/directory/file.ext\") == (\"file\")", property $ AFP_W.takeBaseName ("/directory/file.ext") == ("file")) + ,("P.takeBaseName \"file/test.txt\" == \"test\"", property $ P.takeBaseName "file/test.txt" == "test") + ,("W.takeBaseName \"file/test.txt\" == \"test\"", property $ W.takeBaseName "file/test.txt" == "test") + ,("AFP_P.takeBaseName (\"file/test.txt\") == (\"test\")", property $ AFP_P.takeBaseName ("file/test.txt") == ("test")) + ,("AFP_W.takeBaseName (\"file/test.txt\") == (\"test\")", property $ AFP_W.takeBaseName ("file/test.txt") == ("test")) + ,("P.takeBaseName \"dave.ext\" == \"dave\"", property $ P.takeBaseName "dave.ext" == "dave") + ,("W.takeBaseName \"dave.ext\" == \"dave\"", property $ W.takeBaseName "dave.ext" == "dave") + ,("AFP_P.takeBaseName (\"dave.ext\") == (\"dave\")", property $ AFP_P.takeBaseName ("dave.ext") == ("dave")) + ,("AFP_W.takeBaseName (\"dave.ext\") == (\"dave\")", property $ AFP_W.takeBaseName ("dave.ext") == ("dave")) + ,("P.takeBaseName \"\" == \"\"", property $ P.takeBaseName "" == "") + ,("W.takeBaseName \"\" == \"\"", property $ W.takeBaseName "" == "") + 
,("AFP_P.takeBaseName (\"\") == (\"\")", property $ AFP_P.takeBaseName ("") == ("")) + ,("AFP_W.takeBaseName (\"\") == (\"\")", property $ AFP_W.takeBaseName ("") == ("")) + ,("P.takeBaseName \"test\" == \"test\"", property $ P.takeBaseName "test" == "test") + ,("W.takeBaseName \"test\" == \"test\"", property $ W.takeBaseName "test" == "test") + ,("AFP_P.takeBaseName (\"test\") == (\"test\")", property $ AFP_P.takeBaseName ("test") == ("test")) + ,("AFP_W.takeBaseName (\"test\") == (\"test\")", property $ AFP_W.takeBaseName ("test") == ("test")) + ,("P.takeBaseName (P.addTrailingPathSeparator x) == \"\"", property $ \(QFilePath x) -> P.takeBaseName (P.addTrailingPathSeparator x) == "") + ,("W.takeBaseName (W.addTrailingPathSeparator x) == \"\"", property $ \(QFilePath x) -> W.takeBaseName (W.addTrailingPathSeparator x) == "") + ,("AFP_P.takeBaseName (AFP_P.addTrailingPathSeparator x) == (\"\")", property $ \(QFilePathAFP_P x) -> AFP_P.takeBaseName (AFP_P.addTrailingPathSeparator x) == ("")) + ,("AFP_W.takeBaseName (AFP_W.addTrailingPathSeparator x) == (\"\")", property $ \(QFilePathAFP_W x) -> AFP_W.takeBaseName (AFP_W.addTrailingPathSeparator x) == ("")) + ,("P.takeBaseName \"file/file.tar.gz\" == \"file.tar\"", property $ P.takeBaseName "file/file.tar.gz" == "file.tar") + ,("W.takeBaseName \"file/file.tar.gz\" == \"file.tar\"", property $ W.takeBaseName "file/file.tar.gz" == "file.tar") + ,("AFP_P.takeBaseName (\"file/file.tar.gz\") == (\"file.tar\")", property $ AFP_P.takeBaseName ("file/file.tar.gz") == ("file.tar")) + ,("AFP_W.takeBaseName (\"file/file.tar.gz\") == (\"file.tar\")", property $ AFP_W.takeBaseName ("file/file.tar.gz") == ("file.tar")) + ,("P.replaceBaseName \"/directory/other.ext\" \"file\" == \"/directory/file.ext\"", property $ P.replaceBaseName "/directory/other.ext" "file" == "/directory/file.ext") + ,("W.replaceBaseName \"/directory/other.ext\" \"file\" == \"/directory/file.ext\"", property $ W.replaceBaseName "/directory/other.ext" "file" 
== "/directory/file.ext") + ,("AFP_P.replaceBaseName (\"/directory/other.ext\") (\"file\") == (\"/directory/file.ext\")", property $ AFP_P.replaceBaseName ("/directory/other.ext") ("file") == ("/directory/file.ext")) + ,("AFP_W.replaceBaseName (\"/directory/other.ext\") (\"file\") == (\"/directory/file.ext\")", property $ AFP_W.replaceBaseName ("/directory/other.ext") ("file") == ("/directory/file.ext")) + ,("P.replaceBaseName \"file/test.txt\" \"bob\" == \"file/bob.txt\"", property $ P.replaceBaseName "file/test.txt" "bob" == "file/bob.txt") + ,("W.replaceBaseName \"file/test.txt\" \"bob\" == \"file/bob.txt\"", property $ W.replaceBaseName "file/test.txt" "bob" == "file/bob.txt") + ,("AFP_P.replaceBaseName (\"file/test.txt\") (\"bob\") == (\"file/bob.txt\")", property $ AFP_P.replaceBaseName ("file/test.txt") ("bob") == ("file/bob.txt")) + ,("AFP_W.replaceBaseName (\"file/test.txt\") (\"bob\") == (\"file/bob.txt\")", property $ AFP_W.replaceBaseName ("file/test.txt") ("bob") == ("file/bob.txt")) + ,("P.replaceBaseName \"fred\" \"bill\" == \"bill\"", property $ P.replaceBaseName "fred" "bill" == "bill") + ,("W.replaceBaseName \"fred\" \"bill\" == \"bill\"", property $ W.replaceBaseName "fred" "bill" == "bill") + ,("AFP_P.replaceBaseName (\"fred\") (\"bill\") == (\"bill\")", property $ AFP_P.replaceBaseName ("fred") ("bill") == ("bill")) + ,("AFP_W.replaceBaseName (\"fred\") (\"bill\") == (\"bill\")", property $ AFP_W.replaceBaseName ("fred") ("bill") == ("bill")) + ,("P.replaceBaseName \"/dave/fred/bob.gz.tar\" \"new\" == \"/dave/fred/new.tar\"", property $ P.replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar") + ,("W.replaceBaseName \"/dave/fred/bob.gz.tar\" \"new\" == \"/dave/fred/new.tar\"", property $ W.replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar") + ,("AFP_P.replaceBaseName (\"/dave/fred/bob.gz.tar\") (\"new\") == (\"/dave/fred/new.tar\")", property $ AFP_P.replaceBaseName ("/dave/fred/bob.gz.tar") ("new") == 
("/dave/fred/new.tar")) + ,("AFP_W.replaceBaseName (\"/dave/fred/bob.gz.tar\") (\"new\") == (\"/dave/fred/new.tar\")", property $ AFP_W.replaceBaseName ("/dave/fred/bob.gz.tar") ("new") == ("/dave/fred/new.tar")) + ,("P.replaceBaseName x (P.takeBaseName x) == x", property $ \(QFilePathValidP x) -> P.replaceBaseName x (P.takeBaseName x) == x) + ,("W.replaceBaseName x (W.takeBaseName x) == x", property $ \(QFilePathValidW x) -> W.replaceBaseName x (W.takeBaseName x) == x) + ,("AFP_P.replaceBaseName x (AFP_P.takeBaseName x) == x", property $ \(QFilePathValidAFP_P x) -> AFP_P.replaceBaseName x (AFP_P.takeBaseName x) == x) + ,("AFP_W.replaceBaseName x (AFP_W.takeBaseName x) == x", property $ \(QFilePathValidAFP_W x) -> AFP_W.replaceBaseName x (AFP_W.takeBaseName x) == x) + ,("P.hasTrailingPathSeparator \"test\" == False", property $ P.hasTrailingPathSeparator "test" == False) + ,("W.hasTrailingPathSeparator \"test\" == False", property $ W.hasTrailingPathSeparator "test" == False) + ,("AFP_P.hasTrailingPathSeparator (\"test\") == False", property $ AFP_P.hasTrailingPathSeparator ("test") == False) + ,("AFP_W.hasTrailingPathSeparator (\"test\") == False", property $ AFP_W.hasTrailingPathSeparator ("test") == False) + ,("P.hasTrailingPathSeparator \"test/\" == True", property $ P.hasTrailingPathSeparator "test/" == True) + ,("W.hasTrailingPathSeparator \"test/\" == True", property $ W.hasTrailingPathSeparator "test/" == True) + ,("AFP_P.hasTrailingPathSeparator (\"test/\") == True", property $ AFP_P.hasTrailingPathSeparator ("test/") == True) + ,("AFP_W.hasTrailingPathSeparator (\"test/\") == True", property $ AFP_W.hasTrailingPathSeparator ("test/") == True) + ,("P.hasTrailingPathSeparator (P.addTrailingPathSeparator x)", property $ \(QFilePath x) -> P.hasTrailingPathSeparator (P.addTrailingPathSeparator x)) + ,("W.hasTrailingPathSeparator (W.addTrailingPathSeparator x)", property $ \(QFilePath x) -> W.hasTrailingPathSeparator (W.addTrailingPathSeparator x)) + 
,("AFP_P.hasTrailingPathSeparator (AFP_P.addTrailingPathSeparator x)", property $ \(QFilePathAFP_P x) -> AFP_P.hasTrailingPathSeparator (AFP_P.addTrailingPathSeparator x)) + ,("AFP_W.hasTrailingPathSeparator (AFP_W.addTrailingPathSeparator x)", property $ \(QFilePathAFP_W x) -> AFP_W.hasTrailingPathSeparator (AFP_W.addTrailingPathSeparator x)) + ,("P.hasTrailingPathSeparator x ==> P.addTrailingPathSeparator x == x", property $ \(QFilePath x) -> P.hasTrailingPathSeparator x ==> P.addTrailingPathSeparator x == x) + ,("W.hasTrailingPathSeparator x ==> W.addTrailingPathSeparator x == x", property $ \(QFilePath x) -> W.hasTrailingPathSeparator x ==> W.addTrailingPathSeparator x == x) + ,("AFP_P.hasTrailingPathSeparator x ==> AFP_P.addTrailingPathSeparator x == x", property $ \(QFilePathAFP_P x) -> AFP_P.hasTrailingPathSeparator x ==> AFP_P.addTrailingPathSeparator x == x) + ,("AFP_W.hasTrailingPathSeparator x ==> AFP_W.addTrailingPathSeparator x == x", property $ \(QFilePathAFP_W x) -> AFP_W.hasTrailingPathSeparator x ==> AFP_W.addTrailingPathSeparator x == x) + ,("P.addTrailingPathSeparator \"test/rest\" == \"test/rest/\"", property $ P.addTrailingPathSeparator "test/rest" == "test/rest/") + ,("AFP_P.addTrailingPathSeparator (\"test/rest\") == (\"test/rest/\")", property $ AFP_P.addTrailingPathSeparator ("test/rest") == ("test/rest/")) + ,("P.dropTrailingPathSeparator \"file/test/\" == \"file/test\"", property $ P.dropTrailingPathSeparator "file/test/" == "file/test") + ,("W.dropTrailingPathSeparator \"file/test/\" == \"file/test\"", property $ W.dropTrailingPathSeparator "file/test/" == "file/test") + ,("AFP_P.dropTrailingPathSeparator (\"file/test/\") == (\"file/test\")", property $ AFP_P.dropTrailingPathSeparator ("file/test/") == ("file/test")) + ,("AFP_W.dropTrailingPathSeparator (\"file/test/\") == (\"file/test\")", property $ AFP_W.dropTrailingPathSeparator ("file/test/") == ("file/test")) + ,("P.dropTrailingPathSeparator \"/\" == \"/\"", property $ 
P.dropTrailingPathSeparator "/" == "/") + ,("W.dropTrailingPathSeparator \"/\" == \"/\"", property $ W.dropTrailingPathSeparator "/" == "/") + ,("AFP_P.dropTrailingPathSeparator (\"/\") == (\"/\")", property $ AFP_P.dropTrailingPathSeparator ("/") == ("/")) + ,("AFP_W.dropTrailingPathSeparator (\"/\") == (\"/\")", property $ AFP_W.dropTrailingPathSeparator ("/") == ("/")) + ,("W.dropTrailingPathSeparator \"\\\\\" == \"\\\\\"", property $ W.dropTrailingPathSeparator "\\" == "\\") + ,("AFP_W.dropTrailingPathSeparator (\"\\\\\") == (\"\\\\\")", property $ AFP_W.dropTrailingPathSeparator ("\\") == ("\\")) + ,("not (P.hasTrailingPathSeparator (P.dropTrailingPathSeparator x)) || P.isDrive x", property $ \(QFilePath x) -> not (P.hasTrailingPathSeparator (P.dropTrailingPathSeparator x)) || P.isDrive x) + ,("not (AFP_P.hasTrailingPathSeparator (AFP_P.dropTrailingPathSeparator x)) || AFP_P.isDrive x", property $ \(QFilePathAFP_P x) -> not (AFP_P.hasTrailingPathSeparator (AFP_P.dropTrailingPathSeparator x)) || AFP_P.isDrive x) + ,("P.takeDirectory \"/directory/other.ext\" == \"/directory\"", property $ P.takeDirectory "/directory/other.ext" == "/directory") + ,("W.takeDirectory \"/directory/other.ext\" == \"/directory\"", property $ W.takeDirectory "/directory/other.ext" == "/directory") + ,("AFP_P.takeDirectory (\"/directory/other.ext\") == (\"/directory\")", property $ AFP_P.takeDirectory ("/directory/other.ext") == ("/directory")) + ,("AFP_W.takeDirectory (\"/directory/other.ext\") == (\"/directory\")", property $ AFP_W.takeDirectory ("/directory/other.ext") == ("/directory")) + ,("isPrefixOf (P.takeDirectory x) x || P.takeDirectory x == \".\"", property $ \(QFilePath x) -> isPrefixOf (P.takeDirectory x) x || P.takeDirectory x == ".") + ,("isPrefixOf (W.takeDirectory x) x || W.takeDirectory x == \".\"", property $ \(QFilePath x) -> isPrefixOf (W.takeDirectory x) x || W.takeDirectory x == ".") + ,("(\\(getPosixString -> x) (getPosixString -> y) -> SBS.isPrefixOf x y) 
(AFP_P.takeDirectory x) x || AFP_P.takeDirectory x == (\".\")", property $ \(QFilePathAFP_P x) -> (\(getPosixString -> x) (getPosixString -> y) -> SBS.isPrefixOf x y) (AFP_P.takeDirectory x) x || AFP_P.takeDirectory x == (".")) + ,("(\\(getWindowsString -> x) (getWindowsString -> y) -> SBS16.isPrefixOf x y) (AFP_W.takeDirectory x) x || AFP_W.takeDirectory x == (\".\")", property $ \(QFilePathAFP_W x) -> (\(getWindowsString -> x) (getWindowsString -> y) -> SBS16.isPrefixOf x y) (AFP_W.takeDirectory x) x || AFP_W.takeDirectory x == (".")) + ,("P.takeDirectory \"foo\" == \".\"", property $ P.takeDirectory "foo" == ".") + ,("W.takeDirectory \"foo\" == \".\"", property $ W.takeDirectory "foo" == ".") + ,("AFP_P.takeDirectory (\"foo\") == (\".\")", property $ AFP_P.takeDirectory ("foo") == (".")) + ,("AFP_W.takeDirectory (\"foo\") == (\".\")", property $ AFP_W.takeDirectory ("foo") == (".")) + ,("P.takeDirectory \"/\" == \"/\"", property $ P.takeDirectory "/" == "/") + ,("W.takeDirectory \"/\" == \"/\"", property $ W.takeDirectory "/" == "/") + ,("AFP_P.takeDirectory (\"/\") == (\"/\")", property $ AFP_P.takeDirectory ("/") == ("/")) + ,("AFP_W.takeDirectory (\"/\") == (\"/\")", property $ AFP_W.takeDirectory ("/") == ("/")) + ,("P.takeDirectory \"/foo\" == \"/\"", property $ P.takeDirectory "/foo" == "/") + ,("W.takeDirectory \"/foo\" == \"/\"", property $ W.takeDirectory "/foo" == "/") + ,("AFP_P.takeDirectory (\"/foo\") == (\"/\")", property $ AFP_P.takeDirectory ("/foo") == ("/")) + ,("AFP_W.takeDirectory (\"/foo\") == (\"/\")", property $ AFP_W.takeDirectory ("/foo") == ("/")) + ,("P.takeDirectory \"/foo/bar/baz\" == \"/foo/bar\"", property $ P.takeDirectory "/foo/bar/baz" == "/foo/bar") + ,("W.takeDirectory \"/foo/bar/baz\" == \"/foo/bar\"", property $ W.takeDirectory "/foo/bar/baz" == "/foo/bar") + ,("AFP_P.takeDirectory (\"/foo/bar/baz\") == (\"/foo/bar\")", property $ AFP_P.takeDirectory ("/foo/bar/baz") == ("/foo/bar")) + ,("AFP_W.takeDirectory 
(\"/foo/bar/baz\") == (\"/foo/bar\")", property $ AFP_W.takeDirectory ("/foo/bar/baz") == ("/foo/bar")) + ,("P.takeDirectory \"/foo/bar/baz/\" == \"/foo/bar/baz\"", property $ P.takeDirectory "/foo/bar/baz/" == "/foo/bar/baz") + ,("W.takeDirectory \"/foo/bar/baz/\" == \"/foo/bar/baz\"", property $ W.takeDirectory "/foo/bar/baz/" == "/foo/bar/baz") + ,("AFP_P.takeDirectory (\"/foo/bar/baz/\") == (\"/foo/bar/baz\")", property $ AFP_P.takeDirectory ("/foo/bar/baz/") == ("/foo/bar/baz")) + ,("AFP_W.takeDirectory (\"/foo/bar/baz/\") == (\"/foo/bar/baz\")", property $ AFP_W.takeDirectory ("/foo/bar/baz/") == ("/foo/bar/baz")) + ,("P.takeDirectory \"foo/bar/baz\" == \"foo/bar\"", property $ P.takeDirectory "foo/bar/baz" == "foo/bar") + ,("W.takeDirectory \"foo/bar/baz\" == \"foo/bar\"", property $ W.takeDirectory "foo/bar/baz" == "foo/bar") + ,("AFP_P.takeDirectory (\"foo/bar/baz\") == (\"foo/bar\")", property $ AFP_P.takeDirectory ("foo/bar/baz") == ("foo/bar")) + ,("AFP_W.takeDirectory (\"foo/bar/baz\") == (\"foo/bar\")", property $ AFP_W.takeDirectory ("foo/bar/baz") == ("foo/bar")) + ,("W.takeDirectory \"foo\\\\bar\" == \"foo\"", property $ W.takeDirectory "foo\\bar" == "foo") + ,("AFP_W.takeDirectory (\"foo\\\\bar\") == (\"foo\")", property $ AFP_W.takeDirectory ("foo\\bar") == ("foo")) + ,("W.takeDirectory \"foo\\\\bar\\\\\\\\\" == \"foo\\\\bar\"", property $ W.takeDirectory "foo\\bar\\\\" == "foo\\bar") + ,("AFP_W.takeDirectory (\"foo\\\\bar\\\\\\\\\") == (\"foo\\\\bar\")", property $ AFP_W.takeDirectory ("foo\\bar\\\\") == ("foo\\bar")) + ,("W.takeDirectory \"C:\\\\\" == \"C:\\\\\"", property $ W.takeDirectory "C:\\" == "C:\\") + ,("AFP_W.takeDirectory (\"C:\\\\\") == (\"C:\\\\\")", property $ AFP_W.takeDirectory ("C:\\") == ("C:\\")) + ,("P.replaceDirectory \"root/file.ext\" \"/directory/\" == \"/directory/file.ext\"", property $ P.replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext") + ,("W.replaceDirectory \"root/file.ext\" \"/directory/\" == 
\"/directory/file.ext\"", property $ W.replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext") + ,("AFP_P.replaceDirectory (\"root/file.ext\") (\"/directory/\") == (\"/directory/file.ext\")", property $ AFP_P.replaceDirectory ("root/file.ext") ("/directory/") == ("/directory/file.ext")) + ,("AFP_W.replaceDirectory (\"root/file.ext\") (\"/directory/\") == (\"/directory/file.ext\")", property $ AFP_W.replaceDirectory ("root/file.ext") ("/directory/") == ("/directory/file.ext")) + ,("P.replaceDirectory x (P.takeDirectory x) `P.equalFilePath` x", property $ \(QFilePathValidP x) -> P.replaceDirectory x (P.takeDirectory x) `P.equalFilePath` x) + ,("W.replaceDirectory x (W.takeDirectory x) `W.equalFilePath` x", property $ \(QFilePathValidW x) -> W.replaceDirectory x (W.takeDirectory x) `W.equalFilePath` x) + ,("AFP_P.replaceDirectory x (AFP_P.takeDirectory x) `AFP_P.equalFilePath` x", property $ \(QFilePathValidAFP_P x) -> AFP_P.replaceDirectory x (AFP_P.takeDirectory x) `AFP_P.equalFilePath` x) + ,("AFP_W.replaceDirectory x (AFP_W.takeDirectory x) `AFP_W.equalFilePath` x", property $ \(QFilePathValidAFP_W x) -> AFP_W.replaceDirectory x (AFP_W.takeDirectory x) `AFP_W.equalFilePath` x) + ,("\"/directory\" P. \"file.ext\" == \"/directory/file.ext\"", property $ "/directory" P. "file.ext" == "/directory/file.ext") + ,("(\"/directory\") AFP_P. (\"file.ext\") == (\"/directory/file.ext\")", property $ ("/directory") AFP_P. ("file.ext") == ("/directory/file.ext")) + ,("\"/directory\" W. \"file.ext\" == \"/directory\\\\file.ext\"", property $ "/directory" W. "file.ext" == "/directory\\file.ext") + ,("(\"/directory\") AFP_W. (\"file.ext\") == (\"/directory\\\\file.ext\")", property $ ("/directory") AFP_W. ("file.ext") == ("/directory\\file.ext")) + ,("\"directory\" P. \"/file.ext\" == \"/file.ext\"", property $ "directory" P. "/file.ext" == "/file.ext") + ,("\"directory\" W. \"/file.ext\" == \"/file.ext\"", property $ "directory" W. 
"/file.ext" == "/file.ext") + ,("(\"directory\") AFP_P. (\"/file.ext\") == (\"/file.ext\")", property $ ("directory") AFP_P. ("/file.ext") == ("/file.ext")) + ,("(\"directory\") AFP_W. (\"/file.ext\") == (\"/file.ext\")", property $ ("directory") AFP_W. ("/file.ext") == ("/file.ext")) + ,("(P.takeDirectory x P. P.takeFileName x) `P.equalFilePath` x", property $ \(QFilePathValidP x) -> (P.takeDirectory x P. P.takeFileName x) `P.equalFilePath` x) + ,("(W.takeDirectory x W. W.takeFileName x) `W.equalFilePath` x", property $ \(QFilePathValidW x) -> (W.takeDirectory x W. W.takeFileName x) `W.equalFilePath` x) + ,("(AFP_P.takeDirectory x AFP_P. AFP_P.takeFileName x) `AFP_P.equalFilePath` x", property $ \(QFilePathValidAFP_P x) -> (AFP_P.takeDirectory x AFP_P. AFP_P.takeFileName x) `AFP_P.equalFilePath` x) + ,("(AFP_W.takeDirectory x AFP_W. AFP_W.takeFileName x) `AFP_W.equalFilePath` x", property $ \(QFilePathValidAFP_W x) -> (AFP_W.takeDirectory x AFP_W. AFP_W.takeFileName x) `AFP_W.equalFilePath` x) + ,("\"/\" P. \"test\" == \"/test\"", property $ "/" P. "test" == "/test") + ,("(\"/\") AFP_P. (\"test\") == (\"/test\")", property $ ("/") AFP_P. ("test") == ("/test")) + ,("\"home\" P. \"bob\" == \"home/bob\"", property $ "home" P. "bob" == "home/bob") + ,("(\"home\") AFP_P. (\"bob\") == (\"home/bob\")", property $ ("home") AFP_P. ("bob") == ("home/bob")) + ,("\"x:\" P. \"foo\" == \"x:/foo\"", property $ "x:" P. "foo" == "x:/foo") + ,("(\"x:\") AFP_P. (\"foo\") == (\"x:/foo\")", property $ ("x:") AFP_P. ("foo") == ("x:/foo")) + ,("\"C:\\\\foo\" W. \"bar\" == \"C:\\\\foo\\\\bar\"", property $ "C:\\foo" W. "bar" == "C:\\foo\\bar") + ,("(\"C:\\\\foo\") AFP_W. (\"bar\") == (\"C:\\\\foo\\\\bar\")", property $ ("C:\\foo") AFP_W. ("bar") == ("C:\\foo\\bar")) + ,("\"home\" W. \"bob\" == \"home\\\\bob\"", property $ "home" W. "bob" == "home\\bob") + ,("(\"home\") AFP_W. (\"bob\") == (\"home\\\\bob\")", property $ ("home") AFP_W. ("bob") == ("home\\bob")) + ,("\"home\" P. 
\"/bob\" == \"/bob\"", property $ "home" P. "/bob" == "/bob") + ,("(\"home\") AFP_P. (\"/bob\") == (\"/bob\")", property $ ("home") AFP_P. ("/bob") == ("/bob")) + ,("\"home\" W. \"C:\\\\bob\" == \"C:\\\\bob\"", property $ "home" W. "C:\\bob" == "C:\\bob") + ,("(\"home\") AFP_W. (\"C:\\\\bob\") == (\"C:\\\\bob\")", property $ ("home") AFP_W. ("C:\\bob") == ("C:\\bob")) + ,("\"home\" W. \"/bob\" == \"/bob\"", property $ "home" W. "/bob" == "/bob") + ,("(\"home\") AFP_W. (\"/bob\") == (\"/bob\")", property $ ("home") AFP_W. ("/bob") == ("/bob")) + ,("\"home\" W. \"\\\\bob\" == \"\\\\bob\"", property $ "home" W. "\\bob" == "\\bob") + ,("(\"home\") AFP_W. (\"\\\\bob\") == (\"\\\\bob\")", property $ ("home") AFP_W. ("\\bob") == ("\\bob")) + ,("\"C:\\\\home\" W. \"\\\\bob\" == \"\\\\bob\"", property $ "C:\\home" W. "\\bob" == "\\bob") + ,("(\"C:\\\\home\") AFP_W. (\"\\\\bob\") == (\"\\\\bob\")", property $ ("C:\\home") AFP_W. ("\\bob") == ("\\bob")) + ,("\"D:\\\\foo\" W. \"C:bar\" == \"C:bar\"", property $ "D:\\foo" W. "C:bar" == "C:bar") + ,("(\"D:\\\\foo\") AFP_W. (\"C:bar\") == (\"C:bar\")", property $ ("D:\\foo") AFP_W. ("C:bar") == ("C:bar")) + ,("\"C:\\\\foo\" W. \"C:bar\" == \"C:bar\"", property $ "C:\\foo" W. "C:bar" == "C:bar") + ,("(\"C:\\\\foo\") AFP_W. (\"C:bar\") == (\"C:bar\")", property $ ("C:\\foo") AFP_W. 
("C:bar") == ("C:bar")) + ,("P.splitPath \"/directory/file.ext\" == [\"/\", \"directory/\", \"file.ext\"]", property $ P.splitPath "/directory/file.ext" == ["/", "directory/", "file.ext"]) + ,("W.splitPath \"/directory/file.ext\" == [\"/\", \"directory/\", \"file.ext\"]", property $ W.splitPath "/directory/file.ext" == ["/", "directory/", "file.ext"]) + ,("AFP_P.splitPath (\"/directory/file.ext\") == [(\"/\"), (\"directory/\"), (\"file.ext\")]", property $ AFP_P.splitPath ("/directory/file.ext") == [("/"), ("directory/"), ("file.ext")]) + ,("AFP_W.splitPath (\"/directory/file.ext\") == [(\"/\"), (\"directory/\"), (\"file.ext\")]", property $ AFP_W.splitPath ("/directory/file.ext") == [("/"), ("directory/"), ("file.ext")]) + ,("concat (P.splitPath x) == x", property $ \(QFilePath x) -> concat (P.splitPath x) == x) + ,("concat (W.splitPath x) == x", property $ \(QFilePath x) -> concat (W.splitPath x) == x) + ,("(PS . SBS.concat . fmap getPosixString) (AFP_P.splitPath x) == x", property $ \(QFilePathAFP_P x) -> (PS . SBS.concat . fmap getPosixString) (AFP_P.splitPath x) == x) + ,("(WS . SBS16.concat . fmap getWindowsString) (AFP_W.splitPath x) == x", property $ \(QFilePathAFP_W x) -> (WS . SBS16.concat . 
fmap getWindowsString) (AFP_W.splitPath x) == x) + ,("P.splitPath \"test//item/\" == [\"test//\", \"item/\"]", property $ P.splitPath "test//item/" == ["test//", "item/"]) + ,("W.splitPath \"test//item/\" == [\"test//\", \"item/\"]", property $ W.splitPath "test//item/" == ["test//", "item/"]) + ,("AFP_P.splitPath (\"test//item/\") == [(\"test//\"), (\"item/\")]", property $ AFP_P.splitPath ("test//item/") == [("test//"), ("item/")]) + ,("AFP_W.splitPath (\"test//item/\") == [(\"test//\"), (\"item/\")]", property $ AFP_W.splitPath ("test//item/") == [("test//"), ("item/")]) + ,("P.splitPath \"test/item/file\" == [\"test/\", \"item/\", \"file\"]", property $ P.splitPath "test/item/file" == ["test/", "item/", "file"]) + ,("W.splitPath \"test/item/file\" == [\"test/\", \"item/\", \"file\"]", property $ W.splitPath "test/item/file" == ["test/", "item/", "file"]) + ,("AFP_P.splitPath (\"test/item/file\") == [(\"test/\"), (\"item/\"), (\"file\")]", property $ AFP_P.splitPath ("test/item/file") == [("test/"), ("item/"), ("file")]) + ,("AFP_W.splitPath (\"test/item/file\") == [(\"test/\"), (\"item/\"), (\"file\")]", property $ AFP_W.splitPath ("test/item/file") == [("test/"), ("item/"), ("file")]) + ,("P.splitPath \"\" == []", property $ P.splitPath "" == []) + ,("W.splitPath \"\" == []", property $ W.splitPath "" == []) + ,("AFP_P.splitPath (\"\") == []", property $ AFP_P.splitPath ("") == []) + ,("AFP_W.splitPath (\"\") == []", property $ AFP_W.splitPath ("") == []) + ,("W.splitPath \"c:\\\\test\\\\path\" == [\"c:\\\\\", \"test\\\\\", \"path\"]", property $ W.splitPath "c:\\test\\path" == ["c:\\", "test\\", "path"]) + ,("AFP_W.splitPath (\"c:\\\\test\\\\path\") == [(\"c:\\\\\"), (\"test\\\\\"), (\"path\")]", property $ AFP_W.splitPath ("c:\\test\\path") == [("c:\\"), ("test\\"), ("path")]) + ,("P.splitPath \"/file/test\" == [\"/\", \"file/\", \"test\"]", property $ P.splitPath "/file/test" == ["/", "file/", "test"]) + ,("AFP_P.splitPath (\"/file/test\") == [(\"/\"), 
(\"file/\"), (\"test\")]", property $ AFP_P.splitPath ("/file/test") == [("/"), ("file/"), ("test")]) + ,("P.splitDirectories \"/directory/file.ext\" == [\"/\", \"directory\", \"file.ext\"]", property $ P.splitDirectories "/directory/file.ext" == ["/", "directory", "file.ext"]) + ,("W.splitDirectories \"/directory/file.ext\" == [\"/\", \"directory\", \"file.ext\"]", property $ W.splitDirectories "/directory/file.ext" == ["/", "directory", "file.ext"]) + ,("AFP_P.splitDirectories (\"/directory/file.ext\") == [(\"/\"), (\"directory\"), (\"file.ext\")]", property $ AFP_P.splitDirectories ("/directory/file.ext") == [("/"), ("directory"), ("file.ext")]) + ,("AFP_W.splitDirectories (\"/directory/file.ext\") == [(\"/\"), (\"directory\"), (\"file.ext\")]", property $ AFP_W.splitDirectories ("/directory/file.ext") == [("/"), ("directory"), ("file.ext")]) + ,("P.splitDirectories \"test/file\" == [\"test\", \"file\"]", property $ P.splitDirectories "test/file" == ["test", "file"]) + ,("W.splitDirectories \"test/file\" == [\"test\", \"file\"]", property $ W.splitDirectories "test/file" == ["test", "file"]) + ,("AFP_P.splitDirectories (\"test/file\") == [(\"test\"), (\"file\")]", property $ AFP_P.splitDirectories ("test/file") == [("test"), ("file")]) + ,("AFP_W.splitDirectories (\"test/file\") == [(\"test\"), (\"file\")]", property $ AFP_W.splitDirectories ("test/file") == [("test"), ("file")]) + ,("P.splitDirectories \"/test/file\" == [\"/\", \"test\", \"file\"]", property $ P.splitDirectories "/test/file" == ["/", "test", "file"]) + ,("W.splitDirectories \"/test/file\" == [\"/\", \"test\", \"file\"]", property $ W.splitDirectories "/test/file" == ["/", "test", "file"]) + ,("AFP_P.splitDirectories (\"/test/file\") == [(\"/\"), (\"test\"), (\"file\")]", property $ AFP_P.splitDirectories ("/test/file") == [("/"), ("test"), ("file")]) + ,("AFP_W.splitDirectories (\"/test/file\") == [(\"/\"), (\"test\"), (\"file\")]", property $ AFP_W.splitDirectories ("/test/file") == [("/"), 
("test"), ("file")]) + ,("W.splitDirectories \"C:\\\\test\\\\file\" == [\"C:\\\\\", \"test\", \"file\"]", property $ W.splitDirectories "C:\\test\\file" == ["C:\\", "test", "file"]) + ,("AFP_W.splitDirectories (\"C:\\\\test\\\\file\") == [(\"C:\\\\\"), (\"test\"), (\"file\")]", property $ AFP_W.splitDirectories ("C:\\test\\file") == [("C:\\"), ("test"), ("file")]) + ,("P.joinPath (P.splitDirectories x) `P.equalFilePath` x", property $ \(QFilePathValidP x) -> P.joinPath (P.splitDirectories x) `P.equalFilePath` x) + ,("W.joinPath (W.splitDirectories x) `W.equalFilePath` x", property $ \(QFilePathValidW x) -> W.joinPath (W.splitDirectories x) `W.equalFilePath` x) + ,("AFP_P.joinPath (AFP_P.splitDirectories x) `AFP_P.equalFilePath` x", property $ \(QFilePathValidAFP_P x) -> AFP_P.joinPath (AFP_P.splitDirectories x) `AFP_P.equalFilePath` x) + ,("AFP_W.joinPath (AFP_W.splitDirectories x) `AFP_W.equalFilePath` x", property $ \(QFilePathValidAFP_W x) -> AFP_W.joinPath (AFP_W.splitDirectories x) `AFP_W.equalFilePath` x) + ,("P.splitDirectories \"\" == []", property $ P.splitDirectories "" == []) + ,("W.splitDirectories \"\" == []", property $ W.splitDirectories "" == []) + ,("AFP_P.splitDirectories (\"\") == []", property $ AFP_P.splitDirectories ("") == []) + ,("AFP_W.splitDirectories (\"\") == []", property $ AFP_W.splitDirectories ("") == []) + ,("W.splitDirectories \"C:\\\\test\\\\\\\\\\\\file\" == [\"C:\\\\\", \"test\", \"file\"]", property $ W.splitDirectories "C:\\test\\\\\\file" == ["C:\\", "test", "file"]) + ,("AFP_W.splitDirectories (\"C:\\\\test\\\\\\\\\\\\file\") == [(\"C:\\\\\"), (\"test\"), (\"file\")]", property $ AFP_W.splitDirectories ("C:\\test\\\\\\file") == [("C:\\"), ("test"), ("file")]) + ,("P.splitDirectories \"/test///file\" == [\"/\", \"test\", \"file\"]", property $ P.splitDirectories "/test///file" == ["/", "test", "file"]) + ,("W.splitDirectories \"/test///file\" == [\"/\", \"test\", \"file\"]", property $ W.splitDirectories "/test///file" == 
["/", "test", "file"]) + ,("AFP_P.splitDirectories (\"/test///file\") == [(\"/\"), (\"test\"), (\"file\")]", property $ AFP_P.splitDirectories ("/test///file") == [("/"), ("test"), ("file")]) + ,("AFP_W.splitDirectories (\"/test///file\") == [(\"/\"), (\"test\"), (\"file\")]", property $ AFP_W.splitDirectories ("/test///file") == [("/"), ("test"), ("file")]) + ,("P.joinPath z == foldr (P.) \"\" z", property $ \z -> P.joinPath z == foldr (P.) "" z) + ,("W.joinPath z == foldr (W.) \"\" z", property $ \z -> W.joinPath z == foldr (W.) "" z) + ,("AFP_P.joinPath z == foldr (AFP_P.) (\"\") z", property $ \(QFilePathsAFP_P z) -> AFP_P.joinPath z == foldr (AFP_P.) ("") z) + ,("AFP_W.joinPath z == foldr (AFP_W.) (\"\") z", property $ \(QFilePathsAFP_W z) -> AFP_W.joinPath z == foldr (AFP_W.) ("") z) + ,("P.joinPath [\"/\", \"directory/\", \"file.ext\"] == \"/directory/file.ext\"", property $ P.joinPath ["/", "directory/", "file.ext"] == "/directory/file.ext") + ,("W.joinPath [\"/\", \"directory/\", \"file.ext\"] == \"/directory/file.ext\"", property $ W.joinPath ["/", "directory/", "file.ext"] == "/directory/file.ext") + ,("AFP_P.joinPath [(\"/\"), (\"directory/\"), (\"file.ext\")] == (\"/directory/file.ext\")", property $ AFP_P.joinPath [("/"), ("directory/"), ("file.ext")] == ("/directory/file.ext")) + ,("AFP_W.joinPath [(\"/\"), (\"directory/\"), (\"file.ext\")] == (\"/directory/file.ext\")", property $ AFP_W.joinPath [("/"), ("directory/"), ("file.ext")] == ("/directory/file.ext")) + ,("P.joinPath (P.splitPath x) == x", property $ \(QFilePathValidP x) -> P.joinPath (P.splitPath x) == x) + ,("W.joinPath (W.splitPath x) == x", property $ \(QFilePathValidW x) -> W.joinPath (W.splitPath x) == x) + ,("AFP_P.joinPath (AFP_P.splitPath x) == x", property $ \(QFilePathValidAFP_P x) -> AFP_P.joinPath (AFP_P.splitPath x) == x) + ,("AFP_W.joinPath (AFP_W.splitPath x) == x", property $ \(QFilePathValidAFP_W x) -> AFP_W.joinPath (AFP_W.splitPath x) == x) + ,("P.joinPath [] == \"\"", 
property $ P.joinPath [] == "") + ,("W.joinPath [] == \"\"", property $ W.joinPath [] == "") + ,("AFP_P.joinPath [] == (\"\")", property $ AFP_P.joinPath [] == ("")) + ,("AFP_W.joinPath [] == (\"\")", property $ AFP_W.joinPath [] == ("")) + ,("P.joinPath [\"test\", \"file\", \"path\"] == \"test/file/path\"", property $ P.joinPath ["test", "file", "path"] == "test/file/path") + ,("AFP_P.joinPath [(\"test\"), (\"file\"), (\"path\")] == (\"test/file/path\")", property $ AFP_P.joinPath [("test"), ("file"), ("path")] == ("test/file/path")) + ,("x == y ==> P.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> x == y ==> P.equalFilePath x y) + ,("x == y ==> W.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> x == y ==> W.equalFilePath x y) + ,("x == y ==> AFP_P.equalFilePath x y", property $ \(QFilePathAFP_P x) (QFilePathAFP_P y) -> x == y ==> AFP_P.equalFilePath x y) + ,("x == y ==> AFP_W.equalFilePath x y", property $ \(QFilePathAFP_W x) (QFilePathAFP_W y) -> x == y ==> AFP_W.equalFilePath x y) + ,("P.normalise x == P.normalise y ==> P.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> P.normalise x == P.normalise y ==> P.equalFilePath x y) + ,("W.normalise x == W.normalise y ==> W.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> W.normalise x == W.normalise y ==> W.equalFilePath x y) + ,("AFP_P.normalise x == AFP_P.normalise y ==> AFP_P.equalFilePath x y", property $ \(QFilePathAFP_P x) (QFilePathAFP_P y) -> AFP_P.normalise x == AFP_P.normalise y ==> AFP_P.equalFilePath x y) + ,("AFP_W.normalise x == AFP_W.normalise y ==> AFP_W.equalFilePath x y", property $ \(QFilePathAFP_W x) (QFilePathAFP_W y) -> AFP_W.normalise x == AFP_W.normalise y ==> AFP_W.equalFilePath x y) + ,("P.equalFilePath \"foo\" \"foo/\"", property $ P.equalFilePath "foo" "foo/") + ,("W.equalFilePath \"foo\" \"foo/\"", property $ W.equalFilePath "foo" "foo/") + ,("AFP_P.equalFilePath (\"foo\") (\"foo/\")", property $ AFP_P.equalFilePath ("foo") ("foo/")) 
+ ,("AFP_W.equalFilePath (\"foo\") (\"foo/\")", property $ AFP_W.equalFilePath ("foo") ("foo/")) + ,("not (P.equalFilePath \"/a/../c\" \"/c\")", property $ not (P.equalFilePath "/a/../c" "/c")) + ,("not (W.equalFilePath \"/a/../c\" \"/c\")", property $ not (W.equalFilePath "/a/../c" "/c")) + ,("not (AFP_P.equalFilePath (\"/a/../c\") (\"/c\"))", property $ not (AFP_P.equalFilePath ("/a/../c") ("/c"))) + ,("not (AFP_W.equalFilePath (\"/a/../c\") (\"/c\"))", property $ not (AFP_W.equalFilePath ("/a/../c") ("/c"))) + ,("not (P.equalFilePath \"foo\" \"/foo\")", property $ not (P.equalFilePath "foo" "/foo")) + ,("not (W.equalFilePath \"foo\" \"/foo\")", property $ not (W.equalFilePath "foo" "/foo")) + ,("not (AFP_P.equalFilePath (\"foo\") (\"/foo\"))", property $ not (AFP_P.equalFilePath ("foo") ("/foo"))) + ,("not (AFP_W.equalFilePath (\"foo\") (\"/foo\"))", property $ not (AFP_W.equalFilePath ("foo") ("/foo"))) + ,("not (P.equalFilePath \"foo\" \"FOO\")", property $ not (P.equalFilePath "foo" "FOO")) + ,("not (AFP_P.equalFilePath (\"foo\") (\"FOO\"))", property $ not (AFP_P.equalFilePath ("foo") ("FOO"))) + ,("W.equalFilePath \"foo\" \"FOO\"", property $ W.equalFilePath "foo" "FOO") + ,("AFP_W.equalFilePath (\"foo\") (\"FOO\")", property $ AFP_W.equalFilePath ("foo") ("FOO")) + ,("not (W.equalFilePath \"C:\" \"C:/\")", property $ not (W.equalFilePath "C:" "C:/")) + ,("not (AFP_W.equalFilePath (\"C:\") (\"C:/\"))", property $ not (AFP_W.equalFilePath ("C:") ("C:/"))) + ,("P.makeRelative \"/directory\" \"/directory/file.ext\" == \"file.ext\"", property $ P.makeRelative "/directory" "/directory/file.ext" == "file.ext") + ,("W.makeRelative \"/directory\" \"/directory/file.ext\" == \"file.ext\"", property $ W.makeRelative "/directory" "/directory/file.ext" == "file.ext") + ,("AFP_P.makeRelative (\"/directory\") (\"/directory/file.ext\") == (\"file.ext\")", property $ AFP_P.makeRelative ("/directory") ("/directory/file.ext") == ("file.ext")) + ,("AFP_W.makeRelative 
(\"/directory\") (\"/directory/file.ext\") == (\"file.ext\")", property $ AFP_W.makeRelative ("/directory") ("/directory/file.ext") == ("file.ext")) + ,("P.makeRelative (P.takeDirectory x) x `P.equalFilePath` P.takeFileName x", property $ \(QFilePathValidP x) -> P.makeRelative (P.takeDirectory x) x `P.equalFilePath` P.takeFileName x) + ,("W.makeRelative (W.takeDirectory x) x `W.equalFilePath` W.takeFileName x", property $ \(QFilePathValidW x) -> W.makeRelative (W.takeDirectory x) x `W.equalFilePath` W.takeFileName x) + ,("AFP_P.makeRelative (AFP_P.takeDirectory x) x `AFP_P.equalFilePath` AFP_P.takeFileName x", property $ \(QFilePathValidAFP_P x) -> AFP_P.makeRelative (AFP_P.takeDirectory x) x `AFP_P.equalFilePath` AFP_P.takeFileName x) + ,("AFP_W.makeRelative (AFP_W.takeDirectory x) x `AFP_W.equalFilePath` AFP_W.takeFileName x", property $ \(QFilePathValidAFP_W x) -> AFP_W.makeRelative (AFP_W.takeDirectory x) x `AFP_W.equalFilePath` AFP_W.takeFileName x) + ,("P.makeRelative x x == \".\"", property $ \(QFilePath x) -> P.makeRelative x x == ".") + ,("W.makeRelative x x == \".\"", property $ \(QFilePath x) -> W.makeRelative x x == ".") + ,("AFP_P.makeRelative x x == (\".\")", property $ \(QFilePathAFP_P x) -> AFP_P.makeRelative x x == (".")) + ,("AFP_W.makeRelative x x == (\".\")", property $ \(QFilePathAFP_W x) -> AFP_W.makeRelative x x == (".")) + ,("P.equalFilePath x y || (P.isRelative x && P.makeRelative y x == x) || P.equalFilePath (y P. P.makeRelative y x) x", property $ \(QFilePathValidP x) (QFilePathValidP y) -> P.equalFilePath x y || (P.isRelative x && P.makeRelative y x == x) || P.equalFilePath (y P. P.makeRelative y x) x) + ,("W.equalFilePath x y || (W.isRelative x && W.makeRelative y x == x) || W.equalFilePath (y W. W.makeRelative y x) x", property $ \(QFilePathValidW x) (QFilePathValidW y) -> W.equalFilePath x y || (W.isRelative x && W.makeRelative y x == x) || W.equalFilePath (y W. 
W.makeRelative y x) x) + ,("AFP_P.equalFilePath x y || (AFP_P.isRelative x && AFP_P.makeRelative y x == x) || AFP_P.equalFilePath (y AFP_P. AFP_P.makeRelative y x) x", property $ \(QFilePathValidAFP_P x) (QFilePathValidAFP_P y) -> AFP_P.equalFilePath x y || (AFP_P.isRelative x && AFP_P.makeRelative y x == x) || AFP_P.equalFilePath (y AFP_P. AFP_P.makeRelative y x) x) + ,("AFP_W.equalFilePath x y || (AFP_W.isRelative x && AFP_W.makeRelative y x == x) || AFP_W.equalFilePath (y AFP_W. AFP_W.makeRelative y x) x", property $ \(QFilePathValidAFP_W x) (QFilePathValidAFP_W y) -> AFP_W.equalFilePath x y || (AFP_W.isRelative x && AFP_W.makeRelative y x == x) || AFP_W.equalFilePath (y AFP_W. AFP_W.makeRelative y x) x) + ,("W.makeRelative \"C:\\\\Home\" \"c:\\\\home\\\\bob\" == \"bob\"", property $ W.makeRelative "C:\\Home" "c:\\home\\bob" == "bob") + ,("AFP_W.makeRelative (\"C:\\\\Home\") (\"c:\\\\home\\\\bob\") == (\"bob\")", property $ AFP_W.makeRelative ("C:\\Home") ("c:\\home\\bob") == ("bob")) + ,("W.makeRelative \"C:\\\\Home\" \"c:/home/bob\" == \"bob\"", property $ W.makeRelative "C:\\Home" "c:/home/bob" == "bob") + ,("AFP_W.makeRelative (\"C:\\\\Home\") (\"c:/home/bob\") == (\"bob\")", property $ AFP_W.makeRelative ("C:\\Home") ("c:/home/bob") == ("bob")) + ,("W.makeRelative \"C:\\\\Home\" \"D:\\\\Home\\\\Bob\" == \"D:\\\\Home\\\\Bob\"", property $ W.makeRelative "C:\\Home" "D:\\Home\\Bob" == "D:\\Home\\Bob") + ,("AFP_W.makeRelative (\"C:\\\\Home\") (\"D:\\\\Home\\\\Bob\") == (\"D:\\\\Home\\\\Bob\")", property $ AFP_W.makeRelative ("C:\\Home") ("D:\\Home\\Bob") == ("D:\\Home\\Bob")) + ,("W.makeRelative \"C:\\\\Home\" \"C:Home\\\\Bob\" == \"C:Home\\\\Bob\"", property $ W.makeRelative "C:\\Home" "C:Home\\Bob" == "C:Home\\Bob") + ,("AFP_W.makeRelative (\"C:\\\\Home\") (\"C:Home\\\\Bob\") == (\"C:Home\\\\Bob\")", property $ AFP_W.makeRelative ("C:\\Home") ("C:Home\\Bob") == ("C:Home\\Bob")) + ,("W.makeRelative \"/Home\" \"/home/bob\" == \"bob\"", property $ W.makeRelative 
"/Home" "/home/bob" == "bob") + ,("AFP_W.makeRelative (\"/Home\") (\"/home/bob\") == (\"bob\")", property $ AFP_W.makeRelative ("/Home") ("/home/bob") == ("bob")) + ,("W.makeRelative \"/\" \"//\" == \"//\"", property $ W.makeRelative "/" "//" == "//") + ,("AFP_W.makeRelative (\"/\") (\"//\") == (\"//\")", property $ AFP_W.makeRelative ("/") ("//") == ("//")) + ,("P.makeRelative \"/Home\" \"/home/bob\" == \"/home/bob\"", property $ P.makeRelative "/Home" "/home/bob" == "/home/bob") + ,("AFP_P.makeRelative (\"/Home\") (\"/home/bob\") == (\"/home/bob\")", property $ AFP_P.makeRelative ("/Home") ("/home/bob") == ("/home/bob")) + ,("P.makeRelative \"/home/\" \"/home/bob/foo/bar\" == \"bob/foo/bar\"", property $ P.makeRelative "/home/" "/home/bob/foo/bar" == "bob/foo/bar") + ,("AFP_P.makeRelative (\"/home/\") (\"/home/bob/foo/bar\") == (\"bob/foo/bar\")", property $ AFP_P.makeRelative ("/home/") ("/home/bob/foo/bar") == ("bob/foo/bar")) + ,("P.makeRelative \"/fred\" \"bob\" == \"bob\"", property $ P.makeRelative "/fred" "bob" == "bob") + ,("AFP_P.makeRelative (\"/fred\") (\"bob\") == (\"bob\")", property $ AFP_P.makeRelative ("/fred") ("bob") == ("bob")) + ,("P.makeRelative \"/file/test\" \"/file/test/fred\" == \"fred\"", property $ P.makeRelative "/file/test" "/file/test/fred" == "fred") + ,("AFP_P.makeRelative (\"/file/test\") (\"/file/test/fred\") == (\"fred\")", property $ AFP_P.makeRelative ("/file/test") ("/file/test/fred") == ("fred")) + ,("P.makeRelative \"/file/test\" \"/file/test/fred/\" == \"fred/\"", property $ P.makeRelative "/file/test" "/file/test/fred/" == "fred/") + ,("AFP_P.makeRelative (\"/file/test\") (\"/file/test/fred/\") == (\"fred/\")", property $ AFP_P.makeRelative ("/file/test") ("/file/test/fred/") == ("fred/")) + ,("P.makeRelative \"some/path\" \"some/path/a/b/c\" == \"a/b/c\"", property $ P.makeRelative "some/path" "some/path/a/b/c" == "a/b/c") + ,("AFP_P.makeRelative (\"some/path\") (\"some/path/a/b/c\") == (\"a/b/c\")", property $ 
AFP_P.makeRelative ("some/path") ("some/path/a/b/c") == ("a/b/c")) + ,("P.normalise \"/file/\\\\test////\" == \"/file/\\\\test/\"", property $ P.normalise "/file/\\test////" == "/file/\\test/") + ,("AFP_P.normalise (\"/file/\\\\test////\") == (\"/file/\\\\test/\")", property $ AFP_P.normalise ("/file/\\test////") == ("/file/\\test/")) + ,("P.normalise \"/file/./test\" == \"/file/test\"", property $ P.normalise "/file/./test" == "/file/test") + ,("AFP_P.normalise (\"/file/./test\") == (\"/file/test\")", property $ AFP_P.normalise ("/file/./test") == ("/file/test")) + ,("P.normalise \"/test/file/../bob/fred/\" == \"/test/file/../bob/fred/\"", property $ P.normalise "/test/file/../bob/fred/" == "/test/file/../bob/fred/") + ,("AFP_P.normalise (\"/test/file/../bob/fred/\") == (\"/test/file/../bob/fred/\")", property $ AFP_P.normalise ("/test/file/../bob/fred/") == ("/test/file/../bob/fred/")) + ,("P.normalise \"../bob/fred/\" == \"../bob/fred/\"", property $ P.normalise "../bob/fred/" == "../bob/fred/") + ,("AFP_P.normalise (\"../bob/fred/\") == (\"../bob/fred/\")", property $ AFP_P.normalise ("../bob/fred/") == ("../bob/fred/")) + ,("P.normalise \"/a/../c\" == \"/a/../c\"", property $ P.normalise "/a/../c" == "/a/../c") + ,("AFP_P.normalise (\"/a/../c\") == (\"/a/../c\")", property $ AFP_P.normalise ("/a/../c") == ("/a/../c")) + ,("P.normalise \"./bob/fred/\" == \"bob/fred/\"", property $ P.normalise "./bob/fred/" == "bob/fred/") + ,("AFP_P.normalise (\"./bob/fred/\") == (\"bob/fred/\")", property $ AFP_P.normalise ("./bob/fred/") == ("bob/fred/")) + ,("W.normalise \"c:\\\\file/bob\\\\\" == \"C:\\\\file\\\\bob\\\\\"", property $ W.normalise "c:\\file/bob\\" == "C:\\file\\bob\\") + ,("AFP_W.normalise (\"c:\\\\file/bob\\\\\") == (\"C:\\\\file\\\\bob\\\\\")", property $ AFP_W.normalise ("c:\\file/bob\\") == ("C:\\file\\bob\\")) + ,("W.normalise \"c:\\\\\" == \"C:\\\\\"", property $ W.normalise "c:\\" == "C:\\") + ,("AFP_W.normalise (\"c:\\\\\") == (\"C:\\\\\")", property 
$ AFP_W.normalise ("c:\\") == ("C:\\")) + ,("W.normalise \"c:\\\\\\\\\\\\\\\\\" == \"C:\\\\\"", property $ W.normalise "c:\\\\\\\\" == "C:\\") + ,("AFP_W.normalise (\"c:\\\\\\\\\\\\\\\\\") == (\"C:\\\\\")", property $ AFP_W.normalise ("c:\\\\\\\\") == ("C:\\")) + ,("W.normalise \"C:.\\\\\" == \"C:\"", property $ W.normalise "C:.\\" == "C:") + ,("AFP_W.normalise (\"C:.\\\\\") == (\"C:\")", property $ AFP_W.normalise ("C:.\\") == ("C:")) + ,("W.normalise \"\\\\\\\\server\\\\test\" == \"\\\\\\\\server\\\\test\"", property $ W.normalise "\\\\server\\test" == "\\\\server\\test") + ,("AFP_W.normalise (\"\\\\\\\\server\\\\test\") == (\"\\\\\\\\server\\\\test\")", property $ AFP_W.normalise ("\\\\server\\test") == ("\\\\server\\test")) + ,("W.normalise \"//server/test\" == \"\\\\\\\\server\\\\test\"", property $ W.normalise "//server/test" == "\\\\server\\test") + ,("AFP_W.normalise (\"//server/test\") == (\"\\\\\\\\server\\\\test\")", property $ AFP_W.normalise ("//server/test") == ("\\\\server\\test")) + ,("W.normalise \"c:/file\" == \"C:\\\\file\"", property $ W.normalise "c:/file" == "C:\\file") + ,("AFP_W.normalise (\"c:/file\") == (\"C:\\\\file\")", property $ AFP_W.normalise ("c:/file") == ("C:\\file")) + ,("W.normalise \"/file\" == \"\\\\file\"", property $ W.normalise "/file" == "\\file") + ,("AFP_W.normalise (\"/file\") == (\"\\\\file\")", property $ AFP_W.normalise ("/file") == ("\\file")) + ,("W.normalise \"\\\\\" == \"\\\\\"", property $ W.normalise "\\" == "\\") + ,("AFP_W.normalise (\"\\\\\") == (\"\\\\\")", property $ AFP_W.normalise ("\\") == ("\\")) + ,("W.normalise \"/./\" == \"\\\\\"", property $ W.normalise "/./" == "\\") + ,("AFP_W.normalise (\"/./\") == (\"\\\\\")", property $ AFP_W.normalise ("/./") == ("\\")) + ,("P.normalise \".\" == \".\"", property $ P.normalise "." == ".") + ,("W.normalise \".\" == \".\"", property $ W.normalise "." 
== ".") + ,("AFP_P.normalise (\".\") == (\".\")", property $ AFP_P.normalise (".") == (".")) + ,("AFP_W.normalise (\".\") == (\".\")", property $ AFP_W.normalise (".") == (".")) + ,("P.normalise \"./\" == \"./\"", property $ P.normalise "./" == "./") + ,("AFP_P.normalise (\"./\") == (\"./\")", property $ AFP_P.normalise ("./") == ("./")) + ,("P.normalise \"./.\" == \"./\"", property $ P.normalise "./." == "./") + ,("AFP_P.normalise (\"./.\") == (\"./\")", property $ AFP_P.normalise ("./.") == ("./")) + ,("P.normalise \"/./\" == \"/\"", property $ P.normalise "/./" == "/") + ,("AFP_P.normalise (\"/./\") == (\"/\")", property $ AFP_P.normalise ("/./") == ("/")) + ,("P.normalise \"/\" == \"/\"", property $ P.normalise "/" == "/") + ,("AFP_P.normalise (\"/\") == (\"/\")", property $ AFP_P.normalise ("/") == ("/")) + ,("P.normalise \"bob/fred/.\" == \"bob/fred/\"", property $ P.normalise "bob/fred/." == "bob/fred/") + ,("AFP_P.normalise (\"bob/fred/.\") == (\"bob/fred/\")", property $ AFP_P.normalise ("bob/fred/.") == ("bob/fred/")) + ,("P.normalise \"//home\" == \"/home\"", property $ P.normalise "//home" == "/home") + ,("AFP_P.normalise (\"//home\") == (\"/home\")", property $ AFP_P.normalise ("//home") == ("/home")) + ,("P.isValid \"\" == False", property $ P.isValid "" == False) + ,("W.isValid \"\" == False", property $ W.isValid "" == False) + ,("AFP_P.isValid (\"\") == False", property $ AFP_P.isValid ("") == False) + ,("AFP_W.isValid (\"\") == False", property $ AFP_W.isValid ("") == False) + ,("P.isValid \"\\0\" == False", property $ P.isValid "\0" == False) + ,("W.isValid \"\\0\" == False", property $ W.isValid "\0" == False) + ,("AFP_P.isValid (\"\\0\") == False", property $ AFP_P.isValid ("\0") == False) + ,("AFP_W.isValid (\"\\0\") == False", property $ AFP_W.isValid ("\0") == False) + ,("P.isValid \"/random_ path:*\" == True", property $ P.isValid "/random_ path:*" == True) + ,("AFP_P.isValid (\"/random_ path:*\") == True", property $ AFP_P.isValid 
("/random_ path:*") == True) + ,("P.isValid x == not (null x)", property $ \(QFilePath x) -> P.isValid x == not (null x)) + ,("AFP_P.isValid x == not ((SBS.null . getPosixString) x)", property $ \(QFilePathAFP_P x) -> AFP_P.isValid x == not ((SBS.null . getPosixString) x)) + ,("W.isValid \"c:\\\\test\" == True", property $ W.isValid "c:\\test" == True) + ,("AFP_W.isValid (\"c:\\\\test\") == True", property $ AFP_W.isValid ("c:\\test") == True) + ,("W.isValid \"c:\\\\test:of_test\" == False", property $ W.isValid "c:\\test:of_test" == False) + ,("AFP_W.isValid (\"c:\\\\test:of_test\") == False", property $ AFP_W.isValid ("c:\\test:of_test") == False) + ,("W.isValid \"test*\" == False", property $ W.isValid "test*" == False) + ,("AFP_W.isValid (\"test*\") == False", property $ AFP_W.isValid ("test*") == False) + ,("W.isValid \"c:\\\\test\\\\nul\" == False", property $ W.isValid "c:\\test\\nul" == False) + ,("AFP_W.isValid (\"c:\\\\test\\\\nul\") == False", property $ AFP_W.isValid ("c:\\test\\nul") == False) + ,("W.isValid \"c:\\\\test\\\\prn.txt\" == False", property $ W.isValid "c:\\test\\prn.txt" == False) + ,("AFP_W.isValid (\"c:\\\\test\\\\prn.txt\") == False", property $ AFP_W.isValid ("c:\\test\\prn.txt") == False) + ,("W.isValid \"c:\\\\nul\\\\file\" == False", property $ W.isValid "c:\\nul\\file" == False) + ,("AFP_W.isValid (\"c:\\\\nul\\\\file\") == False", property $ AFP_W.isValid ("c:\\nul\\file") == False) + ,("W.isValid \"\\\\\\\\\" == False", property $ W.isValid "\\\\" == False) + ,("AFP_W.isValid (\"\\\\\\\\\") == False", property $ AFP_W.isValid ("\\\\") == False) + ,("W.isValid \"\\\\\\\\\\\\foo\" == False", property $ W.isValid "\\\\\\foo" == False) + ,("AFP_W.isValid (\"\\\\\\\\\\\\foo\") == False", property $ AFP_W.isValid ("\\\\\\foo") == False) + ,("W.isValid \"\\\\\\\\?\\\\D:file\" == False", property $ W.isValid "\\\\?\\D:file" == False) + ,("AFP_W.isValid (\"\\\\\\\\?\\\\D:file\") == False", property $ AFP_W.isValid ("\\\\?\\D:file") == 
False) + ,("W.isValid \"foo\\tbar\" == False", property $ W.isValid "foo\tbar" == False) + ,("AFP_W.isValid (\"foo\\tbar\") == False", property $ AFP_W.isValid ("foo\tbar") == False) + ,("W.isValid \"nul .txt\" == False", property $ W.isValid "nul .txt" == False) + ,("AFP_W.isValid (\"nul .txt\") == False", property $ AFP_W.isValid ("nul .txt") == False) + ,("W.isValid \" nul.txt\" == True", property $ W.isValid " nul.txt" == True) + ,("AFP_W.isValid (\" nul.txt\") == True", property $ AFP_W.isValid (" nul.txt") == True) + ,("P.isValid (P.makeValid x)", property $ \(QFilePath x) -> P.isValid (P.makeValid x)) + ,("W.isValid (W.makeValid x)", property $ \(QFilePath x) -> W.isValid (W.makeValid x)) + ,("AFP_P.isValid (AFP_P.makeValid x)", property $ \(QFilePathAFP_P x) -> AFP_P.isValid (AFP_P.makeValid x)) + ,("AFP_W.isValid (AFP_W.makeValid x)", property $ \(QFilePathAFP_W x) -> AFP_W.isValid (AFP_W.makeValid x)) + ,("P.isValid x ==> P.makeValid x == x", property $ \(QFilePath x) -> P.isValid x ==> P.makeValid x == x) + ,("W.isValid x ==> W.makeValid x == x", property $ \(QFilePath x) -> W.isValid x ==> W.makeValid x == x) + ,("AFP_P.isValid x ==> AFP_P.makeValid x == x", property $ \(QFilePathAFP_P x) -> AFP_P.isValid x ==> AFP_P.makeValid x == x) + ,("AFP_W.isValid x ==> AFP_W.makeValid x == x", property $ \(QFilePathAFP_W x) -> AFP_W.isValid x ==> AFP_W.makeValid x == x) + ,("P.makeValid \"\" == \"_\"", property $ P.makeValid "" == "_") + ,("W.makeValid \"\" == \"_\"", property $ W.makeValid "" == "_") + ,("AFP_P.makeValid (\"\") == (\"_\")", property $ AFP_P.makeValid ("") == ("_")) + ,("AFP_W.makeValid (\"\") == (\"_\")", property $ AFP_W.makeValid ("") == ("_")) + ,("P.makeValid \"file\\0name\" == \"file_name\"", property $ P.makeValid "file\0name" == "file_name") + ,("W.makeValid \"file\\0name\" == \"file_name\"", property $ W.makeValid "file\0name" == "file_name") + ,("AFP_P.makeValid (\"file\\0name\") == (\"file_name\")", property $ AFP_P.makeValid 
("file\0name") == ("file_name")) + ,("AFP_W.makeValid (\"file\\0name\") == (\"file_name\")", property $ AFP_W.makeValid ("file\0name") == ("file_name")) + ,("W.makeValid \"c:\\\\already\\\\/valid\" == \"c:\\\\already\\\\/valid\"", property $ W.makeValid "c:\\already\\/valid" == "c:\\already\\/valid") + ,("AFP_W.makeValid (\"c:\\\\already\\\\/valid\") == (\"c:\\\\already\\\\/valid\")", property $ AFP_W.makeValid ("c:\\already\\/valid") == ("c:\\already\\/valid")) + ,("W.makeValid \"c:\\\\test:of_test\" == \"c:\\\\test_of_test\"", property $ W.makeValid "c:\\test:of_test" == "c:\\test_of_test") + ,("AFP_W.makeValid (\"c:\\\\test:of_test\") == (\"c:\\\\test_of_test\")", property $ AFP_W.makeValid ("c:\\test:of_test") == ("c:\\test_of_test")) + ,("W.makeValid \"test*\" == \"test_\"", property $ W.makeValid "test*" == "test_") + ,("AFP_W.makeValid (\"test*\") == (\"test_\")", property $ AFP_W.makeValid ("test*") == ("test_")) + ,("W.makeValid \"c:\\\\test\\\\nul\" == \"c:\\\\test\\\\nul_\"", property $ W.makeValid "c:\\test\\nul" == "c:\\test\\nul_") + ,("AFP_W.makeValid (\"c:\\\\test\\\\nul\") == (\"c:\\\\test\\\\nul_\")", property $ AFP_W.makeValid ("c:\\test\\nul") == ("c:\\test\\nul_")) + ,("W.makeValid \"c:\\\\test\\\\prn.txt\" == \"c:\\\\test\\\\prn_.txt\"", property $ W.makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt") + ,("AFP_W.makeValid (\"c:\\\\test\\\\prn.txt\") == (\"c:\\\\test\\\\prn_.txt\")", property $ AFP_W.makeValid ("c:\\test\\prn.txt") == ("c:\\test\\prn_.txt")) + ,("W.makeValid \"c:\\\\test/prn.txt\" == \"c:\\\\test/prn_.txt\"", property $ W.makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt") + ,("AFP_W.makeValid (\"c:\\\\test/prn.txt\") == (\"c:\\\\test/prn_.txt\")", property $ AFP_W.makeValid ("c:\\test/prn.txt") == ("c:\\test/prn_.txt")) + ,("W.makeValid \"c:\\\\nul\\\\file\" == \"c:\\\\nul_\\\\file\"", property $ W.makeValid "c:\\nul\\file" == "c:\\nul_\\file") + ,("AFP_W.makeValid (\"c:\\\\nul\\\\file\") == (\"c:\\\\nul_\\\\file\")", 
property $ AFP_W.makeValid ("c:\\nul\\file") == ("c:\\nul_\\file")) + ,("W.makeValid \"\\\\\\\\\\\\foo\" == \"\\\\\\\\drive\"", property $ W.makeValid "\\\\\\foo" == "\\\\drive") + ,("AFP_W.makeValid (\"\\\\\\\\\\\\foo\") == (\"\\\\\\\\drive\")", property $ AFP_W.makeValid ("\\\\\\foo") == ("\\\\drive")) + ,("W.makeValid \"\\\\\\\\?\\\\D:file\" == \"\\\\\\\\?\\\\D:\\\\file\"", property $ W.makeValid "\\\\?\\D:file" == "\\\\?\\D:\\file") + ,("AFP_W.makeValid (\"\\\\\\\\?\\\\D:file\") == (\"\\\\\\\\?\\\\D:\\\\file\")", property $ AFP_W.makeValid ("\\\\?\\D:file") == ("\\\\?\\D:\\file")) + ,("W.makeValid \"nul .txt\" == \"nul _.txt\"", property $ W.makeValid "nul .txt" == "nul _.txt") + ,("AFP_W.makeValid (\"nul .txt\") == (\"nul _.txt\")", property $ AFP_W.makeValid ("nul .txt") == ("nul _.txt")) + ,("W.isRelative \"path\\\\test\" == True", property $ W.isRelative "path\\test" == True) + ,("AFP_W.isRelative (\"path\\\\test\") == True", property $ AFP_W.isRelative ("path\\test") == True) + ,("W.isRelative \"c:\\\\test\" == False", property $ W.isRelative "c:\\test" == False) + ,("AFP_W.isRelative (\"c:\\\\test\") == False", property $ AFP_W.isRelative ("c:\\test") == False) + ,("W.isRelative \"c:test\" == True", property $ W.isRelative "c:test" == True) + ,("AFP_W.isRelative (\"c:test\") == True", property $ AFP_W.isRelative ("c:test") == True) + ,("W.isRelative \"c:\\\\\" == False", property $ W.isRelative "c:\\" == False) + ,("AFP_W.isRelative (\"c:\\\\\") == False", property $ AFP_W.isRelative ("c:\\") == False) + ,("W.isRelative \"c:/\" == False", property $ W.isRelative "c:/" == False) + ,("AFP_W.isRelative (\"c:/\") == False", property $ AFP_W.isRelative ("c:/") == False) + ,("W.isRelative \"c:\" == True", property $ W.isRelative "c:" == True) + ,("AFP_W.isRelative (\"c:\") == True", property $ AFP_W.isRelative ("c:") == True) + ,("W.isRelative \"\\\\\\\\foo\" == False", property $ W.isRelative "\\\\foo" == False) + ,("AFP_W.isRelative (\"\\\\\\\\foo\") == 
False", property $ AFP_W.isRelative ("\\\\foo") == False) + ,("W.isRelative \"\\\\\\\\?\\\\foo\" == False", property $ W.isRelative "\\\\?\\foo" == False) + ,("AFP_W.isRelative (\"\\\\\\\\?\\\\foo\") == False", property $ AFP_W.isRelative ("\\\\?\\foo") == False) + ,("W.isRelative \"\\\\\\\\?\\\\UNC\\\\foo\" == False", property $ W.isRelative "\\\\?\\UNC\\foo" == False) + ,("AFP_W.isRelative (\"\\\\\\\\?\\\\UNC\\\\foo\") == False", property $ AFP_W.isRelative ("\\\\?\\UNC\\foo") == False) + ,("W.isRelative \"/foo\" == True", property $ W.isRelative "/foo" == True) + ,("AFP_W.isRelative (\"/foo\") == True", property $ AFP_W.isRelative ("/foo") == True) + ,("W.isRelative \"\\\\foo\" == True", property $ W.isRelative "\\foo" == True) + ,("AFP_W.isRelative (\"\\\\foo\") == True", property $ AFP_W.isRelative ("\\foo") == True) + ,("P.isRelative \"test/path\" == True", property $ P.isRelative "test/path" == True) + ,("AFP_P.isRelative (\"test/path\") == True", property $ AFP_P.isRelative ("test/path") == True) + ,("P.isRelative \"/test\" == False", property $ P.isRelative "/test" == False) + ,("AFP_P.isRelative (\"/test\") == False", property $ AFP_P.isRelative ("/test") == False) + ,("P.isRelative \"/\" == False", property $ P.isRelative "/" == False) + ,("AFP_P.isRelative (\"/\") == False", property $ AFP_P.isRelative ("/") == False) + ,("P.isAbsolute x == not (P.isRelative x)", property $ \(QFilePath x) -> P.isAbsolute x == not (P.isRelative x)) + ,("W.isAbsolute x == not (W.isRelative x)", property $ \(QFilePath x) -> W.isAbsolute x == not (W.isRelative x)) + ,("AFP_P.isAbsolute x == not (AFP_P.isRelative x)", property $ \(QFilePathAFP_P x) -> AFP_P.isAbsolute x == not (AFP_P.isRelative x)) + ,("AFP_W.isAbsolute x == not (AFP_W.isRelative x)", property $ \(QFilePathAFP_W x) -> AFP_W.isAbsolute x == not (AFP_W.isRelative x)) + ] diff --git a/tests/integration/tests/mutable-deps/files/package.yaml b/tests/integration/tests/mutable-deps/files/package.yaml new file mode 
100644 index 0000000000..79d39d13fa --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/package.yaml @@ -0,0 +1,18 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + dependencies: + - filemanip + +executables: + myExe: + source-dirs: app + main: Main.hs + dependencies: + - myPackage diff --git a/tests/integration/tests/mutable-deps/files/src/Lib.hs b/tests/integration/tests/mutable-deps/files/src/Lib.hs new file mode 100644 index 0000000000..77d29f1945 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/src/Lib.hs @@ -0,0 +1,6 @@ +module Lib where + +import System.FilePath.Glob + +allCFiles :: IO [FilePath] +allCFiles = namesMatching "*.c" diff --git a/tests/integration/tests/mutable-deps/files/stack.yaml b/tests/integration/tests/mutable-deps/files/stack.yaml new file mode 100644 index 0000000000..df1f84c742 --- /dev/null +++ b/tests/integration/tests/mutable-deps/files/stack.yaml @@ -0,0 +1,18 @@ +snapshot: lts-24.37 + +extra-deps: +- ./filepath-1.5.4.0 +# Required, otherwise Stack reports that GHC boot library has been pruned. +- Win32-2.14.1.0 +- directory-1.3.8.5 +- os-string-2.0.4 +- time-1.12.2 +- unix-2.8.6.0 + +flags: + Win32: + os-string: true + directory: + os-string: true + unix: + os-string: true diff --git a/tests/integration/tests/nice-resolver-names/Main.hs b/tests/integration/tests/nice-resolver-names/Main.hs new file mode 100644 index 0000000000..48a2d755de --- /dev/null +++ b/tests/integration/tests/nice-resolver-names/Main.hs @@ -0,0 +1,23 @@ +-- | Stack's init command uses a convenience synonym for the snapshot in the +-- project-level configuration file, if that is what was specified at the +-- command line. 
+ +{-# LANGUAGE OverloadedStrings #-} + +import Control.Exception ( throwIO ) +import Data.Maybe ( mapMaybe ) +import Data.Foldable ( for_ ) +import Data.List ( stripPrefix ) +import StackTest + +main :: IO () +main = do + for_ ["lts-20.26", "lts-24.37"] $ \snapshot -> do + stack ["init", "--force", "--snapshot", snapshot] + str <- readFile "stack.yaml" + case mapMaybe (stripPrefix "snapshot: ") $ lines str of + [x] -> + if filter (/= '\r') x == snapshot + then pure () + else error $ "Mismatch: " ++ show (snapshot, x) + _ -> error $ "Wrong number of snapshots: " ++ show str diff --git a/tests/integration/tests/nice-resolver-names/files/.gitignore b/tests/integration/tests/nice-resolver-names/files/.gitignore new file mode 100644 index 0000000000..82bb5452c4 --- /dev/null +++ b/tests/integration/tests/nice-resolver-names/files/.gitignore @@ -0,0 +1,2 @@ +myPackage.cabal +stack.yaml diff --git a/tests/integration/tests/nice-resolver-names/files/package.yaml b/tests/integration/tests/nice-resolver-names/files/package.yaml new file mode 100644 index 0000000000..7e155848c9 --- /dev/null +++ b/tests/integration/tests/nice-resolver-names/files/package.yaml @@ -0,0 +1,5 @@ +spec-version: 0.36.0 + +name: myPackage + +library: {} diff --git a/tests/integration/tests/override-compiler/Main.hs b/tests/integration/tests/override-compiler/Main.hs new file mode 100644 index 0000000000..d7196f4e9c --- /dev/null +++ b/tests/integration/tests/override-compiler/Main.hs @@ -0,0 +1,10 @@ +-- | Stack can override the compiler specified in a snapshot. 
+ +import Control.Monad ( unless ) +import StackTest + +main :: IO () +main = stackCheckStdout ["exec", "--", "ghc", "--numeric-version"] $ \ver -> + -- get rid of the newline character + unless (concat (lines ver) == "9.10.3") $ + error $ "Invalid version: " ++ show ver diff --git a/tests/integration/tests/override-compiler/files/stack.yaml b/tests/integration/tests/override-compiler/files/stack.yaml new file mode 100644 index 0000000000..0b41c84c37 --- /dev/null +++ b/tests/integration/tests/override-compiler/files/stack.yaml @@ -0,0 +1,3 @@ +snapshot: lts-22.43 +compiler: ghc-9.10.3 +packages: [] diff --git a/tests/integration/tests/proper-rebuilds/Main.hs b/tests/integration/tests/proper-rebuilds/Main.hs new file mode 100644 index 0000000000..4a12aac853 --- /dev/null +++ b/tests/integration/tests/proper-rebuilds/Main.hs @@ -0,0 +1,24 @@ +-- | Stack rebuilds an executable if profiling is enabled or disabled or if the +-- library on which it depends changes. + +import Control.Monad ( unless, when ) +import Data.List ( isInfixOf ) +import StackTest +import System.Directory ( copyFile ) + +main :: IO () +main = do + let expectRecompilation stderr = + unless ("> build" `isInfixOf` stderr) $ + error "package recompilation was expected" + expectNoRecompilation stderr = + when ("> build" `isInfixOf` stderr) $ + error "package recompilation was not expected" + copyFile "src/Lib.v1" "src/Lib.hs" + stackCheckStderr ["build"] expectRecompilation + stackCheckStderr ["build" , "--profile"] expectRecompilation + stackCheckStderr ["build" , "--profile"] expectNoRecompilation + -- changing source file to trigger recompilation + copyFile "src/Lib.v2" "src/Lib.hs" + stackCheckStderr ["build" , "--profile"] expectRecompilation + stackCheckStderr ["build"] expectRecompilation diff --git a/tests/integration/tests/proper-rebuilds/files/.gitignore b/tests/integration/tests/proper-rebuilds/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ 
b/tests/integration/tests/proper-rebuilds/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/proper-rebuilds/files/app/Main.hs b/tests/integration/tests/proper-rebuilds/files/app/Main.hs new file mode 100644 index 0000000000..c081d93bae --- /dev/null +++ b/tests/integration/tests/proper-rebuilds/files/app/Main.hs @@ -0,0 +1,6 @@ +module Main where + +import Lib ( func ) + +main :: IO () +main = func diff --git a/tests/integration/tests/proper-rebuilds/files/package.yaml b/tests/integration/tests/proper-rebuilds/files/package.yaml new file mode 100644 index 0000000000..bea19d444e --- /dev/null +++ b/tests/integration/tests/proper-rebuilds/files/package.yaml @@ -0,0 +1,16 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: + source-dirs: src + +executables: + myExe: + source-dirs: app + main: Main.hs + dependencies: + - myPackage diff --git a/tests/integration/tests/proper-rebuilds/files/src/Lib.hs b/tests/integration/tests/proper-rebuilds/files/src/Lib.hs new file mode 100644 index 0000000000..412990b09b --- /dev/null +++ b/tests/integration/tests/proper-rebuilds/files/src/Lib.hs @@ -0,0 +1,6 @@ +module Lib + ( func + ) where + +func :: IO () +func = putStrLn "Version 1" diff --git a/tests/integration/tests/proper-rebuilds/files/src/Lib.v1 b/tests/integration/tests/proper-rebuilds/files/src/Lib.v1 new file mode 100644 index 0000000000..412990b09b --- /dev/null +++ b/tests/integration/tests/proper-rebuilds/files/src/Lib.v1 @@ -0,0 +1,6 @@ +module Lib + ( func + ) where + +func :: IO () +func = putStrLn "Version 1" diff --git a/tests/integration/tests/proper-rebuilds/files/src/Lib.v2 b/tests/integration/tests/proper-rebuilds/files/src/Lib.v2 new file mode 100644 index 0000000000..98438cc36e --- /dev/null +++ b/tests/integration/tests/proper-rebuilds/files/src/Lib.v2 @@ -0,0 +1,6 @@ +module Lib + ( func + ) where + +func :: IO () +func = putStrLn "Version 2" diff --git 
a/tests/integration/tests/proper-rebuilds/files/stack.yaml b/tests/integration/tests/proper-rebuilds/files/stack.yaml new file mode 100644 index 0000000000..e674eab75a --- /dev/null +++ b/tests/integration/tests/proper-rebuilds/files/stack.yaml @@ -0,0 +1 @@ +snapshot: ghc-9.10.3 diff --git a/tests/integration/tests/relative-script-snapshots/Main.hs b/tests/integration/tests/relative-script-snapshots/Main.hs new file mode 100644 index 0000000000..483add4c7d --- /dev/null +++ b/tests/integration/tests/relative-script-snapshots/Main.hs @@ -0,0 +1,7 @@ +-- Stack's script interpreter supports snapshot locations that are relative +-- local file paths. + +import StackTest + +main :: IO () +main = stack ["subdir/script.hs"] diff --git a/tests/integration/tests/relative-script-snapshots/files/subdir/mySnapshot.yaml b/tests/integration/tests/relative-script-snapshots/files/subdir/mySnapshot.yaml new file mode 100644 index 0000000000..3ab8bcbe35 --- /dev/null +++ b/tests/integration/tests/relative-script-snapshots/files/subdir/mySnapshot.yaml @@ -0,0 +1,7 @@ +name: mySnapshot + +snapshot: ghc-9.10.3 + +packages: +- acme-missiles-0.3@rev:0 +- stm-2.5.0.0@rev:0 diff --git a/tests/integration/tests/relative-script-snapshots/files/subdir/script.hs b/tests/integration/tests/relative-script-snapshots/files/subdir/script.hs new file mode 100644 index 0000000000..582c1d7b5a --- /dev/null +++ b/tests/integration/tests/relative-script-snapshots/files/subdir/script.hs @@ -0,0 +1,7 @@ +#!/usr/bin/env stack +-- stack --snapshot mySnapshot.yaml script + +import Acme.Missiles ( launchMissiles ) + +main :: IO () +main = launchMissiles diff --git a/tests/integration/tests/sanity/Main.hs b/tests/integration/tests/sanity/Main.hs new file mode 100644 index 0000000000..cb6f3f44bc --- /dev/null +++ b/tests/integration/tests/sanity/Main.hs @@ -0,0 +1,26 @@ +-- | Various tests of Stack's sanity. 
+ +import Control.Monad ( unless ) +import StackTest +import System.Directory ( doesFileExist ) + +main :: IO () +main = do + stack ["--version"] + stack ["--help"] + removeDirIgnore "acme-missiles-0.2" + removeDirIgnore "acme-missiles-0.3" + stack ["unpack", "acme-missiles-0.2"] + stack ["unpack", "acme-missiles"] + stackErr ["command-does-not-exist"] + stackErr ["unpack", "invalid-package-name-"] + + -- When running outside of IntegrationSpec.hs, this will use the + -- stack.yaml from Stack itself + exists <- doesFileExist "../../../../../stack.yaml" + unless exists $ stackErr ["build"] + + doesNotExist "stack.yaml" + + let scriptFile = if isWindows then "./script.bat" else "./script.sh" + stack [defaultSnapshotArg, "exec", scriptFile] diff --git a/test/integration/tests/sanity/files/.gitignore b/tests/integration/tests/sanity/files/.gitignore similarity index 100% rename from test/integration/tests/sanity/files/.gitignore rename to tests/integration/tests/sanity/files/.gitignore diff --git a/test/integration/tests/sanity/files/foo.bat b/tests/integration/tests/sanity/files/script.bat old mode 100755 new mode 100644 similarity index 100% rename from test/integration/tests/sanity/files/foo.bat rename to tests/integration/tests/sanity/files/script.bat diff --git a/test/integration/tests/sanity/files/foo.sh b/tests/integration/tests/sanity/files/script.sh similarity index 100% rename from test/integration/tests/sanity/files/foo.sh rename to tests/integration/tests/sanity/files/script.sh diff --git a/tests/integration/tests/script-extra-dep/Main.hs b/tests/integration/tests/script-extra-dep/Main.hs new file mode 100644 index 0000000000..b575ce035d --- /dev/null +++ b/tests/integration/tests/script-extra-dep/Main.hs @@ -0,0 +1,6 @@ +-- Stack's script interpreter supports extra-deps. 
+ +import StackTest + +main :: IO () +main = stack ["script.hs"] diff --git a/tests/integration/tests/script-extra-dep/files/script.hs b/tests/integration/tests/script-extra-dep/files/script.hs new file mode 100644 index 0000000000..cbb5e52b49 --- /dev/null +++ b/tests/integration/tests/script-extra-dep/files/script.hs @@ -0,0 +1,7 @@ +#!/usr/bin/env stack +-- stack --snapshot ghc-9.10.3 script --extra-dep acme-missiles-0.3@rev:0 + +import Acme.Missiles ( launchMissiles ) + +main :: IO () +main = launchMissiles diff --git a/tests/integration/tests/skip-unreachable-dirs/Main.hs b/tests/integration/tests/skip-unreachable-dirs/Main.hs new file mode 100644 index 0000000000..25983649fa --- /dev/null +++ b/tests/integration/tests/skip-unreachable-dirs/Main.hs @@ -0,0 +1,12 @@ +-- | Stack's init command skips unreachable directories. + +import Control.Exception ( IOException, catch) +import StackTest +import System.Directory + +main :: IO () +main = do + removeFileIgnore "stack.yaml" + createDirectory "unreachabledir" `catch` \(e :: IOException) -> pure () + setPermissions "unreachabledir" emptyPermissions + stack ["init"] diff --git a/tests/integration/tests/skip-unreachable-dirs/files/.gitignore b/tests/integration/tests/skip-unreachable-dirs/files/.gitignore new file mode 100644 index 0000000000..20fdd32928 --- /dev/null +++ b/tests/integration/tests/skip-unreachable-dirs/files/.gitignore @@ -0,0 +1,3 @@ +myPackage.cabal +stack.yaml +unreachabledir/ diff --git a/tests/integration/tests/skip-unreachable-dirs/files/package.yaml b/tests/integration/tests/skip-unreachable-dirs/files/package.yaml new file mode 100644 index 0000000000..4b373ba746 --- /dev/null +++ b/tests/integration/tests/skip-unreachable-dirs/files/package.yaml @@ -0,0 +1,8 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base + +library: {} diff --git a/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/Main.hs 
b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/Main.hs new file mode 100644 index 0000000000..eb84b525d1 --- /dev/null +++ b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/Main.hs @@ -0,0 +1,25 @@ +-- | The test case here is weird enough to warrant an explanation. What we +-- _really_ want to test is whether building the lts-3.12 snapshot's +-- semigroupoids package with rev-1 works. See +-- https://github.com/commercialhaskell/stackage/issues/3185. However, that test +-- requires that we use an older GHC, and as Manny commented: +-- +-- > Having integration tests with old resolvers will cause them to fail +-- > on Linux distributions with GCC with PIE enabled by default (which +-- > is the latest versions of most distributions now), since older GHC +-- > versions do not support it. I'm not sure what we should do about +-- > this, since it obviously does make sense to be able to test against +-- > old snapshots sometimes. +-- +-- So I am instead testing a totally different case here which repros the +-- same issue. If we use a custom snapshot with incompatible `stm` and +-- `async` versions, we want Stack to trust the build plan and allow a +-- `--dry-run` to succeed. But if we do this via `extra-deps`, we want it +-- to fail. 
+ +import StackTest + +main :: IO () +main = do + stackErr ["build", "--stack-yaml", "stack1.yaml", "--dry-run"] + stack ["build", "--stack-yaml", "stack2.yaml", "--dry-run"] diff --git a/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/.gitignore b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/mySnapshot.yaml b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/mySnapshot.yaml new file mode 100644 index 0000000000..0c602a1e79 --- /dev/null +++ b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/mySnapshot.yaml @@ -0,0 +1,8 @@ +name: mySnapshot + +snapshot: ghc-9.10.3 + +packages: +# The Cabal file for async-2.1.1.1 specifies stm >= 2.2 +- async-2.1.1.1 +- stm-2.1.2.2 diff --git a/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/package.yaml b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/package.yaml new file mode 100644 index 0000000000..d04e500bd6 --- /dev/null +++ b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/package.yaml @@ -0,0 +1,9 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- async + +library: {} diff --git a/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/stack1.yaml b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/stack1.yaml new file mode 100644 index 0000000000..4813fc689c --- /dev/null +++ b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/stack1.yaml @@ -0,0 +1,6 @@ +snapshot: ghc-9.10.3 + +extra-deps: +# The Cabal file for async-2.1.1.1 specifies stm >= 2.2 +- async-2.1.1.1 +- stm-2.1.2.2 diff --git 
a/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/stack2.yaml b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/stack2.yaml new file mode 100644 index 0000000000..5d99800320 --- /dev/null +++ b/tests/integration/tests/stackage-3185-ignore-bounds-in-snapshot/files/stack2.yaml @@ -0,0 +1 @@ +snapshot: mySnapshot.yaml diff --git a/tests/integration/tests/upload/Main.hs b/tests/integration/tests/upload/Main.hs new file mode 100644 index 0000000000..792d75e3e4 --- /dev/null +++ b/tests/integration/tests/upload/Main.hs @@ -0,0 +1,32 @@ +import Control.Concurrent ( threadDelay ) +import StackTest +import System.Directory + ( createDirectoryIfMissing, getCurrentDirectory ) +import System.Environment ( getEnv, setEnv ) +import System.FilePath ( () ) +import System.Process ( proc, withCreateProcess ) + +main :: IO () +main = + withFakeHackage $ do + stackRoot <- getEnv "STACK_ROOT" + -- Ensure there are credentials available for uploading + createDirectoryIfMissing True (stackRoot "upload") + writeFile + (stackRoot "upload" "credentials.json") + "{\"username\":\"fake\",\"password\":\"fake\"}" + stack ["upload", "."] + +-- | Start a fake Hackage server to test the upload +withFakeHackage :: IO a -> IO a +withFakeHackage act = do + stackEnv <- stackExe + -- Build the dependencies for the fake server + stack $ withNetworkArgs ++ ["FakeHackageStart.hs"] + -- Start the fake server + withCreateProcess (proc stackEnv $ withNetworkArgs ++ ["FakeHackage.hs"]) $ \_ _ _ _ -> do + -- Wait for the fake server to start accepting requests + threadDelay 3000000 + act + where + withNetworkArgs = ["runghc", "--package", "network"] diff --git a/tests/integration/tests/upload/files/.gitignore b/tests/integration/tests/upload/files/.gitignore new file mode 100644 index 0000000000..b0a5a052a1 --- /dev/null +++ b/tests/integration/tests/upload/files/.gitignore @@ -0,0 +1 @@ +myPackage.cabal diff --git a/tests/integration/tests/upload/files/FakeHackage.hs 
b/tests/integration/tests/upload/files/FakeHackage.hs new file mode 100644 index 0000000000..3d39c7a445 --- /dev/null +++ b/tests/integration/tests/upload/files/FakeHackage.hs @@ -0,0 +1,32 @@ +{-# LANGUAGE OverloadedStrings #-} + +import Control.Concurrent ( forkIO, threadDelay ) +import Control.Monad ( forever ) +import Network.Socket hiding ( recv ) +import Network.Socket.ByteString ( recv, sendAll ) +import System.Exit ( exitSuccess ) +import System.IO () + +-- | Fake server that always responds with HTTP OK +main = + withSocketsDo $ do + _ <- forkIO serve + -- Exit after a delay to ensure the process doesn't linger around + threadDelay 10000000 + exitSuccess + +serve :: IO () +serve = do + let hints = defaultHints {addrFlags = [AI_PASSIVE], addrSocketType = Stream} + (addr:_) <- getAddrInfo (Just hints) Nothing (Just "12415") + sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) + setSocketOption sock ReuseAddr 1 + bind sock (addrAddress addr) + listen sock 10 + forever $ do + (conn, _) <- accept sock + _ <- recv conn 1024 + sendAll + conn + "HTTP/1.1 200 OK\r\nContent-Length: 1\r\nContent-Type: text/plain\r\n\r\na" + shutdown conn ShutdownSend diff --git a/test/integration/tests/upload/files/FakeHackageStart.hs b/tests/integration/tests/upload/files/FakeHackageStart.hs similarity index 100% rename from test/integration/tests/upload/files/FakeHackageStart.hs rename to tests/integration/tests/upload/files/FakeHackageStart.hs diff --git a/test/integration/tests/upload/files/gpg-disabled/gpg b/tests/integration/tests/upload/files/gpg-disabled/gpg old mode 100755 new mode 100644 similarity index 100% rename from test/integration/tests/upload/files/gpg-disabled/gpg rename to tests/integration/tests/upload/files/gpg-disabled/gpg diff --git a/test/integration/tests/upload/files/gpg-disabled/gpg.bat b/tests/integration/tests/upload/files/gpg-disabled/gpg.bat similarity index 100% rename from 
test/integration/tests/upload/files/gpg-disabled/gpg.bat rename to tests/integration/tests/upload/files/gpg-disabled/gpg.bat diff --git a/test/integration/tests/upload/files/gpg-disabled/gpg2 b/tests/integration/tests/upload/files/gpg-disabled/gpg2 similarity index 100% rename from test/integration/tests/upload/files/gpg-disabled/gpg2 rename to tests/integration/tests/upload/files/gpg-disabled/gpg2 diff --git a/test/integration/tests/upload/files/gpg-disabled/gpg2.bat b/tests/integration/tests/upload/files/gpg-disabled/gpg2.bat similarity index 100% rename from test/integration/tests/upload/files/gpg-disabled/gpg2.bat rename to tests/integration/tests/upload/files/gpg-disabled/gpg2.bat diff --git a/tests/integration/tests/upload/files/package.yaml b/tests/integration/tests/upload/files/package.yaml new file mode 100644 index 0000000000..75c0d254af --- /dev/null +++ b/tests/integration/tests/upload/files/package.yaml @@ -0,0 +1,11 @@ +spec-version: 0.36.0 + +name: myPackage +description: Upload test description +license: BSD-3-Clause + +dependencies: +- base < 5 + +library: + source-dirs: src diff --git a/tests/integration/tests/upload/files/src/Lib.hs b/tests/integration/tests/upload/files/src/Lib.hs new file mode 100644 index 0000000000..6d85a26fe1 --- /dev/null +++ b/tests/integration/tests/upload/files/src/Lib.hs @@ -0,0 +1 @@ +module Lib where diff --git a/tests/integration/tests/upload/files/stack.yaml b/tests/integration/tests/upload/files/stack.yaml new file mode 100644 index 0000000000..f4adc04431 --- /dev/null +++ b/tests/integration/tests/upload/files/stack.yaml @@ -0,0 +1,3 @@ +snapshot: lts-24.37 + +hackage-base-url: http://localhost:12415/ diff --git a/tests/integration/tests/variables-in-template-file-names/Main.hs b/tests/integration/tests/variables-in-template-file-names/Main.hs new file mode 100644 index 0000000000..598874ea80 --- /dev/null +++ b/tests/integration/tests/variables-in-template-file-names/Main.hs @@ -0,0 +1,13 @@ +-- | A Stack project 
template can be populated with the name of the Stack +-- project. + +import Control.Monad ( unless ) +import StackTest +import System.Directory ( doesFileExist ) + +main :: IO () +main = do + removeDirIgnore "myPackage" + stack ["new", "myPackage", "./template.hsfiles"] + exists <- doesFileExist "myPackage/myPackage.cabal" + unless exists $ error "does not exist" diff --git a/tests/integration/tests/variables-in-template-file-names/files/.gitignore b/tests/integration/tests/variables-in-template-file-names/files/.gitignore new file mode 100644 index 0000000000..761adaef96 --- /dev/null +++ b/tests/integration/tests/variables-in-template-file-names/files/.gitignore @@ -0,0 +1 @@ +myPackage/ diff --git a/test/integration/tests/variables-in-template-file-names/files/template.hsfiles b/tests/integration/tests/variables-in-template-file-names/files/template.hsfiles similarity index 100% rename from test/integration/tests/variables-in-template-file-names/files/template.hsfiles rename to tests/integration/tests/variables-in-template-file-names/files/template.hsfiles diff --git a/tests/integration/tests/watched-files/Main.hs b/tests/integration/tests/watched-files/Main.hs new file mode 100644 index 0000000000..3cd9840459 --- /dev/null +++ b/tests/integration/tests/watched-files/Main.hs @@ -0,0 +1,16 @@ +-- | Stack recompiles when a file required for compilation is dirty. 
+ +import Control.Monad ( unless ) +import Data.Foldable ( for_ ) +import StackTest + +main :: IO () +main = for_ (words "foo bar baz bin") $ \x -> do + writeFile "some-text-file.txt" x + stackCheckStdout ["run"] $ \y -> + unless (x == y) $ error $ concat + [ "Expected: " + , show x + , "\nActual: " + , show y + ] diff --git a/tests/integration/tests/watched-files/files/.gitignore b/tests/integration/tests/watched-files/files/.gitignore new file mode 100644 index 0000000000..c93a3fc3e5 --- /dev/null +++ b/tests/integration/tests/watched-files/files/.gitignore @@ -0,0 +1,2 @@ +some-text-file.txt +myPackage.cabal diff --git a/tests/integration/tests/watched-files/files/app/Main.hs b/tests/integration/tests/watched-files/files/app/Main.hs new file mode 100644 index 0000000000..34d36111aa --- /dev/null +++ b/tests/integration/tests/watched-files/files/app/Main.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE TemplateHaskell #-} + +module Main where + +import qualified Data.ByteString as B +import Data.FileEmbed ( embedFile ) +import System.IO ( stdout ) + +main :: IO () +main = B.hPut stdout $(embedFile "some-text-file.txt") diff --git a/tests/integration/tests/watched-files/files/package.yaml b/tests/integration/tests/watched-files/files/package.yaml new file mode 100644 index 0000000000..2132e12b97 --- /dev/null +++ b/tests/integration/tests/watched-files/files/package.yaml @@ -0,0 +1,13 @@ +spec-version: 0.36.0 + +name: myPackage + +dependencies: +- base +- bytestring +- file-embed + +executables: + myExe: + source-dirs: app + main: Main.hs diff --git a/tests/integration/tests/watched-files/files/stack.yaml b/tests/integration/tests/watched-files/files/stack.yaml new file mode 100644 index 0000000000..c292f63385 --- /dev/null +++ b/tests/integration/tests/watched-files/files/stack.yaml @@ -0,0 +1 @@ +snapshot: lts-24.37 diff --git a/tests/unit/Spec.hs b/tests/unit/Spec.hs new file mode 100644 index 0000000000..6dbd45d747 --- /dev/null +++ b/tests/unit/Spec.hs @@ -0,0 +1,2 @@ +{-# 
OPTIONS_GHC -Wno-missing-export-lists #-} +{-# OPTIONS_GHC -F -pgmF hspec-discover #-} diff --git a/tests/unit/Stack/ArgsSpec.hs b/tests/unit/Stack/ArgsSpec.hs new file mode 100644 index 0000000000..5732e344a1 --- /dev/null +++ b/tests/unit/Stack/ArgsSpec.hs @@ -0,0 +1,197 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedLists #-} + +-- | Args parser test suite. + +module Stack.ArgsSpec + ( spec + , argsSpec + , argsInputOutput + , interpreterArgsSpec + ) where + +import Data.Attoparsec.Args ( EscapingMode (..), parseArgsFromString ) +import Data.Attoparsec.Interpreter ( interpreterArgsParser ) +import qualified Data.Attoparsec.Text as P +import qualified Data.List.NonEmpty as NE +import Data.Text ( pack ) +import Stack.Constants ( stackProgName ) +import Stack.Prelude +import Test.Hspec ( Spec, describe, it ) + +-- | Test spec. +spec :: Spec +spec = do + argsSpec + interpreterArgsSpec + +argsSpec :: Spec +argsSpec = forM_ argsInputOutput + (\(input,output) -> it input (parseArgsFromString Escaping input == output)) + +-- | Fairly comprehensive checks. 
+argsInputOutput :: [(String, Either String [String])] +argsInputOutput = + [ ("x", Right ["x"]) + , ("x y z", Right ["x", "y", "z"]) + , ("aaa bbb ccc", Right ["aaa", "bbb", "ccc"]) + , (" aaa bbb ccc ", Right ["aaa", "bbb", "ccc"]) + , ("aaa\"", Left "unterminated string: endOfInput") + , ("\"", Left "unterminated string: endOfInput") + , ("\"\"", Right [""]) + , ("\"aaa", Left "unterminated string: endOfInput") + , ("\"aaa\" bbb ccc \"ddd\"", Right ["aaa", "bbb", "ccc", "ddd"]) + , ("\"aa\\\"a\" bbb ccc \"ddd\"", Right ["aa\"a", "bbb", "ccc", "ddd"]) + , ("\"aa\\\"a\" bb\\b ccc \"ddd\"", Right ["aa\"a", "bb\\b", "ccc", "ddd"]) + , ("\"\" \"\" c", Right ["","","c"])] + +interpreterArgsSpec :: Spec +interpreterArgsSpec = + describe "Script interpreter parser" $ do + describe "Success cases" $ do + describe "Line comments" $ do + checkLines "" + checkLines " --x" + checkLines " --x --y" + describe "Literate line comments" $ do + checkLiterateLines "" + checkLiterateLines " --x" + checkLiterateLines " --x --y" + describe "Block comments" $ do + checkBlocks "" + checkBlocks "\n" + checkBlocks " --x" + checkBlocks "\n--x" + checkBlocks " --x --y" + checkBlocks "\n--x\n--y" + checkBlocks "\n\t--x\n\t--y" + describe "Literate block comments" $ do + checkLiterateBlocks "" "" + checkLiterateBlocks "\n>" "" + checkLiterateBlocks " --x" " --x" + checkLiterateBlocks "\n>--x" "--x" + checkLiterateBlocks " --x --y " "--x --y" + checkLiterateBlocks "\n>--x\n>--y" "--x --y" + checkLiterateBlocks "\n>\t--x\n>\t--y" "--x --y" + describe "Failure cases" $ do + checkFailures + describe "Bare directives in literate files" $ do + forM_ (interpreterGenValid lineComment "") $ + testAndCheck (acceptFailure True) "" + forM_ (interpreterGenValid blockComment "") $ + testAndCheck (acceptFailure True) "" + where + parse isLiterate s = + P.parseOnly (interpreterArgsParser isLiterate stackProgName) (pack s) + + acceptSuccess :: Bool -> String -> String -> Bool + acceptSuccess isLiterate args s 
= case parse isLiterate s of + Right x | words x == words args -> True + _ -> False + + acceptFailure isLiterate _ s = case parse isLiterate s of + Left _ -> True + Right _ -> False + + testAndCheck checker out inp = it (show inp) $ checker out inp + + checkLines args = forM_ + (interpreterGenValid lineComment args) + (testAndCheck (acceptSuccess False) args) + + checkLiterateLines args = forM_ + (interpreterGenValid literateLineComment args) + (testAndCheck (acceptSuccess True) args) + + checkBlocks args = forM_ + (interpreterGenValid blockComment args) + (testAndCheck (acceptSuccess False) args) + + checkLiterateBlocks inp args = forM_ + (interpreterGenValid literateBlockComment inp) + (testAndCheck (acceptSuccess True) args) + + checkFailures = forM_ + interpreterGenInvalid + (testAndCheck (acceptFailure False) "unused") + + -- Generate a set of acceptable inputs for given format and args + interpreterGenValid :: + (String -> NonEmpty String) + -> String + -> NonEmpty String + interpreterGenValid fmt args = shebang <++> newLine <++> fmt args + + interpreterGenInvalid :: NonEmpty String + -- Generate a set of Invalid inputs + interpreterGenInvalid = + ["-stack\n"] -- random input + -- just the shebang + <> shebang <++> ["\n"] + -- invalid shebang + <> blockSpace <++> [NE.head (interpreterGenValid lineComment args)] + -- something between shebang and Stack comment + <> shebang + <++> newLine + <++> blockSpace + <++> ([NE.head (lineComment args)] <> [NE.head (blockComment args)]) + -- unterminated block comment + -- just chop the closing chars from a valid block comment + <> shebang + <++> ["\n"] + <++> let c = NE.head (blockComment args) + l = length c - 2 + in [assert (drop l c == "-}") (take l c)] + -- nested block comment + <> shebang + <++> ["\n"] + <++> [NE.head (blockComment "--x {- nested -} --y")] + where + args = " --x --y" + (<++>) = liftA2 (<>) + + -- Generative grammar for the interpreter comments + shebang :: NonEmpty String + shebang = 
["#!/usr/bin/env stack"] + + newLine :: NonEmpty String + newLine = ["\n"] <> ["\r\n"] + + -- A comment may be the last line or followed by something else + postComment :: NonEmpty String + postComment = [""] <> newLine + + -- A command starts with zero or more whitespace followed by "stack" + makeComment :: + (String -> String) + -> NonEmpty String + -> String + -> NonEmpty String + makeComment maker space args = + let makePrefix :: NonEmpty String -> NonEmpty String + makePrefix s = (s <> [""]) <++> [stackProgName] + in (maker <$> (makePrefix space <&> (++ args))) <++> postComment + + lineSpace :: NonEmpty String + lineSpace = [" "] <> ["\t"] + + lineComment :: String -> NonEmpty String + lineComment = makeComment makeLine lineSpace + where + makeLine s = "--" ++ s + + literateLineComment :: String -> NonEmpty String + literateLineComment = makeComment ("> --" ++) lineSpace + + blockSpace :: NonEmpty String + blockSpace = lineSpace <> newLine + + blockComment :: String -> NonEmpty String + blockComment = makeComment makeBlock blockSpace + where + makeBlock s = "{-" ++ s ++ "-}" + + literateBlockComment :: String -> NonEmpty String + literateBlockComment = makeComment + (\s -> "> {-" ++ s ++ "-}") + (lineSpace <> NE.map (++ ">") newLine) diff --git a/tests/unit/Stack/Build/ExecuteSpec.hs b/tests/unit/Stack/Build/ExecuteSpec.hs new file mode 100644 index 0000000000..7b6a53b1c2 --- /dev/null +++ b/tests/unit/Stack/Build/ExecuteSpec.hs @@ -0,0 +1,15 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +module Stack.Build.ExecuteSpec + ( main + , spec + ) where + +import Stack.Prelude +import Test.Hspec + +main :: IO () +main = hspec spec + +spec :: Spec +spec = pure () diff --git a/src/test/Stack/Build/TargetSpec.hs b/tests/unit/Stack/Build/TargetSpec.hs similarity index 95% rename from src/test/Stack/Build/TargetSpec.hs rename to tests/unit/Stack/Build/TargetSpec.hs index 90bdc9cb0c..e199a72a02 100644 --- a/src/test/Stack/Build/TargetSpec.hs +++ 
b/tests/unit/Stack/Build/TargetSpec.hs @@ -1,6 +1,10 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -module Stack.Build.TargetSpec (main, spec) where + +module Stack.Build.TargetSpec + ( main + , spec + ) where import qualified Data.Text as T import Distribution.Types.PackageName (mkPackageName) diff --git a/tests/unit/Stack/Config/DockerSpec.hs b/tests/unit/Stack/Config/DockerSpec.hs new file mode 100644 index 0000000000..25b8d26c18 --- /dev/null +++ b/tests/unit/Stack/Config/DockerSpec.hs @@ -0,0 +1,29 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +module Stack.Config.DockerSpec + ( spec + ) where + +import RIO.Time ( fromGregorian ) +import Stack.Config.Docker ( addDefaultTag ) +import Stack.Prelude +import Stack.Types.Snapshot ( AbstractSnapshot (..) ) +import Test.Hspec ( Spec, describe, it, shouldBe ) + +spec :: Spec +spec = do + describe "addDefaultTag" $ do + it "succeeds fails no snapshot" $ addDefaultTag "foo/bar" Nothing Nothing `shouldBe` Nothing + it "succeeds on LTS" $ + addDefaultTag + "foo/bar" + Nothing + (Just $ ASSnapshot $ RSLSynonym $ LTS 1 2) + `shouldBe` Just "foo/bar:lts-1.2" + it "fails on nightly" $ + addDefaultTag + "foo/bar" + Nothing + (Just $ ASSnapshot $ RSLSynonym $ Nightly $ fromGregorian 2018 1 1) + `shouldBe` Nothing diff --git a/tests/unit/Stack/ConfigSpec.hs b/tests/unit/Stack/ConfigSpec.hs new file mode 100644 index 0000000000..9bfe980c9b --- /dev/null +++ b/tests/unit/Stack/ConfigSpec.hs @@ -0,0 +1,336 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +module Stack.ConfigSpec + ( sampleConfig + , buildOptsConfig + , hpackConfig + , resolverConfig + , snapshotConfig + , resolverSnapshotConfig + , stackDotYaml + , setup + , noException + , spec + ) where + +import Control.Arrow ( left ) +import Data.Aeson.WarningParser ( WithJSONWarnings ) +import Data.Yaml ( 
decodeEither', parseEither ) +import Distribution.Verbosity ( verbose ) +import Pantry.Internal.Stackage ( pcHpackExecutable ) +import Path ( (), parent, parseAbsDir, parseRelDir, parseRelFile ) +import Path.IO ( getCurrentDir ) +import Stack.Config (defaultConfigYaml, loadConfig, loadConfigYaml ) +import Stack.Options.GlobalParser ( globalOptsFromMonoid ) +import Stack.Prelude +import Stack.Runners ( withBuildConfig, withRunnerGlobal ) +import Stack.Types.BuildConfig ( BuildConfig (..), configFileRootL ) +import Stack.Types.BuildOpts + ( BenchmarkOpts (..), BuildOpts (..), HaddockOpts (..) + , TestOpts (..) + ) +import Stack.Types.BuildOptsMonoid ( CabalVerbosity (..), ProgressBarFormat (NoBar) ) +import Stack.Types.Config ( Config (..) ) +import Stack.Types.ConfigMonoid + ( ConfigMonoid (..), parseConfigMonoid ) +import Stack.Types.GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.Project ( Project (..) ) +import Stack.Types.ProjectAndConfigMonoid + ( ProjectAndConfigMonoid (..), parseProjectAndConfigMonoid ) +import System.Directory + ( createDirectory, createDirectoryIfMissing + , getCurrentDirectory, setCurrentDirectory + ) +import System.Environment ( lookupEnv, setEnv, unsetEnv ) +import System.IO ( writeFile ) +import Test.Hspec + ( Selector, Spec, anyException, beforeAll, describe, example + , it, shouldBe, shouldThrow + ) + +sampleConfig :: String +sampleConfig = + "snapshot: lts-24.37\n" ++ + "packages: ['.']\n" + +buildOptsConfig :: String +buildOptsConfig = + "snapshot: lts-24.37\n" ++ + "packages: ['.']\n" ++ + "build:\n" ++ + " semaphore: true\n" ++ + " library-profiling: true\n" ++ + " executable-profiling: true\n" ++ + " library-stripping: false\n" ++ + " executable-stripping: false\n" ++ + " haddock: true\n" ++ + " haddock-arguments:\n" ++ + " haddock-args:\n" ++ + " - \"--css=/home/user/my-css\"\n" ++ + " open-haddocks: true\n" ++ + " haddock-deps: true\n" ++ + " haddock-executables: true\n" ++ + " haddock-tests: true\n" ++ + " 
haddock-benchmarks: true\n" ++ + " haddock-internal: true\n" ++ + " haddock-hyperlink-source: false\n" ++ + " haddock-for-hackage: false\n" ++ + " copy-bins: true\n" ++ + " copy-compiler-tool: true\n" ++ + " prefetch: true\n" ++ + " keep-going: true\n" ++ + " keep-tmp-files: true\n" ++ + " force-dirty: true\n" ++ + " test: true\n" ++ + " test-arguments:\n" ++ + " rerun-tests: true\n" ++ + " additional-args: ['-fprof']\n" ++ + " coverage: true\n" ++ + " no-run-tests: true\n" ++ + " test-suite-timeout-grace: 30\n" ++ + " bench: true\n" ++ + " benchmark-opts:\n" ++ + " benchmark-arguments: -O2\n" ++ + " no-run-benchmarks: true\n" ++ + " reconfigure: true\n" ++ + " cabal-verbosity: verbose\n" ++ + " cabal-verbose: true\n" ++ + " split-objs: true\n" ++ + " skip-components: ['my-test']\n" ++ + " interleaved-output: false\n" ++ + " progress-bar: none\n" ++ + " ddump-dir: my-ddump-dir\n" + +buildOptsHaddockForHackageConfig :: String +buildOptsHaddockForHackageConfig = + "snapshot: lts-24.37\n" ++ + "packages: ['.']\n" ++ + "build:\n" ++ + " haddock: true\n" ++ + " open-haddocks: true\n" ++ + " haddock-deps: true\n" ++ + " haddock-executables: true\n" ++ + " haddock-tests: true\n" ++ + " haddock-benchmarks: true\n" ++ + " haddock-internal: true\n" ++ + " haddock-hyperlink-source: false\n" ++ + " haddock-for-hackage: true\n" ++ + " force-dirty: false\n" + +hpackConfig :: String +hpackConfig = + "snapshot: lts-24.37\n" ++ + "with-hpack: /usr/local/bin/hpack\n" ++ + "packages: ['.']\n" + +resolverConfig :: String +resolverConfig = + "resolver: lts-24.37\n" ++ + "packages: ['.']\n" + +snapshotConfig :: String +snapshotConfig = + "snapshot: lts-24.37\n" ++ + "packages: ['.']\n" + +resolverSnapshotConfig :: String +resolverSnapshotConfig = + "resolver: lts-24.37\n" ++ + "snapshot: lts-24.37\n" ++ + "packages: ['.']\n" + +stackDotYaml :: Path Rel File +stackDotYaml = either impureThrow id (parseRelFile "stack.yaml") + +setup :: IO () +setup = unsetEnv "STACK_YAML" + +noException 
:: Selector SomeException +noException = const False + +spec :: Spec +spec = beforeAll setup $ do + let logLevel = LevelOther "silent" + -- TODO(danburton): not use inTempDir + let inTempDir action = do + currentDirectory <- getCurrentDirectory + withSystemTempDirectory "Stack_ConfigSpec" $ \tempDir -> do + let enterDir = setCurrentDirectory tempDir + let exitDir = setCurrentDirectory currentDirectory + bracket_ enterDir exitDir action + -- TODO(danburton): a safer version of this? + let withEnvVar name newValue action = do + originalValue <- fromMaybe "" <$> lookupEnv name + let setVar = setEnv name newValue + let resetVar = setEnv name originalValue + bracket_ setVar resetVar action + + describe "parseProjectAndConfigMonoid" $ do + let loadProject' fp inner = do + globalOpts <- globalOptsFromMonoid "" Nothing False mempty + withRunnerGlobal globalOpts { logLevel = logLevel } $ do + iopc <- loadConfigYaml ( + parseProjectAndConfigMonoid (parent fp) + ) fp + ProjectAndConfigMonoid project _ <- liftIO iopc + liftIO $ inner project + + toAbsPath path = do + parentDir <- getCurrentDirectory >>= parseAbsDir + pure (parentDir path) + + loadProject config inner = do + yamlAbs <- toAbsPath stackDotYaml + writeFile (toFilePath yamlAbs) config + loadProject' yamlAbs inner + + it "parses snapshot using 'resolver'" $ inTempDir $ do + loadProject resolverConfig $ \project -> + project.snapshot `shouldBe` RSLSynonym (LTS 24 37) + + it "parses snapshot using 'snapshot'" $ inTempDir $ do + loadProject snapshotConfig $ \project -> + project.snapshot `shouldBe` RSLSynonym (LTS 24 37) + + it "throws if both 'resolver' and 'snapshot' are present" $ inTempDir $ do + loadProject resolverSnapshotConfig (const (pure ())) + `shouldThrow` anyException + + describe "loadConfig" $ do + let loadConfig' inner = do + globalOpts <- globalOptsFromMonoid "" Nothing False mempty + withRunnerGlobal globalOpts { logLevel = logLevel } $ + loadConfig inner + -- TODO(danburton): make sure parent dirs 
also don't have config file + it "works even if no config file exists" $ example $ + loadConfig' $ const $ pure () + + it "works with a blank config file" $ inTempDir $ do + writeFile (toFilePath stackDotYaml) "" + -- TODO(danburton): more specific test for exception + loadConfig' (const (pure ())) `shouldThrow` anyException + + let configOverrideHpack = pcHpackExecutable . view pantryConfigL + + it "parses config option with-hpack" $ inTempDir $ do + writeFile (toFilePath stackDotYaml) hpackConfig + loadConfig' $ \config -> + liftIO $ configOverrideHpack config `shouldBe` + HpackCommand "/usr/local/bin/hpack" + + it "parses config bundled Hpack" $ inTempDir $ do + writeFile (toFilePath stackDotYaml) sampleConfig + loadConfig' $ \config -> + liftIO $ configOverrideHpack config `shouldBe` HpackBundled + + it "parses build config options" $ inTempDir $ do + writeFile (toFilePath stackDotYaml) buildOptsConfig + loadConfig' $ \config -> liftIO $ do + let bopts = config.build + bopts.semaphore `shouldBe` True + bopts.libProfile `shouldBe` True + bopts.exeProfile `shouldBe` True + bopts.libStrip `shouldBe` False + bopts.exeStrip `shouldBe` False + bopts.buildHaddocks `shouldBe` True + bopts.haddockOpts `shouldBe` HaddockOpts + { additionalArgs = ["--css=/home/user/my-css"] + } + bopts.openHaddocks `shouldBe` True + bopts.haddockDeps `shouldBe` Just True + bopts.haddockExecutables `shouldBe` True + bopts.haddockTests `shouldBe` True + bopts.haddockBenchmarks `shouldBe` True + bopts.haddockInternal `shouldBe` True + bopts.haddockHyperlinkSource `shouldBe` False + bopts.haddockForHackage `shouldBe` False + bopts.installExes `shouldBe` True + bopts.installCompilerTool `shouldBe` True + bopts.preFetch `shouldBe` True + bopts.keepGoing `shouldBe` Just True + bopts.keepTmpFiles `shouldBe` True + bopts.forceDirty `shouldBe` True + bopts.tests `shouldBe` True + bopts.testOpts `shouldBe` TestOpts + { rerunTests = True + , additionalArgs = ["-fprof"] + , coverage = True + , 
runTests = False + , maximumTimeSeconds = Nothing + , timeoutGraceSeconds = Just 30 + , allowStdin = True + } + bopts.benchmarks `shouldBe` True + bopts.benchmarkOpts `shouldBe` BenchmarkOpts + { additionalArgs = Just "-O2" + , runBenchmarks = False + } + bopts.reconfigure `shouldBe` True + bopts.cabalVerbose `shouldBe` CabalVerbosity verbose + bopts.splitObjs `shouldBe` True + bopts.skipComponents `shouldBe` ["my-test"] + bopts.interleavedOutput `shouldBe` False + bopts.progressBar `shouldBe` NoBar + bopts.ddumpDir `shouldBe` Just "my-ddump-dir" + + it "parses build config options with haddock-for-hackage" $ inTempDir $ do + writeFile (toFilePath stackDotYaml) buildOptsHaddockForHackageConfig + loadConfig' $ \config -> liftIO $ do + let bopts = config.build + bopts.buildHaddocks `shouldBe` True + bopts.openHaddocks `shouldBe` False + bopts.haddockDeps `shouldBe` Nothing + bopts.haddockInternal `shouldBe` False + bopts.haddockHyperlinkSource `shouldBe` True + bopts.haddockForHackage `shouldBe` True + bopts.forceDirty `shouldBe` True + + it "finds the config file in a parent directory" $ inTempDir $ do + writeFile "package.yaml" "name: foo" + writeFile (toFilePath stackDotYaml) sampleConfig + parentDir <- getCurrentDirectory >>= parseAbsDir + let childDir = "child" + createDirectory childDir + setCurrentDirectory childDir + loadConfig' $ \config -> liftIO $ do + bc <- runRIO config $ withBuildConfig ask + view configFileRootL bc `shouldBe` parentDir + + it "respects the STACK_YAML env variable" $ inTempDir $ do + withSystemTempDir "config-is-here" $ \dir -> do + let stackYamlFp = toFilePath (dir stackDotYaml) + writeFile stackYamlFp sampleConfig + writeFile (toFilePath dir ++ "/package.yaml") "name: foo" + withEnvVar "STACK_YAML" stackYamlFp $ + loadConfig' $ \config -> liftIO $ do + bc <- runRIO config $ withBuildConfig ask + bc.configFile `shouldBe` Right (dir stackDotYaml) + + it "STACK_YAML can be relative" $ inTempDir $ do + parentDir <- getCurrentDirectory >>= 
parseAbsDir + let childRel = either impureThrow id (parseRelDir "child") + yamlRel = + childRel either impureThrow id (parseRelFile "some-other-name.config") + yamlAbs = parentDir yamlRel + packageYaml = + childRel either impureThrow id (parseRelFile "package.yaml") + createDirectoryIfMissing True $ toFilePath $ parent yamlAbs + writeFile (toFilePath yamlAbs) "snapshot: ghc-9.10.3" + writeFile (toFilePath packageYaml) "name: foo" + withEnvVar "STACK_YAML" (toFilePath yamlRel) $ + loadConfig' $ \config -> liftIO $ do + bc <- runRIO config $ withBuildConfig ask + bc.configFile `shouldBe` Right yamlAbs + + describe "defaultConfigYaml" $ + it "is parseable" $ \_ -> do + curDir <- getCurrentDir + let parsed :: Either String (Either String (WithJSONWarnings ConfigMonoid)) + parsed = parseEither + (parseConfigMonoid curDir) <$> left show (decodeEither' defaultConfigYaml) + case parsed of + Right (Right _) -> pure () :: IO () + _ -> fail "Failed to parse default config yaml" diff --git a/tests/unit/Stack/DotSpec.hs b/tests/unit/Stack/DotSpec.hs new file mode 100644 index 0000000000..ceefbf9d02 --- /dev/null +++ b/tests/unit/Stack/DotSpec.hs @@ -0,0 +1,135 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +-- | Test suite for Stack.Dot +module Stack.DotSpec + ( dummyPayload + , spec + , sublistOf + , pkgName + , stubLoader + ) where + +import Data.List ((\\)) +import qualified Data.Map as Map +import qualified Data.Set as Set +import Distribution.License ( License (BSD3) ) +import qualified RIO.Text as T +import Stack.DependencyGraph ( pruneGraph, resolveDependencies ) +import Stack.Prelude hiding ( pkgName ) +import Stack.Types.DependencyTree ( DotPayload (..) 
) +import Test.Hspec ( Spec, describe, it, shouldBe ) +import Test.Hspec.QuickCheck ( prop ) +import Test.QuickCheck ( Gen, choose, forAll ) + +dummyPayload :: DotPayload +dummyPayload = DotPayload (parseVersion "0.0.0.0") (Just (Right BSD3)) Nothing + +spec :: Spec +spec = do + let graph = + Map.mapKeys pkgName + . fmap (\p -> (Set.map pkgName p, dummyPayload)) + . Map.fromList $ [("one",Set.fromList ["base","free"]) + ,("two",Set.fromList ["base","free","mtl","transformers","one"]) + ] + describe "Stack.Dot" $ do + it "does nothing if depth is 0" $ + resolveDependencies (Just 0) graph stubLoader `shouldBe` pure graph + + it "with depth 1, more dependencies are resolved" $ do + let graph' = Map.insert (pkgName "cycle") + (Set.singleton (pkgName "cycle"), dummyPayload) + graph + resultGraph = runIdentity (resolveDependencies (Just 0) graph stubLoader) + resultGraph' = runIdentity (resolveDependencies (Just 1) graph' stubLoader) + Map.size resultGraph < Map.size resultGraph' `shouldBe` True + + it "cycles are ignored" $ do + let graph' = Map.insert (pkgName "cycle") + (Set.singleton (pkgName "cycle"), dummyPayload) + graph + resultGraph = resolveDependencies Nothing graph stubLoader + resultGraph' = resolveDependencies Nothing graph' stubLoader + fmap Map.size resultGraph' `shouldBe` fmap ((+1) . Map.size) resultGraph + + let graphElem e = Set.member e . Set.unions . 
Map.elems + + prop "requested packages are pruned" $ do + let resolvedGraph = runIdentity (resolveDependencies Nothing graph stubLoader) + allPackages g = Map.keysSet g `Set.union` foldMap fst g + forAll (sublistOf (Set.toList (allPackages resolvedGraph))) $ \toPrune -> + let pruned = pruneGraph [pkgName "one", pkgName "two"] toPrune resolvedGraph + in Set.null (allPackages pruned `Set.intersection` Set.fromList toPrune) + + prop "pruning removes orphans" $ do + let resolvedGraph = runIdentity (resolveDependencies Nothing graph stubLoader) + allPackages g = Map.keysSet g `Set.union` foldMap fst g + orphans g = Map.filterWithKey (\k _ -> not (graphElem k g)) g + forAll (sublistOf (Set.toList (allPackages resolvedGraph))) $ \toPrune -> + let pruned = pruneGraph [pkgName "one", pkgName "two"] toPrune resolvedGraph + in null (Map.keys (orphans (fmap fst pruned)) \\ [pkgName "one", pkgName "two"]) + +{- Helper functions below -} +-- Backport from QuickCheck 2.8 to 2.7.6 +sublistOf :: [a] -> Gen [a] +sublistOf = filterM (\_ -> choose (False, True)) + +-- Unsafe internal helper to create a package name +pkgName :: Text -> PackageName +pkgName = fromMaybe failure . parsePackageName . T.unpack + where + failure = error "Internal error during package name creation in DotSpec.pkgName" + +-- Stub, simulates the function to load package dependencies +stubLoader :: PackageName -> Identity (Set PackageName, DotPayload) +stubLoader name = pure $ (, dummyPayload) . Set.fromList . 
map pkgName $ + case show name of + "StateVar" -> ["stm", "transformers"] + "array" -> [] + "bifunctors" -> ["semigroupoids", "semigroups", "tagged"] + "binary" -> ["array", "bytestring", "containers"] + "bytestring" -> ["deepseq", "ghc-prim", "integer-gmp"] + "comonad" -> [ "containers", "contravariant", "distributive", "semigroups" + , "tagged","transformers","transformers-compat" + ] + "cont" -> [ "StateVar", "semigroups", "transformers", "transformers-compat" + , "void" + ] + "containers" -> ["array", "deepseq", "ghc-prim"] + "deepseq" -> ["array"] + "distributive" -> [ "ghc-prim", "tagged", "transformers" + , "transformers-compat" + ] + "free" -> [ "bifunctors", "comonad", "distributive", "mtl", "prelude-extras" + , "profunctors", "semigroupoids", "semigroups", "template-haskell" + , "transformers" + ] + "ghc" -> [] + "hashable" -> ["bytestring", "ghc-prim", "integer-gmp", "text"] + "integer" -> [] + "mtl" -> ["transformers"] + "nats" -> [] + "one" -> ["free"] + "prelude" -> [] + "profunctors" -> [ "comonad", "distributive", "semigroupoids", "tagged" + , "transformers" + ] + "semigroupoids" -> [ "comonad", "containers", "contravariant" + , "distributive", "semigroups", "transformers" + , "transformers-compat" + ] + "semigroups" -> [ "bytestring", "containers", "deepseq", "hashable", "nats" + , "text", "unordered-containers" + ] + "stm" -> ["array"] + "tagged" -> ["template-haskell"] + "template" -> [] + "text" -> [ "array", "binary", "bytestring", "deepseq", "ghc-prim" + , "integer-gmp" + ] + "transformers" -> [] + "two" -> ["free", "mtl", "one", "transformers"] + "unordered" -> ["deepseq", "hashable"] + "void" -> ["ghc-prim", "hashable", "semigroups"] + _ -> [] diff --git a/tests/unit/Stack/Ghci/ScriptSpec.hs b/tests/unit/Stack/Ghci/ScriptSpec.hs new file mode 100644 index 0000000000..8515f38fff --- /dev/null +++ b/tests/unit/Stack/Ghci/ScriptSpec.hs @@ -0,0 +1,56 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE 
OverloadedLists #-} +{-# LANGUAGE TemplateHaskell #-} + +-- | Test suite for the GhciScript DSL +module Stack.Ghci.ScriptSpec + ( spec + ) where + +import qualified Data.Set as S +import Distribution.ModuleName +import Path +import Path.Extra ( pathToLazyByteString ) +import Stack.Ghci.FakePaths +import Stack.Ghci.Script +import Stack.Prelude hiding ( fromString ) +import qualified System.FilePath as FP +import Test.Hspec + +spec :: Spec +spec = do + describe "GHCi" $ do + describe "Script DSL" $ do + + describe "script" $ do + it "should separate commands with a newline" $ do + let script = cmdAdd [Left (fromString "Lib.A")] + <> cmdAdd [Left (fromString "Lib.B")] + scriptToLazyByteString script `shouldBe` + ":add Lib.A\n:add Lib.B\n" + + describe ":add" $ do + it "should not render empty add commands" $ do + let script = cmdAdd S.empty + scriptToLazyByteString script `shouldBe` "" + + it "should ensure that a space exists between each module in an add command" $ do + let script = cmdAdd (S.fromList [Left (fromString "Lib.A"), Left (fromString "Lib.B")]) + scriptToLazyByteString script `shouldBe` ":add Lib.A Lib.B\n" + + describe ":add (by file)" $ do + it "should render a full file path" $ do + let file = $(mkAbsFile $ defaultDrive FP. "Users" FP. "someone" FP. "src" FP. "project" FP. "package-a" FP. "src" FP. 
"Main.hs") + script = cmdAdd (S.fromList [Right file]) + scriptToLazyByteString script `shouldBe` + ":add " <> pathToLazyByteString file <> "\n" + + describe ":module" $ do + it "should render empty module as ':module +'" $ do + let script = cmdModule [] + scriptToLazyByteString script `shouldBe` ":module +\n" + + it "should ensure that a space exists between each module in a module command" $ do + let script = cmdModule [fromString "Lib.A", fromString "Lib.B"] + scriptToLazyByteString script `shouldBe` ":module + Lib.A Lib.B\n" diff --git a/src/test/Stack/GhciSpec.hs b/tests/unit/Stack/GhciSpec.hs similarity index 98% rename from src/test/Stack/GhciSpec.hs rename to tests/unit/Stack/GhciSpec.hs index 4fd58cbf90..021740dc97 100644 --- a/src/test/Stack/GhciSpec.hs +++ b/tests/unit/Stack/GhciSpec.hs @@ -2,13 +2,15 @@ -- {-# LANGUAGE QuasiQuotes #-} -- {-# LANGUAGE TemplateHaskell #-} --- | Test suite for GHCi like applications including both GHCi and Intero. -module Stack.GhciSpec where +-- | Test suite for GHCi-like applications including GHCi. +module Stack.GhciSpec + ( spec + ) where -import Test.Hspec +import Test.Hspec ( Spec ) spec :: Spec -spec = return () +spec = pure () {- Commented out as part of the fix for https://github.com/commercialhaskell/stack/issues/3309 Not sure if maintaining this test is worth the effort. 
@@ -107,7 +109,7 @@ spec = do let res = scriptToLazyByteString $ renderScriptIntero packages_multiplePackages Nothing [] res `shouldBeLE` interoScript_multipleProjectsWithLib --- Exptected Intero scripts +-- Expected Intero scripts interoScript_projectWithLib :: Text interoScript_projectWithLib = [text| diff --git a/src/test/Stack/LockSpec.hs b/tests/unit/Stack/LockSpec.hs similarity index 78% rename from src/test/Stack/LockSpec.hs rename to tests/unit/Stack/LockSpec.hs index b6c0f8498c..af08e43ada 100644 --- a/src/test/Stack/LockSpec.hs +++ b/tests/unit/Stack/LockSpec.hs @@ -1,19 +1,30 @@ -{-# LANGUAGE QuasiQuotes #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} -module Stack.LockSpec where +module Stack.LockSpec + ( toBlobKey + , decodeSHA + , decodeLocked + , spec + ) where -import Pantry.Internal.AesonExtended (WithJSONWarnings(..)) +import Data.Aeson.WarningParser ( WithJSONWarnings (..) ) import qualified Data.Yaml as Yaml -import Distribution.Types.PackageName (mkPackageName) -import Distribution.Types.Version (mkVersion) -import Pantry +import Distribution.Types.PackageName ( mkPackageName ) +import Distribution.Types.Version ( mkVersion ) +import Pantry + ( BlobKey (..), FileSize (..), PackageIdentifier (..) + , PackageLocationImmutable (..), PackageMetadata (..) + , RawPackageLocationImmutable (..), RawPackageMetadata (..) + , Repo (..), RepoType (..), SHA256, TreeKey (..) + , resolvePaths + ) import qualified Pantry.SHA256 as SHA256 -import RIO -import Stack.Lock -import Test.Hspec -import Text.RawString.QQ +import RIO ( ByteString, displayException, throwIO, unless ) +import Stack.Lock ( Locked (..), LockedLocation (..) 
) +import Test.Hspec ( Spec, it, shouldBe ) +import Text.RawString.QQ ( r ) toBlobKey :: ByteString -> Word -> BlobKey toBlobKey string size = BlobKey (decodeSHA string) (FileSize size) @@ -22,7 +33,7 @@ decodeSHA :: ByteString -> SHA256 decodeSHA string = case SHA256.fromHexBytes string of Right csha -> csha - Left err -> error $ "Failed decoding. Error: " <> show err + Left err -> error $ "Failed decoding. Error: " <> displayException err decodeLocked :: ByteString -> IO Locked decodeLocked bs = do @@ -37,7 +48,7 @@ decodeLocked bs = do spec :: Spec spec = do - it "parses lock file (empty with GHC resolver)" $ do + it "parses lock file (empty with GHC snapshot)" $ do let lockFile :: ByteString lockFile = [r|#some @@ -48,9 +59,9 @@ snapshots: compiler: ghc-8.6.5 packages: [] |] - pkgImm <- lckPkgImmutableLocations <$> decodeLocked lockFile + pkgImm <- (.pkgImmutableLocations) <$> decodeLocked lockFile pkgImm `shouldBe` [] - it "parses lock file (empty with LTS resolver)" $ do + it "parses lock file (empty with LTS snapshot)" $ do let lockFile :: ByteString lockFile = [r|#some @@ -66,7 +77,7 @@ snapshots: compiler: ghc-8.6.5 packages: [] |] - pkgImm <- lckPkgImmutableLocations <$> decodeLocked lockFile + pkgImm <- (.pkgImmutableLocations) <$> decodeLocked lockFile pkgImm `shouldBe` [] it "parses lock file (LTS, wai + warp)" $ do let lockFile :: ByteString @@ -110,7 +121,7 @@ packages: sha256: f808e075811b002563d24c393ce115be826bb66a317d38da22c513ee42b7443a commit: d11d63f1a6a92db8c637a8d33e7953ce6194a3e0 |] - pkgImm <- lckPkgImmutableLocations <$> decodeLocked lockFile + pkgImm <- (.pkgImmutableLocations) <$> decodeLocked lockFile let waiSubdirRepo subdir = Repo { repoType = RepoGit , repoUrl = "https://github.com/yesodweb/wai.git" diff --git a/tests/unit/Stack/NixSpec.hs b/tests/unit/Stack/NixSpec.hs new file mode 100644 index 0000000000..9a317f6139 --- /dev/null +++ b/tests/unit/Stack/NixSpec.hs @@ -0,0 +1,116 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE 
DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +module Stack.NixSpec + ( sampleConfigNixEnabled + , sampleConfigNixDisabled + , setup + , spec + ) where + +import Data.Maybe ( fromJust ) +import Options.Applicative + ( defaultPrefs, execParserPure, getParseResult, info ) +import Prelude ( writeFile ) +import Stack.Config ( loadConfig ) +import Stack.Config.Nix ( nixCompiler ) +import Stack.Constants ( osIsWindows, stackDotYaml ) +import Stack.Options.GlobalParser ( globalOptsFromMonoid ) +import Stack.Options.NixParser ( nixOptsParser ) +import Stack.Prelude +import Stack.Runners ( withRunnerGlobal ) +import Stack.Types.Config ( Config (..) ) +import Stack.Types.ConfigMonoid ( ConfigMonoid (..) ) +import qualified Stack.Types.GlobalOpts as GlobalOpts ( GlobalOpts (..) ) +import Stack.Types.GlobalOptsMonoid ( GlobalOptsMonoid (..) ) +import Stack.Types.Nix ( NixOpts (..) ) +import System.Directory ( getCurrentDirectory, setCurrentDirectory ) +import System.Environment ( unsetEnv ) +import Test.Hspec ( Spec, around_, beforeAll, describe, it, shouldBe ) + +sampleConfigNixEnabled :: String +sampleConfigNixEnabled = + "snapshot: lts-19.22\n" ++ + "packages: ['.']\n" ++ + "system-ghc: true\n" ++ + "nix:\n" ++ + " enable: True\n" ++ + " packages: [glpk]" + +sampleConfigNixDisabled :: String +sampleConfigNixDisabled = + "snapshot: lts-19.22\n" ++ + "packages: ['.']\n" ++ + "nix:\n" ++ + " enable: False" + +setup :: IO () +setup = unsetEnv "STACK_YAML" + +spec :: Spec +spec = beforeAll setup $ do + let loadConfig' :: ConfigMonoid -> (Config -> IO ()) -> IO () + loadConfig' cmdLineArgs inner = do + globalOpts <- + globalOptsFromMonoid "" Nothing False mempty { configMonoid = cmdLineArgs } + withRunnerGlobal globalOpts { GlobalOpts.logLevel = LevelOther "silent" } $ + loadConfig (liftIO . 
inner) + inTempDir test = do + currentDirectory <- getCurrentDirectory + withSystemTempDirectory "Stack_ConfigSpec" $ \tempDir -> do + let enterDir = setCurrentDirectory tempDir + exitDir = setCurrentDirectory currentDirectory + bracket_ enterDir exitDir test + withStackDotYaml config test = inTempDir $ do + writeFile (toFilePath stackDotYaml) config + test + parseNixOpts cmdLineOpts = fromJust $ getParseResult $ execParserPure + defaultPrefs + (info (nixOptsParser False) mempty) + cmdLineOpts + parseOpts cmdLineOpts = mempty { nixOpts = parseNixOpts cmdLineOpts } + let trueOnNonWindows = not osIsWindows + describe "nix disabled in config file" $ + around_ (withStackDotYaml sampleConfigNixDisabled) $ do + it "sees that the nix shell is not enabled" $ loadConfig' mempty $ \config -> + config.nix.enable `shouldBe` False + describe "--nix given on command line" $ + it "sees that the nix shell is enabled" $ + loadConfig' (parseOpts ["--nix"]) $ \config -> + config.nix.enable `shouldBe` trueOnNonWindows + describe "--nix-pure given on command line" $ + it "sees that the nix shell is enabled" $ + loadConfig' (parseOpts ["--nix-pure"]) $ \config -> + config.nix.enable `shouldBe` trueOnNonWindows + describe "--no-nix given on command line" $ + it "sees that the nix shell is not enabled" $ + loadConfig' (parseOpts ["--no-nix"]) $ \config -> + config.nix.enable `shouldBe` False + describe "--no-nix-pure given on command line" $ + it "sees that the nix shell is not enabled" $ + loadConfig' (parseOpts ["--no-nix-pure"]) $ \config -> + config.nix.enable `shouldBe` False + describe "nix enabled in config file" $ + around_ (withStackDotYaml sampleConfigNixEnabled) $ do + it "sees that the nix shell is enabled" $ + loadConfig' mempty $ \config -> + config.nix.enable `shouldBe` trueOnNonWindows + describe "--no-nix given on command line" $ + it "sees that the nix shell is not enabled" $ + loadConfig' (parseOpts ["--no-nix"]) $ \config -> + config.nix.enable `shouldBe` False + 
describe "--nix-pure given on command line" $ + it "sees that the nix shell is enabled" $ + loadConfig' (parseOpts ["--nix-pure"]) $ \config -> + config.nix.enable `shouldBe` trueOnNonWindows + describe "--no-nix-pure given on command line" $ + it "sees that the nix shell is enabled" $ + loadConfig' (parseOpts ["--no-nix-pure"]) $ \config -> + config.nix.enable `shouldBe` trueOnNonWindows + it "sees that the only package asked for is glpk and asks for the correct GHC derivation" $ loadConfig' mempty $ \config -> do + config.nix.packages `shouldBe` ["glpk"] + v <- parseVersionThrowing "9.0.2" + ghc <- either throwIO pure $ nixCompiler (WCGhc v) + ghc `shouldBe` "haskell.compiler.ghc902" diff --git a/tests/unit/Stack/PackageDumpSpec.hs b/tests/unit/Stack/PackageDumpSpec.hs new file mode 100644 index 0000000000..80b5c1c386 --- /dev/null +++ b/tests/unit/Stack/PackageDumpSpec.hs @@ -0,0 +1,284 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +module Stack.PackageDumpSpec + ( main + , spec + , bestPrune + , checkDepsPresent + , runEnvNoLogging + ) where + +import Conduit ( withSourceFile, yield ) +import qualified Data.Conduit.List as CL +import Data.Conduit.Text ( decodeUtf8 ) +import qualified Data.Map as Map +import qualified Data.Set as Set +import Distribution.License ( License (..) ) +import Distribution.Types.PackageName ( mkPackageName ) +import Distribution.Version ( mkVersion ) +import Path ( parseAbsFile ) +import RIO.PrettyPrint.Simple ( SimplePrettyApp, mkSimplePrettyApp ) +import RIO.Process + ( envVarsL, findExecutable, mkDefaultProcessContext + , mkProcessContext + ) +import Stack.PackageDump + ( DumpPackage (..), conduitDumpPackage, eachPair + , eachSection, ghcPkgDump, pruneDeps, sinkMatching + ) +import Stack.Prelude +import Stack.Types.CompilerPaths ( GhcPkgExe (..) 
) +import Stack.Types.GhcPkgId ( parseGhcPkgId ) +import Test.Hspec + ( Spec, describe, hspec, it, shouldBe ) +import Test.Hspec.QuickCheck ( prop ) + +main :: IO () +main = hspec spec + +spec :: Spec +spec = do + describe "eachSection" $ do + let test name content expected = it name $ do + actual <- runConduit $ yield content .| eachSection CL.consume .| CL.consume + actual `shouldBe` expected + test + "unix line endings" + "foo\nbar\n---\nbaz---\nbin\n---\n" + [ ["foo", "bar"] + , ["baz---", "bin"] + ] + test + "windows line endings" + "foo\r\nbar\r\n---\r\nbaz---\r\nbin\r\n---\r\n" + [ ["foo", "bar"] + , ["baz---", "bin"] + ] + + it "eachPair" $ do + let bss = + [ "key1: val1" + , "key2: val2a" + , " val2b" + , "key3:" + , "key4:" + , " val4a" + , " val4b" + ] + sink k = fmap (k, ) CL.consume + actual <- runConduit $ mapM_ yield bss .| eachPair sink .| CL.consume + actual `shouldBe` + [ ("key1", ["val1"]) + , ("key2", ["val2a", "val2b"]) + , ("key3", []) + , ("key4", ["val4a", "val4b"]) + ] + + describe "conduitDumpPackage" $ do + it "ghc 7.8" $ do + haskell2010:_ <- + withSourceFile "tests/unit/package-dump/ghc-7.8.txt" $ \src -> + runConduit + $ src + .| decodeUtf8 + .| conduitDumpPackage + .| CL.consume + ghcPkgId <- parseGhcPkgId "haskell2010-1.1.2.0-05c8dd51009e08c6371c82972d40f55a" + packageIdent <- maybe (fail "Not parsable package id") pure $ + parsePackageIdentifier "haskell2010-1.1.2.0" + depends <- mapM parseGhcPkgId + [ "array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b" + , "base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1" + , "ghc-prim-0.3.1.0-a24f9c14c632d75b683d0f93283aea37" + ] + haskell2010 { exposedModules = mempty } `shouldBe` DumpPackage + { ghcPkgId = ghcPkgId + , packageIdent = packageIdent + , sublib = Nothing + , license = Just BSD3 + , libDirs = ["/opt/ghc/7.8.4/lib/ghc-7.8.4/haskell2010-1.1.2.0"] + , depends = depends + , libraries = ["HShaskell2010-1.1.2.0"] + , hasExposedModules = True + , haddockInterfaces = 
["/opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell2010-1.1.2.0/haskell2010.haddock"] + , haddockHtml = Just "/opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell2010-1.1.2.0" + , isExposed = False + , exposedModules = mempty + } + + it "ghc 7.10" $ do + haskell2010:_ <- + withSourceFile "tests/unit/package-dump/ghc-7.10.txt" $ \src -> + runConduit + $ src + .| decodeUtf8 + .| conduitDumpPackage + .| CL.consume + ghcPkgId <- parseGhcPkgId "ghc-7.10.1-325809317787a897b7a97d646ceaa3a3" + pkgIdent <- maybe (fail "Not parsable package id") pure $ + parsePackageIdentifier "ghc-7.10.1" + depends <- mapM parseGhcPkgId + [ "array-0.5.1.0-e29cdbe82692341ebb7ce6e2798294f9" + , "base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a" + , "bin-package-db-0.0.0.0-708fc7d634a370b311371a5bcde40b62" + , "bytestring-0.10.6.0-0909f8f31271f3d75749190bf2ee35db" + , "containers-0.5.6.2-2114032c163425cc264e6e1169dc2f6d" + , "directory-1.2.2.0-b4959b472d9eee380c6b32291ade29e0" + , "filepath-1.4.0.0-40d643aa87258c186441a1f8f3e13ca6" + , "hoopl-3.10.0.2-8c8dfc4c3140e5f7c982da224c3cb1f0" + , "hpc-0.6.0.2-ac9064885aa8cb08a93314222939ead4" + , "process-1.2.3.0-3b1e9bca6ac38225806ff7bbf3f845b1" + , "template-haskell-2.10.0.0-e895139a0ffff267d412e3d0191ce93b" + , "time-1.5.0.1-e17a9220d438435579d2914e90774246" + , "transformers-0.4.2.0-c1a7bb855a176fe475d7b665301cd48f" + , "unix-2.7.1.0-e5915eb989e568b732bc7286b0d0817f" + ] + haskell2010 { exposedModules = mempty } `shouldBe` DumpPackage + { ghcPkgId = ghcPkgId + , packageIdent = pkgIdent + , sublib = Nothing + , license = Just BSD3 + , libDirs = ["/opt/ghc/7.10.1/lib/ghc-7.10.1/ghc_EMlWrQ42XY0BNVbSrKixqY"] + , haddockInterfaces = ["/opt/ghc/7.10.1/share/doc/ghc/html/libraries/ghc-7.10.1/ghc.haddock"] + , haddockHtml = Just "/opt/ghc/7.10.1/share/doc/ghc/html/libraries/ghc-7.10.1" + , depends = depends + , libraries = ["HSghc-7.10.1-EMlWrQ42XY0BNVbSrKixqY"] + , hasExposedModules = True + , isExposed = False + , exposedModules = mempty + } + it "ghc 
7.8.4 (osx)" $ do + hmatrix:_ <- + withSourceFile "tests/unit/package-dump/ghc-7.8.4-osx.txt" $ \src -> + runConduit + $ src + .| decodeUtf8 + .| conduitDumpPackage + .| CL.consume + ghcPkgId <- parseGhcPkgId "hmatrix-0.16.1.5-12d5d21f26aa98774cdd8edbc343fbfe" + pkgId <- maybe (fail "Not parsable package id") pure $ + parsePackageIdentifier "hmatrix-0.16.1.5" + depends <- mapM parseGhcPkgId + [ "array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b" + , "base-4.7.0.2-918c7ac27f65a87103264a9f51652d63" + , "binary-0.7.1.0-108d06eea2ef05e517f9c1facf10f63c" + , "bytestring-0.10.4.0-78bc8f2c724c765c78c004a84acf6cc3" + , "deepseq-1.3.0.2-0ddc77716bd2515426e1ba39f6788a4f" + , "random-1.1-822c19b7507b6ac1aaa4c66731e775ae" + , "split-0.2.2-34cfb851cc3784e22bfae7a7bddda9c5" + , "storable-complex-0.2.2-e962c368d58acc1f5b41d41edc93da72" + , "vector-0.10.12.3-f4222db607fd5fdd7545d3e82419b307"] + hmatrix `shouldBe` DumpPackage + { ghcPkgId = ghcPkgId + , packageIdent = pkgId + , sublib = Nothing + , license = Just BSD3 + , libDirs = + [ "/Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/lib/x86_64-osx-ghc-7.8.4/hmatrix-0.16.1.5" + , "/opt/local/lib/" + , "/usr/local/lib/" + , "C:/Program Files/Example/"] + , haddockInterfaces = ["/Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/doc/html/hmatrix.haddock"] + , haddockHtml = Just "/Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/doc/html" + , depends = depends + , libraries = ["HShmatrix-0.16.1.5"] + , hasExposedModules = True + , isExposed = True + , exposedModules = Set.fromList ["Data.Packed","Data.Packed.Vector","Data.Packed.Matrix","Data.Packed.Foreign","Data.Packed.ST","Data.Packed.Development","Numeric.LinearAlgebra","Numeric.LinearAlgebra.LAPACK","Numeric.LinearAlgebra.Algorithms","Numeric.Container","Numeric.LinearAlgebra.Util","Numeric.LinearAlgebra.Devel","Numeric.LinearAlgebra.Data","Numeric.LinearAlgebra.HMatrix","Numeric.LinearAlgebra.Static"] + } + it "ghc HEAD" $ do + ghcBoot:_ <- + 
withSourceFile "tests/unit/package-dump/ghc-head.txt" $ \src -> + runConduit + $ src + .| decodeUtf8 + .| conduitDumpPackage + .| CL.consume + ghcPkgId <- parseGhcPkgId "ghc-boot-0.0.0.0" + pkgId <- maybe (fail "Not parsable package id") pure $ + parsePackageIdentifier "ghc-boot-0.0.0.0" + depends <- mapM parseGhcPkgId + [ "base-4.9.0.0" + , "binary-0.7.5.0" + , "bytestring-0.10.7.0" + , "directory-1.2.5.0" + , "filepath-1.4.1.0" + ] + ghcBoot `shouldBe` DumpPackage + { ghcPkgId = ghcPkgId + , packageIdent = pkgId + , sublib = Nothing + , license = Just BSD3 + , libDirs = + ["/opt/ghc/head/lib/ghc-7.11.20151213/ghc-boot-0.0.0.0"] + , haddockInterfaces = ["/opt/ghc/head/share/doc/ghc/html/libraries/ghc-boot-0.0.0.0/ghc-boot.haddock"] + , haddockHtml = Just "/opt/ghc/head/share/doc/ghc/html/libraries/ghc-boot-0.0.0.0" + , depends = depends + , libraries = ["HSghc-boot-0.0.0.0"] + , hasExposedModules = True + , isExposed = True + , exposedModules = Set.fromList ["GHC.Lexeme", "GHC.PackageDb"] + } + + + it "sinkMatching" $ runEnvNoLogging $ \pkgexe -> do + m <- ghcPkgDump pkgexe [] + $ conduitDumpPackage + .| sinkMatching (Map.singleton (mkPackageName "transformers") (mkVersion [0, 0, 0, 0, 0, 0, 1])) + case Map.lookup (mkPackageName "base") m of + Nothing -> error "base not present" + Just _ -> pure () + liftIO $ do + Map.lookup (mkPackageName "transformers") m `shouldBe` Nothing + Map.lookup (mkPackageName "ghc") m `shouldBe` Nothing + + describe "pruneDeps" $ do + it "sanity check" $ do + let prunes = + [ ((1, 'a'), []) + , ((1, 'b'), []) + , ((2, 'a'), [(1, 'b')]) + , ((2, 'b'), [(1, 'a')]) + , ((3, 'a'), [(1, 'c')]) + , ((4, 'a'), [(2, 'a')]) + ] + actual = fst <$> pruneDeps fst fst snd bestPrune prunes + actual `shouldBe` Map.fromList + [ (1, (1, 'b')) + , (2, (2, 'a')) + , (4, (4, 'a')) + ] + + prop "invariant holds" $ \prunes' -> + -- Force uniqueness + let prunes = Map.toList $ Map.fromList prunes' + in checkDepsPresent prunes $ fst <$> pruneDeps fst fst snd 
bestPrune prunes + +type PruneCheck = ((Int, Char), [(Int, Char)]) + +bestPrune :: PruneCheck -> PruneCheck -> PruneCheck +bestPrune x y + | fst x > fst y = x + | otherwise = y + +checkDepsPresent :: [PruneCheck] -> Map Int (Int, Char) -> Bool +checkDepsPresent prunes selected = + all hasDeps $ Set.toList allIds + where + depMap = Map.fromList prunes + allIds = Set.fromList $ Map.elems selected + + hasDeps ident = + case Map.lookup ident depMap of + Nothing -> error "checkDepsPresent: missing in depMap" + Just deps -> Set.null $ Set.difference (Set.fromList deps) allIds + +runEnvNoLogging :: (GhcPkgExe -> RIO SimplePrettyApp a) -> IO a +runEnvNoLogging inner = do + envVars <- view envVarsL <$> mkDefaultProcessContext + menv <- mkProcessContext $ Map.delete "GHC_PACKAGE_PATH" envVars + let find name = runRIO menv (findExecutable name) >>= either throwIO parseAbsFile + pkg <- GhcPkgExe <$> find "ghc-pkg" + app <- mkSimplePrettyApp mempty (Just menv) True 80 mempty + runRIO app (inner pkg) diff --git a/src/test/Stack/Types/TemplateNameSpec.hs b/tests/unit/Stack/Types/TemplateNameSpec.hs similarity index 89% rename from src/test/Stack/Types/TemplateNameSpec.hs rename to tests/unit/Stack/Types/TemplateNameSpec.hs index 477593a0df..b5cff7fe91 100644 --- a/src/test/Stack/Types/TemplateNameSpec.hs +++ b/tests/unit/Stack/Types/TemplateNameSpec.hs @@ -1,6 +1,8 @@ {-# LANGUAGE OverloadedStrings #-} -module Stack.Types.TemplateNameSpec where +module Stack.Types.TemplateNameSpec + ( spec + ) where import Stack.Types.TemplateName import Path.Internal @@ -14,9 +16,10 @@ spec = let pathOf s = either error templatePath (parseTemplateNameFromString s) it "parses out the TemplatePath" $ do - pathOf "github:user/name" `shouldBe` RepoPath (RepoTemplatePath Github "user" "name.hsfiles") + pathOf "github:user/name" `shouldBe` RepoPath (RepoTemplatePath GitHub "user" "name.hsfiles") pathOf "bitbucket:user/name" `shouldBe` RepoPath (RepoTemplatePath Bitbucket "user" "name.hsfiles") - 
pathOf "gitlab:user/name" `shouldBe` RepoPath (RepoTemplatePath Gitlab "user" "name.hsfiles") + pathOf "gitlab:user/name" `shouldBe` RepoPath (RepoTemplatePath GitLab "user" "name.hsfiles") + pathOf "codeberg:user/name" `shouldBe` RepoPath (RepoTemplatePath Codeberg "user" "name.hsfiles") pathOf "http://www.com/file" `shouldBe` UrlPath "http://www.com/file" pathOf "https://www.com/file" `shouldBe` UrlPath "https://www.com/file" @@ -37,7 +40,7 @@ spec = let colonAction = do - return $! pathOf "with:colon" + pure $! pathOf "with:colon" colonAction `shouldThrow` anyErrorCall else do @@ -48,4 +51,3 @@ spec = pathOf "c:\\home\\file" `shouldBe` RelPath "c:\\home\\file.hsfiles" (Path "c:\\home\\file.hsfiles") pathOf "with/slash" `shouldBe` RelPath "with/slash.hsfiles" (Path "with/slash.hsfiles") pathOf "with:colon" `shouldBe` RelPath "with:colon.hsfiles" (Path "with:colon.hsfiles") - diff --git a/src/test/Stack/Untar/README.md b/tests/unit/Stack/Untar/README.md similarity index 100% rename from src/test/Stack/Untar/README.md rename to tests/unit/Stack/Untar/README.md diff --git a/src/test/Stack/Untar/createFiles.sh b/tests/unit/Stack/Untar/createFiles.sh old mode 100755 new mode 100644 similarity index 100% rename from src/test/Stack/Untar/createFiles.sh rename to tests/unit/Stack/Untar/createFiles.sh diff --git a/src/test/Stack/Untar/test1.tar.gz b/tests/unit/Stack/Untar/test1.tar.gz similarity index 100% rename from src/test/Stack/Untar/test1.tar.gz rename to tests/unit/Stack/Untar/test1.tar.gz diff --git a/src/test/Stack/Untar/test2.tar.gz b/tests/unit/Stack/Untar/test2.tar.gz similarity index 100% rename from src/test/Stack/Untar/test2.tar.gz rename to tests/unit/Stack/Untar/test2.tar.gz diff --git a/tests/unit/Stack/UploadSpec.hs b/tests/unit/Stack/UploadSpec.hs new file mode 100644 index 0000000000..3d8862ffb8 --- /dev/null +++ b/tests/unit/Stack/UploadSpec.hs @@ -0,0 +1,51 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +module 
Stack.UploadSpec + ( spec + ) where + +import Data.Bits ((.&.)) +import RIO + ( Bool (..), IO, IsString, Maybe (..), String, ($), finally + , readFileBinary, replicateM_, runRIO, unless + , withSystemTempDirectory + ) +import RIO.Directory + ( emptyPermissions, getPermissions, setOwnerReadable + , setOwnerWritable + ) +import RIO.FilePath ( (</>) ) +import Stack.Upload + ( HackageKey (..), maybeGetHackageKey, writeFilePrivate ) +import System.Environment ( setEnv, unsetEnv ) +import System.Permissions ( osIsWindows ) +import System.PosixCompat.Files ( getFileStatus, fileMode ) +import Test.Hspec ( Spec, example, it, shouldBe, shouldReturn ) + +spec :: Spec +spec = do + it "writeFilePrivate" $ example $ withSystemTempDirectory "writeFilePrivate" $ \dir -> replicateM_ 2 $ do + let fp = dir </> "filename" + contents :: IsString s => s + contents = "These are the contents" + writeFilePrivate fp contents + actual <- readFileBinary fp + actual `shouldBe` contents + perms <- getPermissions fp + perms `shouldBe` setOwnerWritable True (setOwnerReadable True emptyPermissions) + + unless osIsWindows $ do + status <- getFileStatus fp + (fileMode status .&. 0o777) `shouldBe` 0o600 + + it "finds a HACKAGE_KEY env variable" $ do + runRIO () maybeGetHackageKey `shouldReturn` Nothing + + withEnv "HACKAGE_KEY" "api_key" + $ runRIO () maybeGetHackageKey `shouldReturn` Just (HackageKey "api_key") + +withEnv :: String -> String -> IO a -> IO a +withEnv k v f = do + setEnv k v + f `finally` unsetEnv k diff --git a/test/package-dump/ghc-7.10.txt b/tests/unit/package-dump/ghc-7.10.txt similarity index 99% rename from test/package-dump/ghc-7.10.txt rename to tests/unit/package-dump/ghc-7.10.txt index 12ecc16811..f66317db0b 100644 --- a/test/package-dump/ghc-7.10.txt +++ b/tests/unit/package-dump/ghc-7.10.txt @@ -844,7 +844,7 @@ description: serialisation or pretty printing. . 
There is also a 'ShortByteString' type which has a lower memory overhead - and can can be converted to or from a 'ByteString', but supports very few + and can be converted to or from a 'ByteString', but supports very few other operations. It is suitable for keeping many short strings in memory. . 'ByteString's are not designed for Unicode. For Unicode strings you should diff --git a/test/package-dump/ghc-7.8.4-osx.txt b/tests/unit/package-dump/ghc-7.8.4-osx.txt similarity index 100% rename from test/package-dump/ghc-7.8.4-osx.txt rename to tests/unit/package-dump/ghc-7.8.4-osx.txt diff --git a/test/package-dump/ghc-7.8.txt b/tests/unit/package-dump/ghc-7.8.txt similarity index 99% rename from test/package-dump/ghc-7.8.txt rename to tests/unit/package-dump/ghc-7.8.txt index 189d312c3d..c3ef0a2950 100644 --- a/test/package-dump/ghc-7.8.txt +++ b/tests/unit/package-dump/ghc-7.8.txt @@ -1105,7 +1105,7 @@ description: An efficient compact, immutable byte string type (both strict and l serialisation or pretty printing. . There is also a 'ShortByteString' type which has a lower memory overhead - and can can be converted to or from a 'ByteString', but supports very few + and can be converted to or from a 'ByteString', but supports very few other operations. It is suitable for keeping many short strings in memory. . 'ByteString's are not designed for Unicode. For Unicode strings you should diff --git a/test/package-dump/ghc-head.txt b/tests/unit/package-dump/ghc-head.txt similarity index 99% rename from test/package-dump/ghc-head.txt rename to tests/unit/package-dump/ghc-head.txt index 37521e7f34..9f1280b4fe 100644 --- a/test/package-dump/ghc-head.txt +++ b/tests/unit/package-dump/ghc-head.txt @@ -804,7 +804,7 @@ description: serialisation or pretty printing. . 
There is also a 'ShortByteString' type which has a lower memory overhead - and can can be converted to or from a 'ByteString', but supports very few + and can be converted to or from a 'ByteString', but supports very few other operations. It is suitable for keeping many short strings in memory. . 'ByteString's are not designed for Unicode. For Unicode strings you should diff --git a/tests/unit/unix/Stack/Ghci/FakePaths.hs b/tests/unit/unix/Stack/Ghci/FakePaths.hs new file mode 100644 index 0000000000..4897c7a47f --- /dev/null +++ b/tests/unit/unix/Stack/Ghci/FakePaths.hs @@ -0,0 +1,8 @@ +module Stack.Ghci.FakePaths + ( defaultDrive + ) where + +-- | Helpers for writing fake paths for test suite for the GhciScript DSL. This +-- must be a separate module because it is used in Template Haskell splices. +defaultDrive :: FilePath +defaultDrive = "/" diff --git a/tests/unit/windows/Stack/Ghci/FakePaths.hs b/tests/unit/windows/Stack/Ghci/FakePaths.hs new file mode 100644 index 0000000000..c9aa4356cd --- /dev/null +++ b/tests/unit/windows/Stack/Ghci/FakePaths.hs @@ -0,0 +1,8 @@ +module Stack.Ghci.FakePaths + ( defaultDrive + ) where + +-- | Helpers for writing fake paths for test suite for the GhciScript DSL. This +-- must be a separate module because it is used in Template Haskell splices. +defaultDrive :: FilePath +defaultDrive = "C:\\"