Compare commits

..

1 Commit

Author SHA1 Message Date
61e2801838 Windows fix 2022-05-12 18:03:04 +02:00
40 changed files with 468 additions and 1653 deletions

View File

@@ -13,7 +13,7 @@ variables:
# Sequential version number of all cached things.
# Bump to invalidate GitLab CI cache.
CACHE_REV: 1
CACHE_REV: 0
GIT_SUBMODULE_STRATEGY: recursive
@@ -125,10 +125,6 @@ variables:
- test/golden
- dist-newstyle/cache/
when: on_failure
cache:
key: ghcup-test-$CACHE_REV
paths:
- cabal-cache
# .test_ghcup_scoop:
# script:
@@ -138,77 +134,72 @@ variables:
extends:
- .test_ghcup_version
- .debian
- .root_cleanup
before_script:
- ./.gitlab/before_script/linux/install_deps.sh
- ./.gitlab/script/ci.sh extract_cabal_cache
after_script:
- ./.gitlab/script/ci.sh save_cabal_cache
- bash ./.gitlab/after_script.sh
.test_ghcup_version:linux32:
extends:
- .test_ghcup_version
- .alpine:32bit
- .root_cleanup
before_script:
- ./.gitlab/before_script/linux/alpine/install_deps.sh
- ./.gitlab/script/ci.sh extract_cabal_cache
after_script:
- ./.gitlab/script/ci.sh save_cabal_cache
- bash ./.gitlab/after_script.sh
.test_ghcup_version:armv7:
extends:
- .test_ghcup_version
- .linux:armv7
- .root_cleanup
before_script:
- ./.gitlab/before_script/linux/install_deps.sh
- ./.gitlab/script/ci.sh extract_cabal_cache
after_script:
- ./.gitlab/script/ci.sh save_cabal_cache
- bash ./.gitlab/after_script.sh
.test_ghcup_version:aarch64:
extends:
- .test_ghcup_version
- .linux:aarch64
- .root_cleanup
before_script:
- ./.gitlab/before_script/linux/install_deps.sh
- ./.gitlab/script/ci.sh extract_cabal_cache
after_script:
- ./.gitlab/script/ci.sh save_cabal_cache
- bash ./.gitlab/after_script.sh
.test_ghcup_version:darwin:
extends:
- .test_ghcup_version
- .darwin
- .root_cleanup
before_script:
- ./.gitlab/before_script/darwin/install_deps.sh
- ./.gitlab/script/ci.sh extract_cabal_cache
after_script:
- ./.gitlab/script/ci.sh save_cabal_cache
- bash ./.gitlab/after_script.sh
.test_ghcup_version:darwin:aarch64:
extends:
- .test_ghcup_version
- .darwin:aarch64
- .root_cleanup
cache:
key: darwin-brew-$CACHE_REV
paths:
- brew_cache
key: ghcup-test-$CACHE_REV
paths:
- cabal-cache
- .brew
- .brew_cache
before_script:
# extract brew cache
- ./.gitlab/script/ci.sh extract_brew_cache
# Install brew locally in the project dir. Packages will also be installed here.
- '[ -e "$CI_PROJECT_DIR/.brew" ] || git clone --depth=1 https://github.com/Homebrew/brew $CI_PROJECT_DIR/.brew'
- export PATH="$CI_PROJECT_DIR/.brew/bin:$CI_PROJECT_DIR/.brew/sbin:$PATH"
# otherwise we seem to get intel binaries
- export HOMEBREW_CHANGE_ARCH_TO_ARM=1
# make sure to not pollute the machine with temp files etc
- mkdir -p $CI_PROJECT_DIR/.brew_cache
- export HOMEBREW_CACHE=$CI_PROJECT_DIR/.brew_cache
- mkdir -p $CI_PROJECT_DIR/.brew_logs
- export HOMEBREW_LOGS=$CI_PROJECT_DIR/.brew_logs
- mkdir -p /private/tmp/.brew_tmp
- export HOMEBREW_TEMP=/private/tmp/.brew_tmp
# update and install packages
- /bin/bash ./.gitlab/script/brew.sh llvm autoconf automake
# extract cabal cache
- ./.gitlab/script/ci.sh extract_cabal_cache
- brew update
- brew install llvm
- brew install autoconf automake coreutils
script: |
export PATH="$CI_PROJECT_DIR/.brew/opt/llvm/bin:$CI_PROJECT_DIR/.brew/bin:$CI_PROJECT_DIR/.brew/sbin:$PATH"
export CC=$CI_PROJECT_DIR/.brew/opt/llvm/bin/clang
@@ -218,51 +209,40 @@ variables:
export RANLIB=$CI_PROJECT_DIR/.brew/opt/llvm/bin/llvm-ranlib
./.gitlab/before_script/darwin/install_deps.sh
./.gitlab/script/ghcup_version.sh
after_script:
- ./.gitlab/script/ci.sh save_cabal_cache
- ./.gitlab/script/ci.sh save_brew_cache
- bash ./.gitlab/after_script.sh
.test_ghcup_version:freebsd12:
extends:
- .test_ghcup_version
- .freebsd12
- .root_cleanup
before_script:
- ./.gitlab/before_script/freebsd/install_deps.sh
- ./.gitlab/script/ci.sh extract_cabal_cache
after_script:
- ./.gitlab/script/ci.sh save_cabal_cache
- bash ./.gitlab/after_script.sh
.test_ghcup_version:freebsd13:
extends:
- .test_ghcup_version
- .freebsd13
- .root_cleanup
before_script:
- sudo pkg update
- sudo pkg install --yes compat12x-amd64
- sudo ln -s libncurses.so.6 /usr/local/lib/libncurses.so.6.2
- ./.gitlab/before_script/freebsd/install_deps.sh
- ./.gitlab/script/ci.sh extract_cabal_cache
after_script:
- ./.gitlab/script/ci.sh save_cabal_cache
- bash ./.gitlab/after_script.sh
.test_ghcup_version:windows:
extends:
- .test_ghcup_version
- .windows
- .root_cleanup
before_script:
- set CABAL_DIR="$CI_PROJECT_DIR/cabal"
- bash ./.gitlab/before_script/windows/install_deps.sh
- bash ./.gitlab/script/ci.sh extract_cabal_cache
after_script:
- bash ./.gitlab/script/ci.sh save_cabal_cache
- bash ./.gitlab/after_script.sh
# .test_ghcup_scoop:windows:
# extends:
# - .windows
# - .test_ghcup_scoop
# - .root_cleanup
.release_ghcup:
script:
@@ -282,12 +262,9 @@ variables:
test:linux:stack:
stage: test
before_script:
- ./.gitlab/script/ci.sh extract_stack_cache
- ./.gitlab/before_script/linux/install_deps_minimal.sh
script:
- ./.gitlab/script/ghcup_stack.sh
after_script:
- ./.gitlab/script/ci.sh save_stack_cache
extends:
- .debian
needs: []
@@ -317,7 +294,6 @@ test:windows:bootstrap_powershell_script:
- "[Environment]::SetEnvironmentVariable('GHCUP_MSYS2', $null, [System.EnvironmentVariableTarget]::User)"
- "[Environment]::SetEnvironmentVariable('CABAL_DIR', $null, [System.EnvironmentVariableTarget]::User)"
- bash ./.gitlab/after_script.sh
- bash ./.gitlab/script/ci.sh save_cabal_cache
variables:
GHC_VERSION: "8.10.7"
CABAL_VERSION: "3.6.2.0"
@@ -564,17 +540,28 @@ release:darwin:aarch64:
cache:
key: darwin-brew-$CACHE_REV
paths:
- brew_cache
key: ghcup-test-$CACHE_REV
paths:
- cabal-cache
- .brew
- .brew_cache
before_script:
- ./.gitlab/script/ci.sh extract_brew_cache
- ./.gitlab/script/ci.sh extract_cabal_cache
# Install brew locally in the project dir. Packages will also be installed here.
- '[ -e "$CI_PROJECT_DIR/.brew" ] || git clone --depth=1 https://github.com/Homebrew/brew $CI_PROJECT_DIR/.brew'
- export PATH="$CI_PROJECT_DIR/.brew/bin:$CI_PROJECT_DIR/.brew/sbin:$PATH"
# otherwise we seem to get intel binaries
- export HOMEBREW_CHANGE_ARCH_TO_ARM=1
# make sure to not pollute the machine with temp files etc
- mkdir -p $CI_PROJECT_DIR/.brew_cache
- export HOMEBREW_CACHE=$CI_PROJECT_DIR/.brew_cache
- mkdir -p $CI_PROJECT_DIR/.brew_logs
- export HOMEBREW_LOGS=$CI_PROJECT_DIR/.brew_logs
- mkdir -p /private/tmp/.brew_tmp
- export HOMEBREW_TEMP=/private/tmp/.brew_tmp
# update and install packages
- /bin/bash ./.gitlab/script/brew.sh llvm autoconf automake
- brew update
- brew install llvm
- brew install autoconf automake
script: |
export PATH="$CI_PROJECT_DIR/.brew/opt/llvm/bin:$CI_PROJECT_DIR/.brew/bin:$CI_PROJECT_DIR/.brew/sbin:$PATH"
export CC=$CI_PROJECT_DIR/.brew/opt/llvm/bin/clang
@@ -584,9 +571,6 @@ release:darwin:aarch64:
export RANLIB=$CI_PROJECT_DIR/.brew/opt/llvm/bin/llvm-ranlib
./.gitlab/before_script/darwin/install_deps.sh
./.gitlab/script/ghcup_release.sh
after_script:
- ./.gitlab/script/ci.sh save_cabal_cache
- ./.gitlab/script/ci.sh save_brew_cache
variables:
ARTIFACT: "aarch64-apple-darwin-ghcup"
GHC_VERSION: "8.10.7"

View File

@@ -1,23 +1,11 @@
if [ "${OS}" = "WINDOWS" ] ; then
export GHCUP_INSTALL_BASE_PREFIX="$CI_PROJECT_DIR"
export GHCUP_BIN="$CI_PROJECT_DIR/ghcup/bin"
export PATH="$GHCUP_BIN:$CI_PROJECT_DIR/.local/bin:$PATH"
export TMPDIR="$CI_PROJECT_DIR/tmp"
export CABAL_DIR="$CI_PROJECT_DIR/cabal"
export CABAL_CACHE="$CI_PROJECT_DIR/cabal-cache"
export STACK_ROOT="$CI_PROJECT_DIR/stack"
export STACK_CACHE="$CI_PROJECT_DIR/stack-cache"
export BREW_DIR="$CI_PROJECT_DIR/.brew_cache"
export BREW_CACHE="$CI_PROJECT_DIR/brew-cache"
export GHCUP_INSTALL_BASE_PREFIX="$CI_PROJECT_DIR"
export GHCUP_BIN="$CI_PROJECT_DIR/ghcup/bin"
export PATH="$GHCUP_BIN:$CI_PROJECT_DIR/.local/bin:$PATH"
export TMPDIR="$CI_PROJECT_DIR/tmp"
else
export GHCUP_INSTALL_BASE_PREFIX="$CI_PROJECT_DIR"
export GHCUP_BIN="$CI_PROJECT_DIR/.ghcup/bin"
export PATH="$GHCUP_BIN:$CI_PROJECT_DIR/.local/bin:$PATH"
export TMPDIR="$CI_PROJECT_DIR/tmp"
export CABAL_DIR="$CI_PROJECT_DIR/cabal"
export CABAL_CACHE="$CI_PROJECT_DIR/cabal-cache"
export STACK_ROOT="$CI_PROJECT_DIR/stack"
export STACK_CACHE="$CI_PROJECT_DIR/stack-cache"
export BREW_DIR="$CI_PROJECT_DIR/.brew_cache"
export BREW_CACHE="$CI_PROJECT_DIR/brew-cache"
export GHCUP_INSTALL_BASE_PREFIX="$CI_PROJECT_DIR"
export GHCUP_BIN="$CI_PROJECT_DIR/.ghcup/bin"
export PATH="$GHCUP_BIN:$CI_PROJECT_DIR/.local/bin:$PATH"
export TMPDIR="$CI_PROJECT_DIR/tmp"
fi
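
Note (editorial sketch): this file only exports environment variables and is meant to be sourced, not executed; the ci.sh helper in the next file pulls it in with `. "${TOP}/../ghcup_env"`. A minimal sketch of that consumption pattern, assuming the same `$CI_PROJECT_DIR` layout as the jobs above (only variables present in both the old and the trimmed variant are used):

#!/usr/bin/env bash
# Sketch: how a CI helper script consumes ghcup_env (OS is set by the CI job environment).
set -Eeuo pipefail
TOP="$( cd "$(dirname "$0")" ; pwd -P )"
. "${TOP}/../ghcup_env"                 # defines GHCUP_BIN, TMPDIR, PATH additions, ...
mkdir -p "$TMPDIR" "$GHCUP_BIN"         # make sure the directories it points at exist
echo "ghcup bin dir: $GHCUP_BIN, tmp dir: $TMPDIR"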

View File

@@ -1,19 +0,0 @@
#!/usr/bin/env bash
set -Eeuxo pipefail
# Install brew locally in the project dir. Packages will also be installed here.
[ -e "$CI_PROJECT_DIR/.brew" ] || git clone --depth=1 https://github.com/Homebrew/brew $CI_PROJECT_DIR/.brew
export PATH="$CI_PROJECT_DIR/.brew/bin:$CI_PROJECT_DIR/.brew/sbin:$PATH"
# make sure to not pollute the machine with temp files etc
mkdir -p $CI_PROJECT_DIR/.brew_cache
export HOMEBREW_CACHE=$CI_PROJECT_DIR/.brew_cache
mkdir -p $CI_PROJECT_DIR/.brew_logs
export HOMEBREW_LOGS=$CI_PROJECT_DIR/.brew_logs
mkdir -p /private/tmp/.brew_tmp
export HOMEBREW_TEMP=/private/tmp/.brew_tmp
# update and install packages
brew update
brew install ${1+"$@"}
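
Context (editorial sketch): this helper was invoked from the darwin jobs shown earlier (e.g. `/bin/bash ./.gitlab/script/brew.sh llvm autoconf automake`); this diff replaces that call with direct `brew update` / `brew install` steps in the CI config. A small usage sketch, assuming a macOS runner with `$CI_PROJECT_DIR` set as in the jobs above:

# Install the toolchain packages via the project-local Homebrew checkout (macOS runner assumed).
/bin/bash ./.gitlab/script/brew.sh llvm autoconf automake coreutils
# The local brew prefix then has to be put on PATH explicitly, as the job scripts above do:
export PATH="$CI_PROJECT_DIR/.brew/opt/llvm/bin:$CI_PROJECT_DIR/.brew/bin:$CI_PROJECT_DIR/.brew/sbin:$PATH"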

View File

@@ -1,70 +0,0 @@
#!/usr/bin/env bash
set -Eeuo pipefail
TOP="$( cd "$(dirname "$0")" ; pwd -P )"
. "${TOP}/../ghcup_env"
function save_cabal_cache () {
echo "Storing cabal cache from $CABAL_DIR to $CABAL_CACHE..."
rm -Rf "$CABAL_CACHE"
mkdir -p "$CABAL_CACHE"
if [ -d "$CABAL_DIR" ]; then
cp -Rf "$CABAL_DIR" "$CABAL_CACHE/"
fi
}
function extract_cabal_cache () {
if [ -d "$CABAL_CACHE" ]; then
echo "Extracting cabal cache from $CABAL_CACHE to $CABAL_DIR..."
mkdir -p "$CABAL_DIR"
cp -Rf "$CABAL_CACHE"/* "$CABAL_DIR"
fi
}
function save_stack_cache () {
echo "Storing stack cache from $STACK_ROOT to $STACK_CACHE..."
rm -Rf "$STACK_CACHE"
mkdir -p "$STACK_CACHE"
if [ -d "$STACK_ROOT" ]; then
cp -Rf "$STACK_DIR" "$STACK_CACHE"
fi
}
function extract_stack_cache () {
if [ -d "$STACK_CACHE" ]; then
echo "Extracting stack cache from $STACK_CACHE to $STACK_ROOT..."
mkdir -p "$STACK_ROOT"
cp -Rf "$STACK_CACHE"/* "$STACK_ROOT"
fi
}
function save_brew_cache () {
echo "Storing brew cache from $BREW_DIR to $BREW_CACHE..."
rm -Rf "$BREW_CACHE"
mkdir -p "$BREW_CACHE"
if [ -d "$BREW_DIR" ]; then
cp -Rf "$BREW_DIR" "$BREW_CACHE"
fi
}
function extract_brew_cache () {
if [ -d "$BREW_CACHE" ]; then
echo "Extracting stack cache from $BREW_CACHE to $BREW_DIR..."
mkdir -p "$BREW_DIR"
cp -Rf "$BREW_CACHE"/* "$BREW_DIR"
fi
}
case $1 in
extract_cabal_cache) extract_cabal_cache ;;
save_cabal_cache) save_cabal_cache ;;
extract_stack_cache) extract_stack_cache ;;
save_stack_cache) save_stack_cache ;;
extract_brew_cache) extract_brew_cache ;;
save_brew_cache) save_brew_cache ;;
*) echo "unknown mode $1" ; exit 11 ;;
esac
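
Usage note (editorial sketch): the script is a plain mode dispatcher; the jobs above call it once before the build to restore a cache directory and once afterwards to store it again. A minimal invocation sketch matching the before_script/after_script entries in the CI config at the top of this diff:

# Restore the cabal store from the cached directory (no-op if the cache is empty).
bash ./.gitlab/script/ci.sh extract_cabal_cache
# ... build and test ...
# Persist the (possibly updated) store so the next pipeline can reuse it.
bash ./.gitlab/script/ci.sh save_cabal_cache
# Unknown modes fail with exit code 11.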

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/sh
set -eux
@@ -8,7 +8,6 @@ mkdir -p "$CI_PROJECT_DIR"/.local/bin
CI_PROJECT_DIR=$(pwd)
ecabal() {
cabal "$@"
}
@@ -35,8 +34,6 @@ git describe --always
### build
rm -rf "${GHCUP_DIR}"/share
ecabal update
if [ "${OS}" = "DARWIN" ] ; then
@@ -97,17 +94,16 @@ rm -rf "${GHCUP_DIR}"
eghcup --numeric-version
eghcup install ghc ${GHC_VERSION}
ls -lah "$(eghcup whereis -d ghc ${GHC_VERSION})"
[ "`$(eghcup whereis ghc ${GHC_VERSION}) --numeric-version`" = "${GHC_VERSION}" ]
[ "`eghcup run --ghc ${GHC_VERSION} -- ghc --numeric-version`" = "${GHC_VERSION}" ]
[ `$(eghcup whereis ghc ${GHC_VERSION}) --numeric-version` = "${GHC_VERSION}" ]
[ `eghcup run --ghc ${GHC_VERSION} -- ghc --numeric-version` = "${GHC_VERSION}" ]
eghcup set ghc ${GHC_VERSION}
eghcup install cabal ${CABAL_VERSION}
[ "`$(eghcup whereis cabal ${CABAL_VERSION}) --numeric-version`" = "${CABAL_VERSION}" ]
[ `$(eghcup whereis cabal ${CABAL_VERSION}) --numeric-version` = "${CABAL_VERSION}" ]
eghcup unset cabal
"$GHCUP_BIN"/cabal --version && exit 1 || echo yes
eghcup set cabal ${CABAL_VERSION}
[ "`$(eghcup whereis cabal ${CABAL_VERSION}) --numeric-version`" = "${CABAL_VERSION}" ]
[ "`eghcup run --cabal ${CABAL_VERSION} -- cabal --numeric-version`" = "${CABAL_VERSION}" ]
[ `$(eghcup whereis cabal ${CABAL_VERSION}) --numeric-version` = "${CABAL_VERSION}" ]
[ `eghcup run --cabal ${CABAL_VERSION} -- cabal --numeric-version` = "${CABAL_VERSION}" ]
if [ "${OS}" != "FREEBSD" ] ; then
if [ "${ARCH}" = "64" ] ; then

View File

@@ -20,7 +20,6 @@
- ignore: {name: "Avoid lambda"}
- ignore: {name: "Use uncurry"}
- ignore: {name: "Use replicateM"}
- ignore: {name: "Use unless"}
- ignore: {name: "Redundant irrefutable pattern"}

View File

@@ -44,6 +44,7 @@ import Data.Vector ( Vector
import Data.Versions hiding ( str )
import Haskus.Utils.Variant.Excepts
import Prelude hiding ( appendFile )
import System.Directory ( canonicalizePath )
import System.FilePath
import System.Exit
import System.IO.Unsafe
@@ -437,7 +438,6 @@ install' _ (_, ListResult {..}) = do
, FileAlreadyExistsError
, ProcessError
, GHCupShadowed
, UninstallFailed
]
run (do
@@ -512,7 +512,7 @@ del' :: (MonadReader AppState m, MonadIO m, MonadFail m, MonadMask m, MonadUnlif
del' _ (_, ListResult {..}) = do
AppState { ghcupInfo = GHCupInfo { _ghcupDownloads = dls }} <- ask
let run = runE @'[NotInstalled, UninstallFailed]
let run = runE @'[NotInstalled]
run (do
let vi = getVersionInfo lVer lTool dls

View File

@@ -52,6 +52,7 @@ import Haskus.Utils.Variant.Excepts
import Options.Applicative hiding ( style )
import Prelude hiding ( appendFile )
import Safe
import System.Directory
import System.Process ( readProcess )
import System.FilePath
import Text.HTML.TagSoup hiding ( Tag )

View File

@@ -388,7 +388,6 @@ type GHCEffects = '[ AlreadyInstalled
, ProcessError
, CopyError
, BuildFailed
, UninstallFailed
]
type HLSEffects = '[ AlreadyInstalled
, BuildFailed
@@ -407,7 +406,6 @@ type HLSEffects = '[ AlreadyInstalled
, NotInstalled
, DirNotEmpty
, ArchiveResult
, UninstallFailed
]
@@ -494,7 +492,7 @@ compile compileCommand settings Dirs{..} runAppState runLogger = do
case keepDirs settings of
Never -> runLogger $ logError $ T.pack $ prettyShow err
_ -> runLogger (logError $ T.pack (prettyShow err) <> "\n" <>
"Check the logs at " <> T.pack (fromGHCupPath logsDir) <> " and the build directory "
"Check the logs at " <> T.pack logsDir <> " and the build directory "
<> T.pack tmpdir <> " for more clues." <> "\n" <>
"Make sure to clean up " <> T.pack tmpdir <> " afterwards.")
pure $ ExitFailure 9
@@ -553,7 +551,7 @@ compile compileCommand settings Dirs{..} runAppState runLogger = do
case keepDirs settings of
Never -> runLogger $ logError $ T.pack $ prettyShow err
_ -> runLogger (logError $ T.pack (prettyShow err) <> "\n" <>
"Check the logs at " <> T.pack (fromGHCupPath logsDir) <> " and the build directory "
"Check the logs at " <> T.pack logsDir <> " and the build directory "
<> T.pack tmpdir <> " for more clues." <> "\n" <>
"Make sure to clean up " <> T.pack tmpdir <> " afterwards.")
pure $ ExitFailure 9

View File

@@ -98,7 +98,7 @@ gcFooter = [s|Discussion:
---------------------------
type GCEffects = '[ NotInstalled, UninstallFailed ]
type GCEffects = '[ NotInstalled ]
runGC :: MonadUnliftIO m
@@ -129,7 +129,7 @@ gc :: ( Monad m
-> (ReaderT LeanAppState m () -> m ())
-> m ExitCode
gc GCOptions{..} runAppState runLogger = runGC runAppState (do
when gcOldGHC (liftE rmOldGHC)
when gcOldGHC rmOldGHC
lift $ when gcProfilingLibs rmProfilingLibs
lift $ when gcShareDir rmShareDir
liftE $ when gcHLSNoGHC rmHLSNoGHC

View File

@@ -18,7 +18,6 @@ import GHCup.OptParse.Common
import GHCup
import GHCup.Errors
import GHCup.Types
import GHCup.Utils.Dirs
import GHCup.Utils.Logger
import GHCup.Utils.String.QQ
@@ -258,7 +257,6 @@ type InstallEffects = '[ AlreadyInstalled
, NoToolVersionSet
, FileAlreadyExistsError
, ProcessError
, UninstallFailed
, (AlreadyInstalled, ())
, (UnknownArchive, ())
@@ -266,9 +264,9 @@ type InstallEffects = '[ AlreadyInstalled
, (FileDoesNotExistError, ())
, (CopyError, ())
, (NotInstalled, ())
, (UninstallFailed, ())
, (DirNotEmpty, ())
, (NoDownload, ())
, (NotInstalled, ())
, (BuildFailed, ())
, (TagNotFound, ())
, (DigestError, ())
@@ -289,7 +287,6 @@ type InstallEffects = '[ AlreadyInstalled
, (DirNotEmpty, NotInstalled)
, (NoDownload, NotInstalled)
, (NotInstalled, NotInstalled)
, (UninstallFailed, NotInstalled)
, (BuildFailed, NotInstalled)
, (TagNotFound, NotInstalled)
, (DigestError, NotInstalled)
@@ -322,7 +319,6 @@ type InstallGHCEffects = '[ TagNotFound
, BuildFailed
, DirNotEmpty
, AlreadyInstalled
, UninstallFailed
, (AlreadyInstalled, NotInstalled)
, (UnknownArchive, NotInstalled)
@@ -332,7 +328,6 @@ type InstallGHCEffects = '[ TagNotFound
, (NotInstalled, NotInstalled)
, (DirNotEmpty, NotInstalled)
, (NoDownload, NotInstalled)
, (UninstallFailed, NotInstalled)
, (BuildFailed, NotInstalled)
, (TagNotFound, NotInstalled)
, (DigestError, NotInstalled)
@@ -352,7 +347,6 @@ type InstallGHCEffects = '[ TagNotFound
, (NotInstalled, ())
, (DirNotEmpty, ())
, (NoDownload, ())
, (UninstallFailed, ())
, (BuildFailed, ())
, (TagNotFound, ())
, (DigestError, ())
@@ -447,21 +441,21 @@ install installCommand settings getAppState' runLogger = case installCommand of
case keepDirs settings of
Never -> runLogger (logError $ T.pack $ prettyShow err)
_ -> runLogger (logError $ T.pack (prettyShow err) <> "\n" <>
"Check the logs at " <> T.pack (fromGHCupPath logsDir) <> " and the build directory " <> T.pack tmpdir <> " for more clues." <> "\n" <>
"Check the logs at " <> T.pack logsDir <> " and the build directory " <> T.pack tmpdir <> " for more clues." <> "\n" <>
"Make sure to clean up " <> T.pack tmpdir <> " afterwards.")
pure $ ExitFailure 3
VLeft err@(V (BuildFailed tmpdir _, ())) -> do
case keepDirs settings of
Never -> runLogger (logError $ T.pack $ prettyShow err)
_ -> runLogger (logError $ T.pack (prettyShow err) <> "\n" <>
"Check the logs at " <> T.pack (fromGHCupPath logsDir) <> " and the build directory " <> T.pack tmpdir <> " for more clues." <> "\n" <>
"Check the logs at " <> T.pack logsDir <> " and the build directory " <> T.pack tmpdir <> " for more clues." <> "\n" <>
"Make sure to clean up " <> T.pack tmpdir <> " afterwards.")
pure $ ExitFailure 3
VLeft e -> do
runLogger $ do
logError $ T.pack $ prettyShow e
logError $ "Also check the logs in " <> T.pack (fromGHCupPath logsDir)
logError $ "Also check the logs in " <> T.pack logsDir
pure $ ExitFailure 3
@@ -513,7 +507,7 @@ install installCommand settings getAppState' runLogger = case installCommand of
VLeft e -> do
runLogger $ do
logError $ T.pack $ prettyShow e
logError $ "Also check the logs in " <> T.pack (fromGHCupPath logsDir)
logError $ "Also check the logs in " <> T.pack logsDir
pure $ ExitFailure 4
installHLS :: InstallOptions -> IO ExitCode
@@ -573,7 +567,7 @@ install installCommand settings getAppState' runLogger = case installCommand of
VLeft e -> do
runLogger $ do
logError $ T.pack $ prettyShow e
logError $ "Also check the logs in " <> T.pack (fromGHCupPath logsDir)
logError $ "Also check the logs in " <> T.pack logsDir
pure $ ExitFailure 4
installStack :: InstallOptions -> IO ExitCode
@@ -624,6 +618,6 @@ install installCommand settings getAppState' runLogger = case installCommand of
VLeft e -> do
runLogger $ do
logError $ T.pack $ prettyShow e
logError $ "Also check the logs in " <> T.pack (fromGHCupPath logsDir)
logError $ "Also check the logs in " <> T.pack logsDir
pure $ ExitFailure 4

View File

@@ -42,7 +42,7 @@ import Control.Concurrent (threadDelay)
---------------------------
type NukeEffects = '[ NotInstalled, UninstallFailed ]
type NukeEffects = '[ NotInstalled ]
runNuke :: AppState

View File

@@ -127,7 +127,7 @@ rmFooter = [s|Discussion:
---------------------------
type RmEffects = '[ NotInstalled, UninstallFailed ]
type RmEffects = '[ NotInstalled ]
runRm :: (ReaderT env m (VEither RmEffects a) -> m (VEither RmEffects a))

View File

@@ -32,6 +32,7 @@ import Data.List ( intercalate )
import Haskus.Utils.Variant.Excepts
import Options.Applicative hiding ( style )
import Prelude hiding ( appendFile )
import System.Directory
import System.FilePath
import System.Environment
import System.Exit
@@ -175,7 +176,6 @@ type RunEffects = '[ AlreadyInstalled
, NoToolVersionSet
, FileAlreadyExistsError
, ProcessError
, UninstallFailed
]
runLeanRUN :: (MonadUnliftIO m, MonadIO m)
@@ -339,7 +339,6 @@ run RunOptions{..} runAppState leanAppstate runLogger = do
, AlreadyInstalled
, FileAlreadyExistsError
, CopyError
, UninstallFailed
] (ResourceT (ReaderT AppState m)) ()
installToolChainFull Toolchain{..} tmp = do
forM_ [(GHC,) <$> ghcVer, (Cabal,) <$> cabalVer, (HLS,) <$> hlsVer, (Stack,) <$> stackVer] $ \mt -> do

View File

@@ -17,7 +17,6 @@ import GHCup
import GHCup.Errors
import GHCup.OptParse.Common
import GHCup.Types
import GHCup.Utils
import GHCup.Utils.Logger
import GHCup.Utils.String.QQ
@@ -300,7 +299,7 @@ whereis whereisCommand whereisOptions runAppState leanAppstate runLogger = do
pure $ ExitFailure 30
(WhereisBaseDir, _) -> do
liftIO $ putStr $ fromGHCupPath baseDir
liftIO $ putStr baseDir
pure ExitSuccess
(WhereisBinDir, _) -> do
@@ -308,13 +307,13 @@ whereis whereisCommand whereisOptions runAppState leanAppstate runLogger = do
pure ExitSuccess
(WhereisCacheDir, _) -> do
liftIO $ putStr $ fromGHCupPath cacheDir
liftIO $ putStr cacheDir
pure ExitSuccess
(WhereisLogsDir, _) -> do
liftIO $ putStr $ fromGHCupPath logsDir
liftIO $ putStr logsDir
pure ExitSuccess
(WhereisConfDir, _) -> do
liftIO $ putStr $ fromGHCupPath confDir
liftIO $ putStr confDir
pure ExitSuccess

View File

@@ -220,7 +220,7 @@ Report bugs at <https://gitlab.haskell.org/haskell/ghcup-hs/issues>|]
let s' = AppState settings dirs keybindings ghcupInfo pfreq loggerConfig
race_ (liftIO $ runReaderT cleanupTrash s')
(threadDelay 5000000 >> runLogger (logWarn $ "Killing cleanup thread (exceeded 5s timeout)... please remove leftover files in " <> T.pack (fromGHCupPath recycleDir) <> " manually"))
(threadDelay 5000000 >> runLogger (logWarn $ "Killing cleanup thread (exceeded 5s timeout)... please remove leftover files in " <> T.pack recycleDir <> " manually"))
case optCommand of
Nuke -> pure ()

View File

@@ -31,7 +31,4 @@ package cabal-plan
package aeson
flags: +ordered-keymap
package streamly
flags: +use-unliftio
allow-newer: base, ghc-prim, template-haskell, language-c

View File

@@ -1,7 +0,0 @@
#include "dirutils.h"
unsigned int
__posixdir_d_type(struct dirent* d)
{
return(d -> d_type);
}

View File

@@ -1,15 +0,0 @@
#ifndef POSIXPATHS_CBITS_DIRUTILS_H
#define POSIXPATHS_CBITS_DIRUTILS_H
#include <stdlib.h>
#include <dirent.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
extern unsigned int
__posixdir_d_type(struct dirent* d)
;
#endif

View File

@@ -1,6 +1,6 @@
cabal-version: 3.0
name: ghcup
version: 0.1.18.0
version: 0.1.17.8
license: LGPL-3.0-only
license-file: LICENSE
copyright: Julian Ospald 2020
@@ -126,7 +126,6 @@ library
, safe ^>=0.3.18
, safe-exceptions ^>=0.1
, split ^>=0.2.3.4
, streamly ^>=0.8.2
, strict-base ^>=0.4
, template-haskell >=2.7 && <2.18
, temporary ^>=1.3
@@ -166,12 +165,9 @@ library
else
other-modules:
GHCup.Utils.File.Posix
GHCup.Utils.File.Posix.Foreign
GHCup.Utils.File.Posix.Traversals
GHCup.Utils.Posix
GHCup.Utils.Prelude.Posix
c-sources: cbits/dirutils.c
build-depends:
, bz2 >=0.5.0.5 && <1.1
, terminal-size ^>=0.3.2.1
@@ -275,6 +271,7 @@ executable ghcup
if flag(no-exe)
buildable: False
test-suite ghcup-test
type: exitcode-stdio-1.0
main-is: Main.hs
@@ -283,7 +280,6 @@ test-suite ghcup-test
other-modules:
GHCup.ArbitraryTypes
GHCup.Types.JSONSpec
GHCup.Utils.FileSpec
Spec
default-language: Haskell2010
@@ -303,15 +299,12 @@ test-suite ghcup-test
, base >=4.12 && <5
, bytestring ^>=0.10
, containers ^>=0.6
, directory ^>=1.3.6.0
, filepath ^>=1.4.2.1
, generic-arbitrary >=0.1.0 && <0.3
, ghcup
, hspec >=2.7.10 && <2.10
, hspec-golden-aeson ^>=0.9
, QuickCheck ^>=2.14.1
, quickcheck-arbitrary-adt ^>=0.3.1.0
, streamly ^>=0.8.2
, text ^>=1.2.4.0
, uri-bytestring ^>=0.3.2.2
, versions >=4.0.1 && <5.1

View File

@@ -42,6 +42,8 @@ import GHCup.Version
import Codec.Archive ( ArchiveResult )
import Control.Applicative
import Control.DeepSeq ( force )
import Control.Exception ( evaluate )
import Control.Exception.Safe
import Control.Monad
#if !MIN_VERSION_base(4,13,0)
@@ -50,6 +52,7 @@ import Control.Monad.Fail ( MonadFail )
import Control.Monad.Reader
import Control.Monad.Trans.Resource
hiding ( throwM )
import Control.Monad.IO.Unlift ( MonadUnliftIO( withRunInIO ) )
import Data.ByteString ( ByteString )
import Data.Either
import Data.List
@@ -74,9 +77,11 @@ import Prelude hiding ( abs
, writeFile
)
import Safe hiding ( at )
import System.Directory hiding ( findFiles )
import System.Environment
import System.FilePath
import System.IO.Error
import System.IO.Temp
import Text.PrettyPrint.HughesPJClass ( prettyShow )
import Text.Regex.Posix
import URI.ByteString
@@ -91,7 +96,6 @@ import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.Encoding as E
import qualified Text.Megaparsec as MP
import qualified Streamly.Prelude as S
import GHCup.Utils.MegaParsec
import Control.Concurrent (threadDelay)
@@ -198,7 +202,6 @@ installGHCBindist :: ( MonadFail m
, DirNotEmpty
, ArchiveResult
, ProcessError
, UninstallFailed
]
m
()
@@ -266,7 +269,6 @@ installPackedGHC :: ( MonadMask m
, MonadIO m
, MonadUnliftIO m
, MonadFail m
, MonadResource m
)
=> FilePath -- ^ Path to the packed GHC bindist
-> Maybe TarDir -- ^ Subdir of the archive
@@ -289,8 +291,8 @@ installPackedGHC dl msubdir inst ver forceInstall = do
-- unpack
tmpUnpack <- lift mkGhcupTmpDir
liftE $ cleanUpOnError tmpUnpack (unpackToDir (fromGHCupPath tmpUnpack) dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpUnpack)
liftE $ cleanUpOnError tmpUnpack (unpackToDir tmpUnpack dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform tmpUnpack
-- the subdir of the archive where we do the work
workdir <- maybe (pure tmpUnpack)
@@ -298,7 +300,12 @@ installPackedGHC dl msubdir inst ver forceInstall = do
msubdir
liftE $ runBuildAction tmpUnpack
(installUnpackedGHC workdir inst ver forceInstall)
(case inst of
IsolateDirResolved _ -> Nothing -- don't clean up for isolated installs, since that'd potentially delete other
-- user files if '--force' is supplied
GHCupDir d -> Just d
)
(installUnpackedGHC workdir inst ver)
-- | Install an unpacked GHC distribution. This only deals with the GHC
@@ -312,21 +319,21 @@ installUnpackedGHC :: ( MonadReader env m
, MonadIO m
, MonadUnliftIO m
, MonadMask m
, MonadResource m
, MonadFail m
)
=> GHCupPath -- ^ Path to the unpacked GHC bindist (where the configure script resides)
=> FilePath -- ^ Path to the unpacked GHC bindist (where the configure script resides)
-> InstallDirResolved -- ^ Path to install to
-> Version -- ^ The GHC version
-> Bool -- ^ Force install
-> Excepts '[ProcessError] m ()
installUnpackedGHC path inst ver forceInstall
installUnpackedGHC path inst ver
| isWindows = do
lift $ logInfo "Installing GHC (this may take a while)"
-- Windows bindists are relocatable and don't need
-- to run configure.
-- We also must make sure to preserve mtime to not confuse ghc-pkg.
lift $ mergeFileTree path inst GHC (mkTVer ver) $ \source dest -> liftIO $ do
lift $ withRunInIO $ \run -> flip onException (case inst of
IsolateDirResolved _ -> pure ()
GHCupDir d -> run $ recyclePathForcibly d
) $ copyDirectoryRecursive path (fromInstallDir inst) $ \source dest -> do
mtime <- getModificationTime source
moveFilePortable source dest
setModificationTime dest mtime
@@ -344,18 +351,10 @@ installUnpackedGHC path inst ver forceInstall
("./configure" : ("--prefix=" <> fromInstallDir inst)
: alpineArgs
)
(Just $ fromGHCupPath path)
(Just path)
"ghc-configure"
Nothing
tmpInstallDest <- lift withGHCupTmpDir
lEM $ make ["DESTDIR=" <> fromGHCupPath tmpInstallDest, "install"] (Just $ fromGHCupPath path)
lift $ logInfo $ "Merging file tree from \"" <> T.pack (fromGHCupPath tmpInstallDest) <> "\" to \"" <> T.pack (fromInstallDir inst) <> "\""
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpInstallDest)
lift $ mergeFileTree (tmpInstallDest `appendGHCupPath` dropDrive (fromInstallDir inst))
inst
GHC
(mkTVer ver)
(\f t -> liftIO $ install f t (not forceInstall))
lEM $ make ["install"] (Just path)
pure ()
@@ -393,7 +392,6 @@ installGHCBin :: ( MonadFail m
, DirNotEmpty
, ArchiveResult
, ProcessError
, UninstallFailed
]
m
()
@@ -464,11 +462,11 @@ installCabalBindist dlinfo ver installDir forceInstall = do
-- unpack
tmpUnpack <- lift withGHCupTmpDir
liftE $ cleanUpOnError tmpUnpack (unpackToDir (fromGHCupPath tmpUnpack) dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpUnpack)
liftE $ cleanUpOnError tmpUnpack (unpackToDir tmpUnpack dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform tmpUnpack
-- the subdir of the archive where we do the work
workdir <- fromGHCupPath <$> maybe (pure tmpUnpack) (liftE . intoSubdir tmpUnpack) (view dlSubdir dlinfo)
workdir <- maybe (pure tmpUnpack) (liftE . intoSubdir tmpUnpack) (view dlSubdir dlinfo)
case installDir of
IsolateDir isoDir -> do -- isolated install
@@ -476,7 +474,7 @@ installCabalBindist dlinfo ver installDir forceInstall = do
liftE $ installCabalUnpacked workdir (IsolateDirResolved isoDir) ver forceInstall
GHCupInternal -> do -- regular install
liftE $ installCabalUnpacked workdir (GHCupBinDir binDir) ver forceInstall
liftE $ installCabalUnpacked workdir (GHCupDir binDir) ver forceInstall
-- | Install an unpacked cabal distribution.
@@ -493,15 +491,17 @@ installCabalUnpacked path inst ver forceInstall = do
let destFileName = cabalFile
<> (case inst of
IsolateDirResolved _ -> ""
_ -> ("-" <>) . T.unpack . prettyVer $ ver
GHCupDir _ -> ("-" <>) . T.unpack . prettyVer $ ver
)
<> exeExt
let destPath = fromInstallDir inst </> destFileName
unless forceInstall -- Overwrite it when it IS a force install
(liftE $ throwIfFileAlreadyExists destPath)
copyFileE
(path </> cabalFile <> exeExt)
destPath
(not forceInstall)
lift $ chmod_755 destPath
-- | Installs cabal into @~\/.ghcup\/bin/cabal-\<ver\>@ and
@@ -575,7 +575,6 @@ installHLSBindist :: ( MonadMask m
, FileAlreadyExistsError
, ProcessError
, DirNotEmpty
, UninstallFailed
]
m
()
@@ -606,11 +605,11 @@ installHLSBindist dlinfo ver installDir forceInstall = do
-- unpack
tmpUnpack <- lift withGHCupTmpDir
liftE $ cleanUpOnError tmpUnpack (unpackToDir (fromGHCupPath tmpUnpack) dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpUnpack)
liftE $ cleanUpOnError tmpUnpack (unpackToDir tmpUnpack dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform tmpUnpack
-- the subdir of the archive where we do the work
workdir <- fromGHCupPath <$> maybe (pure tmpUnpack) (liftE . intoSubdir tmpUnpack) (view dlSubdir dlinfo)
workdir <- maybe (pure tmpUnpack) (liftE . intoSubdir tmpUnpack) (view dlSubdir dlinfo)
legacy <- liftIO $ isLegacyHLSBindist workdir
if
@@ -624,15 +623,15 @@ installHLSBindist dlinfo ver installDir forceInstall = do
lift $ logInfo $ "isolated installing HLS to " <> T.pack isoDir
if legacy
then liftE $ installHLSUnpackedLegacy workdir (IsolateDirResolved isoDir) ver forceInstall
else liftE $ runBuildAction tmpUnpack $ installHLSUnpacked workdir (IsolateDirResolved isoDir) ver forceInstall
else liftE $ runBuildAction tmpUnpack Nothing $ installHLSUnpacked workdir (IsolateDirResolved isoDir) ver
GHCupInternal -> do
if legacy
then liftE $ installHLSUnpackedLegacy workdir (GHCupBinDir binDir) ver forceInstall
then liftE $ installHLSUnpackedLegacy workdir (GHCupDir binDir) ver forceInstall
else do
inst <- ghcupHLSDir ver
liftE $ runBuildAction tmpUnpack
$ installHLSUnpacked workdir (GHCupDir inst) ver forceInstall
liftE $ runBuildAction tmpUnpack (Just inst)
$ installHLSUnpacked workdir (GHCupDir inst) ver
liftE $ setHLS ver SetHLS_XYZ Nothing
@@ -642,34 +641,15 @@ isLegacyHLSBindist path = do
not <$> doesFileExist (path </> "GNUmakefile")
-- | Install an unpacked hls distribution.
installHLSUnpacked :: ( MonadMask m
, MonadUnliftIO m
, MonadReader env m
, MonadFail m
, HasLog env
, HasDirs env
, HasSettings env
, MonadCatch m
, MonadIO m
, MonadResource m
, HasPlatformReq env
)
installHLSUnpacked :: (MonadMask m, MonadUnliftIO m, MonadReader env m, MonadFail m, HasLog env, HasDirs env, HasSettings env, MonadCatch m, MonadIO m)
=> FilePath -- ^ Path to the unpacked hls bindist (where the executable resides)
-> InstallDirResolved -- ^ Path to install to
-> Version
-> Bool
-> Excepts '[ProcessError, CopyError, FileAlreadyExistsError, NotInstalled] m ()
installHLSUnpacked path inst ver forceInstall = do
PlatformRequest { .. } <- lift getPlatformReq
installHLSUnpacked path (fromInstallDir -> inst) _ = do
lift $ logInfo "Installing HLS"
tmpInstallDest <- lift withGHCupTmpDir
lEM $ make ["DESTDIR=" <> fromGHCupPath tmpInstallDest, "PREFIX=" <> fromInstallDir inst, "install"] (Just path)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpInstallDest)
lift $ mergeFileTree (tmpInstallDest `appendGHCupPath` dropDrive (fromInstallDir inst))
inst
HLS
(mkTVer ver)
(\f t -> liftIO $ install f t (not forceInstall))
liftIO $ createDirRecursive' inst
lEM $ make ["PREFIX=" <> inst, "install"] (Just path)
-- | Install an unpacked hls distribution (legacy).
installHLSUnpackedLegacy :: (MonadReader env m, MonadFail m, HasLog env, MonadCatch m, MonadIO m)
@@ -693,17 +673,19 @@ installHLSUnpackedLegacy path installDir ver forceInstall = do
let toF = dropSuffix exeExt f
<> (case installDir of
IsolateDirResolved _ -> ""
_ -> ("~" <>) . T.unpack . prettyVer $ ver
GHCupDir _ -> ("~" <>) . T.unpack . prettyVer $ ver
)
<> exeExt
let srcPath = path </> f
let destPath = fromInstallDir installDir </> toF
unless forceInstall -- if it is a force install, overwrite it.
(liftE $ throwIfFileAlreadyExists destPath)
copyFileE
srcPath
destPath
(not forceInstall)
lift $ chmod_755 destPath
-- install haskell-language-server-wrapper
@@ -711,16 +693,18 @@ installHLSUnpackedLegacy path installDir ver forceInstall = do
toF = wrapper
<> (case installDir of
IsolateDirResolved _ -> ""
_ -> ("-" <>) . T.unpack . prettyVer $ ver
GHCupDir _ -> ("-" <>) . T.unpack . prettyVer $ ver
)
<> exeExt
srcWrapperPath = path </> wrapper <> exeExt
destWrapperPath = fromInstallDir installDir </> toF
unless forceInstall
(liftE $ throwIfFileAlreadyExists destWrapperPath)
copyFileE
srcWrapperPath
destWrapperPath
(not forceInstall)
lift $ chmod_755 destWrapperPath
@@ -758,7 +742,6 @@ installHLSBin :: ( MonadMask m
, FileAlreadyExistsError
, ProcessError
, DirNotEmpty
, UninstallFailed
]
m
()
@@ -818,8 +801,8 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal patc
-- unpack
tmpUnpack <- lift mkGhcupTmpDir
liftE $ cleanUpOnError tmpUnpack (unpackToDir (fromGHCupPath tmpUnpack) dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpUnpack)
liftE $ cleanUpOnError tmpUnpack (unpackToDir tmpUnpack dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform tmpUnpack
workdir <- maybe (pure tmpUnpack)
(liftE . intoSubdir tmpUnpack)
@@ -830,7 +813,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal patc
-- clone from git
Right GitBranch{..} -> do
tmpUnpack <- lift mkGhcupTmpDir
let git args = execLogged "git" ("--no-pager":args) (Just $ fromGHCupPath tmpUnpack) "git" Nothing
let git args = execLogged "git" ("--no-pager":args) (Just tmpUnpack) "git" Nothing
tver <- reThrowAll @_ @'[ProcessError] DownloadFailed $ do
let rep = fromMaybe "https://github.com/haskell/haskell-language-server.git" repo
lift $ logInfo $ "Fetching git repo " <> T.pack rep <> " at ref " <> T.pack ref <> " (this may take a while)"
@@ -850,7 +833,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal patc
lEM $ git fetch_args
lEM $ git [ "checkout", "FETCH_HEAD" ]
(Just gpd) <- parseGenericPackageDescriptionMaybe <$> liftIO (B.readFile (fromGHCupPath tmpUnpack </> "haskell-language-server.cabal"))
(Just gpd) <- parseGenericPackageDescriptionMaybe <$> liftIO (B.readFile (tmpUnpack </> "haskell-language-server.cabal"))
pure . (\c -> Version Nothing c [] Nothing)
. NE.fromList . fmap (NE.fromList . (:[]) . digits . fromIntegral)
. versionNumbers
@@ -859,7 +842,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal patc
. packageDescription
$ gpd
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpUnpack)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform tmpUnpack
lift $ logInfo $ "Git version " <> T.pack ref <> " corresponds to HLS version " <> prettyVer tver
pure (tmpUnpack, tver)
@@ -870,30 +853,31 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal patc
liftE $ runBuildAction
workdir
(reThrowAll @_ @'[GPGError, DownloadFailed, DigestError, PatchFailed, ProcessError, FileAlreadyExistsError, CopyError] @'[BuildFailed] (BuildFailed $ fromGHCupPath workdir) $ do
let tmpInstallDir = fromGHCupPath workdir </> "out"
Nothing
(reThrowAll @_ @'[GPGError, DownloadFailed, DigestError, PatchFailed, ProcessError, FileAlreadyExistsError, CopyError] @'[BuildFailed] (BuildFailed workdir) $ do
let tmpInstallDir = workdir </> "out"
liftIO $ createDirRecursive' tmpInstallDir
-- apply patches
liftE $ applyAnyPatch patches (fromGHCupPath workdir)
liftE $ applyAnyPatch patches workdir
-- set up project files
cp <- case cabalProject of
Just (Left cp)
| isAbsolute cp -> do
copyFileE cp (fromGHCupPath workdir </> "cabal.project") False
copyFileE cp (workdir </> "cabal.project")
pure "cabal.project"
| otherwise -> pure (takeFileName cp)
Just (Right uri) -> do
tmpUnpack <- lift withGHCupTmpDir
cp <- liftE $ download uri Nothing Nothing (fromGHCupPath tmpUnpack) (Just "cabal.project") False
copyFileE cp (fromGHCupPath workdir </> "cabal.project") False
cp <- liftE $ download uri Nothing Nothing tmpUnpack (Just "cabal.project") False
copyFileE cp (workdir </> "cabal.project")
pure "cabal.project"
Nothing -> pure "cabal.project"
forM_ cabalProjectLocal $ \uri -> do
tmpUnpack <- lift withGHCupTmpDir
cpl <- liftE $ download uri Nothing Nothing (fromGHCupPath tmpUnpack) (Just (cp <.> "local")) False
copyFileE cpl (fromGHCupPath workdir </> cp <.> "local") False
cpl <- liftE $ download uri Nothing Nothing tmpUnpack (Just (cp <.> "local")) False
copyFileE cpl (workdir </> cp <.> "local")
artifacts <- forM (sort ghcs) $ \ghc -> do
let ghcInstallDir = tmpInstallDir </> T.unpack (prettyVer ghc)
liftIO $ createDirRecursive' tmpInstallDir
@@ -914,9 +898,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal patc
"exe:haskell-language-server"
, "exe:haskell-language-server-wrapper"]
)
(Just $ fromGHCupPath workdir)
"cabal"
Nothing
(Just workdir) "cabal" Nothing
pure ghcInstallDir
forM_ artifacts $ \artifact -> do
@@ -924,14 +906,14 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal patc
(tmpInstallDir </> "haskell-language-server-" <> takeFileName artifact <.> exeExt)
liftIO $ renameFile (artifact </> "haskell-language-server-wrapper" <.> exeExt)
(tmpInstallDir </> "haskell-language-server-wrapper" <.> exeExt)
liftIO $ hideError NoSuchThing $ rmFile artifact
liftIO $ rmPathForcibly artifact
case installDir of
IsolateDir isoDir -> do
lift $ logInfo $ "isolated installing HLS to " <> T.pack isoDir
liftE $ installHLSUnpackedLegacy tmpInstallDir (IsolateDirResolved isoDir) installVer True
GHCupInternal -> do
liftE $ installHLSUnpackedLegacy tmpInstallDir (GHCupBinDir binDir) installVer True
liftE $ installHLSUnpackedLegacy tmpInstallDir (GHCupDir binDir) installVer True
)
pure installVer
@@ -1037,8 +1019,8 @@ installStackBindist dlinfo ver installDir forceInstall = do
-- unpack
tmpUnpack <- lift withGHCupTmpDir
liftE $ cleanUpOnError tmpUnpack (unpackToDir (fromGHCupPath tmpUnpack) dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpUnpack)
liftE $ cleanUpOnError tmpUnpack (unpackToDir tmpUnpack dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform tmpUnpack
-- the subdir of the archive where we do the work
workdir <- maybe (pure tmpUnpack) (liftE . intoSubdir tmpUnpack) (view dlSubdir dlinfo)
@@ -1048,12 +1030,12 @@ installStackBindist dlinfo ver installDir forceInstall = do
lift $ logInfo $ "isolated installing Stack to " <> T.pack isoDir
liftE $ installStackUnpacked workdir (IsolateDirResolved isoDir) ver forceInstall
GHCupInternal -> do -- regular install
liftE $ installStackUnpacked workdir (GHCupBinDir binDir) ver forceInstall
liftE $ installStackUnpacked workdir (GHCupDir binDir) ver forceInstall
-- | Install an unpacked stack distribution.
installStackUnpacked :: (MonadReader env m, HasLog env, MonadCatch m, MonadIO m)
=> GHCupPath -- ^ Path to the unpacked stack bindist (where the executable resides)
=> FilePath -- ^ Path to the unpacked stack bindist (where the executable resides)
-> InstallDirResolved
-> Version
-> Bool -- ^ Force install
@@ -1065,15 +1047,17 @@ installStackUnpacked path installDir ver forceInstall = do
let destFileName = stackFile
<> (case installDir of
IsolateDirResolved _ -> ""
_ -> ("-" <>) . T.unpack . prettyVer $ ver
GHCupDir _ -> ("-" <>) . T.unpack . prettyVer $ ver
)
<> exeExt
destPath = fromInstallDir installDir </> destFileName
unless forceInstall
(liftE $ throwIfFileAlreadyExists destPath)
copyFileE
(fromGHCupPath path </> stackFile <> exeExt)
(path </> stackFile <> exeExt)
destPath
(not forceInstall)
lift $ chmod_755 destPath
@@ -1153,7 +1137,7 @@ setGHC ver sghc mBinDir = do
when (isNothing mBinDir) $ do
-- create symlink for share dir
when (isNothing . _tvTarget $ ver) $ lift $ symlinkShareDir (fromGHCupPath ghcdir) verS
when (isNothing . _tvTarget $ ver) $ lift $ symlinkShareDir ghcdir verS
when (sghc == SetGHCOnly) $ lift warnAboutHlsCompatibility
@@ -1173,7 +1157,7 @@ setGHC ver sghc mBinDir = do
-> m ()
symlinkShareDir ghcdir ver' = do
Dirs {..} <- getDirs
let destdir = fromGHCupPath baseDir
let destdir = baseDir
case sghc of
SetGHCOnly -> do
let sharedir = "share"
@@ -1773,11 +1757,12 @@ rmGHCVer :: ( MonadReader env m
, MonadUnliftIO m
)
=> GHCTargetVersion
-> Excepts '[NotInstalled, UninstallFailed] m ()
-> Excepts '[NotInstalled] m ()
rmGHCVer ver = do
isSetGHC <- lift $ fmap (== Just ver) $ ghcSet (_tvTarget ver)
whenM (lift $ fmap not $ ghcInstalled ver) (throwE (NotInstalled GHC ver))
dir <- lift $ ghcupGHCDir ver
-- this isn't atomic, order matters
when isSetGHC $ do
@@ -1792,20 +1777,8 @@ rmGHCVer ver = do
handle (\(_ :: ParseError) -> pure ()) $ liftE $ rmMajorGHCSymlinks ver
-- then fix them (e.g. with an earlier version)
dir' <- lift $ ghcupGHCDir ver
let dir = fromGHCupPath dir'
lift (getInstalledFiles GHC ver) >>= \case
Just files -> do
lift $ logInfo $ "Removing files safely from: " <> T.pack dir
forM_ files (lift . recycleFile . (\f -> dir </> dropDrive f))
removeEmptyDirsRecursive dir
survivors <- liftIO $ hideErrorDef [doesNotExistErrorType] [] $ listDirectory dir
f <- recordedInstallationFile GHC ver
lift $ recycleFile f
when (not (null survivors)) $ throwE $ UninstallFailed dir survivors
Nothing -> do
lift $ logInfo $ "Removing legacy directory recursively: " <> T.pack dir
lift $ recyclePathForcibly dir'
lift $ logInfo $ "Removing directory recursively: " <> T.pack dir
lift $ recyclePathForcibly dir
v' <-
handle
@@ -1817,7 +1790,7 @@ rmGHCVer ver = do
Dirs {..} <- lift getDirs
lift $ hideError doesNotExistErrorType $ rmDirectoryLink (fromGHCupPath baseDir </> "share")
lift $ hideError doesNotExistErrorType $ rmDirectoryLink (baseDir </> "share")
-- | Delete a cabal version. Will try to fix the @cabal@ symlink
@@ -1864,38 +1837,23 @@ rmHLSVer :: ( MonadMask m
, MonadUnliftIO m
)
=> Version
-> Excepts '[NotInstalled, UninstallFailed] m ()
-> Excepts '[NotInstalled] m ()
rmHLSVer ver = do
whenM (lift $ fmap not $ hlsInstalled ver) $ throwE (NotInstalled HLS (GHCTargetVersion Nothing ver))
isHlsSet <- lift hlsSet
liftE $ rmMinorHLSSymlinks ver
hlsDir <- ghcupHLSDir ver
recyclePathForcibly hlsDir
when (Just ver == isHlsSet) $ do
-- delete all set symlinks
liftE rmPlainHLS
hlsDir' <- ghcupHLSDir ver
let hlsDir = fromGHCupPath hlsDir'
lift (getInstalledFiles HLS (mkTVer ver)) >>= \case
Just files -> do
lift $ logInfo $ "Removing files safely from: " <> T.pack hlsDir
forM_ files (lift . recycleFile . (\f -> hlsDir </> dropDrive f))
removeEmptyDirsRecursive hlsDir
survivors <- liftIO $ hideErrorDef [doesNotExistErrorType] [] $ listDirectory hlsDir
f <- recordedInstallationFile HLS (mkTVer ver)
lift $ recycleFile f
when (not (null survivors)) $ throwE $ UninstallFailed hlsDir survivors
Nothing -> do
lift $ logInfo $ "Removing legacy directory recursively: " <> T.pack hlsDir
recyclePathForcibly hlsDir'
when (Just ver == isHlsSet) $ do
rmPlainHLS
-- set latest hls
hlsVers <- lift $ fmap rights getInstalledHLSs
case headMay . reverse . sort $ hlsVers of
Just latestver -> liftE $ setHLS latestver SetHLSOnly Nothing
Just latestver -> setHLS latestver SetHLSOnly Nothing
Nothing -> pure ()
@@ -1969,7 +1927,7 @@ rmGhcup = do
tempFilepath <- mkGhcupTmpDir
hideError UnsupportedOperation $
liftIO $ hideError NoSuchThing $
moveFile ghcupFilepath (fromGHCupPath tempFilepath </> "ghcup")
moveFile ghcupFilepath (tempFilepath </> "ghcup")
else
-- delete it.
hideError doesNotExistErrorType $ rmFile ghcupFilepath
@@ -1991,15 +1949,15 @@ rmTool :: ( MonadReader env m
, MonadMask m
, MonadUnliftIO m)
=> ListResult
-> Excepts '[NotInstalled, UninstallFailed] m ()
-> Excepts '[NotInstalled ] m ()
rmTool ListResult {lVer, lTool, lCross} = do
case lTool of
GHC ->
let ghcTargetVersion = GHCTargetVersion lCross lVer
in rmGHCVer ghcTargetVersion
HLS -> rmHLSVer lVer
Cabal -> liftE $ rmCabalVer lVer
Stack -> liftE $ rmStackVer lVer
Cabal -> rmCabalVer lVer
Stack -> rmStackVer lVer
GHCup -> lift rmGhcup
@@ -2017,10 +1975,9 @@ rmGhcupDirs = do
, logsDir
, cacheDir
, recycleDir
, dbDir
} <- getDirs
let envFilePath = fromGHCupPath baseDir </> "env"
let envFilePath = baseDir </> "env"
confFilePath <- getConfigFilePath
@@ -2028,21 +1985,20 @@ rmGhcupDirs = do
handleRm $ rmConfFile confFilePath
-- for xdg dirs, the order matters here
handleRm $ rmPathForcibly logsDir
handleRm $ rmPathForcibly cacheDir
handleRm $ rmDir logsDir
handleRm $ rmDir cacheDir
handleRm $ rmBinDir binDir
handleRm $ rmPathForcibly recycleDir
handleRm $ rmPathForcibly dbDir
handleRm $ rmDir recycleDir
when isWindows $ do
logInfo $ "removing " <> T.pack (fromGHCupPath baseDir </> "msys64")
handleRm $ rmPathForcibly (baseDir `appendGHCupPath` "msys64")
logInfo $ "removing " <> T.pack (baseDir </> "msys64")
handleRm $ rmPathForcibly (baseDir </> "msys64")
handleRm $ removeEmptyDirsRecursive (fromGHCupPath baseDir)
handleRm $ removeEmptyDirsRecursive baseDir
-- report files in baseDir that are left-over after
-- the standard location deletions above
hideErrorDef [doesNotExistErrorType] [] $ reportRemainingFiles (fromGHCupPath baseDir)
hideErrorDef [doesNotExistErrorType] [] $ reportRemainingFiles baseDir
where
handleRm :: (MonadReader env m, MonadCatch m, HasLog env, MonadIO m) => m () -> m ()
@@ -2052,12 +2008,22 @@ rmGhcupDirs = do
rmEnvFile :: (HasLog env, MonadReader env m, HasDirs env, MonadMask m, MonadIO m, MonadCatch m) => FilePath -> m ()
rmEnvFile enFilePath = do
logInfo "Removing Ghcup Environment File"
hideErrorDef [permissionErrorType] () $ deleteFile' enFilePath
hideErrorDef [permissionErrorType] () $ deleteFile enFilePath
rmConfFile :: (HasLog env, MonadReader env m, HasDirs env, MonadMask m, MonadIO m, MonadCatch m) => FilePath -> m ()
rmConfFile confFilePath = do
logInfo "removing Ghcup Config File"
hideErrorDef [permissionErrorType] () $ deleteFile' confFilePath
hideErrorDef [permissionErrorType] () $ deleteFile confFilePath
rmDir :: (HasLog env, MonadReader env m, HasDirs env, MonadMask m, MonadIO m, MonadCatch m) => FilePath -> m ()
rmDir dir =
-- 'getDirectoryContentsRecursive' is lazy IO. In case
-- an error leaks through, we catch it here as well,
-- although 'deleteFile' should already handle it.
hideErrorDef [doesNotExistErrorType] () $ do
logInfo $ "removing " <> T.pack dir
contents <- liftIO $ getDirectoryContentsRecursive dir
forM_ contents (deleteFile . (dir </>))
rmBinDir :: (MonadReader env m, HasDirs env, MonadMask m, MonadIO m, MonadCatch m) => FilePath -> m ()
rmBinDir binDir
@@ -2068,9 +2034,11 @@ rmGhcupDirs = do
then removeDirIfEmptyOrIsSymlink binDir
else pure ()
reportRemainingFiles :: (MonadMask m, MonadIO m) => FilePath -> m [FilePath]
reportRemainingFiles :: MonadIO m => FilePath -> m [FilePath]
reportRemainingFiles dir = do
remainingFiles <- liftIO $ S.toList (getDirectoryContentsRecursiveUnsafe dir)
-- force the files so the errors don't leak
(force -> !remainingFiles) <- liftIO
(getDirectoryContentsRecursive dir >>= evaluate)
let normalizedFilePaths = fmap normalise remainingFiles
let sortedByDepthRemainingFiles = sortBy (flip compareFn) normalizedFilePaths
let remainingFilesAbsolute = fmap (dir </>) sortedByDepthRemainingFiles
@@ -2084,33 +2052,35 @@ rmGhcupDirs = do
compareFn :: FilePath -> FilePath -> Ordering
compareFn fp1 fp2 = compare (calcDepth fp1) (calcDepth fp2)
-- we expect only files inside cache/log dir
-- we report remaining files/dirs later,
-- hence the force/quiet mode in these delete functions below.
removeEmptyDirsRecursive :: (MonadReader env m, HasDirs env, MonadMask m, MonadIO m, MonadCatch m) => FilePath -> m ()
removeEmptyDirsRecursive fp = do
cs <- liftIO $ listDirectory fp >>= filterM doesDirectoryExist . fmap (fp </>)
forM_ cs removeEmptyDirsRecursive
hideError InappropriateType $ removeDirIfEmptyOrIsSymlink fp
deleteFile' :: (MonadMask m, MonadIO m) => FilePath -> m ()
deleteFile' filepath = do
hideError doesNotExistErrorType
$ hideError InappropriateType $ rmFile filepath
removeDirIfEmptyOrIsSymlink :: (MonadReader env m, HasDirs env, MonadMask m, MonadIO m, MonadCatch m) => FilePath -> m ()
removeDirIfEmptyOrIsSymlink filepath =
hideError UnsatisfiedConstraints $
handleIO' InappropriateType
(handleIfSym filepath)
(liftIO $ removeEmptyDirectory filepath)
where
handleIfSym fp e = do
isSym <- liftIO $ pathIsSymbolicLink fp
if isSym
then deleteFile' fp
else liftIO $ ioError e
-- we expect only files inside cache/log dir
-- we report remaining files/dirs later,
-- hence the force/quiet mode in these delete functions below.
deleteFile :: (MonadReader env m, HasDirs env, MonadMask m, MonadIO m) => FilePath -> m ()
deleteFile filepath = do
hideError doesNotExistErrorType
$ hideError InappropriateType $ rmFile filepath
removeDirIfEmptyOrIsSymlink :: (MonadReader env m, HasDirs env, MonadMask m, MonadIO m, MonadCatch m) => FilePath -> m ()
removeDirIfEmptyOrIsSymlink filepath =
hideError UnsatisfiedConstraints $
handleIO' InappropriateType
(handleIfSym filepath)
(liftIO $ rmDirectory filepath)
where
handleIfSym fp e = do
isSym <- liftIO $ pathIsSymbolicLink fp
if isSym
then deleteFile fp
else liftIO $ ioError e
removeEmptyDirsRecursive :: (MonadReader env m, HasDirs env, MonadMask m, MonadIO m, MonadCatch m) => FilePath -> m ()
removeEmptyDirsRecursive fp = do
cs <- liftIO $ listDirectory fp >>= filterM doesDirectoryExist . fmap (fp </>)
forM_ cs removeEmptyDirsRecursive
hideError InappropriateType $ removeDirIfEmptyOrIsSymlink fp
------------------
@@ -2132,10 +2102,10 @@ getDebugInfo :: ( Alternative m
DebugInfo
getDebugInfo = do
Dirs {..} <- lift getDirs
let diBaseDir = fromGHCupPath baseDir
let diBaseDir = baseDir
let diBinDir = binDir
diGHCDir <- fromGHCupPath <$> lift ghcupGHCBaseDir
let diCacheDir = fromGHCupPath cacheDir
diGHCDir <- lift ghcupGHCBaseDir
let diCacheDir = cacheDir
diArch <- lE getArchitecture
diPlatform <- liftE getPlatform
pure $ DebugInfo { .. }
@@ -2194,7 +2164,6 @@ compileGHC :: ( MonadMask m
, ProcessError
, CopyError
, BuildFailed
, UninstallFailed
]
m
GHCTargetVersion
@@ -2216,20 +2185,20 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
-- unpack
tmpUnpack <- lift mkGhcupTmpDir
liftE $ cleanUpOnError tmpUnpack (unpackToDir (fromGHCupPath tmpUnpack) dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform $ fromGHCupPath tmpUnpack
liftE $ cleanUpOnError tmpUnpack (unpackToDir tmpUnpack dl)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform tmpUnpack
workdir <- maybe (pure tmpUnpack)
(liftE . intoSubdir tmpUnpack)
(view dlSubdir dlInfo)
liftE $ applyAnyPatch patches (fromGHCupPath workdir)
liftE $ applyAnyPatch patches workdir
pure (workdir, tmpUnpack, tver)
-- clone from git
Right GitBranch{..} -> do
tmpUnpack <- lift mkGhcupTmpDir
let git args = execLogged "git" ("--no-pager":args) (Just $ fromGHCupPath tmpUnpack) "git" Nothing
let git args = execLogged "git" ("--no-pager":args) (Just tmpUnpack) "git" Nothing
tver <- reThrowAll @_ @'[PatchFailed, ProcessError, NotFoundInPATH, DigestError, DownloadFailed, GPGError] DownloadFailed $ do
let rep = fromMaybe "https://gitlab.haskell.org/ghc/ghc.git" repo
lift $ logInfo $ "Fetching git repo " <> T.pack rep <> " at ref " <> T.pack ref <> " (this may take a while)"
@@ -2250,16 +2219,16 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
lEM $ git [ "checkout", "FETCH_HEAD" ]
lEM $ git [ "submodule", "update", "--init", "--depth", "1" ]
liftE $ applyAnyPatch patches (fromGHCupPath tmpUnpack)
lEM $ execWithGhcEnv "python3" ["./boot"] (Just $ fromGHCupPath tmpUnpack) "ghc-bootstrap"
lEM $ execWithGhcEnv "sh" ["./configure"] (Just $ fromGHCupPath tmpUnpack) "ghc-bootstrap"
liftE $ applyAnyPatch patches tmpUnpack
lEM $ execWithGhcEnv "python3" ["./boot"] (Just tmpUnpack) "ghc-bootstrap"
lEM $ execWithGhcEnv "sh" ["./configure"] (Just tmpUnpack) "ghc-bootstrap"
CapturedProcess {..} <- lift $ makeOut
["show!", "--quiet", "VALUE=ProjectVersion" ] (Just $ fromGHCupPath tmpUnpack)
["show!", "--quiet", "VALUE=ProjectVersion" ] (Just tmpUnpack)
case _exitCode of
ExitSuccess -> throwEither . MP.parse ghcProjectVersion "" . decUTF8Safe' $ _stdOut
ExitFailure c -> fail ("Could not figure out GHC project version. Exit code was: " <> show c <> ". Error was: " <> T.unpack (decUTF8Safe' _stdErr))
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpUnpack)
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform tmpUnpack
lift $ logInfo $ "Git version " <> T.pack ref <> " corresponds to GHC version " <> prettyVer tver
pure (tmpUnpack, tmpUnpack, GHCTargetVersion Nothing tver)
@@ -2286,11 +2255,12 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
(mBindist, bmk) <- liftE $ runBuildAction
tmpUnpack
Nothing
(do
b <- if hadrian
then compileHadrianBindist tver (fromGHCupPath workdir) ghcdir
else compileMakeBindist tver (fromGHCupPath workdir) ghcdir
bmk <- liftIO $ handleIO (\_ -> pure "") $ B.readFile (build_mk $ fromGHCupPath workdir)
then compileHadrianBindist tver workdir ghcdir
else compileMakeBindist tver workdir ghcdir
bmk <- liftIO $ handleIO (\_ -> pure "") $ B.readFile (build_mk workdir)
pure (b, bmk)
)
@@ -2420,7 +2390,7 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
Just bc -> liftIOException
doesNotExistErrorType
(FileDoesNotExistError bc)
(liftIO $ copyFile bc (build_mk workdir) False)
(liftIO $ copyFile bc (build_mk workdir))
Nothing ->
liftIO $ T.writeFile (build_mk workdir) (addBuildFlavourToConf defaultConf)
@@ -2485,8 +2455,9 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
<> T.unpack cDigest
<> ".tar"
<> takeExtension tar)
let tarPath = fromGHCupPath cacheDir </> tarName
copyFileE (workdir </> tar) tarPath False
let tarPath = cacheDir </> tarName
copyFileE (workdir </> tar)
tarPath
lift $ logInfo $ "Copied bindist to " <> T.pack tarPath
pure tarPath
@@ -2659,7 +2630,7 @@ upgradeGHCup mtarget force' fatal = do
(Just ghcupPVPVer) <- pure $ pvpToVersion ghcUpVer ""
when (not force' && (latestVer <= ghcupPVPVer)) $ throwE NoUpdate
dli <- liftE $ getDownloadInfo GHCup latestVer
tmp <- fromGHCupPath <$> lift withGHCupTmpDir
tmp <- lift withGHCupTmpDir
let fn = "ghcup" <> exeExt
p <- liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmp (Just fn) False
let destDir = takeDirectory destFile
@@ -2669,7 +2640,8 @@ upgradeGHCup mtarget force' fatal = do
lift $ logDebug $ "rm -f " <> T.pack destFile
lift $ hideError NoSuchThing $ recycleFile destFile
lift $ logDebug $ "cp " <> T.pack p <> " " <> T.pack destFile
copyFileE p destFile False
copyFileE p
destFile
lift $ chmod_755 destFile
liftIO (isInPath destFile) >>= \b -> unless b $
@@ -2753,7 +2725,7 @@ whereIsTool tool ver@GHCTargetVersion {..} = do
GHC -> do
whenM (lift $ fmap not $ ghcInstalled ver)
$ throwE (NotInstalled GHC ver)
bdir <- fromGHCupPath <$> lift (ghcupGHCDir ver)
bdir <- lift $ ghcupGHCDir ver
pure (bdir </> "bin" </> ghcBinaryName ver)
Cabal -> do
whenM (lift $ fmap not $ cabalInstalled _tvVersion)
@@ -2765,7 +2737,7 @@ whereIsTool tool ver@GHCTargetVersion {..} = do
ifM (lift $ isLegacyHLS _tvVersion)
(pure (binDir dirs </> "haskell-language-server-wrapper-" <> T.unpack (prettyVer _tvVersion) <> exeExt))
$ do
bdir <- fromGHCupPath <$> lift (ghcupHLSDir _tvVersion)
bdir <- lift $ ghcupHLSDir _tvVersion
pure (bdir </> "bin" </> "haskell-language-server-wrapper" <> exeExt)
Stack -> do
@@ -2824,7 +2796,7 @@ rmOldGHC :: ( MonadReader env m
, MonadMask m
, MonadUnliftIO m
)
=> Excepts '[NotInstalled, UninstallFailed] m ()
=> Excepts '[NotInstalled] m ()
rmOldGHC = do
GHCupInfo { _ghcupDownloads = dls } <- lift getGHCupInfo
let oldGHCs = mkTVer <$> toListOf (ix GHC % getTagged Old % to fst) dls
@@ -2851,7 +2823,6 @@ rmProfilingLibs = do
forM_ regexes $ \regex ->
forM_ ghcs $ \ghc -> do
d <- ghcupGHCDir ghc
-- TODO: audit findFilesDeep
matches <- liftIO $ handleIO (\_ -> pure []) $ findFilesDeep
d
(makeRegexOpts compExtended
@@ -2859,7 +2830,7 @@ rmProfilingLibs = do
regex
)
forM_ matches $ \m -> do
let p = fromGHCupPath d </> m
let p = d </> m
logDebug $ "rm " <> T.pack p
rmFile p
@@ -2878,8 +2849,8 @@ rmShareDir = do
ghcs <- fmap rights getInstalledGHCs
forM_ ghcs $ \ghc -> do
d <- ghcupGHCDir ghc
let p = d `appendGHCupPath` "share"
logDebug $ "rm -rf " <> T.pack (fromGHCupPath p)
let p = d </> "share"
logDebug $ "rm -rf " <> T.pack p
rmPathForcibly p
@@ -2891,7 +2862,7 @@ rmHLSNoGHC :: ( MonadReader env m
, MonadFail m
, MonadUnliftIO m
)
=> Excepts '[NotInstalled, UninstallFailed] m ()
=> Excepts '[NotInstalled] m ()
rmHLSNoGHC = do
Dirs {..} <- getDirs
ghcs <- fmap rights getInstalledGHCs
@@ -2924,9 +2895,9 @@ rmCache :: ( MonadReader env m
=> m ()
rmCache = do
Dirs {..} <- getDirs
contents <- liftIO $ listDirectory (fromGHCupPath cacheDir)
contents <- liftIO $ listDirectory cacheDir
forM_ contents $ \f -> do
let p = fromGHCupPath cacheDir </> f
let p = cacheDir </> f
logDebug $ "rm " <> T.pack p
rmFile p
@@ -2939,10 +2910,17 @@ rmTmp :: ( MonadReader env m
)
=> m ()
rmTmp = do
ghcup_dirs <- liftIO getGHCupTmpDirs
tmpdir <- liftIO getCanonicalTemporaryDirectory
ghcup_dirs <- liftIO $ handleIO (\_ -> pure []) $ findFiles
tmpdir
(makeRegexOpts compExtended
execBlank
([s|^ghcup-.*$|] :: ByteString)
)
forM_ ghcup_dirs $ \f -> do
logDebug $ "rm -rf " <> T.pack (fromGHCupPath f)
rmPathForcibly f
let p = tmpdir </> f
logDebug $ "rm -rf " <> T.pack p
rmPathForcibly p
applyAnyPatch :: ( MonadReader env m
@@ -2961,7 +2939,7 @@ applyAnyPatch :: ( MonadReader env m
applyAnyPatch Nothing _ = pure ()
applyAnyPatch (Just (Left pdir)) workdir = liftE $ applyPatches pdir workdir
applyAnyPatch (Just (Right uris)) workdir = do
tmpUnpack <- fromGHCupPath <$> lift withGHCupTmpDir
tmpUnpack <- lift withGHCupTmpDir
forM_ uris $ \uri -> do
patch <- liftE $ download uri Nothing Nothing tmpUnpack Nothing False
liftE $ applyPatch patch workdir


@@ -69,6 +69,7 @@ import Prelude hiding ( abs
, writeFile
)
import Safe
import System.Directory
import System.Environment
import System.Exit
import System.FilePath
@@ -144,7 +145,7 @@ getDownloadsF = do
yamlFromCache :: (MonadReader env m, HasDirs env) => URI -> m FilePath
yamlFromCache uri = do
Dirs{..} <- getDirs
pure (fromGHCupPath cacheDir </> (T.unpack . decUTF8Safe . urlBaseName . view pathL' $ uri))
pure (cacheDir </> (T.unpack . decUTF8Safe . urlBaseName . view pathL' $ uri))
etagsFile :: FilePath -> FilePath
@@ -241,7 +242,7 @@ getBase uri = do
Settings { metaCache } <- lift getSettings
-- for local files, let's short-circuit and ignore access time
if | scheme == "file" -> liftE $ download uri' Nothing Nothing (fromGHCupPath cacheDir) Nothing True
if | scheme == "file" -> liftE $ download uri' Nothing Nothing cacheDir Nothing True
| e -> do
accessTime <- fmap utcTimeToPOSIXSeconds $ liftIO $ getAccessTime json_file
let sinceLastAccess = utcTimeToPOSIXSeconds currentTime - accessTime
@@ -580,7 +581,7 @@ downloadCached dli mfn = do
True -> downloadCached' dli mfn Nothing
False -> do
tmp <- lift withGHCupTmpDir
liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) (fromGHCupPath tmp) mfn False
liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmp mfn False
downloadCached' :: ( MonadReader env m
@@ -598,7 +599,7 @@ downloadCached' :: ( MonadReader env m
-> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
downloadCached' dli mfn mDestDir = do
Dirs { cacheDir } <- lift getDirs
let destDir = fromMaybe (fromGHCupPath cacheDir) mDestDir
let destDir = fromMaybe cacheDir mDestDir
let fn = fromMaybe ((T.unpack . decUTF8Safe) $ urlBaseName $ view (dlUri % pathL') dli) mfn
let cachfile = destDir </> fn
fileExists <- liftIO $ doesFileExist cachfile


@@ -146,13 +146,6 @@ instance Pretty NotInstalled where
pPrint (NotInstalled tool ver) =
text "The version" <+> pPrint ver <+> text "of the tool" <+> pPrint tool <+> text "is not installed."
data UninstallFailed = UninstallFailed FilePath [FilePath]
deriving Show
instance Pretty UninstallFailed where
pPrint (UninstallFailed dir files) =
text "The following files survived uninstallation: " <+> pPrint files <+> text "...consider removing" <+> pPrint dir <+> text "manually."
-- | An executable was expected to be in PATH, but was not found.
data NotFoundInPATH = NotFoundInPATH FilePath
deriving Show


@@ -23,7 +23,6 @@ import GHCup.Errors
import GHCup.Types
import GHCup.Types.Optics
import GHCup.Types.JSON ( )
import GHCup.Utils.Dirs
import GHCup.Utils.File
import GHCup.Utils.Logger
import GHCup.Utils.Prelude
@@ -47,6 +46,7 @@ import Prelude hiding ( abs
, writeFile
)
import System.Info
import System.Directory
import System.OsRelease
import Text.PrettyPrint.HughesPJClass ( prettyShow )
import Text.Regex.Posix


@@ -26,9 +26,6 @@ module GHCup.Types
)
where
import {-# SOURCE #-} GHCup.Utils.Dirs ( fromGHCupPath )
import {-# SOURCE #-} GHCup.Utils.Dirs ( GHCupPath )
import Control.DeepSeq ( NFData, rnf )
import Data.Map.Strict ( Map )
import Data.List.NonEmpty ( NonEmpty (..) )
@@ -441,13 +438,12 @@ defaultSettings = Settings False defaultMetaCache False Never Curl False GHCupUR
instance NFData Settings
data Dirs = Dirs
{ baseDir :: GHCupPath
{ baseDir :: FilePath
, binDir :: FilePath
, cacheDir :: GHCupPath
, logsDir :: GHCupPath
, confDir :: GHCupPath
, dbDir :: GHCupPath
, recycleDir :: GHCupPath -- mainly used on windows
, cacheDir :: FilePath
, logsDir :: FilePath
, confDir :: FilePath
, recycleDir :: FilePath -- mainly used on windows
}
deriving (Show, GHC.Generic)
@@ -639,11 +635,9 @@ data InstallDir = IsolateDir FilePath
deriving (Eq, Show)
data InstallDirResolved = IsolateDirResolved FilePath
| GHCupDir GHCupPath
| GHCupBinDir FilePath
| GHCupDir FilePath
deriving (Eq, Show)
fromInstallDir :: InstallDirResolved -> FilePath
fromInstallDir (IsolateDirResolved fp) = fp
fromInstallDir (GHCupDir fp) = fromGHCupPath fp
fromInstallDir (GHCupBinDir fp) = fp
fromInstallDir (GHCupDir fp) = fp


@@ -1,5 +1,4 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
@@ -72,6 +71,7 @@ import GHC.IO.Exception
import Haskus.Utils.Variant.Excepts
import Optics
import Safe
import System.Directory hiding ( findFiles )
import System.FilePath
import System.IO.Error
import Text.Regex.Posix
@@ -86,9 +86,6 @@ import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import qualified Text.Megaparsec as MP
import qualified Data.List.NonEmpty as NE
import qualified Streamly.Prelude as S
import Control.DeepSeq (force)
import GHC.IO (evaluate)
-- $setup
@@ -280,14 +277,14 @@ rmPlainHLS = do
ghcInstalled :: (MonadIO m, MonadReader env m, HasDirs env, MonadThrow m) => GHCTargetVersion -> m Bool
ghcInstalled ver = do
ghcdir <- ghcupGHCDir ver
liftIO $ doesDirectoryExist (fromGHCupPath ghcdir)
liftIO $ doesDirectoryExist ghcdir
-- | Whether the given GHC version is installed from source.
ghcSrcInstalled :: (MonadIO m, MonadReader env m, HasDirs env, MonadThrow m) => GHCTargetVersion -> m Bool
ghcSrcInstalled ver = do
ghcdir <- ghcupGHCDir ver
liftIO $ doesFileExist (fromGHCupPath ghcdir </> ghcUpSrcBuiltFile)
liftIO $ doesFileExist (ghcdir </> ghcUpSrcBuiltFile)
-- | Whether the given GHC version is set as the current.
@@ -330,7 +327,7 @@ ghcSet mtarget = do
getInstalledGHCs :: (MonadReader env m, HasDirs env, MonadIO m) => m [Either FilePath GHCTargetVersion]
getInstalledGHCs = do
ghcdir <- ghcupGHCBaseDir
fs <- liftIO $ hideErrorDef [NoSuchThing] [] $ listDirectory (fromGHCupPath ghcdir)
fs <- liftIO $ hideErrorDef [NoSuchThing] [] $ listDirectory ghcdir
forM fs $ \f -> case parseGHCupGHCDir f of
Right r -> pure $ Right r
Left _ -> pure $ Left f
@@ -433,7 +430,7 @@ getInstalledHLSs = do
Nothing -> pure $ Left f
hlsdir <- ghcupHLSBaseDir
fs <- liftIO $ hideErrorDef [NoSuchThing] [] $ listDirectory (fromGHCupPath hlsdir)
fs <- liftIO $ hideErrorDef [NoSuchThing] [] $ listDirectory hlsdir
new <- forM fs $ \f -> case parseGHCupHLSDir f of
Right r -> pure $ Right r
Left _ -> pure $ Left f
@@ -518,7 +515,7 @@ hlsInstalled ver = do
isLegacyHLS :: (MonadIO m, MonadReader env m, HasDirs env, MonadCatch m) => Version -> m Bool
isLegacyHLS ver = do
bdir <- ghcupHLSDir ver
not <$> liftIO (doesDirectoryExist $ fromGHCupPath bdir)
not <$> liftIO (doesDirectoryExist bdir)
-- Return the currently set hls version, if any.
@@ -619,7 +616,7 @@ hlsInternalServerScripts :: (MonadReader env m, HasDirs env, MonadIO m, MonadThr
-> m [FilePath]
hlsInternalServerScripts ver mghcVer = do
dir <- ghcupHLSDir ver
let bdir = fromGHCupPath dir </> "bin"
let bdir = dir </> "bin"
fmap (bdir </>) . filter (\f -> maybe True (\gv -> ("-" <> T.unpack (prettyVer gv)) `isSuffixOf` f) mghcVer)
<$> liftIO (listDirectory bdir)
@@ -630,7 +627,7 @@ hlsInternalServerBinaries :: (MonadReader env m, HasDirs env, MonadIO m, MonadTh
-> Maybe Version -- ^ optional GHC version
-> m [FilePath]
hlsInternalServerBinaries ver mghcVer = do
dir <- fromGHCupPath <$> ghcupHLSDir ver
dir <- ghcupHLSDir ver
let regex = makeRegexOpts compExtended execBlank ([s|^haskell-language-server-.*$|] :: ByteString)
(Just bdir) <- fmap headMay $ liftIO $ expandFilePath [Left (dir </> "lib"), Right regex, Left "bin"]
fmap (bdir </>) . filter (\f -> maybe True (\gv -> ("-" <> T.unpack (prettyVer gv)) `isSuffixOf` f) mghcVer)
@@ -644,7 +641,7 @@ hlsInternalServerLibs :: (MonadReader env m, HasDirs env, MonadIO m, MonadThrow
-> Version -- ^ GHC version
-> m [FilePath]
hlsInternalServerLibs ver ghcVer = do
dir <- fromGHCupPath <$> ghcupHLSDir ver
dir <- ghcupHLSDir ver
let regex = makeRegexOpts compExtended execBlank ([s|^haskell-language-server-.*$|] :: ByteString)
(Just bdir) <- fmap headMay $ liftIO $ expandFilePath [Left (dir </> "lib"), Right regex, Left ("lib" </> T.unpack (prettyVer ghcVer))]
fmap (bdir </>) <$> liftIO (listDirectory bdir)
@@ -848,21 +845,21 @@ getArchiveFiles av = do
intoSubdir :: (MonadReader env m, HasLog env, MonadIO m, MonadThrow m, MonadCatch m)
=> GHCupPath -- ^ unpacked tar dir
=> FilePath -- ^ unpacked tar dir
-> TarDir -- ^ how to descend
-> Excepts '[TarDirDoesNotExist] m GHCupPath
-> Excepts '[TarDirDoesNotExist] m FilePath
intoSubdir bdir tardir = case tardir of
RealDir pr -> do
whenM (fmap not . liftIO . doesDirectoryExist $ fromGHCupPath (bdir `appendGHCupPath` pr))
whenM (fmap not . liftIO . doesDirectoryExist $ (bdir </> pr))
(throwE $ TarDirDoesNotExist tardir)
pure (bdir `appendGHCupPath` pr)
pure (bdir </> pr)
RegexDir r -> do
let rs = split (`elem` pathSeparators) r
foldlM
(\y x ->
(handleIO (\_ -> pure []) . liftIO . findFiles (fromGHCupPath y) . regex $ x) >>= (\case
(handleIO (\_ -> pure []) . liftIO . findFiles y . regex $ x) >>= (\case
[] -> throwE $ TarDirDoesNotExist tardir
(p : _) -> pure (y `appendGHCupPath` p)) . sort
(p : _) -> pure (y </> p)) . sort
)
bdir
rs
@@ -908,7 +905,7 @@ ghcInternalBinDir :: (MonadReader env m, HasDirs env, MonadThrow m, MonadFail m,
=> GHCTargetVersion
-> m FilePath
ghcInternalBinDir ver = do
ghcdir <- fromGHCupPath <$> ghcupGHCDir ver
ghcdir <- ghcupGHCDir ver
pure (ghcdir </> "bin")
@@ -1032,8 +1029,6 @@ darwinNotarization Darwin path = exec
darwinNotarization _ _ = pure $ Right ()
getChangeLog :: GHCupDownloads -> Tool -> Either Version Tag -> Maybe URI
getChangeLog dls tool (Left v') =
preview (ix tool % ix v' % viChangeLog % _Just) dls
@@ -1044,6 +1039,7 @@ getChangeLog dls tool (Right tag) =
-- | Execute a build action while potentially cleaning up:
--
-- 1. the build directory, depending on the KeepDirs setting
-- 2. the install destination, depending on whether the build failed
runBuildAction :: ( MonadReader env m
, HasDirs env
, HasSettings env
@@ -1054,12 +1050,15 @@ runBuildAction :: ( MonadReader env m
, MonadFail m
, MonadCatch m
)
=> GHCupPath -- ^ build directory (cleaned up depending on Settings)
=> FilePath -- ^ build directory (cleaned up depending on Settings)
-> Maybe FilePath -- ^ dir to *always* clean up on exception
-> Excepts e m a
-> Excepts e m a
runBuildAction bdir action = do
runBuildAction bdir instdir action = do
Settings {..} <- lift getSettings
let exAction = do
forM_ instdir $ \dir ->
hideError doesNotExistErrorType $ recyclePathForcibly dir
when (keepDirs == Never)
$ rmBDir bdir
v <-
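For reference, a minimal sketch (not part of the diff) of how a caller drives the new three-argument runBuildAction, mirroring the compileGHC call site earlier in this compare view; workdir, ghcdir and buildBindist are hypothetical stand-ins:

compileAndClean workdir ghcdir =
  runBuildAction
    workdir        -- build directory: removed afterwards unless KeepDirs says otherwise
    (Just ghcdir)  -- install destination: recycled if the wrapped action throws
    (buildBindist workdir ghcdir)  -- hypothetical build step running in Excepts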
@@ -1081,7 +1080,7 @@ cleanUpOnError :: ( MonadReader env m
, MonadFail m
, MonadCatch m
)
=> GHCupPath -- ^ build directory (cleaned up depending on Settings)
=> FilePath -- ^ build directory (cleaned up depending on Settings)
-> Excepts e m a
-> Excepts e m a
cleanUpOnError bdir action = do
@@ -1090,33 +1089,13 @@ cleanUpOnError bdir action = do
flip onException (lift exAction) $ onE_ exAction action
-- | Clean up the given directory if the action fails,
-- depending on the Settings.
cleanFinally :: ( MonadReader env m
, HasDirs env
, HasSettings env
, MonadIO m
, MonadMask m
, HasLog env
, MonadUnliftIO m
, MonadFail m
, MonadCatch m
)
=> GHCupPath -- ^ build directory (cleaned up depending on Settings)
-> Excepts e m a
-> Excepts e m a
cleanFinally bdir action = do
Settings {..} <- lift getSettings
let exAction = when (keepDirs == Never) $ rmBDir bdir
flip finally (lift exAction) $ onE_ exAction action
-- | Remove a build directory, ignoring if it doesn't exist and gracefully
-- printing other errors without crashing.
rmBDir :: (MonadReader env m, HasLog env, MonadUnliftIO m, MonadIO m) => GHCupPath -> m ()
rmBDir :: (MonadReader env m, HasLog env, MonadUnliftIO m, MonadIO m) => FilePath -> m ()
rmBDir dir = withRunInIO (\run -> run $
liftIO $ handleIO (\e -> run $ logWarn $
"Couldn't remove build dir " <> T.pack (fromGHCupPath dir) <> ", error was: " <> T.pack (displayException e))
"Couldn't remove build dir " <> T.pack dir <> ", error was: " <> T.pack (displayException e))
$ hideError doesNotExistErrorType
$ rmPathForcibly dir)
@@ -1202,7 +1181,7 @@ createLink :: ( MonadMask m
createLink link exe
| isWindows = do
dirs <- getDirs
let shimGen = fromGHCupPath (cacheDir dirs) </> "gs.exe"
let shimGen = cacheDir dirs </> "gs.exe"
let shim = dropExtension exe <.> "shim"
-- For hardlinks, link needs to be absolute.
@@ -1215,7 +1194,7 @@ createLink link exe
rmLink exe
logDebug $ "ln -s " <> T.pack fullLink <> " " <> T.pack exe
liftIO $ copyFile shimGen exe False
liftIO $ copyFile shimGen exe
liftIO $ writeFile shim shimContents
| otherwise = do
logDebug $ "rm -f " <> T.pack exe
@@ -1246,8 +1225,8 @@ ensureGlobalTools
let dl = downloadCached' shimDownload (Just "gs.exe") Nothing
void $ (\DigestError{} -> do
lift $ logWarn "Digest doesn't match, redownloading gs.exe..."
lift $ logDebug ("rm -f " <> T.pack (fromGHCupPath (cacheDir dirs) </> "gs.exe"))
lift $ hideError doesNotExistErrorType $ recycleFile (fromGHCupPath (cacheDir dirs) </> "gs.exe")
lift $ logDebug ("rm -f " <> T.pack (cacheDir dirs </> "gs.exe"))
lift $ hideError doesNotExistErrorType $ recycleFile (cacheDir dirs </> "gs.exe")
liftE @'[GPGError, DigestError , DownloadFailed] $ dl
) `catchE` liftE @'[GPGError, DigestError , DownloadFailed] dl
| otherwise = pure ()
@@ -1255,15 +1234,14 @@ ensureGlobalTools
-- | Ensure ghcup directory structure exists.
ensureDirectories :: Dirs -> IO ()
ensureDirectories (Dirs baseDir binDir cacheDir logsDir confDir trashDir dbDir) = do
createDirRecursive' (fromGHCupPath baseDir)
createDirRecursive' (fromGHCupPath baseDir </> "ghc")
ensureDirectories (Dirs baseDir binDir cacheDir logsDir confDir trashDir) = do
createDirRecursive' baseDir
createDirRecursive' (baseDir </> "ghc")
createDirRecursive' binDir
createDirRecursive' (fromGHCupPath cacheDir)
createDirRecursive' (fromGHCupPath logsDir)
createDirRecursive' (fromGHCupPath confDir)
createDirRecursive' (fromGHCupPath trashDir)
createDirRecursive' (fromGHCupPath dbDir)
createDirRecursive' cacheDir
createDirRecursive' logsDir
createDirRecursive' confDir
createDirRecursive' trashDir
pure ()
@@ -1286,31 +1264,11 @@ ghcBinaryName (GHCTargetVersion Nothing _) = T.unpack ("ghc" <> T.pack exeExt)
-- 3. if it exists and is non-empty -> panic and leave the house
installDestSanityCheck :: ( MonadIO m
, MonadCatch m
, MonadMask m
) =>
InstallDirResolved ->
Excepts '[DirNotEmpty] m ()
installDestSanityCheck (IsolateDirResolved isoDir) = do
hideErrorDef [doesNotExistErrorType] () $ do
empty' <- liftIO $ S.null $ getDirectoryContentsRecursiveUnsafe isoDir
when (not empty') (throwE $ DirNotEmpty isoDir)
contents <- liftIO $ getDirectoryContentsRecursive isoDir
unless (null contents) (throwE $ DirNotEmpty isoDir)
installDestSanityCheck _ = pure ()
-- | Returns 'Nothing' for legacy installs.
getInstalledFiles :: ( MonadIO m
, MonadCatch m
, MonadReader env m
, HasDirs env
, MonadFail m
)
=> Tool
-> GHCTargetVersion
-> m (Maybe [FilePath])
getInstalledFiles t v' = hideErrorDef [doesNotExistErrorType] Nothing $ do
f <- recordedInstallationFile t v'
(force -> !c) <- liftIO
(readFile f >>= evaluate)
pure (Just $ lines c)


@@ -3,7 +3,6 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE QuasiQuotes #-}
{-|
Module : GHCup.Utils.Dirs
@@ -31,74 +30,6 @@ module GHCup.Utils.Dirs
, getConfigFilePath
, useXDG
, cleanupTrash
, GHCupPath
, appendGHCupPath
, fromGHCupPath
, createTempGHCupDirectory
, getGHCupTmpDirs
, removeDirectory
, removeDirectoryRecursive
, removePathForcibly
-- System.Directory re-exports
, createDirectory
, createDirectoryIfMissing
, renameDirectory
, listDirectory
, getDirectoryContents
, getCurrentDirectory
, setCurrentDirectory
, withCurrentDirectory
, getHomeDirectory
, XdgDirectory(..)
, getXdgDirectory
, XdgDirectoryList(..)
, getXdgDirectoryList
, getAppUserDataDirectory
, getUserDocumentsDirectory
, getTemporaryDirectory
, removeFile
, renameFile
, renamePath
, getFileSize
, canonicalizePath
, makeAbsolute
, makeRelativeToCurrentDirectory
, doesPathExist
, doesFileExist
, doesDirectoryExist
, findExecutable
, findExecutables
, findExecutablesInDirectories
, findFile
, findFileWith
, findFilesWith
, exeExtension
, createFileLink
, createDirectoryLink
, removeDirectoryLink
, pathIsSymbolicLink
, getSymbolicLinkTarget
, Permissions
, emptyPermissions
, readable
, writable
, executable
, searchable
, setOwnerReadable
, setOwnerWritable
, setOwnerExecutable
, setOwnerSearchable
, getPermissions
, setPermissions
, copyPermissions
, getAccessTime
, getModificationTime
, setAccessTime
, setModificationTime
, isSymbolicLink
)
where
@@ -110,36 +41,23 @@ import GHCup.Types.Optics
import GHCup.Utils.MegaParsec
import GHCup.Utils.Logger
import GHCup.Utils.Prelude
import GHCup.Utils.File.Common
import GHCup.Utils.String.QQ
import Control.DeepSeq (NFData, rnf)
import Control.Exception.Safe
import Control.Monad
import Control.Monad.IO.Unlift
import Control.Monad.Reader
import Control.Monad.Trans.Resource hiding (throwM)
import Data.List
import Data.ByteString ( ByteString )
import Data.Bifunctor
import Data.Maybe
import Data.Versions
import GHC.IO.Exception ( IOErrorType(NoSuchThing) )
import Haskus.Utils.Variant.Excepts
import Optics
import Safe
import System.Directory hiding ( removeDirectory
, removeDirectoryRecursive
, removePathForcibly
, findFiles
)
import qualified System.Directory as SD
import System.DiskSpace
import System.Directory
import System.DiskSpace
import System.Environment
import System.FilePath
import System.IO.Temp
import Text.Regex.Posix
import qualified Data.ByteString as BS
import qualified Data.Text as T
@@ -149,41 +67,6 @@ import Control.Concurrent (threadDelay)
---------------------------
--[ GHCupPath utilities ]--
---------------------------
-- | A 'GHCupPath' is a safe sub-path that can be recursively deleted.
--
-- The constructor is not exported.
newtype GHCupPath = GHCupPath FilePath
deriving (Show, Eq, Ord)
instance NFData GHCupPath where
rnf (GHCupPath fp) = rnf fp
appendGHCupPath :: GHCupPath -> FilePath -> GHCupPath
appendGHCupPath (GHCupPath gp) fp = GHCupPath (gp </> fp)
fromGHCupPath :: GHCupPath -> FilePath
fromGHCupPath (GHCupPath gp) = gp
createTempGHCupDirectory :: GHCupPath -> FilePath -> IO GHCupPath
createTempGHCupDirectory (GHCupPath gp) d = GHCupPath <$> createTempDirectory gp d
getGHCupTmpDirs :: IO [GHCupPath]
getGHCupTmpDirs = do
tmpdir <- getCanonicalTemporaryDirectory
ghcup_dirs <- handleIO (\_ -> pure []) $ findFiles
tmpdir
(makeRegexOpts compExtended
execBlank
([s|^ghcup-.*$|] :: ByteString)
)
pure (fmap (\p -> GHCupPath (tmpdir </> p)) $ filter (maybe False ("ghcup-" `isPrefixOf`) . lastMay . splitPath) ghcup_dirs)
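For reference, a minimal standalone sketch of the newtype pattern this commit removes: keeping the constructor private means every value of the type is rooted in a directory that is safe to delete recursively. Module and names below are illustrative, not ghcup's.

-- A minimal sketch of the "safe path" pattern; only the smart constructor
-- 'baseSafePath' can create values, so every SafePath stays under a known root.
module SafePath (SafePath, baseSafePath, appendSafePath, fromSafePath) where

import System.Directory (getHomeDirectory)
import System.FilePath ((</>))

newtype SafePath = SafePath FilePath
  deriving (Show, Eq, Ord)

-- The only way to obtain a SafePath: rooted under "~/.mytool" (illustrative).
baseSafePath :: IO SafePath
baseSafePath = SafePath . (</> ".mytool") <$> getHomeDirectory

-- Descending keeps us inside the safe root.
appendSafePath :: SafePath -> FilePath -> SafePath
appendSafePath (SafePath p) d = SafePath (p </> d)

-- Read-only view for passing to ordinary FilePath APIs.
fromSafePath :: SafePath -> FilePath
fromSafePath (SafePath p) = p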
------------------------------
--[ GHCup base directories ]--
------------------------------
@@ -193,11 +76,11 @@ getGHCupTmpDirs = do
--
-- If 'GHCUP_USE_XDG_DIRS' is set (to anything),
-- then uses 'XDG_DATA_HOME/ghcup' as per xdg spec.
ghcupBaseDir :: IO GHCupPath
ghcupBaseDir :: IO FilePath
ghcupBaseDir
| isWindows = do
bdir <- fromMaybe "C:\\" <$> lookupEnv "GHCUP_INSTALL_BASE_PREFIX"
pure (GHCupPath (bdir </> "ghcup"))
pure (bdir </> "ghcup")
| otherwise = do
xdg <- useXDG
if xdg
@@ -207,19 +90,19 @@ ghcupBaseDir
Nothing -> do
home <- liftIO getHomeDirectory
pure (home </> ".local" </> "share")
pure (GHCupPath (bdir </> "ghcup"))
pure (bdir </> "ghcup")
else do
bdir <- lookupEnv "GHCUP_INSTALL_BASE_PREFIX" >>= \case
Just r -> pure r
Nothing -> liftIO getHomeDirectory
pure (GHCupPath (bdir </> ".ghcup"))
pure (bdir </> ".ghcup")
-- | ~/.ghcup by default
--
-- If 'GHCUP_USE_XDG_DIRS' is set (to anything),
-- then uses 'XDG_CONFIG_HOME/ghcup' as per xdg spec.
ghcupConfigDir :: IO GHCupPath
ghcupConfigDir :: IO FilePath
ghcupConfigDir
| isWindows = ghcupBaseDir
| otherwise = do
@@ -231,12 +114,12 @@ ghcupConfigDir
Nothing -> do
home <- liftIO getHomeDirectory
pure (home </> ".config")
pure (GHCupPath (bdir </> "ghcup"))
pure (bdir </> "ghcup")
else do
bdir <- lookupEnv "GHCUP_INSTALL_BASE_PREFIX" >>= \case
Just r -> pure r
Nothing -> liftIO getHomeDirectory
pure (GHCupPath (bdir </> ".ghcup"))
pure (bdir </> ".ghcup")
-- | If 'GHCUP_USE_XDG_DIRS' is set (to anything),
@@ -244,7 +127,7 @@ ghcupConfigDir
-- (which, sadly is not strictly xdg spec).
ghcupBinDir :: IO FilePath
ghcupBinDir
| isWindows = (fromGHCupPath <$> ghcupBaseDir) <&> (</> "bin")
| isWindows = ghcupBaseDir <&> (</> "bin")
| otherwise = do
xdg <- useXDG
if xdg
@@ -254,16 +137,16 @@ ghcupBinDir
Nothing -> do
home <- liftIO getHomeDirectory
pure (home </> ".local" </> "bin")
else (fromGHCupPath <$> ghcupBaseDir) <&> (</> "bin")
else ghcupBaseDir <&> (</> "bin")
-- | Defaults to '~/.ghcup/cache'.
--
-- If 'GHCUP_USE_XDG_DIRS' is set (to anything),
-- then uses 'XDG_CACHE_HOME/ghcup' as per xdg spec.
ghcupCacheDir :: IO GHCupPath
ghcupCacheDir :: IO FilePath
ghcupCacheDir
| isWindows = ghcupBaseDir <&> (\(GHCupPath gp) -> GHCupPath (gp </> "cache"))
| isWindows = ghcupBaseDir <&> (</> "cache")
| otherwise = do
xdg <- useXDG
if xdg
@@ -273,17 +156,17 @@ ghcupCacheDir
Nothing -> do
home <- liftIO getHomeDirectory
pure (home </> ".cache")
pure (GHCupPath (bdir </> "ghcup"))
else ghcupBaseDir <&> (\(GHCupPath gp) -> GHCupPath (gp </> "cache"))
pure (bdir </> "ghcup")
else ghcupBaseDir <&> (</> "cache")
-- | Defaults to '~/.ghcup/logs'.
--
-- If 'GHCUP_USE_XDG_DIRS' is set (to anything),
-- then uses 'XDG_CACHE_HOME/ghcup/logs' as per xdg spec.
ghcupLogsDir :: IO GHCupPath
ghcupLogsDir :: IO FilePath
ghcupLogsDir
| isWindows = ghcupBaseDir <&> (\(GHCupPath gp) -> GHCupPath (gp </> "logs"))
| isWindows = ghcupBaseDir <&> (</> "logs")
| otherwise = do
xdg <- useXDG
if xdg
@@ -293,34 +176,14 @@ ghcupLogsDir
Nothing -> do
home <- liftIO getHomeDirectory
pure (home </> ".cache")
pure (GHCupPath (bdir </> "ghcup" </> "logs"))
else ghcupBaseDir <&> (\(GHCupPath gp) -> GHCupPath (gp </> "logs"))
-- | Defaults to '~/.ghcup/db.
--
-- If 'GHCUP_USE_XDG_DIRS' is set (to anything),
-- then uses 'XDG_CACHE_HOME/ghcup/db as per xdg spec.
ghcupDbDir :: IO GHCupPath
ghcupDbDir
| isWindows = ghcupBaseDir <&> (\(GHCupPath gp) -> GHCupPath (gp </> "db"))
| otherwise = do
xdg <- useXDG
if xdg
then do
bdir <- lookupEnv "XDG_CACHE_HOME" >>= \case
Just r -> pure r
Nothing -> do
home <- liftIO getHomeDirectory
pure (home </> ".cache")
pure (GHCupPath (bdir </> "ghcup" </> "db"))
else ghcupBaseDir <&> (\(GHCupPath gp) -> GHCupPath (gp </> "db"))
pure (bdir </> "ghcup" </> "logs")
else ghcupBaseDir <&> (</> "logs")
-- | '~/.ghcup/trash'.
-- Mainly used on windows to improve file removal operations
ghcupRecycleDir :: IO GHCupPath
ghcupRecycleDir = ghcupBaseDir <&> (\(GHCupPath gp) -> GHCupPath (gp </> "trash"))
ghcupRecycleDir :: IO FilePath
ghcupRecycleDir = ghcupBaseDir <&> (</> "trash")
@@ -332,7 +195,6 @@ getAllDirs = do
logsDir <- ghcupLogsDir
confDir <- ghcupConfigDir
recycleDir <- ghcupRecycleDir
dbDir <- ghcupDbDir
pure Dirs { .. }
@@ -344,7 +206,7 @@ getAllDirs = do
getConfigFilePath :: (MonadIO m) => m FilePath
getConfigFilePath = do
confDir <- liftIO ghcupConfigDir
pure $ fromGHCupPath confDir </> "config.yaml"
pure $ confDir </> "config.yaml"
ghcupConfigFile :: (MonadIO m)
=> Excepts '[JSONError] m UserSettings
@@ -362,10 +224,10 @@ ghcupConfigFile = do
-- | ~/.ghcup/ghc by default.
ghcupGHCBaseDir :: (MonadReader env m, HasDirs env) => m GHCupPath
ghcupGHCBaseDir :: (MonadReader env m, HasDirs env) => m FilePath
ghcupGHCBaseDir = do
Dirs {..} <- getDirs
pure (baseDir `appendGHCupPath` "ghc")
pure (baseDir </> "ghc")
-- | Gets '~/.ghcup/ghc/<ghcupGHCDir>'.
@@ -374,11 +236,11 @@ ghcupGHCBaseDir = do
-- * 8.8.4
ghcupGHCDir :: (MonadReader env m, HasDirs env, MonadThrow m)
=> GHCTargetVersion
-> m GHCupPath
-> m FilePath
ghcupGHCDir ver = do
ghcbasedir <- ghcupGHCBaseDir
let verdir = T.unpack $ tVerToText ver
pure (ghcbasedir `appendGHCupPath` verdir)
pure (ghcbasedir </> verdir)
-- | See 'ghcupToolParser'.
@@ -391,19 +253,19 @@ parseGHCupHLSDir (T.pack -> fp) =
throwEither $ MP.parse version' "" fp
-- | ~/.ghcup/hls by default, for new-style installs.
ghcupHLSBaseDir :: (MonadReader env m, HasDirs env) => m GHCupPath
ghcupHLSBaseDir :: (MonadReader env m, HasDirs env) => m FilePath
ghcupHLSBaseDir = do
Dirs {..} <- getDirs
pure (baseDir `appendGHCupPath` "hls")
pure (baseDir </> "hls")
-- | Gets '~/.ghcup/hls/<hls-ver>' for new-style installs.
ghcupHLSDir :: (MonadReader env m, HasDirs env, MonadThrow m)
=> Version
-> m GHCupPath
-> m FilePath
ghcupHLSDir ver = do
basedir <- ghcupHLSBaseDir
let verdir = T.unpack $ prettyVer ver
pure (basedir `appendGHCupPath` verdir)
pure (basedir </> verdir)
mkGhcupTmpDir :: ( MonadReader env m
, HasDirs env
@@ -413,8 +275,8 @@ mkGhcupTmpDir :: ( MonadReader env m
, MonadThrow m
, MonadMask m
, MonadIO m)
=> m GHCupPath
mkGhcupTmpDir = GHCupPath <$> do
=> m FilePath
mkGhcupTmpDir = do
tmpdir <- liftIO getCanonicalTemporaryDirectory
let minSpace = 5000 -- a rough guess, aight?
@@ -450,14 +312,14 @@ withGHCupTmpDir :: ( MonadReader env m
, MonadThrow m
, MonadMask m
, MonadIO m)
=> m GHCupPath
=> m FilePath
withGHCupTmpDir = snd <$> withRunInIO (\run ->
run
$ allocate
(run mkGhcupTmpDir)
(\fp ->
handleIO (\e -> run
$ logDebug ("Resource cleanup failed for " <> T.pack (fromGHCupPath fp) <> ", error was: " <> T.pack (displayException e)))
$ logDebug ("Resource cleanup failed for " <> T.pack fp <> ", error was: " <> T.pack (displayException e)))
. rmPathForcibly
$ fp))
@@ -498,27 +360,12 @@ cleanupTrash :: ( MonadIO m
=> m ()
cleanupTrash = do
Dirs { recycleDir } <- getDirs
contents <- liftIO $ listDirectory (fromGHCupPath recycleDir)
contents <- liftIO $ listDirectory recycleDir
if null contents
then pure ()
else do
logWarn ("Removing leftover files in " <> T.pack (fromGHCupPath recycleDir))
logWarn ("Removing leftover files in " <> T.pack recycleDir)
forM_ contents (\fp -> handleIO (\e ->
logDebug ("Resource cleanup failed for " <> T.pack fp <> ", error was: " <> T.pack (displayException e))
) $ liftIO $ removePathForcibly (recycleDir `appendGHCupPath` fp))
-- System.Directory re-exports with GHCupPath
removeDirectory :: GHCupPath -> IO ()
removeDirectory (GHCupPath fp) = SD.removeDirectory fp
removeDirectoryRecursive :: GHCupPath -> IO ()
removeDirectoryRecursive (GHCupPath fp) = SD.removeDirectoryRecursive fp
removePathForcibly :: GHCupPath -> IO ()
removePathForcibly (GHCupPath fp) = SD.removePathForcibly fp
) $ liftIO $ removePathForcibly (recycleDir </> fp))


@@ -1,37 +0,0 @@
module GHCup.Utils.Dirs
( GHCupPath
, appendGHCupPath
, fromGHCupPath
, createTempGHCupDirectory
, removeDirectory
, removeDirectoryRecursive
, removePathForcibly
)
where
import Control.DeepSeq (NFData)
-- | A 'GHCupPath' is a safe sub-path that can be recursively deleted.
newtype GHCupPath = GHCupPath FilePath
instance Show GHCupPath where
instance Eq GHCupPath where
instance Ord GHCupPath where
instance NFData GHCupPath where
appendGHCupPath :: GHCupPath -> FilePath -> GHCupPath
fromGHCupPath :: GHCupPath -> FilePath
createTempGHCupDirectory :: GHCupPath -> FilePath -> IO GHCupPath
removeDirectory :: GHCupPath -> IO ()
removeDirectoryRecursive :: GHCupPath -> IO ()
removePathForcibly :: GHCupPath -> IO ()


@@ -1,160 +1,17 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module GHCup.Utils.File (
mergeFileTree,
copyFileE,
findFilesDeep,
getDirectoryContentsRecursive,
getDirectoryContentsRecursiveBFS,
getDirectoryContentsRecursiveDFS,
getDirectoryContentsRecursiveUnsafe,
getDirectoryContentsRecursiveBFSUnsafe,
getDirectoryContentsRecursiveDFSUnsafe,
recordedInstallationFile,
module GHCup.Utils.File.Common,
executeOut,
execLogged,
exec,
toProcessError,
chmod_755,
isBrokenSymlink,
copyFile,
deleteFile,
install,
removeEmptyDirectory,
#if IS_WINDOWS
module GHCup.Utils.File.Windows
#else
module GHCup.Utils.File.Posix
#endif
) where
import GHCup.Utils.Dirs
import GHCup.Utils.File.Common
#if IS_WINDOWS
import GHCup.Utils.File.Windows
#else
import GHCup.Utils.File.Posix
#endif
import GHCup.Errors
import GHCup.Types
import GHCup.Types.Optics
import GHCup.Utils.Prelude
import Text.Regex.Posix
import Control.Exception.Safe
import Haskus.Utils.Variant.Excepts
import Control.Monad.Reader
import System.FilePath
import Text.PrettyPrint.HughesPJClass (prettyShow)
import qualified Data.Text as T
import qualified Streamly.Prelude as S
mergeFileTree :: (MonadMask m, S.MonadAsync m, MonadReader env m, HasDirs env)
=> GHCupPath -- ^ source base directory from which to install findFiles
-> InstallDirResolved -- ^ destination base dir
-> Tool
-> GHCTargetVersion
-> (FilePath -> FilePath -> m ()) -- ^ file copy operation
-> m ()
mergeFileTree sourceBase destBase tool v' copyOp = do
-- These checks are not atomic, but we perform them to have
-- the opportunity to abort before copying has started.
--
-- The actual copying might still fail.
liftIO $ baseCheck (fromGHCupPath sourceBase)
liftIO $ destCheck (fromInstallDir destBase)
recFile <- recordedInstallationFile tool v'
case destBase of
IsolateDirResolved _ -> pure ()
_ -> do
whenM (liftIO $ doesFileExist recFile) $ throwIO $ userError ("mergeFileTree: DB file " <> recFile <> " already exists!")
liftIO $ createDirectoryIfMissing True (takeDirectory recFile)
flip S.mapM_ (getDirectoryContentsRecursive sourceBase) $ \f -> do
copy f
recordInstalledFile f recFile
pure f
where
recordInstalledFile f recFile = do
case destBase of
IsolateDirResolved _ -> pure ()
_ -> liftIO $ appendFile recFile (f <> "\n")
copy source = do
let dest = fromInstallDir destBase </> source
src = fromGHCupPath sourceBase </> source
when (isAbsolute source)
$ throwIO $ userError ("mergeFileTree: source file " <> source <> " is not relative!")
liftIO . createDirectoryIfMissing True . takeDirectory $ dest
copyOp src dest
baseCheck src = do
when (isRelative src)
$ throwIO $ userError ("mergeFileTree: source base directory " <> src <> " is not absolute!")
whenM (not <$> doesDirectoryExist src)
$ throwIO $ userError ("mergeFileTree: source base directory " <> src <> " does not exist!")
destCheck dest = do
when (isRelative dest)
$ throwIO $ userError ("mergeFileTree: destination base directory " <> dest <> " is not absolute!")
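A hedged sketch of a call to mergeFileTree as defined above; unpackedDir (a GHCupPath), dest (an already-resolved InstallDirResolved) and ghcVer are hypothetical bindings a caller would already hold:

installTree unpackedDir dest ghcVer =
  mergeFileTree unpackedDir dest GHC ghcVer
    (\src dst -> liftIO $ install src dst False)  -- copy files / recreate symlinks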
copyFileE :: (CopyError :< xs, MonadCatch m, MonadIO m) => FilePath -> FilePath -> Bool -> Excepts xs m ()
copyFileE from to = handleIO (throwE . CopyError . show) . liftIO . copyFile from to
-- | List all the files in a directory and all subdirectories.
--
-- The order places files in sub-directories after all the files in their
-- parent directories. The list is generated lazily so is not well defined if
-- the source directory structure changes before the list is used.
--
-- depth first
getDirectoryContentsRecursiveDFS :: (MonadCatch m, S.MonadAsync m, MonadMask m)
=> GHCupPath
-> S.SerialT m FilePath
getDirectoryContentsRecursiveDFS (fromGHCupPath -> fp) = getDirectoryContentsRecursiveDFSUnsafe fp
-- breadth first
getDirectoryContentsRecursiveBFS :: (MonadCatch m, S.MonadAsync m, MonadMask m)
=> GHCupPath
-> S.SerialT m FilePath
getDirectoryContentsRecursiveBFS (fromGHCupPath -> fp) = getDirectoryContentsRecursiveBFSUnsafe fp
getDirectoryContentsRecursive :: (MonadCatch m, S.MonadAsync m, MonadMask m)
=> GHCupPath
-> S.SerialT m FilePath
getDirectoryContentsRecursive = getDirectoryContentsRecursiveBFS
getDirectoryContentsRecursiveUnsafe :: (MonadCatch m, S.MonadAsync m, MonadMask m)
=> FilePath
-> S.SerialT m FilePath
getDirectoryContentsRecursiveUnsafe = getDirectoryContentsRecursiveBFSUnsafe
findFilesDeep :: GHCupPath -> Regex -> IO [FilePath]
findFilesDeep path regex =
S.toList $ S.filter (match regex) $ getDirectoryContentsRecursive path
recordedInstallationFile :: ( MonadReader env m
, HasDirs env
)
=> Tool
-> GHCTargetVersion
-> m FilePath
recordedInstallationFile t v' = do
Dirs {..} <- getDirs
pure (fromGHCupPath dbDir </> prettyShow t </> T.unpack (tVerToText v'))


@@ -1,4 +1,3 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
@@ -16,16 +15,14 @@ import Data.Maybe
import Data.Text ( Text )
import Data.Void
import GHC.IO.Exception
import System.Directory hiding ( removeDirectory
, removeDirectoryRecursive
, removePathForcibly
, findFiles
)
import Optics hiding ((<|), (|>))
import System.Directory hiding (findFiles)
import System.FilePath
import Text.PrettyPrint.HughesPJClass hiding ( (<>) )
import Text.Regex.Posix
import qualified Data.Text as T
import qualified Data.ByteString.Lazy as BL
import qualified Text.Megaparsec as MP
@@ -99,6 +96,10 @@ findFiles path regex = do
contents <- listDirectory path
pure $ filter (match regex) contents
findFilesDeep :: FilePath -> Regex -> IO [FilePath]
findFilesDeep path regex = do
contents <- getDirectoryContentsRecursive path
pure $ filter (match regex) contents
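A usage sketch for the new findFilesDeep; the pattern is hypothetical and collects every *.conf file underneath a directory tree:

findConfFiles :: FilePath -> IO [FilePath]
findConfFiles root =
  findFilesDeep root (makeRegexOpts compExtended execBlank ("\\.conf$" :: String))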
findFiles' :: FilePath -> MP.Parsec Void Text a -> IO [FilePath]
findFiles' path parser = do
@@ -108,5 +109,3 @@ findFiles' path parser = do
checkFileAlreadyExists :: (MonadIO m) => FilePath -> m Bool
checkFileAlreadyExists fp = liftIO $ doesFileExist fp


@@ -1,7 +1,5 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE MultiWayIf #-}
{-|
Module : GHCup.Utils.File.Posix
@@ -17,17 +15,15 @@ Some of these functions use sophisticated logging.
-}
module GHCup.Utils.File.Posix where
import GHCup.Utils.Dirs
import GHCup.Utils.File.Common
import GHCup.Utils.Prelude
import GHCup.Utils.Logger
import GHCup.Types
import GHCup.Types.Optics
import GHCup.Utils.File.Posix.Traversals
import Control.Concurrent
import Control.Concurrent.Async
import qualified Control.Exception as E
import Control.Exception ( evaluate )
import Control.Exception.Safe
import Control.Monad
import Control.Monad.Reader
@@ -38,15 +34,12 @@ import Data.IORef
import Data.Sequence ( Seq, (|>) )
import Data.List
import Data.Word8
import Foreign.C.String
import Foreign.C.Types
import GHC.IO.Exception
import System.IO ( stderr, hClose, hSetBinaryMode )
import System.IO ( stderr )
import System.IO.Error
import System.FilePath
import System.Directory
import System.Posix.Directory
import System.Posix.Error ( throwErrnoPathIfMinus1Retry )
import System.Posix.Internals ( withFilePath )
import System.Posix.Files
import System.Posix.IO
import System.Posix.Process ( ProcessStatus(..) )
@@ -57,27 +50,12 @@ import qualified Control.Exception as EX
import qualified Data.Sequence as Sq
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import qualified System.Posix.Directory as PD
import qualified System.Posix.Files as PF
import qualified System.Posix.Process as SPP
import qualified System.Posix.IO as SPI
import qualified System.Console.Terminal.Size as TP
import qualified System.Posix as Posix
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import qualified "unix-bytestring" System.Posix.IO.ByteString
as SPIB
import qualified Streamly.FileSystem.Handle as FH
import qualified Streamly.Internal.FileSystem.Handle
as IFH
import qualified Streamly.Prelude as S
import qualified GHCup.Utils.File.Posix.Foreign as FD
import qualified Streamly.Internal.Data.Stream.StreamD.Type
as D
import Streamly.Internal.Data.Unfold.Type
import qualified Streamly.Internal.Data.Unfold as U
import Streamly.Internal.Control.Concurrent ( withRunInIO )
import Streamly.Internal.Data.IOFinalizer ( newIOFinalizer, runIOFinalizer )
@@ -109,7 +87,7 @@ execLogged exe args chdir lfile env = do
Settings {..} <- getSettings
Dirs {..} <- getDirs
logDebug $ T.pack $ "Running " <> exe <> " with arguments " <> show args
let logfile = fromGHCupPath logsDir </> lfile <> ".log"
let logfile = logsDir </> lfile <> ".log"
liftIO $ bracket (openFd logfile WriteOnly (Just newFilePerms) defaultFileFlags{ append = True })
closeFd
(action verbose noColor)
@@ -284,7 +262,7 @@ captureOutStreams action = do
-- execute the action
a <- action
void $ E.evaluate a
void $ evaluate a
-- close everything we don't need
closeFd childStdoutWrite
@@ -421,201 +399,3 @@ isBrokenSymlink fp = do
Right b -> pure b
Left e | isDoesNotExistError e -> pure False
| otherwise -> throwIO e
copyFile :: FilePath -- ^ source file
-> FilePath -- ^ destination file
-> Bool -- ^ fail if file exists
-> IO ()
copyFile from to fail' = do
bracket
(openFdHandle from SPI.ReadOnly [FD.oNofollow] Nothing)
(hClose . snd)
$ \(fromFd, fH) -> do
sourceFileMode <- fileMode <$> getFdStatus fromFd
let dflags = [ FD.oNofollow
, if fail' then FD.oExcl else FD.oTrunc
]
bracket
(openFdHandle to SPI.WriteOnly dflags $ Just sourceFileMode)
(hClose . snd)
$ \(_, tH) -> do
hSetBinaryMode fH True
hSetBinaryMode tH True
streamlyCopy (fH, tH)
where
openFdHandle fp omode flags fM = do
fd <- openFd' fp omode flags fM
handle' <- SPI.fdToHandle fd
pure (fd, handle')
streamlyCopy (fH, tH) =
S.fold (FH.writeChunks tH) $ IFH.toChunksWithBufferOf (256 * 1024) fH
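A usage sketch for the fd-based copyFile above: the Bool selects O_EXCL (fail if the destination exists) versus O_TRUNC (overwrite); the paths are hypothetical.

overwriteCopy :: IO ()
overwriteCopy = copyFile "/tmp/ghcup-test/source.bin" "/tmp/ghcup-test/dest.bin" False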
foreign import ccall unsafe "open"
c_open :: CString -> CInt -> Posix.CMode -> IO CInt
open_ :: CString
-> Posix.OpenMode
-> [FD.Flags]
-> Maybe Posix.FileMode
-> IO Posix.Fd
open_ str how optional_flags maybe_mode = do
fd <- c_open str all_flags mode_w
return (Posix.Fd fd)
where
all_flags = FD.unionFlags $ optional_flags ++ [open_mode] ++ creat
(creat, mode_w) = case maybe_mode of
Nothing -> ([],0)
Just x -> ([FD.oCreat], x)
open_mode = case how of
Posix.ReadOnly -> FD.oRdonly
Posix.WriteOnly -> FD.oWronly
Posix.ReadWrite -> FD.oRdwr
-- |Open and optionally create this file. See 'System.Posix.Files'
-- for information on how to use the 'FileMode' type.
--
-- Note that passing @Just x@ as the 4th argument triggers the
-- `oCreat` status flag, which must be set when you pass in `oExcl`
-- to the status flags. Also see the manpage for @open(2)@.
openFd' :: FilePath
-> Posix.OpenMode
-> [FD.Flags] -- ^ status flags of @open(2)@
-> Maybe Posix.FileMode -- ^ @Just x@ => creates the file with the given modes, Nothing => the file must exist.
-> IO Posix.Fd
openFd' name how optional_flags maybe_mode =
withFilePath name $ \str ->
throwErrnoPathIfMinus1Retry "openFd" name $
open_ str how optional_flags maybe_mode
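A usage sketch for openFd': passing a Just mode implies oCreat, so adding oExcl creates the file exclusively and fails with EEXIST if it already exists (file name and mode below are hypothetical).

createFresh :: FilePath -> IO Posix.Fd
createFresh fp = openFd' fp Posix.WriteOnly [FD.oExcl] (Just 0o644)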
-- |Deletes the given file. Raises `eISDIR`
-- if run on a directory. Does not follow symbolic links.
--
-- Throws:
--
-- - `InappropriateType` for wrong file type (directory)
-- - `NoSuchThing` if the file does not exist
-- - `PermissionDenied` if the directory cannot be read
--
-- Notes: calls `unlink`
deleteFile :: FilePath -> IO ()
deleteFile = removeLink
-- |Recreate a symlink.
--
-- In `Overwrite` copy mode only files and empty directories are deleted.
--
-- Safety/reliability concerns:
--
-- * `Overwrite` mode is inherently non-atomic
--
-- Throws:
--
-- - `InvalidArgument` if source file is wrong type (not a symlink)
-- - `PermissionDenied` if output directory cannot be written to
-- - `PermissionDenied` if source directory cannot be opened
-- - `SameFile` if source and destination are the same file
-- (`HPathIOException`)
--
--
-- Throws in `Strict` mode only:
--
-- - `AlreadyExists` if destination already exists
--
-- Throws in `Overwrite` mode only:
--
-- - `UnsatisfiedConstraints` if destination file is non-empty directory
--
-- Notes:
--
-- - calls `symlink`
recreateSymlink :: FilePath -- ^ the old symlink file
-> FilePath -- ^ destination file
-> Bool -- ^ fail if destination file exists
-> IO ()
recreateSymlink symsource newsym fail' = do
sympoint <- readSymbolicLink symsource
case fail' of
True -> pure ()
False ->
hideError doesNotExistErrorType $ deleteFile newsym
createSymbolicLink sympoint newsym
-- copies files, recreates symlinks, fails on all other types
install :: FilePath -> FilePath -> Bool -> IO ()
install from to fail' = do
fs <- PF.getSymbolicLinkStatus from
decide fs
where
decide fs | PF.isRegularFile fs = copyFile from to fail'
| PF.isSymbolicLink fs = recreateSymlink from to fail'
| otherwise = ioError $ mkIOError illegalOperationErrorType "install: not a regular file or symlink" Nothing (Just from)
removeEmptyDirectory :: FilePath -> IO ()
removeEmptyDirectory = PD.removeDirectory
-- | Create an 'Unfold' of directory contents.
unfoldDirContents :: (MonadMask m, MonadIO m, S.MonadAsync m) => Unfold m FilePath (FD.DirType, FilePath)
unfoldDirContents = U.bracket (liftIO . openDirStream) (liftIO . closeDirStream) (Unfold step return)
where
{-# INLINE [0] step #-}
step dirstream = do
(typ, e) <- liftIO $ readDirEnt dirstream
return $ if
| null e -> D.Stop
| "." == e -> D.Skip dirstream
| ".." == e -> D.Skip dirstream
| otherwise -> D.Yield (typ, e) dirstream
getDirectoryContentsRecursiveDFSUnsafe :: (MonadMask m, MonadIO m, S.MonadAsync m)
=> FilePath
-> S.SerialT m FilePath
getDirectoryContentsRecursiveDFSUnsafe fp = go ""
where
go cd = flip S.concatMap (S.unfold unfoldDirContents (fp </> cd)) $ \(t, f) ->
if | t == FD.dtDir -> go (cd </> f)
| otherwise -> pure (cd </> f)
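A usage sketch for the depth-first traversal above, forcing the lazy stream into a plain list (S is Streamly.Prelude, as imported by this module):

listTreeDepthFirst :: FilePath -> IO [FilePath]
listTreeDepthFirst = S.toList . getDirectoryContentsRecursiveDFSUnsafe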
getDirectoryContentsRecursiveUnfold :: (MonadMask m, MonadIO m, S.MonadAsync m) => Unfold m FilePath FilePath
getDirectoryContentsRecursiveUnfold = Unfold step (\s -> return (s, Nothing, [""]))
where
{-# INLINE [0] step #-}
step (_, Nothing, []) = return D.Stop
step (topdir, Just (cdir, dirstream, finalizer), dirs) = flip onException (runIOFinalizer finalizer) $ do
(dt, f) <- liftIO $ readDirEnt dirstream
if | FD.dtUnknown == dt -> do
runIOFinalizer finalizer
return $ D.Skip (topdir, Nothing, dirs)
| f == "." || f == ".."
-> return $ D.Skip (topdir, Just (cdir, dirstream, finalizer), dirs)
| FD.dtDir == dt -> return $ D.Skip (topdir, Just (cdir, dirstream, finalizer), (cdir </> f):dirs)
| otherwise -> return $ D.Yield (cdir </> f) (topdir, Just (cdir, dirstream, finalizer), dirs)
step (topdir, Nothing, dir:dirs) = do
(s, f) <- acquire (topdir </> dir)
return $ D.Skip (topdir, Just (dir, s, f), dirs)
acquire dir =
withRunInIO $ \run -> mask_ $ run $ do
dirstream <- liftIO $ openDirStream dir
ref <- newIOFinalizer (liftIO $ closeDirStream dirstream)
return (dirstream, ref)
getDirectoryContentsRecursiveBFSUnsafe :: (MonadMask m, MonadIO m, S.MonadAsync m)
=> FilePath
-> S.SerialT m FilePath
getDirectoryContentsRecursiveBFSUnsafe = S.unfold getDirectoryContentsRecursiveUnfold


@@ -1,58 +0,0 @@
{-# LANGUAGE PatternSynonyms #-}
module GHCup.Utils.File.Posix.Foreign where
import Data.Bits
import Data.List (foldl')
import Foreign.C.Types
#include <limits.h>
#include <stdlib.h>
#include <dirent.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
newtype DirType = DirType Int deriving (Eq, Show)
data Flags = Flags Int | UnsupportedFlag String deriving (Eq, Show)
unFlags :: Flags -> Int
unFlags (Flags i) = i
unFlags (UnsupportedFlag name) = error (name ++ " is not supported on this platform")
-- |Returns @True@ if posix-paths was compiled with support for the provided
-- flag. (As of this writing, the only flag for which this check may be
-- necessary is 'oCloexec'; all other flags will always yield @True@.)
isSupported :: Flags -> Bool
isSupported (Flags _) = True
isSupported _ = False
-- |@O_CLOEXEC@ is not supported on every POSIX platform. Use
-- @'isSupported' oCloexec@ to determine if support for @O_CLOEXEC@ was
-- compiled into your version of posix-paths. (If not, using @oCloexec@ will
-- throw an exception.)
oCloexec :: Flags
#ifdef O_CLOEXEC
oCloexec = Flags #{const O_CLOEXEC}
#else
{-# WARNING oCloexec
"This version of posix-paths was compiled without @O_CLOEXEC@ support." #-}
oCloexec = UnsupportedFlag "O_CLOEXEC"
#endif
-- If these enum declarations occur earlier in the file, haddock
-- gets royally confused about the above doc comments.
-- Probably http://trac.haskell.org/haddock/ticket/138
#{enum DirType, DirType, DT_BLK, DT_CHR, DT_DIR, DT_FIFO, DT_LNK, DT_REG, DT_SOCK, DT_UNKNOWN}
#{enum Flags, Flags, O_APPEND, O_ASYNC, O_CREAT, O_DIRECTORY, O_EXCL, O_NOCTTY, O_NOFOLLOW, O_NONBLOCK, O_RDONLY, O_WRONLY, O_RDWR, O_SYNC, O_TRUNC}
pathMax :: Int
pathMax = #{const PATH_MAX}
unionFlags :: [Flags] -> CInt
unionFlags = fromIntegral . foldl' ((. unFlags) . (.|.)) 0
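A usage sketch: unionFlags ORs the individual flag bits into the word handed to open(2), e.g. a read-only, no-follow open:

readOnlyNoFollow :: CInt
readOnlyNoFollow = unionFlags [oRdonly, oNofollow]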


@@ -1,92 +0,0 @@
{-# LANGUAGE CApiFFI #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wall #-}
module GHCup.Utils.File.Posix.Traversals (
-- lower-level stuff
readDirEnt
, unpackDirStream
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>))
#endif
import GHCup.Utils.File.Posix.Foreign
import Unsafe.Coerce (unsafeCoerce)
import Foreign.C.Error
import Foreign.C.String
import Foreign.C.Types
import Foreign.Ptr
import Foreign.Storable
import System.Posix
import Foreign (alloca)
import System.Posix.Internals (peekFilePath)
----------------------------------------------------------
-- dodgy stuff
type CDir = ()
type CDirent = ()
-- Posix doesn't export DirStream, so to re-use that type we need to use
-- unsafeCoerce. It's just a newtype, so this is a legitimate usage.
-- ugly trick.
unpackDirStream :: DirStream -> Ptr CDir
unpackDirStream = unsafeCoerce
-- the __hscore_* functions are defined in the unix package. We can import them and let
-- the linker figure it out.
foreign import ccall unsafe "__hscore_readdir"
c_readdir :: Ptr CDir -> Ptr (Ptr CDirent) -> IO CInt
foreign import ccall unsafe "__hscore_free_dirent"
c_freeDirEnt :: Ptr CDirent -> IO ()
foreign import ccall unsafe "__hscore_d_name"
c_name :: Ptr CDirent -> IO CString
foreign import ccall unsafe "__posixdir_d_type"
c_type :: Ptr CDirent -> IO DirType
----------------------------------------------------------
-- less dodgy but still lower-level
readDirEnt :: DirStream -> IO (DirType, FilePath)
readDirEnt (unpackDirStream -> dirp) =
alloca $ \ptr_dEnt -> loop ptr_dEnt
where
loop ptr_dEnt = do
resetErrno
r <- c_readdir dirp ptr_dEnt
if r == 0
then do
dEnt <- peek ptr_dEnt
if dEnt == nullPtr
then return (dtUnknown, mempty)
else do
dName <- c_name dEnt >>= peekFilePath
dType <- c_type dEnt
c_freeDirEnt dEnt
return (dType, dName)
else do
errno <- getErrno
if errno == eINTR
then loop ptr_dEnt
else do
let (Errno eo) = errno
if eo == 0
then return (dtUnknown, mempty)
else throwErrno "readDirEnt"
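A usage sketch for readDirEnt: drain one directory level with the low-level loop, treating an empty entry name as end-of-stream (openDirStream/closeDirStream come from System.Posix, already imported here).

listDirEnts :: FilePath -> IO [FilePath]
listDirEnts dir = do
  ds <- openDirStream dir
  let go acc = do
        (_typ, name) <- readDirEnt ds
        if null name
          then pure (reverse acc)  -- empty name: no more entries
          else go (if name `elem` [".", ".."] then acc else name : acc)
  names <- go []
  closeDirStream ds
  pure names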


@@ -1,6 +1,5 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-|
Module : GHCup.Utils.File.Windows
@@ -32,30 +31,18 @@ import Data.List
import Foreign.C.Error
import GHC.IO.Exception
import GHC.IO.Handle
import qualified GHC.Unicode as U
import System.Directory
import System.Environment
import System.FilePath
import System.IO
import qualified System.IO.Error as IOE
import System.Process
import qualified System.Win32.Info as WS
import qualified System.Win32.File as WS
import qualified Control.Exception as EX
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import qualified Streamly.Internal.Data.Stream.StreamD.Type
as D
import Streamly.Internal.Data.Unfold.Type hiding ( concatMap )
import Data.Bits ((.&.))
import qualified Streamly.Prelude as S
import qualified Streamly.Internal.Data.Unfold as U
import Streamly.Internal.Control.Concurrent ( withRunInIO )
import Streamly.Internal.Data.IOFinalizer ( newIOFinalizer, runIOFinalizer )
toProcessError :: FilePath
@@ -177,8 +164,8 @@ execLogged :: ( MonadReader env m
execLogged exe args chdir lfile env = do
Dirs {..} <- getDirs
logDebug $ T.pack $ "Running " <> exe <> " with arguments " <> show args
let stdoutLogfile = fromGHCupPath logsDir </> lfile <> ".stdout.log"
stderrLogfile = fromGHCupPath logsDir </> lfile <> ".stderr.log"
let stdoutLogfile = logsDir </> lfile <> ".stdout.log"
stderrLogfile = logsDir </> lfile <> ".stderr.log"
cp <- createProcessWithMingwPath ((proc exe args)
{ cwd = chdir
, env = env
@@ -212,7 +199,7 @@ execLogged exe args chdir lfile env = do
-- subprocess stdout also goes to stderr for logging
void $ BS.hPut stderr some
go
-- | Thin wrapper around `executeFile`.
exec :: MonadIO m
@@ -269,7 +256,7 @@ ghcupMsys2Dir =
Just fp -> pure fp
Nothing -> do
baseDir <- liftIO ghcupBaseDir
pure (fromGHCupPath baseDir </> "msys64")
pure (baseDir </> "msys64")
-- | Checks whether the binary is a broken link.
isBrokenSymlink :: FilePath -> IO Bool
@@ -282,229 +269,3 @@ isBrokenSymlink fp = do
-- this drops 'symDir' if 'tfp' is absolute
(takeDirectory fp </> tfp)
else pure False
copyFile :: FilePath -- ^ source file
-> FilePath -- ^ destination file
-> Bool -- ^ fail if file exists
-> IO ()
copyFile = WS.copyFile
deleteFile :: FilePath -> IO ()
deleteFile = WS.deleteFile
install :: FilePath -> FilePath -> Bool -> IO ()
install = copyFile
removeEmptyDirectory :: FilePath -> IO ()
removeEmptyDirectory = WS.removeDirectory
unfoldDirContents :: (S.MonadAsync m, MonadIO m, MonadCatch m, MonadMask m) => Unfold m FilePath (WS.FileAttributeOrFlag, FilePath)
unfoldDirContents = U.bracket alloc dealloc (Unfold step return)
where
{-# INLINE [0] step #-}
step (_, False, _, _) = return D.Stop
step (topdir, True, h, fd) = flip onException (liftIO $ WS.findClose h) $ do
f <- liftIO $ WS.getFindDataFileName fd
more <- liftIO $ WS.findNextFile h fd
-- can't get file attribute from FindData yet (needs Win32 PR)
fattr <- liftIO $ WS.getFileAttributes (topdir </> f)
if | f == "." || f == ".." -> return $ D.Skip (topdir, more, h, fd)
| otherwise -> return $ D.Yield (fattr, f) (topdir, more, h, fd)
alloc topdir = do
query <- liftIO $ furnishPath (topdir </> "*")
(h, fd) <- liftIO $ WS.findFirstFile query
pure (topdir, True, h, fd)
dealloc (_, _, fd, _) = liftIO $ WS.findClose fd
getDirectoryContentsRecursiveDFSUnsafe :: (MonadCatch m, S.MonadAsync m, MonadMask m, S.IsStream t)
=> FilePath
-> t m FilePath
getDirectoryContentsRecursiveDFSUnsafe fp = go ""
where
isDir attrs = attrs .&. WS.fILE_ATTRIBUTE_DIRECTORY /= 0
go cd = flip S.concatMap (S.unfold unfoldDirContents (fp </> cd)) $ \(t, f) ->
if | isDir t -> go (cd </> f)
| otherwise -> pure (cd </> f)
getDirectoryContentsRecursiveUnfold :: (MonadCatch m, S.MonadAsync m, MonadMask m) => Unfold m FilePath FilePath
getDirectoryContentsRecursiveUnfold = Unfold step init'
where
{-# INLINE [0] step #-}
step (_, Nothing, []) = return D.Stop
step (topdir, state@(Just (cdir, (h, findData, ref))), dirs) = flip onException (runIOFinalizer ref) $ do
f <- liftIO $ WS.getFindDataFileName findData
more <- liftIO $ WS.findNextFile h findData
when (not more) $ runIOFinalizer ref
let nextState = if more then state else Nothing
-- can't get file attribute from FindData yet (needs Win32 PR)
fattr <- liftIO $ WS.getFileAttributes (topdir </> cdir </> f)
if | f == "." || f == ".." -> return $ D.Skip (topdir, nextState, dirs)
| isDir fattr -> return $ D.Skip (topdir, nextState, (cdir </> f):dirs)
| otherwise -> return $ D.Yield (cdir </> f) (topdir, nextState, dirs)
step (topdir, Nothing, dir:dirs) = do
(h, findData, ref) <- acquire (topdir </> dir)
return $ D.Skip (topdir, Just (dir, (h, findData, ref)), dirs)
init' topdir = do
(h, findData, ref) <- acquire topdir
return (topdir, Just ("", (h, findData, ref)), [])
isDir attrs = attrs .&. WS.fILE_ATTRIBUTE_DIRECTORY /= 0
acquire dir = do
query <- liftIO $ furnishPath (dir </> "*")
withRunInIO $ \run -> mask_ $ run $ do
(h, findData) <- liftIO $ WS.findFirstFile query
ref <- newIOFinalizer (liftIO $ WS.findClose h)
return (h, findData, ref)
getDirectoryContentsRecursiveBFSUnsafe :: (MonadMask m, MonadIO m, S.MonadAsync m)
=> FilePath
-> S.SerialT m FilePath
getDirectoryContentsRecursiveBFSUnsafe = S.unfold getDirectoryContentsRecursiveUnfold
--------------------------------------
--[ Inlined from directory package ]--
--------------------------------------
furnishPath :: FilePath -> IO FilePath
furnishPath path =
(toExtendedLengthPath <$> rawPrependCurrentDirectory path)
`IOE.catchIOError` \ _ ->
pure path
toExtendedLengthPath :: FilePath -> FilePath
toExtendedLengthPath path
| isRelative path = simplifiedPath
| otherwise =
case simplifiedPath of
'\\' : '?' : '?' : '\\' : _ -> simplifiedPath
'\\' : '\\' : '?' : '\\' : _ -> simplifiedPath
'\\' : '\\' : '.' : '\\' : _ -> simplifiedPath
'\\' : subpath@('\\' : _) -> "\\\\?\\UNC" <> subpath
_ -> "\\\\?\\" <> simplifiedPath
where simplifiedPath = simplify path
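A worked example for the helper above, derived by hand from the definition (only meaningful on a Windows build):

-- >>> toExtendedLengthPath "C:\\Users\\me\\x.txt"
-- "\\\\?\\C:\\Users\\me\\x.txt"
-- Relative paths are returned simplified but without the \\?\ prefix.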
simplify :: FilePath -> FilePath
simplify = simplifyWindows
simplifyWindows :: FilePath -> FilePath
simplifyWindows "" = ""
simplifyWindows path =
case drive' of
"\\\\?\\" -> drive' <> subpath
_ -> simplifiedPath
where
simplifiedPath = joinDrive drive' subpath'
(drive, subpath) = splitDrive path
drive' = upperDrive (normaliseTrailingSep (normalisePathSeps drive))
subpath' = appendSep . avoidEmpty . prependSep . joinPath .
stripPardirs . expandDots . skipSeps .
splitDirectories $ subpath
upperDrive d = case d of
c : ':' : s | U.isAlpha c && all isPathSeparator s -> U.toUpper c : ':' : s
_ -> d
skipSeps = filter (not . (`elem` (pure <$> pathSeparators)))
stripPardirs | pathIsAbsolute || subpathIsAbsolute = dropWhile (== "..")
| otherwise = id
prependSep | subpathIsAbsolute = (pathSeparator :)
| otherwise = id
avoidEmpty | not pathIsAbsolute
&& (null drive || hasTrailingPathSep) -- prefer "C:" over "C:."
= emptyToCurDir
| otherwise = id
appendSep p | hasTrailingPathSep
&& not (pathIsAbsolute && null p)
= addTrailingPathSeparator p
| otherwise = p
pathIsAbsolute = not (isRelative path)
subpathIsAbsolute = any isPathSeparator (take 1 subpath)
hasTrailingPathSep = hasTrailingPathSeparator subpath
emptyToCurDir :: FilePath -> FilePath
emptyToCurDir "" = "."
emptyToCurDir path = path
normaliseTrailingSep :: FilePath -> FilePath
normaliseTrailingSep path = do
let path' = reverse path
let (sep, path'') = span isPathSeparator path'
let addSep = if null sep then id else (pathSeparator :)
reverse (addSep path'')
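Two hand-traced expectations for normaliseTrailingSep (illustrative only, not taken from any test suite): a run of trailing separators collapses to a single one, and paths without a trailing separator are left untouched.

-- Illustrative example, not part of the change set.
normaliseTrailingSepExamples :: Bool
normaliseTrailingSepExamples =
     normaliseTrailingSep "C:\\\\" == "C:\\"
  && normaliseTrailingSep "foo"    == "foo"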
normalisePathSeps :: FilePath -> FilePath
normalisePathSeps p = (\ c -> if isPathSeparator c then pathSeparator else c) <$> p
expandDots :: [FilePath] -> [FilePath]
expandDots = reverse . go []
where
go ys' xs' =
case xs' of
[] -> ys'
x : xs ->
case x of
"." -> go ys' xs
".." ->
case ys' of
[] -> go (x : ys') xs
".." : _ -> go (x : ys') xs
_ : ys -> go ys xs
_ -> go (x : ys') xs
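To illustrate expandDots (hand-traced from the definition above, not from a doctest): "." components are dropped, and ".." cancels the preceding component unless there is nothing left to cancel.

-- Illustrative example, not part of the change set.
expandDotsExamples :: Bool
expandDotsExamples =
     expandDots ["a", "..", "b", "."]   == ["b"]
  && expandDots ["..", "a"]             == ["..", "a"]
  && expandDots ["a", "b", "..", ".."]  == []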
rawPrependCurrentDirectory :: FilePath -> IO FilePath
rawPrependCurrentDirectory path
| isRelative path =
((`ioeAddLocation` "prependCurrentDirectory") .
(`IOE.ioeSetFileName` path)) `IOE.modifyIOError` do
getFullPathName path
| otherwise = pure path
ioeAddLocation :: IOError -> String -> IOError
ioeAddLocation e loc = do
IOE.ioeSetLocation e newLoc
where
newLoc = loc <> if null oldLoc then "" else ":" <> oldLoc
oldLoc = IOE.ioeGetLocation e
getFullPathName :: FilePath -> IO FilePath
getFullPathName path =
fromExtendedLengthPath <$> WS.getFullPathName (toExtendedLengthPath path)
fromExtendedLengthPath :: FilePath -> FilePath
fromExtendedLengthPath ePath =
case ePath of
'\\' : '\\' : '?' : '\\' : path ->
case path of
'U' : 'N' : 'C' : subpath@('\\' : _) -> "\\" <> subpath
drive : ':' : subpath
-- if the path is not "regular", then the prefix is necessary
-- to ensure the path is interpreted literally
| U.isAlpha drive && U.isAscii drive && isPathRegular subpath -> path
_ -> ePath
_ -> ePath
where
isPathRegular path =
not ('/' `elem` path ||
"." `elem` splitDirectories path ||
".." `elem` splitDirectories path)

View File

@@ -17,7 +17,6 @@ module GHCup.Utils.Logger where
import GHCup.Types
import GHCup.Types.Optics
import {-# SOURCE #-} GHCup.Utils.Dirs (fromGHCupPath)
import {-# SOURCE #-} GHCup.Utils.File.Common (findFiles)
import GHCup.Utils.String.QQ
@@ -118,14 +117,14 @@ initGHCupFileLogging :: ( MonadReader env m
) => m FilePath
initGHCupFileLogging = do
Dirs { logsDir } <- getDirs
let logfile = fromGHCupPath logsDir </> "ghcup.log"
let logfile = logsDir </> "ghcup.log"
logFiles <- liftIO $ findFiles
(fromGHCupPath logsDir)
logsDir
(makeRegexOpts compExtended
execBlank
([s|^.*\.log$|] :: B.ByteString)
)
forM_ logFiles $ hideError doesNotExistErrorType . recycleFile . (fromGHCupPath logsDir </>)
forM_ logFiles $ hideError doesNotExistErrorType . recycleFile . (logsDir </>)
liftIO $ writeFile logfile ""
pure logfile
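The hunk above leans on findFiles (imported from GHCup.Utils.File.Common) to pick out the *.log files matching the regex. As a rough, hypothetical sketch of that pattern (the real findFiles may differ), filtering a directory listing against a compiled POSIX regex looks like this:

-- Hypothetical stand-in for findFiles; illustrative only.
import System.Directory (listDirectory)
import Text.Regex.Base (makeRegexOpts, matchTest)
import Text.Regex.Posix (Regex, compExtended, execBlank)

findFilesSketch :: FilePath -> Regex -> IO [FilePath]
findFilesSketch dir re = filter (matchTest re) <$> listDirectory dir

-- e.g. findFilesSketch logsDir (makeRegexOpts compExtended execBlank "^.*\\.log$")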

View File

@@ -27,7 +27,6 @@ module GHCup.Utils.Prelude
)
where
import {-# SOURCE #-} GHCup.Utils.Dirs (GHCupPath, fromGHCupPath, createTempGHCupDirectory, appendGHCupPath, removePathForcibly, removeDirectory)
import GHCup.Types
import GHCup.Errors
import GHCup.Types.Optics
@@ -45,8 +44,9 @@ import Control.Monad.IO.Class
import Control.Monad.Reader
import Data.Bifunctor
import Data.ByteString ( ByteString )
import Data.List ( intercalate, stripPrefix, isPrefixOf, dropWhileEnd, intersperse )
import Data.List ( nub, intercalate, stripPrefix, isPrefixOf, dropWhileEnd, intersperse )
import Data.Maybe
import Data.Foldable
import Data.List.NonEmpty ( NonEmpty( (:|) ))
import Data.String
import Data.Text ( Text )
@@ -56,11 +56,9 @@ import Haskus.Utils.Types.List
import Haskus.Utils.Variant.Excepts
import Text.PrettyPrint.HughesPJClass ( prettyShow, Pretty )
import System.IO.Error
import System.Directory hiding ( removeDirectory
, removeDirectoryRecursive
, removePathForcibly
, copyFile
)
import System.IO.Temp
import System.IO.Unsafe
import System.Directory
import System.FilePath
import Control.Retry
@@ -80,7 +78,6 @@ import qualified Data.Text.Lazy.Builder.Int as B
import qualified Data.Text.Lazy.Encoding as TLE
-- $setup
-- >>> import Data.ByteString.Internal (c2w, w2c)
-- >>> import Test.QuickCheck
@@ -400,6 +397,64 @@ createDirRecursive' p =
_ -> throwIO e
-- | Recursively copy the contents of one directory to another path.
--
-- This is adapted from the Cabal library.
copyDirectoryRecursive :: FilePath -> FilePath -> (FilePath -> FilePath -> IO ()) -> IO ()
copyDirectoryRecursive srcDir destDir doCopy = do
srcFiles <- getDirectoryContentsRecursive srcDir
copyFilesWith destDir [ (srcDir, f)
| f <- srcFiles ]
where
-- | Shared copying logic, adapted from Cabal's 'copyFiles', 'installOrdinaryFiles',
-- 'installExecutableFiles' and 'installMaybeExecutableFiles'.
copyFilesWith :: FilePath -> [(FilePath, FilePath)] -> IO ()
copyFilesWith targetDir srcFiles = do
-- Create parent directories for everything
let dirs = map (targetDir </>) . nub . map (takeDirectory . snd) $ srcFiles
traverse_ (createDirectoryIfMissing True) dirs
-- Copy all the files
sequence_ [ let src = srcBase </> srcFile
dest = targetDir </> srcFile
in doCopy src dest
| (srcBase, srcFile) <- srcFiles ]
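A quick usage sketch for copyDirectoryRecursive (illustrative, not part of the diff): the third argument decides how each file is transferred, so plain copying, hard-linking or installing with particular permissions all fit the same shape.

-- Illustrative example, not part of the change set.
import System.Directory (copyFile)

mirrorTree :: FilePath -> FilePath -> IO ()
mirrorTree src dst = copyDirectoryRecursive src dst copyFile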
-- | List all the files in a directory and all subdirectories.
--
-- The order places files in sub-directories after all the files in their
-- parent directories. The list is generated lazily, so it is not well defined if
-- the source directory structure changes before the list is used.
--
getDirectoryContentsRecursive :: FilePath -> IO [FilePath]
getDirectoryContentsRecursive topdir = recurseDirectories [""]
where
recurseDirectories :: [FilePath] -> IO [FilePath]
recurseDirectories [] = return []
recurseDirectories (dir:dirs) = unsafeInterleaveIO $ do
(files, dirs') <- collect [] [] =<< getDirectoryContents (topdir </> dir)
files' <- recurseDirectories (dirs' ++ dirs)
return (files ++ files')
where
collect files dirs' [] = return (reverse files
,reverse dirs')
collect files dirs' (entry:entries) | ignore entry
= collect files dirs' entries
collect files dirs' (entry:entries) = do
let dirEntry = dir </> entry
isDirectory <- doesDirectoryExist (topdir </> dirEntry)
if isDirectory
then collect files (dirEntry:dirs') entries
else collect (dirEntry:files) dirs' entries
ignore ['.'] = True
ignore ['.', '.'] = True
ignore _ = False
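Because the listing is produced with unsafeInterleaveIO, a consumer can stop early and the directories it never demands are never read. A small illustrative sketch (assuming getDirectoryContentsRecursive as defined above):

-- Illustrative example, not part of the change set.
peekTree :: FilePath -> IO [FilePath]
peekTree dir = take 5 <$> getDirectoryContentsRecursive dir
-- forces at most a prefix of the traversal when the result is consumed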
-- https://github.com/haskell/directory/issues/110
-- https://github.com/haskell/directory/issues/96
-- https://www.sqlite.org/src/info/89f1848d7f
@@ -408,14 +463,14 @@ recyclePathForcibly :: ( MonadIO m
, HasDirs env
, MonadMask m
)
=> GHCupPath
=> FilePath
-> m ()
recyclePathForcibly fp
| isWindows = do
Dirs { recycleDir } <- getDirs
tmp <- liftIO $ createTempGHCupDirectory recycleDir "recyclePathForcibly"
let dest = tmp `appendGHCupPath` takeFileName (fromGHCupPath fp)
liftIO (moveFile (fromGHCupPath fp) (fromGHCupPath dest))
tmp <- liftIO $ createTempDirectory recycleDir "recyclePathForcibly"
let dest = tmp </> takeFileName fp
liftIO (moveFile fp dest)
`catch`
(\e -> if | isDoesNotExistError e -> pure ()
| isPermissionError e || ioeGetErrorType e == UnsupportedOperation {- EXDEV on windows -} -> recover (liftIO $ removePathForcibly fp)
@@ -428,7 +483,7 @@ recyclePathForcibly fp
rmPathForcibly :: ( MonadIO m
, MonadMask m
)
=> GHCupPath
=> FilePath
-> m ()
rmPathForcibly fp
| isWindows = recover (liftIO $ removePathForcibly fp)
@@ -436,7 +491,7 @@ rmPathForcibly fp
rmDirectory :: (MonadIO m, MonadMask m)
=> GHCupPath
=> FilePath
-> m ()
rmDirectory fp
| isWindows = recover (liftIO $ removeDirectory fp)
@@ -456,11 +511,11 @@ recycleFile fp
| isWindows = do
Dirs { recycleDir } <- getDirs
liftIO $ whenM (doesDirectoryExist fp) $ ioError (IOError Nothing InappropriateType "recycleFile" "" Nothing (Just fp))
tmp <- liftIO $ createTempGHCupDirectory recycleDir "recycleFile"
let dest = fromGHCupPath tmp </> takeFileName fp
tmp <- liftIO $ createTempDirectory recycleDir "recycleFile"
let dest = tmp </> takeFileName fp
liftIO (moveFile fp dest)
`catch`
(\e -> if isPermissionError e || ioeGetErrorType e == UnsupportedOperation {- EXDEV on windows -} then recover (liftIO $ rmFile fp) else throwIO e)
(\e -> if isPermissionError e || ioeGetErrorType e == UnsupportedOperation {- EXDEV on windows -} then recover (liftIO $ removePathForcibly fp) else throwIO e)
`finally`
liftIO (handleIO (\_ -> pure ()) $ removePathForcibly tmp)
| otherwise = liftIO $ removeFile fp
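These hunks switch the recycle helpers back from GHCupPath to plain FilePath; the underlying Windows workaround stays the same. As a simplified sketch (not ghcup's implementation): files that are in use cannot be deleted on Windows, so they are first moved into a recycle directory, and forced removal is only the fallback when the move itself is impossible (e.g. EXDEV across volumes).

-- Simplified recycle-then-delete sketch; the real code also retries,
-- cleans up the temporary directory and distinguishes more error cases.
import Control.Exception (catch, throwIO)
import System.Directory (removePathForcibly, renameFile)
import System.FilePath (takeFileName, (</>))
import System.IO.Error (isPermissionError)

recycleSketch :: FilePath  -- ^ recycle directory, assumed to exist
              -> FilePath  -- ^ file to get rid of
              -> IO ()
recycleSketch recycleDir fp =
  renameFile fp (recycleDir </> takeFileName fp)
    `catch` \e -> if isPermissionError e
                    then removePathForcibly fp  -- fall back to forced removal
                    else throwIO e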
@@ -494,6 +549,10 @@ recover action =
(\_ -> action)
copyFileE :: (CopyError :< xs, MonadCatch m, MonadIO m) => FilePath -> FilePath -> Excepts xs m ()
copyFileE from = handleIO (throwE . CopyError . show) . liftIO . copyFile from
-- | Gathering monoidal values
--
-- >>> traverseFold (pure . (:["0"])) ["1","2"]
@@ -703,3 +762,4 @@ breakOn :: Eq a => [a] -> [a] -> ([a], [a])
breakOn needle haystack | needle `isPrefixOf` haystack = ([], haystack)
breakOn _ [] = ([], [])
breakOn needle (x:xs) = first (x:) $ breakOn needle xs
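For reference, two hand-checked examples of breakOn (its semantics mirror Data.Text.breakOn for non-empty needles): the needle stays attached to the second component, and a needle that never occurs leaves the whole haystack in the first component.

-- Illustrative example, not part of the change set.
breakOnExamples :: Bool
breakOnExamples =
     breakOn "::" "a::b::c" == ("a", "::b::c")
  && breakOn "xyz" "abc"    == ("abc", "")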

View File

@@ -1,10 +1,6 @@
module GHCup.Utils.Prelude.Posix where
import System.Directory hiding ( removeDirectory
, removeDirectoryRecursive
, removePathForcibly
, findFiles
)
import System.Directory
import System.Posix.Files
@@ -21,4 +17,4 @@ moveFilePortable :: FilePath -> FilePath -> IO ()
moveFilePortable from to = do
copyFile from to
removeFile from

View File

@@ -26,7 +26,7 @@ extra-deps:
- hspec-golden-aeson-0.9.0.0@sha256:aa17274114026661ba4dfc9c60c230673c8f408bd86482fd611d2d5cb6aff996,2179
- http-io-streams-0.1.6.0@sha256:53f5bab177efb52cd65ec396fd04ed59b93e5f919fb3700cd7dacd6cfce6f06d,3582
- libarchive-3.0.3.0
- libyaml-streamly-0.2.1
- libyaml-streamly-0.2.0
- lzma-static-5.2.5.3@sha256:2758ee58c35992fcf7db78e98684c357a16a82fa2a4e7c352a6c210c08c555d8,7308
- optics-0.4@sha256:9fb69bf0195b8d8f1f8cd0098000946868b8a3c3ffb51e5b64f79fc600c3eb4c,6568
- optics-core-0.4@sha256:59e04aebca536bd011ae50c781937f45af4c1456af1eb9fb578f9a69eee293cd,4995
@@ -35,11 +35,10 @@ extra-deps:
- os-release-1.0.1@sha256:1281c62081f438fc3f0874d3bae6a4887d5964ac25261ba06e29d368ab173467,2716
- primitive-0.7.1.0@sha256:29de6bfd0cf8ba023ceb806203dfbec0e51e3524e75ffe41056f70b4229c6f0f,2728
- regex-posix-clib-2.7
- streamly-0.8.2@sha256:ec521b7c1c4db068501c35804af77f40b7d34232f5e29d9b99e722229040eb80,23500
- unicode-data-0.3.0@sha256:0545e079705a5381d0893f8fe8daaa08fc9174baeab269b9cf651817d8eadbc6,5123
- streamly-0.8.0@sha256:9784c80ee1ada51477520cabc4e92a0c76a6bb265f968a188f2fce818e7398e0,19654
- strict-base-0.4.0.0@sha256:2ff4e43cb95eedf2995558d7fc34d19362846413dd39e6aa6a5b3ea8228fef9f,1248
- xor-0.0.1.0@sha256:f8362b4a68562b9afbcd727ff64c1a303970df3a032e0033d2f4c094c3501df3,2243
- yaml-streamly-0.12.1
- yaml-streamly-0.12.0
flags:
http-io-streams:

View File

@@ -1,58 +0,0 @@
module GHCup.Utils.FileSpec where
import GHCup.Utils.File
import Data.List
import System.Directory
import System.FilePath
import System.IO.Unsafe
import qualified Streamly.Prelude as S
import Test.Hspec
spec :: Spec
spec = do
describe "GHCup.Utils.File" $ do
it "getDirectoryContentsRecursiveBFS" $ do
l1 <- sort <$> S.toList (getDirectoryContentsRecursiveBFSUnsafe ".")
l2 <- sort <$> getDirectoryContentsRecursiveLazy "."
not (null l1) `shouldBe` True
not (null l2) `shouldBe` True
l1 `shouldBe` l2
it "getDirectoryContentsRecursiveDFS" $ do
l1 <- sort <$> S.toList (getDirectoryContentsRecursiveDFSUnsafe ".")
l2 <- sort <$> getDirectoryContentsRecursiveLazy "."
not (null l1) `shouldBe` True
not (null l2) `shouldBe` True
l1 `shouldBe` l2
getDirectoryContentsRecursiveLazy :: FilePath -> IO [FilePath]
getDirectoryContentsRecursiveLazy topdir = recurseDirectories [""]
where
recurseDirectories :: [FilePath] -> IO [FilePath]
recurseDirectories [] = return []
recurseDirectories (dir:dirs) = unsafeInterleaveIO $ do
(files, dirs') <- collect [] [] =<< getDirectoryContents (topdir </> dir)
files' <- recurseDirectories (dirs' ++ dirs)
return (files ++ files')
where
collect files dirs' [] = return (reverse files
,reverse dirs')
collect files dirs' (entry:entries) | ignore entry
= collect files dirs' entries
collect files dirs' (entry:entries) = do
let dirEntry = dir </> entry
isDirectory <- doesDirectoryExist (topdir </> dirEntry)
if isDirectory
then collect files (dirEntry:dirs') entries
else collect (dirEntry:files) dirs' entries
ignore ['.'] = True
ignore ['.', '.'] = True
ignore _ = False

View File

@@ -1,9 +1,10 @@
import Test.Hspec.Runner
import Test.Hspec.Formatters
import qualified Spec
main :: IO ()
main =
hspecWith
defaultConfig
defaultConfig { configFormatter = Just progress }
Spec.spec