Compare commits

...

30 Commits

Author SHA1 Message Date
eb9a0b66c4 Document distribution policies 2023-01-04 18:51:40 +08:00
109187eb6f Merge branch 'issue-367-content-prop' 2023-01-03 23:17:35 +08:00
e881705323 Merge branch 'issue-440' 2023-01-03 22:47:12 +08:00
ea06c155a7 Merge branch 'issue-695' 2023-01-03 22:46:52 +08:00
d4732e15a7 Merge branch 'issue-716' 2023-01-03 22:46:13 +08:00
db6f784a1f Merge branch 'error-handling' 2023-01-03 22:45:25 +08:00
82e3837dd9 Update windows golden test file 2023-01-02 21:42:52 +08:00
957c5918b8 Upload golden files on failure 2023-01-02 20:47:49 +08:00
9d4c923649 Add content-length property to downloads
This is optional for now. Fixes #367
2023-01-02 20:41:42 +08:00
24c36ef856 Fix failure with --isolate=dir --force
Fixes #695
2023-01-02 20:39:27 +08:00
2783b8f693 Fix 'ghcup install hls -u' on windows
Fixes #716
2023-01-02 20:38:58 +08:00
d5a680e3c6 Don't clean up tmp dirs when --keep=always 2023-01-02 20:38:26 +08:00
d1075987de Fix ARM cleanup 2023-01-02 20:35:46 +08:00
e116a2392e Enable arm tests 2023-01-01 21:40:04 +08:00
7dd6f1f4a4 Expose metadata-caching to --help 2023-01-01 19:19:37 +08:00
4d82c37539 Add --metadata-fetching-mode arg, fixes #440 2023-01-01 19:16:32 +08:00
801b1edfa7 Merge remote-tracking branch 'origin/pr/730' 2022-12-31 21:10:59 +08:00
c1b67e1787 Merge branch 'issue-433' 2022-12-27 00:11:23 +08:00
70dd106549 Merge branch 'issue-708' 2022-12-27 00:10:41 +08:00
b098aa4e65 Merge branch 'issue-368' 2022-12-27 00:09:59 +08:00
74b784fcfb Merge branch 'issue-391-gh' 2022-12-27 00:09:39 +08:00
673db344d6 Merge branch 'issue-414' 2022-12-27 00:09:28 +08:00
5594a19c02 Merge branch 'issue-384' 2022-12-27 00:09:05 +08:00
a5bc13fe50 Added Mageia (author: Kristoffer Grundström)
Signed-off-by: Kristoffer Grundström <lovaren@gmail.com>
2022-12-25 08:02:41 +01:00
a5f2067d76 Improve stripping logic 2022-12-21 13:44:31 +08:00
be8fa57be1 Freeze the MSYS2 version we install
Fixes #368
2022-12-20 23:38:48 +08:00
6ad9963889 Allow building newer GHCs from git
The user will have to pass --overwrite-version=<ver> because
we can't discover the GHC version from git anymore.

https://gitlab.haskell.org/ghc/ghc/-/issues/22322
2022-12-20 23:18:44 +08:00
bcddb05b1d Fix bootstrap under windows when msys2 has non-posix login shell 2022-12-20 22:46:58 +08:00
f7d2033e25 Improve postRm message, fixes #384 2022-12-20 22:20:10 +08:00
e8586cf993 Improve add-release-channel, fixes #708 2022-12-20 00:57:56 +08:00
33 changed files with 8193 additions and 5165 deletions

View File

@@ -87,39 +87,29 @@ download_cabal_cache() {
cd /tmp
case "${RUNNER_OS}" in
"Linux")
case "${DISTRO}" in
"Alpine")
case "${ARCH}" in
"32") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/i386-linux-alpine-cabal-cache-1.0.5.1
;;
"64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/x86_64-linux-alpine-cabal-cache-1.0.5.1
;;
esac
case "${ARCH}" in
"32") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/i386-linux-cabal-cache
;;
*)
case "${ARCH}" in
"64") url=https://github.com/haskell-works/cabal-cache/releases/download/v1.0.5.1/cabal-cache-x86_64-linux.gz
;;
"ARM64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/aarch64-linux-cabal-cache-1.0.5.1
;;
"ARM") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/armv7-linux-cabal-cache-1.0.5.1
;;
esac
"64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/x86_64-linux-cabal-cache
;;
"ARM64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/aarch64-linux-cabal-cache
;;
"ARM") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/armv7-linux-cabal-cache
;;
esac
;;
"FreeBSD")
url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/x86_64-freebsd-cabal-cache-1.0.5.1
url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/x86_64-portbld-freebsd-cabal-cache
;;
"Windows")
exe=".exe"
url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/x86_64-mingw64-cabal-cache-1.0.5.1.exe
url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/x86_64-mingw64-cabal-cache
;;
"macOS")
case "${ARCH}" in
"ARM64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/aarch64-apple-darwin-cabal-cache-1.0.5.1
"ARM64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/aarch64-apple-darwin-cabal-cache
;;
"64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/x86_64-apple-darwin-cabal-cache-1.0.5.1
"64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/x86_64-apple-darwin-cabal-cache
;;
esac
;;
@@ -134,8 +124,9 @@ download_cabal_cache() {
curl -o cabal-cache${exe} -L "${url}"
;;
esac
chmod +x cabal-cache${exe}
cp "cabal-cache${exe}" "${dest}${exe}"
sha_sum cabal-cache${exe}
mv "cabal-cache${exe}" "${dest}${exe}"
chmod +x "${dest}${exe}"
fi
)
}
@@ -167,13 +158,15 @@ install_ghcup() {
strip_binary() {
(
set -e
binary=$1
if [ "${RUNNER_OS}" = "macOS" ] ; then
strip "${binary}"
else
if [ "${RUNNER_OS}" != "Windows" ] ; then
local binary=$1
case "$(uname -s)" in
"Darwin"|"darwin")
;;
MSYS_*|MINGW*)
;;
*)
strip -s "${binary}"
fi
fi
;;
esac
)
}

View File

@@ -102,7 +102,7 @@ jobs:
- uses: docker://arm64v8/ubuntu:focal
name: Cleanup (aarch64 linux)
with:
args: rm -rf .ghcup/ cabal/ dist-newstyle/ out/
args: "find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +"
- name: git config
run: |
@@ -272,6 +272,14 @@ jobs:
DISTRO: ${{ matrix.DISTRO }}
APT_GET: "sudo apt-get"
- if: failure()
name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: testfiles
path: |
./test/golden/unix/GHCupInfo*json
test-arm:
name: Test ARM
needs: "build-arm"
@@ -297,7 +305,7 @@ jobs:
- uses: docker://arm64v8/ubuntu:focal
name: Cleanup (aarch64 linux)
with:
args: rm -rf .ghcup/ cabal/ dist-newstyle/ out/
args: "find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +"
- name: Checkout code
uses: actions/checkout@v3
@@ -311,9 +319,9 @@ jobs:
- if: matrix.ARCH == 'ARM'
uses: docker://hasufell/arm32v7-ubuntu-haskell:focal
name: Run build (armv7 linux)
name: Run test (armv7 linux)
with:
run: sh .github/scripts/test.sh
args: sh .github/scripts/test.sh
env:
ARTIFACT: ${{ matrix.ARTIFACT }}
ARCH: ${{ matrix.ARCH }}
@@ -322,15 +330,23 @@ jobs:
- if: matrix.ARCH == 'ARM64'
uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
name: Run build (aarch64 linux)
name: Run test (aarch64 linux)
with:
run: sh .github/scripts/test.sh
args: sh .github/scripts/test.sh
env:
ARTIFACT: ${{ matrix.ARTIFACT }}
ARCH: ${{ matrix.ARCH }}
GHC_VER: ${{ matrix.GHC_VER }}
DISTRO: Ubuntu
- if: failure()
name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: testfiles
path: |
./test/golden/unix/GHCupInfo*json
test-macwin:
name: Test Mac/Win
needs: "build-macwin"
@@ -377,6 +393,22 @@ jobs:
GHC_VER: ${{ matrix.GHC_VER }}
DISTRO: ${{ matrix.DISTRO }}
HOMEBREW_CHANGE_ARCH_TO_ARM: 1
- if: failure() && runner.os == 'Windows'
name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: testfiles
path: |
./test/golden/windows/GHCupInfo*json
- if: failure() && runner.os != 'Windows'
name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: testfiles
path: |
./test/golden/unix/GHCupInfo*json
hls:
name: hls
needs: build-linux

View File

@@ -13,6 +13,7 @@ import GHCup.Errors
import GHCup.Types.Optics ( getDirs )
import GHCup.Types hiding ( LeanAppState(..) )
import GHCup.Utils
import GHCup.OptParse.Common (logGHCPostRm)
import GHCup.Prelude ( decUTF8Safe )
import GHCup.Prelude.File
import GHCup.Prelude.Logger
@@ -433,6 +434,7 @@ install' _ (_, ListResult {..}) = do
, BuildFailed
, TagNotFound
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, DirNotEmpty
@@ -554,6 +556,7 @@ del' _ (_, ListResult {..}) = do
)
>>= \case
VRight vi -> do
logGHCPostRm (mkTVer lVer)
forM_ (_viPostRemove =<< vi) $ \msg ->
logInfo msg
pure $ Right ()
@@ -630,7 +633,7 @@ getGHCupInfo = do
r <-
flip runReaderT settings
. runE @'[DigestError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
. runE @'[DigestError, ContentLengthError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
$ liftE getDownloadsF
case r of

View File

@@ -67,13 +67,13 @@ import URI.ByteString
import qualified Data.ByteString.UTF8 as UTF8
data Options = Options
{
-- global options
optVerbose :: Maybe Bool
, optCache :: Maybe Bool
, optMetaCache :: Maybe Integer
, optMetaMode :: Maybe MetaMode
, optPlatform :: Maybe PlatformRequest
, optUrlSource :: Maybe URI
, optNoVerify :: Maybe Bool
@@ -116,7 +116,8 @@ opts =
Options
<$> invertableSwitch "verbose" (Just 'v') False (help "Enable verbosity (default: disabled)")
<*> invertableSwitch "cache" (Just 'c') False (help "Cache downloads in ~/.ghcup/cache (default: disabled)")
<*> optional (option auto (long "metadata-caching" <> help "How long the yaml metadata caching interval is (in seconds), 0 to disable" <> internal))
<*> optional (option auto (long "metadata-caching" <> metavar "SEC" <> help "How long the yaml metadata caching interval is (in seconds), 0 to disable"))
<*> optional (option auto (long "metadata-fetching-mode" <> metavar "<Strict|Lax>" <> help "Whether to fail on metadata download failure (Strict) or fall back to cached version (Lax (default))"))
<*> optional
(option
(eitherReader platformParser)

View File

@@ -25,6 +25,7 @@ import Control.DeepSeq
import Control.Concurrent
import Control.Concurrent.Async
import Control.Exception.Safe
import Control.Monad.Identity (Identity(..))
#if !MIN_VERSION_base(4,13,0)
import Control.Monad.Fail ( MonadFail )
#endif
@@ -64,6 +65,7 @@ import qualified Text.Megaparsec as MP
import qualified System.FilePath.Posix as FP
import GHCup.Version
import Control.Exception (evaluate)
import qualified Cabal.Config as CC
-------------
@@ -789,3 +791,12 @@ checkForUpdates = do
pure $ catMaybes (ghcup:otherTools)
where
forMM a f = fmap join $ forM a f
logGHCPostRm :: (MonadReader env m, HasLog env, MonadIO m) => GHCTargetVersion -> m ()
logGHCPostRm ghcVer = do
cabalStore <- liftIO $ handleIO (\_ -> if isWindows then pure "C:\\cabal\\store" else pure "~/.cabal/store")
(runIdentity . CC.cfgStoreDir <$> CC.readConfig)
let storeGhcDir = cabalStore </> ("ghc-" <> T.unpack (prettyVer $ _tvVersion ghcVer))
logInfo $ T.pack $ "After removing GHC you might also want to clean up your cabal store at: " <> storeGhcDir

View File

@@ -420,6 +420,7 @@ hlsCompileOpts =
type GHCEffects = '[ AlreadyInstalled
, BuildFailed
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, GHCupSetError
@@ -443,6 +444,7 @@ type GHCEffects = '[ AlreadyInstalled
type HLSEffects = '[ AlreadyInstalled
, BuildFailed
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, GHCupSetError

View File

@@ -59,7 +59,7 @@ data ConfigCommand
--[ Parsers ]--
---------------
configP :: Parser ConfigCommand
configP = subparser
( command "init" initP
@@ -124,6 +124,7 @@ updateSettings :: UserSettings -> Settings -> Settings
updateSettings UserSettings{..} Settings{..} =
let cache' = fromMaybe cache uCache
metaCache' = fromMaybe metaCache uMetaCache
metaMode' = fromMaybe metaMode uMetaMode
noVerify' = fromMaybe noVerify uNoVerify
keepDirs' = fromMaybe keepDirs uKeepDirs
downloader' = fromMaybe downloader uDownloader
@@ -132,7 +133,7 @@ updateSettings UserSettings{..} Settings{..} =
noNetwork' = fromMaybe noNetwork uNoNetwork
gpgSetting' = fromMaybe gpgSetting uGPGSetting
platformOverride' = uPlatformOverride <|> platformOverride
in Settings cache' metaCache' noVerify' keepDirs' downloader' verbose' urlSource' noNetwork' gpgSetting' noColor platformOverride'
in Settings cache' metaCache' metaMode' noVerify' keepDirs' downloader' verbose' urlSource' noNetwork' gpgSetting' noColor platformOverride'
@@ -188,9 +189,15 @@ config configCommand settings keybindings runLogger = case configCommand of
AddSource xs -> do
doConfig (defaultUserSettings { uUrlSource = Just $ AddSource (xs <> [Right uri]) })
pure ExitSuccess
_ -> do
GHCupURL -> do
doConfig (defaultUserSettings { uUrlSource = Just $ AddSource [Right uri] })
pure ExitSuccess
OwnSource xs -> do
doConfig (defaultUserSettings { uUrlSource = Just $ OwnSource (xs <> [Right uri]) })
pure ExitSuccess
OwnSpec spec -> do
doConfig (defaultUserSettings { uUrlSource = Just $ OwnSource ([Left spec, Right uri]) })
pure ExitSuccess
where
doConfig :: MonadIO m => UserSettings -> m ()

View File

@@ -243,6 +243,7 @@ type InstallEffects = '[ AlreadyInstalled
, BuildFailed
, TagNotFound
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, TarDirDoesNotExist
@@ -271,6 +272,7 @@ type InstallGHCEffects = '[ AlreadyInstalled
, BuildFailed
, CopyError
, DigestError
, ContentLengthError
, DirNotEmpty
, DownloadFailed
, FileAlreadyExistsError
@@ -332,7 +334,7 @@ install installCommand settings getAppState' runLogger = case installCommand of
runInstGHC s'{ settings = settings {noVerify = True}} $ do
(v, vi) <- liftE $ fromVersion instVer GHC
liftE $ runBothE' (installGHCBindist
(DownloadInfo uri (Just $ RegexDir "ghc-.*") "")
(DownloadInfo uri (Just $ RegexDir "ghc-.*") "" Nothing)
(_tvVersion v)
(maybe GHCupInternal IsolateDir isolateDir)
forceInstall
@@ -402,7 +404,7 @@ install installCommand settings getAppState' runLogger = case installCommand of
runInstTool s'{ settings = settings { noVerify = True}} $ do
(_tvVersion -> v, vi) <- liftE $ fromVersion instVer Cabal
liftE $ runBothE' (installCabalBindist
(DownloadInfo uri Nothing "")
(DownloadInfo uri Nothing "" Nothing)
v
(maybe GHCupInternal IsolateDir isolateDir)
forceInstall
@@ -452,7 +454,7 @@ install installCommand settings getAppState' runLogger = case installCommand of
(_tvVersion -> v, vi) <- liftE $ fromVersion instVer HLS
-- TODO: support legacy
liftE $ runBothE' (installHLSBindist
(DownloadInfo uri (Just $ RegexDir "haskell-language-server-*") "")
(DownloadInfo uri (if isWindows then Nothing else Just (RegexDir "haskell-language-server-*")) "" Nothing)
v
(maybe GHCupInternal IsolateDir isolateDir)
forceInstall
@@ -501,7 +503,7 @@ install installCommand settings getAppState' runLogger = case installCommand of
runInstTool s'{ settings = settings { noVerify = True}} $ do
(_tvVersion -> v, vi) <- liftE $ fromVersion instVer Stack
liftE $ runBothE' (installStackBindist
(DownloadInfo uri Nothing "")
(DownloadInfo uri Nothing "" Nothing)
v
(maybe GHCupInternal IsolateDir isolateDir)
forceInstall

View File

@@ -153,6 +153,7 @@ type PrefetchEffects = '[ TagNotFound
, NoToolVersionSet
, NoDownload
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, JSONError

View File

@@ -175,8 +175,8 @@ rm rmCommand runAppState runLogger = case rmCommand of
)
>>= \case
VRight vi -> do
forM_ (_viPostRemove =<< vi) $ \msg ->
runLogger $ logInfo msg
runLogger $ logGHCPostRm ghcVer
postRmLog vi
pure ExitSuccess
VLeft e -> do
runLogger $ logError $ T.pack $ prettyShow e
@@ -191,8 +191,7 @@ rm rmCommand runAppState runLogger = case rmCommand of
)
>>= \case
VRight vi -> do
forM_ (_viPostRemove =<< vi) $ \msg ->
runLogger $ logInfo msg
postRmLog vi
pure ExitSuccess
VLeft e -> do
runLogger $ logError $ T.pack $ prettyShow e
@@ -207,8 +206,7 @@ rm rmCommand runAppState runLogger = case rmCommand of
)
>>= \case
VRight vi -> do
forM_ (_viPostRemove =<< vi) $ \msg ->
runLogger $ logInfo msg
postRmLog vi
pure ExitSuccess
VLeft e -> do
runLogger $ logError $ T.pack $ prettyShow e
@@ -223,10 +221,12 @@ rm rmCommand runAppState runLogger = case rmCommand of
)
>>= \case
VRight vi -> do
forM_ (_viPostRemove =<< vi) $ \msg ->
runLogger $ logInfo msg
postRmLog vi
pure ExitSuccess
VLeft e -> do
runLogger $ logError $ T.pack $ prettyShow e
pure $ ExitFailure 15
postRmLog vi =
forM_ (_viPostRemove =<< vi) $ \msg ->
runLogger $ logInfo msg

View File

@@ -177,6 +177,7 @@ type RunEffects = '[ AlreadyInstalled
, BuildFailed
, TagNotFound
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, TarDirDoesNotExist
@@ -343,6 +344,7 @@ run RunOptions{..} runAppState leanAppstate runLogger = do
, DownloadFailed
, DirNotEmpty
, DigestError
, ContentLengthError
, BuildFailed
, ArchiveResult
, AlreadyInstalled

View File

@@ -88,6 +88,7 @@ upgradeOptsP =
type UpgradeEffects = '[ DigestError
, ContentLengthError
, GPGError
, NoDownload
, NoUpdate

View File

@@ -79,6 +79,7 @@ toSettings options = do
mergeConf Options{..} UserSettings{..} noColor =
let cache = fromMaybe (fromMaybe (Types.cache defaultSettings) uCache) optCache
metaCache = fromMaybe (fromMaybe (Types.metaCache defaultSettings) uMetaCache) optMetaCache
metaMode = fromMaybe (fromMaybe (Types.metaMode defaultSettings) uMetaMode) optMetaMode
noVerify = fromMaybe (fromMaybe (Types.noVerify defaultSettings) uNoVerify) optNoVerify
verbose = fromMaybe (fromMaybe (Types.verbose defaultSettings) uVerbose) optVerbose
keepDirs = fromMaybe (fromMaybe (Types.keepDirs defaultSettings) uKeepDirs) optKeepDirs
@@ -210,7 +211,7 @@ Report bugs at <https://github.com/haskell/ghcup-hs/issues>|]
ghcupInfo <-
( flip runReaderT leanAppstate
. runE @'[DigestError, GPGError, JSONError , DownloadFailed, FileDoesNotExistError]
. runE @'[DigestError, ContentLengthError, GPGError, JSONError , DownloadFailed, FileDoesNotExistError]
$ liftE getDownloadsF
)
>>= \case

View File

@@ -40,6 +40,12 @@ key-bindings:
# of the file. These usually are in '~/.ghcup/cache/ghcup-<ver>.yaml'.
meta-cache: 300 # in seconds
# When trying to download ghcup metadata, this option decides what to do
# when the download fails:
# 1. Lax: use existing ~/.ghcup/cache/ghcup-<ver>.yaml as fallback (default)
# 2. Strict: fail hard
meta-mode: Lax # Strict | Lax
# Where to get GHC/cabal/hls download info/versions from. For more detailed explanation
# check the 'URLSource' type in the code.
url-source:
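
The new meta-mode setting above decides, at metadata-fetch time, whether a failed download is fatal (Strict) or falls back to the cached YAML (Lax); the corresponding change lands in `getBase` further down in this diff. A minimal, self-contained sketch of that decision — `downloadYaml` and `cachedYaml` are hypothetical stand-ins, not GHCup functions:

```haskell
{-# LANGUAGE ScopedTypeVariables #-}
import Control.Exception (IOException, catch, throwIO)

-- Mirrors the MetaMode type added in GHCup.Types.
data MetaMode = Strict | Lax deriving (Show, Read, Eq)

-- On download failure: Strict rethrows, Lax falls back to the cached yaml.
fetchMetadata :: MetaMode -> IO FilePath -> IO FilePath -> IO FilePath
fetchMetadata mode downloadYaml cachedYaml =
  downloadYaml `catch` \(e :: IOException) ->
    case mode of
      Strict -> throwIO e    -- fail hard
      Lax    -> cachedYaml   -- use ~/.ghcup/cache/ghcup-<ver>.yaml as fallback
```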

View File

@@ -60,6 +60,29 @@ All you wanted to know about GHCup.
3. handling cabal projects
4. being a stack alternative
## Distribution policies
Like most Linux distros and other distribution channels, GHCup also
follows certain policies. These are as follows:
1. The end-user experience is our primary concern
- running ghcup in CI systems is a first-class use case
2. We strive to collaborate with the maintainers of all the tools we support and to maintain a good relationship with them
3. We may fix build system or other distribution bugs in upstream bindists
- these are always communicated upstream
4. We may even patch the source code of supported tools in very rare cases, if that is required to ensure that the end-user experience does not break
- we'll first try to upstream any such required patch and demand a new release to avoid downstream patching
- patches will be communicated to the maintainers either way and we'll strive to get their review
- they will also be communicated to the end-user
- they will be uploaded along with the bindist
- we will avoid maintaining long-running downstream patches (currently zero)
5. We may add bindists for platforms that upstream does not support
- this is currently the case for GHC on e.g. Alpine, and possibly FreeBSD in the future
- this is currently also the case for stack on darwin M1
- we currently don't guarantee that the test suite passes for unofficial bindists (this may change in the future)
6. We GPG sign all the GHCup metadata as well as the unofficial bindists
- any trust issue related to missing checksums or GPG signatures is a bug and is given high priority
## How
Installs a specified GHC version into `~/.ghcup/ghc/<ver>`, and places `ghc-<ver>` symlinks in `~/.ghcup/bin/`.
@@ -75,15 +98,15 @@ cabal-install/HLS/stack are installed in `~/.ghcup/bin/<tool>-<ver>` and have un
## Known users
* CI:
- [Github actions/virtual-environments](https://github.com/actions/virtual-environments)
- [Github haskell/actions/setup](https://github.com/haskell/actions/tree/main/setup)
- [haskell-ci](https://github.com/haskell-CI/haskell-ci)
- [Github actions/virtual-environments](https://github.com/actions/virtual-environments)
- [Github haskell/actions/setup](https://github.com/haskell/actions/tree/main/setup)
- [haskell-ci](https://github.com/haskell-CI/haskell-ci)
* mirrors:
- [sjtug](https://mirror.sjtu.edu.cn/docs/ghcup)
- [sjtug](https://mirror.sjtu.edu.cn/docs/ghcup)
* tools:
- [vscode-haskell](https://github.com/haskell/vscode-haskell)
- [nvim-lsp-installer](https://github.com/williamboman/nvim-lsp-installer)
- [vabal](https://github.com/Franciman/vabal)
- [vscode-haskell](https://github.com/haskell/vscode-haskell)
- [nvim-lsp-installer](https://github.com/williamboman/nvim-lsp-installer)
- [vabal](https://github.com/Franciman/vabal)
## Known problems

View File

@@ -48,6 +48,10 @@ The following distro packages are required: `build-essential curl libffi-dev lib
The following distro packages are required: `gcc gcc-c++ gmp gmp-devel make ncurses ncurses-compat-libs xz perl`
### Linux Mageia
The following distro packages are required: `curl gcc gcc-c++ gmp libffi-devel libffi7 libgmp-devel libgmp10 make libncurses-devel libncurses5 xz perl`
### Linux CentOS
The following distro packages are required: `gcc gcc-c++ gmp gmp-devel make ncurses ncurses-compat-libs xz perl`

View File

@@ -247,6 +247,7 @@ executable ghcup
, base >=4.12 && <5
, bytestring >=0.10 && <0.12
, cabal-plan ^>=0.7.2
, cabal-install-parsers >=0.4.5
, containers ^>=0.6
, deepseq ^>=1.4
, directory ^>=1.3.6.0

View File

@@ -106,6 +106,7 @@ fetchToolBindist :: ( MonadFail m
-> Maybe FilePath
-> Excepts
'[ DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -288,6 +289,7 @@ upgradeGHCup :: ( MonadMask m
-> Excepts
'[ CopyError
, DigestError
, ContentLengthError
, GPGError
, GPGError
, DownloadFailed
@@ -308,7 +310,7 @@ upgradeGHCup mtarget force' fatal = do
dli <- liftE $ getDownloadInfo GHCup latestVer
tmp <- fromGHCupPath <$> lift withGHCupTmpDir
let fn = "ghcup" <> exeExt
p <- liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmp (Just fn) False
p <- liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) (_dlCSize dli) tmp (Just fn) False
let destDir = takeDirectory destFile
destFile = fromMaybe (binDir </> fn) mtarget
lift $ logDebug $ "mkdir -p " <> T.pack destDir

View File

@@ -81,6 +81,7 @@ installCabalBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -184,6 +185,7 @@ installCabalBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload

View File

@@ -114,7 +114,7 @@ getDownloadsF :: ( FromJSONKey Tool
, MonadMask m
)
=> Excepts
'[DigestError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
'[DigestError, ContentLengthError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
m
GHCupInfo
getDownloadsF = do
@@ -162,17 +162,21 @@ getBase :: ( MonadReader env m
, MonadMask m
)
=> URI
-> Excepts '[GPGError, DigestError, JSONError, FileDoesNotExistError] m GHCupInfo
-> Excepts '[DownloadFailed, GPGError, DigestError, ContentLengthError, JSONError, FileDoesNotExistError] m GHCupInfo
getBase uri = do
Settings { noNetwork, downloader } <- lift getSettings
Settings { noNetwork, downloader, metaMode } <- lift getSettings
-- try to download yaml... usually this writes it into cache dir,
-- but in some cases not (e.g. when using file://), so we honour
-- the return filepath, if any
mYaml <- if noNetwork && view (uriSchemeL' % schemeBSL') uri /= "file" -- for file://, let it fall through
then pure Nothing
else handleIO (\e -> lift (warnCache (displayException e) downloader) >> pure Nothing)
. catchE @_ @_ @'[] (\e@(DownloadFailed _) -> lift (warnCache (prettyShow e) downloader) >> pure Nothing)
else handleIO (\e -> case metaMode of
Strict -> throwIO e
Lax -> lift (warnCache (displayException e) downloader) >> pure Nothing)
. catchE @_ @_ @'[DownloadFailed] (\e@(DownloadFailed _) -> case metaMode of
Strict -> throwE e
Lax -> lift (warnCache (prettyShow e) downloader) >> pure Nothing)
. fmap Just
. smartDl
$ uri
@@ -184,7 +188,7 @@ getBase uri = do
liftE
. onE_ (onError actualYaml)
. lEM' @_ @_ @'[JSONError] (\(displayException -> e) -> JSONDecodeError $ unlines [e, "Consider removing " <> actualYaml <> " manually."])
. liftIO
. liftIO
. Y.decodeFileEither
$ actualYaml
where
@@ -229,6 +233,7 @@ getBase uri = do
-> Excepts
'[ DownloadFailed
, DigestError
, ContentLengthError
, GPGError
]
m1
@@ -242,7 +247,7 @@ getBase uri = do
Settings { metaCache } <- lift getSettings
-- for local files, let's short-circuit and ignore access time
if | scheme == "file" -> liftE $ download uri' Nothing Nothing (fromGHCupPath cacheDir) Nothing True
if | scheme == "file" -> liftE $ download uri' Nothing Nothing Nothing (fromGHCupPath cacheDir) Nothing True
| e -> do
accessTime <- fmap utcTimeToPOSIXSeconds $ liftIO $ getAccessTime json_file
let sinceLastAccess = utcTimeToPOSIXSeconds currentTime - accessTime
@@ -258,7 +263,7 @@ getBase uri = do
where
dlWithMod modTime json_file = do
let (dir, fn) = splitFileName json_file
f <- liftE $ download uri' (Just $ over pathL' (<> ".sig") uri') Nothing dir (Just fn) True
f <- liftE $ download uri' (Just $ over pathL' (<> ".sig") uri') Nothing Nothing dir (Just fn) True
liftIO $ setModificationTime f modTime
liftIO $ setAccessTime f modTime
pure f
@@ -324,13 +329,14 @@ download :: ( MonadReader env m
=> URI
-> Maybe URI -- ^ URI for gpg sig
-> Maybe T.Text -- ^ expected hash
-> Maybe Integer -- ^ expected content length
-> FilePath -- ^ destination dir (ignored for file:// scheme)
-> Maybe FilePath -- ^ optional filename
-> Bool -- ^ whether to read and write etags
-> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
download uri gpgUri eDigest dest mfn etags
| scheme == "https" = dl
| scheme == "http" = dl
-> Excepts '[DigestError, ContentLengthError, DownloadFailed, GPGError] m FilePath
download uri gpgUri eDigest eCSize dest mfn etags
| scheme == "https" = liftE dl
| scheme == "http" = liftE dl
| scheme == "file" = do
let destFile' = T.unpack . decUTF8Safe $ view pathL' uri
lift $ logDebug $ "using local file: " <> T.pack destFile'
@@ -351,7 +357,7 @@ download uri gpgUri eDigest dest mfn etags
-- download
flip onException
(lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile baseDestFile))
$ catchAllE @_ @'[GPGError, ProcessError, DownloadFailed, UnsupportedScheme, DigestError] @'[DigestError, DownloadFailed, GPGError]
$ catchAllE @_ @'[GPGError, ProcessError, DownloadFailed, UnsupportedScheme, DigestError, ContentLengthError] @'[DigestError, ContentLengthError, DownloadFailed, GPGError]
(\e' -> do
lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile baseDestFile)
case e' of
@@ -401,19 +407,37 @@ download uri gpgUri eDigest dest mfn etags
CapturedProcess { _stdErr } -> lift $ logDebug $ decUTF8Safe' _stdErr
_ -> pure ()
forM_ eCSize (liftE . flip checkCSize baseDestFile)
forM_ eDigest (liftE . flip checkDigest baseDestFile)
pure baseDestFile
curlDL :: (MonadCatch m, MonadMask m, MonadIO m) => [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlDL :: ( MonadCatch m
, MonadMask m
, MonadIO m
)
=> [String]
-> FilePath
-> URI
-> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
liftE $ lEM @_ @'[ProcessError] $ exec "curl"
(o' ++ ["-fL", "-o", destFileTemp, T.unpack uri']) Nothing Nothing
(o' ++ ["-fL", "-o", destFileTemp, T.unpack uri']
++ maybe [] (\s -> ["--max-filesize", show s]) eCSize
) Nothing Nothing
liftIO $ renameFile destFileTemp destFile
curlEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
=> [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlEtagsDL :: ( MonadReader env m
, HasLog env
, MonadCatch m
, MonadMask m
, MonadIO m
)
=> [String]
-> FilePath
-> URI
-> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlEtagsDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
dh <- liftIO $ emptySystemTempFile "curl-header"
@@ -440,7 +464,14 @@ download uri gpgUri eDigest dest mfn etags
lift $ writeEtags destFile (parseEtags headers)
wgetDL :: (MonadCatch m, MonadMask m, MonadIO m) => [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
wgetDL :: ( MonadCatch m
, MonadMask m
, MonadIO m
)
=> [String]
-> FilePath
-> URI
-> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
wgetDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
@@ -449,7 +480,12 @@ download uri gpgUri eDigest dest mfn etags
liftIO $ renameFile destFileTemp destFile
wgetEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
wgetEtagsDL :: ( MonadReader env m
, HasLog env
, MonadCatch m
, MonadMask m
, MonadIO m
)
=> [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
wgetEtagsDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
@@ -471,7 +507,10 @@ download uri gpgUri eDigest dest mfn etags
| otherwise -> throwE (NonZeroExit i' "wget" opts)
#if defined(INTERNAL_DOWNLOADER)
internalDL :: (MonadCatch m, MonadMask m, MonadIO m)
internalDL :: ( MonadCatch m
, MonadMask m
, MonadIO m
)
=> FilePath -> URI -> Excepts '[DownloadFailed, UnsupportedScheme] m ()
internalDL destFile uri' = do
let destFileTemp = tmpFile destFile
@@ -481,11 +520,16 @@ download uri gpgUri eDigest dest mfn etags
@'[DownloadFailed]
(\e@(HTTPNotModified _) ->
throwE @_ @'[DownloadFailed] (DownloadFailed (toVariantAt @0 e :: V '[HTTPNotModified])))
$ downloadToFile https host fullPath port destFileTemp mempty
$ downloadToFile https host fullPath port destFileTemp mempty eCSize
liftIO $ renameFile destFileTemp destFile
internalEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
internalEtagsDL :: ( MonadReader env m
, HasLog env
, MonadCatch m
, MonadMask m
, MonadIO m
)
=> FilePath -> URI -> Excepts '[DownloadFailed, UnsupportedScheme] m ()
internalEtagsDL destFile uri' = do
let destFileTemp = tmpFile destFile
@@ -497,7 +541,7 @@ download uri gpgUri eDigest dest mfn etags
liftE
$ catchE @HTTPNotModified @'[DownloadFailed] @'[] (\(HTTPNotModified etag) -> lift $ writeEtags destFile (pure $ Just etag))
$ do
r <- downloadToFile https host fullPath port destFileTemp addHeaders
r <- downloadToFile https host fullPath port destFileTemp addHeaders eCSize
liftIO $ renameFile destFileTemp destFile
lift $ writeEtags destFile (pure $ decUTF8Safe <$> getHeader r "etag")
#endif
@@ -505,7 +549,7 @@ download uri gpgUri eDigest dest mfn etags
-- Manage to find a file we can write the body into.
getDestFile :: Monad m => URI -> Maybe FilePath -> Excepts '[NoUrlBase] m FilePath
getDestFile uri' mfn' =
getDestFile uri' mfn' =
let path = view pathL' uri'
in case mfn' of
Just fn -> pure (dest </> fn)
@@ -574,14 +618,14 @@ downloadCached :: ( MonadReader env m
)
=> DownloadInfo
-> Maybe FilePath -- ^ optional filename
-> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
-> Excepts '[DigestError, ContentLengthError, DownloadFailed, GPGError] m FilePath
downloadCached dli mfn = do
Settings{ cache } <- lift getSettings
case cache of
True -> downloadCached' dli mfn Nothing
False -> do
tmp <- lift withGHCupTmpDir
liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) (fromGHCupPath tmp) mfn False
liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) (_dlCSize dli) (fromGHCupPath tmp) mfn False
downloadCached' :: ( MonadReader env m
@@ -596,7 +640,7 @@ downloadCached' :: ( MonadReader env m
=> DownloadInfo
-> Maybe FilePath -- ^ optional filename
-> Maybe FilePath -- ^ optional destination dir (default: cacheDir)
-> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
-> Excepts '[DigestError, ContentLengthError, DownloadFailed, GPGError] m FilePath
downloadCached' dli mfn mDestDir = do
Dirs { cacheDir } <- lift getDirs
let destDir = fromMaybe (fromGHCupPath cacheDir) mDestDir
@@ -605,9 +649,10 @@ downloadCached' dli mfn mDestDir = do
fileExists <- liftIO $ doesFileExist cachfile
if
| fileExists -> do
forM_ (view dlCSize dli) $ \s -> liftE $ checkCSize s cachfile
liftE $ checkDigest (view dlHash dli) cachfile
pure cachfile
| otherwise -> liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) destDir mfn False
| otherwise -> liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) (_dlCSize dli) destDir mfn False
@@ -638,6 +683,25 @@ checkDigest eDigest file = do
cDigest <- throwEither . E.decodeUtf8' . B16.encode . SHA256.hashlazy $ c
when ((cDigest /= eDigest) && verify) $ throwE (DigestError file cDigest eDigest)
checkCSize :: ( MonadReader env m
, HasDirs env
, HasSettings env
, MonadIO m
, MonadThrow m
, HasLog env
)
=> Integer
-> FilePath
-> Excepts '[ContentLengthError] m ()
checkCSize eCSize file = do
Settings{ noVerify } <- lift getSettings
let verify = not noVerify
when verify $ do
let p' = takeFileName file
lift $ logInfo $ "verifying content length of: " <> T.pack p'
cSize <- liftIO $ getFileSize file
when ((eCSize /= cSize) && verify) $ throwE (ContentLengthError (Just file) (Just cSize) eCSize)
-- | Get additional curl args from env. This is an undocumented option.
getCurlOpts :: IO [String]
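
The new `checkCSize` above compares the on-disk size of a downloaded file with the expected content length from the metadata and raises `ContentLengthError` on mismatch. A standalone sketch of that check — `verifyContentLength` is an illustrative name, not part of the codebase:

```haskell
import System.IO (IOMode (ReadMode), hFileSize, withFile)

-- Returns Left with a description on mismatch, mirroring what checkCSize
-- reports via ContentLengthError.
verifyContentLength :: Integer -> FilePath -> IO (Either String ())
verifyContentLength expected file = do
  actual <- withFile file ReadMode hFileSize
  pure $ if actual == expected
    then Right ()
    else Left $ "content length mismatch for " <> file
             <> ": expected " <> show expected
             <> ", got " <> show actual
```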

View File

@@ -17,14 +17,12 @@ import Control.Exception.Safe
import Control.Monad
import Control.Monad.Reader
import Data.ByteString ( ByteString )
import Data.ByteString.Builder
import Data.CaseInsensitive ( CI, original, mk )
import Data.IORef
import Data.Maybe
import Data.Text.Read
import Haskus.Utils.Variant.Excepts
import Network.Http.Client hiding ( URL )
import Optics
import Prelude hiding ( abs
, readFile
, writeFile
@@ -33,7 +31,6 @@ import System.ProgressBar
import URI.ByteString
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as L
import qualified Data.Map.Strict as M
import qualified System.IO.Streams as Streams
@@ -46,27 +43,6 @@ import qualified System.IO.Streams as Streams
----------------------------
-- | Load the result of this download into memory at once.
downloadBS' :: MonadIO m
=> Bool -- ^ https?
-> ByteString -- ^ host (e.g. "www.example.com")
-> ByteString -- ^ path (e.g. "/my/file") including query
-> Maybe Int -- ^ optional port (e.g. 3000)
-> Excepts
'[ HTTPStatusError
, URIParseError
, UnsupportedScheme
, NoLocationHeader
, TooManyRedirs
]
m
L.ByteString
downloadBS' https host path port = do
bref <- liftIO $ newIORef (mempty :: Builder)
let stepper bs = modifyIORef bref (<> byteString bs)
void $ downloadInternal False https host path port stepper (pure ()) mempty
liftIO (readIORef bref <&> toLazyByteString)
downloadToFile :: (MonadMask m, MonadIO m)
=> Bool -- ^ https?
@@ -75,8 +51,9 @@ downloadToFile :: (MonadMask m, MonadIO m)
-> Maybe Int -- ^ optional port (e.g. 3000)
-> FilePath -- ^ destination file to create and write to
-> M.Map (CI ByteString) ByteString -- ^ additional headers
-> Maybe Integer -- ^ expected content length
-> Excepts '[DownloadFailed, HTTPNotModified] m Response
downloadToFile https host fullPath port destFile addHeaders = do
downloadToFile https host fullPath port destFile addHeaders eCSize = do
let stepper = BS.appendFile destFile
setup = BS.writeFile destFile mempty
catchAllE (\case
@@ -84,7 +61,7 @@ downloadToFile https host fullPath port destFile addHeaders = do
| i == 304
, Just e <- M.lookup (mk "etag") headers -> throwE $ HTTPNotModified (decUTF8Safe e)
v -> throwE $ DownloadFailed v
) $ downloadInternal True https host fullPath port stepper setup addHeaders
) $ downloadInternal True https host fullPath port stepper setup addHeaders eCSize
downloadInternal :: MonadIO m
@@ -96,19 +73,21 @@ downloadInternal :: MonadIO m
-> (ByteString -> IO a) -- ^ the consuming step function
-> IO a -- ^ setup action
-> M.Map (CI ByteString) ByteString -- ^ additional headers
-> Maybe Integer
-> Excepts
'[ HTTPStatusError
, URIParseError
, UnsupportedScheme
, NoLocationHeader
, TooManyRedirs
, ContentLengthError
]
m
Response
downloadInternal = go (5 :: Int)
where
go redirs progressBar https host path port consumer setup addHeaders = do
go redirs progressBar https host path port consumer setup addHeaders eCSize = do
r <- liftIO $ withConnection' https host port action
veitherToExcepts r >>= \case
Right r' ->
@@ -138,25 +117,39 @@ downloadInternal = go (5 :: Int)
followRedirectURL bs = case parseURI strictURIParserOptions bs of
Right uri' -> do
(https', host', fullPath', port') <- liftE $ uriToQuadruple uri'
go (redirs - 1) progressBar https' host' fullPath' port' consumer setup addHeaders
go (redirs - 1) progressBar https' host' fullPath' port' consumer setup addHeaders eCSize
Left e -> throwE e
downloadStream r i' = do
void setup
let size = case getHeader r "Content-Length" of
Just x' -> case decimal $ decUTF8Safe x' of
Left _ -> 0
Right (r', _) -> r'
Nothing -> 0
Left _ -> Nothing
Right (r', _) -> Just r'
Nothing -> Nothing
(mpb :: Maybe (ProgressBar ())) <- if progressBar
then Just <$> newProgressBar defStyle 10 (Progress 0 size ())
else pure Nothing
forM_ size $ \s -> forM_ eCSize $ \es -> when (es /= s) $ throwIO (ContentLengthError Nothing (Just s) es)
let size' = eCSize <|> size
(mpb :: Maybe (ProgressBar ())) <- case (progressBar, size') of
(True, Just size'') -> Just <$> newProgressBar defStyle 10 (Progress 0 (fromInteger size'') ())
_ -> pure Nothing
ior <- liftIO $ newIORef 0
outStream <- liftIO $ Streams.makeOutputStream
(\case
Just bs -> do
forM_ mpb $ \pb -> incProgress pb (BS.length bs)
let len = BS.length bs
forM_ mpb $ \pb -> incProgress pb len
-- check we don't exceed size
forM_ size' $ \s -> do
cs <- readIORef ior
when ((cs + toInteger len) > s) $ throwIO (ContentLengthError Nothing (Just (cs + toInteger len)) s)
modifyIORef ior (+ toInteger len)
void $ consumer bs
Nothing -> pure ()
)
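
Besides the after-the-fact check, the internal downloader now also guards while streaming: it keeps a running byte count in an `IORef` and aborts as soon as the count would exceed the expected content length. A simplified sketch of that wrapper under assumed names (`guardedConsumer`, `SizeExceeded` are illustrative):

```haskell
import Control.Exception (Exception, throwIO)
import Control.Monad (forM_, when)
import qualified Data.ByteString as BS
import Data.IORef (modifyIORef', newIORef, readIORef)

newtype SizeExceeded = SizeExceeded Integer deriving Show
instance Exception SizeExceeded

-- Wrap a chunk consumer so it throws once the running byte count would
-- exceed the expected size (if one is known).
guardedConsumer :: Maybe Integer -> (BS.ByteString -> IO ()) -> IO (BS.ByteString -> IO ())
guardedConsumer expected consume = do
  seen <- newIORef (0 :: Integer)
  pure $ \chunk -> do
    let len = toInteger (BS.length chunk)
    forM_ expected $ \limit -> do
      sofar <- readIORef seen
      when (sofar + len > limit) $ throwIO (SizeExceeded (sofar + len))
    modifyIORef' seen (+ len)
    consume chunk
```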

View File

@@ -219,6 +219,29 @@ instance Pretty DigestError where
<+> text (T.unpack expectedDigest) <+> text "but got" <+> pPrint currentDigest <+> text
"\nConsider removing the file in case it's cached and try again."
-- | File content length verification failed.
data ContentLengthError = ContentLengthError (Maybe FilePath) (Maybe Integer) Integer
deriving Show
instance Pretty ContentLengthError where
pPrint (ContentLengthError Nothing Nothing expectedSize) =
text "Content length exceeded expected size:"
<+> text (show expectedSize)
<+> text "\nConsider removing the file in case it's cached and try again."
pPrint (ContentLengthError Nothing (Just currentSize) expectedSize) =
text "Content length error. Expected"
<+> text (show expectedSize) <+> text "but got" <+> pPrint currentSize <+> text
"\nConsider removing the file in case it's cached and try again."
pPrint (ContentLengthError (Just fp) (Just currentSize) expectedSize) =
text "Content length error for" <+> text (fp <> ": expected")
<+> text (show expectedSize) <+> text "but got" <+> pPrint currentSize <+> text
"\nConsider removing the file in case it's cached and try again."
pPrint (ContentLengthError (Just fp) Nothing expectedSize) =
text "Content length error for" <+> text (fp <> ": expected")
<+> text (show expectedSize) <+> text "\nConsider removing the file in case it's cached and try again."
instance Exception ContentLengthError
-- | File digest verification failed.
data GPGError = forall xs . (ToVariantMaybe DownloadFailed xs, PopVariant DownloadFailed xs, Show (V xs), Pretty (V xs)) => GPGError (V xs)

View File

@@ -109,6 +109,7 @@ fetchGHCSrc :: ( MonadFail m
-> Maybe FilePath
-> Excepts
'[ DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -152,6 +153,7 @@ installGHCBindist :: ( MonadFail m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -357,6 +359,7 @@ installGHCBin :: ( MonadFail m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -628,6 +631,7 @@ compileGHC :: ( MonadMask m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, GHCupSetError
@@ -676,7 +680,7 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
(view dlSubdir dlInfo)
liftE $ applyAnyPatch patches (fromGHCupPath workdir)
pure (workdir, tmpUnpack, tver)
pure (workdir, tmpUnpack, Just tver)
RemoteDist uri -> do
lift $ logDebug $ "Requested to compile (from uri): " <> T.pack (show uri)
@@ -684,7 +688,7 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
-- download source tarball
tmpDownload <- lift withGHCupTmpDir
tmpUnpack <- lift mkGhcupTmpDir
tar <- liftE $ download uri Nothing Nothing (fromGHCupPath tmpDownload) Nothing False
tar <- liftE $ download uri Nothing Nothing Nothing (fromGHCupPath tmpDownload) Nothing False
(bf, tver) <- liftE $ cleanUpOnError @'[UnknownArchive, ArchiveResult, ProcessError] tmpUnpack $ do
liftE $ unpackToDir (fromGHCupPath tmpUnpack) tar
let regex = [s|^(.*/)*boot$|] :: B.ByteString
@@ -694,18 +698,19 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
execBlank
regex
)
tver <- liftE $ getGHCVer (appendGHCupPath tmpUnpack (takeDirectory bootFile))
tver <- liftE $ catchAllE @_ @'[ProcessError, ParseError] @'[] (\_ -> pure Nothing) $ fmap Just $ getGHCVer
(appendGHCupPath tmpUnpack (takeDirectory bootFile))
pure (bootFile, tver)
let workdir = appendGHCupPath tmpUnpack (takeDirectory bf)
pure (workdir, tmpUnpack, mkTVer tver)
pure (workdir, tmpUnpack, mkTVer <$> tver)
-- clone from git
GitDist GitBranch{..} -> do
tmpUnpack <- lift mkGhcupTmpDir
let git args = execLogged "git" ("--no-pager":args) (Just $ fromGHCupPath tmpUnpack) "git" Nothing
tver <- reThrowAll @_ @'[PatchFailed, ProcessError, NotFoundInPATH, DigestError, DownloadFailed, GPGError] DownloadFailed $ do
tver <- reThrowAll @_ @'[PatchFailed, ProcessError, NotFoundInPATH, DigestError, ContentLengthError, DownloadFailed, GPGError] DownloadFailed $ do
let rep = fromMaybe "https://gitlab.haskell.org/ghc/ghc.git" repo
lift $ logInfo $ "Fetching git repo " <> T.pack rep <> " at ref " <> T.pack ref <> " (this may take a while)"
lEM $ git [ "init" ]
@@ -715,7 +720,7 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
, fromString rep ]
-- figure out if we can do a shallow clone
remoteBranches <- catchE @ProcessError @'[PatchFailed, ProcessError, NotFoundInPATH, DigestError, DownloadFailed, GPGError] @'[PatchFailed, NotFoundInPATH, DigestError, DownloadFailed, GPGError] (\(_ :: ProcessError) -> pure [])
remoteBranches <- catchE @ProcessError @'[PatchFailed, ProcessError, NotFoundInPATH, DigestError, ContentLengthError, DownloadFailed, GPGError] @'[PatchFailed, NotFoundInPATH, DigestError, DownloadFailed, GPGError] (\(_ :: ProcessError) -> pure [])
$ fmap processBranches $ gitOut ["ls-remote", "--heads", "origin"] (fromGHCupPath tmpUnpack)
let shallow_clone
| isCommitHash ref = True
@@ -745,20 +750,23 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
liftE $ applyAnyPatch patches (fromGHCupPath tmpUnpack)
-- bootstrap
tver <- liftE $ getGHCVer tmpUnpack
tver <- liftE $ catchAllE @_ @'[ProcessError, ParseError] @'[] (\_ -> pure Nothing) $ fmap Just $ getGHCVer
tmpUnpack
liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpUnpack)
lift $ logInfo $ "Examining git ref " <> T.pack ref <> "\n " <>
"GHC version (from Makefile): " <> prettyVer tver <>
"GHC version (from Makefile): " <> T.pack (show (prettyVer <$> tver)) <>
(if not shallow_clone then "\n " <> "'git describe' output: " <> fromJust git_describe else mempty) <>
(if isCommitHash ref then mempty else "\n " <> "commit hash: " <> chash)
liftIO $ threadDelay 5000000 -- give the user a sec to intervene
pure tver
pure (tmpUnpack, tmpUnpack, GHCTargetVersion Nothing tver)
pure (tmpUnpack, tmpUnpack, mkTVer <$> tver)
-- the version that's installed may differ from the
-- compiled version, so the user can overwrite it
let installVer = maybe tver (\ov' -> tver { _tvVersion = ov' }) ov
installVer <- if | Just ov' <- ov -> pure (mkTVer ov')
| Just tver' <- tver -> pure tver'
| otherwise -> fail "Newer GHCs don't support discovering the version in git. Complain to GHC devs: https://gitlab.haskell.org/ghc/ghc/-/issues/22322"
alreadyInstalled <- lift $ ghcInstalled installVer
alreadySet <- fmap (== Just installVer) $ lift $ ghcSet (_tvTarget installVer)
@@ -781,8 +789,10 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
tmpUnpack
(do
b <- if hadrian
then compileHadrianBindist tver (fromGHCupPath workdir) ghcdir
else compileMakeBindist tver (fromGHCupPath workdir) ghcdir
-- prefer 'tver', because the real version carries out compatibility checks
-- we don't want the user to do funny things with it
then compileHadrianBindist (fromMaybe installVer tver) (fromGHCupPath workdir) ghcdir
else compileMakeBindist (fromMaybe installVer tver) (fromGHCupPath workdir) ghcdir
bmk <- liftIO $ handleIO (\_ -> pure "") $ B.readFile (build_mk $ fromGHCupPath workdir)
pure (b, bmk)
)
@@ -826,14 +836,14 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
, MonadThrow m
)
=> GHCupPath
-> Excepts '[ProcessError] m Version
-> Excepts '[ProcessError, ParseError] m Version
getGHCVer tmpUnpack = do
lEM $ execWithGhcEnv "python3" ["./boot"] (Just $ fromGHCupPath tmpUnpack) "ghc-bootstrap"
lEM $ execWithGhcEnv "sh" ["./configure"] (Just $ fromGHCupPath tmpUnpack) "ghc-bootstrap"
CapturedProcess {..} <- lift $ makeOut
["show!", "--quiet", "VALUE=ProjectVersion" ] (Just $ fromGHCupPath tmpUnpack)
case _exitCode of
ExitSuccess -> throwEither . MP.parse ghcProjectVersion "" . T.pack . stripNewlineEnd . T.unpack . decUTF8Safe' $ _stdOut
ExitSuccess -> either (throwE . ParseError . show) pure . MP.parse ghcProjectVersion "" . T.pack . stripNewlineEnd . T.unpack . decUTF8Safe' $ _stdOut
ExitFailure c -> throwE $ NonZeroExit c "make" ["show!", "--quiet", "VALUE=ProjectVersion" ]
defaultConf =

View File

@@ -105,6 +105,7 @@ installHLSBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -297,6 +298,7 @@ installHLSBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -344,6 +346,7 @@ compileHLS :: ( MonadMask m
, GPGError
, DownloadFailed
, DigestError
, ContentLengthError
, UnknownArchive
, TarDirDoesNotExist
, ArchiveResult
@@ -401,7 +404,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal upda
-- download source tarball
tmpDownload <- lift withGHCupTmpDir
tmpUnpack <- lift mkGhcupTmpDir
tar <- liftE $ download uri Nothing Nothing (fromGHCupPath tmpDownload) Nothing False
tar <- liftE $ download uri Nothing Nothing Nothing (fromGHCupPath tmpDownload) Nothing False
(cf, tver) <- liftE $ cleanUpOnError tmpUnpack $ do
unpackToDir (fromGHCupPath tmpUnpack) tar
let regex = [s|^(.*/)*haskell-language-server\.cabal$|] :: B.ByteString
@@ -481,7 +484,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal upda
liftE $ runBuildAction
tmpUnpack
(reThrowAll @_ @'[GPGError, DownloadFailed, DigestError, PatchFailed, ProcessError, FileAlreadyExistsError, CopyError] @'[BuildFailed] (BuildFailed $ fromGHCupPath workdir) $ do
(reThrowAll @_ @'[GPGError, DownloadFailed, DigestError, ContentLengthError, PatchFailed, ProcessError, FileAlreadyExistsError, CopyError] @'[BuildFailed] (BuildFailed $ fromGHCupPath workdir) $ do
let tmpInstallDir = fromGHCupPath workdir </> "out"
liftIO $ createDirRecursive' tmpInstallDir
@@ -497,7 +500,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal upda
| otherwise -> pure (takeFileName cp)
Just (Right uri) -> do
tmpUnpack' <- lift withGHCupTmpDir
cp <- liftE $ download uri Nothing Nothing (fromGHCupPath tmpUnpack') (Just "cabal.project") False
cp <- liftE $ download uri Nothing Nothing Nothing (fromGHCupPath tmpUnpack') (Just "cabal.project") False
copyFileE cp (fromGHCupPath workdir </> "cabal.project") False
pure "cabal.project"
Nothing
@@ -511,7 +514,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal upda
| otherwise -> pure "cabal.project"
forM_ cabalProjectLocal $ \uri -> do
tmpUnpack' <- lift withGHCupTmpDir
cpl <- liftE $ download uri Nothing Nothing (fromGHCupPath tmpUnpack') (Just (cp <.> "local")) False
cpl <- liftE $ download uri Nothing Nothing Nothing (fromGHCupPath tmpUnpack') (Just (cp <.> "local")) False
copyFileE cpl (fromGHCupPath workdir </> cp <.> "local") False
artifacts <- forM (sort ghcs) $ \ghc -> do
let ghcInstallDir = tmpInstallDir </> T.unpack (prettyVer ghc)

View File

@@ -48,6 +48,7 @@ import Streamly.Internal.Data.Unfold.Type
import qualified Streamly.Internal.Data.Unfold as U
import Streamly.Internal.Control.Concurrent ( withRunInIO )
import Streamly.Internal.Data.IOFinalizer ( newIOFinalizer, runIOFinalizer )
import GHC.IO.Exception (IOException(ioe_type), IOErrorType (..))
-- | On unix, we can use symlinks, so we just get the
@@ -116,8 +117,18 @@ copyFile from to fail' = do
let dflags = [ FD.oNofollow
, if fail' then FD.oExcl else FD.oTrunc
]
let openFdHandle' = openFdHandle to SPI.WriteOnly dflags $ Just sourceFileMode
bracket
(openFdHandle to SPI.WriteOnly dflags $ Just sourceFileMode)
(handleIO (\e -> if
-- if we copy from regular file to symlink, we need
-- to delete the symlink
| ioe_type e == InvalidArgument
, not fail' -> do
removeLink to
openFdHandle'
| otherwise -> throwIO e
)
openFdHandle')
(hClose . snd)
$ \(_, tH) -> do
hSetBinaryMode fH True
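
The `copyFile` change above handles one specific failure mode: opening the destination with `O_NOFOLLOW` fails with `EINVAL` when the destination is a symlink, so the symlink is removed and the open retried (unless fail-on-exists was requested). A condensed sketch of that retry, with `openDest` standing in for the real `openFdHandle` call:

```haskell
import Control.Exception (catch, throwIO)
import GHC.IO.Exception (IOErrorType (InvalidArgument), IOException (ioe_type))
import System.Posix.Files (removeLink)

-- If the destination turns out to be a symlink (EINVAL under O_NOFOLLOW) and
-- overwriting is allowed, unlink it and retry; otherwise rethrow.
openDestRetrying :: FilePath -> Bool -> IO a -> IO a
openDestRetrying dest failIfExists openDest =
  openDest `catch` \e ->
    if ioe_type e == InvalidArgument && not failIfExists
      then removeLink dest >> openDest
      else throwIO e
```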

View File

@@ -82,6 +82,7 @@ installStackBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -120,6 +121,7 @@ installStackBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload

View File

@@ -262,6 +262,7 @@ data DownloadInfo = DownloadInfo
{ _dlUri :: URI
, _dlSubdir :: Maybe TarDir
, _dlHash :: Text
, _dlCSize :: Maybe Integer
}
deriving (Eq, Ord, GHC.Generic, Show)
@@ -297,10 +298,16 @@ instance NFData URLSource
instance NFData (URIRef Absolute) where
rnf (URI !_ !_ !_ !_ !_) = ()
data MetaMode = Strict
| Lax
deriving (Show, Read, Eq, GHC.Generic)
instance NFData MetaMode
data UserSettings = UserSettings
{ uCache :: Maybe Bool
, uMetaCache :: Maybe Integer
, uMetaMode :: Maybe MetaMode
, uNoVerify :: Maybe Bool
, uVerbose :: Maybe Bool
, uKeepDirs :: Maybe KeepDirs
@@ -314,13 +321,14 @@ data UserSettings = UserSettings
deriving (Show, GHC.Generic)
defaultUserSettings :: UserSettings
defaultUserSettings = UserSettings Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
defaultUserSettings = UserSettings Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
fromSettings :: Settings -> Maybe KeyBindings -> UserSettings
fromSettings Settings{..} Nothing =
UserSettings {
uCache = Just cache
, uMetaCache = Just metaCache
, uMetaMode = Just metaMode
, uNoVerify = Just noVerify
, uVerbose = Just verbose
, uKeepDirs = Just keepDirs
@@ -346,6 +354,7 @@ fromSettings Settings{..} (Just KeyBindings{..}) =
in UserSettings {
uCache = Just cache
, uMetaCache = Just metaCache
, uMetaMode = Just metaMode
, uNoVerify = Just noVerify
, uVerbose = Just verbose
, uKeepDirs = Just keepDirs
@@ -426,6 +435,7 @@ instance NFData LeanAppState
data Settings = Settings
{ cache :: Bool
, metaCache :: Integer
, metaMode :: MetaMode
, noVerify :: Bool
, keepDirs :: KeepDirs
, downloader :: Downloader
@@ -442,7 +452,7 @@ defaultMetaCache :: Integer
defaultMetaCache = 300 -- 5 minutes
defaultSettings :: Settings
defaultSettings = Settings False defaultMetaCache False Never Curl False GHCupURL False GPGNone False Nothing
defaultSettings = Settings False defaultMetaCache Lax False Never Curl False GHCupURL False GPGNone False Nothing
instance NFData Settings
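
`DownloadInfo` gains an optional `_dlCSize` field above; manual call sites (e.g. in `Install.hs` earlier in this diff) now pass `Nothing` when no expected size is known. A simplified, self-contained mirror of the record, just to show where the new field sits — the real type lives in `GHCup.Types` and uses `URI`, `TarDir` and `Text`:

```haskell
data DownloadInfo' = DownloadInfo'
  { dlUri    :: String         -- URI in the real type
  , dlSubdir :: Maybe String   -- Maybe TarDir in the real type
  , dlHash   :: String         -- Text (sha256 digest) in the real type
  , dlCSize  :: Maybe Integer  -- new: expected content length in bytes
  } deriving Show

-- Unknown size: the content-length check is skipped, matching the
-- "optional for now" wording of the commit message.
example :: DownloadInfo'
example = DownloadInfo' "https://example.invalid/ghc.tar.xz" (Just "ghc-.*") "deadbeef" Nothing
```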

View File

@@ -43,6 +43,7 @@ import qualified Text.Megaparsec as MP
import qualified Text.Megaparsec.Char as MPC
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''MetaMode
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Architecture
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''LinuxDistro
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''VSep

View File

@@ -1035,13 +1035,13 @@ applyAnyPatch :: ( MonadReader env m
, MonadIO m)
=> Maybe (Either FilePath [URI])
-> FilePath
-> Excepts '[PatchFailed, DownloadFailed, DigestError, GPGError] m ()
-> Excepts '[PatchFailed, DownloadFailed, DigestError, ContentLengthError, GPGError] m ()
applyAnyPatch Nothing _ = pure ()
applyAnyPatch (Just (Left pdir)) workdir = liftE $ applyPatches pdir workdir
applyAnyPatch (Just (Right uris)) workdir = do
tmpUnpack <- fromGHCupPath <$> lift withGHCupTmpDir
forM_ uris $ \uri -> do
patch <- liftE $ download uri Nothing Nothing tmpUnpack Nothing False
patch <- liftE $ download uri Nothing Nothing Nothing tmpUnpack Nothing False
liftE $ applyPatch patch workdir
@@ -1172,7 +1172,7 @@ ensureGlobalTools :: ( MonadMask m
, MonadUnliftIO m
, MonadFail m
)
=> Excepts '[GPGError, DigestError , DownloadFailed, NoDownload] m ()
=> Excepts '[GPGError, DigestError, ContentLengthError, DownloadFailed, NoDownload] m ()
ensureGlobalTools
| isWindows = do
(GHCupInfo _ _ gTools) <- lift getGHCupInfo
@@ -1184,8 +1184,8 @@ ensureGlobalTools
lift $ logWarn "Digest doesn't match, redownloading gs.exe..."
lift $ logDebug ("rm -f " <> T.pack (fromGHCupPath (cacheDir dirs) </> "gs.exe"))
lift $ hideError doesNotExistErrorType $ recycleFile (fromGHCupPath (cacheDir dirs) </> "gs.exe")
liftE @'[GPGError, DigestError , DownloadFailed] $ dl
) `catchE` liftE @'[GPGError, DigestError , DownloadFailed] dl
liftE @'[GPGError, DigestError, ContentLengthError, DownloadFailed] $ dl
) `catchE` liftE @'[GPGError, DigestError, ContentLengthError, DownloadFailed] dl
| otherwise = pure ()

View File

@@ -465,15 +465,22 @@ withGHCupTmpDir :: ( MonadReader env m
, MonadMask m
, MonadIO m)
=> m GHCupPath
withGHCupTmpDir = snd <$> withRunInIO (\run ->
run
$ allocate
(run mkGhcupTmpDir)
(\fp ->
handleIO (\e -> run
$ logDebug ("Resource cleanup failed for " <> T.pack (fromGHCupPath fp) <> ", error was: " <> T.pack (displayException e)))
. removePathForcibly
$ fp))
withGHCupTmpDir = do
Settings{keepDirs} <- getSettings
snd <$> withRunInIO (\run ->
run
$ allocate
(run mkGhcupTmpDir)
(\fp -> if -- we don't know whether there was a failure, so can only
-- decide for 'Always'
| keepDirs == Always -> pure ()
| otherwise -> handleIO (\e -> run
$ logDebug ("Resource cleanup failed for "
<> T.pack (fromGHCupPath fp)
<> ", error was: "
<> T.pack (displayException e)))
. removePathForcibly
$ fp))

View File

@@ -38,9 +38,13 @@ param (
# Specify the cabal root directory (default: '$InstallDir\cabal')
[string]$CabalDir,
# Whether to disable use of curl.exe
[switch]$DisableCurl
[switch]$DisableCurl,
# The Msys2 version to download (e.g. 20221216)
[string]$Msys2Version
)
$DefaultMsys2Version = "20221216"
$Silent = !$Interactive
function Print-Msg {
@@ -423,12 +427,15 @@ if (!(Test-Path -Path ('{0}' -f $MsysDir))) {
Start-Sleep -s 5
# Download the archive
Print-Msg -msg 'Downloading Msys2 archive...'
$archive = 'msys2-x86_64-latest.sfx.exe'
if (!($Msys2Version)) {
$Msys2Version = $DefaultMsys2Version
}
Print-Msg -msg ('Downloading Msys2 archive {0}...' -f $Msys2Version)
$archive = ('msys2-base-x86_64-{0}.sfx.exe' -f $Msys2Version)
$archivePath = ('{0}\{1}' -f ([IO.Path]::GetTempPath()), "$archive")
if ((Get-Command -Name 'curl.exe' -ErrorAction SilentlyContinue) -and !($DisableCurl)) {
Exec "curl.exe" '-o' "$archivePath" ('https://repo.msys2.org/distrib/{0}' -f "$archive")
Exec "curl.exe" '-o' "$archivePath" ('https://repo.msys2.org/distrib/x86_64/{0}' -f "$archive")
} else {
Get-FileWCSynchronous -url ('https://repo.msys2.org/distrib/{0}' -f $archive) -destinationFolder ([IO.Path]::GetTempPath()) -includeStats
}
@@ -603,7 +610,7 @@ if ($DisableCurl) {
if ((Get-Process -ID $PID).ProcessName.StartsWith("bootstrap-haskell") -Or $InBash) {
Exec "$Bash" '-lc' ('{4} {6} {7} {8} {9} {10} [ -n ''{1}'' ] && export GHCUP_MSYS2=$(cygpath -m ''{1}'') ; [ -n ''{2}'' ] && export GHCUP_INSTALL_BASE_PREFIX=$(cygpath -m ''{2}/'') ; export PATH=$(cygpath -u ''{3}/bin''):$PATH ; export CABAL_DIR=''{5}'' ; [[ ''{0}'' = https* ]] && {11} {0} | bash || cat $(cygpath -m ''{0}'') | bash' -f $BootstrapUrl, $MsysDir, $GhcupBasePrefix, $GhcupDir, $SilentExport, $CabalDirFull, $StackInstallExport, $HLSInstallExport, $AdjustCabalConfigExport, $MinimalExport, $BootstrapDownloader, $DownloadScript)
} else {
Exec "$Msys2Shell" '-mingw64' '-mintty' '-c' ('{4} {6} {7} {8} {9} {10} [ -n ''{1}'' ] && export GHCUP_MSYS2=$(cygpath -m ''{1}'') ; [ -n ''{2}'' ] && export GHCUP_INSTALL_BASE_PREFIX=$(cygpath -m ''{2}/'') ; export PATH=$(cygpath -u ''{3}/bin''):$PATH ; export CABAL_DIR=''{5}'' ; trap ''echo Press any key to exit && read -n 1 && exit'' 2 ; [[ ''{0}'' = https* ]] && {11} {0} | bash || cat $(cygpath -m ''{0}'') | bash ; echo ''Press any key to exit'' && read -n 1' -f $BootstrapUrl, $MsysDir, $GhcupBasePrefix, $GhcupDir, $SilentExport, $CabalDirFull, $StackInstallExport, $HLSInstallExport, $AdjustCabalConfigExport, $MinimalExport, $BootstrapDownloader, $DownloadScript)
Exec "$Msys2Shell" '-mingw64' '-mintty' '-shell' 'bash' '-c' ('{4} {6} {7} {8} {9} {10} [ -n ''{1}'' ] && export GHCUP_MSYS2=$(cygpath -m ''{1}'') ; [ -n ''{2}'' ] && export GHCUP_INSTALL_BASE_PREFIX=$(cygpath -m ''{2}/'') ; export PATH=$(cygpath -u ''{3}/bin''):$PATH ; export CABAL_DIR=''{5}'' ; trap ''echo Press any key to exit && read -n 1 && exit'' 2 ; [[ ''{0}'' = https* ]] && {11} {0} | bash || cat $(cygpath -m ''{0}'') | bash ; echo ''Press any key to exit'' && read -n 1' -f $BootstrapUrl, $MsysDir, $GhcupBasePrefix, $GhcupDir, $SilentExport, $CabalDirFull, $StackInstallExport, $HLSInstallExport, $AdjustCabalConfigExport, $MinimalExport, $BootstrapDownloader, $DownloadScript)
}

File diff suppressed because it is too large

File diff suppressed because it is too large