Merge branch 'gpg'

Julian Ospald 2021-09-19 12:28:46 +02:00
commit 709658462c
Signed by: hasufell
GPG Key ID: 3786C5262ECB4A3F
12 changed files with 290 additions and 124 deletions

@@ -18,6 +18,7 @@ Similar in scope to [rustup](https://github.com/rust-lang-nursery/rustup.rs), [p
* [Vim integration](#vim-integration)
* [Usage](#usage)
* [Configuration](#configuration)
* [GPG verification](#gpg-verification)
* [Manpages](#manpages)
* [Shell-completion](#shell-completion)
* [Compiling GHC from source](#compiling-ghc-from-source)
@@ -146,6 +147,34 @@ explaining all possible configurations can be found in this repo: [config.yaml](

Partial configuration is fine. Command line options always override the config file settings.
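
For instance (a hypothetical combination), a config file that sets `gpg-setting: GPGLax` is still overridden per invocation on the command line:

```sh
# config.yaml says GPGLax, but this particular run verifies strictly
ghcup --gpg strict install ghc 9.0.1
```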

### GPG verification

GHCup supports verifying the GPG signature of the metadata file. The metadata file in turn contains SHA256 hashes of all downloads, so this is cryptographically secure.

First, obtain the gpg key:

```sh
gpg --batch --keyserver keys.openpgp.org --recv-keys 7784930957807690A66EBDBE3786C5262ECB4A3F
```

Then verify the gpg key in one of these ways:

1. find out where I live and visit me to do offline key signing
2. figure out my mobile phone number and call me to verify the fingerprint
3. more boring: contact me on Libera IRC (`maerwald`) and verify the fingerprint

Once you've verified the key, you have to figure out whether you trust me.

If you trust me, you can configure gpg in `~/.ghcup/config.yaml`:

```yml
gpg-setting: GPGLax # GPGStrict | GPGLax | GPGNone
```

In `GPGStrict` mode, ghcup will fail if verification fails. In `GPGLax` mode it will only print a warning.

You can also pass the mode via `ghcup --gpg <strict|lax|none>`.
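
If you want to sanity-check the setup by hand, you can also verify the signed metadata yourself. The sketch below is illustrative only: it assumes the default cache location, and the file names are placeholders for whatever metadata version your ghcup actually fetches.

```sh
# hypothetical file names -- adjust to the metadata file ghcup downloaded
cd ~/.ghcup/cache
gpg --batch --verify ghcup-0.0.6.yaml.sig ghcup-0.0.6.yaml

# or simply force strict verification for a single invocation
ghcup --gpg strict list
```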

### Manpages

For man pages to work you need [man-db](http://man-db.nongnu.org/) as your `man` provider, then issue `man ghc`. Manpages only work for the currently set ghc.

@@ -206,6 +235,7 @@ This is the complete list of env variables that change GHCup behavior:

* `GHCUP_INSTALL_BASE_PREFIX`: the base of ghcup (default: `$HOME`)
* `GHCUP_CURL_OPTS`: additional options that can be passed to curl
* `GHCUP_WGET_OPTS`: additional options that can be passed to wget
* `GHCUP_GPG_OPTS`: additional options that can be passed to gpg (see the example after this list)
* `GHCUP_SKIP_UPDATE_CHECK`: Skip the (possibly annoying) update check when you run a command
* `CC`/`LD` etc.: full environment is passed to the build system when compiling GHC via GHCup
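
As an illustration of `GHCUP_GPG_OPTS` (the keyring path below is made up), the value is split on spaces and passed to gpg in addition to ghcup's own flags:

```sh
# example only: point gpg at a dedicated keyring instead of the default one
GHCUP_GPG_OPTS="--no-default-keyring --keyring $HOME/.ghcup/ghcup.gpg" \
  ghcup --gpg strict upgrade
```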

@@ -119,7 +119,7 @@ main = do
, rawOutter = \_ -> pure ()
}
dirs <- liftIO getAllDirs
- let leanAppstate = LeanAppState (Settings True False Never Curl True GHCupURL False) dirs defaultKeyBindings loggerConfig
+ let leanAppstate = LeanAppState (Settings True False Never Curl True GHCupURL False GPGNone) dirs defaultKeyBindings loggerConfig
pfreq <- (
flip runReaderT leanAppstate . runE @'[NoCompatiblePlatform, NoCompatibleArch, DistroNotFound] $ platformRequest
@@ -129,7 +129,7 @@ main = do
flip runReaderT leanAppstate $ logError $ T.pack $ prettyShow e
liftIO $ exitWith (ExitFailure 2)
- let appstate = AppState (Settings True False Never Curl True GHCupURL False) dirs defaultKeyBindings (GHCupInfo mempty mempty mempty) pfreq loggerConfig
+ let appstate = AppState (Settings True False Never Curl True GHCupURL False GPGNone) dirs defaultKeyBindings (GHCupInfo mempty mempty mempty) pfreq loggerConfig
_ <- customExecParser (prefs showHelpOnError) (info (opts <**> helper) idm)
>>= \Options {..} -> case optCommand of

@@ -229,6 +229,7 @@ validateTarballs (TarballFilter etool versionRegex) dls gt = do
downloadAll ref dli = do
r <- runResourceT
. runE @'[DigestError
, GPGError
, DownloadFailed
, UnknownArchive
, ArchiveResult
@@ -237,7 +238,7 @@ validateTarballs (TarballFilter etool versionRegex) dls gt = do
case etool of
Right (Just GHCup) -> do
tmpUnpack <- lift mkGhcupTmpDir
- _ <- liftE $ download (_dlUri dli) (Just (_dlHash dli)) tmpUnpack Nothing False
+ _ <- liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmpUnpack Nothing False
pure Nothing
Right _ -> do
p <- liftE $ downloadCached dli Nothing
@@ -247,7 +248,7 @@ validateTarballs (TarballFilter etool versionRegex) dls gt = do
$ p
Left ShimGen -> do
tmpUnpack <- lift mkGhcupTmpDir
- _ <- liftE $ download (_dlUri dli) (Just (_dlHash dli)) tmpUnpack Nothing False
+ _ <- liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmpUnpack Nothing False
pure Nothing
case r of
VRight (Just basePath) -> do

@@ -429,6 +429,7 @@ install' _ (_, ListResult {..}) = do
, BuildFailed
, TagNotFound
, DigestError
, GPGError
, DownloadFailed
, DirNotEmpty
, NoUpdate
@@ -547,6 +548,7 @@ settings' = unsafePerformIO $ do
, verbose = False
, urlSource = GHCupURL
, noNetwork = False
, gpgSetting = GPGNone
, ..
})
dirs
@@ -591,7 +593,7 @@ getGHCupInfo = do
r <-
flip runReaderT settings
- . runE @'[JSONError , DownloadFailed , FileDoesNotExistError]
+ . runE @'[DigestError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
$ liftE
$ getDownloadsF

@@ -87,15 +87,16 @@ import qualified Text.Megaparsec.Char as MPC
data Options = Options
{
-- global options
optVerbose :: Maybe Bool
, optCache :: Maybe Bool
, optUrlSource :: Maybe URI
, optNoVerify :: Maybe Bool
, optKeepDirs :: Maybe KeepDirs
, optsDownloader :: Maybe Downloader
, optNoNetwork :: Maybe Bool
, optGpg :: Maybe GPGSetting
-- commands
, optCommand :: Command
}

data Command
@@ -310,6 +311,13 @@ opts =
<> hidden
))
<*> invertableSwitch "offline" 'o' False (help "Don't do any network calls, trying cached assets and failing if missing.")
<*> optional (option
(eitherReader gpgParser)
( long "gpg"
<> metavar "<strict|lax|none>"
<> help
"GPG verification (default: none)"
))
<*> com
where
parseUri s' =
@@ -1145,7 +1153,7 @@ tagCompleter tool add = listIOCompleter $ do
, rawOutter = mempty
}
let appState = LeanAppState
- (Settings True False Never Curl False GHCupURL True)
+ (Settings True False Never Curl False GHCupURL True GPGNone)
dirs'
defaultKeyBindings
loggerConfig
@@ -1170,7 +1178,7 @@ versionCompleter criteria tool = listIOCompleter $ do
, colorOutter = mempty
, rawOutter = mempty
}
- let settings = Settings True False Never Curl False GHCupURL True
+ let settings = Settings True False Never Curl False GHCupURL True GPGNone
let leanAppState = LeanAppState
settings
dirs'
@@ -1257,6 +1265,13 @@ downloaderParser s' | t == T.pack "curl" = Right Curl
| otherwise = Left ("Unknown downloader value: " <> s')
where t = T.toLower (T.pack s')

gpgParser :: String -> Either String GPGSetting
gpgParser s' | t == T.pack "strict" = Right GPGStrict
| t == T.pack "lax" = Right GPGLax
| t == T.pack "none" = Right GPGNone
| otherwise = Left ("Unknown gpg setting value: " <> s')
where t = T.toLower (T.pack s')

platformParser :: String -> Either String PlatformRequest
platformParser s' = case MP.parse (platformP <* MP.eof) "" (T.pack s') of
@@ -1336,6 +1351,7 @@ toSettings options = do
keyBindings = maybe defaultKeyBindings mergeKeys uKeyBindings
urlSource = maybe (fromMaybe GHCupURL uUrlSource) OwnSource optUrlSource
noNetwork = fromMaybe (fromMaybe False uNoNetwork) optNoNetwork
gpgSetting = fromMaybe (fromMaybe GPGNone uGPGSetting) optGpg
in (Settings {..}, keyBindings)
#if defined(INTERNAL_DOWNLOADER)
defaultDownloader = Internal
@@ -1371,7 +1387,8 @@ updateSettings config settings = do
verbose' = fromMaybe verbose uVerbose
urlSource' = fromMaybe urlSource uUrlSource
noNetwork' = fromMaybe noNetwork uNoNetwork
- in Settings cache' noVerify' keepDirs' downloader' verbose' urlSource' noNetwork'
+ gpgSetting' = fromMaybe gpgSetting uGPGSetting
+ in Settings cache' noVerify' keepDirs' downloader' verbose' urlSource' noNetwork' gpgSetting'

upgradeOptsP :: Parser UpgradeOpts
upgradeOptsP =
@@ -1396,7 +1413,7 @@ describe_result = $( LitE . StringL <$>
runIO (do
CapturedProcess{..} <- do
dirs <- liftIO getAllDirs
- let settings = AppState (Settings True False Never Curl False GHCupURL False)
+ let settings = AppState (Settings True False Never Curl False GHCupURL False GPGNone)
dirs
defaultKeyBindings
flip runReaderT settings $ executeOut "git" ["describe"] Nothing
@@ -1506,7 +1523,7 @@ Report bugs at <https://gitlab.haskell.org/haskell/ghcup-hs/issues>|]
ghcupInfo <-
( flip runReaderT leanAppstate
- . runE @'[JSONError , DownloadFailed, FileDoesNotExistError]
+ . runE @'[DigestError, GPGError, JSONError , DownloadFailed, FileDoesNotExistError]
$ liftE
$ getDownloadsF
)
@@ -1579,6 +1596,7 @@ Report bugs at <https://gitlab.haskell.org/haskell/ghcup-hs/issues>|]
, BuildFailed
, TagNotFound
, DigestError
, GPGError
, DownloadFailed
, TarDirDoesNotExist
, NextVerNotFound
@@ -1669,6 +1687,7 @@ Report bugs at <https://gitlab.haskell.org/haskell/ghcup-hs/issues>|]
@'[ AlreadyInstalled
, BuildFailed
, DigestError
, GPGError
, DownloadFailed
, GHCupSetError
, NoDownload
@@ -1707,6 +1726,7 @@ Report bugs at <https://gitlab.haskell.org/haskell/ghcup-hs/issues>|]
. runResourceT
. runE
@'[ DigestError
, GPGError
, NoDownload
, NoUpdate
, FileDoesNotExistError
@@ -1723,6 +1743,7 @@ Report bugs at <https://gitlab.haskell.org/haskell/ghcup-hs/issues>|]
, NoToolVersionSet
, NoDownload
, DigestError
, GPGError
, DownloadFailed
, JSONError
, FileDoesNotExistError

@@ -8,6 +8,10 @@ verbose: False
keep-dirs: Errors # Always | Never | Errors
# Which downloader to use
downloader: Curl # Curl | Wget | Internal
# whether to run in offline mode
no-network: False
# whether/how to do gpg verification
gpg-setting: GPGNone # GPGStrict | GPGLax | GPGNone
# TUI key bindings,
# see https://hackage.haskell.org/package/vty-5.31/docs/Graphics-Vty-Input-Events.html#t:Key

@@ -121,6 +121,7 @@ fetchToolBindist :: ( MonadFail m
-> Maybe FilePath
-> Excepts
'[ DigestError
, GPGError
, DownloadFailed
, NoDownload
]
@@ -148,6 +149,7 @@ fetchGHCSrc :: ( MonadFail m
-> Maybe FilePath
-> Excepts
'[ DigestError
, GPGError
, DownloadFailed
, NoDownload
]
@@ -189,6 +191,7 @@ installGHCBindist :: ( MonadFail m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -384,6 +387,7 @@ installGHCBin :: ( MonadFail m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -421,6 +425,7 @@ installCabalBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -526,6 +531,7 @@ installCabalBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -563,6 +569,7 @@ installHLSBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -693,6 +700,7 @@ installHLSBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -731,6 +739,7 @@ installStackBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -768,6 +777,7 @@ installStackBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -1862,6 +1872,7 @@ compileGHC :: ( MonadMask m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, GPGError
, DownloadFailed
, GHCupSetError
, NoDownload
@@ -2309,6 +2320,8 @@ upgradeGHCup :: ( MonadMask m
-> Excepts
'[ CopyError
, DigestError
, GPGError
, GPGError
, DownloadFailed
, NoDownload
, NoUpdate
@@ -2325,7 +2338,7 @@ upgradeGHCup mtarget force' = do
dli <- liftE $ getDownloadInfo GHCup latestVer
tmp <- lift withGHCupTmpDir
let fn = "ghcup" <> exeExt
- p <- liftE $ download (_dlUri dli) (Just (_dlHash dli)) tmp (Just fn) False
+ p <- liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmp (Just fn) False
let destDir = takeDirectory destFile
destFile = fromMaybe (binDir </> fn) mtarget
lift $ logDebug $ "mkdir -p " <> T.pack destDir

@@ -114,7 +114,7 @@ getDownloadsF :: ( FromJSONKey Tool
, MonadMask m
)
=> Excepts
- '[JSONError , DownloadFailed , FileDoesNotExistError]
+ '[DigestError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
m
GHCupInfo
getDownloadsF = do
@@ -165,7 +165,7 @@ getBase :: ( MonadReader env m
, MonadMask m
)
=> URI
- -> Excepts '[JSONError, FileDoesNotExistError] m GHCupInfo
+ -> Excepts '[GPGError, DigestError, JSONError, FileDoesNotExistError] m GHCupInfo
getBase uri = do
Settings { noNetwork, downloader } <- lift getSettings
@@ -176,7 +176,6 @@ getBase uri = do
then pure Nothing
else handleIO (\e -> lift (warnCache (displayException e) downloader) >> pure Nothing)
. catchE @_ @_ @'[] (\e@(DownloadFailed _) -> lift (warnCache (prettyShow e) downloader) >> pure Nothing)
- . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed
. fmap Just
. smartDl
$ uri
@@ -234,6 +233,7 @@ getBase uri = do
-> Excepts
'[ DownloadFailed
, DigestError
, GPGError
]
m1
FilePath
@@ -245,7 +245,7 @@ getBase uri = do
Dirs { cacheDir } <- lift getDirs
-- for local files, let's short-circuit and ignore access time
- if | scheme == "file" -> liftE $ download uri' Nothing cacheDir Nothing True
+ if | scheme == "file" -> liftE $ download uri' Nothing Nothing cacheDir Nothing True
| e -> do
accessTime <- liftIO $ getAccessTime json_file
@@ -258,7 +258,7 @@ getBase uri = do
where
dlWithMod modTime json_file = do
let (dir, fn) = splitFileName json_file
- f <- liftE $ download uri' Nothing dir (Just fn) True
+ f <- liftE $ download uri' (Just $ over pathL' (<> ".sig") uri') Nothing dir (Just fn) True
liftIO $ setModificationTime f modTime
liftIO $ setAccessTime f modTime
pure f
@@ -322,16 +322,17 @@ download :: ( MonadReader env m
, MonadIO m
)
=> URI
-> Maybe URI -- ^ URI for gpg sig
-> Maybe T.Text -- ^ expected hash
-> FilePath -- ^ destination dir (ignored for file:// scheme)
-> Maybe FilePath -- ^ optional filename
-> Bool -- ^ whether to read and write etags
- -> Excepts '[DigestError , DownloadFailed] m FilePath
- download uri eDigest dest mfn etags
+ -> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
+ download uri gpgUri eDigest dest mfn etags
| scheme == "https" = dl
| scheme == "http" = dl
| scheme == "file" = do
- let destFile' = T.unpack . decUTF8Safe $ path
+ let destFile' = T.unpack . decUTF8Safe $ view pathL' uri
lift $ logDebug $ "using local file: " <> T.pack destFile'
forM_ eDigest (liftE . flip checkDigest destFile')
pure destFile'
@@ -340,115 +341,179 @@ download uri eDigest dest mfn etags
where
scheme = view (uriSchemeL' % schemeBSL') uri
dl = do
- destFile <- liftE . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed $ getDestFile
- lift $ logInfo $ "downloading: " <> uri' <> " as file " <> T.pack destFile
+ baseDestFile <- liftE . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed $ getDestFile uri mfn
+ lift $ logInfo $ "downloading: " <> (decUTF8Safe . serializeURIRef') uri <> " as file " <> T.pack baseDestFile
-- destination dir must exist
liftIO $ createDirRecursive' dest
-- download
flip onException
- (lift $ hideError doesNotExistErrorType $ recycleFile destFile)
- $ catchAllE @_ @'[ProcessError, DownloadFailed, UnsupportedScheme]
- (\e ->
- lift (hideError doesNotExistErrorType $ recycleFile destFile)
- >> (throwE . DownloadFailed $ e)
+ (lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile baseDestFile))
+ $ catchAllE @_ @'[GPGError, ProcessError, DownloadFailed, UnsupportedScheme, DigestError] @'[DigestError, DownloadFailed, GPGError]
+ (\e' -> do
+ lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile baseDestFile)
+ case e' of
+ V e@GPGError {} -> throwE e
+ V e@DigestError {} -> throwE e
+ _ -> throwE (DownloadFailed e')
) $ do
- Settings{ downloader, noNetwork } <- lift getSettings
+ Settings{ downloader, noNetwork, gpgSetting } <- lift getSettings
when noNetwork $ throwE (DownloadFailed (V NoNetwork :: V '[NoNetwork]))
- case downloader of
+ downloadAction <- case downloader of
Curl -> do
o' <- liftIO getCurlOpts
if etags
- then do
- dh <- liftIO $ emptySystemTempFile "curl-header"
- flip finally (try @_ @SomeException $ rmFile dh) $
- flip finally (try @_ @SomeException $ rmFile (destFile <.> "tmp")) $ do
- metag <- lift $ readETag destFile
- liftE $ lEM @_ @'[ProcessError] $ exec "curl"
- (o' ++ (if etags then ["--dump-header", dh] else [])
- ++ maybe [] (\t -> ["-H", "If-None-Match: " <> T.unpack t]) metag
- ++ ["-fL", "-o", destFile <.> "tmp", T.unpack uri']) Nothing Nothing
- headers <- liftIO $ T.readFile dh
- -- this nonsense is necessary, because some older versions of curl would overwrite
- -- the destination file when 304 is returned
- case fmap T.words . listToMaybe . fmap T.strip . T.lines . getLastHeader $ headers of
- Just (http':sc:_)
- | sc == "304"
- , T.pack "HTTP" `T.isPrefixOf` http' -> lift $ logDebug "Status code was 304, not overwriting"
- | T.pack "HTTP" `T.isPrefixOf` http' -> do
- lift $ logDebug $ "Status code was " <> sc <> ", overwriting"
- liftIO $ copyFile (destFile <.> "tmp") destFile
- _ -> liftE $ throwE @_ @'[DownloadFailed] (DownloadFailed (toVariantAt @0 (MalformedHeaders headers)
- :: V '[MalformedHeaders]))
- lift $ writeEtags destFile (parseEtags headers)
- else
- liftE $ lEM @_ @'[ProcessError] $ exec "curl"
- (o' ++ ["-fL", "-o", destFile, T.unpack uri']) Nothing Nothing
- Wget -> do
- destFileTemp <- liftIO $ emptySystemTempFile "wget-tmp"
- flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
- o' <- liftIO getWgetOpts
- if etags
- then do
- metag <- lift $ readETag destFile
- let opts = o' ++ maybe [] (\t -> ["--header", "If-None-Match: " <> T.unpack t]) metag
- ++ ["-q", "-S", "-O", destFileTemp , T.unpack uri']
- CapturedProcess {_exitCode, _stdErr} <- lift $ executeOut "wget" opts Nothing
- case _exitCode of
- ExitSuccess -> do
- liftIO $ copyFile destFileTemp destFile
- lift $ writeEtags destFile (parseEtags (decUTF8Safe' _stdErr))
- ExitFailure i'
- | i' == 8
- , Just _ <- find (T.pack "304 Not Modified" `T.isInfixOf`) . T.lines . decUTF8Safe' $ _stdErr
- -> do
- lift $ logDebug "Not modified, skipping download"
- lift $ writeEtags destFile (parseEtags (decUTF8Safe' _stdErr))
- | otherwise -> throwE (NonZeroExit i' "wget" opts)
- else do
- let opts = o' ++ ["-O", destFileTemp , T.unpack uri']
- liftE $ lEM @_ @'[ProcessError] $ exec "wget" opts Nothing Nothing
- liftIO $ copyFile destFileTemp destFile
+ then pure $ curlEtagsDL o'
+ else pure $ curlDL o'
+ Wget -> do
+ o' <- liftIO getWgetOpts
+ if etags
+ then pure $ wgetEtagsDL o'
+ else pure $ wgetDL o'
#if defined(INTERNAL_DOWNLOADER)
Internal -> do
- (https, host, fullPath, port) <- liftE $ uriToQuadruple uri
- if etags
- then do
- metag <- lift $ readETag destFile
- let addHeaders = maybe mempty (\etag -> M.fromList [ (mk . E.encodeUtf8 . T.pack $ "If-None-Match"
- , E.encodeUtf8 etag)]) metag
- liftE
- $ catchE @HTTPNotModified @'[DownloadFailed] @'[] (\(HTTPNotModified etag) -> lift $ writeEtags destFile (pure $ Just etag))
- $ do
- r <- downloadToFile https host fullPath port destFile addHeaders
- lift $ writeEtags destFile (pure $ decUTF8Safe <$> getHeader r "etag")
- else void $ liftE $ catchE @HTTPNotModified
- @'[DownloadFailed]
- (\e@(HTTPNotModified _) ->
- throwE @_ @'[DownloadFailed] (DownloadFailed (toVariantAt @0 e :: V '[HTTPNotModified])))
- $ downloadToFile https host fullPath port destFile mempty
+ if etags
+ then pure (\fp -> liftE . internalEtagsDL fp)
+ else pure (\fp -> liftE . internalDL fp)
#endif
liftE $ downloadAction baseDestFile uri
case (gpgUri, gpgSetting) of
(_, GPGNone) -> pure ()
(Just gpgUri', _) -> do
gpgDestFile <- liftE . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed $ getDestFile gpgUri' Nothing
liftE $ flip onException
(lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile gpgDestFile))
$ catchAllE @_ @'[GPGError, ProcessError, UnsupportedScheme, DownloadFailed] @'[GPGError]
(\e -> if gpgSetting == GPGStrict then throwE (GPGError e) else lift $ logWarn $ T.pack (prettyShow (GPGError e))
) $ do
o' <- liftIO getGpgOpts
lift $ logDebug $ "downloading: " <> (decUTF8Safe . serializeURIRef') gpgUri' <> " as file " <> T.pack gpgDestFile
liftE $ downloadAction gpgDestFile gpgUri'
lift $ logInfo $ "verifying signature of: " <> T.pack baseDestFile
let args = o' ++ ["--batch", "--verify", "--quiet", "--no-tty", gpgDestFile, baseDestFile]
cp <- lift $ executeOut "gpg" args Nothing
case cp of
CapturedProcess { _exitCode = ExitFailure i, _stdErr } -> do
lift $ logDebug $ decUTF8Safe' _stdErr
throwE (GPGError @'[ProcessError] (V (NonZeroExit i "gpg" args)))
CapturedProcess { _stdErr } -> lift $ logDebug $ decUTF8Safe' _stdErr
_ -> pure ()
- forM_ eDigest (liftE . flip checkDigest destFile)
- pure destFile
+ forM_ eDigest (liftE . flip checkDigest baseDestFile)
+ pure baseDestFile
curlDL :: (MonadCatch m, MonadMask m, MonadIO m) => [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
liftE $ lEM @_ @'[ProcessError] $ exec "curl"
(o' ++ ["-fL", "-o", destFileTemp, T.unpack uri']) Nothing Nothing
liftIO $ renameFile destFileTemp destFile
curlEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
=> [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlEtagsDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
dh <- liftIO $ emptySystemTempFile "curl-header"
flip finally (try @_ @SomeException $ rmFile dh) $
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
metag <- lift $ readETag destFile
liftE $ lEM @_ @'[ProcessError] $ exec "curl"
(o' ++ (if etags then ["--dump-header", dh] else [])
++ maybe [] (\t -> ["-H", "If-None-Match: " <> T.unpack t]) metag
++ ["-fL", "-o", destFileTemp, T.unpack uri']) Nothing Nothing
headers <- liftIO $ T.readFile dh
-- this nonsense is necessary, because some older versions of curl would overwrite
-- the destination file when 304 is returned
case fmap T.words . listToMaybe . fmap T.strip . T.lines . getLastHeader $ headers of
Just (http':sc:_)
| sc == "304"
, T.pack "HTTP" `T.isPrefixOf` http' -> lift $ logDebug "Status code was 304, not overwriting"
| T.pack "HTTP" `T.isPrefixOf` http' -> do
lift $ logDebug $ "Status code was " <> sc <> ", overwriting"
liftIO $ renameFile destFileTemp destFile
_ -> liftE $ throwE @_ @'[DownloadFailed] (DownloadFailed (toVariantAt @0 (MalformedHeaders headers)
:: V '[MalformedHeaders]))
lift $ writeEtags destFile (parseEtags headers)
wgetDL :: (MonadCatch m, MonadMask m, MonadIO m) => [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
wgetDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
let opts = o' ++ ["-O", destFileTemp , T.unpack uri']
liftE $ lEM @_ @'[ProcessError] $ exec "wget" opts Nothing Nothing
liftIO $ renameFile destFileTemp destFile
wgetEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
=> [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
wgetEtagsDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
metag <- lift $ readETag destFile
let opts = o' ++ maybe [] (\t -> ["--header", "If-None-Match: " <> T.unpack t]) metag
++ ["-q", "-S", "-O", destFileTemp , T.unpack uri']
CapturedProcess {_exitCode, _stdErr} <- lift $ executeOut "wget" opts Nothing
case _exitCode of
ExitSuccess -> do
liftIO $ renameFile destFileTemp destFile
lift $ writeEtags destFile (parseEtags (decUTF8Safe' _stdErr))
ExitFailure i'
| i' == 8
, Just _ <- find (T.pack "304 Not Modified" `T.isInfixOf`) . T.lines . decUTF8Safe' $ _stdErr
-> do
lift $ logDebug "Not modified, skipping download"
lift $ writeEtags destFile (parseEtags (decUTF8Safe' _stdErr))
| otherwise -> throwE (NonZeroExit i' "wget" opts)
#if defined(INTERNAL_DOWNLOADER)
internalDL :: (MonadCatch m, MonadMask m, MonadIO m)
=> FilePath -> URI -> Excepts '[DownloadFailed, UnsupportedScheme] m ()
internalDL destFile uri' = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
(https, host, fullPath, port) <- liftE $ uriToQuadruple uri'
void $ liftE $ catchE @HTTPNotModified
@'[DownloadFailed]
(\e@(HTTPNotModified _) ->
throwE @_ @'[DownloadFailed] (DownloadFailed (toVariantAt @0 e :: V '[HTTPNotModified])))
$ downloadToFile https host fullPath port destFileTemp mempty
liftIO $ renameFile destFileTemp destFile
internalEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
=> FilePath -> URI -> Excepts '[DownloadFailed, UnsupportedScheme] m ()
internalEtagsDL destFile uri' = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
(https, host, fullPath, port) <- liftE $ uriToQuadruple uri'
metag <- lift $ readETag destFile
let addHeaders = maybe mempty (\etag -> M.fromList [ (mk . E.encodeUtf8 . T.pack $ "If-None-Match"
, E.encodeUtf8 etag)]) metag
liftE
$ catchE @HTTPNotModified @'[DownloadFailed] @'[] (\(HTTPNotModified etag) -> lift $ writeEtags destFile (pure $ Just etag))
$ do
r <- downloadToFile https host fullPath port destFileTemp addHeaders
liftIO $ renameFile destFileTemp destFile
lift $ writeEtags destFile (pure $ decUTF8Safe <$> getHeader r "etag")
#endif
-- Manage to find a file we can write the body into.
- getDestFile :: Monad m => Excepts '[NoUrlBase] m FilePath
- getDestFile =
- case mfn of
+ getDestFile :: Monad m => URI -> Maybe FilePath -> Excepts '[NoUrlBase] m FilePath
+ getDestFile uri' mfn' =
+ let path = view pathL' uri'
+ in case mfn' of
Just fn -> pure (dest </> fn)
Nothing
| let urlBase = T.unpack (decUTF8Safe (urlBaseName path))
, not (null urlBase) -> pure (dest </> urlBase)
-- TODO: remove this once we use hpath again
- | otherwise -> throwE $ NoUrlBase uri'
+ | otherwise -> throwE $ NoUrlBase (decUTF8Safe . serializeURIRef' $ uri')
- path = view pathL' uri
- uri' = decUTF8Safe (serializeURIRef' uri)

parseEtags :: (MonadReader env m, HasLog env, MonadIO m, MonadThrow m) => T.Text -> m (Maybe T.Text)
parseEtags stderr = do
@@ -509,14 +574,14 @@ downloadCached :: ( MonadReader env m
)
=> DownloadInfo
-> Maybe FilePath -- ^ optional filename
- -> Excepts '[DigestError , DownloadFailed] m FilePath
+ -> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
downloadCached dli mfn = do
Settings{ cache } <- lift getSettings
case cache of
True -> downloadCached' dli mfn Nothing
False -> do
tmp <- lift withGHCupTmpDir
- liftE $ download (_dlUri dli) (Just (_dlHash dli)) tmp mfn False
+ liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmp mfn False
downloadCached' :: ( MonadReader env m
@@ -531,7 +596,7 @@ downloadCached' :: ( MonadReader env m
=> DownloadInfo
-> Maybe FilePath -- ^ optional filename
-> Maybe FilePath -- ^ optional destination dir (default: cacheDir)
- -> Excepts '[DigestError , DownloadFailed] m FilePath
+ -> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
downloadCached' dli mfn mDestDir = do
Dirs { cacheDir } <- lift getDirs
let destDir = fromMaybe cacheDir mDestDir
@@ -542,7 +607,7 @@ downloadCached' dli mfn mDestDir = do
| fileExists -> do
liftE $ checkDigest (view dlHash dli) cachfile
pure cachfile
- | otherwise -> liftE $ download (_dlUri dli) (Just (_dlHash dli)) destDir mfn False
+ | otherwise -> liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) destDir mfn False
@@ -589,6 +654,12 @@ getWgetOpts =
Just r -> pure $ splitOn " " r
Nothing -> pure []

-- | Get additional gpg args from env. This is an undocumented option.
getGpgOpts :: IO [String]
getGpgOpts =
lookupEnv "GHCUP_GPG_OPTS" >>= \case
Just r -> pure $ splitOn " " r
Nothing -> pure []

-- | Get the url base name.
--
@@ -610,3 +681,7 @@ urlBaseName = snd . B.breakEnd (== _slash) . urlDecode False
-- "HTTP/1.1 304 Not Modified\n"
getLastHeader :: T.Text -> T.Text
getLastHeader = T.unlines . lastDef [] . filter (\x -> not (null x)) . splitOn [""] . fmap T.stripEnd . T.lines

tmpFile :: FilePath -> FilePath
tmpFile = (<.> "tmp")

@@ -195,6 +195,14 @@ instance Pretty DigestError where
pPrint (DigestError currentDigest expectedDigest) =
text "Digest error: expected" <+> text (T.unpack expectedDigest) <+> text "but got" <+> pPrint currentDigest

-- | GPG signature verification failed.
data GPGError = forall xs . (ToVariantMaybe DownloadFailed xs, PopVariant DownloadFailed xs, Show (V xs), Pretty (V xs)) => GPGError (V xs)

deriving instance Show GPGError

instance Pretty GPGError where
pPrint (GPGError reason) = text "GPG verify failed:" <+> pPrint reason

-- | Unexpected HTTP status.
data HTTPStatusError = HTTPStatusError Int (M.Map (CI ByteString) ByteString)
deriving Show

@@ -303,11 +303,12 @@ data UserSettings = UserSettings
, uKeyBindings :: Maybe UserKeyBindings
, uUrlSource :: Maybe URLSource
, uNoNetwork :: Maybe Bool
, uGPGSetting :: Maybe GPGSetting
}
deriving (Show, GHC.Generic)

defaultUserSettings :: UserSettings
- defaultUserSettings = UserSettings Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
+ defaultUserSettings = UserSettings Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing

fromSettings :: Settings -> Maybe KeyBindings -> UserSettings
fromSettings Settings{..} Nothing =
@@ -320,6 +321,7 @@ fromSettings Settings{..} Nothing =
, uNoNetwork = Just noNetwork
, uKeyBindings = Nothing
, uUrlSource = Just urlSource
, uGPGSetting = Just gpgSetting
}
fromSettings Settings{..} (Just KeyBindings{..}) =
let ukb = UserKeyBindings
@@ -342,6 +344,7 @@ fromSettings Settings{..} (Just KeyBindings{..}) =
, uNoNetwork = Just noNetwork
, uKeyBindings = Just ukb
, uUrlSource = Just urlSource
, uGPGSetting = Just gpgSetting
}

data UserKeyBindings = UserKeyBindings
@@ -415,6 +418,7 @@ data Settings = Settings
, verbose :: Bool
, urlSource :: URLSource
, noNetwork :: Bool
, gpgSetting :: GPGSetting
}
deriving (Show, GHC.Generic)
@@ -448,6 +452,13 @@ data Downloader = Curl

instance NFData Downloader

data GPGSetting = GPGStrict
| GPGLax
| GPGNone
deriving (Eq, Show, Ord, GHC.Generic)

instance NFData GPGSetting

data DebugInfo = DebugInfo
{ diBaseDir :: FilePath
, diBinDir :: FilePath

@@ -54,6 +54,7 @@ deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Tool
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''GlobalTool
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''KeepDirs
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Downloader
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''GPGSetting

instance ToJSON Tag where
toJSON Latest = String "Latest"

@@ -1031,7 +1031,7 @@ ensureGlobalTools :: ( MonadMask m
, MonadUnliftIO m
, MonadFail m
)
- => Excepts '[DigestError , DownloadFailed, NoDownload] m ()
+ => Excepts '[GPGError, DigestError , DownloadFailed, NoDownload] m ()
ensureGlobalTools = do
#if defined(IS_WINDOWS)
(GHCupInfo _ _ gTools) <- lift getGHCupInfo
@@ -1043,8 +1043,8 @@ ensureGlobalTools = do
lift $ logWarn "Digest doesn't match, redownloading gs.exe..."
lift $ logDebug ("rm -f " <> T.pack (cacheDir dirs </> "gs.exe"))
lift $ hideError doesNotExistErrorType $ recycleFile (cacheDir dirs </> "gs.exe")
- liftE @'[DigestError , DownloadFailed] $ dl
- ) `catchE` (liftE @'[DigestError , DownloadFailed] dl)
+ liftE @'[GPGError, DigestError , DownloadFailed] $ dl
+ ) `catchE` (liftE @'[GPGError, DigestError , DownloadFailed] dl)
pure ()
#else
pure ()