Implement GPG verification wrt #236

parent 6f61b5dbef
commit c431c0ae00

README.md (30 changed lines)
@@ -18,6 +18,7 @@ Similar in scope to [rustup](https://github.com/rust-lang-nursery/rustup.rs), [p
* [Vim integration](#vim-integration)
* [Usage](#usage)
* [Configuration](#configuration)
* [GPG verification](#gpg-verification)
* [Manpages](#manpages)
* [Shell-completion](#shell-completion)
* [Compiling GHC from source](#compiling-ghc-from-source)

@@ -146,6 +147,34 @@ explaining all possible configurations can be found in this repo: [config.yaml](

Partial configuration is fine. Command line options always override the config file settings.

### GPG verification

GHCup supports verifying the GPG signature of the metadata file. The metadata file in turn contains SHA256 hashes of all downloads, so this is cryptographically secure.

First, obtain the gpg key:

```sh
gpg --batch --keyserver keys.openpgp.org --recv-keys 7784930957807690A66EBDBE3786C5262ECB4A3F
```

Then verify the gpg key in one of these ways:

1. find out where I live and visit me to do offline key signing
2. figure out my mobile phone number and call me to verify the fingerprint
3. more boring: contact me on Libera IRC (`maerwald`) and verify the fingerprint
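
To compare the fingerprint locally, plain gpg can print it (an illustrative command, not part of ghcup itself):

```sh
# show the fingerprint of the key imported above for manual comparison
gpg --fingerprint 7784930957807690A66EBDBE3786C5262ECB4A3F
```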

Once you've verified the key, you have to figure out if you trust me.

If you trust me, then you can configure gpg in `~/.ghcup/config.yaml`:

```yml
gpg-setting: GPGLax # GPGStrict | GPGLax | GPGNone
```

In `GPGStrict` mode, ghcup will fail if verification fails. In `GPGLax` mode it will just print a warning.
You can also pass the mode via `ghcup --gpg <strict|lax|none>`.
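
For example (an illustrative invocation; the global flag works the same way for any subcommand):

```sh
# fail hard when the signature of the metadata file cannot be verified
ghcup --gpg strict upgrade

# only warn on verification failure
ghcup --gpg lax install ghc
```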

### Manpages

For man pages to work you need [man-db](http://man-db.nongnu.org/) as your `man` provider, then issue `man ghc`. Manpages only work for the currently set ghc.

@@ -206,6 +235,7 @@ This is the complete list of env variables that change GHCup behavior:

* `GHCUP_INSTALL_BASE_PREFIX`: the base of ghcup (default: `$HOME`)
* `GHCUP_CURL_OPTS`: additional options that can be passed to curl
* `GHCUP_WGET_OPTS`: additional options that can be passed to wget
* `GHCUP_GPG_OPTS`: additional options that can be passed to gpg (see the example after this list)
* `GHCUP_SKIP_UPDATE_CHECK`: Skip the (possibly annoying) update check when you run a command
* `CC`/`LD` etc.: full environment is passed to the build system when compiling GHC via GHCup
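
As a sketch of how `GHCUP_GPG_OPTS` composes with the rest (hypothetical values; each variable is split on spaces and passed as extra arguments to the respective tool):

```sh
# use a dedicated keyring when ghcup invokes gpg for signature checks
GHCUP_GPG_OPTS="--no-default-keyring --keyring $HOME/.ghcup/ghcup.kbx" \
  ghcup --gpg strict install cabal
```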

@@ -119,7 +119,7 @@ main = do
, rawOutter = \_ -> pure ()
}
dirs <- liftIO getAllDirs
let leanAppstate = LeanAppState (Settings True False Never Curl True GHCupURL False) dirs defaultKeyBindings loggerConfig
let leanAppstate = LeanAppState (Settings True False Never Curl True GHCupURL False GPGNone) dirs defaultKeyBindings loggerConfig

pfreq <- (
flip runReaderT leanAppstate . runE @'[NoCompatiblePlatform, NoCompatibleArch, DistroNotFound] $ platformRequest
@@ -129,7 +129,7 @@ main = do
flip runReaderT leanAppstate $ logError $ T.pack $ prettyShow e
liftIO $ exitWith (ExitFailure 2)

let appstate = AppState (Settings True False Never Curl True GHCupURL False) dirs defaultKeyBindings (GHCupInfo mempty mempty mempty) pfreq loggerConfig
let appstate = AppState (Settings True False Never Curl True GHCupURL False GPGNone) dirs defaultKeyBindings (GHCupInfo mempty mempty mempty) pfreq loggerConfig

_ <- customExecParser (prefs showHelpOnError) (info (opts <**> helper) idm)
>>= \Options {..} -> case optCommand of
@@ -229,6 +229,7 @@ validateTarballs (TarballFilter etool versionRegex) dls gt = do
downloadAll ref dli = do
r <- runResourceT
. runE @'[DigestError
, GPGError
, DownloadFailed
, UnknownArchive
, ArchiveResult
@@ -237,7 +238,7 @@ validateTarballs (TarballFilter etool versionRegex) dls gt = do
case etool of
Right (Just GHCup) -> do
tmpUnpack <- lift mkGhcupTmpDir
_ <- liftE $ download (_dlUri dli) (Just (_dlHash dli)) tmpUnpack Nothing False
_ <- liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmpUnpack Nothing False
pure Nothing
Right _ -> do
p <- liftE $ downloadCached dli Nothing
@@ -247,7 +248,7 @@ validateTarballs (TarballFilter etool versionRegex) dls gt = do
$ p
Left ShimGen -> do
tmpUnpack <- lift mkGhcupTmpDir
_ <- liftE $ download (_dlUri dli) (Just (_dlHash dli)) tmpUnpack Nothing False
_ <- liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmpUnpack Nothing False
pure Nothing
case r of
VRight (Just basePath) -> do
@@ -429,6 +429,7 @@ install' _ (_, ListResult {..}) = do
, BuildFailed
, TagNotFound
, DigestError
, GPGError
, DownloadFailed
, DirNotEmpty
, NoUpdate
@@ -547,6 +548,7 @@ settings' = unsafePerformIO $ do
, verbose = False
, urlSource = GHCupURL
, noNetwork = False
, gpgSetting = GPGNone
, ..
})
dirs
@@ -591,7 +593,7 @@ getGHCupInfo = do

r <-
flip runReaderT settings
. runE @'[JSONError , DownloadFailed , FileDoesNotExistError]
. runE @'[DigestError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
$ liftE
$ getDownloadsF
@@ -87,15 +87,16 @@ import qualified Text.Megaparsec.Char as MPC
data Options = Options
{
-- global options
optVerbose :: Maybe Bool
, optCache :: Maybe Bool
, optUrlSource :: Maybe URI
, optNoVerify :: Maybe Bool
, optKeepDirs :: Maybe KeepDirs
optVerbose :: Maybe Bool
, optCache :: Maybe Bool
, optUrlSource :: Maybe URI
, optNoVerify :: Maybe Bool
, optKeepDirs :: Maybe KeepDirs
, optsDownloader :: Maybe Downloader
, optNoNetwork :: Maybe Bool
, optNoNetwork :: Maybe Bool
, optGpg :: Maybe GPGSetting
-- commands
, optCommand :: Command
, optCommand :: Command
}

data Command
@@ -310,6 +311,13 @@ opts =
<> hidden
))
<*> invertableSwitch "offline" 'o' False (help "Don't do any network calls, trying cached assets and failing if missing.")
<*> optional (option
(eitherReader gpgParser)
( long "gpg"
<> metavar "<strict|lax|none>"
<> help
"GPG verification (default: none)"
))
<*> com
where
parseUri s' =
@@ -1145,7 +1153,7 @@ tagCompleter tool add = listIOCompleter $ do
, rawOutter = mempty
}
let appState = LeanAppState
(Settings True False Never Curl False GHCupURL True)
(Settings True False Never Curl False GHCupURL True GPGNone)
dirs'
defaultKeyBindings
loggerConfig
@@ -1170,7 +1178,7 @@ versionCompleter criteria tool = listIOCompleter $ do
, colorOutter = mempty
, rawOutter = mempty
}
let settings = Settings True False Never Curl False GHCupURL True
let settings = Settings True False Never Curl False GHCupURL True GPGNone
let leanAppState = LeanAppState
settings
dirs'
@@ -1257,6 +1265,13 @@ downloaderParser s' | t == T.pack "curl" = Right Curl
| otherwise = Left ("Unknown downloader value: " <> s')
where t = T.toLower (T.pack s')

gpgParser :: String -> Either String GPGSetting
gpgParser s' | t == T.pack "strict" = Right GPGStrict
| t == T.pack "lax" = Right GPGLax
| t == T.pack "none" = Right GPGNone
| otherwise = Left ("Unknown gpg setting value: " <> s')
where t = T.toLower (T.pack s')

platformParser :: String -> Either String PlatformRequest
platformParser s' = case MP.parse (platformP <* MP.eof) "" (T.pack s') of
@@ -1336,6 +1351,7 @@ toSettings options = do
keyBindings = maybe defaultKeyBindings mergeKeys uKeyBindings
urlSource = maybe (fromMaybe GHCupURL uUrlSource) OwnSource optUrlSource
noNetwork = fromMaybe (fromMaybe False uNoNetwork) optNoNetwork
gpgSetting = fromMaybe (fromMaybe GPGNone uGPGSetting) optGpg
in (Settings {..}, keyBindings)
#if defined(INTERNAL_DOWNLOADER)
defaultDownloader = Internal
@@ -1371,7 +1387,8 @@ updateSettings config settings = do
verbose' = fromMaybe verbose uVerbose
urlSource' = fromMaybe urlSource uUrlSource
noNetwork' = fromMaybe noNetwork uNoNetwork
in Settings cache' noVerify' keepDirs' downloader' verbose' urlSource' noNetwork'
gpgSetting' = fromMaybe gpgSetting uGPGSetting
in Settings cache' noVerify' keepDirs' downloader' verbose' urlSource' noNetwork' gpgSetting'

upgradeOptsP :: Parser UpgradeOpts
upgradeOptsP =
@@ -1396,7 +1413,7 @@ describe_result = $( LitE . StringL <$>
runIO (do
CapturedProcess{..} <- do
dirs <- liftIO getAllDirs
let settings = AppState (Settings True False Never Curl False GHCupURL False)
let settings = AppState (Settings True False Never Curl False GHCupURL False GPGNone)
dirs
defaultKeyBindings
flip runReaderT settings $ executeOut "git" ["describe"] Nothing
@@ -1506,7 +1523,7 @@ Report bugs at <https://gitlab.haskell.org/haskell/ghcup-hs/issues>|]

ghcupInfo <-
( flip runReaderT leanAppstate
. runE @'[JSONError , DownloadFailed, FileDoesNotExistError]
. runE @'[DigestError, GPGError, JSONError , DownloadFailed, FileDoesNotExistError]
$ liftE
$ getDownloadsF
)
@@ -1579,6 +1596,7 @@ Report bugs at <https://gitlab.haskell.org/haskell/ghcup-hs/issues>|]
, BuildFailed
, TagNotFound
, DigestError
, GPGError
, DownloadFailed
, TarDirDoesNotExist
, NextVerNotFound
@@ -1669,6 +1687,7 @@ Report bugs at <https://gitlab.haskell.org/haskell/ghcup-hs/issues>|]
@'[ AlreadyInstalled
, BuildFailed
, DigestError
, GPGError
, DownloadFailed
, GHCupSetError
, NoDownload
@@ -1707,6 +1726,7 @@ Report bugs at <https://gitlab.haskell.org/haskell/ghcup-hs/issues>|]
. runResourceT
. runE
@'[ DigestError
, GPGError
, NoDownload
, NoUpdate
, FileDoesNotExistError
@@ -1723,6 +1743,7 @@ Report bugs at <https://gitlab.haskell.org/haskell/ghcup-hs/issues>|]
, NoToolVersionSet
, NoDownload
, DigestError
, GPGError
, DownloadFailed
, JSONError
, FileDoesNotExistError
@@ -8,6 +8,10 @@ verbose: False
keep-dirs: Errors # Always | Never | Errors
# Which downloader to use
downloader: Curl # Curl | Wget | Internal
# whether to run in offline mode
no-network: False
# whether/how to do gpg verification
gpg-setting: GPGNone # GPGStrict | GPGLax | GPGNone

# TUI key bindings,
# see https://hackage.haskell.org/package/vty-5.31/docs/Graphics-Vty-Input-Events.html#t:Key

lib/GHCup.hs (15 changed lines)
@@ -121,6 +121,7 @@ fetchToolBindist :: ( MonadFail m
-> Maybe FilePath
-> Excepts
'[ DigestError
, GPGError
, DownloadFailed
, NoDownload
]
@@ -148,6 +149,7 @@ fetchGHCSrc :: ( MonadFail m
-> Maybe FilePath
-> Excepts
'[ DigestError
, GPGError
, DownloadFailed
, NoDownload
]
@@ -189,6 +191,7 @@ installGHCBindist :: ( MonadFail m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -384,6 +387,7 @@ installGHCBin :: ( MonadFail m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -421,6 +425,7 @@ installCabalBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -526,6 +531,7 @@ installCabalBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -563,6 +569,7 @@ installHLSBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -693,6 +700,7 @@ installHLSBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -731,6 +739,7 @@ installStackBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -768,6 +777,7 @@ installStackBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, GPGError
, DownloadFailed
, NoDownload
, NotInstalled
@@ -1862,6 +1872,7 @@ compileGHC :: ( MonadMask m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, GPGError
, DownloadFailed
, GHCupSetError
, NoDownload
@@ -2309,6 +2320,8 @@ upgradeGHCup :: ( MonadMask m
-> Excepts
'[ CopyError
, DigestError
, GPGError
, GPGError
, DownloadFailed
, NoDownload
, NoUpdate
@@ -2325,7 +2338,7 @@ upgradeGHCup mtarget force' = do
dli <- liftE $ getDownloadInfo GHCup latestVer
tmp <- lift withGHCupTmpDir
let fn = "ghcup" <> exeExt
p <- liftE $ download (_dlUri dli) (Just (_dlHash dli)) tmp (Just fn) False
p <- liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmp (Just fn) False
let destDir = takeDirectory destFile
destFile = fromMaybe (binDir </> fn) mtarget
lift $ logDebug $ "mkdir -p " <> T.pack destDir
@@ -114,7 +114,7 @@ getDownloadsF :: ( FromJSONKey Tool
, MonadMask m
)
=> Excepts
'[JSONError , DownloadFailed , FileDoesNotExistError]
'[DigestError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
m
GHCupInfo
getDownloadsF = do
@@ -165,7 +165,7 @@ getBase :: ( MonadReader env m
, MonadMask m
)
=> URI
-> Excepts '[JSONError, FileDoesNotExistError] m GHCupInfo
-> Excepts '[GPGError, DigestError, JSONError, FileDoesNotExistError] m GHCupInfo
getBase uri = do
Settings { noNetwork, downloader } <- lift getSettings

@@ -176,7 +176,6 @@ getBase uri = do
then pure Nothing
else handleIO (\e -> lift (warnCache (displayException e) downloader) >> pure Nothing)
. catchE @_ @_ @'[] (\e@(DownloadFailed _) -> lift (warnCache (prettyShow e) downloader) >> pure Nothing)
. reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed
. fmap Just
. smartDl
$ uri
@@ -234,6 +233,7 @@ getBase uri = do
-> Excepts
'[ DownloadFailed
, DigestError
, GPGError
]
m1
FilePath
@@ -245,7 +245,7 @@ getBase uri = do
Dirs { cacheDir } <- lift getDirs

-- for local files, let's short-circuit and ignore access time
if | scheme == "file" -> liftE $ download uri' Nothing cacheDir Nothing True
if | scheme == "file" -> liftE $ download uri' Nothing Nothing cacheDir Nothing True
| e -> do
accessTime <- liftIO $ getAccessTime json_file

@@ -258,7 +258,7 @@ getBase uri = do
where
dlWithMod modTime json_file = do
let (dir, fn) = splitFileName json_file
f <- liftE $ download uri' Nothing dir (Just fn) True
f <- liftE $ download uri' (Just $ over pathL' (<> ".sig") uri') Nothing dir (Just fn) True
liftIO $ setModificationTime f modTime
liftIO $ setAccessTime f modTime
pure f
@@ -322,16 +322,17 @@ download :: ( MonadReader env m
, MonadIO m
)
=> URI
-> Maybe URI -- ^ URI for gpg sig
-> Maybe T.Text -- ^ expected hash
-> FilePath -- ^ destination dir (ignored for file:// scheme)
-> Maybe FilePath -- ^ optional filename
-> Bool -- ^ whether to read and write etags
-> Excepts '[DigestError , DownloadFailed] m FilePath
download uri eDigest dest mfn etags
-> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
download uri gpgUri eDigest dest mfn etags
| scheme == "https" = dl
| scheme == "http" = dl
| scheme == "file" = do
let destFile' = T.unpack . decUTF8Safe $ path
let destFile' = T.unpack . decUTF8Safe $ view pathL' uri
lift $ logDebug $ "using local file: " <> T.pack destFile'
forM_ eDigest (liftE . flip checkDigest destFile')
pure destFile'
@@ -340,115 +341,179 @@ download uri eDigest dest mfn etags
where
scheme = view (uriSchemeL' % schemeBSL') uri
dl = do
destFile <- liftE . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed $ getDestFile
lift $ logInfo $ "downloading: " <> uri' <> " as file " <> T.pack destFile
baseDestFile <- liftE . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed $ getDestFile uri mfn
lift $ logInfo $ "downloading: " <> (decUTF8Safe . serializeURIRef') uri <> " as file " <> T.pack baseDestFile

-- destination dir must exist
liftIO $ createDirRecursive' dest

-- download
flip onException
(lift $ hideError doesNotExistErrorType $ recycleFile destFile)
$ catchAllE @_ @'[ProcessError, DownloadFailed, UnsupportedScheme]
(\e ->
lift (hideError doesNotExistErrorType $ recycleFile destFile)
>> (throwE . DownloadFailed $ e)
(lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile baseDestFile))
$ catchAllE @_ @'[GPGError, ProcessError, DownloadFailed, UnsupportedScheme, DigestError] @'[DigestError, DownloadFailed, GPGError]
(\e' -> do
lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile baseDestFile)
case e' of
V e@GPGError {} -> throwE e
V e@DigestError {} -> throwE e
_ -> throwE (DownloadFailed e')
) $ do
Settings{ downloader, noNetwork } <- lift getSettings
Settings{ downloader, noNetwork, gpgSetting } <- lift getSettings
when noNetwork $ throwE (DownloadFailed (V NoNetwork :: V '[NoNetwork]))
case downloader of
Curl -> do
o' <- liftIO getCurlOpts
if etags
then do
dh <- liftIO $ emptySystemTempFile "curl-header"
flip finally (try @_ @SomeException $ rmFile dh) $
flip finally (try @_ @SomeException $ rmFile (destFile <.> "tmp")) $ do
metag <- lift $ readETag destFile
liftE $ lEM @_ @'[ProcessError] $ exec "curl"
(o' ++ (if etags then ["--dump-header", dh] else [])
++ maybe [] (\t -> ["-H", "If-None-Match: " <> T.unpack t]) metag
++ ["-fL", "-o", destFile <.> "tmp", T.unpack uri']) Nothing Nothing
headers <- liftIO $ T.readFile dh

-- this nonsense is necessary, because some older versions of curl would overwrite
-- the destination file when 304 is returned
case fmap T.words . listToMaybe . fmap T.strip . T.lines . getLastHeader $ headers of
Just (http':sc:_)
| sc == "304"
, T.pack "HTTP" `T.isPrefixOf` http' -> lift $ logDebug "Status code was 304, not overwriting"
| T.pack "HTTP" `T.isPrefixOf` http' -> do
lift $ logDebug $ "Status code was " <> sc <> ", overwriting"
liftIO $ copyFile (destFile <.> "tmp") destFile
_ -> liftE $ throwE @_ @'[DownloadFailed] (DownloadFailed (toVariantAt @0 (MalformedHeaders headers)
:: V '[MalformedHeaders]))

lift $ writeEtags destFile (parseEtags headers)
else
liftE $ lEM @_ @'[ProcessError] $ exec "curl"
(o' ++ ["-fL", "-o", destFile, T.unpack uri']) Nothing Nothing
Wget -> do
destFileTemp <- liftIO $ emptySystemTempFile "wget-tmp"
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
o' <- liftIO getWgetOpts
if etags
then do
metag <- lift $ readETag destFile
let opts = o' ++ maybe [] (\t -> ["--header", "If-None-Match: " <> T.unpack t]) metag
++ ["-q", "-S", "-O", destFileTemp , T.unpack uri']
CapturedProcess {_exitCode, _stdErr} <- lift $ executeOut "wget" opts Nothing
case _exitCode of
ExitSuccess -> do
liftIO $ copyFile destFileTemp destFile
lift $ writeEtags destFile (parseEtags (decUTF8Safe' _stdErr))
ExitFailure i'
| i' == 8
, Just _ <- find (T.pack "304 Not Modified" `T.isInfixOf`) . T.lines . decUTF8Safe' $ _stdErr
-> do
lift $ logDebug "Not modified, skipping download"
lift $ writeEtags destFile (parseEtags (decUTF8Safe' _stdErr))
| otherwise -> throwE (NonZeroExit i' "wget" opts)
else do
let opts = o' ++ ["-O", destFileTemp , T.unpack uri']
liftE $ lEM @_ @'[ProcessError] $ exec "wget" opts Nothing Nothing
liftIO $ copyFile destFileTemp destFile
downloadAction <- case downloader of
Curl -> do
o' <- liftIO getCurlOpts
if etags
then pure $ curlEtagsDL o'
else pure $ curlDL o'
Wget -> do
o' <- liftIO getWgetOpts
if etags
then pure $ wgetEtagsDL o'
else pure $ wgetDL o'
#if defined(INTERNAL_DOWNLOADER)
Internal -> do
(https, host, fullPath, port) <- liftE $ uriToQuadruple uri
if etags
then do
metag <- lift $ readETag destFile
let addHeaders = maybe mempty (\etag -> M.fromList [ (mk . E.encodeUtf8 . T.pack $ "If-None-Match"
, E.encodeUtf8 etag)]) metag
liftE
$ catchE @HTTPNotModified @'[DownloadFailed] @'[] (\(HTTPNotModified etag) -> lift $ writeEtags destFile (pure $ Just etag))
$ do
r <- downloadToFile https host fullPath port destFile addHeaders
lift $ writeEtags destFile (pure $ decUTF8Safe <$> getHeader r "etag")
else void $ liftE $ catchE @HTTPNotModified
@'[DownloadFailed]
(\e@(HTTPNotModified _) ->
throwE @_ @'[DownloadFailed] (DownloadFailed (toVariantAt @0 e :: V '[HTTPNotModified])))
$ downloadToFile https host fullPath port destFile mempty
Internal -> do
if etags
then pure (\fp -> liftE . internalEtagsDL fp)
else pure (\fp -> liftE . internalDL fp)
#endif
liftE $ downloadAction baseDestFile uri
case (gpgUri, gpgSetting) of
(_, GPGNone) -> pure ()
(Just gpgUri', _) -> do
gpgDestFile <- liftE . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed $ getDestFile gpgUri' Nothing
liftE $ flip onException
(lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile gpgDestFile))
$ catchAllE @_ @'[GPGError, ProcessError, UnsupportedScheme, DownloadFailed] @'[GPGError]
(\e -> if gpgSetting == GPGStrict then throwE (GPGError e) else lift $ logWarn $ T.pack (prettyShow (GPGError e))
) $ do
o' <- liftIO getGpgOpts
lift $ logDebug $ "downloading: " <> (decUTF8Safe . serializeURIRef') gpgUri' <> " as file " <> T.pack gpgDestFile
liftE $ downloadAction gpgDestFile gpgUri'
lift $ logInfo $ "verifying signature of: " <> T.pack baseDestFile
let args = o' ++ ["--batch", "--verify", "--quiet", "--no-tty", gpgDestFile, baseDestFile]
cp <- lift $ executeOut "gpg" args Nothing
case cp of
CapturedProcess { _exitCode = ExitFailure i, _stdErr } -> do
lift $ logDebug $ decUTF8Safe' _stdErr
throwE (GPGError @'[ProcessError] (V (NonZeroExit i "gpg" args)))
CapturedProcess { _stdErr } -> lift $ logDebug $ decUTF8Safe' _stdErr
_ -> pure ()

forM_ eDigest (liftE . flip checkDigest destFile)
pure destFile
forM_ eDigest (liftE . flip checkDigest baseDestFile)
pure baseDestFile
curlDL :: (MonadCatch m, MonadMask m, MonadIO m) => [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
liftE $ lEM @_ @'[ProcessError] $ exec "curl"
(o' ++ ["-fL", "-o", destFileTemp, T.unpack uri']) Nothing Nothing
liftIO $ renameFile destFileTemp destFile

curlEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
=> [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlEtagsDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
dh <- liftIO $ emptySystemTempFile "curl-header"
flip finally (try @_ @SomeException $ rmFile dh) $
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
metag <- lift $ readETag destFile
liftE $ lEM @_ @'[ProcessError] $ exec "curl"
(o' ++ (if etags then ["--dump-header", dh] else [])
++ maybe [] (\t -> ["-H", "If-None-Match: " <> T.unpack t]) metag
++ ["-fL", "-o", destFileTemp, T.unpack uri']) Nothing Nothing
headers <- liftIO $ T.readFile dh

-- this nonsense is necessary, because some older versions of curl would overwrite
-- the destination file when 304 is returned
case fmap T.words . listToMaybe . fmap T.strip . T.lines . getLastHeader $ headers of
Just (http':sc:_)
| sc == "304"
, T.pack "HTTP" `T.isPrefixOf` http' -> lift $ logDebug "Status code was 304, not overwriting"
| T.pack "HTTP" `T.isPrefixOf` http' -> do
lift $ logDebug $ "Status code was " <> sc <> ", overwriting"
liftIO $ renameFile destFileTemp destFile
_ -> liftE $ throwE @_ @'[DownloadFailed] (DownloadFailed (toVariantAt @0 (MalformedHeaders headers)
:: V '[MalformedHeaders]))

lift $ writeEtags destFile (parseEtags headers)
wgetDL :: (MonadCatch m, MonadMask m, MonadIO m) => [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
wgetDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
let opts = o' ++ ["-O", destFileTemp , T.unpack uri']
liftE $ lEM @_ @'[ProcessError] $ exec "wget" opts Nothing Nothing
liftIO $ renameFile destFileTemp destFile

wgetEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
=> [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
wgetEtagsDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
metag <- lift $ readETag destFile
let opts = o' ++ maybe [] (\t -> ["--header", "If-None-Match: " <> T.unpack t]) metag
++ ["-q", "-S", "-O", destFileTemp , T.unpack uri']
CapturedProcess {_exitCode, _stdErr} <- lift $ executeOut "wget" opts Nothing
case _exitCode of
ExitSuccess -> do
liftIO $ renameFile destFileTemp destFile
lift $ writeEtags destFile (parseEtags (decUTF8Safe' _stdErr))
ExitFailure i'
| i' == 8
, Just _ <- find (T.pack "304 Not Modified" `T.isInfixOf`) . T.lines . decUTF8Safe' $ _stdErr
-> do
lift $ logDebug "Not modified, skipping download"
lift $ writeEtags destFile (parseEtags (decUTF8Safe' _stdErr))
| otherwise -> throwE (NonZeroExit i' "wget" opts)
#if defined(INTERNAL_DOWNLOADER)
internalDL :: (MonadCatch m, MonadMask m, MonadIO m)
=> FilePath -> URI -> Excepts '[DownloadFailed, UnsupportedScheme] m ()
internalDL destFile uri' = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
(https, host, fullPath, port) <- liftE $ uriToQuadruple uri'
void $ liftE $ catchE @HTTPNotModified
@'[DownloadFailed]
(\e@(HTTPNotModified _) ->
throwE @_ @'[DownloadFailed] (DownloadFailed (toVariantAt @0 e :: V '[HTTPNotModified])))
$ downloadToFile https host fullPath port destFileTemp mempty
liftIO $ renameFile destFileTemp destFile

internalEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
=> FilePath -> URI -> Excepts '[DownloadFailed, UnsupportedScheme] m ()
internalEtagsDL destFile uri' = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
(https, host, fullPath, port) <- liftE $ uriToQuadruple uri'
metag <- lift $ readETag destFile
let addHeaders = maybe mempty (\etag -> M.fromList [ (mk . E.encodeUtf8 . T.pack $ "If-None-Match"
, E.encodeUtf8 etag)]) metag
liftE
$ catchE @HTTPNotModified @'[DownloadFailed] @'[] (\(HTTPNotModified etag) -> lift $ writeEtags destFile (pure $ Just etag))
$ do
r <- downloadToFile https host fullPath port destFileTemp addHeaders
liftIO $ renameFile destFileTemp destFile
lift $ writeEtags destFile (pure $ decUTF8Safe <$> getHeader r "etag")
#endif
-- Manage to find a file we can write the body into.
getDestFile :: Monad m => Excepts '[NoUrlBase] m FilePath
getDestFile =
case mfn of
getDestFile :: Monad m => URI -> Maybe FilePath -> Excepts '[NoUrlBase] m FilePath
getDestFile uri' mfn' =
let path = view pathL' uri'
in case mfn' of
Just fn -> pure (dest </> fn)
Nothing
| let urlBase = T.unpack (decUTF8Safe (urlBaseName path))
, not (null urlBase) -> pure (dest </> urlBase)
-- TODO: remove this once we use hpath again
| otherwise -> throwE $ NoUrlBase uri'

path = view pathL' uri
uri' = decUTF8Safe (serializeURIRef' uri)
| otherwise -> throwE $ NoUrlBase (decUTF8Safe . serializeURIRef' $ uri')

parseEtags :: (MonadReader env m, HasLog env, MonadIO m, MonadThrow m) => T.Text -> m (Maybe T.Text)
parseEtags stderr = do
@@ -509,14 +574,14 @@ downloadCached :: ( MonadReader env m
)
=> DownloadInfo
-> Maybe FilePath -- ^ optional filename
-> Excepts '[DigestError , DownloadFailed] m FilePath
-> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
downloadCached dli mfn = do
Settings{ cache } <- lift getSettings
case cache of
True -> downloadCached' dli mfn Nothing
False -> do
tmp <- lift withGHCupTmpDir
liftE $ download (_dlUri dli) (Just (_dlHash dli)) tmp mfn False
liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmp mfn False

downloadCached' :: ( MonadReader env m
@@ -531,7 +596,7 @@ downloadCached' :: ( MonadReader env m
=> DownloadInfo
-> Maybe FilePath -- ^ optional filename
-> Maybe FilePath -- ^ optional destination dir (default: cacheDir)
-> Excepts '[DigestError , DownloadFailed] m FilePath
-> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
downloadCached' dli mfn mDestDir = do
Dirs { cacheDir } <- lift getDirs
let destDir = fromMaybe cacheDir mDestDir
@@ -542,7 +607,7 @@ downloadCached' dli mfn mDestDir = do
| fileExists -> do
liftE $ checkDigest (view dlHash dli) cachfile
pure cachfile
| otherwise -> liftE $ download (_dlUri dli) (Just (_dlHash dli)) destDir mfn False
| otherwise -> liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) destDir mfn False
@@ -589,6 +654,12 @@ getWgetOpts =
Just r -> pure $ splitOn " " r
Nothing -> pure []

-- | Get additional gpg args from env. This is an undocumented option.
getGpgOpts :: IO [String]
getGpgOpts =
lookupEnv "GHCUP_GPG_OPTS" >>= \case
Just r -> pure $ splitOn " " r
Nothing -> pure []

-- | Get the url base name.
--
@@ -610,3 +681,7 @@ urlBaseName = snd . B.breakEnd (== _slash) . urlDecode False
-- "HTTP/1.1 304 Not Modified\n"
getLastHeader :: T.Text -> T.Text
getLastHeader = T.unlines . lastDef [] . filter (\x -> not (null x)) . splitOn [""] . fmap T.stripEnd . T.lines

tmpFile :: FilePath -> FilePath
tmpFile = (<.> "tmp")
@@ -195,6 +195,14 @@ instance Pretty DigestError where
pPrint (DigestError currentDigest expectedDigest) =
text "Digest error: expected" <+> text (T.unpack expectedDigest) <+> text "but got" <+> pPrint currentDigest

-- | GPG verification failed.
data GPGError = forall xs . (ToVariantMaybe DownloadFailed xs, PopVariant DownloadFailed xs, Show (V xs), Pretty (V xs)) => GPGError (V xs)

deriving instance Show GPGError

instance Pretty GPGError where
pPrint (GPGError reason) = text "GPG verify failed:" <+> pPrint reason

-- | Unexpected HTTP status.
data HTTPStatusError = HTTPStatusError Int (M.Map (CI ByteString) ByteString)
deriving Show
@@ -303,11 +303,12 @@ data UserSettings = UserSettings
, uKeyBindings :: Maybe UserKeyBindings
, uUrlSource :: Maybe URLSource
, uNoNetwork :: Maybe Bool
, uGPGSetting :: Maybe GPGSetting
}
deriving (Show, GHC.Generic)

defaultUserSettings :: UserSettings
defaultUserSettings = UserSettings Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
defaultUserSettings = UserSettings Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing

fromSettings :: Settings -> Maybe KeyBindings -> UserSettings
fromSettings Settings{..} Nothing =
@@ -320,6 +321,7 @@ fromSettings Settings{..} Nothing =
, uNoNetwork = Just noNetwork
, uKeyBindings = Nothing
, uUrlSource = Just urlSource
, uGPGSetting = Just gpgSetting
}
fromSettings Settings{..} (Just KeyBindings{..}) =
let ukb = UserKeyBindings
@@ -342,6 +344,7 @@ fromSettings Settings{..} (Just KeyBindings{..}) =
, uNoNetwork = Just noNetwork
, uKeyBindings = Just ukb
, uUrlSource = Just urlSource
, uGPGSetting = Just gpgSetting
}

data UserKeyBindings = UserKeyBindings
@@ -415,6 +418,7 @@ data Settings = Settings
, verbose :: Bool
, urlSource :: URLSource
, noNetwork :: Bool
, gpgSetting :: GPGSetting
}
deriving (Show, GHC.Generic)

@@ -448,6 +452,13 @@ data Downloader = Curl

instance NFData Downloader

data GPGSetting = GPGStrict
| GPGLax
| GPGNone
deriving (Eq, Show, Ord, GHC.Generic)

instance NFData GPGSetting

data DebugInfo = DebugInfo
{ diBaseDir :: FilePath
, diBinDir :: FilePath
@@ -54,6 +54,7 @@ deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Tool
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''GlobalTool
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''KeepDirs
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''Downloader
deriveJSON defaultOptions { fieldLabelModifier = removeLensFieldLabel } ''GPGSetting

instance ToJSON Tag where
toJSON Latest = String "Latest"
@@ -1031,7 +1031,7 @@ ensureGlobalTools :: ( MonadMask m
, MonadUnliftIO m
, MonadFail m
)
=> Excepts '[DigestError , DownloadFailed, NoDownload] m ()
=> Excepts '[GPGError, DigestError , DownloadFailed, NoDownload] m ()
ensureGlobalTools = do
#if defined(IS_WINDOWS)
(GHCupInfo _ _ gTools) <- lift getGHCupInfo
@@ -1043,8 +1043,8 @@ ensureGlobalTools = do
lift $ logWarn "Digest doesn't match, redownloading gs.exe..."
lift $ logDebug ("rm -f " <> T.pack (cacheDir dirs </> "gs.exe"))
lift $ hideError doesNotExistErrorType $ recycleFile (cacheDir dirs </> "gs.exe")
liftE @'[DigestError , DownloadFailed] $ dl
) `catchE` (liftE @'[DigestError , DownloadFailed] dl)
liftE @'[GPGError, DigestError , DownloadFailed] $ dl
) `catchE` (liftE @'[GPGError, DigestError , DownloadFailed] dl)
pure ()
#else
pure ()