From 35b6359c1b5f8b819ead72c34b45c84f50b52649 Mon Sep 17 00:00:00 2001
From: Julian Ospald
Date: Tue, 17 Mar 2020 18:40:25 +0100
Subject: [PATCH] Improve error handling

---
 lib/GHCup.hs          | 38 +++++++++++++++++++++++---------------
 lib/GHCup/Download.hs |  8 ++++++--
 2 files changed, 29 insertions(+), 17 deletions(-)

diff --git a/lib/GHCup.hs b/lib/GHCup.hs
index ec73d73..de6ddaa 100644
--- a/lib/GHCup.hs
+++ b/lib/GHCup.hs
@@ -108,14 +108,18 @@ installGHCBin bDls ver mpfReq = do
   ghcdir <- liftIO $ ghcupGHCDir ver

   -- the subdir of the archive where we do the work
-  let archiveSubdir = maybe tmpUnpack (tmpUnpack </>) (view dlSubdir dlinfo)
+  let workdir = maybe tmpUnpack (tmpUnpack </>) (view dlSubdir dlinfo)

-  catchAllE
-    (\es ->
-      liftIO (hideError doesNotExistErrorType $ deleteDirRecursive ghcdir)
-        >> throwE (BuildFailed archiveSubdir es)
-    )
-    $ installGHC' archiveSubdir ghcdir
+  -- Be careful about cleanup. We must catch both pure exceptions
+  -- as well as async ones.
+  flip onException
+       (liftIO $ hideError doesNotExistErrorType $ deleteDirRecursive ghcdir)
+    $ catchAllE
+        (\es -> do
+          liftIO (hideError doesNotExistErrorType $ deleteDirRecursive ghcdir)
+            >> throwE (BuildFailed workdir es)
+        )
+    $ installGHC' workdir ghcdir

   -- only clean up dir if the build succeeded
   liftIO $ deleteDirRecursive tmpUnpack
@@ -183,9 +187,9 @@ installCabalBin bDls ver mpfReq = do
   bindir <- liftIO ghcupBinDir

   -- the subdir of the archive where we do the work
-  let archiveSubdir = maybe tmpUnpack (tmpUnpack </>) (view dlSubdir dlinfo)
+  let workdir = maybe tmpUnpack (tmpUnpack </>) (view dlSubdir dlinfo)

-  liftE $ installCabal' archiveSubdir bindir
+  liftE $ installCabal' workdir bindir
   pure ()

  where
@@ -335,7 +339,7 @@ listVersions av lt criteria = case lt of
       fromSrc <- ghcSrcInstalled v
       pure ListResult { lVer = v, lTag = tags, lTool = t, .. }
     Cabal -> do
-      lSet <- fmap (== v) $ cabalSet
+      lSet <- fmap (== v) $ cabalSet
       let lInstalled = lSet
       pure ListResult { lVer = v, lTag = tags, lTool = t, fromSrc = False, .. }
     GHCup -> do
@@ -467,11 +471,15 @@ compileGHC dls tver bver jobs mbuildConfig = do
   let workdir = maybe id (flip (</>)) (view dlSubdir dlInfo) $ tmpUnpack
   ghcdir <- liftIO $ ghcupGHCDir tver

-  catchAllE
-    (\es ->
-      liftIO (hideError doesNotExistErrorType $ deleteDirRecursive ghcdir)
-        >> throwE (BuildFailed workdir es)
-    )
+  -- Be careful about cleanup. We must catch both pure exceptions
+  -- as well as async ones.
+  flip onException
+       (liftIO $ hideError doesNotExistErrorType $ deleteDirRecursive ghcdir)
+    $ catchAllE
+        (\es ->
+          liftIO (hideError doesNotExistErrorType $ deleteDirRecursive ghcdir)
+            >> throwE (BuildFailed workdir es)
+        )
     $ compile bghc ghcdir workdir
   markSrcBuilt ghcdir workdir

diff --git a/lib/GHCup/Download.hs b/lib/GHCup/Download.hs
index 9f44fee..7e15f4f 100644
--- a/lib/GHCup/Download.hs
+++ b/lib/GHCup/Download.hs
@@ -43,7 +43,7 @@ import Data.Time.Format
 import Data.Versions
 import GHC.IO.Exception
 import HPath
-import HPath.IO
+import HPath.IO as HIO
 import Haskus.Utils.Variant.Excepts
 import Network.Http.Client hiding ( URL )
 import OpenSSL.Digest
@@ -137,7 +137,11 @@ getDownloads urlSource = do
       let path = view pathL' uri'
       json_file <- (liftIO $ ghcupCacheDir)
         >>= \cacheDir -> (cacheDir </>) <$> urlBaseName path
-      e <- liftIO $ doesFileExist json_file
+      e <-
+        liftIO
+          $ HIO.handleIOError
+              (\e -> if isDoesNotExistError e then pure False else throwIO e)
+          $ doesFileExist json_file
       if e
         then do
           accessTime <-
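
Note on the cleanup pattern introduced above: 'catchAllE' only intercepts the checked errors carried in the Excepts variant, so a runtime or asynchronous exception thrown during the build would previously skip the removal of the partially installed directory; wrapping the whole action in 'onException' closes that gap. The following is a minimal standalone sketch of the same idea, not GHCup's actual code: it uses plain Control.Exception and System.Directory instead of the HPath/Excepts machinery, and 'installWithCleanup' and 'rmIfExists' are hypothetical names for illustration.

module CleanupSketch where

import Control.Exception ( catch, onException, throwIO )
import System.Directory  ( removeDirectoryRecursive )
import System.IO.Error   ( isDoesNotExistError )

-- Remove a directory, ignoring "does not exist" -- roughly what
-- 'hideError doesNotExistErrorType . deleteDirRecursive' does in the
-- patch above.
rmIfExists :: FilePath -> IO ()
rmIfExists dir =
  removeDirectoryRecursive dir
    `catch` \e -> if isDoesNotExistError e then pure () else throwIO e

-- Run a build step whose *expected* failures come back as 'Left'
-- (standing in for 'catchAllE' over the Excepts variant).  Anything
-- actually thrown, including asynchronous exceptions, is covered by
-- 'onException'.  Either way the partial install dir is removed.
installWithCleanup :: FilePath -> IO (Either String ()) -> IO ()
installWithCleanup ghcdir build =
  (do r <- build
      case r of
        Left err -> rmIfExists ghcdir >> ioError (userError ("BuildFailed: " <> err))
        Right () -> pure ())
    `onException` rmIfExists ghcdir

Running a failing build through 'installWithCleanup' leaves no install directory behind, which is the invariant the patch restores for 'installGHCBin' and 'compileGHC'.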