Compare commits

...

10 Commits

8 changed files with 19126 additions and 49 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -116,7 +116,20 @@ else
 	if [ "${OS}" = "LINUX" ] ; then
 		eghcup --downloader=wget prefetch ghc 8.10.3
 		eghcup --offline install ghc 8.10.3
-	else # test wget a bit
+		if [ "${ARCH}" = "64" ] ; then
+			expected=$(cat "$( cd "$(dirname "$0")" ; pwd -P )/../ghc-8.10.3-linux.files" | sort)
+			actual=$(cd "${GHCUP_DIR}/ghc/8.10.3/" && find | sort)
+			[ "${actual}" = "${expected}" ]
+			unset actual expected
+		fi
+	elif [ "${OS}" = "WINDOWS" ] ; then
+		eghcup prefetch ghc 8.10.3
+		eghcup --offline install ghc 8.10.3
+		expected=$(cat "$( cd "$(dirname "$0")" ; pwd -P )/../ghc-8.10.3-windows.files" | sort)
+		actual=$(cd "${GHCUP_DIR}/ghc/8.10.3/" && find | sort)
+		[ "${actual}" = "${expected}" ]
+		unset actual expected
+	else
 		eghcup prefetch ghc 8.10.3
 		eghcup --offline install ghc 8.10.3
 	fi
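The new branches compare the installed tree against pre-recorded file manifests (the large ghc-8.10.3-*.files additions suppressed above). As a rough illustration only, a standalone Haskell sketch of the same kind of check could look as follows; listTree, checkManifest and the one-relative-path-per-line manifest format are assumptions for the example, not part of ghcup:

    import           Data.List        (sort)
    import           System.Directory (doesDirectoryExist, listDirectory)
    import           System.FilePath  ((</>))

    -- List every entry below a root, as paths relative to that root.
    listTree :: FilePath -> IO [FilePath]
    listTree root = go ""
     where
      go rel = do
        entries <- listDirectory (if null rel then root else root </> rel)
        concat <$> mapM (step rel) entries
      step rel entry = do
        let rel' = if null rel then entry else rel </> entry
        isDir <- doesDirectoryExist (root </> rel')
        subs  <- if isDir then go rel' else pure []
        pure (rel' : subs)

    -- Compare the installed tree against a manifest with one relative path per line.
    checkManifest :: FilePath -> FilePath -> IO Bool
    checkManifest manifest installDir = do
      expected <- sort . lines <$> readFile manifest
      actual   <- sort <$> listTree installDir
      pure (expected == actual)

The shell version above does the same thing with find and sort, failing the CI job when the two listings differ.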

View File

@@ -237,7 +237,7 @@ validateTarballs (TarballFilter etool versionRegex) dls gt = do
         ($(logError) $ T.pack $ prettyShow e)
         liftIO $ exitWith (ExitFailure 2)
-  let appstate = AppState (Settings True False Never Curl False GHCupURL False) dirs defaultKeyBindings (GHCupInfo mempty mempty mempty) pfreq
+  let appstate = AppState (Settings True False Never Curl True GHCupURL False) dirs defaultKeyBindings (GHCupInfo mempty mempty mempty) pfreq
   r <-
     runLogger

View File

@@ -371,7 +371,7 @@ if (!(Test-Path -Path ('{0}' -f $MsysDir))) {
     Exec "$Bash" '-lc' 'pacman --noconfirm -Syuu'
     Print-Msg -msg 'Installing Dependencies...'
-    Exec "$Bash" '-lc' 'pacman --noconfirm -S --needed curl mingw-w64-x86_64-pkgconf'
+    Exec "$Bash" '-lc' 'pacman --noconfirm -S --needed curl autoconf mingw-w64-x86_64-pkgconf'
     Print-Msg -msg 'Updating SSL root certificate authorities...'
     Exec "$Bash" '-lc' 'pacman --noconfirm -S ca-certificates'

View File

@@ -300,10 +300,6 @@ installUnpackedGHC path inst ver = do
       setModificationTime dest mtime
 #else
   PlatformRequest {..} <- lift getPlatformReq
-  liftIO $ copyDirectoryRecursive path inst $ \source dest -> do
-    mtime <- getModificationTime source
-    copyFile source dest
-    setModificationTime dest mtime
   let alpineArgs
         | ver >= [vver|8.2.2|], Linux Alpine <- _rPlatform
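On the non-Windows branch the manual copy of the unpacked tree is dropped here, so the mtime-preserving copy only survives in the Windows branch above; the Unix branch presumably continues through the regular configure/install steps below. For reference, a minimal standalone sketch of what such a copy callback does, using only the directory and filepath packages (copyTreePreservingMTime is a made-up name, not ghcup's copyDirectoryRecursive, and it ignores symlinks and permissions):

    import           Control.Monad    (forM_)
    import           System.Directory (copyFile, createDirectoryIfMissing,
                                       doesDirectoryExist, getModificationTime,
                                       listDirectory, setModificationTime)
    import           System.FilePath  ((</>))

    -- Copy a directory tree, carrying each file's modification time over.
    copyTreePreservingMTime :: FilePath -> FilePath -> IO ()
    copyTreePreservingMTime from to = do
      createDirectoryIfMissing True to
      entries <- listDirectory from
      forM_ entries $ \entry -> do
        let source = from </> entry
            dest   = to   </> entry
        isDir <- doesDirectoryExist source
        if isDir
          then copyTreePreservingMTime source dest
          else do
            mtime <- getModificationTime source
            copyFile source dest
            setModificationTime dest mtime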

View File

@@ -172,21 +172,31 @@ getBase :: ( MonadReader env m
            -> Excepts '[JSONError] m GHCupInfo
 getBase uri = do
   Settings { noNetwork } <- lift getSettings
-  yaml <- lift $ yamlFromCache uri
-  unless noNetwork $
-    handleIO (\e -> warnCache (displayException e))
-    . catchE @_ @_ @'[] (\e@(DownloadFailed _) -> warnCache (prettyShow e))
-    . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed
-    . smartDl
-    $ uri
+
+  -- try to download yaml... usually this writes it into cache dir,
+  -- but in some cases not (e.g. when using file://), so we honour
+  -- the return filepath, if any
+  mYaml <- if noNetwork && view (uriSchemeL' % schemeBSL') uri /= "file" -- for file://, let it fall through
+           then pure Nothing
+           else handleIO (\e -> warnCache (displayException e) >> pure Nothing)
+                . catchE @_ @_ @'[] (\e@(DownloadFailed _) -> warnCache (prettyShow e) >> pure Nothing)
+                . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed
+                . fmap Just
+                . smartDl
+                $ uri
+
+  -- if we didn't get a filepath from the download, use the cached yaml
+  actualYaml <- maybe (lift $ yamlFromCache uri) pure mYaml
+  lift $ $(logDebug) [i|Decoding yaml at: #{actualYaml}|]
+
   liftE
-    . onE_ (onError yaml)
+    . onE_ (onError actualYaml)
     . lEM' @_ @_ @'[JSONError] JSONDecodeError
     . fmap (first (\e -> [i|#{displayException e}
-Consider removing "#{yaml}" manually.|]))
+Consider removing "#{actualYaml}" manually.|]))
     . liftIO
     . Y.decodeFileEither
-    $ yaml
+    $ actualYaml
  where
   -- On error, remove the etags file and set access time to 0. This should ensure the next invocation
   -- may re-download and succeed.
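In other words, getBase no longer fails hard when fetching is impossible: it tries the download (or the local file://) first and falls back to whatever is already cached before decoding. A simplified sketch of that control flow in plain IO, where fetchMetadata and its parameters are illustrative names rather than ghcup API:

    import Control.Exception (SomeException, try)

    -- Try to fetch fresh metadata unless we are offline; fall back to the cache.
    fetchMetadata
      :: Bool            -- ^ noNetwork setting
      -> Bool            -- ^ is this a file:// URI?
      -> IO FilePath     -- ^ download action, returns where the fresh yaml landed
      -> IO FilePath     -- ^ lookup of the already-cached yaml path
      -> IO FilePath
    fetchMetadata noNetwork isFileUri downloadYaml cachedYaml = do
      mFresh <-
        if noNetwork && not isFileUri      -- for file://, let it fall through
          then pure Nothing
          else do
            r <- try downloadYaml :: IO (Either SomeException FilePath)
            -- a failed download only means "no fresh file", not a hard error
            pure (either (const Nothing) Just r)
      maybe cachedYaml pure mFresh         -- nothing fresh -> decode the cached file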
@@ -221,28 +231,32 @@ Consider removing "#{yaml}" manually.|]))
                  , DigestError
                  ]
               m1
-              ()
+              FilePath
   smartDl uri' = do
     json_file <- lift $ yamlFromCache uri'
+    let scheme = view (uriSchemeL' % schemeBSL') uri'
     e <- liftIO $ doesFileExist json_file
     currentTime <- liftIO getCurrentTime
-    if e
-      then do
-        accessTime <- liftIO $ getAccessTime json_file
-        -- access time won't work on most linuxes, but we can try regardless
-        when ((utcTimeToPOSIXSeconds currentTime - utcTimeToPOSIXSeconds accessTime) > 300) $
-          -- no access in last 5 minutes, re-check upstream mod time
-          dlWithMod currentTime json_file
-      else
-        dlWithMod currentTime json_file
+    Dirs { cacheDir } <- lift getDirs
+
+    -- for local files, let's short-circuit and ignore access time
+    if | scheme == "file" -> liftE $ download uri' Nothing cacheDir Nothing True
+       | e -> do
+           accessTime <- liftIO $ getAccessTime json_file
+
+           -- access time won't work on most linuxes, but we can try regardless
+           if | ((utcTimeToPOSIXSeconds currentTime - utcTimeToPOSIXSeconds accessTime) > 300) ->
+                  -- no access in last 5 minutes, re-check upstream mod time
+                  dlWithMod currentTime json_file
+              | otherwise -> pure json_file
+       | otherwise -> dlWithMod currentTime json_file
    where
     dlWithMod modTime json_file = do
       let (dir, fn) = splitFileName json_file
       f <- liftE $ download uri' Nothing dir (Just fn) True
       liftIO $ setModificationTime f modTime
       liftIO $ setAccessTime f modTime
+      pure f

 getDownloadInfo :: ( MonadReader env m
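The refresh policy smartDl now implements is: file:// sources are always re-fetched into the cache dir, an existing cached file is reused if it was accessed within the last five minutes, and otherwise a conditional re-download is attempted; in every case the function returns the path it ended up with. A standalone sketch of that policy in plain IO (refreshCached and the redownload action are hypothetical stand-ins for ghcup's Excepts-based code):

    import Data.Time.Clock       (getCurrentTime)
    import Data.Time.Clock.POSIX (utcTimeToPOSIXSeconds)
    import System.Directory      (doesFileExist, getAccessTime)

    refreshCached
      :: Bool              -- ^ is this a file:// URI?
      -> FilePath          -- ^ cached metadata file
      -> IO FilePath       -- ^ (re-)download action, returns the fresh file
      -> IO FilePath
    refreshCached isFileUri cached redownload = do
      exists <- doesFileExist cached
      now    <- getCurrentTime
      if isFileUri
        then redownload                   -- local files: always short-circuit
        else if exists
          then do
            accessTime <- getAccessTime cached
            -- access time won't work on all filesystems, but we can try regardless
            if utcTimeToPOSIXSeconds now - utcTimeToPOSIXSeconds accessTime > 300
              then redownload             -- stale: re-check upstream
              else pure cached            -- fresh enough: reuse the cache
          else redownload                 -- nothing cached yet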
@@ -304,27 +318,25 @@ download :: ( MonadReader env m
              )
          => URI
          -> Maybe T.Text      -- ^ expected hash
-         -> FilePath          -- ^ destination dir
+         -> FilePath          -- ^ destination dir (ignored for file:// scheme)
          -> Maybe FilePath    -- ^ optional filename
          -> Bool              -- ^ whether to read an write etags
          -> Excepts '[DigestError , DownloadFailed] m FilePath
 download uri eDigest dest mfn etags
   | scheme == "https" = dl
   | scheme == "http"  = dl
-  | scheme == "file"  = cp
+  | scheme == "file"  = do
+      let destFile' = T.unpack . decUTF8Safe $ path
+      lift $ $(logDebug) [i|using local file: #{destFile'}|]
+      forM_ eDigest (liftE . flip checkDigest destFile')
+      pure destFile'
   | otherwise = throwE $ DownloadFailed (variantFromValue UnsupportedScheme)
  where
   scheme = view (uriSchemeL' % schemeBSL') uri
-  cp = do
-    -- destination dir must exist
-    liftIO $ createDirRecursive' dest
-    let fromFile = T.unpack . decUTF8Safe $ path
-    liftIO $ copyFile fromFile destFile
-    pure destFile
   dl = do
-    let uri' = decUTF8Safe (serializeURIRef' uri)
-    lift $ $(logInfo) [i|downloading: #{uri'}|]
+    destFile <- liftE . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed $ getDestFile
+    lift $ $(logInfo) [i|downloading: #{uri'} as file #{destFile}|]
     -- destination dir must exist
     liftIO $ createDirRecursive' dest
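The behavioural change for file:// URIs is that the referenced path is now used in place (after an optional digest check) instead of being copied into the destination directory, which is why dest is documented as ignored for that scheme. A tiny sketch of the dispatch, using network-uri for brevity rather than ghcup's uri-bytestring/optics setup (planFetch and Fetch are illustrative only):

    import Network.URI (URI, uriPath, uriScheme)

    data Fetch
      = Download URI          -- ^ fetch over http/https into the destination dir
      | UseLocal FilePath     -- ^ file://: use the referenced path directly
      deriving Show

    planFetch :: URI -> Either String Fetch
    planFetch uri =
      case uriScheme uri of
        "https:" -> Right (Download uri)
        "http:"  -> Right (Download uri)
        "file:"  -> Right (UseLocal (uriPath uri))
        s        -> Left ("unsupported URI scheme: " <> s)

For example, planFetch applied to a parsed "file:///tmp/ghcup-0.0.4.yaml" yields UseLocal "/tmp/ghcup-0.0.4.yaml", with no copy into the cache directory.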
@@ -366,7 +378,7 @@ download uri eDigest dest mfn etags
                 _ -> liftE $ throwE @_ @'[DownloadFailed] (DownloadFailed (toVariantAt @0 (MalformedHeaders headers)
                      :: V '[MalformedHeaders]))
-          writeEtags (parseEtags headers)
+          writeEtags destFile (parseEtags headers)
         else
           liftE $ lEM @_ @'[ProcessError] $ exec "curl"
             (o' ++ ["-fL", "-o", destFile, T.unpack uri']) Nothing Nothing
@@ -383,13 +395,13 @@ download uri eDigest dest mfn etags
             case _exitCode of
               ExitSuccess -> do
                 liftIO $ copyFile destFileTemp destFile
-                writeEtags (parseEtags (decUTF8Safe' _stdErr))
+                writeEtags destFile (parseEtags (decUTF8Safe' _stdErr))
               ExitFailure i'
                 | i' == 8
                 , Just _ <- find (T.pack "304 Not Modified" `T.isInfixOf`) . T.lines . decUTF8Safe' $ _stdErr
                 -> do
                      $logDebug "Not modified, skipping download"
-                     writeEtags (parseEtags (decUTF8Safe' _stdErr))
+                     writeEtags destFile (parseEtags (decUTF8Safe' _stdErr))
                 | otherwise -> throwE (NonZeroExit i' "wget" opts)
         else do
           let opts = o' ++ ["-O", destFileTemp , T.unpack uri']
@@ -404,10 +416,10 @@ download uri eDigest dest mfn etags
           let addHeaders = maybe mempty (\etag -> M.fromList [ (mk . E.encodeUtf8 . T.pack $ "If-None-Match"
                                                               , E.encodeUtf8 etag)]) metag
           liftE
-            $ catchE @HTTPNotModified @'[DownloadFailed] @'[] (\(HTTPNotModified etag) -> lift $ writeEtags (pure $ Just etag))
+            $ catchE @HTTPNotModified @'[DownloadFailed] @'[] (\(HTTPNotModified etag) -> lift $ writeEtags destFile (pure $ Just etag))
             $ do
                 r <- downloadToFile https host fullPath port destFile addHeaders
-                writeEtags (pure $ decUTF8Safe <$> getHeader r "etag")
+                writeEtags destFile (pure $ decUTF8Safe <$> getHeader r "etag")
         else void $ liftE $ catchE @HTTPNotModified
                @'[DownloadFailed]
                (\e@(HTTPNotModified _) ->
@@ -420,12 +432,18 @@ download uri eDigest dest mfn etags
     -- Manage to find a file we can write the body into.
-    destFile :: FilePath
-    destFile = maybe (dest </> T.unpack (decUTF8Safe (urlBaseName path)))
-                     (dest </>)
-                     mfn
+    getDestFile :: Monad m => Excepts '[NoUrlBase] m FilePath
+    getDestFile =
+      case mfn of
+        Just fn -> pure (dest </> fn)
+        Nothing
+          | let urlBase = T.unpack (decUTF8Safe (urlBaseName path))
+          , not (null urlBase) -> pure (dest </> urlBase)
+          -- TODO: remove this once we use hpath again
+          | otherwise -> throwE $ NoUrlBase uri'

     path = view pathL' uri
+    uri' = decUTF8Safe (serializeURIRef' uri)

     parseEtags :: (MonadLogger m, MonadIO m, MonadThrow m) => T.Text -> m (Maybe T.Text)
     parseEtags stderr = do
@@ -444,8 +462,8 @@ download uri eDigest dest mfn etags
           $logDebug "No etags header found"
           pure Nothing

-    writeEtags :: (MonadLogger m, MonadIO m, MonadThrow m) => m (Maybe T.Text) -> m ()
-    writeEtags getTags = do
+    writeEtags :: (MonadLogger m, MonadIO m, MonadThrow m) => FilePath -> m (Maybe T.Text) -> m ()
+    writeEtags destFile getTags = do
       getTags >>= \case
         Just t -> do
           $logDebug [i|Writing etagsFile #{(etagsFile destFile)}|]
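Since destFile is no longer a top-level where-binding, writeEtags now takes the destination path explicitly, and the base-name computation can fail cleanly with NoUrlBase (the new error type in the next file) instead of silently producing a directory path with an empty filename. A simplified sketch of that resolution, using Either and takeFileName in place of ghcup's Excepts/urlBaseName machinery (resolveDestFile is a made-up name):

    import System.FilePath (takeFileName, (</>))

    resolveDestFile
      :: FilePath          -- ^ destination dir
      -> Maybe FilePath    -- ^ optional explicit filename
      -> String            -- ^ URL path component, e.g. "/some/ghcup-0.0.4.yaml"
      -> Either String FilePath
    resolveDestFile dest (Just fn) _ = Right (dest </> fn)
    resolveDestFile dest Nothing urlPath
      | not (null urlBase) = Right (dest </> urlBase)
      | otherwise          = Left ("couldn't get a base filename from url path: " <> urlPath)
      where urlBase = takeFileName urlPath

For instance, resolveDestFile "/cache" Nothing "/ghcup-0.0.4.yaml" yields Right "/cache/ghcup-0.0.4.yaml", while a path ending in "/" yields a Left, mirroring the NoUrlBase case above.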

View File

@@ -327,6 +327,15 @@ instance Pretty UnexpectedListLength where
 instance Exception UnexpectedListLength

+data NoUrlBase = NoUrlBase Text
+  deriving Show
+
+instance Pretty NoUrlBase where
+  pPrint (NoUrlBase url) =
+    text [i|Couldn't get a base filename from url #{url}|]
+
+instance Exception NoUrlBase
+
 ------------------------