2020-04-09 14:59:25 +00:00
|
|
|
{-# LANGUAGE CPP #-}
|
2020-01-11 20:15:05 +00:00
|
|
|
{-# LANGUAGE DataKinds #-}
|
|
|
|
{-# LANGUAGE DeriveGeneric #-}
|
|
|
|
{-# LANGUAGE FlexibleContexts #-}
|
2020-03-21 21:19:37 +00:00
|
|
|
{-# LANGUAGE OverloadedStrings #-}
|
2020-01-11 20:15:05 +00:00
|
|
|
{-# LANGUAGE QuasiQuotes #-}
|
|
|
|
{-# LANGUAGE TemplateHaskell #-}
|
|
|
|
{-# LANGUAGE TypeApplications #-}
|
2020-03-21 21:19:37 +00:00
|
|
|
{-# LANGUAGE TypeFamilies #-}
|
2020-01-11 20:15:05 +00:00
|
|
|
|
|
|
|
|
2020-07-21 23:08:58 +00:00
|
|
|
{-|
|
|
|
|
Module : GHCup.Download
|
|
|
|
Description : Downloading
|
|
|
|
Copyright : (c) Julian Ospald, 2020
|
2020-07-30 18:04:02 +00:00
|
|
|
License : LGPL-3.0
|
2020-07-21 23:08:58 +00:00
|
|
|
Maintainer : hasufell@hasufell.de
|
|
|
|
Stability : experimental
|
2021-05-14 21:09:45 +00:00
|
|
|
Portability : portable
|
2020-07-21 23:08:58 +00:00
|
|
|
|
|
|
|
Module for handling all download related functions.
|
|
|
|
|
|
|
|
Generally we support downloading via:
|
|
|
|
|
|
|
|
- curl (default)
|
|
|
|
- wget
|
|
|
|
- internal downloader (only when compiled)
|
|
|
|
-}
|
2020-01-11 20:15:05 +00:00
|
|
|
module GHCup.Download where
|
|
|
|
|
2020-04-28 15:56:39 +00:00
|
|
|
#if defined(INTERNAL_DOWNLOADER)
|
2020-04-09 14:59:25 +00:00
|
|
|
import GHCup.Download.IOStreams
|
|
|
|
import GHCup.Download.Utils
|
|
|
|
#endif
|
2020-01-11 20:15:05 +00:00
|
|
|
import GHCup.Errors
|
|
|
|
import GHCup.Types
|
|
|
|
import GHCup.Types.JSON ( )
|
|
|
|
import GHCup.Types.Optics
|
2021-05-14 21:09:45 +00:00
|
|
|
import GHCup.Utils.Dirs
|
2020-01-11 20:15:05 +00:00
|
|
|
import GHCup.Utils.File
|
|
|
|
import GHCup.Utils.Prelude
|
2020-04-09 14:59:25 +00:00
|
|
|
import GHCup.Version
|
2020-01-11 20:15:05 +00:00
|
|
|
|
|
|
|
import Control.Applicative
|
|
|
|
import Control.Exception.Safe
|
|
|
|
import Control.Monad
|
2020-04-09 17:53:22 +00:00
|
|
|
#if !MIN_VERSION_base(4,13,0)
|
|
|
|
import Control.Monad.Fail ( MonadFail )
|
|
|
|
#endif
|
2020-01-11 20:15:05 +00:00
|
|
|
import Control.Monad.Logger
|
|
|
|
import Control.Monad.Reader
|
|
|
|
import Control.Monad.Trans.Resource
|
|
|
|
hiding ( throwM )
|
|
|
|
import Data.Aeson
|
2020-08-09 15:39:02 +00:00
|
|
|
import Data.Bifunctor
|
2020-01-11 20:15:05 +00:00
|
|
|
import Data.ByteString ( ByteString )
|
2020-04-29 17:36:16 +00:00
|
|
|
#if defined(INTERNAL_DOWNLOADER)
|
2020-01-11 20:15:05 +00:00
|
|
|
import Data.CaseInsensitive ( CI )
|
2020-04-09 17:53:22 +00:00
|
|
|
#endif
|
2021-05-14 21:09:45 +00:00
|
|
|
import Data.List.Extra
|
2020-01-11 20:15:05 +00:00
|
|
|
import Data.Maybe
|
|
|
|
import Data.String.Interpolate
|
|
|
|
import Data.Time.Clock
|
|
|
|
import Data.Time.Clock.POSIX
|
2020-04-28 15:56:39 +00:00
|
|
|
#if defined(INTERNAL_DOWNLOADER)
|
2020-01-11 20:15:05 +00:00
|
|
|
import Data.Time.Format
|
2020-04-09 17:53:22 +00:00
|
|
|
#endif
|
2020-01-11 20:15:05 +00:00
|
|
|
import Data.Versions
|
2020-04-29 17:12:58 +00:00
|
|
|
import Data.Word8
|
2020-01-11 20:15:05 +00:00
|
|
|
import GHC.IO.Exception
|
|
|
|
import Haskus.Utils.Variant.Excepts
|
|
|
|
import Optics
|
|
|
|
import Prelude hiding ( abs
|
|
|
|
, readFile
|
|
|
|
, writeFile
|
|
|
|
)
|
2021-05-14 21:09:45 +00:00
|
|
|
import System.Directory
|
|
|
|
import System.Environment
|
|
|
|
import System.FilePath
|
2020-01-11 20:15:05 +00:00
|
|
|
import System.IO.Error
|
|
|
|
import URI.ByteString
|
|
|
|
|
2020-04-09 16:27:07 +00:00
|
|
|
import qualified Crypto.Hash.SHA256 as SHA256
|
2021-05-14 21:09:45 +00:00
|
|
|
import qualified Data.ByteString as B
|
2020-04-09 16:27:07 +00:00
|
|
|
import qualified Data.ByteString.Base16 as B16
|
2020-01-11 20:15:05 +00:00
|
|
|
import qualified Data.ByteString.Lazy as L
|
2020-10-25 13:17:17 +00:00
|
|
|
import qualified Data.Map.Strict as M
|
2020-04-28 15:56:39 +00:00
|
|
|
#if defined(INTERNAL_DOWNLOADER)
|
2020-01-11 20:15:05 +00:00
|
|
|
import qualified Data.CaseInsensitive as CI
|
2020-04-09 17:53:22 +00:00
|
|
|
#endif
|
2021-05-14 21:09:45 +00:00
|
|
|
import qualified Data.Text as T
|
2020-01-11 20:15:05 +00:00
|
|
|
import qualified Data.Text.Encoding as E
|
2020-08-09 15:39:02 +00:00
|
|
|
import qualified Data.Yaml as Y
|
2020-01-11 20:15:05 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
------------------
|
|
|
|
--[ High-level ]--
|
|
|
|
------------------
|
|
|
|
|
|
|
|
|
2020-10-25 13:17:17 +00:00
|
|
|
|
|
|
|
-- | Downloads the download information! But only if we need to ;P
--
-- Depending on the configured 'urlSource' this either:
--
--   * downloads (or reads from cache) the stock ghcup metadata ('GHCupURL')
--   * downloads a user-provided metadata file ('OwnSource')
--   * uses an in-memory spec directly ('OwnSpec')
--   * merges an extension spec on top of the stock metadata ('AddSource')
getDownloadsF :: ( FromJSONKey Tool
                 , FromJSONKey Version
                 , FromJSON VersionInfo
                 , MonadIO m
                 , MonadCatch m
                 , MonadLogger m
                 , MonadThrow m
                 , MonadFail m
                 )
              => Settings
              -> Dirs
              -> Excepts
                   '[JSONError , DownloadFailed , FileDoesNotExistError]
                   m
                   GHCupInfo
getDownloadsF settings@Settings{ urlSource } dirs = do
  case urlSource of
    -- stock metadata, possibly served from the local cache
    GHCupURL -> liftE $ getBase dirs settings
    -- fully custom metadata URL; any download error becomes DownloadFailed
    (OwnSource url) -> do
      bs <- reThrowAll DownloadFailed $ downloadBS (downloader settings) url
      lE' JSONDecodeError $ first show $ Y.decodeEither' (L.toStrict bs)
    -- spec already parsed and supplied in-memory; nothing to fetch
    (OwnSpec av) -> pure av
    -- extension spec supplied in-memory; merge over the stock base
    (AddSource (Left ext)) -> do
      base <- liftE $ getBase dirs settings
      pure (mergeGhcupInfo base ext)
    -- extension spec at a URL; fetch, parse, then merge over the stock base
    (AddSource (Right uri)) -> do
      base <- liftE $ getBase dirs settings
      bsExt <- reThrowAll DownloadFailed $ downloadBS (downloader settings) uri
      ext <- lE' JSONDecodeError $ first show $ Y.decodeEither' (L.toStrict bsExt)
      pure (mergeGhcupInfo base ext)

 where

  -- The extension wins on key collisions: per-tool version maps are
  -- combined with 'M.union' (left-biased towards the extension).
  mergeGhcupInfo :: GHCupInfo -- ^ base to merge with
                 -> GHCupInfo -- ^ extension overwriting the base
                 -> GHCupInfo
  mergeGhcupInfo (GHCupInfo tr base base2) (GHCupInfo _ ext ext2) =
    let newDownloads = M.mapWithKey (\k a -> case M.lookup k ext of
                                        Just a' -> M.union a' a
                                        Nothing -> a
                                    ) base
        newGlobalTools = M.union base2 ext2
    in  GHCupInfo tr newDownloads newGlobalTools
|
2020-04-27 21:23:34 +00:00
|
|
|
|
2021-02-24 23:07:38 +00:00
|
|
|
|
2021-05-14 21:09:45 +00:00
|
|
|
-- | Fall back to the cached metadata file in the cache dir.
--
-- Used when the live download of the ghcup metadata failed. Throws
-- 'FileDoesNotExistError' when no cached copy exists and 'JSONError'
-- when the cached copy cannot be decoded.
readFromCache :: (MonadIO m, MonadCatch m, MonadLogger m)
              => Dirs
              -> Excepts '[JSONError, FileDoesNotExistError] m GHCupInfo
readFromCache Dirs {..} = do
  lift $ $(logWarn)
    [i|Could not get download info, trying cached version (this may not be recent!)|]
  -- the cache filename is derived from the upstream URL's basename
  let path = view pathL' ghcupURL
  let yaml_file = cacheDir </> (T.unpack . decUTF8Safe . urlBaseName $ path)
  bs <-
    handleIO' NoSuchThing
    -- translate the IO "no such file" error into our typed error
    (\_ -> throwE $ FileDoesNotExistError yaml_file)
    $ liftIO
    $ L.readFile yaml_file
  lE' JSONDecodeError $ first show $ Y.decodeEither' (L.toStrict bs)
|
2021-02-24 23:07:38 +00:00
|
|
|
|
|
|
|
|
2021-05-14 21:09:45 +00:00
|
|
|
-- | Fetch the stock ghcup metadata, preferring a fresh download but
-- falling back to the cached copy on any download failure.
getBase :: (MonadFail m, MonadIO m, MonadCatch m, MonadLogger m)
        => Dirs
        -> Settings
        -> Excepts '[JSONError , FileDoesNotExistError] m GHCupInfo
getBase dirs@Dirs{..} Settings{ downloader } =
  -- any IO exception during the download path also falls back to the cache
  handleIO (\_ -> readFromCache dirs)
  $ catchE @_ @'[JSONError, FileDoesNotExistError]
      (\(DownloadFailed _) -> readFromCache dirs)
  (reThrowAll @_ @_ @'[JSONError, DownloadFailed] DownloadFailed (smartDl ghcupURL)
    >>= (liftE . lE' @_ @_ @'[JSONError] JSONDecodeError . first show . Y.decodeEither' . L.toStrict))
 where
  -- First check if the json file is in the ~/.ghcup/cache dir
  -- and check it's access time. If it has been accessed within the
  -- last 5 minutes, just reuse it.
  --
  -- If not, then send a HEAD request and check for modification time.
  -- Only download the file if the modification time is newer
  -- than the local file.
  --
  -- Always save the local file with the mod time of the remote file.
  smartDl :: forall m1
           . ( MonadCatch m1
             , MonadIO m1
             , MonadFail m1
             , MonadLogger m1
             )
          => URI
          -> Excepts
               '[ FileDoesNotExistError
                , HTTPStatusError
                , URIParseError
                , UnsupportedScheme
                , NoLocationHeader
                , TooManyRedirs
                , ProcessError
                ]
               m1
               L.ByteString
  smartDl uri' = do
    let path = view pathL' uri'
    let json_file = cacheDir </> (T.unpack . decUTF8Safe . urlBaseName $ path)
    e <- liftIO $ doesFileExist json_file
    if e
      then do
        accessTime <- liftIO $ getAccessTime json_file
        currentTime <- liftIO getCurrentTime

        -- access time won't work on most linuxes, but we can try regardless
        if (utcTimeToPOSIXSeconds currentTime - utcTimeToPOSIXSeconds accessTime) > 300
          then do -- no access in last 5 minutes, re-check upstream mod time
            getModTime >>= \case
              Just modTime -> do
                fileMod <- liftIO $ getModificationTime json_file
                if modTime > fileMod
                  then dlWithMod modTime json_file
                  else liftIO $ L.readFile json_file
              Nothing -> do
                lift $ $(logDebug) [i|Unable to get/parse Last-Modified header|]
                dlWithoutMod json_file
          else -- access in less than 5 minutes, re-use file
            liftIO $ L.readFile json_file
      else do
        getModTime >>= \case
          Just modTime -> dlWithMod modTime json_file
          Nothing -> do
            -- although we don't know last-modified, we still save
            -- it to a file, so we might use it in offline mode
            lift $ $(logDebug) [i|Unable to get/parse Last-Modified header|]
            dlWithoutMod json_file

   where
    -- download and stamp the cache file with the remote mod time,
    -- so the next freshness check compares against upstream
    dlWithMod modTime json_file = do
      bs <- liftE $ downloadBS downloader uri'
      liftIO $ writeFileWithModTime modTime json_file bs
      pure bs
    -- download without a known remote mod time; mod time is reset to
    -- epoch so the next run will always consider upstream newer
    dlWithoutMod json_file = do
      bs <- liftE $ downloadBS downloader uri'
      liftIO $ hideError doesNotExistErrorType $ rmFile json_file
      liftIO $ L.writeFile json_file bs
      liftIO $ setModificationTime json_file (posixSecondsToUTCTime (fromIntegral @Int 0))
      pure bs


    -- query the remote Last-Modified header; only possible with the
    -- internal downloader (curl/wget builds always return Nothing)
    getModTime = do
#if !defined(INTERNAL_DOWNLOADER)
      pure Nothing
#else
      headers <-
        handleIO (\_ -> pure mempty)
        $ liftE
        $ ( catchAllE
              (\_ ->
                pure mempty :: Excepts '[] m1 (M.Map (CI ByteString) ByteString)
              )
          $ getHead uri'
          )
      pure $ parseModifiedHeader headers

    -- parse an RFC 1123 style date out of the Last-Modified header
    parseModifiedHeader :: (M.Map (CI ByteString) ByteString) -> Maybe UTCTime
    parseModifiedHeader headers =
      (M.lookup (CI.mk "Last-Modified") headers) >>= \h -> parseTimeM
        True
        defaultTimeLocale
        "%a, %d %b %Y %H:%M:%S %Z"
        (T.unpack . decUTF8Safe $ h)

#endif
|
|
|
|
|
2021-05-14 21:09:45 +00:00
|
|
|
-- | Write the given bytes to a file, then stamp the file with the
-- supplied modification time.
writeFileWithModTime :: UTCTime -> FilePath -> L.ByteString -> IO ()
writeFileWithModTime mtime fp bytes = do
  L.writeFile fp bytes
  setModificationTime fp mtime
|
2020-01-11 20:15:05 +00:00
|
|
|
|
|
|
|
|
2020-04-10 17:27:17 +00:00
|
|
|
-- | Look up the download information for a tool at a given version on
-- the requested platform.
--
-- Falls back through progressively less specific platform descriptions:
-- exact distro match, then distro without a version, then no distro at
-- all (except on Alpine, where the distro-less glibc builds won't work).
getDownloadInfo :: Tool
                -> Version
                -- ^ tool version
                -> PlatformRequest
                -> GHCupDownloads
                -> Either NoDownload DownloadInfo
getDownloadInfo t v (PlatformRequest a p mv) dls =
  case candidate of
    Nothing -> Left NoDownload
    Just di -> Right di

 where
  candidate = case p of
    -- non-musl won't work on alpine, so never fall back to the
    -- distro-less entry there
    Linux Alpine -> exactDistro <|> noDistroVer
    _            -> exactDistro <|> noDistroVer <|> anyDistro

  exactDistro = distroPreview id id
  noDistroVer = distroPreview id (const Nothing)
  anyDistro   = distroPreview (set _Linux UnknownLinux) (const Nothing)

  -- look up the version-range map for a (possibly adjusted) platform
  -- and pick the first entry whose range matches the distro version
  distroPreview adjustPlatform adjustVersion = do
    versionSpec <- preview (ix t % ix v % viArch % ix a % ix (adjustPlatform p)) dls
    let mv' = adjustVersion mv
        matches (mverRange, _) =
          case mverRange of
            -- an entry without a range only matches when we also have
            -- no distro version to compare against
            Nothing    -> isNothing mv'
            Just range -> maybe False (`versionRange` range) mv'
    snd <$> find matches (M.toList versionSpec)
|
2020-01-11 20:15:05 +00:00
|
|
|
|
|
|
|
|
|
|
|
-- | Tries to download from the given http or https url
-- and saves the result in continuous memory into a file.
-- If the filename is not provided, then we:
--   1. try to guess the filename from the url path
--   2. otherwise create a random file
--
-- The file must not exist.
--
-- On success the downloaded file's digest is verified against the
-- 'DownloadInfo'. On any download error the partial file is removed.
download :: ( MonadMask m
            , MonadThrow m
            , MonadLogger m
            , MonadIO m
            )
         => Settings
         -> DownloadInfo
         -> FilePath          -- ^ destination dir
         -> Maybe FilePath    -- ^ optional filename
         -> Excepts '[DigestError , DownloadFailed] m FilePath
download settings@Settings{ downloader } dli dest mfn
  | scheme == "https" = dl
  | scheme == "http" = dl
  -- file:// urls are just copied from the local filesystem
  | scheme == "file" = cp
  | otherwise = throwE $ DownloadFailed (variantFromValue UnsupportedScheme)

 where
  scheme = view (dlUri % uriSchemeL' % schemeBSL') dli
  cp = do
    -- destination dir must exist
    liftIO $ createDirRecursive' dest
    let destFile = getDestFile
    let fromFile = T.unpack . decUTF8Safe $ path
    liftIO $ copyFile fromFile destFile
    pure destFile
  dl = do
    let uri' = decUTF8Safe (serializeURIRef' (view dlUri dli))
    lift $ $(logInfo) [i|downloading: #{uri'}|]

    -- destination dir must exist
    liftIO $ createDirRecursive' dest
    let destFile = getDestFile

    -- download
    -- remove the partial file both on async exceptions (onException)
    -- and on typed download errors (catchAllE)
    flip onException
         (liftIO $ hideError doesNotExistErrorType $ rmFile destFile)
     $ catchAllE @_ @'[ProcessError, DownloadFailed, UnsupportedScheme]
          (\e ->
            liftIO (hideError doesNotExistErrorType $ rmFile destFile)
              >> (throwE . DownloadFailed $ e)
          ) $ do
              case downloader of
                Curl -> do
                  o' <- liftIO getCurlOpts
                  liftE $ lEM @_ @'[ProcessError] $ exec "curl"
                    (o' ++ ["-fL", "-o", destFile, (T.unpack . decUTF8Safe) $ serializeURIRef' $ view dlUri dli]) Nothing Nothing
                Wget -> do
                  o' <- liftIO getWgetOpts
                  liftE $ lEM @_ @'[ProcessError] $ exec "wget"
                    (o' ++ ["-O", destFile , (T.unpack . decUTF8Safe) $ serializeURIRef' $ view dlUri dli]) Nothing Nothing
#if defined(INTERNAL_DOWNLOADER)
                Internal -> do
                  (https, host, fullPath, port) <- liftE $ uriToQuadruple (view dlUri dli)
                  liftE $ downloadToFile https host fullPath port destFile
#endif

    liftE $ checkDigest settings dli destFile
    pure destFile

  -- Manage to find a file we can write the body into.
  getDestFile :: FilePath
  getDestFile = maybe (dest </> T.unpack (decUTF8Safe (urlBaseName path))) (dest </>) mfn

  path = view (dlUri % pathL') dli
|
|
|
|
|
|
|
|
|
|
|
|
-- | Download into tmpdir or use cached version, if it exists. If filename
-- is omitted, infers the filename from the url.
downloadCached :: ( MonadMask m
                  , MonadResource m
                  , MonadThrow m
                  , MonadLogger m
                  , MonadIO m
                  , MonadUnliftIO m
                  )
               => Settings
               -> Dirs
               -> DownloadInfo
               -> Maybe FilePath  -- ^ optional filename
               -> Excepts '[DigestError , DownloadFailed] m FilePath
downloadCached settings@Settings{ cache } dirs dli mfn =
  if cache
    -- caching enabled: go through the cache-aware code path
    then downloadCached' settings dirs dli mfn
    -- caching disabled: download into a fresh tmp dir instead
    else do
      tmpDir <- lift withGHCupTmpDir
      liftE $ download settings dli tmpDir mfn
|
|
|
|
|
|
|
|
|
|
|
|
-- | Like 'downloadCached', but always operates on the cache dir:
-- reuse the cached artifact when present (after digest verification),
-- otherwise download into the cache dir.
downloadCached' :: ( MonadMask m
                   , MonadThrow m
                   , MonadLogger m
                   , MonadIO m
                   , MonadUnliftIO m
                   )
                => Settings
                -> Dirs
                -> DownloadInfo
                -> Maybe FilePath  -- ^ optional filename
                -> Excepts '[DigestError , DownloadFailed] m FilePath
downloadCached' settings Dirs{..} dli mfn = do
  let inferredName = (T.unpack . decUTF8Safe) $ urlBaseName $ view (dlUri % pathL') dli
      cachedFile   = cacheDir </> fromMaybe inferredName mfn
  alreadyCached <- liftIO $ doesFileExist cachedFile
  if alreadyCached
    then do
      -- only trust the cached artifact after its digest checks out
      liftE $ checkDigest settings dli cachedFile
      pure cachedFile
    else liftE $ download settings dli cacheDir mfn
|
2020-01-11 20:15:05 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
------------------
|
|
|
|
--[ Low-level ]--
|
|
|
|
------------------
|
|
|
|
|
|
|
|
|
2020-04-09 14:59:25 +00:00
|
|
|
|
|
|
|
|
2020-01-11 20:15:05 +00:00
|
|
|
-- | This is used for downloading the JSON.
--
-- Downloads the body of the given URI into memory. @file://@ URIs are
-- read straight from the local filesystem.
downloadBS :: (MonadCatch m, MonadIO m, MonadLogger m)
           => Downloader
           -> URI
           -> Excepts
                '[ FileDoesNotExistError
                 , HTTPStatusError
                 , URIParseError
                 , UnsupportedScheme
                 , NoLocationHeader
                 , TooManyRedirs
                 , ProcessError
                 ]
                m
                L.ByteString
downloadBS downloader uri'
  | scheme == "https"
  = dl True
  | scheme == "http"
  = dl False
  -- file:// urls: read directly, mapping "no such file" to our typed error
  | scheme == "file"
  = liftIOException doesNotExistErrorType (FileDoesNotExistError $ T.unpack $ decUTF8Safe path)
      (liftIO $ L.readFile (T.unpack $ decUTF8Safe path))
  | otherwise
  = throwE UnsupportedScheme

 where
  scheme = view (uriSchemeL' % schemeBSL') uri'
  path = view pathL' uri'
  -- the https flag is only consumed by the internal downloader;
  -- curl/wget infer the scheme from the serialized URI
#if defined(INTERNAL_DOWNLOADER)
  dl https = do
#else
  dl _ = do
#endif
    lift $ $(logDebug) [i|downloading: #{serializeURIRef' uri'}|]
    case downloader of
      Curl -> do
        o' <- liftIO getCurlOpts
        let exe = "curl"
            args = o' ++ ["-sSfL", T.unpack $ decUTF8Safe $ serializeURIRef' uri']
        lift (executeOut exe args Nothing) >>= \case
          CapturedProcess ExitSuccess stdout _ -> do
            pure stdout
          CapturedProcess (ExitFailure i') _ _ -> throwE $ NonZeroExit i' exe args
      Wget -> do
        o' <- liftIO getWgetOpts
        let exe = "wget"
            args = o' ++ ["-qO-", T.unpack $ decUTF8Safe $ serializeURIRef' uri']
        lift (executeOut exe args Nothing) >>= \case
          CapturedProcess ExitSuccess stdout _ -> do
            pure stdout
          CapturedProcess (ExitFailure i') _ _ -> throwE $ NonZeroExit i' exe args
#if defined(INTERNAL_DOWNLOADER)
      Internal -> do
        (_, host', fullPath', port') <- liftE $ uriToQuadruple uri'
        liftE $ downloadBS' https host' fullPath' port'
#endif
|
2020-01-11 20:15:05 +00:00
|
|
|
|
|
|
|
|
2021-05-14 21:09:45 +00:00
|
|
|
-- | Verify the SHA256 digest of the given file against the one recorded
-- in the 'DownloadInfo'.
--
-- Does nothing when digest verification is disabled via 'noVerify'.
-- Throws 'DigestError' carrying the computed and the expected digest
-- on mismatch.
checkDigest :: (MonadIO m, MonadThrow m, MonadLogger m)
            => Settings
            -> DownloadInfo
            -> FilePath
            -> Excepts '[DigestError] m ()
checkDigest Settings{ noVerify } dli file = do
  let verify = not noVerify
  when verify $ do
    let p' = takeFileName file
    lift $ $(logInfo) [i|verifying digest of: #{p'}|]
    c <- liftIO $ L.readFile file
    cDigest <- throwEither . E.decodeUtf8' . B16.encode . SHA256.hashlazy $ c
    let eDigest = view dlHash dli
    -- note: we are already inside 'when verify', so the previous
    -- '(cDigest /= eDigest) && verify' re-check was redundant
    when (cDigest /= eDigest) $ throwE (DigestError cDigest eDigest)
|
2020-04-09 14:59:25 +00:00
|
|
|
|
2020-04-29 17:12:58 +00:00
|
|
|
|
|
|
|
-- | Get additional curl args from env. This is an undocumented option.
--
-- Reads @GHCUP_CURL_OPTS@ and splits it on single spaces; yields the
-- empty list when the variable is unset.
getCurlOpts :: IO [String]
getCurlOpts = do
  mOpts <- lookupEnv "GHCUP_CURL_OPTS"
  case mOpts of
    Just opts -> pure (splitOn " " opts)
    Nothing   -> pure []
|
|
|
|
|
|
|
|
|
|
|
|
-- | Get additional wget args from env. This is an undocumented option.
--
-- Reads @GHCUP_WGET_OPTS@ and splits it on single spaces; yields the
-- empty list when the variable is unset.
getWgetOpts :: IO [String]
getWgetOpts = do
  mOpts <- lookupEnv "GHCUP_WGET_OPTS"
  case mOpts of
    Just opts -> pure (splitOn " " opts)
    Nothing   -> pure []
|
|
|
|
|
2021-05-14 21:09:45 +00:00
|
|
|
|
|
|
|
-- | Extract the last path segment (the "basename") of a URL path,
-- after percent-decoding it.
urlBaseName :: ByteString -- ^ the url path (without scheme and host)
            -> ByteString
urlBaseName urlPath =
  let decoded = urlDecode False urlPath
  in  snd (B.breakEnd (== _slash) decoded)
|
|
|
|
|