Compare commits

...

13 Commits

24 changed files with 8085 additions and 5119 deletions

View File

@@ -87,39 +87,29 @@ download_cabal_cache() {
cd /tmp
case "${RUNNER_OS}" in
"Linux")
case "${DISTRO}" in
"Alpine")
case "${ARCH}" in
"32") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/i386-linux-alpine-cabal-cache-1.0.5.1
;;
"64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/x86_64-linux-alpine-cabal-cache-1.0.5.1
;;
esac
case "${ARCH}" in
"32") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/i386-linux-cabal-cache
;;
*)
case "${ARCH}" in
"64") url=https://github.com/haskell-works/cabal-cache/releases/download/v1.0.5.1/cabal-cache-x86_64-linux.gz
;;
"ARM64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/aarch64-linux-cabal-cache-1.0.5.1
;;
"ARM") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/armv7-linux-cabal-cache-1.0.5.1
;;
esac
"64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/x86_64-linux-cabal-cache
;;
"ARM64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/aarch64-linux-cabal-cache
;;
"ARM") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/armv7-linux-cabal-cache
;;
esac
;;
"FreeBSD")
url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/x86_64-freebsd-cabal-cache-1.0.5.1
url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/x86_64-portbld-freebsd-cabal-cache
;;
"Windows")
exe=".exe"
url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/x86_64-mingw64-cabal-cache-1.0.5.1.exe
url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/x86_64-mingw64-cabal-cache
;;
"macOS")
case "${ARCH}" in
"ARM64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/aarch64-apple-darwin-cabal-cache-1.0.5.1
"ARM64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/aarch64-apple-darwin-cabal-cache
;;
"64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/x86_64-apple-darwin-cabal-cache-1.0.5.1
"64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental4/x86_64-apple-darwin-cabal-cache
;;
esac
;;
@@ -134,8 +124,9 @@ download_cabal_cache() {
curl -o cabal-cache${exe} -L "${url}"
;;
esac
chmod +x cabal-cache${exe}
cp "cabal-cache${exe}" "${dest}${exe}"
sha_sum cabal-cache${exe}
mv "cabal-cache${exe}" "${dest}${exe}"
chmod +x "${dest}${exe}"
fi
)
}

View File

@@ -102,7 +102,7 @@ jobs:
- uses: docker://arm64v8/ubuntu:focal
name: Cleanup (aarch64 linux)
with:
args: rm -rf .ghcup/ cabal/ dist-newstyle/ out/
args: "find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +"
- name: git config
run: |
@@ -272,6 +272,14 @@ jobs:
DISTRO: ${{ matrix.DISTRO }}
APT_GET: "sudo apt-get"
- if: failure()
name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: testfiles
path: |
./test/golden/unix/GHCupInfo*json
test-arm:
name: Test ARM
needs: "build-arm"
@@ -297,7 +305,7 @@ jobs:
- uses: docker://arm64v8/ubuntu:focal
name: Cleanup (aarch64 linux)
with:
args: rm -rf .ghcup/ cabal/ dist-newstyle/ out/
args: "find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +"
- name: Checkout code
uses: actions/checkout@v3
@@ -331,6 +339,14 @@ jobs:
GHC_VER: ${{ matrix.GHC_VER }}
DISTRO: Ubuntu
- if: failure()
name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: testfiles
path: |
./test/golden/unix/GHCupInfo*json
test-macwin:
name: Test Mac/Win
needs: "build-macwin"
@@ -377,6 +393,22 @@ jobs:
GHC_VER: ${{ matrix.GHC_VER }}
DISTRO: ${{ matrix.DISTRO }}
HOMEBREW_CHANGE_ARCH_TO_ARM: 1
- if: failure() && runner.os == 'Windows'
name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: testfiles
path: |
./test/golden/windows/GHCupInfo*json
- if: failure() && runner.os != 'Windows'
name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: testfiles
path: |
./test/golden/unix/GHCupInfo*json
hls:
name: hls
needs: build-linux

View File

@@ -434,6 +434,7 @@ install' _ (_, ListResult {..}) = do
, BuildFailed
, TagNotFound
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, DirNotEmpty
@@ -632,7 +633,7 @@ getGHCupInfo = do
r <-
flip runReaderT settings
. runE @'[DigestError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
. runE @'[DigestError, ContentLengthError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
$ liftE getDownloadsF
case r of

View File

@@ -420,6 +420,7 @@ hlsCompileOpts =
type GHCEffects = '[ AlreadyInstalled
, BuildFailed
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, GHCupSetError
@@ -443,6 +444,7 @@ type GHCEffects = '[ AlreadyInstalled
type HLSEffects = '[ AlreadyInstalled
, BuildFailed
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, GHCupSetError

View File

@@ -243,6 +243,7 @@ type InstallEffects = '[ AlreadyInstalled
, BuildFailed
, TagNotFound
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, TarDirDoesNotExist
@@ -271,6 +272,7 @@ type InstallGHCEffects = '[ AlreadyInstalled
, BuildFailed
, CopyError
, DigestError
, ContentLengthError
, DirNotEmpty
, DownloadFailed
, FileAlreadyExistsError
@@ -332,7 +334,7 @@ install installCommand settings getAppState' runLogger = case installCommand of
runInstGHC s'{ settings = settings {noVerify = True}} $ do
(v, vi) <- liftE $ fromVersion instVer GHC
liftE $ runBothE' (installGHCBindist
(DownloadInfo uri (Just $ RegexDir "ghc-.*") "")
(DownloadInfo uri (Just $ RegexDir "ghc-.*") "" Nothing)
(_tvVersion v)
(maybe GHCupInternal IsolateDir isolateDir)
forceInstall
@@ -402,7 +404,7 @@ install installCommand settings getAppState' runLogger = case installCommand of
runInstTool s'{ settings = settings { noVerify = True}} $ do
(_tvVersion -> v, vi) <- liftE $ fromVersion instVer Cabal
liftE $ runBothE' (installCabalBindist
(DownloadInfo uri Nothing "")
(DownloadInfo uri Nothing "" Nothing)
v
(maybe GHCupInternal IsolateDir isolateDir)
forceInstall
@@ -452,7 +454,7 @@ install installCommand settings getAppState' runLogger = case installCommand of
(_tvVersion -> v, vi) <- liftE $ fromVersion instVer HLS
-- TODO: support legacy
liftE $ runBothE' (installHLSBindist
(DownloadInfo uri (Just $ RegexDir "haskell-language-server-*") "")
(DownloadInfo uri (if isWindows then Nothing else Just (RegexDir "haskell-language-server-*")) "" Nothing)
v
(maybe GHCupInternal IsolateDir isolateDir)
forceInstall
@@ -501,7 +503,7 @@ install installCommand settings getAppState' runLogger = case installCommand of
runInstTool s'{ settings = settings { noVerify = True}} $ do
(_tvVersion -> v, vi) <- liftE $ fromVersion instVer Stack
liftE $ runBothE' (installStackBindist
(DownloadInfo uri Nothing "")
(DownloadInfo uri Nothing "" Nothing)
v
(maybe GHCupInternal IsolateDir isolateDir)
forceInstall

View File

@@ -153,6 +153,7 @@ type PrefetchEffects = '[ TagNotFound
, NoToolVersionSet
, NoDownload
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, JSONError

View File

@@ -177,6 +177,7 @@ type RunEffects = '[ AlreadyInstalled
, BuildFailed
, TagNotFound
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, TarDirDoesNotExist
@@ -343,6 +344,7 @@ run RunOptions{..} runAppState leanAppstate runLogger = do
, DownloadFailed
, DirNotEmpty
, DigestError
, ContentLengthError
, BuildFailed
, ArchiveResult
, AlreadyInstalled

View File

@@ -88,6 +88,7 @@ upgradeOptsP =
type UpgradeEffects = '[ DigestError
, ContentLengthError
, GPGError
, NoDownload
, NoUpdate

View File

@@ -211,7 +211,7 @@ Report bugs at <https://github.com/haskell/ghcup-hs/issues>|]
ghcupInfo <-
( flip runReaderT leanAppstate
. runE @'[DigestError, GPGError, JSONError , DownloadFailed, FileDoesNotExistError]
. runE @'[DigestError, ContentLengthError, GPGError, JSONError , DownloadFailed, FileDoesNotExistError]
$ liftE getDownloadsF
)
>>= \case

View File

@@ -60,6 +60,29 @@ All you wanted to know about GHCup.
3. handling cabal projects
4. being a stack alternative
## Distribution policies
Like most Linux distros and other distribution channels, GHCup also
follows certain policies. These are as follows:
1. The end-user experience is our primary concern
- using ghcup in CI systems is a first-class use case
2. We strive to collaborate with the maintainers of all the tools we support and to maintain a good relationship with them
3. We may fix build system or other distribution bugs in upstream bindists
- these are always communicated upstream
4. We may even patch source code of supported tools in very rare cases if that is required to ensure that the end-user experience does not break
- we'll first try to upstream any such required patch and demand a new release to avoid downstream patching
- patches will be communicated to the maintainers either way and we'll strive to get their review
- they will also be communicated to the end-user
- they will be uploaded along with the bindist
- we will avoid maintaining long-running downstream patches (currently zero)
5. We may add bindists for platforms that upstream does not support
- this is currently the case for GHC on e.g. Alpine, and possibly FreeBSD in the future
- this is currently also the case for stack on darwin M1
- for unofficial bindists we currently don't guarantee that the test suite passes (this may change in the future)
6. We GPG sign all the GHCup metadata as well as the unofficial bindists
- any trust issue relating to missing checksums or GPG signatures is a bug and is given high priority
## How
Installs a specified GHC version into `~/.ghcup/ghc/<ver>`, and places `ghc-<ver>` symlinks in `~/.ghcup/bin/`.
@@ -75,15 +98,15 @@ cabal-install/HLS/stack are installed in `~/.ghcup/bin/<tool>-<ver>` and have un
## Known users
* CI:
- [Github actions/virtual-environments](https://github.com/actions/virtual-environments)
- [Github haskell/actions/setup](https://github.com/haskell/actions/tree/main/setup)
- [haskell-ci](https://github.com/haskell-CI/haskell-ci)
- [Github actions/virtual-environments](https://github.com/actions/virtual-environments)
- [Github haskell/actions/setup](https://github.com/haskell/actions/tree/main/setup)
- [haskell-ci](https://github.com/haskell-CI/haskell-ci)
* mirrors:
- [sjtug](https://mirror.sjtu.edu.cn/docs/ghcup)
- [sjtug](https://mirror.sjtu.edu.cn/docs/ghcup)
* tools:
- [vscode-haskell](https://github.com/haskell/vscode-haskell)
- [nvim-lsp-installer](https://github.com/williamboman/nvim-lsp-installer)
- [vabal](https://github.com/Franciman/vabal)
- [vscode-haskell](https://github.com/haskell/vscode-haskell)
- [nvim-lsp-installer](https://github.com/williamboman/nvim-lsp-installer)
- [vabal](https://github.com/Franciman/vabal)
## Known problems

View File

@@ -106,6 +106,7 @@ fetchToolBindist :: ( MonadFail m
-> Maybe FilePath
-> Excepts
'[ DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -288,6 +289,7 @@ upgradeGHCup :: ( MonadMask m
-> Excepts
'[ CopyError
, DigestError
, ContentLengthError
, GPGError
, GPGError
, DownloadFailed
@@ -308,7 +310,7 @@ upgradeGHCup mtarget force' fatal = do
dli <- liftE $ getDownloadInfo GHCup latestVer
tmp <- fromGHCupPath <$> lift withGHCupTmpDir
let fn = "ghcup" <> exeExt
p <- liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) tmp (Just fn) False
p <- liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) (_dlCSize dli) tmp (Just fn) False
let destDir = takeDirectory destFile
destFile = fromMaybe (binDir </> fn) mtarget
lift $ logDebug $ "mkdir -p " <> T.pack destDir

View File

@@ -81,6 +81,7 @@ installCabalBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -184,6 +185,7 @@ installCabalBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload

View File

@@ -114,7 +114,7 @@ getDownloadsF :: ( FromJSONKey Tool
, MonadMask m
)
=> Excepts
'[DigestError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
'[DigestError, ContentLengthError, GPGError, JSONError , DownloadFailed , FileDoesNotExistError]
m
GHCupInfo
getDownloadsF = do
@@ -162,7 +162,7 @@ getBase :: ( MonadReader env m
, MonadMask m
)
=> URI
-> Excepts '[DownloadFailed, GPGError, DigestError, JSONError, FileDoesNotExistError] m GHCupInfo
-> Excepts '[DownloadFailed, GPGError, DigestError, ContentLengthError, JSONError, FileDoesNotExistError] m GHCupInfo
getBase uri = do
Settings { noNetwork, downloader, metaMode } <- lift getSettings
@@ -188,7 +188,7 @@ getBase uri = do
liftE
. onE_ (onError actualYaml)
. lEM' @_ @_ @'[JSONError] (\(displayException -> e) -> JSONDecodeError $ unlines [e, "Consider removing " <> actualYaml <> " manually."])
. liftIO
. liftIO
. Y.decodeFileEither
$ actualYaml
where
@@ -233,6 +233,7 @@ getBase uri = do
-> Excepts
'[ DownloadFailed
, DigestError
, ContentLengthError
, GPGError
]
m1
@@ -246,7 +247,7 @@ getBase uri = do
Settings { metaCache } <- lift getSettings
-- for local files, let's short-circuit and ignore access time
if | scheme == "file" -> liftE $ download uri' Nothing Nothing (fromGHCupPath cacheDir) Nothing True
if | scheme == "file" -> liftE $ download uri' Nothing Nothing Nothing (fromGHCupPath cacheDir) Nothing True
| e -> do
accessTime <- fmap utcTimeToPOSIXSeconds $ liftIO $ getAccessTime json_file
let sinceLastAccess = utcTimeToPOSIXSeconds currentTime - accessTime
@@ -262,7 +263,7 @@ getBase uri = do
where
dlWithMod modTime json_file = do
let (dir, fn) = splitFileName json_file
f <- liftE $ download uri' (Just $ over pathL' (<> ".sig") uri') Nothing dir (Just fn) True
f <- liftE $ download uri' (Just $ over pathL' (<> ".sig") uri') Nothing Nothing dir (Just fn) True
liftIO $ setModificationTime f modTime
liftIO $ setAccessTime f modTime
pure f
@@ -328,13 +329,14 @@ download :: ( MonadReader env m
=> URI
-> Maybe URI -- ^ URI for gpg sig
-> Maybe T.Text -- ^ expected hash
-> Maybe Integer -- ^ expected content length
-> FilePath -- ^ destination dir (ignored for file:// scheme)
-> Maybe FilePath -- ^ optional filename
-> Bool -- ^ whether to read and write etags
-> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
download uri gpgUri eDigest dest mfn etags
| scheme == "https" = dl
| scheme == "http" = dl
-> Excepts '[DigestError, ContentLengthError, DownloadFailed, GPGError] m FilePath
download uri gpgUri eDigest eCSize dest mfn etags
| scheme == "https" = liftE dl
| scheme == "http" = liftE dl
| scheme == "file" = do
let destFile' = T.unpack . decUTF8Safe $ view pathL' uri
lift $ logDebug $ "using local file: " <> T.pack destFile'
@@ -355,7 +357,7 @@ download uri gpgUri eDigest dest mfn etags
-- download
flip onException
(lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile baseDestFile))
$ catchAllE @_ @'[GPGError, ProcessError, DownloadFailed, UnsupportedScheme, DigestError] @'[DigestError, DownloadFailed, GPGError]
$ catchAllE @_ @'[GPGError, ProcessError, DownloadFailed, UnsupportedScheme, DigestError, ContentLengthError] @'[DigestError, ContentLengthError, DownloadFailed, GPGError]
(\e' -> do
lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile baseDestFile)
case e' of
@@ -405,19 +407,37 @@ download uri gpgUri eDigest dest mfn etags
CapturedProcess { _stdErr } -> lift $ logDebug $ decUTF8Safe' _stdErr
_ -> pure ()
forM_ eCSize (liftE . flip checkCSize baseDestFile)
forM_ eDigest (liftE . flip checkDigest baseDestFile)
pure baseDestFile
curlDL :: (MonadCatch m, MonadMask m, MonadIO m) => [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlDL :: ( MonadCatch m
, MonadMask m
, MonadIO m
)
=> [String]
-> FilePath
-> URI
-> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
liftE $ lEM @_ @'[ProcessError] $ exec "curl"
(o' ++ ["-fL", "-o", destFileTemp, T.unpack uri']) Nothing Nothing
(o' ++ ["-fL", "-o", destFileTemp, T.unpack uri']
++ maybe [] (\s -> ["--max-filesize", show s]) eCSize
) Nothing Nothing
liftIO $ renameFile destFileTemp destFile
curlEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
=> [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlEtagsDL :: ( MonadReader env m
, HasLog env
, MonadCatch m
, MonadMask m
, MonadIO m
)
=> [String]
-> FilePath
-> URI
-> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
curlEtagsDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
dh <- liftIO $ emptySystemTempFile "curl-header"
@@ -444,7 +464,14 @@ download uri gpgUri eDigest dest mfn etags
lift $ writeEtags destFile (parseEtags headers)
wgetDL :: (MonadCatch m, MonadMask m, MonadIO m) => [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
wgetDL :: ( MonadCatch m
, MonadMask m
, MonadIO m
)
=> [String]
-> FilePath
-> URI
-> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
wgetDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
flip finally (try @_ @SomeException $ rmFile destFileTemp) $ do
@@ -453,7 +480,12 @@ download uri gpgUri eDigest dest mfn etags
liftIO $ renameFile destFileTemp destFile
wgetEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
wgetEtagsDL :: ( MonadReader env m
, HasLog env
, MonadCatch m
, MonadMask m
, MonadIO m
)
=> [String] -> FilePath -> URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m ()
wgetEtagsDL o' destFile (decUTF8Safe . serializeURIRef' -> uri') = do
let destFileTemp = tmpFile destFile
@@ -475,7 +507,10 @@ download uri gpgUri eDigest dest mfn etags
| otherwise -> throwE (NonZeroExit i' "wget" opts)
#if defined(INTERNAL_DOWNLOADER)
internalDL :: (MonadCatch m, MonadMask m, MonadIO m)
internalDL :: ( MonadCatch m
, MonadMask m
, MonadIO m
)
=> FilePath -> URI -> Excepts '[DownloadFailed, UnsupportedScheme] m ()
internalDL destFile uri' = do
let destFileTemp = tmpFile destFile
@@ -485,11 +520,16 @@ download uri gpgUri eDigest dest mfn etags
@'[DownloadFailed]
(\e@(HTTPNotModified _) ->
throwE @_ @'[DownloadFailed] (DownloadFailed (toVariantAt @0 e :: V '[HTTPNotModified])))
$ downloadToFile https host fullPath port destFileTemp mempty
$ downloadToFile https host fullPath port destFileTemp mempty eCSize
liftIO $ renameFile destFileTemp destFile
internalEtagsDL :: (MonadReader env m, HasLog env, MonadCatch m, MonadMask m, MonadIO m)
internalEtagsDL :: ( MonadReader env m
, HasLog env
, MonadCatch m
, MonadMask m
, MonadIO m
)
=> FilePath -> URI -> Excepts '[DownloadFailed, UnsupportedScheme] m ()
internalEtagsDL destFile uri' = do
let destFileTemp = tmpFile destFile
@@ -501,7 +541,7 @@ download uri gpgUri eDigest dest mfn etags
liftE
$ catchE @HTTPNotModified @'[DownloadFailed] @'[] (\(HTTPNotModified etag) -> lift $ writeEtags destFile (pure $ Just etag))
$ do
r <- downloadToFile https host fullPath port destFileTemp addHeaders
r <- downloadToFile https host fullPath port destFileTemp addHeaders eCSize
liftIO $ renameFile destFileTemp destFile
lift $ writeEtags destFile (pure $ decUTF8Safe <$> getHeader r "etag")
#endif
@@ -509,7 +549,7 @@ download uri gpgUri eDigest dest mfn etags
-- Manage to find a file we can write the body into.
getDestFile :: Monad m => URI -> Maybe FilePath -> Excepts '[NoUrlBase] m FilePath
getDestFile uri' mfn' =
getDestFile uri' mfn' =
let path = view pathL' uri'
in case mfn' of
Just fn -> pure (dest </> fn)
@@ -578,14 +618,14 @@ downloadCached :: ( MonadReader env m
)
=> DownloadInfo
-> Maybe FilePath -- ^ optional filename
-> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
-> Excepts '[DigestError, ContentLengthError, DownloadFailed, GPGError] m FilePath
downloadCached dli mfn = do
Settings{ cache } <- lift getSettings
case cache of
True -> downloadCached' dli mfn Nothing
False -> do
tmp <- lift withGHCupTmpDir
liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) (fromGHCupPath tmp) mfn False
liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) (_dlCSize dli) (fromGHCupPath tmp) mfn False
downloadCached' :: ( MonadReader env m
@@ -600,7 +640,7 @@ downloadCached' :: ( MonadReader env m
=> DownloadInfo
-> Maybe FilePath -- ^ optional filename
-> Maybe FilePath -- ^ optional destination dir (default: cacheDir)
-> Excepts '[DigestError , DownloadFailed, GPGError] m FilePath
-> Excepts '[DigestError, ContentLengthError, DownloadFailed, GPGError] m FilePath
downloadCached' dli mfn mDestDir = do
Dirs { cacheDir } <- lift getDirs
let destDir = fromMaybe (fromGHCupPath cacheDir) mDestDir
@@ -609,9 +649,10 @@ downloadCached' dli mfn mDestDir = do
fileExists <- liftIO $ doesFileExist cachfile
if
| fileExists -> do
forM_ (view dlCSize dli) $ \s -> liftE $ checkCSize s cachfile
liftE $ checkDigest (view dlHash dli) cachfile
pure cachfile
| otherwise -> liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) destDir mfn False
| otherwise -> liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) (_dlCSize dli) destDir mfn False
@@ -642,6 +683,25 @@ checkDigest eDigest file = do
cDigest <- throwEither . E.decodeUtf8' . B16.encode . SHA256.hashlazy $ c
when ((cDigest /= eDigest) && verify) $ throwE (DigestError file cDigest eDigest)
checkCSize :: ( MonadReader env m
, HasDirs env
, HasSettings env
, MonadIO m
, MonadThrow m
, HasLog env
)
=> Integer
-> FilePath
-> Excepts '[ContentLengthError] m ()
checkCSize eCSize file = do
Settings{ noVerify } <- lift getSettings
let verify = not noVerify
when verify $ do
let p' = takeFileName file
lift $ logInfo $ "verifying content length of: " <> T.pack p'
cSize <- liftIO $ getFileSize file
when ((eCSize /= cSize) && verify) $ throwE (ContentLengthError (Just file) (Just cSize) eCSize)
-- | Get additional curl args from env. This is an undocumented option.
getCurlOpts :: IO [String]
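
The new `checkCSize` helper above reduces to a plain size comparison on the downloaded file. A minimal, self-contained sketch (assumptions: the `Excepts`/`Settings` plumbing and the `noVerify` short-circuit are elided, and `fail` stands in for throwing `ContentLengthError`):

```haskell
import Control.Monad (when)
import System.Directory (getFileSize)

-- Compare the on-disk size of a downloaded file against the expected
-- content length and abort on mismatch.
verifyContentLength :: Integer   -- ^ expected content length in bytes
                    -> FilePath  -- ^ downloaded file
                    -> IO ()
verifyContentLength expected file = do
  actual <- getFileSize file
  when (actual /= expected) $
    fail ("content length mismatch for " <> file
          <> ": expected " <> show expected
          <> " but got " <> show actual)
```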

View File

@@ -17,14 +17,12 @@ import Control.Exception.Safe
import Control.Monad
import Control.Monad.Reader
import Data.ByteString ( ByteString )
import Data.ByteString.Builder
import Data.CaseInsensitive ( CI, original, mk )
import Data.IORef
import Data.Maybe
import Data.Text.Read
import Haskus.Utils.Variant.Excepts
import Network.Http.Client hiding ( URL )
import Optics
import Prelude hiding ( abs
, readFile
, writeFile
@@ -33,7 +31,6 @@ import System.ProgressBar
import URI.ByteString
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as L
import qualified Data.Map.Strict as M
import qualified System.IO.Streams as Streams
@@ -46,27 +43,6 @@ import qualified System.IO.Streams as Streams
----------------------------
-- | Load the result of this download into memory at once.
downloadBS' :: MonadIO m
=> Bool -- ^ https?
-> ByteString -- ^ host (e.g. "www.example.com")
-> ByteString -- ^ path (e.g. "/my/file") including query
-> Maybe Int -- ^ optional port (e.g. 3000)
-> Excepts
'[ HTTPStatusError
, URIParseError
, UnsupportedScheme
, NoLocationHeader
, TooManyRedirs
]
m
L.ByteString
downloadBS' https host path port = do
bref <- liftIO $ newIORef (mempty :: Builder)
let stepper bs = modifyIORef bref (<> byteString bs)
void $ downloadInternal False https host path port stepper (pure ()) mempty
liftIO (readIORef bref <&> toLazyByteString)
downloadToFile :: (MonadMask m, MonadIO m)
=> Bool -- ^ https?
@@ -75,8 +51,9 @@ downloadToFile :: (MonadMask m, MonadIO m)
-> Maybe Int -- ^ optional port (e.g. 3000)
-> FilePath -- ^ destination file to create and write to
-> M.Map (CI ByteString) ByteString -- ^ additional headers
-> Maybe Integer -- ^ expected content length
-> Excepts '[DownloadFailed, HTTPNotModified] m Response
downloadToFile https host fullPath port destFile addHeaders = do
downloadToFile https host fullPath port destFile addHeaders eCSize = do
let stepper = BS.appendFile destFile
setup = BS.writeFile destFile mempty
catchAllE (\case
@@ -84,7 +61,7 @@ downloadToFile https host fullPath port destFile addHeaders = do
| i == 304
, Just e <- M.lookup (mk "etag") headers -> throwE $ HTTPNotModified (decUTF8Safe e)
v -> throwE $ DownloadFailed v
) $ downloadInternal True https host fullPath port stepper setup addHeaders
) $ downloadInternal True https host fullPath port stepper setup addHeaders eCSize
downloadInternal :: MonadIO m
@@ -96,19 +73,21 @@ downloadInternal :: MonadIO m
-> (ByteString -> IO a) -- ^ the consuming step function
-> IO a -- ^ setup action
-> M.Map (CI ByteString) ByteString -- ^ additional headers
-> Maybe Integer
-> Excepts
'[ HTTPStatusError
, URIParseError
, UnsupportedScheme
, NoLocationHeader
, TooManyRedirs
, ContentLengthError
]
m
Response
downloadInternal = go (5 :: Int)
where
go redirs progressBar https host path port consumer setup addHeaders = do
go redirs progressBar https host path port consumer setup addHeaders eCSize = do
r <- liftIO $ withConnection' https host port action
veitherToExcepts r >>= \case
Right r' ->
@@ -138,25 +117,39 @@ downloadInternal = go (5 :: Int)
followRedirectURL bs = case parseURI strictURIParserOptions bs of
Right uri' -> do
(https', host', fullPath', port') <- liftE $ uriToQuadruple uri'
go (redirs - 1) progressBar https' host' fullPath' port' consumer setup addHeaders
go (redirs - 1) progressBar https' host' fullPath' port' consumer setup addHeaders eCSize
Left e -> throwE e
downloadStream r i' = do
void setup
let size = case getHeader r "Content-Length" of
Just x' -> case decimal $ decUTF8Safe x' of
Left _ -> 0
Right (r', _) -> r'
Nothing -> 0
Left _ -> Nothing
Right (r', _) -> Just r'
Nothing -> Nothing
(mpb :: Maybe (ProgressBar ())) <- if progressBar
then Just <$> newProgressBar defStyle 10 (Progress 0 size ())
else pure Nothing
forM_ size $ \s -> forM_ eCSize $ \es -> when (es /= s) $ throwIO (ContentLengthError Nothing (Just s) es)
let size' = eCSize <|> size
(mpb :: Maybe (ProgressBar ())) <- case (progressBar, size') of
(True, Just size'') -> Just <$> newProgressBar defStyle 10 (Progress 0 (fromInteger size'') ())
_ -> pure Nothing
ior <- liftIO $ newIORef 0
outStream <- liftIO $ Streams.makeOutputStream
(\case
Just bs -> do
forM_ mpb $ \pb -> incProgress pb (BS.length bs)
let len = BS.length bs
forM_ mpb $ \pb -> incProgress pb len
-- check we don't exceed size
forM_ size' $ \s -> do
cs <- readIORef ior
when ((cs + toInteger len) > s) $ throwIO (ContentLengthError Nothing (Just (cs + toInteger len)) s)
modifyIORef ior (+ toInteger len)
void $ consumer bs
Nothing -> pure ()
)
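
The guard added to the output stream above amounts to keeping a running byte count in an `IORef` and aborting once it would exceed the expected `Content-Length`. A self-contained sketch (assumptions: plain `IO`, no progress bar or io-streams wiring, and `fail` standing in for throwing `ContentLengthError`):

```haskell
import Control.Monad (forM_, when)
import Data.IORef (newIORef, readIORef, modifyIORef)
import qualified Data.ByteString as BS

-- Wrap a chunk consumer so the running byte total is tracked and the
-- download is aborted as soon as it exceeds the expected content length.
mkGuardedConsumer :: Maybe Integer               -- ^ expected content length, if known
                  -> (BS.ByteString -> IO ())    -- ^ real consumer (e.g. append to file)
                  -> IO (BS.ByteString -> IO ())
mkGuardedConsumer eCSize consumer = do
  ior <- newIORef (0 :: Integer)
  pure $ \bs -> do
    let len = toInteger (BS.length bs)
    forM_ eCSize $ \s -> do
      received <- readIORef ior
      when (received + len > s) $
        fail ("expected " <> show s <> " bytes but already received "
              <> show (received + len))
    modifyIORef ior (+ len)
    consumer bs
```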

View File

@@ -219,6 +219,29 @@ instance Pretty DigestError where
<+> text (T.unpack expectedDigest) <+> text "but got" <+> pPrint currentDigest <+> text
"\nConsider removing the file in case it's cached and try again."
-- | File content length verification failed.
data ContentLengthError = ContentLengthError (Maybe FilePath) (Maybe Integer) Integer
deriving Show
instance Pretty ContentLengthError where
pPrint (ContentLengthError Nothing Nothing expectedSize) =
text "Content length exceeded expected size:"
<+> text (show expectedSize)
<+> text "\nConsider removing the file in case it's cached and try again."
pPrint (ContentLengthError Nothing (Just currentSize) expectedSize) =
text "Content length error. Expected"
<+> text (show expectedSize) <+> text "but got" <+> pPrint currentSize <+> text
"\nConsider removing the file in case it's cached and try again."
pPrint (ContentLengthError (Just fp) (Just currentSize) expectedSize) =
text "Content length error for" <+> text (fp <> ": expected")
<+> text (show expectedSize) <+> text "but got" <+> pPrint currentSize <+> text
"\nConsider removing the file in case it's cached and try again."
pPrint (ContentLengthError (Just fp) Nothing expectedSize) =
text "Content length error for" <+> text (fp <> ": expected")
<+> text (show expectedSize) <+> text "\nConsider removing the file in case it's cached and try again."
instance Exception ContentLengthError
-- | GPG verification failed.
data GPGError = forall xs . (ToVariantMaybe DownloadFailed xs, PopVariant DownloadFailed xs, Show (V xs), Pretty (V xs)) => GPGError (V xs)
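
For illustration, the new error can be constructed and rendered through its `Pretty` instance roughly as follows (assumptions: ghcup is available as a library dependency, the module is named `GHCup.Errors`, and the file name and sizes are made up):

```haskell
import GHCup.Errors (ContentLengthError (..))
import Text.PrettyPrint.HughesPJClass (prettyShow)

-- Prints something like:
--   Content length error for ghc-9.2.4.tar.xz: expected 999999 but got 123456
--   Consider removing the file in case it's cached and try again.
main :: IO ()
main = putStrLn $
  prettyShow (ContentLengthError (Just "ghc-9.2.4.tar.xz") (Just 123456) 999999)
```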

View File

@@ -109,6 +109,7 @@ fetchGHCSrc :: ( MonadFail m
-> Maybe FilePath
-> Excepts
'[ DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -152,6 +153,7 @@ installGHCBindist :: ( MonadFail m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -357,6 +359,7 @@ installGHCBin :: ( MonadFail m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -628,6 +631,7 @@ compileGHC :: ( MonadMask m
'[ AlreadyInstalled
, BuildFailed
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, GHCupSetError
@@ -684,7 +688,7 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
-- download source tarball
tmpDownload <- lift withGHCupTmpDir
tmpUnpack <- lift mkGhcupTmpDir
tar <- liftE $ download uri Nothing Nothing (fromGHCupPath tmpDownload) Nothing False
tar <- liftE $ download uri Nothing Nothing Nothing (fromGHCupPath tmpDownload) Nothing False
(bf, tver) <- liftE $ cleanUpOnError @'[UnknownArchive, ArchiveResult, ProcessError] tmpUnpack $ do
liftE $ unpackToDir (fromGHCupPath tmpUnpack) tar
let regex = [s|^(.*/)*boot$|] :: B.ByteString
@@ -706,7 +710,7 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
GitDist GitBranch{..} -> do
tmpUnpack <- lift mkGhcupTmpDir
let git args = execLogged "git" ("--no-pager":args) (Just $ fromGHCupPath tmpUnpack) "git" Nothing
tver <- reThrowAll @_ @'[PatchFailed, ProcessError, NotFoundInPATH, DigestError, DownloadFailed, GPGError] DownloadFailed $ do
tver <- reThrowAll @_ @'[PatchFailed, ProcessError, NotFoundInPATH, DigestError, ContentLengthError, DownloadFailed, GPGError] DownloadFailed $ do
let rep = fromMaybe "https://gitlab.haskell.org/ghc/ghc.git" repo
lift $ logInfo $ "Fetching git repo " <> T.pack rep <> " at ref " <> T.pack ref <> " (this may take a while)"
lEM $ git [ "init" ]
@@ -716,7 +720,7 @@ compileGHC targetGhc ov bstrap jobs mbuildConfig patches aargs buildFlavour hadr
, fromString rep ]
-- figure out if we can do a shallow clone
remoteBranches <- catchE @ProcessError @'[PatchFailed, ProcessError, NotFoundInPATH, DigestError, DownloadFailed, GPGError] @'[PatchFailed, NotFoundInPATH, DigestError, DownloadFailed, GPGError] (\(_ :: ProcessError) -> pure [])
remoteBranches <- catchE @ProcessError @'[PatchFailed, ProcessError, NotFoundInPATH, DigestError, ContentLengthError, DownloadFailed, GPGError] @'[PatchFailed, NotFoundInPATH, DigestError, DownloadFailed, GPGError] (\(_ :: ProcessError) -> pure [])
$ fmap processBranches $ gitOut ["ls-remote", "--heads", "origin"] (fromGHCupPath tmpUnpack)
let shallow_clone
| isCommitHash ref = True

View File

@@ -105,6 +105,7 @@ installHLSBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -297,6 +298,7 @@ installHLSBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -344,6 +346,7 @@ compileHLS :: ( MonadMask m
, GPGError
, DownloadFailed
, DigestError
, ContentLengthError
, UnknownArchive
, TarDirDoesNotExist
, ArchiveResult
@@ -401,7 +404,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal upda
-- download source tarball
tmpDownload <- lift withGHCupTmpDir
tmpUnpack <- lift mkGhcupTmpDir
tar <- liftE $ download uri Nothing Nothing (fromGHCupPath tmpDownload) Nothing False
tar <- liftE $ download uri Nothing Nothing Nothing (fromGHCupPath tmpDownload) Nothing False
(cf, tver) <- liftE $ cleanUpOnError tmpUnpack $ do
unpackToDir (fromGHCupPath tmpUnpack) tar
let regex = [s|^(.*/)*haskell-language-server\.cabal$|] :: B.ByteString
@@ -481,7 +484,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal upda
liftE $ runBuildAction
tmpUnpack
(reThrowAll @_ @'[GPGError, DownloadFailed, DigestError, PatchFailed, ProcessError, FileAlreadyExistsError, CopyError] @'[BuildFailed] (BuildFailed $ fromGHCupPath workdir) $ do
(reThrowAll @_ @'[GPGError, DownloadFailed, DigestError, ContentLengthError, PatchFailed, ProcessError, FileAlreadyExistsError, CopyError] @'[BuildFailed] (BuildFailed $ fromGHCupPath workdir) $ do
let tmpInstallDir = fromGHCupPath workdir </> "out"
liftIO $ createDirRecursive' tmpInstallDir
@@ -497,7 +500,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal upda
| otherwise -> pure (takeFileName cp)
Just (Right uri) -> do
tmpUnpack' <- lift withGHCupTmpDir
cp <- liftE $ download uri Nothing Nothing (fromGHCupPath tmpUnpack') (Just "cabal.project") False
cp <- liftE $ download uri Nothing Nothing Nothing (fromGHCupPath tmpUnpack') (Just "cabal.project") False
copyFileE cp (fromGHCupPath workdir </> "cabal.project") False
pure "cabal.project"
Nothing
@@ -511,7 +514,7 @@ compileHLS targetHLS ghcs jobs ov installDir cabalProject cabalProjectLocal upda
| otherwise -> pure "cabal.project"
forM_ cabalProjectLocal $ \uri -> do
tmpUnpack' <- lift withGHCupTmpDir
cpl <- liftE $ download uri Nothing Nothing (fromGHCupPath tmpUnpack') (Just (cp <.> "local")) False
cpl <- liftE $ download uri Nothing Nothing Nothing (fromGHCupPath tmpUnpack') (Just (cp <.> "local")) False
copyFileE cpl (fromGHCupPath workdir </> cp <.> "local") False
artifacts <- forM (sort ghcs) $ \ghc -> do
let ghcInstallDir = tmpInstallDir </> T.unpack (prettyVer ghc)

View File

@@ -48,6 +48,7 @@ import Streamly.Internal.Data.Unfold.Type
import qualified Streamly.Internal.Data.Unfold as U
import Streamly.Internal.Control.Concurrent ( withRunInIO )
import Streamly.Internal.Data.IOFinalizer ( newIOFinalizer, runIOFinalizer )
import GHC.IO.Exception (IOException(ioe_type), IOErrorType (..))
-- | On unix, we can use symlinks, so we just get the
@@ -116,8 +117,18 @@ copyFile from to fail' = do
let dflags = [ FD.oNofollow
, if fail' then FD.oExcl else FD.oTrunc
]
let openFdHandle' = openFdHandle to SPI.WriteOnly dflags $ Just sourceFileMode
bracket
(openFdHandle to SPI.WriteOnly dflags $ Just sourceFileMode)
(handleIO (\e -> if
-- if we copy from regular file to symlink, we need
-- to delete the symlink
| ioe_type e == InvalidArgument
, not fail' -> do
removeLink to
openFdHandle'
| otherwise -> throwIO e
)
openFdHandle')
(hClose . snd)
$ \(_, tH) -> do
hSetBinaryMode fH True

View File

@@ -82,6 +82,7 @@ installStackBin :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload
@@ -120,6 +121,7 @@ installStackBindist :: ( MonadMask m
'[ AlreadyInstalled
, CopyError
, DigestError
, ContentLengthError
, GPGError
, DownloadFailed
, NoDownload

View File

@@ -262,6 +262,7 @@ data DownloadInfo = DownloadInfo
{ _dlUri :: URI
, _dlSubdir :: Maybe TarDir
, _dlHash :: Text
, _dlCSize :: Maybe Integer
}
deriving (Eq, Ord, GHC.Generic, Show)
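
A value of the extended record might be built like this sketch (assumptions: the module name `GHCup.Types`, the use of `uri-bytestring`'s `parseURI`, and an illustrative URL, hash and size):

```haskell
{-# LANGUAGE OverloadedStrings #-}

import GHCup.Types (DownloadInfo (..))
import URI.ByteString (parseURI, strictURIParserOptions)

exampleDlInfo :: DownloadInfo
exampleDlInfo =
  case parseURI strictURIParserOptions "https://downloads.example.org/ghc.tar.xz" of
    Left err  -> error (show err)
    Right uri -> DownloadInfo
      { _dlUri    = uri
      , _dlSubdir = Nothing
      , _dlHash   = "0123abcd..."   -- expected SHA256 digest (illustrative)
      , _dlCSize  = Just 123456789  -- new field: expected size in bytes, if known
      }
```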

View File

@@ -1035,13 +1035,13 @@ applyAnyPatch :: ( MonadReader env m
, MonadIO m)
=> Maybe (Either FilePath [URI])
-> FilePath
-> Excepts '[PatchFailed, DownloadFailed, DigestError, GPGError] m ()
-> Excepts '[PatchFailed, DownloadFailed, DigestError, ContentLengthError, GPGError] m ()
applyAnyPatch Nothing _ = pure ()
applyAnyPatch (Just (Left pdir)) workdir = liftE $ applyPatches pdir workdir
applyAnyPatch (Just (Right uris)) workdir = do
tmpUnpack <- fromGHCupPath <$> lift withGHCupTmpDir
forM_ uris $ \uri -> do
patch <- liftE $ download uri Nothing Nothing tmpUnpack Nothing False
patch <- liftE $ download uri Nothing Nothing Nothing tmpUnpack Nothing False
liftE $ applyPatch patch workdir
@@ -1172,7 +1172,7 @@ ensureGlobalTools :: ( MonadMask m
, MonadUnliftIO m
, MonadFail m
)
=> Excepts '[GPGError, DigestError , DownloadFailed, NoDownload] m ()
=> Excepts '[GPGError, DigestError, ContentLengthError, DownloadFailed, NoDownload] m ()
ensureGlobalTools
| isWindows = do
(GHCupInfo _ _ gTools) <- lift getGHCupInfo
@@ -1184,8 +1184,8 @@ ensureGlobalTools
lift $ logWarn "Digest doesn't match, redownloading gs.exe..."
lift $ logDebug ("rm -f " <> T.pack (fromGHCupPath (cacheDir dirs) </> "gs.exe"))
lift $ hideError doesNotExistErrorType $ recycleFile (fromGHCupPath (cacheDir dirs) </> "gs.exe")
liftE @'[GPGError, DigestError , DownloadFailed] $ dl
) `catchE` liftE @'[GPGError, DigestError , DownloadFailed] dl
liftE @'[GPGError, DigestError, ContentLengthError, DownloadFailed] $ dl
) `catchE` liftE @'[GPGError, DigestError, ContentLengthError, DownloadFailed] dl
| otherwise = pure ()

View File

@@ -465,15 +465,22 @@ withGHCupTmpDir :: ( MonadReader env m
, MonadMask m
, MonadIO m)
=> m GHCupPath
withGHCupTmpDir = snd <$> withRunInIO (\run ->
run
$ allocate
(run mkGhcupTmpDir)
(\fp ->
handleIO (\e -> run
$ logDebug ("Resource cleanup failed for " <> T.pack (fromGHCupPath fp) <> ", error was: " <> T.pack (displayException e)))
. removePathForcibly
$ fp))
withGHCupTmpDir = do
Settings{keepDirs} <- getSettings
snd <$> withRunInIO (\run ->
run
$ allocate
(run mkGhcupTmpDir)
(\fp -> if -- we don't know whether there was a failure, so can only
-- decide for 'Always'
| keepDirs == Always -> pure ()
| otherwise -> handleIO (\e -> run
$ logDebug ("Resource cleanup failed for "
<> T.pack (fromGHCupPath fp)
<> ", error was: "
<> T.pack (displayException e)))
. removePathForcibly
$ fp))
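
The behavioural change above (skip temp-dir cleanup when keep-dirs is configured to always) can be shown with a simplified sketch (assumptions: `bracket` and the `temporary` package instead of resourcet's `allocate`, plus a local `KeepDirs` type mirroring ghcup's setting):

```haskell
import Control.Exception (bracket)
import System.Directory (removePathForcibly)
import System.IO.Temp (createTempDirectory, getCanonicalTemporaryDirectory)

data KeepDirs = Always | Errors | Never deriving Eq

withTmpDir :: KeepDirs -> (FilePath -> IO a) -> IO a
withTmpDir keepDirs = bracket
  (getCanonicalTemporaryDirectory >>= \tmp -> createTempDirectory tmp "ghcup-tmp")
  (\fp -> if keepDirs == Always
            then pure ()                 -- the cleanup handler can't tell success
                                         -- from failure, so only 'Always' applies
            else removePathForcibly fp)
```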

File diff suppressed because it is too large

File diff suppressed because it is too large