Compare commits: 1304e22235...issues/874 (1 commit: 03253c27e4)

@@ -824,7 +824,7 @@ checkForUpdates = do
logGHCPostRm :: (MonadReader env m, HasLog env, MonadIO m) => GHCTargetVersion -> m ()
logGHCPostRm ghcVer = do
  cabalStore <- liftIO $ handleIO (\_ -> if isWindows then pure "C:\\cabal\\store" else pure "~/.cabal/store or ~/.local/state/cabal/store")
  cabalStore <- liftIO $ handleIO (\_ -> if isWindows then pure "C:\\cabal\\store" else pure "~/.cabal/store")
    (runIdentity . CC.cfgStoreDir <$> CC.readConfig)
  let storeGhcDir = cabalStore </> ("ghc-" <> T.unpack (prettyVer $ _tvVersion ghcVer))
  logInfo $ T.pack $ "After removing GHC you might also want to clean up your cabal store at: " <> storeGhcDir

@@ -43,12 +43,6 @@ All of the following are valid arguments to `ghcup install ghc`:

If the argument is omitted, the default is `recommended`.

Other tags include:

- `prerelease`: a prerelease version
- `latest-prerelease`: the latest prerelease version
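
As a quick illustration of the tag arguments above (a hypothetical shell session, not part of this diff; `latest-prerelease` additionally assumes the prerelease channel has been added):

```sh
# same as plain `ghcup install ghc`, since `recommended` is the default
ghcup install ghc recommended
# grab the newest prerelease instead
ghcup install ghc latest-prerelease
```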

## Manpages

For man pages to work you need [man-db](http://man-db.nongnu.org/) as your `man` provider, then issue `man ghc`. Manpages only work for the currently set ghc.

@@ -209,6 +203,34 @@ url-source:
  - "https://raw.githubusercontent.com/haskell/ghcup-metadata/master/ghcup-prereleases-0.0.7.yaml"
```

### Nightlies

Nightlies are just another release channel. Currently, only GHC supports nightlies, which are binary releases
that are built every night from `master`.

To add the nightly channel, run:

```sh
ghcup config add-release-channel https://ghc.gitlab.haskell.org/ghcup-metadata/ghcup-nightlies-0.0.7.yaml
```

To list all nightlies from 2023, run:

```sh
ghcup list --show-nightly --tool=ghc --since=2023-01-01
```

Ways to install a nightly:

```sh
# by date
ghcup install ghc 2023-06-20
# by version
ghcup install ghc 9.7.20230619
# by tag
ghcup install ghc latest-nightly
```
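
Once installed, a nightly is activated like any other version (a sketch, not part of this diff; the version string matches the example above):

```sh
ghcup set ghc 9.7.20230619
ghc --version
```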

## Stack integration

Stack manages GHC versions internally by default. In order to make it use ghcup installed

@@ -456,48 +478,8 @@ variables and, in the case of Windows, parameters to tweak the script behavior.

### github workflows

On github workflows GHCup itself is pre-installed on all platforms, but may use non-standard install locations.
Here's an example workflow with a GHC matrix:

```yaml
jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: true
      matrix:
        os: [ubuntu-22.04, macOS-latest]
        ghc: ['9.6', '9.4', '9.2', '9.0', '8.10', '8.8', '8.6']
    steps:
      - uses: actions/checkout@v3
      - name: Setup toolchain
        run: |
          ghcup install cabal --set recommended
          ghcup install ghc --set ${{ matrix.ghc }}
      - name: Build
        run: |
          cabal update
          cabal test all --test-show-details=direct

  i386:
    runs-on: ubuntu-latest
    container:
      image: i386/ubuntu:bionic
    steps:
      - name: Install GHCup in container
        run: |
          apt-get update -y
          apt-get install -y autoconf build-essential zlib1g-dev libgmp-dev curl
          # we just go with recommended versions of cabal and GHC
          curl --proto '=https' --tlsv1.2 -sSf https://get-ghcup.haskell.org | BOOTSTRAP_HASKELL_NONINTERACTIVE=1 BOOTSTRAP_HASKELL_INSTALL_NO_STACK=1 sh
      - uses: actions/checkout@v1
      - name: Test
        run: |
          # in containers we need to fix PATH
          source ~/.ghcup/env
          cabal update
          cabal test all --test-show-details=direct
```
On github workflows you can use [https://github.com/haskell/actions/](https://github.com/haskell/actions/).
GHCup itself is also pre-installed on all platforms, but may use non-standard install locations.

## GPG verification

@@ -507,10 +489,9 @@ this is cryptographically secure.
First, obtain the gpg keys:

```sh
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys 7D1E8AFD1D4A16D71FADA2F2CCC85C0E40C06A8C
gpg --batch --keyserver keys.openpgp.org --recv-keys 7D1E8AFD1D4A16D71FADA2F2CCC85C0E40C06A8C
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys FE5AB6C91FEA597C3B31180B73EDE9E8CFBAEF01
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys 88B57FCF7DB53B4DB3BFA4B1588764FBE22D19C4
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys EAF2A9A722C0C96F2B431CA511AAD8CEDEE0CAEF
```

Then verify the gpg key in one of these ways:
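
The specific verification methods follow in the manual, outside this hunk's context. As a sketch of the step after the key is trusted, GHCup's own signature checking can then be switched on (assuming a ghcup version that supports the `gpg-setting` config key):

```sh
ghcup config set gpg-setting GPGStrict
```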

@@ -38,78 +38,49 @@ Also see [tags and shortcuts](../guide/#tags-and-shortcuts) for more information

### Linux Debian

#### Generic

The following distro packages are required: `build-essential curl libffi-dev libffi6 libgmp-dev libgmp10 libncurses-dev libncurses5 libtinfo5`

#### Version >= 11

The following distro packages are required: `build-essential curl libffi-dev libffi7 libgmp-dev libgmp10 libncurses-dev libncurses5 libtinfo5`
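
For Debian 11 or later, for example, the list above maps to a single apt call (a sketch, not part of this diff; assumes sudo):

```sh
sudo apt-get update
sudo apt-get install -y build-essential curl libffi-dev libffi7 libgmp-dev libgmp10 libncurses-dev libncurses5 libtinfo5
```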

### Linux Ubuntu

#### Generic

The following distro packages are required: `build-essential curl libffi-dev libffi6 libgmp-dev libgmp10 libncurses-dev libncurses5 libtinfo5`

#### Version >= 20.04 && < 20.10

The following distro packages are required: `build-essential curl libffi-dev libffi7 libgmp-dev libgmp10 libncurses-dev libncurses5 libtinfo5`

#### Version >= 20.10

The following distro packages are required: `build-essential curl libffi-dev libffi8ubuntu1 libgmp-dev libgmp10 libncurses-dev libncurses5 libtinfo5`

### Linux Fedora

#### Generic

The following distro packages are required: `gcc gcc-c++ gmp gmp-devel make ncurses ncurses-compat-libs xz perl`

### Linux Mageia

The following distro packages are required: `curl gcc gcc-c++ gmp libffi-devel libffi7 libgmp-devel libgmp10 make libncurses-devel libncurses5 xz perl`

### Linux CentOS

#### Generic

The following distro packages are required: `gcc gcc-c++ gmp gmp-devel make ncurses ncurses-compat-libs xz perl`

#### Version >= 7 && < 8

The following distro packages are required: `gcc gcc-c++ gmp gmp-devel make ncurses xz perl`

### Linux Alpine

#### Generic

The following distro packages are required: `binutils-gold curl gcc g++ gmp-dev libc-dev libffi-dev make musl-dev ncurses-dev perl tar xz`

### Linux VoidLinux

The following distro packages are required: `gcc gmp curl coreutils xz ncurses make ncurses-libtinfo-libs perl tar`

### Linux (generic)

#### Generic

You need the following packages: curl g++ gcc gmp make ncurses realpath xz-utils. Consult your distro documentation on the exact names of those packages.

### Darwin

#### Generic

On OS X, in the course of running ghcup you will be given a dialog box to install the command line tools. Accept and the requirements will be installed for you. You will then need to run the command again.
On Darwin M1 you might also need a working llvm installed (e.g. via brew) and have the toolchain exposed in PATH.
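
A minimal sketch of that brew-based setup (not part of this diff; the LLVM version is illustrative, pick one supported by your GHC release):

```sh
brew install llvm@13
export PATH="$(brew --prefix llvm@13)/bin:$PATH"
```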

### FreeBSD

#### Generic

The following distro packages are required: `curl gcc gmp gmake ncurses perl5 libffi libiconv`

Notice that only FreeBSD 13.x is supported. If the installation fails, complaining about `libncursesw.8.so`, you will need to install FreeBSD 12 compat package first, for example, `pkg install misc/compat12x`.

### Windows

#### Generic

On Windows, msys2 should already have been set up during the installation, so most users should just proceed. If you are installing manually, make sure to have a working mingw64 toolchain and shell.

## Next steps

@@ -296,14 +267,7 @@ Lower availability of bindists. Stack and HLS binaries are experimental.
Download the binary for your platform at [https://downloads.haskell.org/~ghcup/](https://downloads.haskell.org/~ghcup/)
and place it into your `PATH` anywhere.
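
A sketch of that manual install (not part of this diff; the file name below is a placeholder, pick the binary matching your platform and version from the directory listing):

```sh
curl -o ~/.local/bin/ghcup 'https://downloads.haskell.org/~ghcup/<version>/<platform>-ghcup-<version>'
chmod +x ~/.local/bin/ghcup
```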

If you want to GPG verify the binaries, import the following keys first:

```sh
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys 7D1E8AFD1D4A16D71FADA2F2CCC85C0E40C06A8C
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys FE5AB6C91FEA597C3B31180B73EDE9E8CFBAEF01
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys 88B57FCF7DB53B4DB3BFA4B1588764FBE22D19C4
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys EAF2A9A722C0C96F2B431CA511AAD8CEDEE0CAEF
```
If you want to GPG verify the binaries, import the following keys first: `7D1E8AFD1D4A16D71FADA2F2CCC85C0E40C06A8C` and `FE5AB6C91FEA597C3B31180B73EDE9E8CFBAEF01`.

Then adjust your `PATH` in `~/.bashrc` (or similar, depending on your shell) like so:
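
The snippet itself sits outside this hunk's context; a typical version (a sketch, assuming the default `~/.ghcup` and `~/.cabal` install locations) looks like:

```sh
export PATH="$HOME/.cabal/bin:$HOME/.ghcup/bin:$PATH"
```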

ghcup.cabal
@@ -25,10 +25,10 @@ extra-source-files:
  cbits/dirutils.h
  data/build_mk/cross
  data/build_mk/default
  test/ghcup-test/data/dir/.keep
  test/ghcup-test/data/file
  test/ghcup-test/golden/unix/GHCupInfo.json
  test/ghcup-test/golden/windows/GHCupInfo.json
  test/data/dir/.keep
  test/data/file
  test/golden/unix/GHCupInfo.json
  test/golden/windows/GHCupInfo.json

source-repository head
  type: git
@@ -236,7 +236,7 @@ library

  if (flag(tui) && !os(windows))
    cpp-options: -DBRICK
    build-depends: vty ^>=5.39
    build-depends: vty ^>=5.37

library ghcup-optparse
  import: app-common-depends
@@ -261,7 +261,7 @@ library ghcup-optparse
    GHCup.OptParse.Upgrade
    GHCup.OptParse.Whereis

  hs-source-dirs: lib-opt
  hs-source-dirs: app/ghcup
  default-language: Haskell2010
  default-extensions:
    LambdaCase
@@ -284,6 +284,12 @@ library ghcup-optparse

  if (flag(tui) && !os(windows))
    cpp-options: -DBRICK
    other-modules: BrickMain
    build-depends:
      , brick ^>=1.5
      , transformers ^>=0.5
      , unix ^>=2.7
      , vty ^>=5.37

  if os(windows)
    cpp-options: -DIS_WINDOWS
@@ -294,6 +300,26 @@ library ghcup-optparse
executable ghcup
  import: app-common-depends
  main-is: Main.hs
  other-modules:
    GHCup.OptParse
    GHCup.OptParse.ChangeLog
    GHCup.OptParse.Common
    GHCup.OptParse.Compile
    GHCup.OptParse.Config
    GHCup.OptParse.DInfo
    GHCup.OptParse.GC
    GHCup.OptParse.Install
    GHCup.OptParse.List
    GHCup.OptParse.Nuke
    GHCup.OptParse.Prefetch
    GHCup.OptParse.Rm
    GHCup.OptParse.Run
    GHCup.OptParse.Set
    GHCup.OptParse.Test
    GHCup.OptParse.ToolRequirements
    GHCup.OptParse.UnSet
    GHCup.OptParse.Upgrade
    GHCup.OptParse.Whereis

  hs-source-dirs: app/ghcup
  default-language: Haskell2010
@@ -325,7 +351,7 @@ executable ghcup
      , brick ^>=1.5
      , transformers ^>=0.5
      , unix ^>=2.7
      , vty ^>=5.39
      , vty ^>=5.37

  if os(windows)
    cpp-options: -DIS_WINDOWS

@@ -246,7 +246,7 @@ getBase uri = do
  Settings { metaCache } <- lift getSettings

  -- for local files, let's short-circuit and ignore access time
  if | scheme == "file" -> liftE $ download uri' Nothing Nothing Nothing (fromGHCupPath cacheDir) Nothing True
  if | scheme == "file" -> liftE $ download uri' (Just $ over pathL' (<> ".sig") uri') Nothing Nothing (fromGHCupPath cacheDir) Nothing True
     | e -> do
         accessTime <- fmap utcTimeToPOSIXSeconds $ liftIO $ getAccessTime json_file
         let sinceLastAccess = utcTimeToPOSIXSeconds currentTime - accessTime
@@ -352,15 +352,20 @@ download :: ( MonadReader env m
download rawUri gpgUri eDigest eCSize dest mfn etags
  | scheme == "https" = liftE dl
  | scheme == "http" = liftE dl
  | scheme == "file"
  , Just s <- gpgScheme
  , s /= "file" = throwIO $ userError $ "gpg scheme does not match base file scheme: " <> (T.unpack . decUTF8Safe $ s)
  | scheme == "file" = do
      Settings{ gpgSetting } <- lift getSettings
      let destFile' = T.unpack . decUTF8Safe $ view pathL' rawUri
      lift $ logDebug $ "using local file: " <> T.pack destFile'
      forM_ eDigest (liftE . flip checkDigest destFile')
      liftE $ verify gpgSetting destFile' (pure . T.unpack . decUTF8Safe . view pathL')
      pure destFile'
  | otherwise = throwE $ DownloadFailed (variantFromValue UnsupportedScheme)

 where
  scheme = view (uriSchemeL' % schemeBSL') rawUri
  scheme = view (uriSchemeL' % schemeBSL') rawUri
  gpgScheme = view (uriSchemeL' % schemeBSL') <$> gpgUri
  dl = do
    Settings{ mirrors } <- lift getSettings
    let uri = applyMirrors mirrors rawUri
@@ -402,30 +407,14 @@ download rawUri gpgUri eDigest eCSize dest mfn etags
      else pure (\fp -> liftE . internalDL fp)
#endif
    liftE $ downloadAction baseDestFile uri
    case (gpgUri, gpgSetting) of
      (_, GPGNone) -> pure ()
      (Just gpgUri', _) -> do
        gpgDestFile <- liftE . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed $ getDestFile gpgUri' Nothing
        liftE $ flip onException
          (lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile gpgDestFile))
          $ catchAllE @_ @'[GPGError, ProcessError, UnsupportedScheme, DownloadFailed] @'[GPGError]
              (\e -> if gpgSetting == GPGStrict then throwE (GPGError e) else lift $ logWarn $ T.pack (prettyHFError (GPGError e))
              ) $ do
                o' <- liftIO getGpgOpts
                lift $ logDebug $ "downloading: " <> (decUTF8Safe . serializeURIRef') gpgUri' <> " as file " <> T.pack gpgDestFile
                liftE $ downloadAction gpgDestFile gpgUri'
                lift $ logInfo $ "verifying signature of: " <> T.pack baseDestFile
                let args = o' ++ ["--batch", "--verify", "--quiet", "--no-tty", gpgDestFile, baseDestFile]
                cp <- lift $ executeOut "gpg" args Nothing
                case cp of
                  CapturedProcess { _exitCode = ExitFailure i, _stdErr } -> do
                    lift $ logDebug $ decUTF8Safe' _stdErr
                    throwE (GPGError @'[ProcessError] (V (NonZeroExit i "gpg" args)))
                  CapturedProcess { _stdErr } -> lift $ logDebug $ decUTF8Safe' _stdErr
      _ -> pure ()

    forM_ eCSize (liftE . flip checkCSize baseDestFile)
    forM_ eDigest (liftE . flip checkDigest baseDestFile)
    liftE $ verify gpgSetting baseDestFile
      (\uri' -> do
        gpgDestFile <- liftE . reThrowAll @_ @_ @'[DownloadFailed] DownloadFailed $ getDestFile uri' Nothing
        lift $ logDebug $ "downloading: " <> (decUTF8Safe . serializeURIRef') uri' <> " as file " <> T.pack gpgDestFile
        flip onException (lift $ hideError doesNotExistErrorType $ recycleFile (tmpFile gpgDestFile)) $
          downloadAction gpgDestFile uri'
        pure gpgDestFile
      )
    pure baseDestFile

  curlDL :: ( MonadCatch m
@@ -623,6 +612,41 @@ download rawUri gpgUri eDigest eCSize dest mfn etags
        liftIO $ hideError doesNotExistErrorType $ rmFile (etagsFile fp)
        pure Nothing

  verify :: ( MonadReader env m
            , HasLog env
            , HasDirs env
            , HasSettings env
            , MonadCatch m
            , MonadMask m
            , MonadIO m
            )
         => GPGSetting
         -> FilePath
         -> (URI -> Excepts '[ProcessError, DownloadFailed, UnsupportedScheme] m FilePath)
         -> Excepts '[DigestError, ContentLengthError, DownloadFailed, GPGError] m ()
  verify gpgSetting destFile' downloadAction' = do
    case (gpgUri, gpgSetting) of
      (_, GPGNone) -> pure ()
      (Just gpgUri', _) -> do
        liftE $ catchAllE @_ @'[GPGError, ProcessError, UnsupportedScheme, DownloadFailed] @'[GPGError]
          (\e -> if gpgSetting == GPGStrict then throwE (GPGError e) else lift $ logWarn $ T.pack (prettyHFError (GPGError e))
          ) $ do
            o' <- liftIO getGpgOpts
            gpgDestFile <- liftE $ downloadAction' gpgUri'
            lift $ logInfo $ "verifying signature of: " <> T.pack destFile'
            let args = o' ++ ["--batch", "--verify", "--quiet", "--no-tty", gpgDestFile, destFile']
            cp <- lift $ executeOut "gpg" args Nothing
            case cp of
              CapturedProcess { _exitCode = ExitFailure i, _stdErr } -> do
                lift $ logDebug $ decUTF8Safe' _stdErr
                throwE (GPGError @'[ProcessError] (V (NonZeroExit i "gpg" args)))
              CapturedProcess { _stdErr } -> lift $ logDebug $ decUTF8Safe' _stdErr
      _ -> pure ()

    forM_ eCSize (liftE . flip checkCSize destFile')
    forM_ eDigest (liftE . flip checkDigest destFile')


-- | Download into tmpdir or use cached version, if it exists. If filename
-- is omitted, infers the filename from the url.
@@ -642,7 +666,7 @@ downloadCached :: ( MonadReader env m
downloadCached dli mfn = do
  Settings{ cache } <- lift getSettings
  case cache of
    True -> downloadCached' dli mfn Nothing
    True -> liftE $ downloadCached' dli mfn Nothing
    False -> do
      tmp <- lift withGHCupTmpDir
      liftE $ download (_dlUri dli) Nothing (Just (_dlHash dli)) (_dlCSize dli) (fromGHCupPath tmp) outputFileName False

@@ -157,17 +157,13 @@ instance NFData VersionInfo


-- | A tag. These are currently attached to a version of a tool.
data Tag = Latest -- ^ the latest version of a tool (unique per tool)
         | Recommended -- ^ the recommended version of a tool (unique per tool)
         | Prerelease -- ^ denotes a prerelease version
                      -- (a version should either be 'Prerelease' or
                      -- 'LatestPrerelease', but not both)
         | LatestPrerelease -- ^ the latest prerelease (unique per tool)
         | Nightly -- ^ denotes a nightly version
                   -- (a version should either be 'Nightly' or
                   -- 'LatestNightly', but not both)
         | LatestNightly -- ^ the latest nightly (unique per tool)
         | Base PVP -- ^ the base version shipped with GHC
data Tag = Latest
         | Recommended
         | Prerelease
         | LatestPrerelease
         | Nightly
         | LatestNightly
         | Base PVP
         | Old -- ^ old versions are hidden by default in TUI
         | UnknownTag String -- ^ used for upwardscompat
         deriving (Ord, Eq, GHC.Generic, Show) -- FIXME: manual JSON instance

@@ -249,8 +245,6 @@ data LinuxDistro = Debian
                 | RedHat
                 | Alpine
                 | AmazonLinux
                 | RockyLinux
                 | VoidLinux
                 -- rolling
                 | Gentoo
                 | Exherbo

@@ -270,8 +264,6 @@ distroToString CentOS = "centos"
distroToString RedHat = "redhat"
distroToString Alpine = "alpine"
distroToString AmazonLinux = "amazon"
distroToString RockyLinux = "rocky"
distroToString VoidLinux = "void"
distroToString Gentoo = "gentoo"
distroToString Exherbo = "exherbo"
distroToString UnknownLinux = "unknown"

@@ -1033,7 +1033,7 @@ applyPatches pdir ddir = do

  patches <- liftIO $ quilt `catchIO` (\e ->
    if isDoesNotExistError e || isPermissionError e then
      lexicographical
      lexicographical
    else throwIO e)
  forM_ patches $ \patch' -> applyPatch patch' ddir

@@ -1,4 +1,4 @@
resolver: lts-20.26
resolver: lts-20.20

packages:
- .