Compare commits
6 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 72f8e53344 | |
| | 9c464ec9fc | |
| | f575dcdad6 | |
| | 6d3e8d65e1 | |
| | 895e4b3f18 | |
| | 31e83cac5e | |
.github/scripts/bootstrap.sh (vendored): 2 changes
@@ -13,4 +13,6 @@ git describe --always
 ./scripts/bootstrap/bootstrap-haskell

 [ "$(ghc --numeric-version)" = "${BOOTSTRAP_HASKELL_GHC_VERSION}" ]
+# https://github.com/actions/runner-images/issues/7061
+[ "$(ghcup config | grep --color=never meta-mode)" = "meta-mode: Lax" ]
.github/scripts/test.sh (vendored): 4 changes
@@ -190,7 +190,7 @@ sha=$(sha_sum "${GHCUP_DIR}/cache/ghcup-${JSON_VERSION}.yaml")
 # invalidate access time timer, which is 5minutes, so we re-download
 touch -a -m -t '199901010101' "${GHCUP_DIR}/cache/ghcup-${JSON_VERSION}.yaml"
 # redownload same file with some newlines added
-raw_eghcup -s https://www.haskell.org/ghcup/exp/ghcup-${JSON_VERSION}.yaml list
+raw_eghcup -s https://raw.githubusercontent.com/haskell/ghcup-metadata/exp/ghcup-0.0.7.yaml list
 # snapshot new yaml and etags file
 etag2=$(cat "${GHCUP_DIR}/cache/ghcup-${JSON_VERSION}.yaml.etags")
 sha2=$(sha_sum "${GHCUP_DIR}/cache/ghcup-${JSON_VERSION}.yaml")
@@ -200,7 +200,7 @@ sha2=$(sha_sum "${GHCUP_DIR}/cache/ghcup-${JSON_VERSION}.yaml")
 # invalidate access time timer, which is 5minutes, but don't expect a re-download
 touch -a -m -t '199901010101' "${GHCUP_DIR}/cache/ghcup-${JSON_VERSION}.yaml"
 # this time, we expect the same hash and etag
-raw_eghcup -s https://www.haskell.org/ghcup/exp/ghcup-${JSON_VERSION}.yaml list
+raw_eghcup -s https://raw.githubusercontent.com/haskell/ghcup-metadata/exp/ghcup-0.0.7.yaml list
 etag3=$(cat "${GHCUP_DIR}/cache/ghcup-${JSON_VERSION}.yaml.etags")
 sha3=$(sha_sum "${GHCUP_DIR}/cache/ghcup-${JSON_VERSION}.yaml")
 [ "${etag2}" = "${etag3}" ]
@@ -51,7 +51,7 @@ data ConfigCommand
   = ShowConfig
   | SetConfig String (Maybe String)
   | InitConfig
-  | AddReleaseChannel URI
+  | AddReleaseChannel Bool URI
@@ -74,7 +74,7 @@ configP = subparser
   showP = info (pure ShowConfig) (progDesc "Show current config (default)")
   setP = info argsP (progDesc "Set config KEY to VALUE (or specify as single json value)" <> footerDoc (Just $ text configSetFooter))
   argsP = SetConfig <$> argument str (metavar "<JSON_VALUE | YAML_KEY>") <*> optional (argument str (metavar "YAML_VALUE"))
-  addP = info (AddReleaseChannel <$> argument (eitherReader uriParser) (metavar "URI" <> completer fileUri))
+  addP = info (AddReleaseChannel <$> switch (long "force" <> help "Delete existing entry (if any) and append instead of failing") <*> argument (eitherReader uriParser) (metavar "URI" <> completer fileUri))
             (progDesc "Add a release channel from a URI")
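The parser change above threads the new `Bool` through optparse-applicative's `switch` before the positional URI argument. As a rough standalone sketch of the same pattern (the names `AddChannel` and `demoP` are hypothetical stand-ins, not ghcup's own):

```haskell
-- Standalone sketch of the optparse-applicative pattern used by addP above:
-- a Bool switch composed before a positional argument.
import Options.Applicative

data AddChannel = AddChannel Bool String
  deriving Show

demoP :: Parser AddChannel
demoP =
  AddChannel
    <$> switch (long "force" <> help "Delete existing entry (if any) and append instead of failing")
    <*> argument str (metavar "URI")

main :: IO ()
main = do
  -- e.g. `prog --force https://example.org/channel.yaml`
  cmd <- execParser (info (demoP <**> helper) (progDesc "Add a release channel from a URI"))
  print cmd
```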
@@ -159,6 +159,9 @@ updateSettings usl usr =
 --[ Entrypoint ]--
 ------------------

+data Duplicate = Duplicate      -- ^ there is a duplicate somewhere in the middle
+               | NoDuplicate    -- ^ there is no duplicate
+               | DuplicateLast  -- ^ there's a duplicate, but it's the last element

 config :: forall m. ( Monad m
@@ -203,22 +206,50 @@ config configCommand settings userConf keybindings runLogger = case configComman
         pure $ ExitFailure 65
     VLeft _ -> pure $ ExitFailure 65

-  AddReleaseChannel uri -> do
-    case urlSource settings of
-      AddSource xs -> do
-        doConfig (defaultUserSettings { uUrlSource = Just $ AddSource (xs <> [Right uri]) })
-        pure ExitSuccess
-      GHCupURL -> do
-        doConfig (defaultUserSettings { uUrlSource = Just $ AddSource [Right uri] })
-        pure ExitSuccess
-      OwnSource xs -> do
-        doConfig (defaultUserSettings { uUrlSource = Just $ OwnSource (xs <> [Right uri]) })
-        pure ExitSuccess
-      OwnSpec spec -> do
-        doConfig (defaultUserSettings { uUrlSource = Just $ OwnSource ([Left spec, Right uri]) })
-        pure ExitSuccess
+  AddReleaseChannel force uri -> do
+    r <- runE @'[DuplicateReleaseChannel] $ do
+      case urlSource settings of
+        AddSource xs -> do
+          case checkDuplicate xs (Right uri) of
+            Duplicate
+              | not force -> throwE (DuplicateReleaseChannel uri)
+            DuplicateLast -> pure ()
+            _ -> lift $ doConfig (defaultUserSettings { uUrlSource = Just $ AddSource (appendUnique xs (Right uri)) })
+        GHCupURL -> do
+          lift $ doConfig (defaultUserSettings { uUrlSource = Just $ AddSource [Right uri] })
+          pure ()
+        OwnSource xs -> do
+          case checkDuplicate xs (Right uri) of
+            Duplicate
+              | not force -> throwE (DuplicateReleaseChannel uri)
+            DuplicateLast -> pure ()
+            _ -> lift $ doConfig (defaultUserSettings { uUrlSource = Just $ OwnSource (appendUnique xs (Right uri)) })
+        OwnSpec spec -> do
+          lift $ doConfig (defaultUserSettings { uUrlSource = Just $ OwnSource [Left spec, Right uri] })
+          pure ()
+    case r of
+      VRight _ -> do
+        pure ExitSuccess
+      VLeft e -> do
+        runLogger $ logError $ T.pack $ prettyHFError e
+        pure $ ExitFailure 15

   where
+    checkDuplicate :: Eq a => [a] -> a -> Duplicate
+    checkDuplicate xs a
+      | last xs == a = DuplicateLast
+      | a `elem` xs = Duplicate
+      | otherwise = NoDuplicate
+
+    -- appends the element to the end of the list, but also removes it from the existing list
+    appendUnique :: Eq a => [a] -> a -> [a]
+    appendUnique xs' e = go xs'
+      where
+        go [] = [e]
+        go (x:xs)
+          | x == e = go xs -- skip
+          | otherwise = x : go xs
+
     doConfig :: MonadIO m => UserSettings -> m ()
     doConfig usersettings = do
       let settings' = updateSettings usersettings userConf
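The two helpers introduced in the `where` clause carry the `--force` semantics: `checkDuplicate` distinguishes a channel that is already the last entry (a no-op) from one buried earlier in the list, and `appendUnique` moves a duplicate to the end rather than appending it twice. A minimal standalone sketch of that behaviour on plain strings (not ghcup code, just the same two functions exercised in `main`):

```haskell
-- Standalone sketch of the dedup semantics used above, on plain strings.
data Duplicate = Duplicate | NoDuplicate | DuplicateLast
  deriving (Show, Eq)

checkDuplicate :: Eq a => [a] -> a -> Duplicate
checkDuplicate xs a
  | last xs == a = DuplicateLast
  | a `elem` xs  = Duplicate
  | otherwise    = NoDuplicate

-- appends the element to the end of the list, removing any earlier occurrence
appendUnique :: Eq a => [a] -> a -> [a]
appendUnique xs' e = go xs'
  where
    go []     = [e]
    go (x:xs)
      | x == e    = go xs       -- skip the old occurrence
      | otherwise = x : go xs

main :: IO ()
main = do
  print (checkDuplicate ["a", "b", "c"] "c")   -- DuplicateLast: nothing to do
  print (checkDuplicate ["a", "b", "c"] "a")   -- Duplicate: fails unless --force
  print (appendUnique   ["a", "b", "c"] "a")   -- ["b","c","a"]: moved to the end
```

This is also why the error text warns that forcing may change order/semantics: a duplicate in the middle is removed and re-appended at the end.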
@@ -35,6 +35,8 @@ import URI.ByteString

 import qualified Data.Map.Strict as M
 import qualified Data.Text as T
+import qualified Data.Text.Encoding as E
+import qualified Data.Text.Encoding.Error as E
 import Data.Data (Proxy(..))
@@ -82,6 +84,7 @@ allHFError = unlines allErrors
   , let proxy = Proxy :: Proxy HadrianNotFound in format proxy
   , let proxy = Proxy :: Proxy ToolShadowed in format proxy
   , let proxy = Proxy :: Proxy ContentLengthError in format proxy
+  , let proxy = Proxy :: Proxy DuplicateReleaseChannel in format proxy
   , ""
   , "# high level errors (4000+)"
   , let proxy = Proxy :: Proxy DownloadFailed in format proxy
@@ -640,6 +643,19 @@ instance HFErrorProject ContentLengthError where
   eBase _ = 340
   eDesc _ = "File content length verification failed"

+data DuplicateReleaseChannel = DuplicateReleaseChannel URI
+  deriving Show
+
+instance HFErrorProject DuplicateReleaseChannel where
+  eBase _ = 350
+  eDesc _ = "Duplicate release channel detected when adding URI.\nGiving up. You can use '--force' to remove and append the duplicate URI (this may change order/semantics)."
+
+instance Pretty DuplicateReleaseChannel where
+  pPrint (DuplicateReleaseChannel uri) =
+    text $ "Duplicate release channel detected when adding: \n "
+      <> (T.unpack . E.decodeUtf8With E.lenientDecode . serializeURIRef') uri
+      <> "\nGiving up. You can use '--force' to remove and append the duplicate URI (this may change order/semantics)."
+
 -------------------------
 --[ High-level errors ]--
 -------------------------
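The new `Pretty` instance decodes the serialized URI leniently before splicing it into the message, which is what the added `Data.Text.Encoding` imports are for. A rough standalone sketch of that rendering pipeline with uri-bytestring (the URI literal below is only an example):

```haskell
{-# LANGUAGE OverloadedStrings #-}
-- Standalone sketch of the URI-to-String rendering used by the Pretty
-- instance above: serialize the URIRef, then decode UTF-8 leniently.
import URI.ByteString
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import qualified Data.Text.Encoding.Error as E

renderUri :: URIRef Absolute -> String
renderUri = T.unpack . E.decodeUtf8With E.lenientDecode . serializeURIRef'

main :: IO ()
main =
  case parseURI strictURIParserOptions "https://example.org/channel.yaml" of
    Left err  -> print err
    Right uri -> putStrLn $
      "Duplicate release channel detected when adding: \n " <> renderUri uri
```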
@@ -117,7 +117,15 @@ readDirEntPortable :: DirStreamPortable -> IO (DirType, FilePath)
 readDirEntPortable (DirStreamPortable (basedir, dirs)) = do
   (dt, fp) <- readDirEnt dirs
   case (dt, fp) of
-    (DirType #{const DT_UNKNOWN}, _)
+    (DirType #{const DT_BLK}, _) -> pure (dt, fp)
+    (DirType #{const DT_CHR}, _) -> pure (dt, fp)
+    (DirType #{const DT_DIR}, _) -> pure (dt, fp)
+    (DirType #{const DT_FIFO}, _) -> pure (dt, fp)
+    (DirType #{const DT_LNK}, _) -> pure (dt, fp)
+    (DirType #{const DT_REG}, _) -> pure (dt, fp)
+    (DirType #{const DT_SOCK}, _) -> pure (dt, fp)
+    (DirType #{const DT_UNKNOWN}, _) -> pure (dt, fp)
+    (_, _)
       | fp /= "" -> do
           stat <- getSymbolicLinkStatus (basedir </> fp)
           pure $ (, fp) $ if | isBlockDevice stat -> DirType #{const DT_BLK}
@@ -128,5 +136,4 @@ readDirEntPortable (DirStreamPortable (basedir, dirs)) = do
                              | isRegularFile stat -> DirType #{const DT_REG}
                              | isSocket stat -> DirType #{const DT_SOCK}
                              | otherwise -> DirType #{const DT_UNKNOWN}
-    _ -> pure (dt, fp)
@@ -66,7 +66,7 @@ data GHCupInfo = GHCupInfo
   , _ghcupDownloads :: GHCupDownloads
   , _globalTools :: Map GlobalTool DownloadInfo
   }
-  deriving (Show, GHC.Generic)
+  deriving (Show, GHC.Generic, Eq)

 instance NFData GHCupInfo
@@ -87,7 +87,7 @@ data Requirements = Requirements
   { _distroPKGs :: [Text]
   , _notes :: Text
   }
-  deriving (Show, GHC.Generic)
+  deriving (Show, GHC.Generic, Eq)

 instance NFData Requirements
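Deriving `Eq` for `GHCupInfo` and `Requirements` is presumably what lets the duplicate check above compare release-channel entries, since the channel list mixes inline specs (`Left`) with URIs (`Right`) and `checkDuplicate`/`appendUnique` only demand an `Eq` constraint on the element type. A minimal sketch of that constraint propagation, using hypothetical stand-in types rather than ghcup's own:

```haskell
-- Sketch: Either propagates Eq from both sides, so the spec type must be Eq
-- for `elem`-style duplicate checks over the channel list to typecheck.
data Spec = Spec { specNotes :: String }   -- stand-in for GHCupInfo
  deriving (Show, Eq)

type Channel = Either Spec String          -- stand-in for Either GHCupInfo URI

isDuplicate :: Eq a => [a] -> a -> Bool
isDuplicate xs a = a `elem` xs

main :: IO ()
main = print $ isDuplicate
  [Left (Spec "inline"), Right "https://example.org/a.yaml"]
  (Right "https://example.org/a.yaml" :: Channel)   -- True
```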
@@ -119,6 +119,10 @@ edo() {
     "$@" || die "\"$*\" failed!"
 }

+eghcup_raw() {
+    "${GHCUP_BIN}/ghcup" "$@" || die "\"ghcup $*\" failed!"
+}
+
 eghcup() {
     _eghcup "$@"
 }
@@ -381,10 +385,10 @@ download_ghcup() {
     edo . "${GHCUP_DIR}"/env
     case "${BOOTSTRAP_HASKELL_DOWNLOADER}" in
         "curl")
-            eghcup config set downloader Curl
+            eghcup_raw config set downloader Curl
             ;;
         "wget")
-            eghcup config set downloader Wget
+            eghcup_raw config set downloader Wget
             ;;
         *)
             die "Unknown downloader: ${BOOTSTRAP_HASKELL_DOWNLOADER}"