Compare commits

...

No commits in common. "a93aaf9a5fc9f2b77e9c947f2303d64eae7ac610" and "b87d252fec0952ef65761f5cd82d215308d412c7" have entirely different histories.

4 changed files with 69 additions and 54 deletions


@@ -2,12 +2,6 @@
 A rewrite of ghcup in haskell.
-## TODO
-* create static ghcup binaries
-* adjust url in GHCupDownloads
-* add print-system-reqs command
 ## Motivation
 Maintenance problems:

TODO.md (new file, 41 lines added)

@@ -0,0 +1,41 @@
# TODOs and Remarks
## Now
* print-system-reqs
## Cleanups
* avoid alternative for IO
* don't use Excepts?
## Maybe
* maybe: a `changelog` command to show the changelog of a GHC release (online)
* OS faking
* sign the JSON? (Or check gpg keys?)
* testing (especially distro detection -> unit tests)
* hard cleanup command?
## Later
* static builds and host ghcup
* do bootstrap-haskell with new ghcup
* add support for RC/alpha/HEAD versions
* check for updates on start
* use plucky or oops instead of Excepts
## Questions
* handling of SIGTERM and SIGUSR
* installing musl on demand?
* redo/rethink how tool tags work
* tarball tags as well as version tags?
* mirror support
* check for new version on start
* how to propagate updates? Automatically? Might solve the versioning problem
* maybe add deprecation notice into JSON
* interactive handling when distro doesn't exist and we know the tarball is incompatible?
* ghcup-with wrapper to execute a command with a given ghc in PATH?


@@ -902,7 +902,7 @@ cabal_3000_64_darwin = DownloadInfo
 ghcup_010_64_linux :: DownloadInfo
 ghcup_010_64_linux = DownloadInfo
-  [uri|file:///home/maerwald/tmp/ghcup-exe|]
+  [uri|file:///home/ospa_ju/tmp/ghcup-exe|]
   Nothing
   [s|558126339252788a3d44a3f910417277c7ab656f0796b68bdc58afe73296b8cd|]


@@ -71,7 +71,6 @@ import qualified Data.Map.Strict as M
 import qualified Data.Text as T
 import qualified Data.Text.Encoding as E
 import qualified System.IO.Streams as Streams
-import qualified System.Posix.Files.ByteString as PF
 import qualified System.Posix.RawFilePath.Directory
                                                as RD
@@ -112,15 +111,10 @@ getDownloads = do
     (OwnSpec av) -> pure $ av
  where
-  -- First check if the json file is in the ~/.ghcup/cache dir
-  -- and check it's access time. If it has been accessed within the
-  -- last 5 minutes, just reuse it.
-  --
-  -- If not, then send a HEAD request and check for modification time.
+  -- First send a HEAD request and check for modification time.
   -- Only download the file if the modification time is newer
-  -- than the local file.
-  --
-  -- Always save the local file with the mod time of the remote file.
+  -- than the local file. Always save the local file with the
+  -- mod time of the remote file.
   dl :: forall m1
      . (MonadCatch m1, MonadIO m1, MonadFail m1, MonadLogger m1)
     => URI
@@ -138,34 +132,34 @@ getDownloads = do
       let path = view pathL' uri'
       json_file <- (liftIO $ ghcupCacheDir)
         >>= \cacheDir -> (cacheDir </>) <$> urlBaseName path
+      headers <-
+        handleIO (\_ -> pure mempty)
+          $ liftE
+          $ ( catchAllE
+                (\_ ->
+                  pure mempty :: Excepts '[] m1 (M.Map (CI ByteString) ByteString)
+                )
+            $ getHead uri'
+            )
+      let mModT = parseModifiedHeader headers
       e <- liftIO $ doesFileExist json_file
       if e
         then do
-          accessTime <-
-            PF.accessTimeHiRes
-              <$> (liftIO $ PF.getFileStatus (toFilePath json_file))
-          currentTime <- liftIO $ getPOSIXTime
-          -- access time won't work on most linuxes, but we can try regardless
-          if (currentTime - accessTime) > 300
-            then do -- no access in last 5 minutes, re-check upstream mod time
-              getModTime >>= \case
-                Just modTime -> do
-                  fileMod <- liftIO $ getModificationTime json_file
-                  if modTime > fileMod
-                    then do
-                      bs <- liftE $ downloadBS uri'
-                      liftIO $ writeFileWithModTime modTime json_file bs
-                      pure bs
-                    else liftIO $ readFile json_file
-                Nothing -> do
-                  lift $ $(logWarn) [i|Unable to get/parse Last-Modified header|]
-                  liftIO $ deleteFile json_file
-                  liftE $ downloadBS uri'
-            else -- access in less than 5 minutes, re-use file
-              liftIO $ readFile json_file
+          case mModT of
+            Just modTime -> do
+              fileMod <- liftIO $ getModificationTime json_file
+              if modTime > fileMod
+                then do
+                  bs <- liftE $ downloadBS uri'
+                  liftIO $ writeFileWithModTime modTime json_file bs
+                  pure bs
+                else liftIO $ readFile json_file
+            Nothing -> do
+              lift $ $(logWarn) [i|Unable to get/parse Last-Modified header|]
+              liftIO $ deleteFile json_file
+              liftE $ downloadBS uri'
         else do
-          getModTime >>= \case
+          case mModT of
             Just modTime -> do
               bs <- liftE $ downloadBS uri'
               liftIO $ writeFileWithModTime modTime json_file bs
@@ -174,20 +168,6 @@ getDownloads = do
               lift $ $(logWarn) [i|Unable to get/parse Last-Modified header|]
               liftE $ downloadBS uri'
      where
-       getModTime = do
-         headers <-
-           handleIO (\_ -> pure mempty)
-             $ liftE
-             $ ( catchAllE
-                   (\_ ->
-                     pure mempty :: Excepts '[] m1 (M.Map (CI ByteString) ByteString)
-                   )
-               $ getHead uri'
-               )
-         pure $ parseModifiedHeader headers
        parseModifiedHeader :: (M.Map (CI ByteString) ByteString) -> Maybe UTCTime
        parseModifiedHeader headers =
          (M.lookup (CI.mk [s|Last-Modified|]) headers) >>= \h -> parseTimeM
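For readers unfamiliar with the caching scheme the revised comment describes (send a HEAD request, compare the remote Last-Modified date against the cached file's mtime, re-download only when the remote copy is newer, then stamp the cached file with the remote mod time), here is a minimal standalone sketch. It is not the ghcup implementation: it uses plain http-client, http-types and directory instead of ghcup's Excepts/getHead/downloadBS helpers, and the names (`cachedFetch`, `parseLastModified`) and URLs/paths in it are illustrative only.

```haskell
{-# LANGUAGE OverloadedStrings #-}

module Main where

import           Control.Exception              ( SomeException, try )
import qualified Data.ByteString.Char8         as B8
import qualified Data.ByteString.Lazy          as BL
import           Data.Time                      ( UTCTime )
import           Data.Time.Format               ( defaultTimeLocale, parseTimeM )
import           Network.HTTP.Client            -- plain-HTTP manager; use http-client-tls for https
import           Network.HTTP.Types.Header      ( hLastModified )
import           System.Directory               ( doesFileExist
                                                , getModificationTime
                                                , setModificationTime
                                                )

-- | Parse the RFC 1123 date carried in a Last-Modified header,
-- e.g. "Wed, 01 Jan 2020 00:00:00 GMT".
parseLastModified :: B8.ByteString -> Maybe UTCTime
parseLastModified =
  parseTimeM True defaultTimeLocale "%a, %d %b %Y %H:%M:%S %Z" . B8.unpack

-- | Fetch a remote file, reusing a cached copy unless the remote
-- Last-Modified date is newer than the cached file's mtime.
cachedFetch :: String -> FilePath -> IO BL.ByteString
cachedFetch url cacheFile = do
  mgr <- newManager defaultManagerSettings
  req <- parseRequest url

  -- HEAD request only; any failure simply means "no Last-Modified known".
  eHead <- try (httpNoBody req { method = "HEAD" } mgr)
             :: IO (Either SomeException (Response ()))
  let mModT = case eHead of
        Left  _ -> Nothing
        Right r -> lookup hLastModified (responseHeaders r) >>= parseLastModified

  haveLocal  <- doesFileExist cacheFile
  localFresh <- case (haveLocal, mModT) of
    (True, Just remoteMod) -> (>= remoteMod) <$> getModificationTime cacheFile
    _                      -> pure False

  if localFresh
    then BL.readFile cacheFile     -- cache is up to date, reuse it
    else do                        -- (re-)download and keep the remote mod time
      body <- responseBody <$> httpLbs req mgr
      BL.writeFile cacheFile body
      maybe (pure ()) (setModificationTime cacheFile) mModT
      pure body

main :: IO ()
main = cachedFetch "http://example.com/ghcup-0.1.json"
                   "/tmp/ghcup-cache.json" >>= BL.putStr
```

The sketch mirrors the design choice visible in the diff: the HEAD request is issued once up front and its result (`mModT` here, as in the new code) is reused for both the "cached file exists" and "no cached file" branches, instead of the old per-branch `getModTime` helper and access-time heuristic.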