WIP
This commit is contained in:
parent c9790e5823
commit 55fdc41137
@@ -35,6 +35,8 @@ git describe --always

### build

rm -rf "${GHCUP_DIR}"/share

ecabal update

if [ "${OS}" = "DARWIN" ] ; then
@@ -31,4 +31,7 @@ package cabal-plan

package aeson
  flags: +ordered-keymap

package streamly
  flags: +use-unliftio

allow-newer: base, ghc-prim, template-haskell, language-c
7  cbits/dirutils.c  Normal file

@@ -0,0 +1,7 @@
#include "dirutils.h"

unsigned int
__posixdir_d_type(struct dirent* d)
{
  return(d -> d_type);
}
15  cbits/dirutils.h  Normal file

@@ -0,0 +1,15 @@
#ifndef POSIXPATHS_CBITS_DIRUTILS_H
#define POSIXPATHS_CBITS_DIRUTILS_H

#include <stdlib.h>
#include <dirent.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>


extern unsigned int
__posixdir_d_type(struct dirent* d)
;

#endif
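The header above only exists so the Haskell side can read the d_type field of a
struct dirent through the FFI. The matching binding is the foreign import added
verbatim in GHCup.Utils.File.Posix.Traversals further down:

    -- binding to the C shim above; CDirent is an opaque pointer type on the Haskell side
    foreign import ccall unsafe "__posixdir_d_type"
      c_type :: Ptr CDirent -> IO DirType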
@@ -126,8 +126,8 @@ library
    , safe ^>=0.3.18
    , safe-exceptions ^>=0.1
    , split ^>=0.2.3.4
    , strict-base ^>=0.4
    , streamly ^>=0.8.2
    , strict-base ^>=0.4
    , template-haskell >=2.7 && <2.18
    , temporary ^>=1.3
    , text ^>=1.2.4.0

@@ -167,9 +167,11 @@ library
  other-modules:
    GHCup.Utils.File.Posix
    GHCup.Utils.File.Posix.Foreign
    GHCup.Utils.File.Posix.Traversals
    GHCup.Utils.Posix
    GHCup.Utils.Prelude.Posix

  c-sources: cbits/dirutils.c
  build-depends:
    , bz2 >=0.5.0.5 && <1.1
    , terminal-size ^>=0.3.2.1

@@ -273,7 +275,6 @@ executable ghcup
  if flag(no-exe)
    buildable: False


test-suite ghcup-test
  type: exitcode-stdio-1.0
  main-is: Main.hs

@@ -282,6 +283,7 @@ test-suite ghcup-test
  other-modules:
    GHCup.ArbitraryTypes
    GHCup.Types.JSONSpec
    GHCup.Utils.FileSpec
    Spec

  default-language: Haskell2010

@@ -301,12 +303,15 @@ test-suite ghcup-test
    , base >=4.12 && <5
    , bytestring ^>=0.10
    , containers ^>=0.6
    , directory ^>=1.3.6.0
    , filepath ^>=1.4.2.1
    , generic-arbitrary >=0.1.0 && <0.3
    , ghcup
    , hspec >=2.7.10 && <2.10
    , hspec-golden-aeson ^>=0.9
    , QuickCheck ^>=2.14.1
    , quickcheck-arbitrary-adt ^>=0.3.1.0
    , streamly ^>=0.8.2
    , text ^>=1.2.4.0
    , uri-bytestring ^>=0.3.2.2
    , versions >=4.0.1 && <5.1
58  lib/GHCup.hs
@@ -42,8 +42,6 @@ import GHCup.Version

import Codec.Archive ( ArchiveResult )
import Control.Applicative
import Control.DeepSeq ( force )
import Control.Exception ( evaluate )
import Control.Exception.Safe
import Control.Monad
#if !MIN_VERSION_base(4,13,0)

@@ -52,7 +50,6 @@ import Control.Monad.Fail ( MonadFail )
import Control.Monad.Reader
import Control.Monad.Trans.Resource
                hiding ( throwM )
import Control.Monad.IO.Unlift ( MonadUnliftIO( withRunInIO ) )
import Data.ByteString ( ByteString )
import Data.Either
import Data.List

@@ -94,6 +91,7 @@ import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.Encoding as E
import qualified Text.Megaparsec as MP
import qualified Streamly.Prelude as S
import GHCup.Utils.MegaParsec
import Control.Concurrent (threadDelay)
@@ -328,13 +326,10 @@ installUnpackedGHC path inst ver forceInstall
    -- Windows bindists are relocatable and don't need
    -- to run configure.
    -- We also must make sure to preserve mtime to not confuse ghc-pkg.
    fs <- lift $ withRunInIO $ \_ -> mergeFileTreeAll path (fromInstallDir inst) $ \source dest -> do
    lift $ mergeFileTree path inst GHC (mkTVer ver) $ \source dest -> liftIO $ do
      mtime <- getModificationTime source
      moveFilePortable source dest
      setModificationTime dest mtime
    case inst of
      IsolateDirResolved _ -> pure ()
      _ -> recordInstalledFiles fs GHC (mkTVer ver)
  | otherwise = do
    PlatformRequest {..} <- lift getPlatformReq

@@ -355,13 +350,12 @@ installUnpackedGHC path inst ver forceInstall
    tmpInstallDest <- lift withGHCupTmpDir
    lEM $ make ["DESTDIR=" <> fromGHCupPath tmpInstallDest, "install"] (Just $ fromGHCupPath path)
    lift $ logInfo $ "Merging file tree from \"" <> T.pack (fromGHCupPath tmpInstallDest) <> "\" to \"" <> T.pack (fromInstallDir inst) <> "\""
    fs <- mergeFileTreeAll (tmpInstallDest `appendGHCupPath` dropDrive (fromInstallDir inst))
                           (fromInstallDir inst)
                           (\f t -> liftIO (install f t (not forceInstall)))
    liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromInstallDir inst)
    case inst of
      IsolateDirResolved _ -> pure ()
      _ -> recordInstalledFiles fs GHC (mkTVer ver)
    liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpInstallDest)
    lift $ mergeFileTree (tmpInstallDest `appendGHCupPath` dropDrive (fromInstallDir inst))
                         inst
                         GHC
                         (mkTVer ver)
                         (\f t -> liftIO $ install f t (not forceInstall))
    pure ()
@@ -670,13 +664,12 @@ installHLSUnpacked path inst ver forceInstall = do
  lift $ logInfo "Installing HLS"
  tmpInstallDest <- lift withGHCupTmpDir
  lEM $ make ["DESTDIR=" <> fromGHCupPath tmpInstallDest, "PREFIX=" <> fromInstallDir inst, "install"] (Just path)
  fs <- mergeFileTreeAll (tmpInstallDest `appendGHCupPath` dropDrive (fromInstallDir inst))
                         (fromInstallDir inst)
                         (\f t -> liftIO (install f t (not forceInstall)))
  liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromInstallDir inst)
  case inst of
    IsolateDirResolved _ -> pure ()
    _ -> recordInstalledFiles fs HLS (mkTVer ver)
  liftE $ catchWarn $ lEM @_ @'[ProcessError] $ darwinNotarization _rPlatform (fromGHCupPath tmpInstallDest)
  lift $ mergeFileTree (tmpInstallDest `appendGHCupPath` dropDrive (fromInstallDir inst))
                       inst
                       HLS
                       (mkTVer ver)
                       (\f t -> liftIO $ install f t (not forceInstall))

-- | Install an unpacked hls distribution (legacy).
installHLSUnpackedLegacy :: (MonadReader env m, MonadFail m, HasLog env, MonadCatch m, MonadIO m)
@@ -1804,11 +1797,11 @@ rmGHCVer ver = do
  lift (getInstalledFiles GHC ver) >>= \case
    Just files -> do
      lift $ logInfo $ "Removing files safely from: " <> T.pack dir
      forM_ files (liftIO . deleteFile . (\f -> dir </> dropDrive f))
      f <- recordedInstallationFile GHC ver
      liftIO $ hideError doesNotExistErrorType $ deleteFile f
      forM_ files (lift . recycleFile . (\f -> dir </> dropDrive f))
      removeEmptyDirsRecursive dir
      survivors <- liftIO $ hideErrorDef [doesNotExistErrorType] [] $ listDirectory dir
      f <- recordedInstallationFile GHC ver
      lift $ recycleFile f
      when (not (null survivors)) $ throwE $ UninstallFailed dir survivors
    Nothing -> do
      lift $ logInfo $ "Removing legacy directory recursively: " <> T.pack dir

@@ -1888,11 +1881,11 @@ rmHLSVer ver = do
  lift (getInstalledFiles HLS (mkTVer ver)) >>= \case
    Just files -> do
      lift $ logInfo $ "Removing files safely from: " <> T.pack hlsDir
      forM_ files (liftIO . deleteFile . (\f -> hlsDir </> dropDrive f))
      f <- recordedInstallationFile HLS (mkTVer ver)
      liftIO $ deleteFile f
      forM_ files (lift . recycleFile . (\f -> hlsDir </> dropDrive f))
      removeEmptyDirsRecursive hlsDir
      survivors <- liftIO $ hideErrorDef [doesNotExistErrorType] [] $ listDirectory hlsDir
      f <- recordedInstallationFile HLS (mkTVer ver)
      lift $ recycleFile f
      when (not (null survivors)) $ throwE $ UninstallFailed hlsDir survivors
    Nothing -> do
      lift $ logInfo $ "Removing legacy directory recursively: " <> T.pack hlsDir
@@ -2071,8 +2064,7 @@ rmGhcupDirs = do
      -- althought 'deleteFile' should already handle it.
      hideErrorDef [doesNotExistErrorType] () $ do
        logInfo $ "removing " <> T.pack (fromGHCupPath dir)
        contents <- liftIO $ getDirectoryContentsRecursive dir
        forM_ contents (deleteFile' . (fromGHCupPath dir </>))
        liftIO $ flip S.mapM_ (getDirectoryContentsRecursive dir) $ deleteFile' . (fromGHCupPath dir </>)

    rmBinDir :: (MonadReader env m, HasDirs env, MonadMask m, MonadIO m, MonadCatch m) => FilePath -> m ()
    rmBinDir binDir

@@ -2083,11 +2075,9 @@ rmGhcupDirs = do
        then removeDirIfEmptyOrIsSymlink binDir
        else pure ()

    reportRemainingFiles :: MonadIO m => FilePath -> m [FilePath]
    reportRemainingFiles :: (MonadMask m, MonadIO m) => FilePath -> m [FilePath]
    reportRemainingFiles dir = do
      -- force the files so the errors don't leak
      (force -> !remainingFiles) <- liftIO
        (getDirectoryContentsRecursiveUnsafe dir >>= evaluate)
      remainingFiles <- liftIO $ S.toList (getDirectoryContentsRecursiveUnsafe dir)
      let normalizedFilePaths = fmap normalise remainingFiles
      let sortedByDepthRemainingFiles = sortBy (flip compareFn) normalizedFilePaths
      let remainingFilesAbsolute = fmap (dir </>) sortedByDepthRemainingFiles

@@ -2105,7 +2095,7 @@ rmGhcupDirs = do
    -- we report remaining files/dirs later,
    -- hence the force/quiet mode in these delete functions below.

    deleteFile' :: (MonadReader env m, HasDirs env, MonadMask m, MonadIO m) => FilePath -> m ()
    deleteFile' :: (MonadMask m, MonadIO m) => FilePath -> m ()
    deleteFile' filepath = do
      hideError doesNotExistErrorType
        $ hideError InappropriateType $ rmFile filepath
@@ -86,7 +86,7 @@ import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import qualified Text.Megaparsec as MP
import qualified Data.List.NonEmpty as NE
import Text.PrettyPrint.HughesPJClass (prettyShow)
import qualified Streamly.Prelude as S
import Control.DeepSeq (force)
import GHC.IO (evaluate)
@@ -853,7 +853,7 @@ intoSubdir :: (MonadReader env m, HasLog env, MonadIO m, MonadThrow m, MonadCatc
           -> Excepts '[TarDirDoesNotExist] m GHCupPath
intoSubdir bdir tardir = case tardir of
  RealDir pr -> do
    whenM (fmap not . liftIO . doesDirectoryExist $ (fromGHCupPath (bdir `appendGHCupPath` pr)))
    whenM (fmap not . liftIO . doesDirectoryExist $ fromGHCupPath (bdir `appendGHCupPath` pr))
      (throwE $ TarDirDoesNotExist tardir)
    pure (bdir `appendGHCupPath` pr)
  RegexDir r -> do
@@ -1286,35 +1286,17 @@ ghcBinaryName (GHCTargetVersion Nothing _) = T.unpack ("ghc" <> T.pack exeExt)
-- 3. if it exists and is non-empty -> panic and leave the house
installDestSanityCheck :: ( MonadIO m
                          , MonadCatch m
                          , MonadMask m
                          ) =>
                          InstallDirResolved ->
                          Excepts '[DirNotEmpty] m ()
installDestSanityCheck (IsolateDirResolved isoDir) = do
  hideErrorDef [doesNotExistErrorType] () $ do
    contents <- liftIO $ getDirectoryContentsRecursiveUnsafe isoDir
    unless (null contents) (throwE $ DirNotEmpty isoDir)
    empty' <- liftIO $ S.null $ getDirectoryContentsRecursiveUnsafe isoDir
    when (not empty') (throwE $ DirNotEmpty isoDir)
installDestSanityCheck _ = pure ()


-- | Write installed files into database.
recordInstalledFiles :: ( MonadIO m
                        , MonadReader env m
                        , HasDirs env
                        , MonadFail m
                        )
                     => [FilePath]
                     -> Tool
                     -> GHCTargetVersion
                     -> m ()
recordInstalledFiles files tool v' = do
  dest <- recordedInstallationFile tool v'
  liftIO $ createDirectoryIfMissing True (takeDirectory dest)
  -- TODO: what if the filepath has newline? :)
  let contents = unlines files
  liftIO $ writeFile dest contents
  pure ()


-- | Returns 'Nothing' for legacy installs.
getInstalledFiles :: ( MonadIO m
                     , MonadCatch m
@@ -1332,14 +1314,3 @@ getInstalledFiles t v' = hideErrorDef [doesNotExistErrorType] Nothing $ do
  pure (Just $ lines c)


recordedInstallationFile :: ( MonadReader env m
                            , HasDirs env
                            )
                         => Tool
                         -> GHCTargetVersion
                         -> m FilePath
recordedInstallationFile t v' = do
  Dirs {..} <- getDirs
  pure (fromGHCupPath dbDir </> prettyShow t </> T.unpack (tVerToText v'))
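One behavioural note on the installDestSanityCheck hunk above: S.null only has to
demand the first element of the stream, so the check can return as soon as a single
entry is found instead of listing the whole isolate directory first. A minimal sketch
of that pattern (assuming the streaming helpers from GHCup.Utils.File are in scope):

    import qualified Streamly.Prelude as S

    -- returns as soon as one entry is produced; never materialises the full listing
    dirIsEmpty :: FilePath -> IO Bool
    dirIsEmpty = S.null . getDirectoryContentsRecursiveUnsafe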
@@ -127,6 +127,7 @@ import Data.Versions
import GHC.IO.Exception ( IOErrorType(NoSuchThing) )
import Haskus.Utils.Variant.Excepts
import Optics
import Safe
import System.Directory hiding ( removeDirectory
                               , removeDirectoryRecursive
                               , removePathForcibly

@@ -180,7 +181,7 @@ getGHCupTmpDirs = do
                 execBlank
                 ([s|^ghcup-.*$|] :: ByteString)
        )
  pure (fmap (\p -> GHCupPath (tmpdir </> p)) $ filter (("ghcup-" `isPrefixOf`) . takeDirectory) $ ghcup_dirs)
  pure (fmap (\p -> GHCupPath (tmpdir </> p)) $ filter (maybe False ("ghcup-" `isPrefixOf`) . lastMay . splitPath) ghcup_dirs)


------------------------------
@@ -1,6 +1,5 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}

@@ -9,14 +8,27 @@

module GHCup.Utils.File (
  mergeFileTree,
  mergeFileTreeAll,
  copyFileE,
  findFilesDeep,
  getDirectoryContentsRecursive,
  getDirectoryContentsRecursiveBFS,
  getDirectoryContentsRecursiveDFS,
  getDirectoryContentsRecursiveUnsafe,
  getDirectoryContentsRecursiveBFSUnsafe,
  getDirectoryContentsRecursiveDFSUnsafe,
  recordedInstallationFile,
  module GHCup.Utils.File.Common,
#if IS_WINDOWS
  module GHCup.Utils.File.Windows
#else
  module GHCup.Utils.File.Posix
#endif

  executeOut,
  execLogged,
  exec,
  toProcessError,
  chmod_755,
  isBrokenSymlink,
  copyFile,
  deleteFile,
  install,
  removeEmptyDirectory,
) where

import GHCup.Utils.Dirs
@@ -27,77 +39,122 @@ import GHCup.Utils.File.Windows
import GHCup.Utils.File.Posix
#endif
import GHCup.Errors
import GHCup.Types
import GHCup.Types.Optics
import GHCup.Utils.Prelude

import GHC.IO ( evaluate )
import Text.Regex.Posix
import Control.Exception.Safe
import Haskus.Utils.Variant.Excepts
import Control.Monad.Reader
import System.FilePath
import Text.PrettyPrint.HughesPJClass (prettyShow)

import Data.List (nub)
import Data.Foldable (traverse_)
import Control.DeepSeq (force)
import qualified Data.Text as T
import qualified Streamly.Prelude as S


-- | Like 'mergeFileTree', except reads the entire source base dir to determine files to copy recursively.
mergeFileTreeAll :: MonadIO m
mergeFileTree :: (MonadMask m, S.MonadAsync m, MonadReader env m, HasDirs env)
              => GHCupPath          -- ^ source base directory from which to install findFiles
              -> FilePath           -- ^ destination base dir
              -> (FilePath -> FilePath -> m ())  -- ^ file copy operation
              -> m [FilePath]
mergeFileTreeAll sourceBase destBase copyOp = do
  (force -> !sourceFiles) <- liftIO
    (getDirectoryContentsRecursive sourceBase >>= evaluate)
  mergeFileTree sourceBase sourceFiles destBase copyOp
  pure sourceFiles


mergeFileTree :: MonadIO m
              => GHCupPath          -- ^ source base directory from which to install findFiles
              -> [FilePath]         -- ^ relative filepaths from source base directory
              -> FilePath           -- ^ destination base dir
              -> InstallDirResolved -- ^ destination base dir
              -> Tool
              -> GHCTargetVersion
              -> (FilePath -> FilePath -> m ())  -- ^ file copy operation
              -> m ()
mergeFileTree (fromGHCupPath -> sourceBase) sources destBase copyOp = do
mergeFileTree sourceBase destBase tool v' copyOp = do
  -- These checks are not atomic, but we perform them to have
  -- the opportunity to abort before copying has started.
  --
  -- The actual copying might still fail.
  liftIO baseCheck
  liftIO destCheck
  liftIO sourcesCheck
  liftIO $ baseCheck (fromGHCupPath sourceBase)
  liftIO $ destCheck (fromInstallDir destBase)

  -- finally copy
  copy
  recFile <- recordedInstallationFile tool v'
  case destBase of
    IsolateDirResolved _ -> pure ()
    _ -> do
      whenM (liftIO $ doesFileExist recFile) $ throwIO $ userError ("mergeFileTree: DB file " <> recFile <> " already exists!")
      liftIO $ createDirectoryIfMissing True (takeDirectory recFile)

  flip S.mapM_ (getDirectoryContentsRecursive sourceBase) $ \f -> do
    copy f
    recordInstalledFile f recFile
    pure f

 where
  copy = do
    let dirs = map (destBase </>) . nub . fmap takeDirectory $ sources
    traverse_ (liftIO . createDirectoryIfMissing True) dirs
  recordInstalledFile f recFile = do
    case destBase of
      IsolateDirResolved _ -> pure ()
      _ -> liftIO $ appendFile recFile (f <> "\n")

    forM_ sources $ \source -> do
      let dest = destBase </> source
          src  = sourceBase </> source
      copyOp src dest
  copy source = do
    let dest = fromInstallDir destBase </> source
        src  = fromGHCupPath sourceBase </> source

  baseCheck = do
    when (isRelative sourceBase)
      $ throwIO $ userError ("mergeFileTree: source base directory " <> sourceBase <> " is not absolute!")
    whenM (not <$> doesDirectoryExist sourceBase)
      $ throwIO $ userError ("mergeFileTree: source base directory " <> sourceBase <> " does not exist!")
  destCheck = do
    when (isRelative destBase)
      $ throwIO $ userError ("mergeFileTree: destination base directory " <> destBase <> " is not absolute!")
    whenM (doesDirectoryExist destBase)
      $ throwIO $ userError ("mergeFileTree: destination base directory " <> destBase <> " does already exist!")
  sourcesCheck =
    forM_ sources $ \source -> do
      -- TODO: use Excepts or HPath
      when (isAbsolute source)
        $ throwIO $ userError ("mergeFileTree: source file " <> source <> " is not relative!")
      whenM (not <$> doesFileExist (sourceBase </> source))
        $ throwIO $ userError ("mergeFileTree: source file " <> (sourceBase </> source) <> " does not exist!")

    liftIO . createDirectoryIfMissing True . takeDirectory $ dest

    copyOp src dest


  baseCheck src = do
    when (isRelative src)
      $ throwIO $ userError ("mergeFileTree: source base directory " <> src <> " is not absolute!")
    whenM (not <$> doesDirectoryExist src)
      $ throwIO $ userError ("mergeFileTree: source base directory " <> src <> " does not exist!")
  destCheck dest = do
    when (isRelative dest)
      $ throwIO $ userError ("mergeFileTree: destination base directory " <> dest <> " is not absolute!")



copyFileE :: (CopyError :< xs, MonadCatch m, MonadIO m) => FilePath -> FilePath -> Bool -> Excepts xs m ()
copyFileE from to = handleIO (throwE . CopyError . show) . liftIO . copyFile from to


-- | List all the files in a directory and all subdirectories.
--
-- The order places files in sub-directories after all the files in their
-- parent directories. The list is generated lazily so is not well defined if
-- the source directory structure changes before the list is used.
--
-- depth first
getDirectoryContentsRecursiveDFS :: (MonadCatch m, S.MonadAsync m, MonadMask m)
                                 => GHCupPath
                                 -> S.SerialT m FilePath
getDirectoryContentsRecursiveDFS (fromGHCupPath -> fp) = getDirectoryContentsRecursiveDFSUnsafe fp

-- breadth first
getDirectoryContentsRecursiveBFS :: (MonadCatch m, S.MonadAsync m, MonadMask m)
                                 => GHCupPath
                                 -> S.SerialT m FilePath
getDirectoryContentsRecursiveBFS (fromGHCupPath -> fp) = getDirectoryContentsRecursiveBFSUnsafe fp


getDirectoryContentsRecursive :: (MonadCatch m, S.MonadAsync m, MonadMask m)
                              => GHCupPath
                              -> S.SerialT m FilePath
getDirectoryContentsRecursive = getDirectoryContentsRecursiveBFS

getDirectoryContentsRecursiveUnsafe :: (MonadCatch m, S.MonadAsync m, MonadMask m)
                                    => FilePath
                                    -> S.SerialT m FilePath
getDirectoryContentsRecursiveUnsafe = getDirectoryContentsRecursiveBFSUnsafe

findFilesDeep :: GHCupPath -> Regex -> IO [FilePath]
findFilesDeep path regex =
  S.toList $ S.filter (match regex) $ getDirectoryContentsRecursive path


recordedInstallationFile :: ( MonadReader env m
                            , HasDirs env
                            )
                         => Tool
                         -> GHCTargetVersion
                         -> m FilePath
recordedInstallationFile t v' = do
  Dirs {..} <- getDirs
  pure (fromGHCupPath dbDir </> prettyShow t </> T.unpack (tVerToText v'))
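To make the new mergeFileTree signature above concrete, this is roughly how it is
called from installUnpackedGHC in lib/GHCup.hs in this same commit (names taken from
the hunks earlier in the diff): the per-file copy callback receives source and
destination paths, while the Tool and GHCTargetVersion arguments tell mergeFileTree
where to record the installed files.

    -- call site sketch, lifted from the installUnpackedGHC hunk earlier in this diff
    lift $ mergeFileTree path inst GHC (mkTVer ver) $ \source dest -> liftIO $ do
      mtime <- getModificationTime source
      moveFilePortable source dest
      setModificationTime dest mtime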
@@ -9,7 +9,6 @@ module GHCup.Utils.File.Common (
  ) where

import GHCup.Utils.Prelude
import {-# SOURCE #-} GHCup.Utils.Dirs ( GHCupPath )
import GHCup.Types(ProcessError(..), CapturedProcess(..))

import Control.Monad.Reader

@@ -25,6 +24,7 @@ import System.Directory hiding ( removeDirectory
import System.FilePath
import Text.Regex.Posix


import qualified Data.Text as T
import qualified Text.Megaparsec as MP

@@ -99,10 +99,6 @@ findFiles path regex = do
  contents <- listDirectory path
  pure $ filter (match regex) contents

findFilesDeep :: GHCupPath -> Regex -> IO [FilePath]
findFilesDeep path regex = do
  contents <- getDirectoryContentsRecursive path
  pure $ filter (match regex) contents

findFiles' :: FilePath -> MP.Parsec Void Text a -> IO [FilePath]
findFiles' path parser = do
@@ -23,10 +23,11 @@ import GHCup.Utils.Prelude
import GHCup.Utils.Logger
import GHCup.Types
import GHCup.Types.Optics
import GHCup.Utils.File.Posix.Traversals

import Control.Concurrent
import Control.Concurrent.Async
import Control.Exception ( evaluate )
import qualified Control.Exception as E
import Control.Exception.Safe
import Control.Monad
import Control.Monad.Reader

@@ -71,6 +72,12 @@ import qualified Streamly.Internal.FileSystem.Handle
                                                     as IFH
import qualified Streamly.Prelude as S
import qualified GHCup.Utils.File.Posix.Foreign as FD
import qualified Streamly.Internal.Data.Stream.StreamD.Type
                                                     as D
import Streamly.Internal.Data.Unfold.Type
import qualified Streamly.Internal.Data.Unfold as U
import Streamly.Internal.Control.Concurrent ( withRunInIO )
import Streamly.Internal.Data.IOFinalizer ( newIOFinalizer, runIOFinalizer )


@@ -277,7 +284,7 @@ captureOutStreams action = do

    -- execute the action
    a <- action
    void $ evaluate a
    void $ E.evaluate a

    -- close everything we don't need
    closeFd childStdoutWrite
@@ -554,3 +561,61 @@ install from to fail' = do

removeEmptyDirectory :: FilePath -> IO ()
removeEmptyDirectory = PD.removeDirectory


-- | Create an 'Unfold' of directory contents.
unfoldDirContents :: (MonadMask m, MonadIO m, S.MonadAsync m) => Unfold m FilePath (FD.DirType, FilePath)
unfoldDirContents = U.bracket (liftIO . openDirStream) (liftIO . closeDirStream) (Unfold step return)
 where
  {-# INLINE [0] step #-}
  step dirstream = do
    (typ, e) <- liftIO $ readDirEnt dirstream
    return $ if
      | null e    -> D.Stop
      | "." == e  -> D.Skip dirstream
      | ".." == e -> D.Skip dirstream
      | otherwise -> D.Yield (typ, e) dirstream


getDirectoryContentsRecursiveDFSUnsafe :: (MonadMask m, MonadIO m, S.MonadAsync m)
                                       => FilePath
                                       -> S.SerialT m FilePath
getDirectoryContentsRecursiveDFSUnsafe fp = go ""
 where
  go cd = flip S.concatMap (S.unfold unfoldDirContents (fp </> cd)) $ \(t, f) ->
    if | t == FD.dtDir -> go (cd </> f)
       | otherwise     -> pure (cd </> f)


getDirectoryContentsRecursiveUnfold :: (MonadMask m, MonadIO m, S.MonadAsync m) => Unfold m FilePath FilePath
getDirectoryContentsRecursiveUnfold = Unfold step (\s -> return (s, Nothing, [""]))
 where
  {-# INLINE [0] step #-}
  step (_, Nothing, []) = return D.Stop

  step (topdir, Just (cdir, dirstream, finalizer), dirs) = flip onException (runIOFinalizer finalizer) $ do
    (dt, f) <- liftIO $ readDirEnt dirstream
    if | FD.dtUnknown == dt -> do
           runIOFinalizer finalizer
           return $ D.Skip (topdir, Nothing, dirs)
       | f == "." || f == ".."
           -> return $ D.Skip (topdir, Just (cdir, dirstream, finalizer), dirs)
       | FD.dtDir == dt -> return $ D.Skip (topdir, Just (cdir, dirstream, finalizer), (cdir </> f):dirs)
       | otherwise -> return $ D.Yield (cdir </> f) (topdir, Just (cdir, dirstream, finalizer), dirs)

  step (topdir, Nothing, dir:dirs) = do
    (s, f) <- acquire (topdir </> dir)
    return $ D.Skip (topdir, Just (dir, s, f), dirs)

  acquire dir =
    withRunInIO $ \run -> mask_ $ run $ do
      dirstream <- liftIO $ openDirStream dir
      ref <- newIOFinalizer (liftIO $ closeDirStream dirstream)
      return (dirstream, ref)

getDirectoryContentsRecursiveBFSUnsafe :: (MonadMask m, MonadIO m, S.MonadAsync m)
                                       => FilePath
                                       -> S.SerialT m FilePath
getDirectoryContentsRecursiveBFSUnsafe = S.unfold getDirectoryContentsRecursiveUnfold
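As a usage note: these traversals return a Streamly stream rather than a list, so
callers decide how much of the directory tree is actually forced. A minimal sketch
(assuming the functions above are in scope and running in plain IO):

    import qualified Streamly.Prelude as S

    -- counts files without ever holding the whole listing in memory
    countFiles :: FilePath -> IO Int
    countFiles dir = S.length (getDirectoryContentsRecursiveBFSUnsafe dir)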
@@ -56,22 +56,3 @@ pathMax = #{const PATH_MAX}

unionFlags :: [Flags] -> CInt
unionFlags = fromIntegral . foldl' ((. unFlags) . (.|.)) 0


pattern DtBlk :: DirType
pattern DtBlk <- dtBlk
pattern DtChr :: DirType
pattern DtChr <- dtChr
pattern DtDir :: DirType
pattern DtDir <- dtdir
pattern DtFifo :: DirType
pattern DtFifo <- dtFifo
pattern DtLnk :: DirType
pattern DtLnk <- dtLnk
pattern DtReg :: DirType
pattern DtReg <- dtReg
pattern DtSock :: DirType
pattern DtSock <- dtSock
pattern DtUnknown :: DirType
pattern DtUnknown <- dtUnknown

{-# COMPLETE DtBlk, DtChr, DtDir, DtFifo, DtLnk, DtReg, DtSock, DtUnknown #-}
92  lib/GHCup/Utils/File/Posix/Traversals.hs  Normal file

@@ -0,0 +1,92 @@
{-# LANGUAGE CApiFFI #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}

{-# OPTIONS_GHC -Wall #-}


module GHCup.Utils.File.Posix.Traversals (
-- lower-level stuff
  readDirEnt
, unpackDirStream
) where


#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>))
#endif
import GHCup.Utils.File.Posix.Foreign

import Unsafe.Coerce (unsafeCoerce)
import Foreign.C.Error
import Foreign.C.String
import Foreign.C.Types
import Foreign.Ptr
import Foreign.Storable
import System.Posix
import Foreign (alloca)
import System.Posix.Internals (peekFilePath)




----------------------------------------------------------
-- dodgy stuff

type CDir = ()
type CDirent = ()

-- Posix doesn't export DirStream, so to re-use that type we need to use
-- unsafeCoerce. It's just a newtype, so this is a legitimate usage.
-- ugly trick.
unpackDirStream :: DirStream -> Ptr CDir
unpackDirStream = unsafeCoerce

-- the __hscore_* functions are defined in the unix package. We can import them and let
-- the linker figure it out.
foreign import ccall unsafe "__hscore_readdir"
  c_readdir :: Ptr CDir -> Ptr (Ptr CDirent) -> IO CInt

foreign import ccall unsafe "__hscore_free_dirent"
  c_freeDirEnt :: Ptr CDirent -> IO ()

foreign import ccall unsafe "__hscore_d_name"
  c_name :: Ptr CDirent -> IO CString

foreign import ccall unsafe "__posixdir_d_type"
  c_type :: Ptr CDirent -> IO DirType

----------------------------------------------------------
-- less dodgy but still lower-level


readDirEnt :: DirStream -> IO (DirType, FilePath)
readDirEnt (unpackDirStream -> dirp) =
  alloca $ \ptr_dEnt -> loop ptr_dEnt
 where
  loop ptr_dEnt = do
    resetErrno
    r <- c_readdir dirp ptr_dEnt
    if r == 0
      then do
        dEnt <- peek ptr_dEnt
        if dEnt == nullPtr
          then return (dtUnknown, mempty)
          else do
            dName <- c_name dEnt >>= peekFilePath
            dType <- c_type dEnt
            c_freeDirEnt dEnt
            return (dType, dName)
      else do
        errno <- getErrno
        if errno == eINTR
          then loop ptr_dEnt
          else do
            let (Errno eo) = errno
            if eo == 0
              then return (dtUnknown, mempty)
              else throwErrno "readDirEnt"
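A small sketch of how this low-level API is meant to be driven (not part of the
commit; it assumes readDirEnt's convention, visible above, of returning an empty
name at end-of-stream, and uses openDirStream/closeDirStream from System.Posix):

    import Control.Exception (bracket)
    import System.Posix.Directory (openDirStream, closeDirStream)

    -- list one directory level, skipping "." and ".."
    listOneLevel :: FilePath -> IO [FilePath]
    listOneLevel dir = bracket (openDirStream dir) closeDirStream go
      where
        go ds = do
          (_typ, name) <- readDirEnt ds
          if null name                          -- end of stream
            then pure []
            else if name == "." || name == ".."
              then go ds
              else (name :) <$> go ds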
@@ -17,7 +17,7 @@ Some of these functions use sophisticated logging.
module GHCup.Utils.File.Windows where

import {-# SOURCE #-} GHCup.Utils ( getLinkTarget, pathIsLink )
import GHCup.Utils.Dirs hiding ( copyFile )
import GHCup.Utils.Dirs
import GHCup.Utils.File.Common
import GHCup.Utils.Logger
import GHCup.Types

@@ -32,11 +32,14 @@ import Data.List
import Foreign.C.Error
import GHC.IO.Exception
import GHC.IO.Handle
import qualified GHC.Unicode as U
import System.Environment
import System.FilePath
import System.IO
import qualified System.IO.Error as IOE
import System.Process

import qualified System.Win32.Info as WS
import qualified System.Win32.File as WS
import qualified Control.Exception as EX
import qualified Data.ByteString as BS

@@ -44,6 +47,15 @@ import qualified Data.ByteString.Lazy as BL
import qualified Data.Map.Strict as Map
import qualified Data.Text as T

import qualified Streamly.Internal.Data.Stream.StreamD.Type
                                                 as D
import Streamly.Internal.Data.Unfold.Type hiding ( concatMap )
import Data.Bits ((.&.))
import qualified Streamly.Prelude as S
import qualified Streamly.Internal.Data.Unfold as U
import Streamly.Internal.Control.Concurrent ( withRunInIO )
import Streamly.Internal.Data.IOFinalizer ( newIOFinalizer, runIOFinalizer )


toProcessError :: FilePath

@@ -165,8 +177,8 @@ execLogged :: ( MonadReader env m
execLogged exe args chdir lfile env = do
  Dirs {..} <- getDirs
  logDebug $ T.pack $ "Running " <> exe <> " with arguments " <> show args
  let stdoutLogfile = logsDir </> lfile <> ".stdout.log"
      stderrLogfile = logsDir </> lfile <> ".stderr.log"
  let stdoutLogfile = fromGHCupPath logsDir </> lfile <> ".stdout.log"
      stderrLogfile = fromGHCupPath logsDir </> lfile <> ".stderr.log"
  cp <- createProcessWithMingwPath ((proc exe args)
    { cwd = chdir
    , env = env

@@ -257,7 +269,7 @@ ghcupMsys2Dir =
    Just fp -> pure fp
    Nothing -> do
      baseDir <- liftIO ghcupBaseDir
      pure (baseDir </> "msys64")
      pure (fromGHCupPath baseDir </> "msys64")

-- | Checks whether the binary is a broken link.
isBrokenSymlink :: FilePath -> IO Bool
@@ -286,3 +298,213 @@ install = copyFile

removeEmptyDirectory :: FilePath -> IO ()
removeEmptyDirectory = WS.removeDirectory


unfoldDirContents :: (S.MonadAsync m, MonadIO m, MonadCatch m, MonadMask m) => Unfold m FilePath (WS.FileAttributeOrFlag, FilePath)
unfoldDirContents = U.bracket alloc dealloc (Unfold step return)
 where
  {-# INLINE [0] step #-}
  step (_, False, _, _) = return D.Stop
  step (topdir, True, h, fd) = flip onException (liftIO $ WS.findClose h) $ do
    f <- liftIO $ WS.getFindDataFileName fd
    more <- liftIO $ WS.findNextFile h fd

    -- can't get file attribute from FindData yet (needs Win32 PR)
    fattr <- liftIO $ WS.getFileAttributes (topdir </> f)

    if | f == "." || f == ".." -> return $ D.Skip (topdir, more, h, fd)
       | otherwise             -> return $ D.Yield (fattr, f) (topdir, more, h, fd)

  alloc topdir = do
    query <- liftIO $ furnishPath (topdir </> "*")
    (h, fd) <- liftIO $ WS.findFirstFile query
    pure (topdir, True, h, fd)

  dealloc (_, _, fd, _) = liftIO $ WS.findClose fd


getDirectoryContentsRecursiveDFSUnsafe :: (MonadCatch m, S.MonadAsync m, MonadMask m, S.IsStream t)
                                       => FilePath
                                       -> t m FilePath
getDirectoryContentsRecursiveDFSUnsafe fp = go ""
 where
  isDir attrs = attrs .&. WS.fILE_ATTRIBUTE_DIRECTORY /= 0

  go cd = flip S.concatMap (S.unfold unfoldDirContents (fp </> cd)) $ \(t, f) ->
    if | isDir t   -> go (cd </> f)
       | otherwise -> pure (cd </> f)


getDirectoryContentsRecursiveUnfold :: (MonadCatch m, S.MonadAsync m, MonadMask m) => Unfold m FilePath FilePath
getDirectoryContentsRecursiveUnfold = Unfold step init'
 where
  {-# INLINE [0] step #-}
  step (_, Nothing, []) = return D.Stop

  step (topdir, state@(Just (cdir, (h, findData, ref))), dirs) = flip onException (runIOFinalizer ref) $ do
    f <- liftIO $ WS.getFindDataFileName findData

    more <- liftIO $ WS.findNextFile h findData
    when (not more) $ runIOFinalizer ref
    let nextState = if more then state else Nothing

    -- can't get file attribute from FindData yet (needs Win32 PR)
    fattr <- liftIO $ WS.getFileAttributes (topdir </> cdir </> f)

    if | f == "." || f == ".." -> return $ D.Skip (topdir, nextState, dirs)
       | isDir fattr           -> return $ D.Skip (topdir, nextState, (cdir </> f):dirs)
       | otherwise             -> return $ D.Yield (cdir </> f) (topdir, nextState, dirs)

  step (topdir, Nothing, dir:dirs) = do
    (h, findData, ref) <- acquire (topdir </> dir)
    return $ D.Skip (topdir, Just (dir, (h, findData, ref)), dirs)

  init' topdir = do
    (h, findData, ref) <- acquire topdir
    return (topdir, Just ("", (h, findData, ref)), [])

  isDir attrs = attrs .&. WS.fILE_ATTRIBUTE_DIRECTORY /= 0

  acquire dir = do
    query <- liftIO $ furnishPath (dir </> "*")
    withRunInIO $ \run -> mask_ $ run $ do
      (h, findData) <- liftIO $ WS.findFirstFile query
      ref <- newIOFinalizer (liftIO $ WS.findClose h)
      return (h, findData, ref)


getDirectoryContentsRecursiveBFSUnsafe :: (MonadMask m, MonadIO m, S.MonadAsync m)
                                       => FilePath
                                       -> S.SerialT m FilePath
getDirectoryContentsRecursiveBFSUnsafe = S.unfold getDirectoryContentsRecursiveUnfold

--------------------------------------
--[ Inlined from directory package ]--
--------------------------------------


furnishPath :: FilePath -> IO FilePath
furnishPath path =
  (toExtendedLengthPath <$> rawPrependCurrentDirectory path)
    `IOE.catchIOError` \ _ ->
      pure path


toExtendedLengthPath :: FilePath -> FilePath
toExtendedLengthPath path
  | isRelative path = simplifiedPath
  | otherwise =
      case simplifiedPath of
        '\\' : '?' : '?' : '\\' : _ -> simplifiedPath
        '\\' : '\\' : '?' : '\\' : _ -> simplifiedPath
        '\\' : '\\' : '.' : '\\' : _ -> simplifiedPath
        '\\' : subpath@('\\' : _) -> "\\\\?\\UNC" <> subpath
        _ -> "\\\\?\\" <> simplifiedPath
  where simplifiedPath = simplify path


simplify :: FilePath -> FilePath
simplify = simplifyWindows

simplifyWindows :: FilePath -> FilePath
simplifyWindows "" = ""
simplifyWindows path =
  case drive' of
    "\\\\?\\" -> drive' <> subpath
    _ -> simplifiedPath
  where
    simplifiedPath = joinDrive drive' subpath'
    (drive, subpath) = splitDrive path
    drive' = upperDrive (normaliseTrailingSep (normalisePathSeps drive))
    subpath' = appendSep . avoidEmpty . prependSep . joinPath .
               stripPardirs . expandDots . skipSeps .
               splitDirectories $ subpath

    upperDrive d = case d of
      c : ':' : s | U.isAlpha c && all isPathSeparator s -> U.toUpper c : ':' : s
      _ -> d
    skipSeps = filter (not . (`elem` (pure <$> pathSeparators)))
    stripPardirs | pathIsAbsolute || subpathIsAbsolute = dropWhile (== "..")
                 | otherwise = id
    prependSep | subpathIsAbsolute = (pathSeparator :)
               | otherwise = id
    avoidEmpty | not pathIsAbsolute
                 && (null drive || hasTrailingPathSep) -- prefer "C:" over "C:."
               = emptyToCurDir
               | otherwise = id
    appendSep p | hasTrailingPathSep
                  && not (pathIsAbsolute && null p)
                = addTrailingPathSeparator p
                | otherwise = p
    pathIsAbsolute = not (isRelative path)
    subpathIsAbsolute = any isPathSeparator (take 1 subpath)
    hasTrailingPathSep = hasTrailingPathSeparator subpath

emptyToCurDir :: FilePath -> FilePath
emptyToCurDir "" = "."
emptyToCurDir path = path

normaliseTrailingSep :: FilePath -> FilePath
normaliseTrailingSep path = do
  let path' = reverse path
  let (sep, path'') = span isPathSeparator path'
  let addSep = if null sep then id else (pathSeparator :)
  reverse (addSep path'')

normalisePathSeps :: FilePath -> FilePath
normalisePathSeps p = (\ c -> if isPathSeparator c then pathSeparator else c) <$> p

expandDots :: [FilePath] -> [FilePath]
expandDots = reverse . go []
  where
    go ys' xs' =
      case xs' of
        [] -> ys'
        x : xs ->
          case x of
            "." -> go ys' xs
            ".." ->
              case ys' of
                [] -> go (x : ys') xs
                ".." : _ -> go (x : ys') xs
                _ : ys -> go ys xs
            _ -> go (x : ys') xs

rawPrependCurrentDirectory :: FilePath -> IO FilePath
rawPrependCurrentDirectory path
  | isRelative path =
    ((`ioeAddLocation` "prependCurrentDirectory") .
     (`IOE.ioeSetFileName` path)) `IOE.modifyIOError` do
      getFullPathName path
  | otherwise = pure path

ioeAddLocation :: IOError -> String -> IOError
ioeAddLocation e loc = do
  IOE.ioeSetLocation e newLoc
  where
    newLoc = loc <> if null oldLoc then "" else ":" <> oldLoc
    oldLoc = IOE.ioeGetLocation e

getFullPathName :: FilePath -> IO FilePath
getFullPathName path =
  fromExtendedLengthPath <$> WS.getFullPathName (toExtendedLengthPath path)

fromExtendedLengthPath :: FilePath -> FilePath
fromExtendedLengthPath ePath =
  case ePath of
    '\\' : '\\' : '?' : '\\' : path ->
      case path of
        'U' : 'N' : 'C' : subpath@('\\' : _) -> "\\" <> subpath
        drive : ':' : subpath
          -- if the path is not "regular", then the prefix is necessary
          -- to ensure the path is interpreted literally
          | U.isAlpha drive && U.isAscii drive && isPathRegular subpath -> path
        _ -> ePath
    _ -> ePath
  where
    isPathRegular path =
      not ('/' `elem` path ||
           "." `elem` splitDirectories path ||
           ".." `elem` splitDirectories path)
@@ -56,7 +56,6 @@ import Haskus.Utils.Types.List
import Haskus.Utils.Variant.Excepts
import Text.PrettyPrint.HughesPJClass ( prettyShow, Pretty )
import System.IO.Error
import System.IO.Unsafe
import System.Directory hiding ( removeDirectory
                               , removeDirectoryRecursive
                               , removePathForcibly

@@ -81,6 +80,7 @@ import qualified Data.Text.Lazy.Builder.Int as B
import qualified Data.Text.Lazy.Encoding as TLE



-- $setup
-- >>> import Data.ByteString.Internal (c2w, w2c)
-- >>> import Test.QuickCheck
@@ -400,45 +400,6 @@ createDirRecursive' p =
    _ -> throwIO e



-- | List all the files in a directory and all subdirectories.
--
-- The order places files in sub-directories after all the files in their
-- parent directories. The list is generated lazily so is not well defined if
-- the source directory structure changes before the list is used.
--
-- TODO: use streamly
getDirectoryContentsRecursive :: GHCupPath -> IO [FilePath]
getDirectoryContentsRecursive (fromGHCupPath -> topdir) = getDirectoryContentsRecursiveUnsafe topdir


getDirectoryContentsRecursiveUnsafe :: FilePath -> IO [FilePath]
getDirectoryContentsRecursiveUnsafe topdir = recurseDirectories [""]
  where
    recurseDirectories :: [FilePath] -> IO [FilePath]
    recurseDirectories [] = return []
    recurseDirectories (dir:dirs) = unsafeInterleaveIO $ do
      (files, dirs') <- collect [] [] =<< getDirectoryContents (topdir </> dir)
      files' <- recurseDirectories (dirs' ++ dirs)
      return (files ++ files')

      where
        collect files dirs' [] = return (reverse files
                                        ,reverse dirs')
        collect files dirs' (entry:entries) | ignore entry
                                            = collect files dirs' entries
        collect files dirs' (entry:entries) = do
          let dirEntry = dir </> entry
          isDirectory <- doesDirectoryExist (topdir </> dirEntry)
          if isDirectory
            then collect files (dirEntry:dirs') entries
            else collect (dirEntry:files) dirs' entries

        ignore ['.'] = True
        ignore ['.', '.'] = True
        ignore _ = False


-- https://github.com/haskell/directory/issues/110
-- https://github.com/haskell/directory/issues/96
-- https://www.sqlite.org/src/info/89f1848d7f
58  test/GHCup/Utils/FileSpec.hs  Normal file

@@ -0,0 +1,58 @@
module GHCup.Utils.FileSpec where

import GHCup.Utils.File

import Data.List
import System.Directory
import System.FilePath
import System.IO.Unsafe
import qualified Streamly.Prelude as S

import Test.Hspec



spec :: Spec
spec = do
  describe "GHCup.Utils.File" $ do
    it "getDirectoryContentsRecursiveBFS" $ do
      l1 <- sort <$> S.toList (getDirectoryContentsRecursiveBFSUnsafe ".")
      l2 <- sort <$> getDirectoryContentsRecursiveLazy "."
      not (null l1) `shouldBe` True
      not (null l2) `shouldBe` True
      l1 `shouldBe` l2
    it "getDirectoryContentsRecursiveDFS" $ do
      l1 <- sort <$> S.toList (getDirectoryContentsRecursiveDFSUnsafe ".")
      l2 <- sort <$> getDirectoryContentsRecursiveLazy "."
      not (null l1) `shouldBe` True
      not (null l2) `shouldBe` True
      l1 `shouldBe` l2


getDirectoryContentsRecursiveLazy :: FilePath -> IO [FilePath]
getDirectoryContentsRecursiveLazy topdir = recurseDirectories [""]
  where
    recurseDirectories :: [FilePath] -> IO [FilePath]
    recurseDirectories [] = return []
    recurseDirectories (dir:dirs) = unsafeInterleaveIO $ do
      (files, dirs') <- collect [] [] =<< getDirectoryContents (topdir </> dir)
      files' <- recurseDirectories (dirs' ++ dirs)
      return (files ++ files')

      where
        collect files dirs' [] = return (reverse files
                                        ,reverse dirs')
        collect files dirs' (entry:entries) | ignore entry
                                            = collect files dirs' entries
        collect files dirs' (entry:entries) = do
          let dirEntry = dir </> entry
          isDirectory <- doesDirectoryExist (topdir </> dirEntry)
          if isDirectory
            then collect files (dirEntry:dirs') entries
            else collect (dirEntry:files) dirs' entries

        ignore ['.'] = True
        ignore ['.', '.'] = True
        ignore _ = False
@@ -1,10 +1,9 @@
import Test.Hspec.Runner
import Test.Hspec.Formatters
import qualified Spec


main :: IO ()
main =
  hspecWith
    defaultConfig { configFormatter = Just progress }
    defaultConfig
    Spec.spec