Alexey Levan <bugs@darcs.net> writes:
Use temporary files in doOptimizeHTTP
-------------------------------------
> hunk ./src/Darcs/Commands/Optimize.lhs 27
> import Control.Applicative ( (<$>) )
> import Control.Monad ( when, unless )
> import Data.Maybe ( isJust )
> -import System.Directory ( getDirectoryContents, doesDirectoryExist, doesFileExist )
> +import System.Directory ( getDirectoryContents, doesDirectoryExist,
> + doesFileExist, renameFile )
> import System.IO.Unsafe ( unsafeInterleaveIO )
> import qualified Data.ByteString.Char8 as BS
> import qualified Data.ByteString.Lazy as BL
> hunk ./src/Darcs/Commands/Optimize.lhs 378
> "Unsupported repository format:\n" ++
> " only hashed repositories can be optimized for HTTP"
> createDirectoryIfMissing False packsDir
> + ps <- dirContents' "patches" $ \x -> all (x /=) ["unrevert", "pending",
> + "pending.tentative"]
> + BL.writeFile (packsDir </> "patches.tar.gz.part") . compress . write =<<
> + mapM fileEntry' ps
> + renameFile (packsDir </> "patches.tar.gz.part") $
> + packsDir </> "patches.tar.gz"
> let i = darcsdir </> "hashed_inventory"
> is <- dirContents "inventories"
> pr <- dirContents "pristine.hashed"
> hunk ./src/Darcs/Commands/Optimize.lhs 387
> - BL.writeFile (packsDir </> "basic.tar.gz") . compress . write =<<
> + BL.writeFile (packsDir </> "basic.tar.gz.part") . compress . write =<<
> mapM fileEntry' (i : (is ++ pr))
> hunk ./src/Darcs/Commands/Optimize.lhs 389
> - ps <- dirContents' "patches" $ \x -> all (x /=) ["unrevert", "pending",
> - "pending.tentative"]
> - BL.writeFile (packsDir </> "patches.tar.gz") . compress . write =<<
> - mapM fileEntry' ps
> + renameFile (packsDir </> "basic.tar.gz.part") $ packsDir </> "basic.tar.gz"
> where
> packsDir = darcsdir </> "packs"
> fileEntry' x = unsafeInterleaveIO $ do
OK, although at this point it may be worth adding a few bindings ...
basic_tar = packsDir </> "basic.tar.gz"
patches_tar = packsDir </> "patches.tar.gz"
you can then also use (basic_tar <.> "part") for the partial
files. (Need to import <.> from System.FilePath...)
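To make that concrete, roughly the following (the writeTarball helper and the
empty placeholder contents are only illustrative, not what the patch writes):

    import System.Directory ( createDirectoryIfMissing, renameFile )
    import System.FilePath ( (</>), (<.>) )
    import qualified Data.ByteString.Lazy as BL

    -- Write the tarball to a ".part" sibling first, then rename it into place.
    writeTarball :: FilePath -> BL.ByteString -> IO ()
    writeTarball tar contents = do
        BL.writeFile (tar <.> "part") contents   -- e.g. "basic.tar.gz.part"
        renameFile (tar <.> "part") tar

    main :: IO ()
    main = do
        let packsDir    = "_darcs" </> "packs"
            basic_tar   = packsDir </> "basic.tar.gz"
            patches_tar = packsDir </> "patches.tar.gz"
        createDirectoryIfMissing True packsDir
        -- In doOptimizeHTTP the contents come from `compress . write` over
        -- the tar entries; empty placeholders here.
        writeTarball basic_tar   BL.empty
        writeTarball patches_tar BL.empty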
If you want to amend this one, I'll wait a bit with pushing...
Don't use packs when getting a local repository
-----------------------------------------------
> hunk ./src/Darcs/Repository.hs 120
> AllowUnrelatedRepos, NoUpdateWorking )
> , compression )
> import Darcs.Global ( darcsdir )
> +import Darcs.URL ( isFile )
>
> import Storage.Hashed.Tree( Tree, emptyTree )
> import Storage.Hashed.Hash( encodeBase16 )
> hunk ./src/Darcs/Repository.hs 240
> debugMessage "Copying prefs"
> copyFileOrUrl opts (fromDir ++ "/" ++ darcsdir ++ "/prefs/prefs")
> (darcsdir ++ "/prefs/prefs") (MaxAge 600) `catchall` return ()
> - b <- (Just <$> fetchFileLazyPS (fromDir ++ "/" ++ darcsdir ++
> - "/packs/basic.tar.gz") Uncachable) `catchall` return Nothing
> - case b of
> - Nothing -> copyNotPackedRepository fromRepo
> - Just b' -> copyPackedRepository fromRepo b'
> + if isFile fromDir
> + then copyNotPackedRepository fromRepo
> + else do
> + b <- (Just <$> fetchFileLazyPS (fromDir ++ "/" ++ darcsdir ++
> + "/packs/basic.tar.gz") Uncachable) `catchall` return Nothing
> + case b of
> + Nothing -> copyNotPackedRepository fromRepo
> + Just b' -> copyPackedRepository fromRepo b'
>
> copyNotPackedRepository :: forall p C(r u t). RepoPatch p => Repository p C(r u t) -> IO ()
> copyNotPackedRepository fromrepository@(Repo _ opts rffrom _) = do
OK
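(As an aside, the shape of the dispatch stripped of the darcs plumbing looks
like this; isLocalPath, fetchBasicPack, copyPlain and copyPacked are
hypothetical stand-ins for Darcs.URL.isFile, fetchFileLazyPS and the two
copy functions:)

    import Control.Exception ( SomeException, try )
    import Data.List ( isPrefixOf )
    import qualified Data.ByteString.Lazy as BL

    isLocalPath :: String -> Bool
    isLocalPath d = not (any (`isPrefixOf` d) ["http:", "https:", "ssh:"])

    -- A local source skips the pack probe entirely; a remote one tries the
    -- pack and falls back to the plain copy on failure.
    copyAnyRepository :: String -> IO ()
    copyAnyRepository fromDir
      | isLocalPath fromDir = copyPlain fromDir
      | otherwise = do
          b <- try (fetchBasicPack fromDir)
                 :: IO (Either SomeException BL.ByteString)
          case b of
            Left _   -> copyPlain fromDir        -- no pack, or fetch failed
            Right b' -> copyPacked fromDir b'
      where
        fetchBasicPack d = BL.readFile (d ++ "/_darcs/packs/basic.tar.gz")
        copyPlain d      = putStrLn ("plain copy of " ++ d)   -- placeholder
        copyPacked d _   = putStrLn ("packed copy of " ++ d)  -- placeholder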
Add support for lazy getting a packed repository
------------------------------------------------
> hunk ./src/Darcs/Repository.hs 87
>
> import Control.Applicative ( (<$>) )
> import Control.Monad ( unless, when )
> -import System.Directory ( createDirectory, renameDirectory, createDirectoryIfMissing )
> +import System.Directory ( createDirectory, renameDirectory,
> + createDirectoryIfMissing, renameFile )
> import System.IO.Error ( isAlreadyExistsError )
>
> import qualified Darcs.Repository.DarcsRepo as DarcsRepo
> hunk ./src/Darcs/Repository.hs 114
> import Darcs.External ( copyFileOrUrl, Cachable(..), fetchFileLazyPS )
> import Progress ( debugMessage, tediousSize, beginTedious, endTedious )
> import Darcs.ProgressPatches (progressRLShowTags, progressFL)
> -import Darcs.Lock ( writeBinFile, writeDocBinFile, rmRecursive )
> +import Darcs.Lock ( writeBinFile, writeDocBinFile, rmRecursive, withTemp )
> import Darcs.Witnesses.Sealed ( Sealed(..), FlippedSeal(..), flipSeal, mapFlipped )
>
> import Darcs.Flags ( DarcsFlag( Quiet, Partial, Lazy, Ephemeral, Complete,
> hunk ./src/Darcs/Repository.hs 301
> applyToWorking toRepo opts pw
> return ()
> -- get old patches
> - writeCompressed . Tar.read . decompress =<< fetchFileLazyPS (fromPacksDir ++
> - "patches.tar.gz") Uncachable
> + unless (any (`elem` opts) [Partial, Lazy, Ephemeral]) $ do
> + putInfo "Copying patches, to get lazy repository hit ctrl-C..."
> + writeCompressed . Tar.read . decompress =<< fetchFileLazyPS (fromPacksDir ++
> + "patches.tar.gz") Uncachable
> where
> writeCompressed Tar.Done = return ()
> writeCompressed (Tar.Next x xs) = case Tar.entryContent x of
Clear.
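(The gate in isolation, with a cut-down flag type and fetchOldPatches as a
placeholder for the actual tarball download:)

    import Control.Monad ( unless )

    data DarcsFlag = Partial | Lazy | Ephemeral | Complete | Quiet
        deriving Eq

    -- Only pull the full history when none of the "lazy" flags is present.
    fetchHistoryUnlessLazy :: [DarcsFlag] -> IO ()
    fetchHistoryUnlessLazy opts =
        unless (any (`elem` opts) [Partial, Lazy, Ephemeral]) $ do
            putStrLn "Copying patches, to get lazy repository hit ctrl-C..."
            fetchOldPatches
      where
        fetchOldPatches = return ()   -- placeholder for the real download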
> hunk ./src/Darcs/Repository.hs 310
> Tar.NormalFile x' _ -> do
> let p = Tar.entryPath x
> - BL.writeFile p $ if "hashed_inventory" `isSuffixOf` p
> - then x'
> - else compress x'
> + withTemp $ \p' -> do
> + BL.writeFile p' $ if "hashed_inventory" `isSuffixOf` p
> + then x'
> + else compress x'
> + renameFile p' p
> writeCompressed xs
> _ -> fail "Unexpected non-file tar entry"
> writeCompressed (Tar.Fail e) = fail e
This made me wonder for a while, but I see it makes the unpacking work
atomically, in the sense that upon an interrupt, no partially unpacked
files stay in the way (each file is either complete or not there at
all). OK.
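For reference, the same pattern outside of darcs, with a plain sibling name
standing in for withTemp (the writeFileAtomic name and the ".tmp" suffix are
mine, not what darcs uses):

    import Control.Exception ( bracketOnError )
    import Control.Monad ( when )
    import System.Directory ( doesFileExist, removeFile, renameFile )
    import qualified Data.ByteString.Lazy as BL

    -- Write to a temporary file and rename it into place once the write has
    -- finished; an interrupt leaves at most a stray ".tmp" file behind,
    -- never a truncated target.
    writeFileAtomic :: FilePath -> BL.ByteString -> IO ()
    writeFileAtomic path contents =
        bracketOnError (return tmp) cleanup $ \tmp' -> do
            BL.writeFile tmp' contents
            renameFile tmp' path
      where
        tmp = path ++ ".tmp"
        cleanup t = do
            exists <- doesFileExist t
            when exists (removeFile t)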
> hunk ./src/Darcs/Repository.hs 318
> + putInfo = when (not $ Quiet `elem` opts) . putStrLn
>
> -- | writePatchSet is like patchSetToRepository, except that it doesn't
> -- touch the working directory or pristine cache.
Ack.
I'll push this shortly (depending on what you say about amending the
first patch). May I also remind you that an automated regression test
for this functionality would be most welcome? :)
Yours,
Petr.