Diffstat (limited to 'lib/Hakyll/Core')
-rw-r--r--  lib/Hakyll/Core/Dependencies.hs |  51
-rw-r--r--  lib/Hakyll/Core/Item.hs         |  19
-rw-r--r--  lib/Hakyll/Core/Rules.hs        |   9
-rw-r--r--  lib/Hakyll/Core/Runtime.hs      | 319
-rw-r--r--  lib/Hakyll/Core/Store.hs        |  29
-rw-r--r--  lib/Hakyll/Core/Util/File.hs    |  30
6 files changed, 265 insertions(+), 192 deletions(-)
diff --git a/lib/Hakyll/Core/Dependencies.hs b/lib/Hakyll/Core/Dependencies.hs
index 4a51b9c..f9b8048 100644
--- a/lib/Hakyll/Core/Dependencies.hs
+++ b/lib/Hakyll/Core/Dependencies.hs
@@ -33,6 +33,7 @@ import Hakyll.Core.Identifier.Pattern
data Dependency
= PatternDependency Pattern (Set Identifier)
| IdentifierDependency Identifier
+ | AlwaysOutOfDate
deriving (Show, Typeable)
@@ -40,9 +41,11 @@ data Dependency
instance Binary Dependency where
put (PatternDependency p is) = putWord8 0 >> put p >> put is
put (IdentifierDependency i) = putWord8 1 >> put i
+ put AlwaysOutOfDate = putWord8 2
get = getWord8 >>= \t -> case t of
0 -> PatternDependency <$> get <*> get
1 -> IdentifierDependency <$> get
+ 2 -> pure AlwaysOutOfDate
_ -> error "Data.Binary.get: Invalid Dependency"
@@ -84,13 +87,30 @@ markOod id' = State.modify $ \s ->
--------------------------------------------------------------------------------
-dependenciesFor :: Identifier -> DependencyM [Identifier]
+-- | Collection of dependencies that should be checked to determine
+-- if an identifier needs rebuilding.
+data Dependencies
+ = DependsOn [Identifier]
+ | MustRebuild
+ deriving (Show)
+
+instance Semigroup Dependencies where
+ DependsOn ids <> DependsOn moreIds = DependsOn (ids <> moreIds)
+ MustRebuild <> _ = MustRebuild
+ _ <> MustRebuild = MustRebuild
+
+instance Monoid Dependencies where
+ mempty = DependsOn []
+
+--------------------------------------------------------------------------------
+dependenciesFor :: Identifier -> DependencyM Dependencies
dependenciesFor id' = do
facts <- dependencyFacts <$> State.get
- return $ concatMap dependenciesFor' $ fromMaybe [] $ M.lookup id' facts
+ return $ foldMap dependenciesFor' $ fromMaybe [] $ M.lookup id' facts
where
- dependenciesFor' (IdentifierDependency i) = [i]
- dependenciesFor' (PatternDependency _ is) = S.toList is
+ dependenciesFor' (IdentifierDependency i) = DependsOn [i]
+ dependenciesFor' (PatternDependency _ is) = DependsOn $ S.toList is
+ dependenciesFor' AlwaysOutOfDate = MustRebuild
--------------------------------------------------------------------------------
@@ -113,6 +133,7 @@ checkChangedPatterns = do
{dependencyFacts = M.insert id' deps' $ dependencyFacts s}
where
go _ ds (IdentifierDependency i) = return $ IdentifierDependency i : ds
+ go _ ds AlwaysOutOfDate = return $ AlwaysOutOfDate : ds
go id' ds (PatternDependency p ls) = do
universe <- ask
let ls' = S.fromList $ filterMatches p universe
@@ -136,11 +157,17 @@ bruteForce = do
check (todo, changed) id' = do
deps <- dependenciesFor id'
- ood <- dependencyOod <$> State.get
- case find (`S.member` ood) deps of
- Nothing -> return (id' : todo, changed)
- Just d -> do
- tell [show id' ++ " is out-of-date because " ++
- show d ++ " is out-of-date"]
- markOod id'
- return (todo, True)
+ case deps of
+ DependsOn depList -> do
+ ood <- dependencyOod <$> State.get
+ case find (`S.member` ood) depList of
+ Nothing -> return (id' : todo, changed)
+ Just d -> do
+ tell [show id' ++ " is out-of-date because " ++
+ show d ++ " is out-of-date"]
+ markOod id'
+ return (todo, True)
+ MustRebuild -> do
+ tell [show id' ++ " will be forcibly rebuilt"]
+ markOod id'
+ return (todo, True)
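
A standalone sketch (not part of the patch; it uses simplified stand-ins for the internal types, with plain String identifiers) of why the Semigroup instance matters: MustRebuild is absorbing, so a single AlwaysOutOfDate fact forces a rebuild regardless of any other recorded dependencies.

    data Dependencies = DependsOn [String] | MustRebuild
        deriving (Show)

    instance Semigroup Dependencies where
        DependsOn ids <> DependsOn moreIds = DependsOn (ids <> moreIds)
        MustRebuild   <> _                 = MustRebuild
        _             <> MustRebuild       = MustRebuild

    instance Monoid Dependencies where
        mempty = DependsOn []

    main :: IO ()
    main = do
        print $ DependsOn ["a.md"] <> DependsOn ["b.md"]    -- DependsOn ["a.md","b.md"]
        print $ mconcat [DependsOn ["a.md"], MustRebuild]   -- MustRebuild

This mirrors how the foldMap in dependenciesFor collapses a mixed list of facts into either a dependency list or an unconditional rebuild.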
diff --git a/lib/Hakyll/Core/Item.hs b/lib/Hakyll/Core/Item.hs
index e05df42..af15b94 100644
--- a/lib/Hakyll/Core/Item.hs
+++ b/lib/Hakyll/Core/Item.hs
@@ -2,6 +2,7 @@
-- | An item is a combination of some content and its 'Identifier'. This way, we
-- can still use the 'Identifier' to access metadata.
{-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE DeriveTraversable #-}
module Hakyll.Core.Item
( Item (..)
, itemSetBody
@@ -25,23 +26,7 @@ import Hakyll.Core.Identifier
data Item a = Item
{ itemIdentifier :: Identifier
, itemBody :: a
- } deriving (Show, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Functor Item where
- fmap f (Item i x) = Item i (f x)
-
-
---------------------------------------------------------------------------------
-instance Foldable Item where
- foldr f z (Item _ x) = f x z
-
-
---------------------------------------------------------------------------------
-instance Traversable Item where
- traverse f (Item i x) = Item i <$> f x
-
+ } deriving (Show, Typeable, Functor, Foldable, Traversable)
--------------------------------------------------------------------------------
instance Binary a => Binary (Item a) where
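
A standalone sketch (a simplified Item with a String identifier, not the real type) of what the derived instances provide; the handwritten Functor, Foldable and Traversable instances removed above behaved identically. DeriveTraversable implies DeriveFunctor and DeriveFoldable, which is why only that one pragma is added.

    {-# LANGUAGE DeriveTraversable #-}

    data Item a = Item { itemIdentifier :: String, itemBody :: a }
        deriving (Show, Functor, Foldable, Traversable)

    main :: IO ()
    main = do
        print $ fmap length (Item "posts/a.md" "hello")   -- maps over the body, keeps the identifier
        print $ traverse (\b -> if null b then Nothing else Just b)
                         (Item "posts/a.md" "")           -- Nothing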
diff --git a/lib/Hakyll/Core/Rules.hs b/lib/Hakyll/Core/Rules.hs
index 41b9a73..695665a 100644
--- a/lib/Hakyll/Core/Rules.hs
+++ b/lib/Hakyll/Core/Rules.hs
@@ -29,6 +29,7 @@ module Hakyll.Core.Rules
, preprocess
, Dependency (..)
, rulesExtraDependencies
+ , forceCompile
) where
@@ -221,3 +222,11 @@ rulesExtraDependencies deps rules =
| (i, c) <- rulesCompilers ruleSet
]
}
+
+
+--------------------------------------------------------------------------------
+-- | Force the item(s) to always be recompiled, whether or not the
+-- dependencies are out of date. This can be useful if you are using
+-- I/O to generate part (or all) of an item.
+forceCompile :: Rules a -> Rules a
+forceCompile = rulesExtraDependencies [AlwaysOutOfDate]
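
A hedged usage sketch for the new combinator (the rule, file name and site layout are illustrative, not part of the patch): wrap a rule in forceCompile when its output depends on IO that the dependency tracker cannot see.

    {-# LANGUAGE OverloadedStrings #-}
    import Data.Time.Clock (getCurrentTime)
    import Hakyll

    main :: IO ()
    main = hakyll $
        forceCompile $ create ["build-info.txt"] $ do
            route idRoute
            compile $ do
                -- The body comes from IO, so no recorded dependency would
                -- ever mark this item out of date on its own.
                now <- unsafeCompiler getCurrentTime
                makeItem (show now)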
diff --git a/lib/Hakyll/Core/Runtime.hs b/lib/Hakyll/Core/Runtime.hs
index 68970cd..cc15b3e 100644
--- a/lib/Hakyll/Core/Runtime.hs
+++ b/lib/Hakyll/Core/Runtime.hs
@@ -5,19 +5,24 @@ module Hakyll.Core.Runtime
--------------------------------------------------------------------------------
-import Control.Monad (unless)
-import Control.Monad.Except (ExceptT, runExceptT, throwError)
-import Control.Monad.Reader (ask)
-import Control.Monad.RWS (RWST, runRWST)
-import Control.Monad.State (get, modify)
-import Control.Monad.Trans (liftIO)
-import Data.List (intercalate)
-import Data.Map (Map)
-import qualified Data.Map as M
-import Data.Set (Set)
-import qualified Data.Set as S
-import System.Exit (ExitCode (..))
-import System.FilePath ((</>))
+import Control.Concurrent.Async.Lifted (forConcurrently_)
+import Control.Concurrent.STM (atomically, modifyTVar', readTVarIO, newTVarIO, TVar)
+import Control.Monad (unless)
+import Control.Monad.Except (ExceptT, runExceptT, throwError)
+import Control.Monad.Reader (ask)
+import Control.Monad.RWS (RWST, runRWST)
+import Control.Monad.State (get)
+import Control.Monad.Trans (liftIO)
+import qualified Data.Array as A
+import Data.Graph (Graph)
+import qualified Data.Graph as G
+import Data.List (intercalate)
+import Data.Map (Map)
+import qualified Data.Map as M
+import Data.Set (Set)
+import qualified Data.Set as S
+import System.Exit (ExitCode (..))
+import System.FilePath ((</>))
--------------------------------------------------------------------------------
@@ -67,11 +72,13 @@ run config logger rules = do
, runtimeRoutes = rulesRoutes ruleSet
, runtimeUniverse = M.fromList compilers
}
- state = RuntimeState
- { runtimeDone = S.empty
- , runtimeSnapshots = S.empty
- , runtimeTodo = M.empty
- , runtimeFacts = oldFacts
+
+ state <- newTVarIO $ RuntimeState
+ { runtimeDone = S.empty
+ , runtimeSnapshots = S.empty
+ , runtimeTodo = M.empty
+ , runtimeFacts = oldFacts
+ , runtimeDependencies = M.empty
}
-- Run the program and fetch the resulting state
@@ -83,7 +90,8 @@ run config logger rules = do
return (ExitFailure 1, ruleSet)
Right (_, s, _) -> do
- Store.set store factsKey $ runtimeFacts s
+ facts <- fmap runtimeFacts . liftIO . readTVarIO $ s
+ Store.set store factsKey facts
Logger.debug logger "Removing tmp directory..."
removeDirectory $ tmpDirectory config
@@ -107,15 +115,30 @@ data RuntimeRead = RuntimeRead
--------------------------------------------------------------------------------
data RuntimeState = RuntimeState
- { runtimeDone :: Set Identifier
- , runtimeSnapshots :: Set (Identifier, Snapshot)
- , runtimeTodo :: Map Identifier (Compiler SomeItem)
- , runtimeFacts :: DependencyFacts
+ { runtimeDone :: Set Identifier
+ , runtimeSnapshots :: Set (Identifier, Snapshot)
+ , runtimeTodo :: Map Identifier (Compiler SomeItem)
+ , runtimeFacts :: DependencyFacts
+ , runtimeDependencies :: Map Identifier (Set Identifier)
}
--------------------------------------------------------------------------------
-type Runtime a = RWST RuntimeRead () RuntimeState (ExceptT String IO) a
+type Runtime a = RWST RuntimeRead () (TVar RuntimeState) (ExceptT String IO) a
+
+
+--------------------------------------------------------------------------------
+-- Because compilation of rules often revolves around IO,
+-- it is not possible to live in the STM monad and hence benefit from
+-- its guarantees.
+-- Be very careful when modifying the state
+modifyRuntimeState :: (RuntimeState -> RuntimeState) -> Runtime ()
+modifyRuntimeState f = get >>= \s -> liftIO . atomically $ modifyTVar' s f
+
+
+--------------------------------------------------------------------------------
+getRuntimeState :: Runtime RuntimeState
+getRuntimeState = liftIO . readTVarIO =<< get
--------------------------------------------------------------------------------
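
A standalone sketch (plain async/STM rather than the lifted variants imported above) of the pattern the runtime now relies on: the IO-heavy compiler work runs outside any transaction, and only the pure state update goes through atomically and modifyTVar'.

    import Control.Concurrent.Async (forConcurrently_)
    import Control.Concurrent.STM (atomically, modifyTVar', newTVarIO, readTVarIO)
    import qualified Data.Set as S

    main :: IO ()
    main = do
        done <- newTVarIO (S.empty :: S.Set Int)
        forConcurrently_ [1 .. 10 :: Int] $ \i -> do
            -- (the compiler run for item i would happen here, in plain IO)
            atomically $ modifyTVar' done (S.insert i)
        readTVarIO done >>= print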
@@ -135,13 +158,15 @@ scheduleOutOfDate = do
logger <- runtimeLogger <$> ask
provider <- runtimeProvider <$> ask
universe <- runtimeUniverse <$> ask
- facts <- runtimeFacts <$> get
- todo <- runtimeTodo <$> get
let identifiers = M.keys universe
modified = S.fromList $ flip filter identifiers $
resourceModified provider
-
+
+ state <- getRuntimeState
+ let facts = runtimeFacts state
+ todo = runtimeTodo state
+
let (ood, facts', msgs) = outOfDate identifiers modified facts
todo' = M.filterWithKey
(\id' _ -> id' `S.member` ood) universe
@@ -150,7 +175,7 @@ scheduleOutOfDate = do
mapM_ (Logger.debug logger) msgs
-- Update facts and todo items
- modify $ \s -> s
+ modifyRuntimeState $ \s -> s
{ runtimeDone = runtimeDone s `S.union`
(S.fromList identifiers `S.difference` ood)
, runtimeTodo = todo `M.union` todo'
@@ -161,116 +186,138 @@ scheduleOutOfDate = do
--------------------------------------------------------------------------------
pickAndChase :: Runtime ()
pickAndChase = do
- todo <- runtimeTodo <$> get
- case M.minViewWithKey todo of
- Nothing -> return ()
- Just ((id', _), _) -> do
- chase [] id'
- pickAndChase
+ todo <- runtimeTodo <$> getRuntimeState
+ unless (null todo) $ do
+ checkForDependencyCycle
+ forConcurrently_ (M.keys todo) chase
+ pickAndChase
--------------------------------------------------------------------------------
-chase :: [Identifier] -> Identifier -> Runtime ()
-chase trail id'
- | id' `elem` trail = throwError $ "Hakyll.Core.Runtime.chase: " ++
- "Dependency cycle detected: " ++ intercalate " depends on "
- (map show $ dropWhile (/= id') (reverse trail) ++ [id'])
- | otherwise = do
- logger <- runtimeLogger <$> ask
- todo <- runtimeTodo <$> get
- provider <- runtimeProvider <$> ask
- universe <- runtimeUniverse <$> ask
- routes <- runtimeRoutes <$> ask
- store <- runtimeStore <$> ask
- config <- runtimeConfiguration <$> ask
- Logger.debug logger $ "Processing " ++ show id'
-
- let compiler = todo M.! id'
- read' = CompilerRead
- { compilerConfig = config
- , compilerUnderlying = id'
- , compilerProvider = provider
- , compilerUniverse = M.keysSet universe
- , compilerRoutes = routes
- , compilerStore = store
- , compilerLogger = logger
+-- | Check for cyclic dependencies in the current state
+checkForDependencyCycle :: Runtime ()
+checkForDependencyCycle = do
+ deps <- runtimeDependencies <$> getRuntimeState
+ let (depgraph, nodeFromVertex, _) = G.graphFromEdges [(k, k, S.toList dps) | (k, dps) <- M.toList deps]
+ dependencyCycles = map ((\(_, k, _) -> k) . nodeFromVertex) $ cycles depgraph
+
+ unless (null dependencyCycles) $ do
+ throwError $ "Hakyll.Core.Runtime.pickAndChase: " ++
+ "Dependency cycle detected: " ++ intercalate ", " (map show dependencyCycles) ++
+ " are inter-dependent."
+ where
+ cycles :: Graph -> [G.Vertex]
+ cycles g = map fst . filter (uncurry $ reachableFromAny g) . A.assocs $ g
+
+ reachableFromAny :: Graph -> G.Vertex -> [G.Vertex] -> Bool
+ reachableFromAny graph node = elem node . concatMap (G.reachable graph)
+
+
+--------------------------------------------------------------------------------
+chase :: Identifier -> Runtime ()
+chase id' = do
+ logger <- runtimeLogger <$> ask
+ provider <- runtimeProvider <$> ask
+ universe <- runtimeUniverse <$> ask
+ routes <- runtimeRoutes <$> ask
+ store <- runtimeStore <$> ask
+ config <- runtimeConfiguration <$> ask
+
+ state <- getRuntimeState
+
+ Logger.debug logger $ "Processing " ++ show id'
+
+ let compiler = (runtimeTodo state) M.! id'
+ read' = CompilerRead
+ { compilerConfig = config
+ , compilerUnderlying = id'
+ , compilerProvider = provider
+ , compilerUniverse = M.keysSet universe
+ , compilerRoutes = routes
+ , compilerStore = store
+ , compilerLogger = logger
+ }
+
+ result <- liftIO $ runCompiler compiler read'
+ case result of
+ -- Rethrow error
+ CompilerError e -> throwError $ case compilerErrorMessages e of
+ [] -> "Compiler failed but no info given, try running with -v?"
+ es -> intercalate "; " es
+
+ -- Signal that a snapshot was saved ->
+ CompilerSnapshot snapshot c -> do
+ -- Update info. The next 'chase' will pick us again at some
+ -- point so we can continue then.
+ modifyRuntimeState $ \s -> s
+ { runtimeSnapshots = S.insert (id', snapshot) (runtimeSnapshots s)
+ , runtimeTodo = M.insert id' c (runtimeTodo s)
+ }
+
+
+ -- Huge success
+ CompilerDone (SomeItem item) cwrite -> do
+ -- Print some info
+ let facts = compilerDependencies cwrite
+ cacheHits
+ | compilerCacheHits cwrite <= 0 = "updated"
+ | otherwise = "cached "
+ Logger.message logger $ cacheHits ++ " " ++ show id'
+
+ -- Sanity check
+ unless (itemIdentifier item == id') $ throwError $
+ "The compiler yielded an Item with Identifier " ++
+ show (itemIdentifier item) ++ ", but we were expecting " ++
+ "an Item with Identifier " ++ show id' ++ " " ++
+ "(you probably want to call makeItem to solve this problem)"
+
+ -- Write if necessary
+ (mroute, _) <- liftIO $ runRoutes routes provider id'
+ case mroute of
+ Nothing -> return ()
+ Just route -> do
+ let path = destinationDirectory config </> route
+ liftIO $ makeDirectories path
+ liftIO $ write path item
+ Logger.debug logger $ "Routed to " ++ path
+
+ -- Save! (For load)
+ liftIO $ save store item
+
+ modifyRuntimeState $ \s -> s
+ { runtimeDone = S.insert id' (runtimeDone s)
+ , runtimeTodo = M.delete id' (runtimeTodo s)
+ , runtimeFacts = M.insert id' facts (runtimeFacts s)
+ }
+
+ -- Try something else first
+ CompilerRequire dep c -> do
+ let (depId, depSnapshot) = dep
+ Logger.debug logger $
+ "Compiler requirement found for: " ++ show id' ++
+ ", requirement: " ++ show depId
+
+ let done = runtimeDone state
+ snapshots = runtimeSnapshots state
+ deps = runtimeDependencies state
+
+ -- Done if we either completed the entire item (runtimeDone) or
+ -- if we previously saved the snapshot (runtimeSnapshots).
+ let depDone =
+ depId `S.member` done ||
+ (depId, depSnapshot) `S.member` snapshots
+
+ let deps' = if depDone
+ then deps
+ else M.insertWith S.union id' (S.singleton depId) deps
+
+ modifyRuntimeState $ \s -> s
+ { runtimeTodo = M.insert id'
+ (if depDone then c else compilerResult result)
+ (runtimeTodo s)
+ , runtimeDependencies = deps'
}
- result <- liftIO $ runCompiler compiler read'
- case result of
- -- Rethrow error
- CompilerError e -> throwError $ case compilerErrorMessages e of
- [] -> "Compiler failed but no info given, try running with -v?"
- es -> intercalate "; " es
-
- -- Signal that a snapshot was saved ->
- CompilerSnapshot snapshot c -> do
- -- Update info. The next 'chase' will pick us again at some
- -- point so we can continue then.
- modify $ \s -> s
- { runtimeSnapshots =
- S.insert (id', snapshot) (runtimeSnapshots s)
- , runtimeTodo = M.insert id' c (runtimeTodo s)
- }
-
- -- Huge success
- CompilerDone (SomeItem item) cwrite -> do
- -- Print some info
- let facts = compilerDependencies cwrite
- cacheHits
- | compilerCacheHits cwrite <= 0 = "updated"
- | otherwise = "cached "
- Logger.message logger $ cacheHits ++ " " ++ show id'
-
- -- Sanity check
- unless (itemIdentifier item == id') $ throwError $
- "The compiler yielded an Item with Identifier " ++
- show (itemIdentifier item) ++ ", but we were expecting " ++
- "an Item with Identifier " ++ show id' ++ " " ++
- "(you probably want to call makeItem to solve this problem)"
-
- -- Write if necessary
- (mroute, _) <- liftIO $ runRoutes routes provider id'
- case mroute of
- Nothing -> return ()
- Just route -> do
- let path = destinationDirectory config </> route
- liftIO $ makeDirectories path
- liftIO $ write path item
- Logger.debug logger $ "Routed to " ++ path
-
- -- Save! (For load)
- liftIO $ save store item
-
- -- Update state
- modify $ \s -> s
- { runtimeDone = S.insert id' (runtimeDone s)
- , runtimeTodo = M.delete id' (runtimeTodo s)
- , runtimeFacts = M.insert id' facts (runtimeFacts s)
- }
-
- -- Try something else first
- CompilerRequire dep c -> do
- -- Update the compiler so we don't execute it twice
- let (depId, depSnapshot) = dep
- done <- runtimeDone <$> get
- snapshots <- runtimeSnapshots <$> get
-
- -- Done if we either completed the entire item (runtimeDone) or
- -- if we previously saved the snapshot (runtimeSnapshots).
- let depDone =
- depId `S.member` done ||
- (depId, depSnapshot) `S.member` snapshots
-
- modify $ \s -> s
- { runtimeTodo = M.insert id'
- (if depDone then c else compilerResult result)
- (runtimeTodo s)
- }
-
- -- If the required item is already compiled, continue, or, start
- -- chasing that
- Logger.debug logger $ "Require " ++ show depId ++
- " (snapshot " ++ depSnapshot ++ "): " ++
- (if depDone then "OK" else "chasing")
- if depDone then chase trail id' else chase (id' : trail) depId
+ Logger.debug logger $ "Require " ++ show depId ++
+ " (snapshot " ++ depSnapshot ++ ") "
+ \ No newline at end of file
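
A standalone sketch of the cycle check above: a vertex lies on a cycle exactly when it is reachable from at least one of its own successors (G.reachable includes the start vertex itself, which is why the test goes through the successor list rather than the vertex). The identifiers are illustrative.

    import qualified Data.Array as A
    import qualified Data.Graph as G

    cycles :: G.Graph -> [G.Vertex]
    cycles g = map fst . filter (uncurry (reachableFromAny g)) . A.assocs $ g

    reachableFromAny :: G.Graph -> G.Vertex -> [G.Vertex] -> Bool
    reachableFromAny graph node = elem node . concatMap (G.reachable graph)

    main :: IO ()
    main = do
        let (g, nodeFromVertex, _) = G.graphFromEdges
                [ ("a.md", "a.md", ["b.md"])   -- a.md requires b.md
                , ("b.md", "b.md", ["a.md"])   -- b.md requires a.md: a cycle
                , ("c.md", "c.md", ["a.md"])   -- c.md depends on the cycle but is not on it
                ]
        print $ map ((\(k, _, _) -> k) . nodeFromVertex) (cycles g)   -- ["a.md","b.md"]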
diff --git a/lib/Hakyll/Core/Store.hs b/lib/Hakyll/Core/Store.hs
index bfcd191..da16c6f 100644
--- a/lib/Hakyll/Core/Store.hs
+++ b/lib/Hakyll/Core/Store.hs
@@ -16,20 +16,14 @@ module Hakyll.Core.Store
--------------------------------------------------------------------------------
-import qualified Data.ByteArray as BA
-import qualified Crypto.Hash as CH
+import qualified Data.Hashable as DH
import Data.Binary (Binary, decode, encodeFile)
-import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Cache.LRU.IO as Lru
import Data.List (intercalate)
import Data.Maybe (isJust)
-import qualified Data.Text as T
-import qualified Data.Text.Encoding as T
import Data.Typeable (TypeRep, Typeable, cast, typeOf)
-import Numeric (showHex)
-import System.Directory (createDirectoryIfMissing)
-import System.Directory (doesFileExist, removeFile)
+import System.Directory (createDirectoryIfMissing, doesFileExist, removeFile)
import System.FilePath ((</>))
import System.IO (IOMode (..), hClose, openFile)
import System.IO.Error (catchIOError, ioeSetFileName,
@@ -194,21 +188,4 @@ deleteFile = (`catchIOError` \_ -> return ()) . removeFile
--------------------------------------------------------------------------------
-- | Mostly meant for internal usage
hash :: [String] -> String
-hash = toHex . B.unpack . hashMD5 . T.encodeUtf8 . T.pack . intercalate "/"
- where
- toHex [] = ""
- toHex (x : xs) | x < 16 = '0' : showHex x (toHex xs)
- | otherwise = showHex x (toHex xs)
-
-
---------------------------------------------------------------------------------
--- | Hash by MD5
-hashMD5 :: B.ByteString -> B.ByteString
-hashMD5 x =
- let
- digest :: CH.Digest CH.MD5
- digest = CH.hash x
- bytes :: B.ByteString
- bytes = BA.convert digest
- in
- bytes
+hash = show . DH.hash . intercalate "/"
\ No newline at end of file
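
A standalone sketch of the new key derivation (the sample key parts are illustrative): Data.Hashable replaces the MD5 hex digest with a plain Int hash rendered via show. Note that hashable, unlike MD5, does not promise stable values across library versions or platforms, so store file names may differ between setups.

    import qualified Data.Hashable as DH
    import Data.List (intercalate)

    -- Same shape as the new implementation: join the key parts and show the Int hash.
    hashKey :: [String] -> String
    hashKey = show . DH.hash . intercalate "/"

    main :: IO ()
    main = putStrLn $ hashKey ["Hakyll.Core.Compiler.Require", "posts/a.md", "body"]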
diff --git a/lib/Hakyll/Core/Util/File.hs b/lib/Hakyll/Core/Util/File.hs
index 9db6b11..02b8ece 100644
--- a/lib/Hakyll/Core/Util/File.hs
+++ b/lib/Hakyll/Core/Util/File.hs
@@ -1,3 +1,5 @@
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE CPP #-}
--------------------------------------------------------------------------------
-- | A module containing various file utility functions
module Hakyll.Core.Util.File
@@ -8,10 +10,12 @@ module Hakyll.Core.Util.File
--------------------------------------------------------------------------------
+import Control.Concurrent (threadDelay)
+import Control.Exception (SomeException, catch)
import Control.Monad (filterM, forM, when)
import System.Directory (createDirectoryIfMissing,
doesDirectoryExist, getDirectoryContents,
- removeDirectoryRecursive)
+ removeDirectoryRecursive, removePathForcibly)
import System.FilePath (takeDirectory, (</>))
@@ -51,6 +55,30 @@ getRecursiveContents ignore top = go ""
--------------------------------------------------------------------------------
removeDirectory :: FilePath -> IO ()
+#ifndef mingw32_HOST_OS
removeDirectory fp = do
e <- doesDirectoryExist fp
when e $ removeDirectoryRecursive fp
+#else
+-- Deleting files on Windows is unreliable. If a file/directory is open by a program (e.g. antivirus),
+-- then removing related directories *quickly* may fail with strange messages.
+-- See here for discussions:
+-- https://github.com/haskell/directory/issues/96
+-- https://github.com/haskell/win32/pull/129
+--
+-- The hacky solution is to retry deleting directories a few times,
+-- with a delay, on Windows only.
+removeDirectory = retryWithDelay 10 . removePathForcibly
+#endif
+
+
+--------------------------------------------------------------------------------
+-- | Retry an operation at most /n/ times (/n/ must be positive).
+-- If the operation fails the /n/th time it will throw that final exception.
+-- A delay of 100ms is introduced between every retry.
+retryWithDelay :: Int -> IO a -> IO a
+retryWithDelay i x
+ | i <= 0 = error "Hakyll.Core.Util.File.retry: retry count must be 1 or more"
+ | i == 1 = x
+ | otherwise = catch x $ \(_::SomeException) -> threadDelay 100000 >> retryWithDelay (i-1) x
+
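
A hedged usage sketch for retryWithDelay outside this module (the directory name is illustrative, and the helper is redefined here because it is internal): wrap any IO action that may fail transiently, such as a directory removal racing against another process.

    {-# LANGUAGE ScopedTypeVariables #-}
    import Control.Concurrent (threadDelay)
    import Control.Exception (SomeException, catch)
    import System.Directory (removePathForcibly)

    retryWithDelay :: Int -> IO a -> IO a
    retryWithDelay i x
        | i <= 0    = error "retryWithDelay: retry count must be 1 or more"
        | i == 1    = x
        | otherwise = catch x $ \(_ :: SomeException) ->
            threadDelay 100000 >> retryWithDelay (i - 1) x   -- 100 ms between attempts

    main :: IO ()
    main = retryWithDelay 10 (removePathForcibly "_site/tmp")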