author    Jasper Van der Jeugt <jaspervdj@gmail.com>  2011-03-01 14:50:41 +0100
committer Jasper Van der Jeugt <jaspervdj@gmail.com>  2011-03-01 14:50:41 +0100
commit    90b25105830d6e4b0943ab55f9317bd142533acf (patch)
tree      6eefb80a8a84724e70539dd8fa449530f7b17fe0 /src/Hakyll/Core
parent    8ef5a3ed0307be5d34a9564d02af3ed494f8e228 (diff)
parent    8b727b994d482d593046f9b01a5c40b97c166d62 (diff)
download  hakyll-90b25105830d6e4b0943ab55f9317bd142533acf.tar.gz
Merge branch 'hakyll3'
Conflicts:
    hakyll.cabal
    src/Text/Hakyll/Tags.hs
Diffstat (limited to 'src/Hakyll/Core')
-rw-r--r--  src/Hakyll/Core/CompiledItem.hs                           |   45
-rw-r--r--  src/Hakyll/Core/Compiler.hs                               |  333
-rw-r--r--  src/Hakyll/Core/Compiler/Internal.hs                      |  146
-rw-r--r--  src/Hakyll/Core/Configuration.hs                          |   44
-rw-r--r--  src/Hakyll/Core/CopyFile.hs                               |   29
-rw-r--r--  src/Hakyll/Core/DirectedGraph.hs                          |   85
-rw-r--r--  src/Hakyll/Core/DirectedGraph/DependencySolver.hs         |   70
-rw-r--r--  src/Hakyll/Core/DirectedGraph/Dot.hs                      |   30
-rw-r--r--  src/Hakyll/Core/DirectedGraph/Internal.hs                 |   43
-rw-r--r--  src/Hakyll/Core/Identifier.hs                             |   59
-rw-r--r--  src/Hakyll/Core/Identifier/Pattern.hs                     |  160
-rw-r--r--  src/Hakyll/Core/Logger.hs                                 |   90
-rw-r--r--  src/Hakyll/Core/ResourceProvider.hs                       |   75
-rw-r--r--  src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs  |   29
-rw-r--r--  src/Hakyll/Core/Routes.hs                                 |  136
-rw-r--r--  src/Hakyll/Core/Rules.hs                                  |  161
-rw-r--r--  src/Hakyll/Core/Rules/Internal.hs                         |   75
-rw-r--r--  src/Hakyll/Core/Run.hs                                    |  207
-rw-r--r--  src/Hakyll/Core/Store.hs                                  |   88
-rw-r--r--  src/Hakyll/Core/UnixFilter.hs                             |   76
-rw-r--r--  src/Hakyll/Core/Util/Arrow.hs                             |   25
-rw-r--r--  src/Hakyll/Core/Util/File.hs                              |   90
-rw-r--r--  src/Hakyll/Core/Util/String.hs                            |   48
-rw-r--r--  src/Hakyll/Core/Writable.hs                               |   22
24 files changed, 2166 insertions(+), 0 deletions(-)
diff --git a/src/Hakyll/Core/CompiledItem.hs b/src/Hakyll/Core/CompiledItem.hs
new file mode 100644
index 0000000..5dd0efc
--- /dev/null
+++ b/src/Hakyll/Core/CompiledItem.hs
@@ -0,0 +1,45 @@
+-- | A module containing a box datatype representing a compiled item. This
+-- item can be of any type, given that a few restrictions hold:
+--
+-- * we need a 'Typeable' instance to perform type-safe casts;
+--
+-- * we need a 'Binary' instance so we can serialize these items to the cache;
+--
+-- * we need a 'Writable' instance so the results can be saved.
+--
+{-# LANGUAGE ExistentialQuantification #-}
+module Hakyll.Core.CompiledItem
+ ( CompiledItem (..)
+ , compiledItem
+ , unCompiledItem
+ ) where
+
+import Data.Binary (Binary)
+import Data.Typeable (Typeable, cast)
+import Data.Maybe (fromMaybe)
+
+import Hakyll.Core.Writable
+
+-- | Box type for a compiled item
+--
+data CompiledItem = forall a. (Binary a, Typeable a, Writable a)
+ => CompiledItem a
+
+instance Writable CompiledItem where
+ write p (CompiledItem x) = write p x
+
+-- | Box a value into a 'CompiledItem'
+--
+compiledItem :: (Binary a, Typeable a, Writable a)
+ => a
+ -> CompiledItem
+compiledItem = CompiledItem
+
+-- | Unbox a value from a 'CompiledItem'
+--
+unCompiledItem :: (Binary a, Typeable a, Writable a)
+ => CompiledItem
+ -> a
+unCompiledItem (CompiledItem x) = fromMaybe error' $ cast x
+ where
+ error' = error "Hakyll.Core.CompiledItem.unCompiledItem: Unsupported type"
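
A minimal sketch of using the box (not part of this diff; it assumes the
'Writable' instance for String from Hakyll.Core.Writable, which is not shown
in this excerpt):

    import Hakyll.Core.CompiledItem

    -- Boxing and unboxing at the same type round-trips the value.
    roundTrip :: String
    roundTrip = unCompiledItem (compiledItem "hello")

    -- Unboxing at a different type hits the error branch, since 'cast'
    -- returns Nothing:
    --
    -- > unCompiledItem (compiledItem "hello") :: Int
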
diff --git a/src/Hakyll/Core/Compiler.hs b/src/Hakyll/Core/Compiler.hs
new file mode 100644
index 0000000..e5da9b8
--- /dev/null
+++ b/src/Hakyll/Core/Compiler.hs
@@ -0,0 +1,333 @@
+-- | A Compiler manages targets and dependencies between targets
+--
+-- The most distinguishing property of a 'Compiler' is that it is an Arrow. A
+-- compiler of the type @Compiler a b@ is simply a compilation phase which takes
+-- an @a@ as input, and produces a @b@ as output.
+--
+-- Compilers are chained using the '>>>' arrow operation. If we have a compiler
+--
+-- > getResourceString :: Compiler Resource String
+--
+-- which reads the resource, and a compiler
+--
+-- > readPage :: Compiler String (Page String)
+--
+-- we can chain these two compilers to get a
+--
+-- > (getResourceString >>> readPage) :: Compiler Resource (Page String)
+--
+-- Most compilers can be created by combining smaller compilers using '>>>'.
+--
+-- More advanced constructions are also possible using arrows, and sometimes
+-- these are needed. For a good introduction to arrows, you can refer to
+--
+-- <http://en.wikibooks.org/wiki/Haskell/Understanding_arrows>
+--
+-- A construction worth writing a few paragraphs about here is the family of
+-- 'require' functions. Different variants of this function are exported, but
+-- they all serve more or less the same goal.
+--
+-- When you use only '>>>' to chain your compilers, you get a linear pipeline --
+-- it is not possible to add extra items from other compilers along the way.
+-- This is where the 'require' functions come in.
+--
+-- These functions allow you to reference other items, which are then added to
+-- the pipeline. Let's look at this crappy ASCII illustration which represents
+-- a pretty common scenario:
+--
+-- > read resource >>> pandoc render >>> layout >>> relativize URLs
+-- >
+-- > @templates/fancy.html@
+--
+-- We want to construct a pipeline of compilers to go from our resource to a
+-- proper webpage. However, the @layout@ compiler takes more than just the
+-- rendered page as input: it needs the @templates/fancy.html@ template as well.
+--
+-- This is an example of where we need the @require@ function. We can solve
+-- this using a construction that looks like:
+--
+-- > ... >>> pandoc render >>> require >>> layout >>> ...
+-- > |
+-- > @templates/fancy.html@ ------/
+--
+-- This illustration can help us understand the type signature of 'require'.
+--
+-- > require :: (Binary a, Typeable a, Writable a)
+-- > => Identifier
+-- > -> (b -> a -> c)
+-- > -> Compiler b c
+--
+-- Let's look at it in detail:
+--
+-- > (Binary a, Typeable a, Writable a)
+--
+-- These are constraints for the @a@ type. @a@ (the template) needs to have
+-- certain properties for it to be required.
+--
+-- > Identifier
+--
+-- This is simply @templates/fancy.html@: the 'Identifier' of the item we want
+-- to 'require', in other words, the name of the item we want to add to the
+-- pipeline somehow.
+--
+-- > (b -> a -> c)
+--
+-- This is a function given by the user, specifying /how/ the two items shall be
+-- merged. @b@ is the output of the previous compiler, and @a@ is the item we
+-- just required -- the template. This means @c@ will be the final output of the
+-- 'require' combinator.
+--
+-- > Compiler b c
+--
+-- Indeed, we have now constructed a compiler which takes a @b@ and produces a
+-- @c@. This means that we have a linear pipeline again, thanks to the 'require'
+-- function. So, the 'require' function actually helps to reduce to complexity
+-- of Hakyll applications!
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Compiler
+ ( Compiler
+ , runCompiler
+ , getIdentifier
+ , getRoute
+ , getRouteFor
+ , getResourceString
+ , fromDependency
+ , require_
+ , require
+ , requireA
+ , requireAll_
+ , requireAll
+ , requireAllA
+ , cached
+ , unsafeCompiler
+ , traceShowCompiler
+ , mapCompiler
+ , timedCompiler
+ , byExtension
+ ) where
+
+import Prelude hiding ((.), id)
+import Control.Arrow ((>>>), (&&&), arr)
+import Control.Applicative ((<$>))
+import Control.Monad.Reader (ask)
+import Control.Monad.Trans (liftIO)
+import Control.Category (Category, (.), id)
+import Data.Maybe (fromMaybe)
+import System.FilePath (takeExtension)
+
+import Data.Binary (Binary)
+import Data.Typeable (Typeable)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Store
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.Logger
+
+-- | Run a compiler, yielding the resulting target and its dependencies. This
+-- version of 'runCompilerJob' also stores the result
+--
+runCompiler :: Compiler () CompileRule -- ^ Compiler to run
+ -> Identifier -- ^ Target identifier
+ -> ResourceProvider -- ^ Resource provider
+ -> Routes -- ^ Route
+ -> Store -- ^ Store
+ -> Bool -- ^ Was the resource modified?
+ -> Logger -- ^ Logger
+ -> IO CompileRule -- ^ Resulting item
+runCompiler compiler identifier provider routes store modified logger = do
+ -- Run the compiler job
+ result <-
+ runCompilerJob compiler identifier provider routes store modified logger
+
+ -- Inspect the result
+ case result of
+ -- In case we compiled an item, we will store a copy in the cache first,
+ -- before we return control. This makes sure the compiled item can later
+ -- be accessed by e.g. require.
+ CompileRule (CompiledItem x) ->
+ storeSet store "Hakyll.Core.Compiler.runCompiler" identifier x
+
+ -- Otherwise, we do nothing here
+ _ -> return ()
+
+ return result
+
+-- | Get the identifier of the item that is currently being compiled
+--
+getIdentifier :: Compiler a Identifier
+getIdentifier = fromJob $ const $ CompilerM $ compilerIdentifier <$> ask
+
+-- | Get the route we are using for this item
+--
+getRoute :: Compiler a (Maybe FilePath)
+getRoute = getIdentifier >>> getRouteFor
+
+-- | Get the route for a specified item
+--
+getRouteFor :: Compiler Identifier (Maybe FilePath)
+getRouteFor = fromJob $ \identifier -> CompilerM $ do
+ routes <- compilerRoutes <$> ask
+ return $ runRoutes routes identifier
+
+-- | Get the resource we are compiling as a string
+--
+getResourceString :: Compiler Resource String
+getResourceString = fromJob $ \resource -> CompilerM $ do
+ provider <- compilerResourceProvider <$> ask
+ liftIO $ resourceString provider resource
+
+-- | Auxiliary: get a dependency
+--
+getDependency :: (Binary a, Writable a, Typeable a)
+ => Identifier -> CompilerM a
+getDependency identifier = CompilerM $ do
+ store <- compilerStore <$> ask
+ fmap (fromMaybe error') $ liftIO $
+ storeGet store "Hakyll.Core.Compiler.runCompiler" identifier
+ where
+ error' = error $ "Hakyll.Core.Compiler.getDependency: "
+ ++ show identifier
+ ++ " not found in the cache, the cache might be corrupted or"
+ ++ " the item you are referring to might not exist"
+
+
+-- | Variant of 'require' which drops the current value
+--
+require_ :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> Compiler b a
+require_ identifier =
+ fromDependency identifier >>> fromJob (const $ getDependency identifier)
+
+-- | Require another target. Using this function ensures automatic handling of
+-- dependencies
+--
+require :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> (b -> a -> c)
+ -> Compiler b c
+require identifier = requireA identifier . arr . uncurry
+
+-- | Arrow-based variant of 'require'
+--
+requireA :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> Compiler (b, a) c
+ -> Compiler b c
+requireA identifier = (id &&& require_ identifier >>>)
+
+-- | Variant of 'requireAll' which drops the current value
+--
+requireAll_ :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> Compiler b [a]
+requireAll_ pattern = fromDependencies (const getDeps) >>> fromJob requireAll_'
+ where
+ getDeps = matches pattern . map unResource . resourceList
+ requireAll_' = const $ CompilerM $ do
+ deps <- getDeps . compilerResourceProvider <$> ask
+ mapM (unCompilerM . getDependency) deps
+
+-- | Require a number of targets. Using this function ensures automatic handling
+-- of dependencies
+--
+requireAll :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> (b -> [a] -> c)
+ -> Compiler b c
+requireAll pattern = requireAllA pattern . arr . uncurry
+
+-- | Arrow-based variant of 'requireAll'
+--
+requireAllA :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> Compiler (b, [a]) c
+ -> Compiler b c
+requireAllA pattern = (id &&& requireAll_ pattern >>>)
+
+cached :: (Binary a, Typeable a, Writable a)
+ => String
+ -> Compiler Resource a
+ -> Compiler Resource a
+cached name (Compiler d j) = Compiler d $ const $ CompilerM $ do
+ logger <- compilerLogger <$> ask
+ identifier <- compilerIdentifier <$> ask
+ store <- compilerStore <$> ask
+ modified <- compilerResourceModified <$> ask
+ report logger $ "Checking cache: " ++ if modified then "modified" else "OK"
+ if modified
+ then do v <- unCompilerM $ j $ Resource identifier
+ liftIO $ storeSet store name identifier v
+ return v
+ else do v <- liftIO $ storeGet store name identifier
+ case v of Just v' -> return v'
+ Nothing -> error'
+ where
+ error' = error "Hakyll.Core.Compiler.cached: Cache corrupt!"
+
+-- | Create an unsafe compiler from a function in IO
+--
+unsafeCompiler :: (a -> IO b) -- ^ Function to lift
+ -> Compiler a b -- ^ Resulting compiler
+unsafeCompiler f = fromJob $ CompilerM . liftIO . f
+
+-- | Compiler for debugging purposes
+--
+traceShowCompiler :: Show a => Compiler a a
+traceShowCompiler = fromJob $ \x -> CompilerM $ do
+ logger <- compilerLogger <$> ask
+ report logger $ show x
+ return x
+
+-- | Map over a compiler
+--
+mapCompiler :: Compiler a b
+ -> Compiler [a] [b]
+mapCompiler (Compiler d j) = Compiler d $ mapM j
+
+-- | Log and time a compiler
+--
+timedCompiler :: String -- ^ Message
+ -> Compiler a b -- ^ Compiler to time
+ -> Compiler a b -- ^ Resulting compiler
+timedCompiler msg (Compiler d j) = Compiler d $ \x -> CompilerM $ do
+ logger <- compilerLogger <$> ask
+ timed logger msg $ unCompilerM $ j x
+
+-- | Choose a compiler by extension
+--
+-- Example:
+--
+-- > route "css/*" $ setExtension "css"
+-- > compile "css/*" $ byExtension (error "Not a (S)CSS file")
+-- > [ (".css", compressCssCompiler)
+-- > , (".scss", sass)
+-- > ]
+--
+-- This piece of code will select the @compressCssCompiler@ for @.css@ files,
+-- and the @sass@ compiler (defined elsewhere) for @.scss@ files.
+--
+byExtension :: Compiler a b -- ^ Default compiler
+ -> [(String, Compiler a b)] -- ^ Choices
+ -> Compiler a b -- ^ Resulting compiler
+byExtension defaultCompiler choices = Compiler deps job
+ where
+ -- Look up the compiler; fall back to the default compiler when not found
+ lookup' identifier =
+ let extension = takeExtension $ toFilePath identifier
+ in fromMaybe defaultCompiler $ lookup extension choices
+ -- Collect the dependencies of the choice
+ deps = do
+ identifier <- dependencyIdentifier <$> ask
+ compilerDependencies $ lookup' identifier
+ -- Collect the job of the choice
+ job x = CompilerM $ do
+ identifier <- compilerIdentifier <$> ask
+ unCompilerM $ compilerJob (lookup' identifier) x
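
To make the 'require' story above concrete, here is a hedged sketch of a small
pipeline; the template identifier and the render step are made up for
illustration, and a 'Writable' instance for String is assumed:

    {-# LANGUAGE OverloadedStrings #-}
    import Control.Arrow ((>>>), arr)
    import Hakyll.Core.Compiler
    import Hakyll.Core.ResourceProvider (Resource (..))

    -- Read the resource, apply a stand-in render step, then merge in the
    -- previously compiled item at templates/fancy.html.
    layout :: Compiler Resource String
    layout = getResourceString
        >>> arr (++ "\n")  -- stand-in for a real pandoc render
        >>> require "templates/fancy.html" (\body tpl -> tpl ++ body)
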
diff --git a/src/Hakyll/Core/Compiler/Internal.hs b/src/Hakyll/Core/Compiler/Internal.hs
new file mode 100644
index 0000000..53df044
--- /dev/null
+++ b/src/Hakyll/Core/Compiler/Internal.hs
@@ -0,0 +1,146 @@
+-- | Internally used compiler module
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Compiler.Internal
+ ( Dependencies
+ , DependencyEnvironment (..)
+ , CompilerEnvironment (..)
+ , CompilerM (..)
+ , Compiler (..)
+ , runCompilerJob
+ , runCompilerDependencies
+ , fromJob
+ , fromDependencies
+ , fromDependency
+ ) where
+
+import Prelude hiding ((.), id)
+import Control.Applicative (Applicative, pure, (<*>), (<$>))
+import Control.Monad.Reader (ReaderT, Reader, ask, runReaderT, runReader)
+import Control.Monad ((<=<), liftM2)
+import Data.Set (Set)
+import qualified Data.Set as S
+import Control.Category (Category, (.), id)
+import Control.Arrow (Arrow, ArrowChoice, arr, first, left)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Store
+import Hakyll.Core.Routes
+import Hakyll.Core.Logger
+
+-- | A set of dependencies
+--
+type Dependencies = Set Identifier
+
+-- | Environment in which the dependency analyzer runs
+--
+data DependencyEnvironment = DependencyEnvironment
+ { -- | Target identifier
+ dependencyIdentifier :: Identifier
+ , -- | Resource provider
+ dependencyResourceProvider :: ResourceProvider
+ }
+
+-- | Environment in which a compiler runs
+--
+data CompilerEnvironment = CompilerEnvironment
+ { -- | Target identifier
+ compilerIdentifier :: Identifier
+ , -- | Resource provider
+ compilerResourceProvider :: ResourceProvider
+ , -- | Site routes
+ compilerRoutes :: Routes
+ , -- | Compiler store
+ compilerStore :: Store
+ , -- | Flag indicating if the underlying resource was modified
+ compilerResourceModified :: Bool
+ , -- | Logger
+ compilerLogger :: Logger
+ }
+
+-- | The compiler monad
+--
+newtype CompilerM a = CompilerM
+ { unCompilerM :: ReaderT CompilerEnvironment IO a
+ } deriving (Monad, Functor, Applicative)
+
+-- | The compiler arrow
+--
+data Compiler a b = Compiler
+ { compilerDependencies :: Reader DependencyEnvironment Dependencies
+ , compilerJob :: a -> CompilerM b
+ }
+
+instance Functor (Compiler a) where
+ fmap f ~(Compiler d j) = Compiler d $ fmap f . j
+
+instance Applicative (Compiler a) where
+ pure = Compiler (return S.empty) . const . return
+ ~(Compiler d1 f) <*> ~(Compiler d2 j) =
+ Compiler (liftM2 S.union d1 d2) $ \x -> f x <*> j x
+
+instance Category Compiler where
+ id = Compiler (return S.empty) return
+ ~(Compiler d1 j1) . ~(Compiler d2 j2) =
+ Compiler (liftM2 S.union d1 d2) (j1 <=< j2)
+
+instance Arrow Compiler where
+ arr f = Compiler (return S.empty) (return . f)
+ first ~(Compiler d j) = Compiler d $ \(x, y) -> do
+ x' <- j x
+ return (x', y)
+
+instance ArrowChoice Compiler where
+ left ~(Compiler d j) = Compiler d $ \e -> case e of
+ Left l -> Left <$> j l
+ Right r -> Right <$> return r
+
+-- | Run a compiler, yielding the resulting target and its dependencies
+--
+runCompilerJob :: Compiler () a -- ^ Compiler to run
+ -> Identifier -- ^ Target identifier
+ -> ResourceProvider -- ^ Resource provider
+ -> Routes -- ^ Route
+ -> Store -- ^ Store
+ -> Bool -- ^ Was the resource modified?
+ -> Logger -- ^ Logger
+ -> IO a
+runCompilerJob compiler identifier provider route store modified logger =
+ runReaderT (unCompilerM $ compilerJob compiler ()) env
+ where
+ env = CompilerEnvironment
+ { compilerIdentifier = identifier
+ , compilerResourceProvider = provider
+ , compilerRoutes = route
+ , compilerStore = store
+ , compilerResourceModified = modified
+ , compilerLogger = logger
+ }
+
+runCompilerDependencies :: Compiler () a
+ -> Identifier
+ -> ResourceProvider
+ -> Dependencies
+runCompilerDependencies compiler identifier provider =
+ runReader (compilerDependencies compiler) env
+ where
+ env = DependencyEnvironment
+ { dependencyIdentifier = identifier
+ , dependencyResourceProvider = provider
+ }
+
+fromJob :: (a -> CompilerM b)
+ -> Compiler a b
+fromJob = Compiler (return S.empty)
+
+fromDependencies :: (Identifier -> ResourceProvider -> [Identifier])
+ -> Compiler b b
+fromDependencies collectDeps = flip Compiler return $ do
+ DependencyEnvironment identifier provider <- ask
+ return $ S.fromList $ collectDeps identifier provider
+
+-- | Wait until another compiler has finished before running this compiler
+--
+fromDependency :: Identifier -> Compiler a a
+fromDependency = fromDependencies . const . const . return
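
The two-phase design is visible in a small sketch: the dependency pass is a
pure Reader computation, so no compiler jobs run while dependencies are
collected. The 'provider' argument is assumed to come from a concrete provider
such as 'fileResourceProvider':

    {-# LANGUAGE OverloadedStrings #-}
    import Hakyll.Core.Compiler.Internal
    import Hakyll.Core.ResourceProvider (ResourceProvider)

    -- Collect the dependencies of a compiler without running its job.
    deps :: ResourceProvider -> Dependencies
    deps provider = runCompilerDependencies
        (fromDependency "templates/fancy.html") "index" provider
    -- => fromList ["templates/fancy.html"]
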
diff --git a/src/Hakyll/Core/Configuration.hs b/src/Hakyll/Core/Configuration.hs
new file mode 100644
index 0000000..242b68f
--- /dev/null
+++ b/src/Hakyll/Core/Configuration.hs
@@ -0,0 +1,44 @@
+-- | Exports a datastructure for the top-level hakyll configuration
+--
+module Hakyll.Core.Configuration
+ ( HakyllConfiguration (..)
+ , defaultHakyllConfiguration
+ ) where
+
+import System.FilePath (takeFileName)
+import Data.List (isPrefixOf, isSuffixOf)
+
+data HakyllConfiguration = HakyllConfiguration
+ { -- | Directory in which the output is written
+ destinationDirectory :: FilePath
+ , -- | Directory where hakyll's internal store is kept
+ storeDirectory :: FilePath
+ , -- | Function to determine ignored files
+ --
+ -- In 'defaultHakyllConfiguration', the following files are ignored:
+ --
+ -- * files starting with a @.@
+ --
+ -- * files ending with a @~@
+ --
+ -- * files ending with @.swp@
+ --
+ ignoreFile :: FilePath -> Bool
+ }
+
+-- | Default configuration for a hakyll application
+--
+defaultHakyllConfiguration :: HakyllConfiguration
+defaultHakyllConfiguration = HakyllConfiguration
+ { destinationDirectory = "_site"
+ , storeDirectory = "_cache"
+ , ignoreFile = ignoreFile'
+ }
+ where
+ ignoreFile' path
+ | "." `isPrefixOf` fileName = True
+ | "~" `isSuffixOf` fileName = True
+ | ".swp" `isSuffixOf` fileName = True
+ | otherwise = False
+ where
+ fileName = takeFileName path
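
Since 'HakyllConfiguration' is a plain record, a site can tweak the defaults
with record update syntax; a small sketch:

    import Hakyll.Core.Configuration

    -- Write the site to "out", keeping the default ignore rules.
    config :: HakyllConfiguration
    config = defaultHakyllConfiguration { destinationDirectory = "out" }
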
diff --git a/src/Hakyll/Core/CopyFile.hs b/src/Hakyll/Core/CopyFile.hs
new file mode 100644
index 0000000..dbbaaa1
--- /dev/null
+++ b/src/Hakyll/Core/CopyFile.hs
@@ -0,0 +1,29 @@
+-- | Exports simple compilers to just copy files
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
+module Hakyll.Core.CopyFile
+ ( CopyFile (..)
+ , copyFileCompiler
+ ) where
+
+import Control.Arrow ((>>^))
+import System.Directory (copyFile)
+
+import Data.Typeable (Typeable)
+import Data.Binary (Binary)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Writable
+import Hakyll.Core.Compiler
+import Hakyll.Core.Identifier
+
+-- | Newtype construct around 'FilePath' which will copy the file directly
+--
+newtype CopyFile = CopyFile {unCopyFile :: FilePath}
+ deriving (Show, Eq, Ord, Binary, Typeable)
+
+instance Writable CopyFile where
+ write dst (CopyFile src) = copyFile src dst
+
+copyFileCompiler :: Compiler Resource CopyFile
+copyFileCompiler = getIdentifier >>^ CopyFile . toFilePath
diff --git a/src/Hakyll/Core/DirectedGraph.hs b/src/Hakyll/Core/DirectedGraph.hs
new file mode 100644
index 0000000..76a030b
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph.hs
@@ -0,0 +1,85 @@
+-- | Representation of a directed graph. In Hakyll, this is used for dependency
+-- tracking.
+--
+module Hakyll.Core.DirectedGraph
+ ( DirectedGraph
+ , fromList
+ , member
+ , nodes
+ , neighbours
+ , reverse
+ , reachableNodes
+ , sanitize
+ ) where
+
+import Prelude hiding (reverse)
+import Data.Monoid (mconcat)
+import Data.Set (Set)
+import Data.Maybe (fromMaybe)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+import Hakyll.Core.DirectedGraph.Internal
+
+-- | Construction of directed graphs
+--
+fromList :: Ord a
+ => [(a, Set a)] -- ^ List of (node, reachable neighbours)
+ -> DirectedGraph a -- ^ Resulting directed graph
+fromList = DirectedGraph . M.fromList . map (\(t, d) -> (t, Node t d))
+
+-- | Check if a node lies in the given graph
+--
+member :: Ord a
+ => a -- ^ Node to check for
+ -> DirectedGraph a -- ^ Directed graph to check in
+ -> Bool -- ^ If the node lies in the graph
+member n = M.member n . unDirectedGraph
+
+-- | Get all nodes in the graph
+--
+nodes :: Ord a
+ => DirectedGraph a -- ^ Graph to get the nodes from
+ -> Set a -- ^ All nodes in the graph
+nodes = M.keysSet . unDirectedGraph
+
+-- | Get a set of reachable neighbours from a directed graph
+--
+neighbours :: Ord a
+ => a -- ^ Node to get the neighbours of
+ -> DirectedGraph a -- ^ Graph to search in
+ -> Set a -- ^ Set containing the neighbours
+neighbours x = fromMaybe S.empty . fmap nodeNeighbours
+ . M.lookup x . unDirectedGraph
+
+-- | Reverse a directed graph (i.e. flip all edges)
+--
+reverse :: Ord a
+ => DirectedGraph a
+ -> DirectedGraph a
+reverse = mconcat . map reverse' . M.toList . unDirectedGraph
+ where
+ reverse' (id', Node _ neighbours') = fromList $
+ zip (S.toList neighbours') $ repeat $ S.singleton id'
+
+-- | Find all reachable nodes from a given set of nodes in the directed graph
+--
+reachableNodes :: Ord a => Set a -> DirectedGraph a -> Set a
+reachableNodes set graph = reachable (setNeighbours set) set
+ where
+ reachable next visited
+ | S.null next = visited
+ | otherwise = reachable (sanitize' neighbours') (next `S.union` visited)
+ where
+ sanitize' = S.filter (`S.notMember` visited)
+ neighbours' = setNeighbours (sanitize' next)
+
+ setNeighbours = S.unions . map (`neighbours` graph) . S.toList
+
+-- | Remove all dangling pointers, i.e. references to nodes that do
+-- not actually exist in the graph.
+--
+sanitize :: Ord a => DirectedGraph a -> DirectedGraph a
+sanitize (DirectedGraph graph) = DirectedGraph $ M.map sanitize' graph
+ where
+ sanitize' (Node t n) = Node t $ S.filter (`M.member` graph) n
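
A small sketch of the graph API, using plain strings as nodes; note that
'reachableNodes' includes the starting set itself:

    import qualified Data.Set as S
    import Hakyll.Core.DirectedGraph

    graph :: DirectedGraph String
    graph = fromList
        [ ("index", S.fromList ["posts", "about"])
        , ("posts", S.singleton "about")
        , ("about", S.empty)
        ]

    reachable :: S.Set String
    reachable = reachableNodes (S.singleton "posts") graph
    -- => fromList ["about", "posts"]
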
diff --git a/src/Hakyll/Core/DirectedGraph/DependencySolver.hs b/src/Hakyll/Core/DirectedGraph/DependencySolver.hs
new file mode 100644
index 0000000..54826ff
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/DependencySolver.hs
@@ -0,0 +1,70 @@
+-- | Given a dependency graph, this module provides a function that will
+-- generate an order in which the graph can be visited, so that all the
+-- dependencies of a given node have been visited before the node itself is
+-- visited.
+--
+module Hakyll.Core.DirectedGraph.DependencySolver
+ ( solveDependencies
+ ) where
+
+import Prelude
+import qualified Prelude as P
+import Data.Set (Set)
+import Data.Maybe (mapMaybe)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+import Hakyll.Core.DirectedGraph
+import Hakyll.Core.DirectedGraph.Internal
+
+-- | Solve a dependency graph. This function returns an order to run the
+-- different nodes
+--
+solveDependencies :: Ord a
+ => DirectedGraph a -- ^ Graph
+ -> [a] -- ^ Resulting plan
+solveDependencies = P.reverse . order [] [] S.empty
+
+-- | Produce a reversed order using a stack
+--
+order :: Ord a
+ => [a] -- ^ Temporary result
+ -> [Node a] -- ^ Backtrace stack
+ -> Set a -- ^ Items in the stack
+ -> DirectedGraph a -- ^ Graph
+ -> [a] -- ^ Ordered result
+order temp stack set graph@(DirectedGraph graph')
+ -- Empty graph - return our current result
+ | M.null graph' = temp
+ | otherwise = case stack of
+
+ -- Empty stack - pick a node, and add it to the stack
+ [] ->
+ let (tag, node) = M.findMin graph'
+ in order temp (node : stack) (S.insert tag set) graph
+
+ -- At least one item on the stack - continue using this item
+ (node : stackTail) ->
+ -- Check which dependencies are still in the graph
+ let tag = nodeTag node
+ deps = S.toList $ nodeNeighbours node
+ unsatisfied = mapMaybe (`M.lookup` graph') deps
+ in case unsatisfied of
+
+ -- All dependencies for node are satisfied, we can return it and
+ -- remove it from the graph
+ [] -> order (tag : temp) stackTail (S.delete tag set)
+ (DirectedGraph $ M.delete tag graph')
+
+ -- There is at least one dependency left. We need to solve that
+ -- one first...
+ (dep : _) -> if nodeTag dep `S.member` set
+ -- The dependency is already in our stack - cycle detected!
+ then cycleError
+ -- Continue with the dependency
+ else order temp (dep : node : stackTail)
+ (S.insert (nodeTag dep) set)
+ graph
+ where
+ cycleError = error $ "Hakyll.Core.DirectedGraph.DependencySolver.order: "
+ ++ "Cycle detected!" -- TODO: Dump cycle
diff --git a/src/Hakyll/Core/DirectedGraph/Dot.hs b/src/Hakyll/Core/DirectedGraph/Dot.hs
new file mode 100644
index 0000000..8289992
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/Dot.hs
@@ -0,0 +1,30 @@
+-- | Dump a directed graph in dot format. Used for debugging purposes
+--
+module Hakyll.Core.DirectedGraph.Dot
+ ( toDot
+ , writeDot
+ ) where
+
+import Hakyll.Core.DirectedGraph
+import qualified Data.Set as S
+
+-- | Convert a directed graph into dot format for debugging purposes
+--
+toDot :: Ord a
+ => (a -> String) -- ^ Convert nodes to dot names
+ -> DirectedGraph a -- ^ Graph to dump
+ -> String -- ^ Resulting string
+toDot showTag graph = unlines $ concat
+ [ return "digraph dependencies {"
+ , concatMap showNode (S.toList $ nodes graph)
+ , return "}"
+ ]
+ where
+ showNode node = map (showEdge node) $ S.toList $ neighbours node graph
+ showEdge x y = " \"" ++ showTag x ++ "\" -> \"" ++ showTag y ++ "\";"
+
+-- | Write out the @.dot@ file to a given file path. See 'toDot' for more
+-- information.
+--
+writeDot :: Ord a => FilePath -> (a -> String) -> DirectedGraph a -> IO ()
+writeDot path showTag = writeFile path . toDot showTag
diff --git a/src/Hakyll/Core/DirectedGraph/Internal.hs b/src/Hakyll/Core/DirectedGraph/Internal.hs
new file mode 100644
index 0000000..5b02ad6
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/Internal.hs
@@ -0,0 +1,43 @@
+-- | Internal structure of the DirectedGraph type. Not exported outside of the
+-- library.
+--
+module Hakyll.Core.DirectedGraph.Internal
+ ( Node (..)
+ , DirectedGraph (..)
+ ) where
+
+import Prelude hiding (reverse, filter)
+import Data.Monoid (Monoid, mempty, mappend)
+import Data.Set (Set)
+import Data.Map (Map)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+-- | A node in the directed graph
+--
+data Node a = Node
+ { nodeTag :: a -- ^ Tag identifying the node
+ , nodeNeighbours :: Set a -- ^ Edges starting at this node
+ } deriving (Show)
+
+-- | Append two nodes. Useful for joining graphs.
+--
+appendNodes :: Ord a => Node a -> Node a -> Node a
+appendNodes (Node t1 n1) (Node t2 n2)
+ | t1 /= t2 = error'
+ | otherwise = Node t1 (n1 `S.union` n2)
+ where
+ error' = error $ "Hakyll.Core.DirectedGraph.Internal.appendNodes: "
+ ++ "Appending differently tagged nodes"
+
+-- | Type used to represent a directed graph
+--
+newtype DirectedGraph a = DirectedGraph {unDirectedGraph :: Map a (Node a)}
+ deriving (Show)
+
+-- | Allow users to concatenate different graphs
+--
+instance Ord a => Monoid (DirectedGraph a) where
+ mempty = DirectedGraph M.empty
+ mappend (DirectedGraph m1) (DirectedGraph m2) = DirectedGraph $
+ M.unionWith appendNodes m1 m2
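
A tiny sketch of what the Monoid instance buys, using the public 'fromList'
and 'neighbours' from Hakyll.Core.DirectedGraph: nodes with the same tag get
their edge sets unioned:

    import Data.Monoid (mappend)
    import qualified Data.Set as S
    import Hakyll.Core.DirectedGraph

    merged :: S.Set String
    merged = neighbours "a" $
        fromList [("a", S.singleton "b")]
            `mappend` fromList [("a", S.singleton "c"), ("b", S.empty)]
    -- => fromList ["b", "c"]
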
diff --git a/src/Hakyll/Core/Identifier.hs b/src/Hakyll/Core/Identifier.hs
new file mode 100644
index 0000000..16403e6
--- /dev/null
+++ b/src/Hakyll/Core/Identifier.hs
@@ -0,0 +1,59 @@
+-- | An identifier is a type used to uniquely identify a resource, target...
+--
+-- One can think of an identifier as something similar to a file path. An
+-- identifier is a path as well, with the different elements in the path
+-- separated by @/@ characters. Examples of identifiers are:
+--
+-- * @posts/foo.markdown@
+--
+-- * @index@
+--
+-- * @error/404@
+--
+-- The most important difference between an 'Identifier' and a file path is that
+-- the identifier for an item is not necessarily the file path.
+--
+-- For example, we could have an @index@ identifier, generated by Hakyll. The
+-- actual file path would be @index.html@, but we identify it using @index@.
+--
+-- @posts/foo.markdown@ could be an identifier of an item that is rendered to
+-- @posts/foo.html@. In this case, the identifier is the name of the source
+-- file of the page.
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Identifier
+ ( Identifier (..)
+ , parseIdentifier
+ , toFilePath
+ ) where
+
+import Control.Arrow (second)
+import Data.Monoid (Monoid)
+
+import GHC.Exts (IsString, fromString)
+import System.FilePath (joinPath)
+
+-- | An identifier used to uniquely identify a value
+--
+newtype Identifier = Identifier {unIdentifier :: [String]}
+ deriving (Eq, Ord, Monoid)
+
+instance Show Identifier where
+ show = toFilePath
+
+instance IsString Identifier where
+ fromString = parseIdentifier
+
+-- | Parse an identifier from a string
+--
+parseIdentifier :: String -> Identifier
+parseIdentifier = Identifier . filter (not . null) . split'
+ where
+ split' [] = [[]]
+ split' str = let (pre, post) = second (drop 1) $ break (== '/') str
+ in pre : split' post
+
+-- | Convert an identifier to a relative 'FilePath'
+--
+toFilePath :: Identifier -> FilePath
+toFilePath = joinPath . unIdentifier
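
A quick sketch of the parsing behaviour: empty path elements are dropped, so
repeated slashes normalise away:

    import Hakyll.Core.Identifier

    normalised :: Bool
    normalised = parseIdentifier "posts//foo.markdown"
              == parseIdentifier "posts/foo.markdown"
    -- => True; toFilePath on either gives "posts/foo.markdown"
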
diff --git a/src/Hakyll/Core/Identifier/Pattern.hs b/src/Hakyll/Core/Identifier/Pattern.hs
new file mode 100644
index 0000000..7c88356
--- /dev/null
+++ b/src/Hakyll/Core/Identifier/Pattern.hs
@@ -0,0 +1,160 @@
+-- | Module providing pattern matching and capturing on 'Identifier's.
+--
+-- A very simple pattern could be, for example, @foo\/bar@. This pattern will
+-- only match the exact @foo\/bar@ identifier.
+--
+-- To match more than one identifier, there are different captures that one can
+-- use:
+--
+-- * @*@: matches exactly one element of an identifier;
+--
+-- * @**@: matches one or more elements of an identifier.
+--
+-- Some examples:
+--
+-- * @foo\/*@ will match @foo\/bar@ and @foo\/foo@, but not @foo\/bar\/qux@ nor
+-- @foo@;
+--
+-- * @**@ will match any non-empty identifier;
+--
+-- * @foo\/**@ will match @foo\/bar@ and @foo\/bar\/qux@, but not @bar\/foo@ nor
+-- @foo@;
+--
+-- A small warning: patterns are not globs. Using @foo\/*.markdown@ will not do
+-- what you probably intended, as it will only match the file which is literally
+-- called @foo\/*.markdown@. Remember that these captures only work on elements
+-- of identifiers as a whole, not on parts of these elements.
+--
+-- Furthermore, the 'match' function allows the user to get access to the
+-- elements captured by the capture elements in the pattern.
+--
+module Hakyll.Core.Identifier.Pattern
+ ( Pattern
+ , parsePattern
+ , match
+ , doesMatch
+ , matches
+ , fromCapture
+ , fromCaptureString
+ , fromCaptures
+ ) where
+
+import Data.List (intercalate)
+import Control.Monad (msum)
+import Data.Maybe (isJust)
+import Data.Monoid (mempty, mappend)
+
+import GHC.Exts (IsString, fromString)
+
+import Hakyll.Core.Identifier
+
+-- | One base element of a pattern
+--
+data PatternComponent = CaptureOne
+ | CaptureMany
+ | Literal String
+ deriving (Eq)
+
+instance Show PatternComponent where
+ show CaptureOne = "*"
+ show CaptureMany = "**"
+ show (Literal s) = s
+
+-- | Type that allows matching on identifiers
+--
+newtype Pattern = Pattern {unPattern :: [PatternComponent]}
+ deriving (Eq)
+
+instance Show Pattern where
+ show = intercalate "/" . map show . unPattern
+
+instance IsString Pattern where
+ fromString = parsePattern
+
+-- | Parse a pattern from a string
+--
+parsePattern :: String -> Pattern
+parsePattern = Pattern . map toPattern . unIdentifier . parseIdentifier
+ where
+ toPattern x | x == "*" = CaptureOne
+ | x == "**" = CaptureMany
+ | otherwise = Literal x
+
+-- | Match an identifier against a pattern, generating a list of captures
+--
+match :: Pattern -> Identifier -> Maybe [Identifier]
+match (Pattern p) (Identifier i) = fmap (map Identifier) $ match' p i
+
+-- | Check if an identifier matches a pattern
+--
+doesMatch :: Pattern -> Identifier -> Bool
+doesMatch p = isJust . match p
+
+-- | Given a list of identifiers, retain only those that match the given pattern
+--
+matches :: Pattern -> [Identifier] -> [Identifier]
+matches p = filter (doesMatch p)
+
+-- | Split a list at every possible point, generate a list of (init, tail) cases
+--
+splits :: [a] -> [([a], [a])]
+splits ls = reverse $ splits' [] ls
+ where
+ splits' lx ly = (lx, ly) : case ly of
+ [] -> []
+ (y : ys) -> splits' (lx ++ [y]) ys
+
+-- | Internal version of 'match'
+--
+match' :: [PatternComponent] -> [String] -> Maybe [[String]]
+match' [] [] = Just [] -- An empty match
+match' [] _ = Nothing -- No match
+match' _ [] = Nothing -- No match
+match' (m : ms) (s : ss) = case m of
+ -- Take one string and one literal, fail on mismatch
+ Literal l -> if s == l then match' ms ss else Nothing
+ -- Take one string and one capture
+ CaptureOne -> fmap ([s] :) $ match' ms ss
+ -- Take one string, and one or many captures
+ CaptureMany ->
+ let take' (i, t) = fmap (i :) $ match' ms t
+ in msum $ map take' $ splits (s : ss)
+
+-- | Create an identifier from a pattern by filling in the captures with a given
+-- string
+--
+-- Example:
+--
+-- > fromCapture (parsePattern "tags/*") (parseIdentifier "foo")
+--
+-- Result:
+--
+-- > "tags/foo"
+--
+fromCapture :: Pattern -> Identifier -> Identifier
+fromCapture pattern = fromCaptures pattern . repeat
+
+-- | Simplified version of 'fromCapture' which takes a 'String' instead of an
+-- 'Identifier'
+--
+-- > fromCaptureString (parsePattern "tags/*") "foo"
+--
+-- Result:
+--
+-- > "tags/foo"
+--
+fromCaptureString :: Pattern -> String -> Identifier
+fromCaptureString pattern = fromCapture pattern . parseIdentifier
+
+-- | Create an identifier from a pattern by filling in the captures with the
+-- given list of identifiers
+--
+fromCaptures :: Pattern -> [Identifier] -> Identifier
+fromCaptures (Pattern []) _ = mempty
+fromCaptures (Pattern (m : ms)) [] = case m of
+ Literal l -> Identifier [l] `mappend` fromCaptures (Pattern ms) []
+ _ -> error $ "Hakyll.Core.Identifier.Pattern.fromCaptures: "
+ ++ "identifier list exhausted"
+fromCaptures (Pattern (m : ms)) ids@(i : is) = case m of
+ Literal l -> Identifier [l] `mappend` fromCaptures (Pattern ms) ids
+ _ -> i `mappend` fromCaptures (Pattern ms) is
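
A sketch of matching and capturing; both 'Pattern' and 'Identifier' are in
'IsString', so string literals suffice under OverloadedStrings:

    {-# LANGUAGE OverloadedStrings #-}
    import Hakyll.Core.Identifier (Identifier)
    import Hakyll.Core.Identifier.Pattern

    -- match "tags/*" "tags/haskell"   => Just ["haskell"]
    -- doesMatch "foo/*" "foo/bar/qux" => False (* spans one element)
    captures :: Maybe [Identifier]
    captures = match "foo/**" "foo/bar/qux"
    -- => Just ["bar/qux"]
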
diff --git a/src/Hakyll/Core/Logger.hs b/src/Hakyll/Core/Logger.hs
new file mode 100644
index 0000000..720dee0
--- /dev/null
+++ b/src/Hakyll/Core/Logger.hs
@@ -0,0 +1,90 @@
+-- | Produce pretty, thread-safe logs
+--
+{-# LANGUAGE BangPatterns #-}
+module Hakyll.Core.Logger
+ ( Logger
+ , makeLogger
+ , flushLogger
+ , section
+ , timed
+ , report
+ ) where
+
+import Control.Monad (forever)
+import Control.Monad.Trans (MonadIO, liftIO)
+import Control.Applicative ((<$>), (<*>))
+import Control.Concurrent (forkIO)
+import Control.Concurrent.Chan.Strict (Chan, newChan, readChan, writeChan)
+import Control.Concurrent.MVar.Strict (MVar, newEmptyMVar, takeMVar, putMVar)
+import Text.Printf (printf)
+
+import Data.Time (getCurrentTime, diffUTCTime)
+
+-- | Logger structure. Very complicated.
+--
+data Logger = Logger
+ { loggerChan :: Chan (Maybe String) -- Nothing marks the end
+ , loggerSync :: MVar () -- Used for sync on quit
+ }
+
+-- | Create a new logger
+--
+makeLogger :: IO Logger
+makeLogger = do
+ logger <- Logger <$> newChan <*> newEmptyMVar
+ _ <- forkIO $ loggerThread logger
+ return logger
+ where
+ loggerThread logger = forever $ do
+ msg <- readChan $ loggerChan logger
+ case msg of
+ -- Stop: sync
+ Nothing -> putMVar (loggerSync logger) ()
+ -- Print and continue
+ Just m -> putStrLn m
+
+-- | Flush the logger (blocks until flushed)
+--
+flushLogger :: Logger -> IO ()
+flushLogger logger = do
+ writeChan (loggerChan logger) Nothing
+ () <- takeMVar $ loggerSync logger
+ return ()
+
+-- | Send a raw message to the logger
+--
+message :: Logger -> String -> IO ()
+message logger = writeChan (loggerChan logger) . Just
+
+-- | Start a section in the log
+--
+section :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Section name
+ -> m () -- ^ No result
+section logger = liftIO . message logger
+
+-- | Execute a monadic action and log the duration
+--
+timed :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Message
+ -> m a -- ^ Action
+ -> m a -- ^ Timed and logged action
+timed logger msg action = do
+ start <- liftIO getCurrentTime
+ !result <- action
+ stop <- liftIO getCurrentTime
+ let diff = fromEnum $ diffUTCTime stop start
+ ms = diff `div` 10 ^ (9 :: Int)
+ formatted = printf " [%4dms] %s" ms msg
+ liftIO $ message logger formatted
+ return result
+
+-- | Log something at the same level as 'timed', but without the timing
+--
+report :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Message
+ -> m () -- ^ No result
+report logger msg = liftIO $ message logger $ " [ ] " ++ msg
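
A minimal usage sketch: every message funnels through the channel to a single
printing thread, which is what makes the logger safe to share between
concurrent compilers:

    import Hakyll.Core.Logger

    main :: IO ()
    main = do
        logger <- makeLogger
        section logger "Demo"
        result <- timed logger "Adding numbers" $ return (1 + 1 :: Int)
        report logger $ "Result: " ++ show result
        flushLogger logger  -- block until everything is printed
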
diff --git a/src/Hakyll/Core/ResourceProvider.hs b/src/Hakyll/Core/ResourceProvider.hs
new file mode 100644
index 0000000..dcd4af0
--- /dev/null
+++ b/src/Hakyll/Core/ResourceProvider.hs
@@ -0,0 +1,75 @@
+-- | This module provides an API for resource providers. Resource providers
+-- allow Hakyll to get content from resources; the type of resource depends on
+-- the concrete instance.
+--
+-- A resource is represented by the 'Resource' type. This is basically just a
+-- newtype wrapper around 'Identifier' -- but it has an important effect: it
+-- guarantees that a resource with this identifier can be provided by one or
+-- more resource providers.
+--
+-- Therefore, it is not recommended to read files directly -- you should use the
+-- provided 'Resource' methods.
+--
+module Hakyll.Core.ResourceProvider
+ ( Resource (..)
+ , ResourceProvider (..)
+ , resourceExists
+ , resourceDigest
+ , resourceModified
+ ) where
+
+import Control.Monad ((<=<))
+import Data.Word (Word8)
+
+import qualified Data.ByteString.Lazy as LB
+import OpenSSL.Digest.ByteString.Lazy (digest)
+import OpenSSL.Digest (MessageDigest (MD5))
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Store
+
+-- | A resource
+--
+-- Invariant: the resource specified by the given identifier must exist
+--
+newtype Resource = Resource {unResource :: Identifier}
+ deriving (Eq, Show, Ord)
+
+-- | A value responsible for retrieving and listing resources
+--
+data ResourceProvider = ResourceProvider
+ { -- | A list of all resources this provider is able to provide
+ resourceList :: [Resource]
+ , -- | Retrieve a certain resource as string
+ resourceString :: Resource -> IO String
+ , -- | Retrieve a certain resource as lazy bytestring
+ resourceLazyByteString :: Resource -> IO LB.ByteString
+ }
+
+-- | Check if a given identifier has a resource
+--
+resourceExists :: ResourceProvider -> Identifier -> Bool
+resourceExists provider = flip elem $ map unResource $ resourceList provider
+
+-- | Retrieve a digest for a given resource
+--
+resourceDigest :: ResourceProvider -> Resource -> IO [Word8]
+resourceDigest provider = digest MD5 <=< resourceLazyByteString provider
+
+-- | Check if a resource was modified
+--
+resourceModified :: ResourceProvider -> Resource -> Store -> IO Bool
+resourceModified provider resource store = do
+ -- Get the latest seen digest from the store
+ lastDigest <- storeGet store itemName $ unResource resource
+ -- Calculate the digest for the resource
+ newDigest <- resourceDigest provider resource
+ -- Check digests
+ if Just newDigest == lastDigest
+ -- All is fine, not modified
+ then return False
+ -- Resource modified; store new digest
+ else do storeSet store itemName (unResource resource) newDigest
+ return True
+ where
+ itemName = "Hakyll.Core.ResourceProvider.resourceModified"
diff --git a/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs b/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs
new file mode 100644
index 0000000..0d89b21
--- /dev/null
+++ b/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs
@@ -0,0 +1,29 @@
+-- | A concrete 'ResourceProvider' that gets its resources from the filesystem
+--
+module Hakyll.Core.ResourceProvider.FileResourceProvider
+ ( fileResourceProvider
+ ) where
+
+import Control.Applicative ((<$>))
+
+import qualified Data.ByteString.Lazy as LB
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+import Hakyll.Core.Configuration
+
+-- | Create a filesystem-based 'ResourceProvider'
+--
+fileResourceProvider :: HakyllConfiguration -> IO ResourceProvider
+fileResourceProvider configuration = do
+ -- Retrieve a list of identifiers
+ list <- map parseIdentifier . filter (not . ignoreFile configuration) <$>
+ getRecursiveContents False "."
+
+ -- Construct a resource provider
+ return ResourceProvider
+ { resourceList = map Resource list
+ , resourceString = readFile . toFilePath . unResource
+ , resourceLazyByteString = LB.readFile . toFilePath . unResource
+ }
diff --git a/src/Hakyll/Core/Routes.hs b/src/Hakyll/Core/Routes.hs
new file mode 100644
index 0000000..fcab28d
--- /dev/null
+++ b/src/Hakyll/Core/Routes.hs
@@ -0,0 +1,136 @@
+-- | Once a target is compiled, the user usually wants to save it to the disk.
+-- This is where the 'Routes' type comes in; it determines where a certain
+-- target should be written.
+--
+-- Suppose we have an item @foo\/bar.markdown@. We can render this to
+-- @foo\/bar.html@ using:
+--
+-- > route "foo/bar.markdown" (setExtension ".html")
+--
+-- If we do not want to change the extension, we can use 'idRoute', the simplest
+-- route available:
+--
+-- > route "foo/bar.markdown" idRoute
+--
+-- That will route @foo\/bar.markdown@ to @foo\/bar.markdown@.
+--
+-- Note that the extension says nothing about the content! If you set the
+-- extension to @.html@, it is your own responsibility to ensure that the
+-- content is indeed HTML.
+--
+-- Finally, some special cases:
+--
+-- * If there is no route for an item, this item will not be routed, so it will
+-- not appear in your site directory.
+--
+-- * If an item matches multiple routes, the first rule will be chosen.
+--
+module Hakyll.Core.Routes
+ ( Routes
+ , runRoutes
+ , idRoute
+ , setExtension
+ , ifMatch
+ , customRoute
+ , gsubRoute
+ , composeRoutes
+ ) where
+
+import Data.Monoid (Monoid, mempty, mappend)
+import Control.Monad (mplus)
+import System.FilePath (replaceExtension)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Util.String
+
+-- | Type used for a route
+--
+newtype Routes = Routes {unRoutes :: Identifier -> Maybe FilePath}
+
+instance Monoid Routes where
+ mempty = Routes $ const Nothing
+ mappend (Routes f) (Routes g) = Routes $ \id' -> f id' `mplus` g id'
+
+-- | Apply a route to an identifier
+--
+runRoutes :: Routes -> Identifier -> Maybe FilePath
+runRoutes = unRoutes
+
+-- | A route that uses the identifier as filepath. For example, the target with
+-- ID @foo\/bar@ will be written to the file @foo\/bar@.
+--
+idRoute :: Routes
+idRoute = Routes $ Just . toFilePath
+
+-- | Set (or replace) the extension of a route.
+--
+-- Example:
+--
+-- > runRoute (setExtension "html") "foo/bar"
+--
+-- Result:
+--
+-- > Just "foo/bar.html"
+--
+-- Example:
+--
+-- > runRoute (setExtension "html") "posts/the-art-of-trolling.markdown"
+--
+-- Result:
+--
+-- > Just "posts/the-art-of-trolling.html"
+--
+setExtension :: String -> Routes
+setExtension extension = Routes $ fmap (`replaceExtension` extension)
+ . unRoutes idRoute
+
+-- | Modify a route: apply the route if the identifier matches the given
+-- pattern, fail otherwise.
+--
+ifMatch :: Pattern -> Routes -> Routes
+ifMatch pattern (Routes route) = Routes $ \id' ->
+ if doesMatch pattern id' then route id'
+ else Nothing
+
+-- | Create a custom route. This should almost always be used with 'ifMatch'.
+--
+customRoute :: (Identifier -> FilePath) -> Routes
+customRoute f = Routes $ Just . f
+
+-- | Create a gsub route
+--
+-- Example:
+--
+-- > runRoutes (gsubRoute "rss/" (const "")) "tags/rss/bar.xml"
+--
+-- Result:
+--
+-- > Just "tags/bar.xml"
+--
+gsubRoute :: String -- ^ Pattern
+ -> (String -> String) -- ^ Replacement
+ -> Routes -- ^ Resulting route
+gsubRoute pattern replacement = customRoute $
+ replaceAll pattern replacement . toFilePath
+
+-- | Compose routes so that @f `composeRoutes` g@ is more or less equivalent
+-- to @f >>> g@.
+--
+-- Example:
+--
+-- > let routes = gsubRoute "rss/" (const "") `composeRoutes` setExtension "xml"
+-- > in runRoutes routes "tags/rss/bar"
+--
+-- Result:
+--
+-- > Just "tags/bar.xml"
+--
+-- If the first route given fails, Hakyll will not apply the second route.
+--
+composeRoutes :: Routes -- ^ First route to apply
+ -> Routes -- ^ Second route to apply
+ -> Routes -- ^ Resulting route
+composeRoutes (Routes f) (Routes g) = Routes $ \i -> do
+ p <- f i
+ g $ parseIdentifier p
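
The "first rule wins" behaviour mentioned above falls out of the Monoid
instance, since 'mappend' tries the left route first; a sketch:

    {-# LANGUAGE OverloadedStrings #-}
    import Data.Monoid (mappend)
    import Hakyll.Core.Routes

    routes :: Routes
    routes = ifMatch "posts/*" (setExtension "html") `mappend` idRoute

    -- runRoutes routes "posts/foo.markdown" => Just "posts/foo.html"
    -- runRoutes routes "css/style.css"      => Just "css/style.css"
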
diff --git a/src/Hakyll/Core/Rules.hs b/src/Hakyll/Core/Rules.hs
new file mode 100644
index 0000000..eba3fb9
--- /dev/null
+++ b/src/Hakyll/Core/Rules.hs
@@ -0,0 +1,161 @@
+-- | This module provides a declarative DSL in which the user can specify the
+-- different rules used to run the compilers.
+--
+-- The convention is to simply list all items, both routes and compilation
+-- rules, in the 'RulesM' monad.
+--
+-- A typical usage example would be:
+--
+-- > main = hakyll $ do
+-- > route "posts/*" (setExtension "html")
+-- > compile "posts/*" someCompiler
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings #-}
+module Hakyll.Core.Rules
+ ( RulesM
+ , Rules
+ , compile
+ , create
+ , route
+ , metaCompile
+ , metaCompileWith
+ ) where
+
+import Control.Applicative ((<$>))
+import Control.Monad.Writer (tell)
+import Control.Monad.Reader (ask)
+import Control.Arrow (second, (>>>), arr, (>>^))
+import Control.Monad.State (get, put)
+import Data.Monoid (mempty)
+import qualified Data.Set as S
+
+import Data.Typeable (Typeable)
+import Data.Binary (Binary)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.Util.Arrow
+
+-- | Add a route
+--
+tellRoute :: Routes -> Rules
+tellRoute route' = RulesM $ tell $ RuleSet route' mempty mempty
+
+-- | Add a number of compilers
+--
+tellCompilers :: (Binary a, Typeable a, Writable a)
+ => [(Identifier, Compiler () a)]
+ -> Rules
+tellCompilers compilers = RulesM $ tell $ RuleSet mempty compilers' mempty
+ where
+ compilers' = map (second boxCompiler) compilers
+ boxCompiler = (>>> arr compiledItem >>> arr CompileRule)
+
+-- | Add resources
+--
+tellResources :: [Resource]
+ -> Rules
+tellResources resources = RulesM $ tell $ RuleSet mempty mempty $ S.fromList resources
+
+-- | Add a compilation rule to the rules.
+--
+-- This instructs all resources matching the given pattern to be compiled using
+-- the given compiler. When no resources match the given pattern, nothing will
+-- happen. In this case, you might want to have a look at 'create'.
+--
+compile :: (Binary a, Typeable a, Writable a)
+ => Pattern -> Compiler Resource a -> Rules
+compile pattern compiler = RulesM $ do
+ identifiers <- matches pattern . map unResource . resourceList <$> ask
+ unRulesM $ do
+ tellCompilers $ flip map identifiers $ \identifier ->
+ (identifier, constA (Resource identifier) >>> compiler)
+ tellResources $ map Resource identifiers
+
+-- | Add a compilation rule
+--
+-- This sets a compiler for the given identifier. No resource is needed, since
+-- we are creating the item from scratch. This is useful if you want to create a
+-- page on your site that just takes content from other items -- but has no
+-- actual content itself.
+--
+create :: (Binary a, Typeable a, Writable a)
+ => Identifier -> Compiler () a -> Rules
+create identifier compiler = tellCompilers [(identifier, compiler)]
+
+-- | Add a route.
+--
+-- This adds a route for all items matching the given pattern.
+--
+route :: Pattern -> Routes -> Rules
+route pattern route' = tellRoute $ ifMatch pattern route'
+
+-- | Apart from regular compilers, one is also able to specify metacompilers.
+-- Metacompilers are a special class of compilers: they are compilers which
+-- produce other compilers.
+--
+-- This is needed when the list of compilers depends on something we cannot know
+-- before actually running other compilers. The most typical example is a blog
+-- that uses tags.
+--
+-- Every post has a collection of tags. For example,
+--
+-- > post1: code, haskell
+-- > post2: code, random
+--
+-- Now, we want to create a list of posts for every tag. We cannot write this
+-- down in our 'Rules' DSL directly, since we don't know what tags the different
+-- posts will have -- we depend on information that will only be available when
+-- we are actually compiling the pages.
+--
+-- The solution is simple, using 'metaCompile', we can add a compiler that will
+-- parse the pages and produce the compilers needed for the different tag pages.
+--
+-- And indeed, we can see that the first argument to 'metaCompile' is a
+-- 'Compiler' which produces a list of ('Identifier', 'Compiler') pairs. The
+-- idea is simple: 'metaCompile' produces a list of compilers, and the
+-- corresponding identifiers.
+--
+-- For simple Hakyll sites, there is no need for this construction. More
+-- formally, it is only needed when the content of one or more items determines
+-- which items must be rendered.
+--
+metaCompile :: (Binary a, Typeable a, Writable a)
+ => Compiler () [(Identifier, Compiler () a)]
+ -- ^ Compiler generating the other compilers
+ -> Rules
+ -- ^ Resulting rules
+metaCompile compiler = RulesM $ do
+ -- Create an identifier from the state
+ state <- get
+ let index = rulesMetaCompilerIndex state
+ id' = fromCaptureString "Hakyll.Core.Rules.metaCompile/*" (show index)
+
+ -- Update the state with a new identifier
+ put $ state {rulesMetaCompilerIndex = index + 1}
+
+ -- Fall back to 'metaCompileWith' with the now-known identifier
+ unRulesM $ metaCompileWith id' compiler
+
+-- | Version of 'metaCompile' that allows you to specify a custom identifier for
+-- the metacompiler.
+--
+metaCompileWith :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -- ^ Identifier for this compiler
+ -> Compiler () [(Identifier, Compiler () a)]
+ -- ^ Compiler generating the other compilers
+ -> Rules
+ -- ^ Resulting rules
+metaCompileWith identifier compiler = RulesM $ tell $
+ RuleSet mempty compilers mempty
+ where
+ makeRule = MetaCompileRule . map (second box)
+ compilers = [(identifier, compiler >>> arr makeRule)]
+ box = (>>> fromDependency identifier >>^ CompileRule . compiledItem)
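
A deliberately tiny sketch of 'metaCompile': the outer compiler here returns a
fixed list, whereas a real site would compute the tag pages from previously
compiled content. A 'Writable' instance for String is assumed:

    {-# LANGUAGE OverloadedStrings #-}
    import Control.Arrow (arr)
    import Hakyll.Core.Rules

    tagPages :: Rules
    tagPages = metaCompile $ arr $ const
        [ ("tags/code",    arr (const ("Posts tagged code" :: String)))
        , ("tags/haskell", arr (const "Posts tagged haskell"))
        ]
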
diff --git a/src/Hakyll/Core/Rules/Internal.hs b/src/Hakyll/Core/Rules/Internal.hs
new file mode 100644
index 0000000..2895257
--- /dev/null
+++ b/src/Hakyll/Core/Rules/Internal.hs
@@ -0,0 +1,75 @@
+-- | Internal rules module for types which are not exposed to the user
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Rules.Internal
+ ( CompileRule (..)
+ , RuleSet (..)
+ , RuleState (..)
+ , RulesM (..)
+ , Rules
+ , runRules
+ ) where
+
+import Control.Applicative (Applicative)
+import Control.Monad.Writer (WriterT, execWriterT)
+import Control.Monad.Reader (ReaderT, runReaderT)
+import Control.Monad.State (State, evalState)
+import Data.Monoid (Monoid, mempty, mappend)
+import Data.Set (Set)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.CompiledItem
+
+-- | Output of a compiler rule
+--
+-- * The compiler will produce a simple item. This is the most common case.
+--
+-- * The compiler will produce more compilers. These new compilers need to be
+-- added to the runtime if possible, since other items might depend upon them.
+--
+data CompileRule = CompileRule CompiledItem
+ | MetaCompileRule [(Identifier, Compiler () CompileRule)]
+
+-- | A collection of rules for the compilation process
+--
+data RuleSet = RuleSet
+ { -- | Routes used in the compilation structure
+ rulesRoutes :: Routes
+ , -- | Compilation rules
+ rulesCompilers :: [(Identifier, Compiler () CompileRule)]
+ , -- | A list of the used resources
+ rulesResources :: Set Resource
+ }
+
+instance Monoid RuleSet where
+ mempty = RuleSet mempty mempty mempty
+ mappend (RuleSet r1 c1 s1) (RuleSet r2 c2 s2) =
+ RuleSet (mappend r1 r2) (mappend c1 c2) (mappend s1 s2)
+
+-- | Rule state
+--
+data RuleState = RuleState
+ { rulesMetaCompilerIndex :: Int
+ } deriving (Show)
+
+-- | The monad used to compose rules
+--
+newtype RulesM a = RulesM
+ { unRulesM :: ReaderT ResourceProvider (WriterT RuleSet (State RuleState)) a
+ } deriving (Monad, Functor, Applicative)
+
+-- | Simplification of the RulesM type; usually, it will not return any
+-- result.
+--
+type Rules = RulesM ()
+
+-- | Run a Rules monad, resulting in a 'RuleSet'
+--
+runRules :: Rules -> ResourceProvider -> RuleSet
+runRules rules provider =
+ evalState (execWriterT $ runReaderT (unRulesM rules) provider) state
+ where
+ state = RuleState {rulesMetaCompilerIndex = 0}
diff --git a/src/Hakyll/Core/Run.hs b/src/Hakyll/Core/Run.hs
new file mode 100644
index 0000000..09864be
--- /dev/null
+++ b/src/Hakyll/Core/Run.hs
@@ -0,0 +1,207 @@
+-- | This is the module which binds it all together
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Run
+ ( run
+ ) where
+
+import Prelude hiding (reverse)
+import Control.Monad (filterM)
+import Control.Monad.Trans (liftIO)
+import Control.Applicative (Applicative, (<$>))
+import Control.Monad.Reader (ReaderT, runReaderT, ask)
+import Control.Monad.State.Strict (StateT, evalStateT, get, modify)
+import Control.Arrow ((&&&))
+import qualified Data.Map as M
+import Data.Monoid (mempty, mappend)
+import System.FilePath ((</>))
+import Data.Set (Set)
+import qualified Data.Set as S
+
+import Hakyll.Core.Routes
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+import Hakyll.Core.Compiler
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.ResourceProvider.FileResourceProvider
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.DirectedGraph
+import Hakyll.Core.DirectedGraph.DependencySolver
+import Hakyll.Core.Writable
+import Hakyll.Core.Store
+import Hakyll.Core.Configuration
+import Hakyll.Core.Logger
+
+-- | Run all rules needed, return the rule set used
+--
+run :: HakyllConfiguration -> Rules -> IO RuleSet
+run configuration rules = do
+ logger <- makeLogger
+
+ section logger "Initialising"
+ store <- timed logger "Creating store" $
+ makeStore $ storeDirectory configuration
+ provider <- timed logger "Creating provider" $
+ fileResourceProvider configuration
+
+ let ruleSet = runRules rules provider
+ compilers = rulesCompilers ruleSet
+
+ -- Extract the reader/state
+ reader = unRuntime $ addNewCompilers [] compilers
+ state' = runReaderT reader $ env logger ruleSet provider store
+
+ evalStateT state' state
+
+ -- Flush and return
+ flushLogger logger
+ return ruleSet
+ where
+ env logger ruleSet provider store = RuntimeEnvironment
+ { hakyllLogger = logger
+ , hakyllConfiguration = configuration
+ , hakyllRoutes = rulesRoutes ruleSet
+ , hakyllResourceProvider = provider
+ , hakyllStore = store
+ }
+
+ state = RuntimeState
+ { hakyllModified = S.empty
+ , hakyllGraph = mempty
+ }
+
+data RuntimeEnvironment = RuntimeEnvironment
+ { hakyllLogger :: Logger
+ , hakyllConfiguration :: HakyllConfiguration
+ , hakyllRoutes :: Routes
+ , hakyllResourceProvider :: ResourceProvider
+ , hakyllStore :: Store
+ }
+
+data RuntimeState = RuntimeState
+ { hakyllModified :: Set Identifier
+ , hakyllGraph :: DirectedGraph Identifier
+ }
+
+newtype Runtime a = Runtime
+ { unRuntime :: ReaderT RuntimeEnvironment (StateT RuntimeState IO) a
+ } deriving (Functor, Applicative, Monad)
+
+-- | Return a set of modified identifiers
+--
+modified :: ResourceProvider -- ^ Resource provider
+ -> Store -- ^ Store
+ -> [Identifier] -- ^ Identifiers to check
+ -> IO (Set Identifier) -- ^ Modified resources
+modified provider store ids = fmap S.fromList $ flip filterM ids $ \id' ->
+ if resourceExists provider id'
+ then resourceModified provider (Resource id') store
+ else return False
+
+-- | Add a number of compilers and continue using these compilers
+--
+addNewCompilers :: [(Identifier, Compiler () CompileRule)]
+ -- ^ Remaining compilers yet to be run
+ -> [(Identifier, Compiler () CompileRule)]
+ -- ^ Compilers to add
+ -> Runtime ()
+addNewCompilers oldCompilers newCompilers = Runtime $ do
+ -- Get some information
+ logger <- hakyllLogger <$> ask
+ section logger "Adding new compilers"
+ provider <- hakyllResourceProvider <$> ask
+ store <- hakyllStore <$> ask
+
+ let -- All compilers
+ compilers = oldCompilers ++ newCompilers
+
+ -- Get all dependencies for the compilers
+ dependencies = flip map compilers $ \(id', compiler) ->
+ let deps = runCompilerDependencies compiler id' provider
+ in (id', deps)
+
+ -- Create a compiler map (Id -> Compiler)
+ compilerMap = M.fromList compilers
+
+ -- Create the dependency graph
+ currentGraph = fromList dependencies
+
+ -- Find the old graph and append the new graph to it. This forms the
+ -- complete graph
+ completeGraph <- timed logger "Creating graph" $
+ mappend currentGraph . hakyllGraph <$> get
+
+ orderedCompilers <- timed logger "Solving dependencies" $ do
+ -- Check which items are up-to-date. This only needs to happen for the new
+ -- compilers
+ oldModified <- hakyllModified <$> get
+ newModified <- liftIO $ modified provider store $ map fst newCompilers
+
+ let modified' = oldModified `S.union` newModified
+
+ -- Find obsolete items. Every item that is reachable from a modified
+ -- item is considered obsolete. From these obsolete items, we are only
+ -- interested in ones that are in the current subgraph.
+ obsolete = S.filter (`member` currentGraph)
+ $ reachableNodes modified' $ reverse completeGraph
+
+ -- Solve the graph and retain only the obsolete items
+ ordered = filter (`S.member` obsolete) $ solveDependencies currentGraph
+
+ -- Update the state
+ modify $ updateState modified' completeGraph
+
+ -- Join the order with the compilers again
+ return $ map (id &&& (compilerMap M.!)) ordered
+
+ -- Now run the ordered list of compilers
+ unRuntime $ runCompilers orderedCompilers
+ where
+ -- Add the modified information for the new compilers
+ updateState modified' graph state = state
+ { hakyllModified = modified'
+ , hakyllGraph = graph
+ }
+
+runCompilers :: [(Identifier, Compiler () CompileRule)]
+ -- ^ Ordered list of compilers
+ -> Runtime ()
+ -- ^ No result
+runCompilers [] = return ()
+runCompilers ((id', compiler) : compilers) = Runtime $ do
+ -- Obtain information
+ logger <- hakyllLogger <$> ask
+ routes <- hakyllRoutes <$> ask
+ provider <- hakyllResourceProvider <$> ask
+ store <- hakyllStore <$> ask
+ modified' <- hakyllModified <$> get
+
+ section logger $ "Compiling " ++ show id'
+
+ let -- Check if the resource was modified
+ isModified = id' `S.member` modified'
+
+ -- Run the compiler
+ result <- timed logger "Total compile time" $ liftIO $
+ runCompiler compiler id' provider routes store isModified logger
+
+ case result of
+ -- Compile rule for one item, easy stuff
+ CompileRule compiled -> do
+ case runRoutes routes id' of
+ Nothing -> return ()
+ Just url -> timed logger ("Routing to " ++ url) $ do
+ destination <-
+ destinationDirectory . hakyllConfiguration <$> ask
+ let path = destination </> url
+ liftIO $ makeDirectories path
+ liftIO $ write path compiled
+
+ -- Continue for the remaining compilers
+ unRuntime $ runCompilers compilers
+
+ -- Metacompiler, slightly more complicated
+ MetaCompileRule newCompilers ->
+ -- Actually I was just kidding, it's not hard at all
+ unRuntime $ addNewCompilers compilers newCompilers
diff --git a/src/Hakyll/Core/Store.hs b/src/Hakyll/Core/Store.hs
new file mode 100644
index 0000000..12e33a7
--- /dev/null
+++ b/src/Hakyll/Core/Store.hs
@@ -0,0 +1,88 @@
+-- | A store for storing and retrieving items
+--
+module Hakyll.Core.Store
+ ( Store
+ , makeStore
+ , storeSet
+ , storeGet
+ ) where
+
+import Control.Concurrent.MVar (MVar, newMVar, readMVar, modifyMVar_)
+import System.FilePath ((</>))
+import System.Directory (doesFileExist)
+import Data.Map (Map)
+import qualified Data.Map as M
+
+import Data.Binary (Binary, encodeFile, decodeFile)
+import Data.Typeable (Typeable)
+
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+
+-- | Data structure used for the store
+--
+data Store = Store
+ { -- | All items are stored on the filesystem
+ storeDirectory :: FilePath
+ , -- | And some items are also kept in-memory
+ storeMap :: MVar (Map FilePath CompiledItem)
+ }
+
+-- | Initialize the store
+--
+makeStore :: FilePath -> IO Store
+makeStore directory = do
+ mvar <- newMVar M.empty
+ return Store
+ { storeDirectory = directory
+ , storeMap = mvar
+ }
+
+-- | Auxiliary: add an item to the map
+--
+addToMap :: (Binary a, Typeable a, Writable a)
+ => Store -> FilePath -> a -> IO ()
+addToMap store path value =
+ modifyMVar_ (storeMap store) $ return . M.insert path (compiledItem value)
+
+-- | Construct the path of the file where an item will be stored
+--
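+-- For example (illustrative): for a store rooted at @_store@ and an
+-- identifier whose 'toFilePath' is @posts/foo@,
+--
+-- > makePath store "Hakyll.Core.Compiler" identifier
+--
+-- points at @_store/Hakyll.Core.Compiler/posts/foo/.hakyllstore@.
+--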
+makePath :: Store -> String -> Identifier -> FilePath
+makePath store name identifier =
+ storeDirectory store </> name </> toFilePath identifier </> ".hakyllstore"
+
+-- | Store an item
+--
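+-- A minimal round trip, assuming a @store@ and an @identifier@ are in scope
+-- (only types with 'Binary', 'Typeable' and 'Writable' instances, such as
+-- 'String', can be stored):
+--
+-- > storeSet store "Hakyll.Core.Compiler" identifier ("body" :: String)
+-- > item <- storeGet store "Hakyll.Core.Compiler" identifier :: IO (Maybe String)
+--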
+storeSet :: (Binary a, Typeable a, Writable a)
+ => Store -> String -> Identifier -> a -> IO ()
+storeSet store name identifier value = do
+ makeDirectories path
+ encodeFile path value
+ addToMap store path value
+ where
+ path = makePath store name identifier
+
+-- | Load an item
+--
+storeGet :: (Binary a, Typeable a, Writable a)
+ => Store -> String -> Identifier -> IO (Maybe a)
+storeGet store name identifier = do
+ -- First check the in-memory map
+ map' <- readMVar $ storeMap store
+ case M.lookup path map' of
+ -- Found in the in-memory map
+ Just c -> return $ Just $ unCompiledItem c
+ -- Not found in the map, try the filesystem
+ Nothing -> do
+ exists <- doesFileExist path
+ if not exists
+ -- Not found in the filesystem either
+ then return Nothing
+ -- Found in the filesystem
+ else do v <- decodeFile path
+ addToMap store path v
+ return $ Just v
+ where
+ path = makePath store name identifier
diff --git a/src/Hakyll/Core/UnixFilter.hs b/src/Hakyll/Core/UnixFilter.hs
new file mode 100644
index 0000000..ee4b6cd
--- /dev/null
+++ b/src/Hakyll/Core/UnixFilter.hs
@@ -0,0 +1,76 @@
+-- | A Compiler that supports unix filters.
+--
+module Hakyll.Core.UnixFilter
+ ( unixFilter
+ ) where
+
+import Control.Concurrent (forkIO)
+import System.IO (hPutStr, hClose, hGetContents)
+import System.Posix.Process (executeFile, forkProcess)
+import System.Posix.IO ( dupTo, createPipe, stdInput
+ , stdOutput, closeFd, fdToHandle
+ )
+
+import Hakyll.Core.Compiler
+
+-- | Use a unix filter as a compiler. For example, we could use the 'rev' program
+-- as a compiler.
+--
+-- > rev :: Compiler Resource String
+-- > rev = getResourceString >>> unixFilter "rev" []
+--
+-- A more realistic example: one can use this to call, for example, the sass
+-- compiler on CSS files. More information about sass can be found here:
+--
+-- <http://sass-lang.com/>
+--
+-- The code is fairly straightforward, given that we use @.scss@ for sass:
+--
+-- > route "style.scss" $ setExtension "css"
+-- > compile "style.scss" $
+-- > getResourceString >>> unixFilter "sass" ["-s", "--scss"]
+-- > >>> arr compressCss
+--
+unixFilter :: String -- ^ Program name
+ -> [String] -- ^ Program args
+ -> Compiler String String -- ^ Resulting compiler
+unixFilter programName args =
+ timedCompiler ("Executing external program " ++ programName) $
+ unsafeCompiler $ \input -> unixFilterIO programName args input
+
+-- | Internally used function
+--
+unixFilterIO :: String
+ -> [String]
+ -> String
+ -> IO String
+unixFilterIO programName args input = do
+ -- Create pipes
+ (stdinRead, stdinWrite) <- createPipe
+ (stdoutRead, stdoutWrite) <- createPipe
+
+ -- Fork the child
+ _ <- forkProcess $ do
+ -- Copy our pipes over the regular stdin/stdout
+ _ <- dupTo stdinRead stdInput
+ _ <- dupTo stdoutWrite stdOutput
+
+ -- Close the now unneeded file descriptors in the child
+ mapM_ closeFd [stdinWrite, stdoutRead, stdinRead, stdoutWrite]
+
+ -- Execute the program
+ _ <- executeFile programName True args Nothing
+ return ()
+
+    -- On the parent side, close the child's ends of the pipes.
+ mapM_ closeFd [stdinRead, stdoutWrite]
+
+ -- Write the input to the child pipe
+ _ <- forkIO $ do
+ stdinWriteHandle <- fdToHandle stdinWrite
+ hPutStr stdinWriteHandle input
+ hClose stdinWriteHandle
+
+ -- Receive the output from the child
+ stdoutReadHandle <- fdToHandle stdoutRead
+ hGetContents stdoutReadHandle
diff --git a/src/Hakyll/Core/Util/Arrow.hs b/src/Hakyll/Core/Util/Arrow.hs
new file mode 100644
index 0000000..1896e11
--- /dev/null
+++ b/src/Hakyll/Core/Util/Arrow.hs
@@ -0,0 +1,25 @@
+-- | Various arrow utility functions
+--
+module Hakyll.Core.Util.Arrow
+ ( constA
+ , sequenceA
+ , unitA
+ ) where
+
+import Control.Arrow (Arrow, (&&&), arr, (>>^))
+
+-- | An arrow that ignores its input and always yields the same value
+--
+constA :: Arrow a
+       => c
+       -> a b c
+constA = arr . const
+
+-- | Run a list of arrows on a common input, collecting the results in order
+--
+sequenceA :: Arrow a
+          => [a b c]
+          -> a b [c]
+sequenceA = foldr reduce $ constA []
+  where
+    reduce xa la = xa &&& la >>^ uncurry (:)
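+-- For example, with the plain function instance of 'Arrow' (illustrative):
+--
+-- > sequenceA [arr (+ 1), arr (* 2)] 10 == [11, 20]
+--
+-- Every arrow receives the same input, and the results keep the list order.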
+
+-- | An arrow that discards its input and yields unit
+--
+unitA :: Arrow a
+      => a b ()
+unitA = constA ()
diff --git a/src/Hakyll/Core/Util/File.hs b/src/Hakyll/Core/Util/File.hs
new file mode 100644
index 0000000..9babc8b
--- /dev/null
+++ b/src/Hakyll/Core/Util/File.hs
@@ -0,0 +1,90 @@
+-- | A module containing various file utility functions
+--
+module Hakyll.Core.Util.File
+ ( makeDirectories
+ , getRecursiveContents
+ , isFileObsolete
+ , isFileInternal
+ ) where
+
+import Control.Applicative ((<$>))
+import System.Time (ClockTime)
+import Control.Monad (forM, filterM)
+import Data.List (isPrefixOf)
+import System.Directory ( createDirectoryIfMissing, doesDirectoryExist
+ , doesFileExist, getModificationTime
+ , getDirectoryContents
+ )
+import System.FilePath ( normalise, takeDirectory, splitPath
+ , dropTrailingPathSeparator, (</>)
+ )
+
+import Hakyll.Core.Configuration
+
+-- | Given a path to a file, try to make the path writable by creating
+-- all directories on the path.
+--
+makeDirectories :: FilePath -> IO ()
+makeDirectories = createDirectoryIfMissing True . takeDirectory
+
+-- | Get all contents of a directory. Note that files starting with a dot (.)
+-- will be ignored.
+--
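+-- For example (paths illustrative):
+--
+-- > getRecursiveContents False "posts"
+--
+-- could yield @["posts/2011-02-26-tutorial.markdown", "posts/hello.markdown"]@.
+--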
+getRecursiveContents :: Bool -- ^ Include directories?
+ -> FilePath -- ^ Directory to search
+ -> IO [FilePath] -- ^ List of files found
+getRecursiveContents includeDirs topdir = do
+ topdirExists <- doesDirectoryExist topdir
+ if not topdirExists
+ then return []
+ else do
+ names <- filter isProper <$> getDirectoryContents topdir
+ paths <- forM names $ \name -> do
+ let path = normalise $ topdir </> name
+ isDirectory <- doesDirectoryExist path
+ if isDirectory then getRecursiveContents includeDirs path
+ else return [path]
+ return $ if includeDirs then topdir : concat paths
+ else concat paths
+ where
+ isProper = not . (== ".") . take 1
+
+-- | Check if a timestamp is obsolete compared to the timestamps of a number of
+-- files. When there are no files, it is never obsolete.
+--
+isObsolete :: ClockTime -- ^ The time to check.
+ -> [FilePath] -- ^ Dependencies of the cached file.
+ -> IO Bool
+isObsolete _ [] = return False
+isObsolete timeStamp depends = do
+ depends' <- filterM doesFileExist depends
+ dependsModified <- mapM getModificationTime depends'
+ return (timeStamp < maximum dependsModified)
+
+-- | Check if a file is obsolete, given its dependencies. When the file does
+-- not exist, it is always obsolete. Otherwise, it is obsolete if any of its
+-- dependencies has a more recent modification time than the file.
+--
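+-- For example, to check whether a generated page is outdated relative to its
+-- sources (paths illustrative):
+--
+-- > isFileObsolete "_site/index.html" ["index.markdown", "templates/default.html"]
+--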
+isFileObsolete :: FilePath -- ^ The cached file
+ -> [FilePath] -- ^ Dependencies of the cached file
+ -> IO Bool
+isFileObsolete file depends = do
+ exists <- doesFileExist file
+ if not exists
+ then return True
+ else do timeStamp <- getModificationTime file
+ isObsolete timeStamp depends
+
+-- | Check if a file is meant for Hakyll internal use, i.e. if it is located in
+-- the destination or store directory
+--
+isFileInternal :: HakyllConfiguration -- ^ Configuration
+ -> FilePath -- ^ File to check
+ -> Bool -- ^ If the given file is internal
+isFileInternal configuration file =
+ any (`isPrefixOf` split file) dirs
+ where
+ split = map dropTrailingPathSeparator . splitPath
+ dirs = map (split . ($ configuration)) [ destinationDirectory
+ , storeDirectory
+ ]
diff --git a/src/Hakyll/Core/Util/String.hs b/src/Hakyll/Core/Util/String.hs
new file mode 100644
index 0000000..7f75a36
--- /dev/null
+++ b/src/Hakyll/Core/Util/String.hs
@@ -0,0 +1,48 @@
+-- | Miscellaneous string manipulation functions.
+--
+module Hakyll.Core.Util.String
+ ( trim
+ , replaceAll
+ , splitAll
+ ) where
+
+import Data.Char (isSpace)
+import Data.Maybe (listToMaybe)
+
+import Text.Regex.PCRE ((=~~))
+
+-- | Trim a string (drop spaces, tabs and newlines at both sides).
+--
+trim :: String -> String
+trim = reverse . trim' . reverse . trim'
+ where
+ trim' = dropWhile isSpace
+
+-- | A simple (but inefficient) regex replace function
+--
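+-- For example:
+--
+-- > replaceAll "[0-9]+" (const "N") "a1b22c" == "aNbNc"
+--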
+replaceAll :: String -- ^ Pattern
+ -> (String -> String) -- ^ Replacement (called on capture)
+ -> String -- ^ Source string
+ -> String -- ^ Result
+replaceAll pattern f source = replaceAll' source
+ where
+ replaceAll' src = case listToMaybe (src =~~ pattern) of
+ Nothing -> src
+ Just (o, l) ->
+ let (before, tmp) = splitAt o src
+ (capture, after) = splitAt l tmp
+ in before ++ f capture ++ replaceAll' after
+
+-- | A simple regex split function. The resulting list will contain no empty
+-- strings.
+--
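+-- For example:
+--
+-- > splitAll "," "a,b,,c" == ["a", "b", "c"]
+--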
+splitAll :: String -- ^ Pattern
+ -> String -- ^ String to split
+ -> [String] -- ^ Result
+splitAll pattern = filter (not . null) . splitAll'
+ where
+ splitAll' src = case listToMaybe (src =~~ pattern) of
+ Nothing -> [src]
+ Just (o, l) ->
+ let (before, tmp) = splitAt o src
+ in before : splitAll' (drop l tmp)
diff --git a/src/Hakyll/Core/Writable.hs b/src/Hakyll/Core/Writable.hs
new file mode 100644
index 0000000..a3fd421
--- /dev/null
+++ b/src/Hakyll/Core/Writable.hs
@@ -0,0 +1,22 @@
+-- | Describes writable items, i.e. items that can be saved to the disk
+--
+{-# LANGUAGE FlexibleInstances #-}
+module Hakyll.Core.Writable
+ ( Writable (..)
+ ) where
+
+import Data.Word (Word8)
+
+import qualified Data.ByteString as SB
+
+-- | Describes an item that can be saved to the disk
+--
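+-- For example, using the 'String' instance below (path illustrative):
+--
+-- > write "_site/index.html" "<h1>Hello</h1>"
+--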
+class Writable a where
+ -- | Save an item to the given filepath
+ write :: FilePath -> a -> IO ()
+
+instance Writable [Char] where
+ write = writeFile
+
+instance Writable [Word8] where
+ write p = SB.writeFile p . SB.pack