author     Jasper Van der Jeugt <m@jaspervdj.be>  2017-06-19 11:57:23 +0200
committer  Jasper Van der Jeugt <m@jaspervdj.be>  2017-06-19 11:57:23 +0200
commit     67ecff7ad383640bc73d64edc2506c7cc648a134 (patch)
tree       6d328e43c3ab86c29a2d775fabaa23618c16fb51 /lib/Hakyll/Core
parent     2df3209bafa08e6b77ee4a8598fc503269513527 (diff)
download   hakyll-67ecff7ad383640bc73d64edc2506c7cc648a134.tar.gz
Move src/ to lib/, put Init.hs in src/
Diffstat (limited to 'lib/Hakyll/Core')
-rw-r--r--  lib/Hakyll/Core/Compiler.hs  189
-rw-r--r--  lib/Hakyll/Core/Compiler/Internal.hs  265
-rw-r--r--  lib/Hakyll/Core/Compiler/Require.hs  121
-rw-r--r--  lib/Hakyll/Core/Configuration.hs  134
-rw-r--r--  lib/Hakyll/Core/Dependencies.hs  146
-rw-r--r--  lib/Hakyll/Core/File.hs  93
-rw-r--r--  lib/Hakyll/Core/Identifier.hs  80
-rw-r--r--  lib/Hakyll/Core/Identifier/Pattern.hs  322
-rw-r--r--  lib/Hakyll/Core/Item.hs  63
-rw-r--r--  lib/Hakyll/Core/Item/SomeItem.hs  23
-rw-r--r--  lib/Hakyll/Core/Logger.hs  97
-rw-r--r--  lib/Hakyll/Core/Metadata.hs  138
-rw-r--r--  lib/Hakyll/Core/Provider.hs  43
-rw-r--r--  lib/Hakyll/Core/Provider/Internal.hs  202
-rw-r--r--  lib/Hakyll/Core/Provider/Metadata.hs  151
-rw-r--r--  lib/Hakyll/Core/Provider/MetadataCache.hs  62
-rw-r--r--  lib/Hakyll/Core/Routes.hs  194
-rw-r--r--  lib/Hakyll/Core/Rules.hs  223
-rw-r--r--  lib/Hakyll/Core/Rules/Internal.hs  109
-rw-r--r--  lib/Hakyll/Core/Runtime.hs  276
-rw-r--r--  lib/Hakyll/Core/Store.hs  197
-rw-r--r--  lib/Hakyll/Core/UnixFilter.hs  159
-rw-r--r--  lib/Hakyll/Core/Util/File.hs  56
-rw-r--r--  lib/Hakyll/Core/Util/Parser.hs  32
-rw-r--r--  lib/Hakyll/Core/Util/String.hs  78
-rw-r--r--  lib/Hakyll/Core/Writable.hs  56
26 files changed, 3509 insertions, 0 deletions
diff --git a/lib/Hakyll/Core/Compiler.hs b/lib/Hakyll/Core/Compiler.hs
new file mode 100644
index 0000000..42b24d6
--- /dev/null
+++ b/lib/Hakyll/Core/Compiler.hs
@@ -0,0 +1,189 @@
+--------------------------------------------------------------------------------
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+module Hakyll.Core.Compiler
+ ( Compiler
+ , getUnderlying
+ , getUnderlyingExtension
+ , makeItem
+ , getRoute
+ , getResourceBody
+ , getResourceString
+ , getResourceLBS
+ , getResourceFilePath
+
+ , Internal.Snapshot
+ , saveSnapshot
+ , Internal.load
+ , Internal.loadSnapshot
+ , Internal.loadBody
+ , Internal.loadSnapshotBody
+ , Internal.loadAll
+ , Internal.loadAllSnapshots
+
+ , cached
+ , unsafeCompiler
+ , debugCompiler
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Monad (when, unless)
+import Data.Binary (Binary)
+import Data.ByteString.Lazy (ByteString)
+import Data.Typeable (Typeable)
+import System.Environment (getProgName)
+import System.FilePath (takeExtension)
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Compiler.Internal
+import qualified Hakyll.Core.Compiler.Require as Internal
+import Hakyll.Core.Dependencies
+import Hakyll.Core.Identifier
+import Hakyll.Core.Item
+import Hakyll.Core.Logger as Logger
+import Hakyll.Core.Provider
+import Hakyll.Core.Routes
+import qualified Hakyll.Core.Store as Store
+
+
+--------------------------------------------------------------------------------
+-- | Get the underlying identifier.
+getUnderlying :: Compiler Identifier
+getUnderlying = compilerUnderlying <$> compilerAsk
+
+
+--------------------------------------------------------------------------------
+-- | Get the extension of the underlying identifier. Returns something like
+-- @".html"@
+getUnderlyingExtension :: Compiler String
+getUnderlyingExtension = takeExtension . toFilePath <$> getUnderlying
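+
+-- For example (a minimal sketch; the @".md"@ check is illustrative), a
+-- compiler can branch on the extension of the item it is processing:
+--
+-- > compileByExt :: Compiler (Item String)
+-- > compileByExt = do
+-- >     ext <- getUnderlyingExtension
+-- >     if ext == ".md" then getResourceBody else getResourceString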
+
+
+--------------------------------------------------------------------------------
+makeItem :: a -> Compiler (Item a)
+makeItem x = do
+ identifier <- getUnderlying
+ return $ Item identifier x
+
+
+--------------------------------------------------------------------------------
+-- | Get the route for a specified item
+getRoute :: Identifier -> Compiler (Maybe FilePath)
+getRoute identifier = do
+ provider <- compilerProvider <$> compilerAsk
+ routes <- compilerRoutes <$> compilerAsk
+ -- Note that this makes us dependent on that identifier: when the metadata
+ -- of that item changes, the route may change, so we have to recompile.
+ (mfp, um) <- compilerUnsafeIO $ runRoutes routes provider identifier
+ when um $ compilerTellDependencies [IdentifierDependency identifier]
+ return mfp
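+
+-- A usage sketch: look up the route of the item that is currently being
+-- compiled, combining two functions from this module:
+--
+-- > getUnderlying >>= getRoute :: Compiler (Maybe FilePath)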
+
+
+--------------------------------------------------------------------------------
+-- | Get the full contents of the matched source file as a string,
+-- but without the metadata preamble, if there was one.
+getResourceBody :: Compiler (Item String)
+getResourceBody = getResourceWith resourceBody
+
+
+--------------------------------------------------------------------------------
+-- | Get the full contents of the matched source file as a string.
+getResourceString :: Compiler (Item String)
+getResourceString = getResourceWith resourceString
+
+
+--------------------------------------------------------------------------------
+-- | Get the full contents of the matched source file as a lazy bytestring.
+getResourceLBS :: Compiler (Item ByteString)
+getResourceLBS = getResourceWith resourceLBS
+
+
+--------------------------------------------------------------------------------
+-- | Get the file path of the resource we are compiling
+getResourceFilePath :: Compiler FilePath
+getResourceFilePath = do
+ provider <- compilerProvider <$> compilerAsk
+ id' <- compilerUnderlying <$> compilerAsk
+ return $ resourceFilePath provider id'
+
+
+--------------------------------------------------------------------------------
+-- | Overloadable function for 'getResourceString' and 'getResourceLBS'
+getResourceWith :: (Provider -> Identifier -> IO a) -> Compiler (Item a)
+getResourceWith reader = do
+ provider <- compilerProvider <$> compilerAsk
+ id' <- compilerUnderlying <$> compilerAsk
+ let filePath = toFilePath id'
+ if resourceExists provider id'
+ then compilerUnsafeIO $ Item id' <$> reader provider id'
+ else fail $ error' filePath
+ where
+ error' fp = "Hakyll.Core.Compiler.getResourceWith: resource " ++
+ show fp ++ " not found"
+
+
+--------------------------------------------------------------------------------
+-- | Save a snapshot of the item. This function returns the same item, which
+-- is convenient for building '>>=' chains.
+saveSnapshot :: (Binary a, Typeable a)
+ => Internal.Snapshot -> Item a -> Compiler (Item a)
+saveSnapshot snapshot item = do
+ store <- compilerStore <$> compilerAsk
+ logger <- compilerLogger <$> compilerAsk
+ compilerUnsafeIO $ do
+ Logger.debug logger $ "Storing snapshot: " ++ snapshot
+ Internal.saveSnapshot store snapshot item
+
+ -- Signal that we saved the snapshot.
+ Compiler $ \_ -> return $ CompilerSnapshot snapshot (return item)
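+
+-- The '>>=' chaining mentioned above looks like this in a rules file
+-- (a sketch; 'compile' comes from "Hakyll.Core.Rules"):
+--
+-- > compile $ getResourceBody >>= saveSnapshot "raw"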
+
+
+--------------------------------------------------------------------------------
+cached :: (Binary a, Typeable a)
+ => String
+ -> Compiler a
+ -> Compiler a
+cached name compiler = do
+ id' <- compilerUnderlying <$> compilerAsk
+ store <- compilerStore <$> compilerAsk
+ provider <- compilerProvider <$> compilerAsk
+
+ -- Give a better error message when the resource is not there at all.
+ unless (resourceExists provider id') $ fail $ itDoesntEvenExist id'
+
+ let modified = resourceModified provider id'
+ if modified
+ then do
+ x <- compiler
+ compilerUnsafeIO $ Store.set store [name, show id'] x
+ return x
+ else do
+ compilerTellCacheHits 1
+ x <- compilerUnsafeIO $ Store.get store [name, show id']
+ progName <- compilerUnsafeIO getProgName
+ case x of Store.Found x' -> return x'
+ _ -> fail $ error' progName
+ where
+ error' progName =
+ "Hakyll.Core.Compiler.cached: Cache corrupt! " ++
+ "Try running: " ++ progName ++ " clean"
+
+ itDoesntEvenExist id' =
+ "Hakyll.Core.Compiler.cached: You are trying to (perhaps " ++
+ "indirectly) use `cached` on a non-existing resource: there " ++
+ "is no file backing " ++ show id'
+
+
+--------------------------------------------------------------------------------
+unsafeCompiler :: IO a -> Compiler a
+unsafeCompiler = compilerUnsafeIO
+
+
+--------------------------------------------------------------------------------
+-- | Compiler for debugging purposes
+debugCompiler :: String -> Compiler ()
+debugCompiler msg = do
+ logger <- compilerLogger <$> compilerAsk
+ compilerUnsafeIO $ Logger.debug logger msg
diff --git a/lib/Hakyll/Core/Compiler/Internal.hs b/lib/Hakyll/Core/Compiler/Internal.hs
new file mode 100644
index 0000000..7b1df83
--- /dev/null
+++ b/lib/Hakyll/Core/Compiler/Internal.hs
@@ -0,0 +1,265 @@
+--------------------------------------------------------------------------------
+-- | Internally used compiler module
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE GADTs #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE MultiParamTypeClasses #-}
+module Hakyll.Core.Compiler.Internal
+ ( -- * Types
+ Snapshot
+ , CompilerRead (..)
+ , CompilerWrite (..)
+ , CompilerResult (..)
+ , Compiler (..)
+ , runCompiler
+
+ -- * Core operations
+ , compilerTell
+ , compilerAsk
+ , compilerThrow
+ , compilerCatch
+ , compilerResult
+ , compilerUnsafeIO
+
+ -- * Utilities
+ , compilerTellDependencies
+ , compilerTellCacheHits
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Applicative (Alternative (..))
+import Control.Exception (SomeException, handle)
+import Control.Monad (forM_)
+import Control.Monad.Except (MonadError (..))
+import Data.Set (Set)
+import qualified Data.Set as S
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Configuration
+import Hakyll.Core.Dependencies
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Logger (Logger)
+import qualified Hakyll.Core.Logger as Logger
+import Hakyll.Core.Metadata
+import Hakyll.Core.Provider
+import Hakyll.Core.Routes
+import Hakyll.Core.Store
+
+
+--------------------------------------------------------------------------------
+-- | Whilst compiling an item, it is possible to save multiple snapshots of it,
+-- and not just the final result.
+type Snapshot = String
+
+
+--------------------------------------------------------------------------------
+-- | Environment in which a compiler runs
+data CompilerRead = CompilerRead
+ { -- | Main configuration
+ compilerConfig :: Configuration
+ , -- | Underlying identifier
+ compilerUnderlying :: Identifier
+ , -- | Resource provider
+ compilerProvider :: Provider
+ , -- | List of all known identifiers
+ compilerUniverse :: Set Identifier
+ , -- | Site routes
+ compilerRoutes :: Routes
+ , -- | Compiler store
+ compilerStore :: Store
+ , -- | Logger
+ compilerLogger :: Logger
+ }
+
+
+--------------------------------------------------------------------------------
+data CompilerWrite = CompilerWrite
+ { compilerDependencies :: [Dependency]
+ , compilerCacheHits :: Int
+ } deriving (Show)
+
+
+--------------------------------------------------------------------------------
+instance Monoid CompilerWrite where
+ mempty = CompilerWrite [] 0
+ mappend (CompilerWrite d1 h1) (CompilerWrite d2 h2) =
+ CompilerWrite (d1 ++ d2) (h1 + h2)
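+
+-- Merging two writes concatenates their dependency lists and sums the cache
+-- hits, e.g. (a sketch, assuming @d1@ and @d2@ are 'Dependency' values):
+--
+-- > mappend (CompilerWrite [d1] 1) (CompilerWrite [d2] 2)
+-- >     -- = CompilerWrite [d1, d2] 3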
+
+
+--------------------------------------------------------------------------------
+data CompilerResult a where
+ CompilerDone :: a -> CompilerWrite -> CompilerResult a
+ CompilerSnapshot :: Snapshot -> Compiler a -> CompilerResult a
+ CompilerError :: [String] -> CompilerResult a
+ CompilerRequire :: (Identifier, Snapshot) -> Compiler a -> CompilerResult a
+
+
+--------------------------------------------------------------------------------
+-- | A monad which lets you compile items and takes care of dependency tracking
+-- for you.
+newtype Compiler a = Compiler
+ { unCompiler :: CompilerRead -> IO (CompilerResult a)
+ }
+
+
+--------------------------------------------------------------------------------
+instance Functor Compiler where
+ fmap f (Compiler c) = Compiler $ \r -> do
+ res <- c r
+ return $ case res of
+ CompilerDone x w -> CompilerDone (f x) w
+ CompilerSnapshot s c' -> CompilerSnapshot s (fmap f c')
+ CompilerError e -> CompilerError e
+ CompilerRequire i c' -> CompilerRequire i (fmap f c')
+ {-# INLINE fmap #-}
+
+
+--------------------------------------------------------------------------------
+instance Monad Compiler where
+ return x = Compiler $ \_ -> return $ CompilerDone x mempty
+ {-# INLINE return #-}
+
+ Compiler c >>= f = Compiler $ \r -> do
+ res <- c r
+ case res of
+ CompilerDone x w -> do
+ res' <- unCompiler (f x) r
+ return $ case res' of
+ CompilerDone y w' -> CompilerDone y (w `mappend` w')
+ CompilerSnapshot s c' -> CompilerSnapshot s $ do
+ compilerTell w -- Save dependencies!
+ c'
+ CompilerError e -> CompilerError e
+ CompilerRequire i c' -> CompilerRequire i $ do
+ compilerTell w -- Save dependencies!
+ c'
+
+ CompilerSnapshot s c' -> return $ CompilerSnapshot s (c' >>= f)
+ CompilerError e -> return $ CompilerError e
+ CompilerRequire i c' -> return $ CompilerRequire i (c' >>= f)
+ {-# INLINE (>>=) #-}
+
+ fail = compilerThrow . return
+ {-# INLINE fail #-}
+
+
+--------------------------------------------------------------------------------
+instance Applicative Compiler where
+ pure x = return x
+ {-# INLINE pure #-}
+
+ f <*> x = f >>= \f' -> fmap f' x
+ {-# INLINE (<*>) #-}
+
+
+--------------------------------------------------------------------------------
+instance MonadMetadata Compiler where
+ getMetadata = compilerGetMetadata
+ getMatches = compilerGetMatches
+
+
+--------------------------------------------------------------------------------
+instance MonadError [String] Compiler where
+ throwError = compilerThrow
+ catchError = compilerCatch
+
+
+--------------------------------------------------------------------------------
+runCompiler :: Compiler a -> CompilerRead -> IO (CompilerResult a)
+runCompiler compiler read' = handle handler $ unCompiler compiler read'
+ where
+ handler :: SomeException -> IO (CompilerResult a)
+ handler e = return $ CompilerError [show e]
+
+
+--------------------------------------------------------------------------------
+instance Alternative Compiler where
+ empty = compilerThrow []
+ x <|> y = compilerCatch x $ \es -> do
+ logger <- compilerLogger <$> compilerAsk
+ forM_ es $ \e -> compilerUnsafeIO $ Logger.debug logger $
+ "Hakyll.Core.Compiler.Internal: Alternative failed: " ++ e
+ y
+ {-# INLINE (<|>) #-}
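+
+-- A usage sketch: fall back to a default when the first compiler fails
+-- ('getResourceBody' and 'makeItem' live in "Hakyll.Core.Compiler"):
+--
+-- > getResourceBody <|> makeItem "default body"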
+
+
+--------------------------------------------------------------------------------
+compilerAsk :: Compiler CompilerRead
+compilerAsk = Compiler $ \r -> return $ CompilerDone r mempty
+{-# INLINE compilerAsk #-}
+
+
+--------------------------------------------------------------------------------
+compilerTell :: CompilerWrite -> Compiler ()
+compilerTell deps = Compiler $ \_ -> return $ CompilerDone () deps
+{-# INLINE compilerTell #-}
+
+
+--------------------------------------------------------------------------------
+compilerThrow :: [String] -> Compiler a
+compilerThrow es = Compiler $ \_ -> return $ CompilerError es
+{-# INLINE compilerThrow #-}
+
+
+--------------------------------------------------------------------------------
+compilerCatch :: Compiler a -> ([String] -> Compiler a) -> Compiler a
+compilerCatch (Compiler x) f = Compiler $ \r -> do
+ res <- x r
+ case res of
+ CompilerDone res' w -> return (CompilerDone res' w)
+ CompilerSnapshot s c -> return (CompilerSnapshot s (compilerCatch c f))
+ CompilerError e -> unCompiler (f e) r
+ CompilerRequire i c -> return (CompilerRequire i (compilerCatch c f))
+{-# INLINE compilerCatch #-}
+
+
+--------------------------------------------------------------------------------
+-- | Put the result back in a compiler
+compilerResult :: CompilerResult a -> Compiler a
+compilerResult x = Compiler $ \_ -> return x
+{-# INLINE compilerResult #-}
+
+
+--------------------------------------------------------------------------------
+compilerUnsafeIO :: IO a -> Compiler a
+compilerUnsafeIO io = Compiler $ \_ -> do
+ x <- io
+ return $ CompilerDone x mempty
+{-# INLINE compilerUnsafeIO #-}
+
+
+--------------------------------------------------------------------------------
+compilerTellDependencies :: [Dependency] -> Compiler ()
+compilerTellDependencies ds = do
+ logger <- compilerLogger <$> compilerAsk
+ forM_ ds $ \d -> compilerUnsafeIO $ Logger.debug logger $
+ "Hakyll.Core.Compiler.Internal: Adding dependency: " ++ show d
+ compilerTell mempty {compilerDependencies = ds}
+{-# INLINE compilerTellDependencies #-}
+
+
+--------------------------------------------------------------------------------
+compilerTellCacheHits :: Int -> Compiler ()
+compilerTellCacheHits ch = compilerTell mempty {compilerCacheHits = ch}
+{-# INLINE compilerTellCacheHits #-}
+
+
+--------------------------------------------------------------------------------
+compilerGetMetadata :: Identifier -> Compiler Metadata
+compilerGetMetadata identifier = do
+ provider <- compilerProvider <$> compilerAsk
+ compilerTellDependencies [IdentifierDependency identifier]
+ compilerUnsafeIO $ resourceMetadata provider identifier
+
+
+--------------------------------------------------------------------------------
+compilerGetMatches :: Pattern -> Compiler [Identifier]
+compilerGetMatches pattern = do
+ universe <- compilerUniverse <$> compilerAsk
+ let matching = filterMatches pattern $ S.toList universe
+ set' = S.fromList matching
+ compilerTellDependencies [PatternDependency pattern set']
+ return matching
diff --git a/lib/Hakyll/Core/Compiler/Require.hs b/lib/Hakyll/Core/Compiler/Require.hs
new file mode 100644
index 0000000..c9373bf
--- /dev/null
+++ b/lib/Hakyll/Core/Compiler/Require.hs
@@ -0,0 +1,121 @@
+--------------------------------------------------------------------------------
+module Hakyll.Core.Compiler.Require
+ ( Snapshot
+ , save
+ , saveSnapshot
+ , load
+ , loadSnapshot
+ , loadBody
+ , loadSnapshotBody
+ , loadAll
+ , loadAllSnapshots
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Monad (when)
+import Data.Binary (Binary)
+import qualified Data.Set as S
+import Data.Typeable
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Dependencies
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Item
+import Hakyll.Core.Metadata
+import Hakyll.Core.Store (Store)
+import qualified Hakyll.Core.Store as Store
+
+
+--------------------------------------------------------------------------------
+save :: (Binary a, Typeable a) => Store -> Item a -> IO ()
+save store item = saveSnapshot store final item
+
+
+--------------------------------------------------------------------------------
+-- | Save a specific snapshot of an item, so you can load it later using
+-- 'loadSnapshot'.
+saveSnapshot :: (Binary a, Typeable a)
+ => Store -> Snapshot -> Item a -> IO ()
+saveSnapshot store snapshot item =
+ Store.set store (key (itemIdentifier item) snapshot) (itemBody item)
+
+
+--------------------------------------------------------------------------------
+-- | Load an item compiled elsewhere. If the required item is not yet compiled,
+-- the build system will take care of that automatically.
+load :: (Binary a, Typeable a) => Identifier -> Compiler (Item a)
+load id' = loadSnapshot id' final
+
+
+--------------------------------------------------------------------------------
+-- | Require a specific snapshot of an item.
+loadSnapshot :: (Binary a, Typeable a)
+ => Identifier -> Snapshot -> Compiler (Item a)
+loadSnapshot id' snapshot = do
+ store <- compilerStore <$> compilerAsk
+ universe <- compilerUniverse <$> compilerAsk
+
+ -- Quick check for better error messages
+ when (id' `S.notMember` universe) $ fail notFound
+
+ compilerTellDependencies [IdentifierDependency id']
+ compilerResult $ CompilerRequire (id', snapshot) $ do
+ result <- compilerUnsafeIO $ Store.get store (key id' snapshot)
+ case result of
+ Store.NotFound -> fail notFound
+ Store.WrongType e r -> fail $ wrongType e r
+ Store.Found x -> return $ Item id' x
+ where
+ notFound =
+ "Hakyll.Core.Compiler.Require.load: " ++ show id' ++
+ " (snapshot " ++ snapshot ++ ") was not found in the cache, " ++
+ "the cache might be corrupted or " ++
+ "the item you are referring to might not exist"
+ wrongType e r =
+ "Hakyll.Core.Compiler.Require.load: " ++ show id' ++
+ " (snapshot " ++ snapshot ++ ") was found in the cache, " ++
+ "but does not have the right type: expected " ++ show e ++
+ " but got " ++ show r
+
+
+--------------------------------------------------------------------------------
+-- | A shortcut for only requiring the body of an item.
+--
+-- > loadBody = fmap itemBody . load
+loadBody :: (Binary a, Typeable a) => Identifier -> Compiler a
+loadBody id' = loadSnapshotBody id' final
+
+
+--------------------------------------------------------------------------------
+loadSnapshotBody :: (Binary a, Typeable a)
+ => Identifier -> Snapshot -> Compiler a
+loadSnapshotBody id' snapshot = fmap itemBody $ loadSnapshot id' snapshot
+
+
+--------------------------------------------------------------------------------
+-- | This function allows you to 'load' a dynamic list of items
+loadAll :: (Binary a, Typeable a) => Pattern -> Compiler [Item a]
+loadAll pattern = loadAllSnapshots pattern final
+
+
+--------------------------------------------------------------------------------
+loadAllSnapshots :: (Binary a, Typeable a)
+ => Pattern -> Snapshot -> Compiler [Item a]
+loadAllSnapshots pattern snapshot = do
+ matching <- getMatches pattern
+ mapM (\i -> loadSnapshot i snapshot) matching
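+
+-- Usage sketch: load the @"content"@ snapshot of every post, e.g. at type
+-- @Compiler [Item String]@, for an archive page (the @"posts/*"@ pattern is
+-- illustrative):
+--
+-- > posts <- loadAllSnapshots (fromGlob "posts/*") "content"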
+
+
+--------------------------------------------------------------------------------
+key :: Identifier -> String -> [String]
+key identifier snapshot =
+ ["Hakyll.Core.Compiler.Require", show identifier, snapshot]
+
+
+--------------------------------------------------------------------------------
+final :: Snapshot
+final = "_final"
diff --git a/lib/Hakyll/Core/Configuration.hs b/lib/Hakyll/Core/Configuration.hs
new file mode 100644
index 0000000..52b23ec
--- /dev/null
+++ b/lib/Hakyll/Core/Configuration.hs
@@ -0,0 +1,134 @@
+--------------------------------------------------------------------------------
+-- | Exports a data structure for the top-level hakyll configuration
+module Hakyll.Core.Configuration
+ ( Configuration (..)
+ , shouldIgnoreFile
+ , defaultConfiguration
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Data.Default (Default (..))
+import Data.List (isPrefixOf, isSuffixOf)
+import System.Directory (canonicalizePath)
+import System.Exit (ExitCode)
+import System.FilePath (isAbsolute, normalise, takeFileName)
+import System.IO.Error (catchIOError)
+import System.Process (system)
+
+
+--------------------------------------------------------------------------------
+data Configuration = Configuration
+ { -- | Directory in which the output is written
+ destinationDirectory :: FilePath
+ , -- | Directory where hakyll's internal store is kept
+ storeDirectory :: FilePath
+ , -- | Directory in which some temporary files will be kept
+ tmpDirectory :: FilePath
+ , -- | Directory where hakyll finds the files to compile. This is @.@ by
+ -- default.
+ providerDirectory :: FilePath
+ , -- | Function to determine ignored files
+ --
+ -- In 'defaultConfiguration', the following files are ignored:
+ --
+ -- * files starting with a @.@
+ --
+ -- * files starting with a @#@
+ --
+ -- * files ending with a @~@
+ --
+ -- * files ending with @.swp@
+ --
+ -- Note that the files in 'destinationDirectory' and 'storeDirectory' will
+ -- also be ignored. Note that this is only the configuration parameter; to
+ -- actually perform the check, use 'shouldIgnoreFile'.
+ --
+ ignoreFile :: FilePath -> Bool
+ , -- | Here, you can plug in a system command to upload/deploy your site.
+ --
+ -- Example:
+ --
+ -- > rsync -ave 'ssh -p 2217' _site jaspervdj@jaspervdj.be:hakyll
+ --
+ -- You can execute this by using
+ --
+ -- > ./site deploy
+ --
+ deployCommand :: String
+ , -- | Function to deploy the site from Haskell.
+ --
+ -- By default, this command executes the shell command stored in
+ -- 'deployCommand'. If you override it, 'deployCommand' will not
+ -- be used implicitly.
+ --
+ -- The 'Configuration' object is passed as a parameter to this
+ -- function.
+ --
+ deploySite :: Configuration -> IO ExitCode
+ , -- | Use an in-memory cache for items. This is faster but uses more
+ -- memory.
+ inMemoryCache :: Bool
+ , -- | Override default host for preview server. Default is "127.0.0.1",
+ -- which binds only on the loopback address.
+ -- One can also override the host as a command line argument:
+ -- ./site preview -h "0.0.0.0"
+ previewHost :: String
+ , -- | Override default port for preview server. Default is 8000.
+ -- One can also override the port as a command line argument:
+ -- ./site preview -p 1234
+ previewPort :: Int
+ }
+
+--------------------------------------------------------------------------------
+instance Default Configuration where
+ def = defaultConfiguration
+
+--------------------------------------------------------------------------------
+-- | Default configuration for a hakyll application
+defaultConfiguration :: Configuration
+defaultConfiguration = Configuration
+ { destinationDirectory = "_site"
+ , storeDirectory = "_cache"
+ , tmpDirectory = "_cache/tmp"
+ , providerDirectory = "."
+ , ignoreFile = ignoreFile'
+ , deployCommand = "echo 'No deploy command specified' && exit 1"
+ , deploySite = system . deployCommand
+ , inMemoryCache = True
+ , previewHost = "127.0.0.1"
+ , previewPort = 8000
+ }
+ where
+ ignoreFile' path
+ | "." `isPrefixOf` fileName = True
+ | "#" `isPrefixOf` fileName = True
+ | "~" `isSuffixOf` fileName = True
+ | ".swp" `isSuffixOf` fileName = True
+ | otherwise = False
+ where
+ fileName = takeFileName path
+
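+-- A site usually overrides just a few fields of this default via record
+-- update syntax (a sketch; the directory names are illustrative):
+--
+-- > config :: Configuration
+-- > config = defaultConfiguration
+-- >     { destinationDirectory = "public"
+-- >     , providerDirectory    = "site"
+-- >     }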
+
+--------------------------------------------------------------------------------
+-- | Check if a file should be ignored
+shouldIgnoreFile :: Configuration -> FilePath -> IO Bool
+shouldIgnoreFile conf path = orM
+ [ inDir (destinationDirectory conf)
+ , inDir (storeDirectory conf)
+ , inDir (tmpDirectory conf)
+ , return (ignoreFile conf path')
+ ]
+ where
+ path' = normalise path
+ absolute = isAbsolute path
+
+ inDir dir
+ | absolute = do
+ dir' <- catchIOError (canonicalizePath dir) (const $ return dir)
+ return $ dir' `isPrefixOf` path'
+ | otherwise = return $ dir `isPrefixOf` path'
+
+ orM :: [IO Bool] -> IO Bool
+ orM [] = return False
+ orM (x : xs) = x >>= \b -> if b then return True else orM xs
diff --git a/lib/Hakyll/Core/Dependencies.hs b/lib/Hakyll/Core/Dependencies.hs
new file mode 100644
index 0000000..4a51b9c
--- /dev/null
+++ b/lib/Hakyll/Core/Dependencies.hs
@@ -0,0 +1,146 @@
+--------------------------------------------------------------------------------
+{-# LANGUAGE DeriveDataTypeable #-}
+module Hakyll.Core.Dependencies
+ ( Dependency (..)
+ , DependencyFacts
+ , outOfDate
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Monad (foldM, forM_, unless, when)
+import Control.Monad.Reader (ask)
+import Control.Monad.RWS (RWS, runRWS)
+import qualified Control.Monad.State as State
+import Control.Monad.Writer (tell)
+import Data.Binary (Binary (..), getWord8,
+ putWord8)
+import Data.List (find)
+import Data.Map (Map)
+import qualified Data.Map as M
+import Data.Maybe (fromMaybe)
+import Data.Set (Set)
+import qualified Data.Set as S
+import Data.Typeable (Typeable)
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+
+
+--------------------------------------------------------------------------------
+data Dependency
+ = PatternDependency Pattern (Set Identifier)
+ | IdentifierDependency Identifier
+ deriving (Show, Typeable)
+
+
+--------------------------------------------------------------------------------
+instance Binary Dependency where
+ put (PatternDependency p is) = putWord8 0 >> put p >> put is
+ put (IdentifierDependency i) = putWord8 1 >> put i
+ get = getWord8 >>= \t -> case t of
+ 0 -> PatternDependency <$> get <*> get
+ 1 -> IdentifierDependency <$> get
+ _ -> error "Data.Binary.get: Invalid Dependency"
+
+
+--------------------------------------------------------------------------------
+type DependencyFacts = Map Identifier [Dependency]
+
+
+--------------------------------------------------------------------------------
+outOfDate
+ :: [Identifier] -- ^ All known identifiers
+ -> Set Identifier -- ^ Initially out-of-date resources
+ -> DependencyFacts -- ^ Old dependency facts
+ -> (Set Identifier, DependencyFacts, [String])
+outOfDate universe ood oldFacts =
+ let (_, state, logs) = runRWS rws universe (DependencyState oldFacts ood)
+ in (dependencyOod state, dependencyFacts state, logs)
+ where
+ rws = do
+ checkNew
+ checkChangedPatterns
+ bruteForce
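+
+-- Sketch of the call site: the caller passes the old facts and the initially
+-- out-of-date set, and gets back the new out-of-date set, the updated facts,
+-- and a log of reasons (the variable names below are illustrative):
+--
+-- > let (ood', facts', msgs) = outOfDate universe initiallyOod oldFacts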
+
+
+--------------------------------------------------------------------------------
+data DependencyState = DependencyState
+ { dependencyFacts :: DependencyFacts
+ , dependencyOod :: Set Identifier
+ } deriving (Show)
+
+
+--------------------------------------------------------------------------------
+type DependencyM a = RWS [Identifier] [String] DependencyState a
+
+
+--------------------------------------------------------------------------------
+markOod :: Identifier -> DependencyM ()
+markOod id' = State.modify $ \s ->
+ s {dependencyOod = S.insert id' $ dependencyOod s}
+
+
+--------------------------------------------------------------------------------
+dependenciesFor :: Identifier -> DependencyM [Identifier]
+dependenciesFor id' = do
+ facts <- dependencyFacts <$> State.get
+ return $ concatMap dependenciesFor' $ fromMaybe [] $ M.lookup id' facts
+ where
+ dependenciesFor' (IdentifierDependency i) = [i]
+ dependenciesFor' (PatternDependency _ is) = S.toList is
+
+
+--------------------------------------------------------------------------------
+checkNew :: DependencyM ()
+checkNew = do
+ universe <- ask
+ facts <- dependencyFacts <$> State.get
+ forM_ universe $ \id' -> unless (id' `M.member` facts) $ do
+ tell [show id' ++ " is out-of-date because it is new"]
+ markOod id'
+
+
+--------------------------------------------------------------------------------
+checkChangedPatterns :: DependencyM ()
+checkChangedPatterns = do
+ facts <- M.toList . dependencyFacts <$> State.get
+ forM_ facts $ \(id', deps) -> do
+ deps' <- foldM (go id') [] deps
+ State.modify $ \s -> s
+ {dependencyFacts = M.insert id' deps' $ dependencyFacts s}
+ where
+ go _ ds (IdentifierDependency i) = return $ IdentifierDependency i : ds
+ go id' ds (PatternDependency p ls) = do
+ universe <- ask
+ let ls' = S.fromList $ filterMatches p universe
+ if ls == ls'
+ then return $ PatternDependency p ls : ds
+ else do
+ tell [show id' ++ " is out-of-date because a pattern changed"]
+ markOod id'
+ return $ PatternDependency p ls' : ds
+
+
+--------------------------------------------------------------------------------
+bruteForce :: DependencyM ()
+bruteForce = do
+ todo <- ask
+ go todo
+ where
+ go todo = do
+ (todo', changed) <- foldM check ([], False) todo
+ when changed (go todo')
+
+ check (todo, changed) id' = do
+ deps <- dependenciesFor id'
+ ood <- dependencyOod <$> State.get
+ case find (`S.member` ood) deps of
+ Nothing -> return (id' : todo, changed)
+ Just d -> do
+ tell [show id' ++ " is out-of-date because " ++
+ show d ++ " is out-of-date"]
+ markOod id'
+ return (todo, True)
diff --git a/lib/Hakyll/Core/File.hs b/lib/Hakyll/Core/File.hs
new file mode 100644
index 0000000..49af659
--- /dev/null
+++ b/lib/Hakyll/Core/File.hs
@@ -0,0 +1,93 @@
+--------------------------------------------------------------------------------
+-- | Exports simple compilers to just copy files
+{-# LANGUAGE CPP #-}
+{-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.File
+ ( CopyFile (..)
+ , copyFileCompiler
+ , TmpFile (..)
+ , newTmpFile
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Data.Binary (Binary (..))
+import Data.Typeable (Typeable)
+#if MIN_VERSION_directory(1,2,6)
+import System.Directory (copyFileWithMetadata)
+#else
+import System.Directory (copyFile)
+#endif
+import System.Directory (doesFileExist,
+ renameFile)
+import System.FilePath ((</>))
+import System.Random (randomIO)
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Compiler
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Configuration
+import Hakyll.Core.Item
+import Hakyll.Core.Provider
+import qualified Hakyll.Core.Store as Store
+import Hakyll.Core.Util.File
+import Hakyll.Core.Writable
+
+
+--------------------------------------------------------------------------------
+-- | This will copy any file directly by using a system call
+newtype CopyFile = CopyFile FilePath
+ deriving (Binary, Eq, Ord, Show, Typeable)
+
+
+--------------------------------------------------------------------------------
+instance Writable CopyFile where
+#if MIN_VERSION_directory(1,2,6)
+ write dst (Item _ (CopyFile src)) = copyFileWithMetadata src dst
+#else
+ write dst (Item _ (CopyFile src)) = copyFile src dst
+#endif
+--------------------------------------------------------------------------------
+copyFileCompiler :: Compiler (Item CopyFile)
+copyFileCompiler = do
+ identifier <- getUnderlying
+ provider <- compilerProvider <$> compilerAsk
+ makeItem $ CopyFile $ resourceFilePath provider identifier
+
+
+--------------------------------------------------------------------------------
+newtype TmpFile = TmpFile FilePath
+ deriving (Typeable)
+
+
+--------------------------------------------------------------------------------
+instance Binary TmpFile where
+ put _ = return ()
+ get = error $
+ "Hakyll.Core.File.TmpFile: You tried to load a TmpFile, however, " ++
+ "this is not possible since these are deleted as soon as possible."
+
+
+--------------------------------------------------------------------------------
+instance Writable TmpFile where
+ write dst (Item _ (TmpFile fp)) = renameFile fp dst
+
+
+--------------------------------------------------------------------------------
+-- | Create a tmp file
+newTmpFile :: String -- ^ Suffix and extension
+ -> Compiler TmpFile -- ^ Resulting tmp path
+newTmpFile suffix = do
+ path <- mkPath
+ compilerUnsafeIO $ makeDirectories path
+ debugCompiler $ "newTmpFile " ++ path
+ return $ TmpFile path
+ where
+ mkPath = do
+ rand <- compilerUnsafeIO $ randomIO :: Compiler Int
+ tmp <- tmpDirectory . compilerConfig <$> compilerAsk
+ let path = tmp </> Store.hash [show rand] ++ "-" ++ suffix
+ exists <- compilerUnsafeIO $ doesFileExist path
+ if exists then mkPath else return path
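+
+-- Usage sketch inside a compiler: request a scratch path and let an external
+-- program write to it (running the program itself is omitted here):
+--
+-- > TmpFile tmpPath <- newTmpFile "output.pdf"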
diff --git a/lib/Hakyll/Core/Identifier.hs b/lib/Hakyll/Core/Identifier.hs
new file mode 100644
index 0000000..777811c
--- /dev/null
+++ b/lib/Hakyll/Core/Identifier.hs
@@ -0,0 +1,80 @@
+--------------------------------------------------------------------------------
+-- | An identifier is a type used to uniquely identify an item. An identifier is
+-- conceptually similar to a file path. Examples of identifiers are:
+--
+-- * @posts/foo.markdown@
+--
+-- * @index@
+--
+-- * @error/404@
+{-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Identifier
+ ( Identifier
+ , fromFilePath
+ , toFilePath
+ , identifierVersion
+ , setVersion
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.DeepSeq (NFData (..))
+import Data.List (intercalate)
+import System.FilePath (dropTrailingPathSeparator, splitPath)
+
+
+--------------------------------------------------------------------------------
+import Data.Binary (Binary (..))
+import Data.Typeable (Typeable)
+import GHC.Exts (IsString, fromString)
+
+
+--------------------------------------------------------------------------------
+data Identifier = Identifier
+ { identifierVersion :: Maybe String
+ , identifierPath :: String
+ } deriving (Eq, Ord, Typeable)
+
+
+--------------------------------------------------------------------------------
+instance Binary Identifier where
+ put (Identifier v p) = put v >> put p
+ get = Identifier <$> get <*> get
+
+
+--------------------------------------------------------------------------------
+instance IsString Identifier where
+ fromString = fromFilePath
+
+
+--------------------------------------------------------------------------------
+instance NFData Identifier where
+ rnf (Identifier v p) = rnf v `seq` rnf p `seq` ()
+
+
+--------------------------------------------------------------------------------
+instance Show Identifier where
+ show i = case identifierVersion i of
+ Nothing -> toFilePath i
+ Just v -> toFilePath i ++ " (" ++ v ++ ")"
+
+
+--------------------------------------------------------------------------------
+-- | Parse an identifier from a string
+fromFilePath :: String -> Identifier
+fromFilePath = Identifier Nothing .
+ intercalate "/" . filter (not . null) . split'
+ where
+ split' = map dropTrailingPathSeparator . splitPath
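+
+-- Redundant path separators are normalised away, e.g. (a sketch):
+--
+-- > toFilePath (fromFilePath "posts//foo.markdown") == "posts/foo.markdown"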
+
+
+--------------------------------------------------------------------------------
+-- | Convert an identifier to a relative 'FilePath'
+toFilePath :: Identifier -> FilePath
+toFilePath = identifierPath
+
+
+--------------------------------------------------------------------------------
+setVersion :: Maybe String -> Identifier -> Identifier
+setVersion v i = i {identifierVersion = v}
diff --git a/lib/Hakyll/Core/Identifier/Pattern.hs b/lib/Hakyll/Core/Identifier/Pattern.hs
new file mode 100644
index 0000000..47ad21b
--- /dev/null
+++ b/lib/Hakyll/Core/Identifier/Pattern.hs
@@ -0,0 +1,322 @@
+--------------------------------------------------------------------------------
+-- | Whereas an 'Identifier' is used to specify a single item, a 'Pattern' is
+-- used to specify a list of items.
+--
+-- In most cases, globs are used for patterns.
+--
+-- A very simple example of such a pattern is @\"foo\/bar\"@. This pattern will
+-- only match the exact @foo\/bar@ identifier.
+--
+-- To match more than one identifier, there are different captures that one can
+-- use:
+--
+-- * @\"*\"@: matches at most one element of an identifier;
+--
+-- * @\"**\"@: matches one or more elements of an identifier.
+--
+-- Some examples:
+--
+-- * @\"foo\/*\"@ will match @\"foo\/bar\"@ and @\"foo\/foo\"@, but not
+-- @\"foo\/bar\/qux\"@;
+--
+-- * @\"**\"@ will match any identifier;
+--
+-- * @\"foo\/**\"@ will match @\"foo\/bar\"@ and @\"foo\/bar\/qux\"@, but not
+-- @\"bar\/foo\"@;
+--
+-- * @\"foo\/*.html\"@ will match all HTML files in the @\"foo\/\"@ directory.
+--
+-- The 'capture' function gives the user access to the strings captured by the
+-- capture elements in the pattern.
+module Hakyll.Core.Identifier.Pattern
+ ( -- * The pattern type
+ Pattern
+
+ -- * Creating patterns
+ , fromGlob
+ , fromList
+ , fromRegex
+ , fromVersion
+ , hasVersion
+ , hasNoVersion
+
+ -- * Composing patterns
+ , (.&&.)
+ , (.||.)
+ , complement
+
+ -- * Applying patterns
+ , matches
+ , filterMatches
+
+ -- * Capturing strings
+ , capture
+ , fromCapture
+ , fromCaptures
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Arrow ((&&&), (>>>))
+import Control.Monad (msum)
+import Data.Binary (Binary (..), getWord8, putWord8)
+import Data.List (inits, isPrefixOf, tails)
+import Data.Maybe (isJust)
+import Data.Set (Set)
+import qualified Data.Set as S
+
+
+--------------------------------------------------------------------------------
+import GHC.Exts (IsString, fromString)
+import Text.Regex.TDFA ((=~))
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Identifier
+
+
+--------------------------------------------------------------------------------
+-- | Elements of a glob pattern
+data GlobComponent
+ = Capture
+ | CaptureMany
+ | Literal String
+ deriving (Eq, Show)
+
+
+--------------------------------------------------------------------------------
+instance Binary GlobComponent where
+ put Capture = putWord8 0
+ put CaptureMany = putWord8 1
+ put (Literal s) = putWord8 2 >> put s
+
+ get = getWord8 >>= \t -> case t of
+ 0 -> pure Capture
+ 1 -> pure CaptureMany
+ 2 -> Literal <$> get
+ _ -> error "Data.Binary.get: Invalid GlobComponent"
+
+
+--------------------------------------------------------------------------------
+-- | Type that allows matching on identifiers
+data Pattern
+ = Everything
+ | Complement Pattern
+ | And Pattern Pattern
+ | Glob [GlobComponent]
+ | List (Set Identifier)
+ | Regex String
+ | Version (Maybe String)
+ deriving (Show)
+
+
+--------------------------------------------------------------------------------
+instance Binary Pattern where
+ put Everything = putWord8 0
+ put (Complement p) = putWord8 1 >> put p
+ put (And x y) = putWord8 2 >> put x >> put y
+ put (Glob g) = putWord8 3 >> put g
+ put (List is) = putWord8 4 >> put is
+ put (Regex r) = putWord8 5 >> put r
+ put (Version v) = putWord8 6 >> put v
+
+ get = getWord8 >>= \t -> case t of
+ 0 -> pure Everything
+ 1 -> Complement <$> get
+ 2 -> And <$> get <*> get
+ 3 -> Glob <$> get
+ 4 -> List <$> get
+ 5 -> Regex <$> get
+ _ -> Version <$> get
+
+
+--------------------------------------------------------------------------------
+instance IsString Pattern where
+ fromString = fromGlob
+
+
+--------------------------------------------------------------------------------
+instance Monoid Pattern where
+ mempty = Everything
+ mappend = (.&&.)
+
+
+--------------------------------------------------------------------------------
+-- | Parse a pattern from a string
+fromGlob :: String -> Pattern
+fromGlob = Glob . parse'
+ where
+ parse' str =
+ let (chunk, rest) = break (`elem` "\\*") str
+ in case rest of
+ ('\\' : x : xs) -> Literal (chunk ++ [x]) : parse' xs
+ ('*' : '*' : xs) -> Literal chunk : CaptureMany : parse' xs
+ ('*' : xs) -> Literal chunk : Capture : parse' xs
+ xs -> Literal chunk : Literal xs : []
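+
+-- A small sketch of how globs behave together with 'matches' (defined below):
+--
+-- > matches (fromGlob "posts/*.markdown") (fromFilePath "posts/foo.markdown")
+-- >     -- evaluates to True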
+
+
+--------------------------------------------------------------------------------
+-- | Create a 'Pattern' from a list of 'Identifier's it should match.
+--
+-- /Warning/: use this carefully with 'hasNoVersion' and 'hasVersion'. The
+-- 'Identifier's in the list /already/ have versions assigned, and the pattern
+-- will then only match the intersection of both versions.
+--
+-- A more concrete example,
+--
+-- > fromList ["foo.markdown"] .&&. hasVersion "pdf"
+--
+-- will not match anything! The @"foo.markdown"@ 'Identifier' has no version
+-- assigned, so the LHS of '.&&.' will only match this 'Identifier' with no
+-- version. The RHS only matches 'Identifier's with version set to @"pdf"@ --
+-- hence, this pattern matches nothing.
+--
+-- The correct way to use this is:
+--
+-- > fromList $ map (setVersion $ Just "pdf") ["foo.markdown"]
+fromList :: [Identifier] -> Pattern
+fromList = List . S.fromList
+
+
+--------------------------------------------------------------------------------
+-- | Create a 'Pattern' from a regex
+--
+-- Example:
+--
+-- > fromRegex "^foo/[^x]*$"
+fromRegex :: String -> Pattern
+fromRegex = Regex
+
+
+--------------------------------------------------------------------------------
+-- | Create a pattern which matches all items with the given version.
+fromVersion :: Maybe String -> Pattern
+fromVersion = Version
+
+
+--------------------------------------------------------------------------------
+-- | Specify a version, e.g.
+--
+-- > "foo/*.markdown" .&&. hasVersion "pdf"
+hasVersion :: String -> Pattern
+hasVersion = fromVersion . Just
+
+
+--------------------------------------------------------------------------------
+-- | Match only if the identifier has no version set, e.g.
+--
+-- > "foo/*.markdown" .&&. hasNoVersion
+hasNoVersion :: Pattern
+hasNoVersion = fromVersion Nothing
+
+
+--------------------------------------------------------------------------------
+-- | '&&' for patterns: the given identifier must match both subterms
+(.&&.) :: Pattern -> Pattern -> Pattern
+x .&&. y = And x y
+infixr 3 .&&.
+
+
+--------------------------------------------------------------------------------
+-- | '||' for patterns: the given identifier must match any subterm
+(.||.) :: Pattern -> Pattern -> Pattern
+x .||. y = complement (complement x `And` complement y) -- De Morgan's law
+infixr 2 .||.
+
+
+--------------------------------------------------------------------------------
+-- | Inverts a pattern, e.g.
+--
+-- > complement "foo/bar.html"
+--
+-- will match /anything/ except @\"foo\/bar.html\"@
+complement :: Pattern -> Pattern
+complement = Complement
+
+
+--------------------------------------------------------------------------------
+-- | Check if an identifier matches a pattern
+matches :: Pattern -> Identifier -> Bool
+matches Everything _ = True
+matches (Complement p) i = not $ matches p i
+matches (And x y) i = matches x i && matches y i
+matches (Glob p) i = isJust $ capture (Glob p) i
+matches (List l) i = i `S.member` l
+matches (Regex r) i = toFilePath i =~ r
+matches (Version v) i = identifierVersion i == v
+
+
+--------------------------------------------------------------------------------
+-- | Given a list of identifiers, retain only those that match the given pattern
+filterMatches :: Pattern -> [Identifier] -> [Identifier]
+filterMatches = filter . matches
+
+
+--------------------------------------------------------------------------------
+-- | Split a list at every possible point, generating a list of (init, tail)
+-- cases. The result is sorted with inits decreasing in length.
+splits :: [a] -> [([a], [a])]
+splits = inits &&& tails >>> uncurry zip >>> reverse
+
+
+--------------------------------------------------------------------------------
+-- | Match a glob against a pattern, generating a list of captures
+capture :: Pattern -> Identifier -> Maybe [String]
+capture (Glob p) i = capture' p (toFilePath i)
+capture _ _ = Nothing
+
+
+--------------------------------------------------------------------------------
+-- | Internal version of 'capture'
+capture' :: [GlobComponent] -> String -> Maybe [String]
+capture' [] [] = Just [] -- An empty match
+capture' [] _ = Nothing -- No match
+capture' (Literal l : ms) str
+ -- Match the literal against the string
+ | l `isPrefixOf` str = capture' ms $ drop (length l) str
+ | otherwise = Nothing
+capture' (Capture : ms) str =
+ -- Match until the next /
+ let (chunk, rest) = break (== '/') str
+ in msum $ [ fmap (i :) (capture' ms (t ++ rest)) | (i, t) <- splits chunk ]
+capture' (CaptureMany : ms) str =
+ -- Match everything
+ msum $ [ fmap (i :) (capture' ms t) | (i, t) <- splits str ]
+
+
+--------------------------------------------------------------------------------
+-- | Create an identifier from a pattern by filling in the captures with a given
+-- string
+--
+-- Example:
+--
+-- > fromCapture (fromGlob "tags/*") "foo"
+--
+-- Result:
+--
+-- > "tags/foo"
+fromCapture :: Pattern -> String -> Identifier
+fromCapture pattern = fromCaptures pattern . repeat
+
+
+--------------------------------------------------------------------------------
+-- | Create an identifier from a pattern by filling in the captures with the
+-- given list of strings
+fromCaptures :: Pattern -> [String] -> Identifier
+fromCaptures (Glob p) = fromFilePath . fromCaptures' p
+fromCaptures _ = error $
+ "Hakyll.Core.Identifier.Pattern.fromCaptures: fromCaptures only works " ++
+ "on simple globs!"
+
+
+--------------------------------------------------------------------------------
+-- | Internally used version of 'fromCaptures'
+fromCaptures' :: [GlobComponent] -> [String] -> String
+fromCaptures' [] _ = mempty
+fromCaptures' (m : ms) [] = case m of
+ Literal l -> l `mappend` fromCaptures' ms []
+ _ -> error $ "Hakyll.Core.Identifier.Pattern.fromCaptures': "
+ ++ "identifier list exhausted"
+fromCaptures' (m : ms) ids@(i : is) = case m of
+ Literal l -> l `mappend` fromCaptures' ms ids
+ _ -> i `mappend` fromCaptures' ms is
diff --git a/lib/Hakyll/Core/Item.hs b/lib/Hakyll/Core/Item.hs
new file mode 100644
index 0000000..e05df42
--- /dev/null
+++ b/lib/Hakyll/Core/Item.hs
@@ -0,0 +1,63 @@
+--------------------------------------------------------------------------------
+-- | An item is a combination of some content and its 'Identifier'. This way, we
+-- can still use the 'Identifier' to access metadata.
+{-# LANGUAGE DeriveDataTypeable #-}
+module Hakyll.Core.Item
+ ( Item (..)
+ , itemSetBody
+ , withItemBody
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Data.Binary (Binary (..))
+import Data.Foldable (Foldable (..))
+import Data.Typeable (Typeable)
+import Prelude hiding (foldr)
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Identifier
+
+
+--------------------------------------------------------------------------------
+data Item a = Item
+ { itemIdentifier :: Identifier
+ , itemBody :: a
+ } deriving (Show, Typeable)
+
+
+--------------------------------------------------------------------------------
+instance Functor Item where
+ fmap f (Item i x) = Item i (f x)
+
+
+--------------------------------------------------------------------------------
+instance Foldable Item where
+ foldr f z (Item _ x) = f x z
+
+
+--------------------------------------------------------------------------------
+instance Traversable Item where
+ traverse f (Item i x) = Item i <$> f x
+
+
+--------------------------------------------------------------------------------
+instance Binary a => Binary (Item a) where
+ put (Item i x) = put i >> put x
+ get = Item <$> get <*> get
+
+
+--------------------------------------------------------------------------------
+itemSetBody :: a -> Item b -> Item a
+itemSetBody x (Item i _) = Item i x
+
+
+--------------------------------------------------------------------------------
+-- | Perform a compiler action on the item body. This is the same as 'traverse',
+-- but looks less intimidating.
+--
+-- > withItemBody = traverse
+withItemBody :: (a -> Compiler b) -> Item a -> Compiler (Item b)
+withItemBody = traverse
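+
+-- Usage sketch ('getResourceBody' comes from "Hakyll.Core.Compiler",
+-- 'toUpper' from "Data.Char"):
+--
+-- > getResourceBody >>= withItemBody (return . map toUpper)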
diff --git a/lib/Hakyll/Core/Item/SomeItem.hs b/lib/Hakyll/Core/Item/SomeItem.hs
new file mode 100644
index 0000000..c5ba0df
--- /dev/null
+++ b/lib/Hakyll/Core/Item/SomeItem.hs
@@ -0,0 +1,23 @@
+--------------------------------------------------------------------------------
+{-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE ExistentialQuantification #-}
+module Hakyll.Core.Item.SomeItem
+ ( SomeItem (..)
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Data.Binary (Binary)
+import Data.Typeable (Typeable)
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Item
+import Hakyll.Core.Writable
+
+
+--------------------------------------------------------------------------------
+-- | An existential type, mostly for internal usage.
+data SomeItem = forall a.
+ (Binary a, Typeable a, Writable a) => SomeItem (Item a)
+ deriving (Typeable)
diff --git a/lib/Hakyll/Core/Logger.hs b/lib/Hakyll/Core/Logger.hs
new file mode 100644
index 0000000..6f950a6
--- /dev/null
+++ b/lib/Hakyll/Core/Logger.hs
@@ -0,0 +1,97 @@
+--------------------------------------------------------------------------------
+-- | Produce pretty, thread-safe logs
+module Hakyll.Core.Logger
+ ( Verbosity (..)
+ , Logger
+ , new
+ , flush
+ , error
+ , header
+ , message
+ , debug
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Concurrent (forkIO)
+import Control.Concurrent.Chan (Chan, newChan, readChan, writeChan)
+import Control.Concurrent.MVar (MVar, newEmptyMVar, putMVar, takeMVar)
+import Control.Monad (forever)
+import Control.Monad.Trans (MonadIO, liftIO)
+import Prelude hiding (error)
+
+
+--------------------------------------------------------------------------------
+data Verbosity
+ = Error
+ | Message
+ | Debug
+ deriving (Eq, Ord, Show)
+
+
+--------------------------------------------------------------------------------
+-- | Logger structure. Very complicated.
+data Logger = Logger
+ { loggerChan :: Chan (Maybe String) -- ^ Nothing marks the end
+ , loggerSync :: MVar () -- ^ Used for sync on quit
+ , loggerSink :: String -> IO () -- ^ Out sink
+ , loggerVerbosity :: Verbosity -- ^ Verbosity
+ }
+
+
+--------------------------------------------------------------------------------
+-- | Create a new logger
+new :: Verbosity -> IO Logger
+new vbty = do
+ logger <- Logger <$>
+ newChan <*> newEmptyMVar <*> pure putStrLn <*> pure vbty
+ _ <- forkIO $ loggerThread logger
+ return logger
+ where
+ loggerThread logger = forever $ do
+ msg <- readChan $ loggerChan logger
+ case msg of
+ -- Stop: sync
+ Nothing -> putMVar (loggerSync logger) ()
+ -- Print and continue
+ Just m -> loggerSink logger m
+
+
+--------------------------------------------------------------------------------
+-- | Flush the logger (blocks until flushed)
+flush :: Logger -> IO ()
+flush logger = do
+ writeChan (loggerChan logger) Nothing
+ () <- takeMVar $ loggerSync logger
+ return ()
+
+
+--------------------------------------------------------------------------------
+string :: MonadIO m
+ => Logger -- ^ Logger
+ -> Verbosity -- ^ Verbosity of the string
+ -> String -- ^ Message to log
+ -> m () -- ^ No result
+string l v m
+ | loggerVerbosity l >= v = liftIO $ writeChan (loggerChan l) (Just m)
+ | otherwise = return ()
+
+
+--------------------------------------------------------------------------------
+error :: MonadIO m => Logger -> String -> m ()
+error l m = string l Error $ " [ERROR] " ++ m
+
+
+--------------------------------------------------------------------------------
+header :: MonadIO m => Logger -> String -> m ()
+header l = string l Message
+
+
+--------------------------------------------------------------------------------
+message :: MonadIO m => Logger -> String -> m ()
+message l m = string l Message $ " " ++ m
+
+
+--------------------------------------------------------------------------------
+debug :: MonadIO m => Logger -> String -> m ()
+debug l m = string l Debug $ " [DEBUG] " ++ m
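+
+-- Standalone usage sketch:
+--
+-- > main = do
+-- >     logger <- new Message
+-- >     message logger "building site"
+-- >     flush logger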
diff --git a/lib/Hakyll/Core/Metadata.hs b/lib/Hakyll/Core/Metadata.hs
new file mode 100644
index 0000000..1cf536e
--- /dev/null
+++ b/lib/Hakyll/Core/Metadata.hs
@@ -0,0 +1,138 @@
+--------------------------------------------------------------------------------
+module Hakyll.Core.Metadata
+ ( Metadata
+ , lookupString
+ , lookupStringList
+
+ , MonadMetadata (..)
+ , getMetadataField
+ , getMetadataField'
+ , makePatternDependency
+
+ , BinaryMetadata (..)
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Arrow (second)
+import Control.Monad (forM)
+import Data.Binary (Binary (..), getWord8,
+ putWord8, Get)
+import qualified Data.HashMap.Strict as HMS
+import qualified Data.Set as S
+import qualified Data.Text as T
+import qualified Data.Vector as V
+import qualified Data.Yaml.Extended as Yaml
+import Hakyll.Core.Dependencies
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+
+
+--------------------------------------------------------------------------------
+type Metadata = Yaml.Object
+
+
+--------------------------------------------------------------------------------
+lookupString :: String -> Metadata -> Maybe String
+lookupString key meta = HMS.lookup (T.pack key) meta >>= Yaml.toString
+
+
+--------------------------------------------------------------------------------
+lookupStringList :: String -> Metadata -> Maybe [String]
+lookupStringList key meta =
+ HMS.lookup (T.pack key) meta >>= Yaml.toList >>= mapM Yaml.toString
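+
+-- Usage sketch inside any 'MonadMetadata' computation ('fromMaybe' comes from
+-- "Data.Maybe"; the @"tags"@ key is illustrative):
+--
+-- > metadata <- getMetadata identifier
+-- > let tags = fromMaybe [] (lookupStringList "tags" metadata)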
+
+
+--------------------------------------------------------------------------------
+class Monad m => MonadMetadata m where
+ getMetadata :: Identifier -> m Metadata
+ getMatches :: Pattern -> m [Identifier]
+
+ getAllMetadata :: Pattern -> m [(Identifier, Metadata)]
+ getAllMetadata pattern = do
+ matches' <- getMatches pattern
+ forM matches' $ \id' -> do
+ metadata <- getMetadata id'
+ return (id', metadata)
+
+
+--------------------------------------------------------------------------------
+getMetadataField :: MonadMetadata m => Identifier -> String -> m (Maybe String)
+getMetadataField identifier key = do
+ metadata <- getMetadata identifier
+ return $ lookupString key metadata
+
+
+--------------------------------------------------------------------------------
+-- | Version of 'getMetadataField' which throws an error if the field does not
+-- exist.
+getMetadataField' :: MonadMetadata m => Identifier -> String -> m String
+getMetadataField' identifier key = do
+ field <- getMetadataField identifier key
+ case field of
+ Just v -> return v
+ Nothing -> fail $ "Hakyll.Core.Metadata.getMetadataField': " ++
+ "Item " ++ show identifier ++ " has no metadata field " ++ show key
+
+
+--------------------------------------------------------------------------------
+makePatternDependency :: MonadMetadata m => Pattern -> m Dependency
+makePatternDependency pattern = do
+ matches' <- getMatches pattern
+ return $ PatternDependency pattern (S.fromList matches')
+
+
+--------------------------------------------------------------------------------
+-- | Newtype wrapper for serialization.
+newtype BinaryMetadata = BinaryMetadata
+ {unBinaryMetadata :: Metadata}
+
+
+instance Binary BinaryMetadata where
+ put (BinaryMetadata obj) = put (BinaryYaml $ Yaml.Object obj)
+ get = do
+ BinaryYaml (Yaml.Object obj) <- get
+ return $ BinaryMetadata obj
+
+
+--------------------------------------------------------------------------------
+newtype BinaryYaml = BinaryYaml {unBinaryYaml :: Yaml.Value}
+
+
+--------------------------------------------------------------------------------
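+-- Binary round-trip for YAML values: each constructor is encoded as a
+-- one-byte tag (0 = Object, 1 = Array, 2 = String, 3 = Number, 4 = Bool,
+-- 5 = Null) followed by its payload.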
+instance Binary BinaryYaml where
+ put (BinaryYaml yaml) = case yaml of
+ Yaml.Object obj -> do
+ putWord8 0
+ let list :: [(T.Text, BinaryYaml)]
+ list = map (second BinaryYaml) $ HMS.toList obj
+ put list
+
+ Yaml.Array arr -> do
+ putWord8 1
+ let list = map BinaryYaml (V.toList arr) :: [BinaryYaml]
+ put list
+
+ Yaml.String s -> putWord8 2 >> put s
+ Yaml.Number n -> putWord8 3 >> put n
+ Yaml.Bool b -> putWord8 4 >> put b
+ Yaml.Null -> putWord8 5
+
+ get = do
+ tag <- getWord8
+ case tag of
+ 0 -> do
+ list <- get :: Get [(T.Text, BinaryYaml)]
+ return $ BinaryYaml $ Yaml.Object $
+ HMS.fromList $ map (second unBinaryYaml) list
+
+ 1 -> do
+ list <- get :: Get [BinaryYaml]
+ return $ BinaryYaml $
+ Yaml.Array $ V.fromList $ map unBinaryYaml list
+
+ 2 -> BinaryYaml . Yaml.String <$> get
+ 3 -> BinaryYaml . Yaml.Number <$> get
+ 4 -> BinaryYaml . Yaml.Bool <$> get
+ 5 -> return $ BinaryYaml Yaml.Null
+ _ -> fail "Data.Binary.get: Invalid Binary Metadata"
diff --git a/lib/Hakyll/Core/Provider.hs b/lib/Hakyll/Core/Provider.hs
new file mode 100644
index 0000000..384f5b1
--- /dev/null
+++ b/lib/Hakyll/Core/Provider.hs
@@ -0,0 +1,43 @@
+--------------------------------------------------------------------------------
+-- | This module provides a wrapper API around the file system which does some
+-- caching.
+module Hakyll.Core.Provider
+ ( -- * Constructing resource providers
+ Internal.Provider
+ , newProvider
+
+ -- * Querying resource properties
+ , Internal.resourceList
+ , Internal.resourceExists
+ , Internal.resourceFilePath
+ , Internal.resourceModified
+ , Internal.resourceModificationTime
+
+ -- * Access to raw resource content
+ , Internal.resourceString
+ , Internal.resourceLBS
+
+ -- * Access to metadata and body content
+ , Internal.resourceMetadata
+ , Internal.resourceBody
+ ) where
+
+
+--------------------------------------------------------------------------------
+import qualified Hakyll.Core.Provider.Internal as Internal
+import qualified Hakyll.Core.Provider.MetadataCache as Internal
+import Hakyll.Core.Store (Store)
+
+
+--------------------------------------------------------------------------------
+-- | Create a resource provider
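+--
+-- A typical invocation (illustrative; it mirrors how the runtime sets up its
+-- provider, with "site" as an example directory):
+--
+-- > provider <- newProvider store (shouldIgnoreFile config) "site"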
+newProvider :: Store -- ^ Store to use
+ -> (FilePath -> IO Bool) -- ^ Should we ignore this file?
+ -> FilePath -- ^ Search directory
+ -> IO Internal.Provider -- ^ Resulting provider
+newProvider store ignore directory = do
+ -- Delete metadata cache where necessary
+ p <- Internal.newProvider store ignore directory
+ mapM_ (Internal.resourceInvalidateMetadataCache p) $
+ filter (Internal.resourceModified p) $ Internal.resourceList p
+ return p
diff --git a/lib/Hakyll/Core/Provider/Internal.hs b/lib/Hakyll/Core/Provider/Internal.hs
new file mode 100644
index 0000000..c298653
--- /dev/null
+++ b/lib/Hakyll/Core/Provider/Internal.hs
@@ -0,0 +1,202 @@
+--------------------------------------------------------------------------------
+{-# LANGUAGE CPP #-}
+{-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Provider.Internal
+ ( ResourceInfo (..)
+ , Provider (..)
+ , newProvider
+
+ , resourceList
+ , resourceExists
+
+ , resourceFilePath
+ , resourceString
+ , resourceLBS
+
+ , resourceModified
+ , resourceModificationTime
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.DeepSeq (NFData (..), deepseq)
+import Control.Monad (forM)
+import Data.Binary (Binary (..))
+import qualified Data.ByteString.Lazy as BL
+import Data.Map (Map)
+import qualified Data.Map as M
+import Data.Maybe (fromMaybe)
+import Data.Set (Set)
+import qualified Data.Set as S
+import Data.Time (Day (..), UTCTime (..))
+import Data.Typeable (Typeable)
+import System.Directory (getModificationTime)
+import System.FilePath (addExtension, (</>))
+
+
+--------------------------------------------------------------------------------
+#if !MIN_VERSION_directory(1,2,0)
+import Data.Time (readTime)
+import System.Locale (defaultTimeLocale)
+import System.Time (formatCalendarTime, toCalendarTime)
+#endif
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Identifier
+import Hakyll.Core.Store (Store)
+import qualified Hakyll.Core.Store as Store
+import Hakyll.Core.Util.File
+
+
+--------------------------------------------------------------------------------
+-- | Because UTCTime doesn't have a Binary instance...
+newtype BinaryTime = BinaryTime {unBinaryTime :: UTCTime}
+ deriving (Eq, NFData, Ord, Show, Typeable)
+
+
+--------------------------------------------------------------------------------
+instance Binary BinaryTime where
+ put (BinaryTime (UTCTime (ModifiedJulianDay d) dt)) =
+ put d >> put (toRational dt)
+
+ get = fmap BinaryTime $ UTCTime
+ <$> (ModifiedJulianDay <$> get)
+ <*> (fromRational <$> get)
+
+
+--------------------------------------------------------------------------------
+data ResourceInfo = ResourceInfo
+ { resourceInfoModified :: BinaryTime
+ , resourceInfoMetadata :: Maybe Identifier
+ } deriving (Show, Typeable)
+
+
+--------------------------------------------------------------------------------
+instance Binary ResourceInfo where
+ put (ResourceInfo mtime meta) = put mtime >> put meta
+ get = ResourceInfo <$> get <*> get
+
+
+--------------------------------------------------------------------------------
+instance NFData ResourceInfo where
+ rnf (ResourceInfo mtime meta) = rnf mtime `seq` rnf meta `seq` ()
+
+
+--------------------------------------------------------------------------------
+-- | Responsible for retrieving and listing resources
+data Provider = Provider
+    { -- | Top of the provided directory
+ providerDirectory :: FilePath
+ , -- | A list of all files found
+ providerFiles :: Map Identifier ResourceInfo
+ , -- | A list of the files from the previous run
+ providerOldFiles :: Map Identifier ResourceInfo
+ , -- | Underlying persistent store for caching
+ providerStore :: Store
+ } deriving (Show)
+
+
+--------------------------------------------------------------------------------
+-- | Create a resource provider
+newProvider :: Store -- ^ Store to use
+ -> (FilePath -> IO Bool) -- ^ Should we ignore this file?
+ -> FilePath -- ^ Search directory
+ -> IO Provider -- ^ Resulting provider
+newProvider store ignore directory = do
+ list <- map fromFilePath <$> getRecursiveContents ignore directory
+ let universe = S.fromList list
+ files <- fmap (maxmtime . M.fromList) $ forM list $ \identifier -> do
+ rInfo <- getResourceInfo directory universe identifier
+ return (identifier, rInfo)
+
+ -- Get the old files from the store, and then immediately replace them by
+ -- the new files.
+ oldFiles <- fromMaybe mempty . Store.toMaybe <$> Store.get store oldKey
+ oldFiles `deepseq` Store.set store oldKey files
+
+ return $ Provider directory files oldFiles store
+ where
+ oldKey = ["Hakyll.Core.Provider.Internal.newProvider", "oldFiles"]
+
+ -- Update modified if metadata is modified
+ maxmtime files = flip M.map files $ \rInfo@(ResourceInfo mtime meta) ->
+ let metaMod = fmap resourceInfoModified $ meta >>= flip M.lookup files
+ in rInfo {resourceInfoModified = maybe mtime (max mtime) metaMod}
+
+
+--------------------------------------------------------------------------------
+getResourceInfo :: FilePath -> Set Identifier -> Identifier -> IO ResourceInfo
+getResourceInfo directory universe identifier = do
+ mtime <- fileModificationTime $ directory </> toFilePath identifier
+ return $ ResourceInfo (BinaryTime mtime) $
+ if mdRsc `S.member` universe then Just mdRsc else Nothing
+ where
+ mdRsc = fromFilePath $ flip addExtension "metadata" $ toFilePath identifier
+
+
+--------------------------------------------------------------------------------
+resourceList :: Provider -> [Identifier]
+resourceList = M.keys . providerFiles
+
+
+--------------------------------------------------------------------------------
+-- | Check if a given resource exists
+resourceExists :: Provider -> Identifier -> Bool
+resourceExists provider =
+ (`M.member` providerFiles provider) . setVersion Nothing
+
+
+--------------------------------------------------------------------------------
+resourceFilePath :: Provider -> Identifier -> FilePath
+resourceFilePath p i = providerDirectory p </> toFilePath i
+
+
+--------------------------------------------------------------------------------
+-- | Get the raw body of a resource as a string
+resourceString :: Provider -> Identifier -> IO String
+resourceString p i = readFile $ resourceFilePath p i
+
+
+--------------------------------------------------------------------------------
+-- | Get the raw body of a resource as a lazy bytestring
+resourceLBS :: Provider -> Identifier -> IO BL.ByteString
+resourceLBS p i = BL.readFile $ resourceFilePath p i
+
+
+--------------------------------------------------------------------------------
+-- | A resource is modified if it or its metadata has changed
+resourceModified :: Provider -> Identifier -> Bool
+resourceModified p r = case (ri, oldRi) of
+ (Nothing, _) -> False
+ (Just _, Nothing) -> True
+ (Just n, Just o) ->
+ resourceInfoModified n > resourceInfoModified o ||
+ resourceInfoMetadata n /= resourceInfoMetadata o
+ where
+ normal = setVersion Nothing r
+ ri = M.lookup normal (providerFiles p)
+ oldRi = M.lookup normal (providerOldFiles p)
+
+
+--------------------------------------------------------------------------------
+resourceModificationTime :: Provider -> Identifier -> UTCTime
+resourceModificationTime p i =
+ case M.lookup (setVersion Nothing i) (providerFiles p) of
+ Just ri -> unBinaryTime $ resourceInfoModified ri
+ Nothing -> error $
+ "Hakyll.Core.Provider.Internal.resourceModificationTime: " ++
+ "resource " ++ show i ++ " does not exist"
+
+
+--------------------------------------------------------------------------------
+fileModificationTime :: FilePath -> IO UTCTime
+fileModificationTime fp = do
+#if MIN_VERSION_directory(1,2,0)
+ getModificationTime fp
+#else
+ ct <- toCalendarTime =<< getModificationTime fp
+ let str = formatCalendarTime defaultTimeLocale "%s" ct
+ return $ readTime defaultTimeLocale "%s" str
+#endif
diff --git a/lib/Hakyll/Core/Provider/Metadata.hs b/lib/Hakyll/Core/Provider/Metadata.hs
new file mode 100644
index 0000000..6285ce1
--- /dev/null
+++ b/lib/Hakyll/Core/Provider/Metadata.hs
@@ -0,0 +1,151 @@
+--------------------------------------------------------------------------------
+-- | Internal module to parse metadata
+{-# LANGUAGE BangPatterns #-}
+{-# LANGUAGE RecordWildCards #-}
+module Hakyll.Core.Provider.Metadata
+ ( loadMetadata
+ , parsePage
+
+ , MetadataException (..)
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Arrow (second)
+import Control.Exception (Exception, throwIO)
+import Control.Monad (guard)
+import qualified Data.ByteString as B
+import qualified Data.ByteString.Char8 as BC
+import Data.List.Extended (breakWhen)
+import qualified Data.Map as M
+import Data.Maybe (fromMaybe)
+import Data.Monoid ((<>))
+import qualified Data.Text as T
+import qualified Data.Text.Encoding as T
+import qualified Data.Yaml as Yaml
+import Hakyll.Core.Identifier
+import Hakyll.Core.Metadata
+import Hakyll.Core.Provider.Internal
+import System.IO as IO
+
+
+--------------------------------------------------------------------------------
+loadMetadata :: Provider -> Identifier -> IO (Metadata, Maybe String)
+loadMetadata p identifier = do
+ hasHeader <- probablyHasMetadataHeader fp
+ (md, body) <- if hasHeader
+ then second Just <$> loadMetadataHeader fp
+ else return (mempty, Nothing)
+
+ emd <- case mi of
+ Nothing -> return mempty
+ Just mi' -> loadMetadataFile $ resourceFilePath p mi'
+
+ return (md <> emd, body)
+ where
+ normal = setVersion Nothing identifier
+ fp = resourceFilePath p identifier
+ mi = M.lookup normal (providerFiles p) >>= resourceInfoMetadata
+
+
+--------------------------------------------------------------------------------
+loadMetadataHeader :: FilePath -> IO (Metadata, String)
+loadMetadataHeader fp = do
+ fileContent <- readFile fp
+ case parsePage fileContent of
+ Right x -> return x
+ Left err -> throwIO $ MetadataException fp err
+
+
+--------------------------------------------------------------------------------
+loadMetadataFile :: FilePath -> IO Metadata
+loadMetadataFile fp = do
+ fileContent <- B.readFile fp
+ let errOrMeta = Yaml.decodeEither' fileContent
+ either (fail . show) return errOrMeta
+
+
+--------------------------------------------------------------------------------
+-- | Check if a file "probably" has a metadata header. The main goal of this is
+-- to exclude binary files (which are unlikely to start with "---").
+probablyHasMetadataHeader :: FilePath -> IO Bool
+probablyHasMetadataHeader fp = do
+ handle <- IO.openFile fp IO.ReadMode
+ bs <- BC.hGet handle 1024
+ IO.hClose handle
+ return $ isMetadataHeader bs
+ where
+ isMetadataHeader bs =
+ let pre = BC.takeWhile (\x -> x /= '\n' && x /= '\r') bs
+ in BC.length pre >= 3 && BC.all (== '-') pre
+
+
+--------------------------------------------------------------------------------
+-- | Parse the page metadata and body.
+splitMetadata :: String -> (Maybe String, String)
+splitMetadata str0 = fromMaybe (Nothing, str0) $ do
+ guard $ leading >= 3
+ let !str1 = drop leading str0
+ guard $ all isNewline (take 1 str1)
+ let !(!meta, !content0) = breakWhen isTrailing str1
+ guard $ not $ null content0
+ let !content1 = drop (leading + 1) content0
+ !content2 = dropWhile isNewline $ dropWhile isInlineSpace content1
+ -- Adding this newline fixes the line numbers reported by the YAML parser.
+ -- It's a bit ugly but it works.
+ return (Just ('\n' : meta), content2)
+ where
+ -- Parse the leading "---"
+ !leading = length $ takeWhile (== '-') str0
+
+ -- Predicate to recognize the trailing "---" or "..."
+ isTrailing [] = False
+ isTrailing (x : xs) =
+ isNewline x && length (takeWhile isDash xs) == leading
+
+ -- Characters
+ isNewline c = c == '\n' || c == '\r'
+ isDash c = c == '-' || c == '.'
+ isInlineSpace c = c == '\t' || c == ' '
+
+
+--------------------------------------------------------------------------------
+parseMetadata :: String -> Either Yaml.ParseException Metadata
+parseMetadata = Yaml.decodeEither' . T.encodeUtf8 . T.pack
+
+
+--------------------------------------------------------------------------------
+parsePage :: String -> Either Yaml.ParseException (Metadata, String)
+parsePage fileContent = case mbMetaBlock of
+ Nothing -> return (mempty, content)
+ Just metaBlock -> case parseMetadata metaBlock of
+ Left err -> Left err
+ Right meta -> return (meta, content)
+ where
+ !(!mbMetaBlock, !content) = splitMetadata fileContent
+
+
+--------------------------------------------------------------------------------
+-- | Thrown in the IO monad if things go wrong. Provides a nice-ish error
+-- message.
+data MetadataException = MetadataException FilePath Yaml.ParseException
+
+
+--------------------------------------------------------------------------------
+instance Exception MetadataException
+
+
+--------------------------------------------------------------------------------
+instance Show MetadataException where
+ show (MetadataException fp err) =
+ fp ++ ": " ++ Yaml.prettyPrintParseException err ++ hint
+
+ where
+ hint = case err of
+ Yaml.InvalidYaml (Just (Yaml.YamlParseException {..}))
+ | yamlProblem == problem -> "\n" ++
+ "Hint: if the metadata value contains characters such\n" ++
+ "as ':' or '-', try enclosing it in quotes."
+ _ -> ""
+
+ problem = "mapping values are not allowed in this context"
diff --git a/lib/Hakyll/Core/Provider/MetadataCache.hs b/lib/Hakyll/Core/Provider/MetadataCache.hs
new file mode 100644
index 0000000..46dbf3e
--- /dev/null
+++ b/lib/Hakyll/Core/Provider/MetadataCache.hs
@@ -0,0 +1,62 @@
+--------------------------------------------------------------------------------
+module Hakyll.Core.Provider.MetadataCache
+ ( resourceMetadata
+ , resourceBody
+ , resourceInvalidateMetadataCache
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Monad (unless)
+import Hakyll.Core.Identifier
+import Hakyll.Core.Metadata
+import Hakyll.Core.Provider.Internal
+import Hakyll.Core.Provider.Metadata
+import qualified Hakyll.Core.Store as Store
+
+
+--------------------------------------------------------------------------------
+resourceMetadata :: Provider -> Identifier -> IO Metadata
+resourceMetadata p r
+ | not (resourceExists p r) = return mempty
+ | otherwise = do
+ -- TODO keep time in md cache
+ load p r
+ Store.Found (BinaryMetadata md) <- Store.get (providerStore p)
+ [name, toFilePath r, "metadata"]
+ return md
+
+
+--------------------------------------------------------------------------------
+resourceBody :: Provider -> Identifier -> IO String
+resourceBody p r = do
+ load p r
+ Store.Found bd <- Store.get (providerStore p)
+ [name, toFilePath r, "body"]
+ maybe (resourceString p r) return bd
+
+
+--------------------------------------------------------------------------------
+resourceInvalidateMetadataCache :: Provider -> Identifier -> IO ()
+resourceInvalidateMetadataCache p r = do
+ Store.delete (providerStore p) [name, toFilePath r, "metadata"]
+ Store.delete (providerStore p) [name, toFilePath r, "body"]
+
+
+--------------------------------------------------------------------------------
+load :: Provider -> Identifier -> IO ()
+load p r = do
+ mmof <- Store.isMember store mdk
+ unless mmof $ do
+ (md, body) <- loadMetadata p r
+ Store.set store mdk (BinaryMetadata md)
+ Store.set store bk body
+ where
+ store = providerStore p
+ mdk = [name, toFilePath r, "metadata"]
+ bk = [name, toFilePath r, "body"]
+
+
+--------------------------------------------------------------------------------
+name :: String
+name = "Hakyll.Core.Resource.Provider.MetadataCache"
diff --git a/lib/Hakyll/Core/Routes.hs b/lib/Hakyll/Core/Routes.hs
new file mode 100644
index 0000000..513725f
--- /dev/null
+++ b/lib/Hakyll/Core/Routes.hs
@@ -0,0 +1,194 @@
+--------------------------------------------------------------------------------
+-- | Once a target is compiled, the user usually wants to save it to the disk.
+-- This is where the 'Routes' type comes in; it determines where a certain
+-- target should be written.
+--
+-- Suppose we have an item @foo\/bar.markdown@. We can render this to
+-- @foo\/bar.html@ using:
+--
+-- > route "foo/bar.markdown" (setExtension ".html")
+--
+-- If we do not want to change the extension, we can use 'idRoute', the simplest
+-- route available:
+--
+-- > route "foo/bar.markdown" idRoute
+--
+-- That will route @foo\/bar.markdown@ to @foo\/bar.markdown@.
+--
+-- Note that the extension says nothing about the content! If you set the
+-- extension to @.html@, it is your own responsibility to ensure that the
+-- content is indeed HTML.
+--
+-- Finally, some special cases:
+--
+-- * If there is no route for an item, this item will not be routed, so it will
+-- not appear in your site directory.
+--
+-- * If an item matches multiple routes, the first rule will be chosen.
+{-# LANGUAGE Rank2Types #-}
+module Hakyll.Core.Routes
+ ( UsedMetadata
+ , Routes
+ , runRoutes
+ , idRoute
+ , setExtension
+ , matchRoute
+ , customRoute
+ , constRoute
+ , gsubRoute
+ , metadataRoute
+ , composeRoutes
+ ) where
+
+
+--------------------------------------------------------------------------------
+import System.FilePath (replaceExtension)
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Metadata
+import Hakyll.Core.Provider
+import Hakyll.Core.Util.String
+
+
+--------------------------------------------------------------------------------
+-- | When you run a route, it's useful to know whether or not it used
+-- metadata. This allows us to do more granular dependency analysis.
+type UsedMetadata = Bool
+
+
+--------------------------------------------------------------------------------
+data RoutesRead = RoutesRead
+ { routesProvider :: Provider
+ , routesUnderlying :: Identifier
+ }
+
+
+--------------------------------------------------------------------------------
+-- | Type used for a route
+newtype Routes = Routes
+ { unRoutes :: RoutesRead -> Identifier -> IO (Maybe FilePath, UsedMetadata)
+ }
+
+
+--------------------------------------------------------------------------------
+instance Monoid Routes where
+ mempty = Routes $ \_ _ -> return (Nothing, False)
+ mappend (Routes f) (Routes g) = Routes $ \p id' -> do
+ (mfp, um) <- f p id'
+ case mfp of
+ Nothing -> g p id'
+ Just _ -> return (mfp, um)
+
+
+--------------------------------------------------------------------------------
+-- | Apply a route to an identifier
+runRoutes :: Routes -> Provider -> Identifier
+ -> IO (Maybe FilePath, UsedMetadata)
+runRoutes routes provider identifier =
+ unRoutes routes (RoutesRead provider identifier) identifier
+
+
+--------------------------------------------------------------------------------
+-- | A route that uses the identifier as filepath. For example, the target with
+-- ID @foo\/bar@ will be written to the file @foo\/bar@.
+idRoute :: Routes
+idRoute = customRoute toFilePath
+
+
+--------------------------------------------------------------------------------
+-- | Set (or replace) the extension of a route.
+--
+-- Example:
+--
+-- > runRoutes (setExtension "html") "foo/bar"
+--
+-- Result:
+--
+-- > Just "foo/bar.html"
+--
+-- Example:
+--
+-- > runRoutes (setExtension "html") "posts/the-art-of-trolling.markdown"
+--
+-- Result:
+--
+-- > Just "posts/the-art-of-trolling.html"
+setExtension :: String -> Routes
+setExtension extension = customRoute $
+ (`replaceExtension` extension) . toFilePath
+
+
+--------------------------------------------------------------------------------
+-- | Apply the route if the identifier matches the given pattern, fail
+-- otherwise
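+--
+-- For example (illustrative):
+--
+-- > matchRoute "posts/*" (setExtension "html")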
+matchRoute :: Pattern -> Routes -> Routes
+matchRoute pattern (Routes route) = Routes $ \p id' ->
+ if matches pattern id' then route p id' else return (Nothing, False)
+
+
+--------------------------------------------------------------------------------
+-- | Create a custom route. This should almost always be used with
+-- 'matchRoute'
+customRoute :: (Identifier -> FilePath) -> Routes
+customRoute f = Routes $ const $ \id' -> return (Just (f id'), False)
+
+
+--------------------------------------------------------------------------------
+-- | A route that always gives the same result. Obviously, you should only use
+-- this for a single compilation rule.
+constRoute :: FilePath -> Routes
+constRoute = customRoute . const
+
+
+--------------------------------------------------------------------------------
+-- | Create a gsub route
+--
+-- Example:
+--
+-- > runRoutes (gsubRoute "rss/" (const "")) "tags/rss/bar.xml"
+--
+-- Result:
+--
+-- > Just "tags/bar.xml"
+gsubRoute :: String -- ^ Pattern
+ -> (String -> String) -- ^ Replacement
+ -> Routes -- ^ Resulting route
+gsubRoute pattern replacement = customRoute $
+ replaceAll pattern replacement . toFilePath
+
+
+--------------------------------------------------------------------------------
+-- | Get access to the metadata in order to determine the route
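+--
+-- An illustrative sketch, assuming items carry a @permalink@ metadata field
+-- (@fromMaybe@ comes from "Data.Maybe"):
+--
+-- > metadataRoute $ \meta -> constRoute $
+-- >     fromMaybe "index.html" (lookupString "permalink" meta)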
+metadataRoute :: (Metadata -> Routes) -> Routes
+metadataRoute f = Routes $ \r i -> do
+ metadata <- resourceMetadata (routesProvider r) (routesUnderlying r)
+ unRoutes (f metadata) r i
+
+
+--------------------------------------------------------------------------------
+-- | Compose routes so that @f \`composeRoutes\` g@ is more or less equivalent
+-- to @g . f@.
+--
+-- Example:
+--
+-- > let routes = gsubRoute "rss/" (const "") `composeRoutes` setExtension "xml"
+-- > in runRoutes routes "tags/rss/bar"
+--
+-- Result:
+--
+-- > Just "tags/bar.xml"
+--
+-- If the first route given fails, Hakyll will not apply the second route.
+composeRoutes :: Routes -- ^ First route to apply
+ -> Routes -- ^ Second route to apply
+ -> Routes -- ^ Resulting route
+composeRoutes (Routes f) (Routes g) = Routes $ \p i -> do
+ (mfp, um) <- f p i
+ case mfp of
+ Nothing -> return (Nothing, um)
+ Just fp -> do
+ (mfp', um') <- g p (fromFilePath fp)
+ return (mfp', um || um')
diff --git a/lib/Hakyll/Core/Rules.hs b/lib/Hakyll/Core/Rules.hs
new file mode 100644
index 0000000..41b9a73
--- /dev/null
+++ b/lib/Hakyll/Core/Rules.hs
@@ -0,0 +1,223 @@
+--------------------------------------------------------------------------------
+-- | This module provides a declarative DSL in which the user can specify the
+-- different rules used to run the compilers.
+--
+-- The convention is to just list all items in the 'Rules' monad, routes and
+-- compilation rules.
+--
+-- A typical usage example would be:
+--
+-- > main = hakyll $ do
+-- > match "posts/*" $ do
+-- > route (setExtension "html")
+-- > compile someCompiler
+-- > match "css/*" $ do
+-- > route idRoute
+-- > compile compressCssCompiler
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE OverloadedStrings #-}
+module Hakyll.Core.Rules
+ ( Rules
+ , match
+ , matchMetadata
+ , create
+ , version
+ , compile
+ , route
+
+ -- * Advanced usage
+ , preprocess
+ , Dependency (..)
+ , rulesExtraDependencies
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Monad.Reader (ask, local)
+import Control.Monad.State (get, modify, put)
+import Control.Monad.Trans (liftIO)
+import Control.Monad.Writer (censor, tell)
+import Data.Maybe (fromMaybe)
+import qualified Data.Set as S
+
+
+--------------------------------------------------------------------------------
+import Data.Binary (Binary)
+import Data.Typeable (Typeable)
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Dependencies
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Item
+import Hakyll.Core.Item.SomeItem
+import Hakyll.Core.Metadata
+import Hakyll.Core.Routes
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.Writable
+
+
+--------------------------------------------------------------------------------
+-- | Add a route
+tellRoute :: Routes -> Rules ()
+tellRoute route' = Rules $ tell $ RuleSet route' mempty mempty mempty
+
+
+--------------------------------------------------------------------------------
+-- | Add a number of compilers
+tellCompilers :: [(Identifier, Compiler SomeItem)] -> Rules ()
+tellCompilers compilers = Rules $ tell $ RuleSet mempty compilers mempty mempty
+
+
+--------------------------------------------------------------------------------
+-- | Add resources
+tellResources :: [Identifier] -> Rules ()
+tellResources resources' = Rules $ tell $
+ RuleSet mempty mempty (S.fromList resources') mempty
+
+
+--------------------------------------------------------------------------------
+-- | Add a pattern
+tellPattern :: Pattern -> Rules ()
+tellPattern pattern = Rules $ tell $ RuleSet mempty mempty mempty pattern
+
+
+--------------------------------------------------------------------------------
+flush :: Rules ()
+flush = Rules $ do
+ mcompiler <- rulesCompiler <$> get
+ case mcompiler of
+ Nothing -> return ()
+ Just compiler -> do
+ matches' <- rulesMatches <$> ask
+ version' <- rulesVersion <$> ask
+ route' <- fromMaybe mempty . rulesRoute <$> get
+
+ -- The version is possibly not set correctly at this point (yet)
+ let ids = map (setVersion version') matches'
+
+ {-
+ ids <- case fromLiteral pattern of
+ Just id' -> return [setVersion version' id']
+ Nothing -> do
+ ids <- unRules $ getMatches pattern
+ unRules $ tellResources ids
+ return $ map (setVersion version') ids
+ -}
+
+ -- Create a fast pattern for routing that matches exactly the
+ -- compilers created in the block given to match
+ let fastPattern = fromList ids
+
+ -- Write out the compilers and routes
+ unRules $ tellRoute $ matchRoute fastPattern route'
+ unRules $ tellCompilers $ [(id', compiler) | id' <- ids]
+
+ put $ emptyRulesState
+
+
+--------------------------------------------------------------------------------
+matchInternal :: Pattern -> Rules [Identifier] -> Rules () -> Rules ()
+matchInternal pattern getIDs rules = do
+ tellPattern pattern
+ flush
+ ids <- getIDs
+ tellResources ids
+ Rules $ local (setMatches ids) $ unRules $ rules >> flush
+ where
+ setMatches ids env = env {rulesMatches = ids}
+
+--------------------------------------------------------------------------------
+match :: Pattern -> Rules () -> Rules ()
+match pattern = matchInternal pattern $ getMatches pattern
+
+
+--------------------------------------------------------------------------------
+matchMetadata :: Pattern -> (Metadata -> Bool) -> Rules () -> Rules ()
+matchMetadata pattern metadataPred = matchInternal pattern $
+ map fst . filter (metadataPred . snd) <$> getAllMetadata pattern
+
+
+--------------------------------------------------------------------------------
+create :: [Identifier] -> Rules () -> Rules ()
+create ids rules = do
+ flush
+ -- TODO Maybe check if the resources exist and call tellResources on that
+ Rules $ local setMatches $ unRules $ rules >> flush
+ where
+ setMatches env = env {rulesMatches = ids}
+
+
+--------------------------------------------------------------------------------
+version :: String -> Rules () -> Rules ()
+version v rules = do
+ flush
+ Rules $ local setVersion' $ unRules $ rules >> flush
+ where
+ setVersion' env = env {rulesVersion = Just v}
+
+
+--------------------------------------------------------------------------------
+-- | Add a compilation rule to the rules.
+--
+-- This instructs all resources to be compiled using the given compiler.
+compile :: (Binary a, Typeable a, Writable a) => Compiler (Item a) -> Rules ()
+compile compiler = Rules $ modify $ \s ->
+ s {rulesCompiler = Just (fmap SomeItem compiler)}
+
+
+--------------------------------------------------------------------------------
+-- | Add a route.
+--
+-- This adds a route for all items matching the current pattern.
+route :: Routes -> Rules ()
+route route' = Rules $ modify $ \s -> s {rulesRoute = Just route'}
+
+
+--------------------------------------------------------------------------------
+-- | Execute an 'IO' action immediately while the rules are being evaluated.
+-- This should be avoided if possible, but occasionally comes in useful.
+preprocess :: IO a -> Rules a
+preprocess = Rules . liftIO
+
+
+--------------------------------------------------------------------------------
+-- | Advanced usage: add extra dependencies to compilers. Basically this is
+-- needed when you're doing unsafe tricky stuff in the rules monad, but you
+-- still want correct builds.
+--
+-- A useful utility for this purpose is 'makePatternDependency'.
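+--
+-- An illustrative sketch (the pattern and identifier are only examples):
+--
+-- > dep <- makePatternDependency "config/*.yaml"
+-- > rulesExtraDependencies [dep] $ create ["index.html"] $ ...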
+rulesExtraDependencies :: [Dependency] -> Rules a -> Rules a
+rulesExtraDependencies deps rules =
+ -- Note that we add the dependencies seemingly twice here. However, this is
+ -- done so that 'rulesExtraDependencies' works both if we have something
+ -- like:
+ --
+ -- > match "*.css" $ rulesExtraDependencies [foo] $ ...
+ --
+ -- and something like:
+ --
+ -- > rulesExtraDependencies [foo] $ match "*.css" $ ...
+ --
+    -- (1) takes care of the first form (where 'rulesExtraDependencies' is
+    -- used inside 'match') and (2) of the second (where it wraps 'match').
+ Rules $ censor fixRuleSet $ do
+ x <- unRules rules
+ fixCompiler
+ return x
+ where
+ -- (1) Adds the dependencies to the compilers we are yet to create
+ fixCompiler = modify $ \s -> case rulesCompiler s of
+ Nothing -> s
+ Just c -> s
+ { rulesCompiler = Just $ compilerTellDependencies deps >> c
+ }
+
+ -- (2) Adds the dependencies to the compilers that are already in the ruleset
+ fixRuleSet ruleSet = ruleSet
+ { rulesCompilers =
+ [ (i, compilerTellDependencies deps >> c)
+ | (i, c) <- rulesCompilers ruleSet
+ ]
+ }
diff --git a/lib/Hakyll/Core/Rules/Internal.hs b/lib/Hakyll/Core/Rules/Internal.hs
new file mode 100644
index 0000000..0641dcf
--- /dev/null
+++ b/lib/Hakyll/Core/Rules/Internal.hs
@@ -0,0 +1,109 @@
+--------------------------------------------------------------------------------
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE Rank2Types #-}
+module Hakyll.Core.Rules.Internal
+ ( RulesRead (..)
+ , RuleSet (..)
+ , RulesState (..)
+ , emptyRulesState
+ , Rules (..)
+ , runRules
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Monad.Reader (ask)
+import Control.Monad.RWS (RWST, runRWST)
+import Control.Monad.Trans (liftIO)
+import qualified Data.Map as M
+import Data.Set (Set)
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Item.SomeItem
+import Hakyll.Core.Metadata
+import Hakyll.Core.Provider
+import Hakyll.Core.Routes
+
+
+--------------------------------------------------------------------------------
+data RulesRead = RulesRead
+ { rulesProvider :: Provider
+ , rulesMatches :: [Identifier]
+ , rulesVersion :: Maybe String
+ }
+
+
+--------------------------------------------------------------------------------
+data RuleSet = RuleSet
+ { -- | Accumulated routes
+ rulesRoutes :: Routes
+ , -- | Accumulated compilers
+ rulesCompilers :: [(Identifier, Compiler SomeItem)]
+ , -- | A set of the actually used files
+ rulesResources :: Set Identifier
+ , -- | A pattern we can use to check if a file *would* be used. This is
+ -- needed for the preview server.
+ rulesPattern :: Pattern
+ }
+
+
+--------------------------------------------------------------------------------
+instance Monoid RuleSet where
+ mempty = RuleSet mempty mempty mempty mempty
+ mappend (RuleSet r1 c1 s1 p1) (RuleSet r2 c2 s2 p2) =
+ RuleSet (mappend r1 r2) (mappend c1 c2) (mappend s1 s2) (p1 .||. p2)
+
+
+--------------------------------------------------------------------------------
+data RulesState = RulesState
+ { rulesRoute :: Maybe Routes
+ , rulesCompiler :: Maybe (Compiler SomeItem)
+ }
+
+
+--------------------------------------------------------------------------------
+emptyRulesState :: RulesState
+emptyRulesState = RulesState Nothing Nothing
+
+
+--------------------------------------------------------------------------------
+-- | The monad used to compose rules
+newtype Rules a = Rules
+ { unRules :: RWST RulesRead RuleSet RulesState IO a
+ } deriving (Monad, Functor, Applicative)
+
+
+--------------------------------------------------------------------------------
+instance MonadMetadata Rules where
+ getMetadata identifier = Rules $ do
+ provider <- rulesProvider <$> ask
+ liftIO $ resourceMetadata provider identifier
+
+ getMatches pattern = Rules $ do
+ provider <- rulesProvider <$> ask
+ return $ filterMatches pattern $ resourceList provider
+
+
+--------------------------------------------------------------------------------
+-- | Run a Rules monad, resulting in a 'RuleSet'
+runRules :: Rules a -> Provider -> IO RuleSet
+runRules rules provider = do
+ (_, _, ruleSet) <- runRWST (unRules rules) env emptyRulesState
+
+ -- Ensure compiler uniqueness
+ let ruleSet' = ruleSet
+ { rulesCompilers = M.toList $
+ M.fromListWith (flip const) (rulesCompilers ruleSet)
+ }
+
+ return ruleSet'
+ where
+ env = RulesRead
+ { rulesProvider = provider
+ , rulesMatches = []
+ , rulesVersion = Nothing
+ }
diff --git a/lib/Hakyll/Core/Runtime.hs b/lib/Hakyll/Core/Runtime.hs
new file mode 100644
index 0000000..16a5d9e
--- /dev/null
+++ b/lib/Hakyll/Core/Runtime.hs
@@ -0,0 +1,276 @@
+--------------------------------------------------------------------------------
+module Hakyll.Core.Runtime
+ ( run
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Monad (unless)
+import Control.Monad.Except (ExceptT, runExceptT, throwError)
+import Control.Monad.Reader (ask)
+import Control.Monad.RWS (RWST, runRWST)
+import Control.Monad.State (get, modify)
+import Control.Monad.Trans (liftIO)
+import Data.List (intercalate)
+import Data.Map (Map)
+import qualified Data.Map as M
+import Data.Set (Set)
+import qualified Data.Set as S
+import System.Exit (ExitCode (..))
+import System.FilePath ((</>))
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Compiler.Require
+import Hakyll.Core.Configuration
+import Hakyll.Core.Dependencies
+import Hakyll.Core.Identifier
+import Hakyll.Core.Item
+import Hakyll.Core.Item.SomeItem
+import Hakyll.Core.Logger (Logger)
+import qualified Hakyll.Core.Logger as Logger
+import Hakyll.Core.Provider
+import Hakyll.Core.Routes
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.Store (Store)
+import qualified Hakyll.Core.Store as Store
+import Hakyll.Core.Util.File
+import Hakyll.Core.Writable
+
+
+--------------------------------------------------------------------------------
+run :: Configuration -> Logger -> Rules a -> IO (ExitCode, RuleSet)
+run config logger rules = do
+ -- Initialization
+ Logger.header logger "Initialising..."
+ Logger.message logger "Creating store..."
+ store <- Store.new (inMemoryCache config) $ storeDirectory config
+ Logger.message logger "Creating provider..."
+ provider <- newProvider store (shouldIgnoreFile config) $
+ providerDirectory config
+ Logger.message logger "Running rules..."
+ ruleSet <- runRules rules provider
+
+ -- Get old facts
+ mOldFacts <- Store.get store factsKey
+    let oldFacts = case mOldFacts of
+            Store.Found f -> f
+            _             -> mempty
+
+ -- Build runtime read/state
+ let compilers = rulesCompilers ruleSet
+ read' = RuntimeRead
+ { runtimeConfiguration = config
+ , runtimeLogger = logger
+ , runtimeProvider = provider
+ , runtimeStore = store
+ , runtimeRoutes = rulesRoutes ruleSet
+ , runtimeUniverse = M.fromList compilers
+ }
+ state = RuntimeState
+ { runtimeDone = S.empty
+ , runtimeSnapshots = S.empty
+ , runtimeTodo = M.empty
+ , runtimeFacts = oldFacts
+ }
+
+ -- Run the program and fetch the resulting state
+ result <- runExceptT $ runRWST build read' state
+ case result of
+ Left e -> do
+ Logger.error logger e
+ Logger.flush logger
+ return (ExitFailure 1, ruleSet)
+
+ Right (_, s, _) -> do
+ Store.set store factsKey $ runtimeFacts s
+
+ Logger.debug logger "Removing tmp directory..."
+ removeDirectory $ tmpDirectory config
+
+ Logger.flush logger
+ return (ExitSuccess, ruleSet)
+ where
+ factsKey = ["Hakyll.Core.Runtime.run", "facts"]
+
+
+--------------------------------------------------------------------------------
+data RuntimeRead = RuntimeRead
+ { runtimeConfiguration :: Configuration
+ , runtimeLogger :: Logger
+ , runtimeProvider :: Provider
+ , runtimeStore :: Store
+ , runtimeRoutes :: Routes
+ , runtimeUniverse :: Map Identifier (Compiler SomeItem)
+ }
+
+
+--------------------------------------------------------------------------------
+data RuntimeState = RuntimeState
+ { runtimeDone :: Set Identifier
+ , runtimeSnapshots :: Set (Identifier, Snapshot)
+ , runtimeTodo :: Map Identifier (Compiler SomeItem)
+ , runtimeFacts :: DependencyFacts
+ }
+
+
+--------------------------------------------------------------------------------
+type Runtime a = RWST RuntimeRead () RuntimeState (ExceptT String IO) a
+
+
+--------------------------------------------------------------------------------
+build :: Runtime ()
+build = do
+ logger <- runtimeLogger <$> ask
+ Logger.header logger "Checking for out-of-date items"
+ scheduleOutOfDate
+ Logger.header logger "Compiling"
+ pickAndChase
+ Logger.header logger "Success"
+
+
+--------------------------------------------------------------------------------
+scheduleOutOfDate :: Runtime ()
+scheduleOutOfDate = do
+ logger <- runtimeLogger <$> ask
+ provider <- runtimeProvider <$> ask
+ universe <- runtimeUniverse <$> ask
+ facts <- runtimeFacts <$> get
+ todo <- runtimeTodo <$> get
+
+ let identifiers = M.keys universe
+ modified = S.fromList $ flip filter identifiers $
+ resourceModified provider
+
+ let (ood, facts', msgs) = outOfDate identifiers modified facts
+ todo' = M.filterWithKey
+ (\id' _ -> id' `S.member` ood) universe
+
+ -- Print messages
+ mapM_ (Logger.debug logger) msgs
+
+ -- Update facts and todo items
+ modify $ \s -> s
+ { runtimeDone = runtimeDone s `S.union`
+ (S.fromList identifiers `S.difference` ood)
+ , runtimeTodo = todo `M.union` todo'
+ , runtimeFacts = facts'
+ }
+
+
+--------------------------------------------------------------------------------
+pickAndChase :: Runtime ()
+pickAndChase = do
+ todo <- runtimeTodo <$> get
+ case M.minViewWithKey todo of
+ Nothing -> return ()
+ Just ((id', _), _) -> do
+ chase [] id'
+ pickAndChase
+
+
+--------------------------------------------------------------------------------
+chase :: [Identifier] -> Identifier -> Runtime ()
+chase trail id'
+ | id' `elem` trail = throwError $ "Hakyll.Core.Runtime.chase: " ++
+ "Dependency cycle detected: " ++ intercalate " depends on "
+ (map show $ dropWhile (/= id') (reverse trail) ++ [id'])
+ | otherwise = do
+ logger <- runtimeLogger <$> ask
+ todo <- runtimeTodo <$> get
+ provider <- runtimeProvider <$> ask
+ universe <- runtimeUniverse <$> ask
+ routes <- runtimeRoutes <$> ask
+ store <- runtimeStore <$> ask
+ config <- runtimeConfiguration <$> ask
+ Logger.debug logger $ "Processing " ++ show id'
+
+ let compiler = todo M.! id'
+ read' = CompilerRead
+ { compilerConfig = config
+ , compilerUnderlying = id'
+ , compilerProvider = provider
+ , compilerUniverse = M.keysSet universe
+ , compilerRoutes = routes
+ , compilerStore = store
+ , compilerLogger = logger
+ }
+
+ result <- liftIO $ runCompiler compiler read'
+ case result of
+ -- Rethrow error
+ CompilerError [] -> throwError
+ "Compiler failed but no info given, try running with -v?"
+ CompilerError es -> throwError $ intercalate "; " es
+
+            -- Signal that a snapshot was saved
+ CompilerSnapshot snapshot c -> do
+ -- Update info. The next 'chase' will pick us again at some
+ -- point so we can continue then.
+ modify $ \s -> s
+ { runtimeSnapshots =
+ S.insert (id', snapshot) (runtimeSnapshots s)
+ , runtimeTodo = M.insert id' c (runtimeTodo s)
+ }
+
+ -- Huge success
+ CompilerDone (SomeItem item) cwrite -> do
+ -- Print some info
+ let facts = compilerDependencies cwrite
+ cacheHits
+ | compilerCacheHits cwrite <= 0 = "updated"
+ | otherwise = "cached "
+ Logger.message logger $ cacheHits ++ " " ++ show id'
+
+ -- Sanity check
+ unless (itemIdentifier item == id') $ throwError $
+ "The compiler yielded an Item with Identifier " ++
+ show (itemIdentifier item) ++ ", but we were expecting " ++
+ "an Item with Identifier " ++ show id' ++ " " ++
+ "(you probably want to call makeItem to solve this problem)"
+
+ -- Write if necessary
+ (mroute, _) <- liftIO $ runRoutes routes provider id'
+ case mroute of
+ Nothing -> return ()
+ Just route -> do
+ let path = destinationDirectory config </> route
+ liftIO $ makeDirectories path
+ liftIO $ write path item
+ Logger.debug logger $ "Routed to " ++ path
+
+ -- Save! (For load)
+ liftIO $ save store item
+
+ -- Update state
+ modify $ \s -> s
+ { runtimeDone = S.insert id' (runtimeDone s)
+ , runtimeTodo = M.delete id' (runtimeTodo s)
+ , runtimeFacts = M.insert id' facts (runtimeFacts s)
+ }
+
+ -- Try something else first
+ CompilerRequire dep c -> do
+ -- Update the compiler so we don't execute it twice
+ let (depId, depSnapshot) = dep
+ done <- runtimeDone <$> get
+ snapshots <- runtimeSnapshots <$> get
+
+ -- Done if we either completed the entire item (runtimeDone) or
+ -- if we previously saved the snapshot (runtimeSnapshots).
+ let depDone =
+ depId `S.member` done ||
+ (depId, depSnapshot) `S.member` snapshots
+
+ modify $ \s -> s
+ { runtimeTodo = M.insert id'
+ (if depDone then c else compilerResult result)
+ (runtimeTodo s)
+ }
+
+ -- If the required item is already compiled, continue, or, start
+ -- chasing that
+ Logger.debug logger $ "Require " ++ show depId ++
+ " (snapshot " ++ depSnapshot ++ "): " ++
+ (if depDone then "OK" else "chasing")
+ if depDone then chase trail id' else chase (id' : trail) depId
diff --git a/lib/Hakyll/Core/Store.hs b/lib/Hakyll/Core/Store.hs
new file mode 100644
index 0000000..fdbcf11
--- /dev/null
+++ b/lib/Hakyll/Core/Store.hs
@@ -0,0 +1,197 @@
+--------------------------------------------------------------------------------
+-- | A store for storing and retrieving items
+{-# LANGUAGE ExistentialQuantification #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+module Hakyll.Core.Store
+ ( Store
+ , Result (..)
+ , toMaybe
+ , new
+ , set
+ , get
+ , isMember
+ , delete
+ , hash
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Exception (IOException, handle)
+import qualified Crypto.Hash.MD5 as MD5
+import Data.Binary (Binary, decode, encodeFile)
+import qualified Data.ByteString as B
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Cache.LRU.IO as Lru
+import Data.List (intercalate)
+import Data.Maybe (isJust)
+import qualified Data.Text as T
+import qualified Data.Text.Encoding as T
+import Data.Typeable (TypeRep, Typeable, cast, typeOf)
+import System.Directory (createDirectoryIfMissing)
+import System.Directory (doesFileExist, removeFile)
+import System.FilePath ((</>))
+import System.IO (IOMode (..), hClose, openFile)
+import Text.Printf (printf)
+
+
+--------------------------------------------------------------------------------
+-- | Simple wrapper type
+data Box = forall a. Typeable a => Box a
+
+
+--------------------------------------------------------------------------------
+data Store = Store
+ { -- | All items are stored on the filesystem
+ storeDirectory :: FilePath
+ , -- | Optionally, items are also kept in-memory
+ storeMap :: Maybe (Lru.AtomicLRU FilePath Box)
+ }
+
+
+--------------------------------------------------------------------------------
+instance Show Store where
+ show _ = "<Store>"
+
+
+--------------------------------------------------------------------------------
+-- | Result of a store query
+data Result a
+ = Found a -- ^ Found, result
+ | NotFound -- ^ Not found
+ | WrongType TypeRep TypeRep -- ^ Expected, true type
+ deriving (Show, Eq)
+
+
+--------------------------------------------------------------------------------
+-- | Convert result to 'Maybe'
+toMaybe :: Result a -> Maybe a
+toMaybe (Found x) = Just x
+toMaybe _ = Nothing
+
+
+--------------------------------------------------------------------------------
+-- | Initialize the store
+new :: Bool -- ^ Use in-memory caching
+ -> FilePath -- ^ Directory to use for hard disk storage
+ -> IO Store -- ^ Store
+new inMemory directory = do
+ createDirectoryIfMissing True directory
+ ref <- if inMemory then Just <$> Lru.newAtomicLRU csize else return Nothing
+ return Store
+ { storeDirectory = directory
+ , storeMap = ref
+ }
+ where
+ csize = Just 500
+
+
+--------------------------------------------------------------------------------
+-- | Auxiliary: add an item to the in-memory cache
+cacheInsert :: Typeable a => Store -> String -> a -> IO ()
+cacheInsert (Store _ Nothing) _ _ = return ()
+cacheInsert (Store _ (Just lru)) key x =
+ Lru.insert key (Box x) lru
+
+
+--------------------------------------------------------------------------------
+-- | Auxiliary: get an item from the in-memory cache
+cacheLookup :: forall a. Typeable a => Store -> String -> IO (Result a)
+cacheLookup (Store _ Nothing) _ = return NotFound
+cacheLookup (Store _ (Just lru)) key = do
+ res <- Lru.lookup key lru
+ return $ case res of
+ Nothing -> NotFound
+ Just (Box x) -> case cast x of
+ Just x' -> Found x'
+ Nothing -> WrongType (typeOf (undefined :: a)) (typeOf x)
+
+
+--------------------------------------------------------------------------------
+cacheIsMember :: Store -> String -> IO Bool
+cacheIsMember (Store _ Nothing) _ = return False
+cacheIsMember (Store _ (Just lru)) key = isJust <$> Lru.lookup key lru
+
+
+--------------------------------------------------------------------------------
+-- | Auxiliary: delete an item from the in-memory cache
+cacheDelete :: Store -> String -> IO ()
+cacheDelete (Store _ Nothing) _ = return ()
+cacheDelete (Store _ (Just lru)) key = do
+ _ <- Lru.delete key lru
+ return ()
+
+
+--------------------------------------------------------------------------------
+-- | Store an item
+set :: (Binary a, Typeable a) => Store -> [String] -> a -> IO ()
+set store identifier value = do
+ encodeFile (storeDirectory store </> key) value
+ cacheInsert store key value
+ where
+ key = hash identifier
+
+
+--------------------------------------------------------------------------------
+-- | Load an item
+get :: (Binary a, Typeable a) => Store -> [String] -> IO (Result a)
+get store identifier = do
+ -- First check the in-memory map
+ ref <- cacheLookup store key
+ case ref of
+ -- Not found in the map, try the filesystem
+ NotFound -> do
+ exists <- doesFileExist path
+ if not exists
+ -- Not found in the filesystem either
+ then return NotFound
+ -- Found in the filesystem
+ else do
+ v <- decodeClose
+ cacheInsert store key v
+ return $ Found v
+ -- Found in the in-memory map (or wrong type), just return
+ s -> return s
+ where
+ key = hash identifier
+ path = storeDirectory store </> key
+
+ -- 'decodeFile' from Data.Binary which closes the file ASAP
+ decodeClose = do
+ h <- openFile path ReadMode
+ lbs <- BL.hGetContents h
+ BL.length lbs `seq` hClose h
+ return $ decode lbs
+
+
+--------------------------------------------------------------------------------
+-- | Check whether an item exists in the store (in memory or on disk)
+isMember :: Store -> [String] -> IO Bool
+isMember store identifier = do
+ inCache <- cacheIsMember store key
+ if inCache then return True else doesFileExist path
+ where
+ key = hash identifier
+ path = storeDirectory store </> key
+
+
+--------------------------------------------------------------------------------
+-- | Delete an item
+delete :: Store -> [String] -> IO ()
+delete store identifier = do
+ cacheDelete store key
+ deleteFile $ storeDirectory store </> key
+ where
+ key = hash identifier
+
+
+--------------------------------------------------------------------------------
+-- | Delete a file, ignoring the error if it does not exist
+deleteFile :: FilePath -> IO ()
+deleteFile = handle (\(_ :: IOException) -> return ()) . removeFile
+
+
+--------------------------------------------------------------------------------
+-- | Mostly meant for internal usage
+hash :: [String] -> String
+hash = concatMap (printf "%02x") . B.unpack .
+ MD5.hash . T.encodeUtf8 . T.pack . intercalate "/"
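+
+
+--------------------------------------------------------------------------------
+-- Illustrative usage sketch (the cache directory and key are just examples):
+--
+-- > store  <- new True "_cache"
+-- > set store ["example", "key"] (42 :: Int)
+-- > result <- get store ["example", "key"] :: IO (Result Int)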
diff --git a/lib/Hakyll/Core/UnixFilter.hs b/lib/Hakyll/Core/UnixFilter.hs
new file mode 100644
index 0000000..734d8d8
--- /dev/null
+++ b/lib/Hakyll/Core/UnixFilter.hs
@@ -0,0 +1,159 @@
+{-# LANGUAGE CPP #-}
+
+--------------------------------------------------------------------------------
+-- | A Compiler that supports unix filters.
+module Hakyll.Core.UnixFilter
+ ( unixFilter
+ , unixFilterLBS
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Concurrent (forkIO)
+import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
+import Control.DeepSeq (deepseq)
+import Control.Monad (forM_)
+import Data.ByteString.Lazy (ByteString)
+import qualified Data.ByteString.Lazy as LB
+import Data.IORef (newIORef, readIORef, writeIORef)
+import System.Exit (ExitCode (..))
+import System.IO (Handle, hClose, hFlush, hGetContents,
+ hPutStr, hSetEncoding, localeEncoding)
+import System.Process
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Compiler
+
+
+--------------------------------------------------------------------------------
+-- | Use a unix filter as compiler. For example, we could use the 'rev' program
+-- as a compiler.
+--
+-- > rev :: Compiler (Item String)
+-- > rev = getResourceString >>= withItemBody (unixFilter "rev" [])
+--
+-- A more realistic example: one can use this to call the sass compiler on
+-- CSS files. More information about sass can be found here:
+--
+-- <http://sass-lang.com/>
+--
+-- The code is fairly straightforward, given that we use @.scss@ for sass:
+--
+-- > match "style.scss" $ do
+-- > route $ setExtension "css"
+-- > compile $ getResourceString >>=
+-- > withItemBody (unixFilter "sass" ["-s", "--scss"]) >>=
+-- > return . fmap compressCss
+unixFilter :: String -- ^ Program name
+ -> [String] -- ^ Program args
+ -> String -- ^ Program input
+ -> Compiler String -- ^ Program output
+unixFilter = unixFilterWith writer reader
+ where
+ writer handle input = do
+ hSetEncoding handle localeEncoding
+ hPutStr handle input
+ reader handle = do
+ hSetEncoding handle localeEncoding
+ out <- hGetContents handle
+ deepseq out (return out)
+
+
+--------------------------------------------------------------------------------
+-- | Variant of 'unixFilter' that should be used for binary files
+--
+-- > match "music.wav" $ do
+-- > route $ setExtension "ogg"
+-- > compile $ getResourceLBS >>= withItemBody (unixFilterLBS "oggenc" ["-"])
+unixFilterLBS :: String -- ^ Program name
+ -> [String] -- ^ Program args
+ -> ByteString -- ^ Program input
+ -> Compiler ByteString -- ^ Program output
+unixFilterLBS = unixFilterWith LB.hPutStr $ \handle -> do
+ out <- LB.hGetContents handle
+ LB.length out `seq` return out
+
+
+--------------------------------------------------------------------------------
+-- | Generalised worker shared by 'unixFilter' and 'unixFilterLBS'
+unixFilterWith :: Monoid o
+ => (Handle -> i -> IO ()) -- ^ Writer
+ -> (Handle -> IO o) -- ^ Reader
+ -> String -- ^ Program name
+ -> [String] -- ^ Program args
+ -> i -- ^ Program input
+ -> Compiler o -- ^ Program output
+unixFilterWith writer reader programName args input = do
+ debugCompiler ("Executing external program " ++ programName)
+ (output, err, exitCode) <- unsafeCompiler $
+ unixFilterIO writer reader programName args input
+ forM_ (lines err) debugCompiler
+ case exitCode of
+ ExitSuccess -> return output
+ ExitFailure e -> fail $
+ "Hakyll.Core.UnixFilter.unixFilterWith: " ++
+ unwords (programName : args) ++ " gave exit code " ++ show e
+
+
+--------------------------------------------------------------------------------
+-- | Internally used function
+unixFilterIO :: Monoid o
+ => (Handle -> i -> IO ())
+ -> (Handle -> IO o)
+ -> String
+ -> [String]
+ -> i
+ -> IO (o, String, ExitCode)
+unixFilterIO writer reader programName args input = do
+ -- The problem on Windows is that `proc` is unable to execute
+    -- batch stubs (e.g. anything created using 'gem install ...') even if it's in
+ -- `$PATH`. A solution to this issue is to execute the batch file explicitly
+ -- using `cmd /c batchfile` but there is no rational way to know where said
+    -- batch file is on the system. Hence, we detect Windows using
+ -- CPP and instead of using `proc` to create the process, use `shell`
+ -- which will be able to execute everything `proc` can
+ -- as well as batch files.
+#ifdef mingw32_HOST_OS
+ let pr = shell $ unwords (programName : args)
+#else
+ let pr = proc programName args
+#endif
+
+ (Just inh, Just outh, Just errh, pid) <-
+ createProcess pr
+ { std_in = CreatePipe
+ , std_out = CreatePipe
+ , std_err = CreatePipe
+ }
+
+    -- Create a synchronisation lock and result boxes
+ lock <- newEmptyMVar
+ outRef <- newIORef mempty
+ errRef <- newIORef ""
+
+ -- Write the input to the child pipe
+ _ <- forkIO $ writer inh input >> hFlush inh >> hClose inh
+
+ -- Read from stdout
+ _ <- forkIO $ do
+ out <- reader outh
+ hClose outh
+ writeIORef outRef out
+ putMVar lock ()
+
+ -- Read from stderr
+ _ <- forkIO $ do
+ hSetEncoding errh localeEncoding
+ err <- hGetContents errh
+ _ <- deepseq err (return err)
+ hClose errh
+ writeIORef errRef err
+ putMVar lock ()
+
+ -- Get exit code & return
+ takeMVar lock
+ takeMVar lock
+ exitCode <- waitForProcess pid
+ out <- readIORef outRef
+ err <- readIORef errRef
+ return (out, err, exitCode)
diff --git a/lib/Hakyll/Core/Util/File.hs b/lib/Hakyll/Core/Util/File.hs
new file mode 100644
index 0000000..9db6b11
--- /dev/null
+++ b/lib/Hakyll/Core/Util/File.hs
@@ -0,0 +1,56 @@
+--------------------------------------------------------------------------------
+-- | A module containing various file utility functions
+module Hakyll.Core.Util.File
+ ( makeDirectories
+ , getRecursiveContents
+ , removeDirectory
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Monad (filterM, forM, when)
+import System.Directory (createDirectoryIfMissing,
+ doesDirectoryExist, getDirectoryContents,
+ removeDirectoryRecursive)
+import System.FilePath (takeDirectory, (</>))
+
+
+--------------------------------------------------------------------------------
+-- | Given a path to a file, ensure the file can be written by creating all
+-- missing directories on the path.
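+--
+-- For example (with an illustrative path, not one from an actual site):
+--
+-- > makeDirectories "_site/posts/2017/index.html"
+--
+-- creates @_site/posts/2017@ (and any missing parents), but not the file
+-- itself.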
+makeDirectories :: FilePath -> IO ()
+makeDirectories = createDirectoryIfMissing True . takeDirectory
+
+
+--------------------------------------------------------------------------------
+-- | Get all contents of a directory.
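+--
+-- The ignore predicate receives plain entry names, not full paths. A small
+-- illustration, skipping hidden files and directories:
+--
+-- > getRecursiveContents (\name -> return $ take 1 name == ".") "posts"
+--
+-- returns file paths relative to @posts@.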
+getRecursiveContents :: (FilePath -> IO Bool) -- ^ Ignore this file/directory
+ -> FilePath -- ^ Directory to search
+ -> IO [FilePath] -- ^ List of files found
+getRecursiveContents ignore top = go ""
+ where
+ isProper x
+ | x `elem` [".", ".."] = return False
+ | otherwise = not <$> ignore x
+
+ go dir = do
+ dirExists <- doesDirectoryExist (top </> dir)
+ if not dirExists
+ then return []
+ else do
+ names <- filterM isProper =<< getDirectoryContents (top </> dir)
+ paths <- forM names $ \name -> do
+ let rel = dir </> name
+ isDirectory <- doesDirectoryExist (top </> rel)
+ if isDirectory
+ then go rel
+ else return [rel]
+
+ return $ concat paths
+
+
+--------------------------------------------------------------------------------
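+-- | Remove a directory (and everything inside it) if it exists; do nothing
+-- otherwise.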
+removeDirectory :: FilePath -> IO ()
+removeDirectory fp = do
+ e <- doesDirectoryExist fp
+ when e $ removeDirectoryRecursive fp
diff --git a/lib/Hakyll/Core/Util/Parser.hs b/lib/Hakyll/Core/Util/Parser.hs
new file mode 100644
index 0000000..c4b2f8d
--- /dev/null
+++ b/lib/Hakyll/Core/Util/Parser.hs
@@ -0,0 +1,32 @@
+--------------------------------------------------------------------------------
+-- | Parser utilities
+module Hakyll.Core.Util.Parser
+ ( metadataKey
+ , reservedKeys
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Control.Applicative ((<|>))
+import Control.Monad (guard, mzero, void)
+import qualified Text.Parsec as P
+import Text.Parsec.String (Parser)
+
+
+--------------------------------------------------------------------------------
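+-- | Parse a key as used in metadata and templates: a letter followed by
+-- alphanumeric characters, underscores, dots and hyphens, rejecting
+-- 'reservedKeys'. Some illustrative cases (not from the original docs):
+--
+-- > P.parse metadataKey "" "foo-bar: 1"  -- yields Right "foo-bar"
+-- > P.parse metadataKey "" "key-$body$"  -- yields Right "key"
+-- > P.parse metadataKey "" "for"         -- fails: reserved key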
+metadataKey :: Parser String
+metadataKey = do
+    -- Ensure a trailing '-' binds to a following '$' rather than to the key:
+    -- a '-' is only accepted when it is not immediately followed by '$'.
+    let hyphen = P.try $ do
+ void $ P.char '-'
+ x <- P.lookAhead P.anyChar
+ guard $ x /= '$'
+ pure '-'
+
+    i <- (:) <$> P.letter <*> P.many (P.alphaNum <|> P.oneOf "_." <|> hyphen)
+ if i `elem` reservedKeys then mzero else return i
+
+
+--------------------------------------------------------------------------------
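+-- | Keys that may not be used as metadata keys because they double as
+-- template keywords.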
+reservedKeys :: [String]
+reservedKeys = ["if", "else", "endif", "for", "sep", "endfor", "partial"]
diff --git a/lib/Hakyll/Core/Util/String.hs b/lib/Hakyll/Core/Util/String.hs
new file mode 100644
index 0000000..23bdd39
--- /dev/null
+++ b/lib/Hakyll/Core/Util/String.hs
@@ -0,0 +1,78 @@
+{-# LANGUAGE FlexibleContexts #-}
+--------------------------------------------------------------------------------
+-- | Miscellaneous string manipulation functions.
+module Hakyll.Core.Util.String
+ ( trim
+ , replaceAll
+ , splitAll
+ , needlePrefix
+ ) where
+
+
+--------------------------------------------------------------------------------
+import Data.Char (isSpace)
+import Data.List (isPrefixOf)
+import Data.Maybe (listToMaybe)
+import Text.Regex.TDFA ((=~~))
+
+
+--------------------------------------------------------------------------------
+-- | Trim a string (drop spaces, tabs and newlines from both ends).
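+--
+-- For example:
+--
+-- > trim "  \tfoo bar \n" == "foo bar"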
+trim :: String -> String
+trim = reverse . trim' . reverse . trim'
+ where
+ trim' = dropWhile isSpace
+
+
+--------------------------------------------------------------------------------
+-- | A simple (but inefficient) regex replace function
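+--
+-- For example, with a Text.Regex.TDFA pattern:
+--
+-- > replaceAll "[0-9]+" (const "N") "page 10 of 20"  -- yields "page N of N"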
+replaceAll :: String -- ^ Pattern
+ -> (String -> String) -- ^ Replacement (called on capture)
+ -> String -- ^ Source string
+ -> String -- ^ Result
+replaceAll pattern f source = replaceAll' source
+ where
+ replaceAll' src = case listToMaybe (src =~~ pattern) of
+ Nothing -> src
+ Just (o, l) ->
+ let (before, tmp) = splitAt o src
+ (capture, after) = splitAt l tmp
+ in before ++ f capture ++ replaceAll' after
+
+
+--------------------------------------------------------------------------------
+-- | A simple regex split function. The resulting list will contain no empty
+-- strings.
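+--
+-- For example:
+--
+-- > splitAll "," "a,b,,c"  -- yields ["a", "b", "c"]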
+splitAll :: String -- ^ Pattern
+ -> String -- ^ String to split
+ -> [String] -- ^ Result
+splitAll pattern = filter (not . null) . splitAll'
+ where
+ splitAll' src = case listToMaybe (src =~~ pattern) of
+ Nothing -> [src]
+ Just (o, l) ->
+ let (before, tmp) = splitAt o src
+ in before : splitAll' (drop l tmp)
+
+
+--------------------------------------------------------------------------------
+-- | Find the first instance of needle (must be non-empty) in haystack. We
+-- return the prefix of haystack before needle is matched, or 'Nothing' if the
+-- needle does not occur.
+--
+-- Examples:
+--
+-- > needlePrefix "cd" "abcde" == Just "ab"
+--
+-- > needlePrefix "ab" "abc" == Just ""
+--
+-- > needlePrefix "ab" "xxab" == Just "xx"
+--
+-- > needlePrefix "a" "xx" == Nothing
+needlePrefix :: String -> String -> Maybe String
+needlePrefix needle haystack = go [] haystack
+ where
+ go _ [] = Nothing
+ go acc xss@(x:xs)
+ | needle `isPrefixOf` xss = Just $ reverse acc
+ | otherwise = go (x : acc) xs
diff --git a/lib/Hakyll/Core/Writable.hs b/lib/Hakyll/Core/Writable.hs
new file mode 100644
index 0000000..cad6cf1
--- /dev/null
+++ b/lib/Hakyll/Core/Writable.hs
@@ -0,0 +1,56 @@
+--------------------------------------------------------------------------------
+-- | Describes writable items; items that can be saved to the disk
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE TypeSynonymInstances #-}
+module Hakyll.Core.Writable
+ ( Writable (..)
+ ) where
+
+
+--------------------------------------------------------------------------------
+import qualified Data.ByteString as SB
+import qualified Data.ByteString.Lazy as LB
+import Data.Word (Word8)
+import Text.Blaze.Html (Html)
+import Text.Blaze.Html.Renderer.String (renderHtml)
+
+
+--------------------------------------------------------------------------------
+import Hakyll.Core.Item
+
+
+--------------------------------------------------------------------------------
+-- | Describes an item that can be saved to the disk
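+--
+-- A user-defined type can be made writable by giving it a 'write'
+-- implementation. A hypothetical sketch (the @Sitemap@ wrapper below is not
+-- part of Hakyll):
+--
+-- > newtype Sitemap = Sitemap String
+-- >
+-- > instance Writable Sitemap where
+-- >     write fp item = case itemBody item of Sitemap s -> writeFile fp s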
+class Writable a where
+ -- | Save an item to the given filepath
+ write :: FilePath -> Item a -> IO ()
+
+
+--------------------------------------------------------------------------------
+instance Writable () where
+ write _ _ = return ()
+
+
+--------------------------------------------------------------------------------
+instance Writable [Char] where
+ write p = writeFile p . itemBody
+
+
+--------------------------------------------------------------------------------
+instance Writable SB.ByteString where
+ write p = SB.writeFile p . itemBody
+
+
+--------------------------------------------------------------------------------
+instance Writable LB.ByteString where
+ write p = LB.writeFile p . itemBody
+
+
+--------------------------------------------------------------------------------
+instance Writable [Word8] where
+ write p = write p . fmap SB.pack
+
+
+--------------------------------------------------------------------------------
+instance Writable Html where
+ write p = write p . fmap renderHtml