author    Jasper Van der Jeugt <jaspervdj@gmail.com>  2011-03-01 14:50:41 +0100
committer Jasper Van der Jeugt <jaspervdj@gmail.com>  2011-03-01 14:50:41 +0100
commit    90b25105830d6e4b0943ab55f9317bd142533acf (patch)
tree      6eefb80a8a84724e70539dd8fa449530f7b17fe0 /src/Hakyll
parent    8ef5a3ed0307be5d34a9564d02af3ed494f8e228 (diff)
parent    8b727b994d482d593046f9b01a5c40b97c166d62 (diff)
download  hakyll-90b25105830d6e4b0943ab55f9317bd142533acf.tar.gz
Merge branch 'hakyll3'
Conflicts:
	hakyll.cabal
	src/Text/Hakyll/Tags.hs
Diffstat (limited to 'src/Hakyll')
-rw-r--r-- src/Hakyll/Core/CompiledItem.hs | 45
-rw-r--r-- src/Hakyll/Core/Compiler.hs | 333
-rw-r--r-- src/Hakyll/Core/Compiler/Internal.hs | 146
-rw-r--r-- src/Hakyll/Core/Configuration.hs | 44
-rw-r--r-- src/Hakyll/Core/CopyFile.hs | 29
-rw-r--r-- src/Hakyll/Core/DirectedGraph.hs | 85
-rw-r--r-- src/Hakyll/Core/DirectedGraph/DependencySolver.hs | 70
-rw-r--r-- src/Hakyll/Core/DirectedGraph/Dot.hs | 30
-rw-r--r-- src/Hakyll/Core/DirectedGraph/Internal.hs | 43
-rw-r--r-- src/Hakyll/Core/Identifier.hs | 59
-rw-r--r-- src/Hakyll/Core/Identifier/Pattern.hs | 160
-rw-r--r-- src/Hakyll/Core/Logger.hs | 90
-rw-r--r-- src/Hakyll/Core/ResourceProvider.hs | 75
-rw-r--r-- src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs | 29
-rw-r--r-- src/Hakyll/Core/Routes.hs | 136
-rw-r--r-- src/Hakyll/Core/Rules.hs | 161
-rw-r--r-- src/Hakyll/Core/Rules/Internal.hs | 75
-rw-r--r-- src/Hakyll/Core/Run.hs | 207
-rw-r--r-- src/Hakyll/Core/Store.hs | 88
-rw-r--r-- src/Hakyll/Core/UnixFilter.hs | 76
-rw-r--r-- src/Hakyll/Core/Util/Arrow.hs | 25
-rw-r--r-- src/Hakyll/Core/Util/File.hs | 90
-rw-r--r-- src/Hakyll/Core/Util/String.hs | 48
-rw-r--r-- src/Hakyll/Core/Writable.hs | 22
-rw-r--r-- src/Hakyll/Main.hs | 113
-rw-r--r-- src/Hakyll/Web/CompressCss.hs | 51
-rw-r--r-- src/Hakyll/Web/Feed.hs | 124
-rw-r--r-- src/Hakyll/Web/FileType.hs | 55
-rw-r--r-- src/Hakyll/Web/Page.hs | 124
-rw-r--r-- src/Hakyll/Web/Page/Internal.hs | 50
-rw-r--r-- src/Hakyll/Web/Page/Metadata.hs | 131
-rw-r--r-- src/Hakyll/Web/Page/Read.hs | 60
-rw-r--r-- src/Hakyll/Web/Pandoc.hs | 110
-rw-r--r-- src/Hakyll/Web/Preview/Server.hs | 72
-rw-r--r-- src/Hakyll/Web/RelativizeUrls.hs | 62
-rw-r--r-- src/Hakyll/Web/Tags.hs | 180
-rw-r--r-- src/Hakyll/Web/Template.hs | 109
-rw-r--r-- src/Hakyll/Web/Template/Internal.hs | 45
-rw-r--r-- src/Hakyll/Web/Template/Read.hs | 10
-rw-r--r-- src/Hakyll/Web/Template/Read/Hakyll.hs | 35
-rw-r--r-- src/Hakyll/Web/Template/Read/Hamlet.hs | 46
-rw-r--r-- src/Hakyll/Web/Util/Url.hs | 30
42 files changed, 3573 insertions, 0 deletions
diff --git a/src/Hakyll/Core/CompiledItem.hs b/src/Hakyll/Core/CompiledItem.hs
new file mode 100644
index 0000000..5dd0efc
--- /dev/null
+++ b/src/Hakyll/Core/CompiledItem.hs
@@ -0,0 +1,45 @@
+-- | A module containing a box datatype representing a compiled item. This
+-- item can be of any type, given that a few restrictions hold:
+--
+-- * we need a 'Typeable' instance to perform type-safe casts;
+--
+-- * we need a 'Binary' instance so we can serialize these items to the cache;
+--
+-- * we need a 'Writable' instance so the results can be saved.
+--
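+-- A quick round-trip sketch (it assumes a 'Writable' instance for 'String'
+-- is in scope, e.g. from "Hakyll.Core.Writable"):
+--
+-- > unCompiledItem (compiledItem "Hello") :: String
+--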
+{-# LANGUAGE ExistentialQuantification #-}
+module Hakyll.Core.CompiledItem
+ ( CompiledItem (..)
+ , compiledItem
+ , unCompiledItem
+ ) where
+
+import Data.Binary (Binary)
+import Data.Typeable (Typeable, cast)
+import Data.Maybe (fromMaybe)
+
+import Hakyll.Core.Writable
+
+-- | Box type for a compiled item
+--
+data CompiledItem = forall a. (Binary a, Typeable a, Writable a)
+ => CompiledItem a
+
+instance Writable CompiledItem where
+ write p (CompiledItem x) = write p x
+
+-- | Box a value into a 'CompiledItem'
+--
+compiledItem :: (Binary a, Typeable a, Writable a)
+ => a
+ -> CompiledItem
+compiledItem = CompiledItem
+
+-- | Unbox a value from a 'CompiledItem'
+--
+unCompiledItem :: (Binary a, Typeable a, Writable a)
+ => CompiledItem
+ -> a
+unCompiledItem (CompiledItem x) = fromMaybe error' $ cast x
+ where
+ error' = error "Hakyll.Core.CompiledItem.unCompiledItem: Unsupported type"
diff --git a/src/Hakyll/Core/Compiler.hs b/src/Hakyll/Core/Compiler.hs
new file mode 100644
index 0000000..e5da9b8
--- /dev/null
+++ b/src/Hakyll/Core/Compiler.hs
@@ -0,0 +1,333 @@
+-- | A Compiler manages targets and dependencies between targets
+--
+-- The most distinguishing property of a 'Compiler' is that it is an Arrow. A
+-- compiler of the type @Compiler a b@ is simply a compilation phase which takes
+-- an @a@ as input, and produces a @b@ as output.
+--
+-- Compilers are chained using the '>>>' arrow operation. If we have a compiler
+--
+-- > getResourceString :: Compiler Resource String
+--
+-- which reads the resource, and a compiler
+--
+-- > readPage :: Compiler String (Page String)
+--
+-- we can chain these two compilers to get a
+--
+-- > (getResourceString >>> readPage) :: Compiler Resource (Page String)
+--
+-- Most compilers can be created by combining smaller compilers using '>>>'.
+--
+-- More advanced constructions are also possible using arrows, and sometimes
+-- these are needed. For a good introduction to arrows, you can refer to
+--
+-- <http://en.wikibooks.org/wiki/Haskell/Understanding_arrows>
+--
+-- A construction worth writing a few paragraphs about here is the family of
+-- 'require' functions. Different variants of this function are exported, but
+-- they all serve more or less the same purpose.
+--
+-- When you use only '>>>' to chain your compilers, you get a linear pipeline --
+-- it is not possible to add extra items from other compilers along the way.
+-- This is where the 'require' functions come in.
+--
+-- These functions allow you to reference other items, which are then added to
+-- the pipeline. Let's look at this crappy ASCII illustration which represents
+-- a pretty common scenario:
+--
+-- > read resource >>> pandoc render >>> layout >>> relativize URL's
+-- >
+-- > @templates/fancy.html@
+--
+-- We want to construct a pipeline of compilers to go from our resource to a
+-- proper webpage. However, the @layout@ compiler takes more than just the
+-- rendered page as input: it needs the @templates/fancy.html@ template as well.
+--
+-- This is an example of where we need the @require@ function. We can solve
+-- this using a construction that looks like:
+--
+-- > ... >>> pandoc render >>> require >>> layout >>> ...
+-- > |
+-- > @templates/fancy.html@ ------/
+--
+-- This illustration can help us understand the type signature of 'require'.
+--
+-- > require :: (Binary a, Typeable a, Writable a)
+-- > => Identifier
+-- > -> (b -> a -> c)
+-- > -> Compiler b c
+--
+-- Let's look at it in detail:
+--
+-- > (Binary a, Typeable a, Writable a)
+--
+-- These are constraints for the @a@ type. @a@ (the template) needs to have
+-- certain properties for it to be required.
+--
+-- > Identifier
+--
+-- This is simply @templates/fancy.html@: the 'Identifier' of the item we want
+-- to 'require', in other words, the name of the item we want to add to the
+-- pipeline somehow.
+--
+-- > (b -> a -> c)
+--
+-- This is a function given by the user, specifying /how/ the two items shall be
+-- merged. @b@ is the output of the previous compiler, and @a@ is the item we
+-- just required -- the template. This means @c@ will be the final output of the
+-- 'require' combinator.
+--
+-- > Compiler b c
+--
+-- Indeed, we have now constructed a compiler which takes a @b@ and produces a
+-- @c@. This means that we have a linear pipeline again, thanks to the 'require'
+-- function. So, the 'require' function actually helps to reduce the complexity
+-- of Hakyll applications!
+--
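+-- As a final sketch, the illustration above might be written as follows (the
+-- names 'renderWithPandoc' and 'applyTemplate' are hypothetical, not part of
+-- this module):
+--
+-- > ... >>> renderWithPandoc
+-- >     >>> require "templates/fancy.html" (\page tpl -> applyTemplate tpl page)
+-- >     >>> ...
+--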
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Compiler
+ ( Compiler
+ , runCompiler
+ , getIdentifier
+ , getRoute
+ , getRouteFor
+ , getResourceString
+ , fromDependency
+ , require_
+ , require
+ , requireA
+ , requireAll_
+ , requireAll
+ , requireAllA
+ , cached
+ , unsafeCompiler
+ , traceShowCompiler
+ , mapCompiler
+ , timedCompiler
+ , byExtension
+ ) where
+
+import Prelude hiding ((.), id)
+import Control.Arrow ((>>>), (&&&), arr)
+import Control.Applicative ((<$>))
+import Control.Monad.Reader (ask)
+import Control.Monad.Trans (liftIO)
+import Control.Category (Category, (.), id)
+import Data.Maybe (fromMaybe)
+import System.FilePath (takeExtension)
+
+import Data.Binary (Binary)
+import Data.Typeable (Typeable)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Store
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.Logger
+
+-- | Run a compiler, yielding the resulting target and its dependencies. This
+-- version of 'runCompilerJob' also stores the result
+--
+runCompiler :: Compiler () CompileRule -- ^ Compiler to run
+ -> Identifier -- ^ Target identifier
+ -> ResourceProvider -- ^ Resource provider
+ -> Routes -- ^ Route
+ -> Store -- ^ Store
+ -> Bool -- ^ Was the resource modified?
+ -> Logger -- ^ Logger
+ -> IO CompileRule -- ^ Resulting item
+runCompiler compiler identifier provider routes store modified logger = do
+ -- Run the compiler job
+ result <-
+ runCompilerJob compiler identifier provider routes store modified logger
+
+ -- Inspect the result
+ case result of
+ -- In case we compiled an item, we will store a copy in the cache first,
+ -- before we return control. This makes sure the compiled item can later
+ -- be accessed by e.g. require.
+ CompileRule (CompiledItem x) ->
+ storeSet store "Hakyll.Core.Compiler.runCompiler" identifier x
+
+ -- Otherwise, we do nothing here
+ _ -> return ()
+
+ return result
+
+-- | Get the identifier of the item that is currently being compiled
+--
+getIdentifier :: Compiler a Identifier
+getIdentifier = fromJob $ const $ CompilerM $ compilerIdentifier <$> ask
+
+-- | Get the route we are using for this item
+--
+getRoute :: Compiler a (Maybe FilePath)
+getRoute = getIdentifier >>> getRouteFor
+
+-- | Get the route for a specified item
+--
+getRouteFor :: Compiler Identifier (Maybe FilePath)
+getRouteFor = fromJob $ \identifier -> CompilerM $ do
+ routes <- compilerRoutes <$> ask
+ return $ runRoutes routes identifier
+
+-- | Get the resource we are compiling as a string
+--
+getResourceString :: Compiler Resource String
+getResourceString = fromJob $ \resource -> CompilerM $ do
+ provider <- compilerResourceProvider <$> ask
+ liftIO $ resourceString provider resource
+
+-- | Auxiliary: get a dependency
+--
+getDependency :: (Binary a, Writable a, Typeable a)
+ => Identifier -> CompilerM a
+getDependency identifier = CompilerM $ do
+ store <- compilerStore <$> ask
+ fmap (fromMaybe error') $ liftIO $
+ storeGet store "Hakyll.Core.Compiler.runCompiler" identifier
+ where
+ error' = error $ "Hakyll.Core.Compiler.getDependency: "
+ ++ show identifier
+ ++ " not found in the cache, the cache might be corrupted or"
+ ++ " the item you are referring to might not exist"
+
+
+-- | Variant of 'require' which drops the current value
+--
+require_ :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> Compiler b a
+require_ identifier =
+ fromDependency identifier >>> fromJob (const $ getDependency identifier)
+
+-- | Require another target. Using this function ensures automatic handling of
+-- dependencies
+--
+require :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> (b -> a -> c)
+ -> Compiler b c
+require identifier = requireA identifier . arr . uncurry
+
+-- | Arrow-based variant of 'require'
+--
+requireA :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> Compiler (b, a) c
+ -> Compiler b c
+requireA identifier = (id &&& require_ identifier >>>)
+
+-- | Variant of 'requireAll' which drops the current value
+--
+requireAll_ :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> Compiler b [a]
+requireAll_ pattern = fromDependencies (const getDeps) >>> fromJob requireAll_'
+ where
+ getDeps = matches pattern . map unResource . resourceList
+ requireAll_' = const $ CompilerM $ do
+ deps <- getDeps . compilerResourceProvider <$> ask
+ mapM (unCompilerM . getDependency) deps
+
+-- | Require a number of targets. Using this function ensures automatic handling
+-- of dependencies
+--
+requireAll :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> (b -> [a] -> c)
+ -> Compiler b c
+requireAll pattern = requireAllA pattern . arr . uncurry
+
+-- | Arrow-based variant of 'requireAll'
+--
+requireAllA :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> Compiler (b, [a]) c
+ -> Compiler b c
+requireAllA pattern = (id &&& requireAll_ pattern >>>)
+
+-- | Cache a compilation result in the store under the given name, reusing it
+-- as long as the underlying resource is unmodified
+--
+cached :: (Binary a, Typeable a, Writable a)
+ => String
+ -> Compiler Resource a
+ -> Compiler Resource a
+cached name (Compiler d j) = Compiler d $ const $ CompilerM $ do
+ logger <- compilerLogger <$> ask
+ identifier <- compilerIdentifier <$> ask
+ store <- compilerStore <$> ask
+ modified <- compilerResourceModified <$> ask
+ report logger $ "Checking cache: " ++ if modified then "modified" else "OK"
+ if modified
+ then do v <- unCompilerM $ j $ Resource identifier
+ liftIO $ storeSet store name identifier v
+ return v
+ else do v <- liftIO $ storeGet store name identifier
+ case v of Just v' -> return v'
+ Nothing -> error'
+ where
+ error' = error "Hakyll.Core.Compiler.cached: Cache corrupt!"
+
+-- | Create an unsafe compiler from a function in IO
+--
+unsafeCompiler :: (a -> IO b) -- ^ Function to lift
+ -> Compiler a b -- ^ Resulting compiler
+unsafeCompiler f = fromJob $ CompilerM . liftIO . f
+
+-- | Compiler for debugging purposes
+--
+traceShowCompiler :: Show a => Compiler a a
+traceShowCompiler = fromJob $ \x -> CompilerM $ do
+ logger <- compilerLogger <$> ask
+ report logger $ show x
+ return x
+
+-- | Map over a compiler
+--
+mapCompiler :: Compiler a b
+ -> Compiler [a] [b]
+mapCompiler (Compiler d j) = Compiler d $ mapM j
+
+-- | Log and time a compiler
+--
+timedCompiler :: String -- ^ Message
+ -> Compiler a b -- ^ Compiler to time
+ -> Compiler a b -- ^ Resulting compiler
+timedCompiler msg (Compiler d j) = Compiler d $ \x -> CompilerM $ do
+ logger <- compilerLogger <$> ask
+ timed logger msg $ unCompilerM $ j x
+
+-- | Choose a compiler by extension
+--
+-- Example:
+--
+-- > route "css/*" $ setExtension "css"
+-- > compile "css/*" $ byExtension (error "Not a (S)CSS file")
+-- > [ (".css", compressCssCompiler)
+-- > , (".scss", sass)
+-- > ]
+--
+-- This piece of code will select the @compressCssCompiler@ for @.css@ files,
+-- and the @sass@ compiler (defined elsewhere) for @.scss@ files.
+--
+byExtension :: Compiler a b -- ^ Default compiler
+ -> [(String, Compiler a b)] -- ^ Choices
+ -> Compiler a b -- ^ Resulting compiler
+byExtension defaultCompiler choices = Compiler deps job
+ where
+    -- Look up the compiler; fall back to the default when none is found
+ lookup' identifier =
+ let extension = takeExtension $ toFilePath identifier
+ in fromMaybe defaultCompiler $ lookup extension choices
+ -- Collect the dependencies of the choice
+ deps = do
+ identifier <- dependencyIdentifier <$> ask
+ compilerDependencies $ lookup' identifier
+ -- Collect the job of the choice
+ job x = CompilerM $ do
+ identifier <- compilerIdentifier <$> ask
+ unCompilerM $ compilerJob (lookup' identifier) x
diff --git a/src/Hakyll/Core/Compiler/Internal.hs b/src/Hakyll/Core/Compiler/Internal.hs
new file mode 100644
index 0000000..53df044
--- /dev/null
+++ b/src/Hakyll/Core/Compiler/Internal.hs
@@ -0,0 +1,146 @@
+-- | Internally used compiler module
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Compiler.Internal
+ ( Dependencies
+ , DependencyEnvironment (..)
+ , CompilerEnvironment (..)
+ , CompilerM (..)
+ , Compiler (..)
+ , runCompilerJob
+ , runCompilerDependencies
+ , fromJob
+ , fromDependencies
+ , fromDependency
+ ) where
+
+import Prelude hiding ((.), id)
+import Control.Applicative (Applicative, pure, (<*>), (<$>))
+import Control.Monad.Reader (ReaderT, Reader, ask, runReaderT, runReader)
+import Control.Monad ((<=<), liftM2)
+import Data.Set (Set)
+import qualified Data.Set as S
+import Control.Category (Category, (.), id)
+import Control.Arrow (Arrow, ArrowChoice, arr, first, left)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Store
+import Hakyll.Core.Routes
+import Hakyll.Core.Logger
+
+-- | A set of dependencies
+--
+type Dependencies = Set Identifier
+
+-- | Environment in which the dependency analyzer runs
+--
+data DependencyEnvironment = DependencyEnvironment
+ { -- | Target identifier
+ dependencyIdentifier :: Identifier
+ , -- | Resource provider
+ dependencyResourceProvider :: ResourceProvider
+ }
+
+-- | Environment in which a compiler runs
+--
+data CompilerEnvironment = CompilerEnvironment
+ { -- | Target identifier
+ compilerIdentifier :: Identifier
+ , -- | Resource provider
+ compilerResourceProvider :: ResourceProvider
+ , -- | Site routes
+ compilerRoutes :: Routes
+ , -- | Compiler store
+ compilerStore :: Store
+ , -- | Flag indicating if the underlying resource was modified
+ compilerResourceModified :: Bool
+ , -- | Logger
+ compilerLogger :: Logger
+ }
+
+-- | The compiler monad
+--
+newtype CompilerM a = CompilerM
+ { unCompilerM :: ReaderT CompilerEnvironment IO a
+ } deriving (Monad, Functor, Applicative)
+
+-- | The compiler arrow
+--
+data Compiler a b = Compiler
+ { compilerDependencies :: Reader DependencyEnvironment Dependencies
+ , compilerJob :: a -> CompilerM b
+ }
+
+instance Functor (Compiler a) where
+ fmap f ~(Compiler d j) = Compiler d $ fmap f . j
+
+instance Applicative (Compiler a) where
+ pure = Compiler (return S.empty) . const . return
+ ~(Compiler d1 f) <*> ~(Compiler d2 j) =
+ Compiler (liftM2 S.union d1 d2) $ \x -> f x <*> j x
+
+instance Category Compiler where
+ id = Compiler (return S.empty) return
+ ~(Compiler d1 j1) . ~(Compiler d2 j2) =
+ Compiler (liftM2 S.union d1 d2) (j1 <=< j2)
+
+instance Arrow Compiler where
+ arr f = Compiler (return S.empty) (return . f)
+ first ~(Compiler d j) = Compiler d $ \(x, y) -> do
+ x' <- j x
+ return (x', y)
+
+instance ArrowChoice Compiler where
+ left ~(Compiler d j) = Compiler d $ \e -> case e of
+ Left l -> Left <$> j l
+ Right r -> Right <$> return r
+
+-- | Run a compiler, yielding the resulting target and its dependencies
+--
+runCompilerJob :: Compiler () a -- ^ Compiler to run
+ -> Identifier -- ^ Target identifier
+ -> ResourceProvider -- ^ Resource provider
+ -> Routes -- ^ Route
+ -> Store -- ^ Store
+ -> Bool -- ^ Was the resource modified?
+ -> Logger -- ^ Logger
+ -> IO a
+runCompilerJob compiler identifier provider route store modified logger =
+ runReaderT (unCompilerM $ compilerJob compiler ()) env
+ where
+ env = CompilerEnvironment
+ { compilerIdentifier = identifier
+ , compilerResourceProvider = provider
+ , compilerRoutes = route
+ , compilerStore = store
+ , compilerResourceModified = modified
+ , compilerLogger = logger
+ }
+
+-- | Run the dependency analysis of a compiler, yielding its dependency set
+--
+runCompilerDependencies :: Compiler () a
+ -> Identifier
+ -> ResourceProvider
+ -> Dependencies
+runCompilerDependencies compiler identifier provider =
+ runReader (compilerDependencies compiler) env
+ where
+ env = DependencyEnvironment
+ { dependencyIdentifier = identifier
+ , dependencyResourceProvider = provider
+ }
+
+-- | Construct a compiler from a job function; it reports no dependencies
+--
+fromJob :: (a -> CompilerM b)
+ -> Compiler a b
+fromJob = Compiler (return S.empty)
+
+-- | Construct a compiler that only adds dependencies and passes its input
+-- through unchanged
+--
+fromDependencies :: (Identifier -> ResourceProvider -> [Identifier])
+ -> Compiler b b
+fromDependencies collectDeps = flip Compiler return $ do
+ DependencyEnvironment identifier provider <- ask
+ return $ S.fromList $ collectDeps identifier provider
+
+-- | Wait until another compiler has finished before running this compiler
+--
+fromDependency :: Identifier -> Compiler a a
+fromDependency = fromDependencies . const . const . return
diff --git a/src/Hakyll/Core/Configuration.hs b/src/Hakyll/Core/Configuration.hs
new file mode 100644
index 0000000..242b68f
--- /dev/null
+++ b/src/Hakyll/Core/Configuration.hs
@@ -0,0 +1,44 @@
+-- | Exports a data structure for the top-level hakyll configuration
+--
+module Hakyll.Core.Configuration
+ ( HakyllConfiguration (..)
+ , defaultHakyllConfiguration
+ ) where
+
+import System.FilePath (takeFileName)
+import Data.List (isPrefixOf, isSuffixOf)
+
+data HakyllConfiguration = HakyllConfiguration
+    { -- | Directory in which the output is written
+ destinationDirectory :: FilePath
+ , -- | Directory where hakyll's internal store is kept
+ storeDirectory :: FilePath
+ , -- | Function to determine ignored files
+ --
+ -- In 'defaultHakyllConfiguration', the following files are ignored:
+ --
+ -- * files starting with a @.@
+ --
+ -- * files ending with a @~@
+ --
+ -- * files ending with @.swp@
+ --
+ ignoreFile :: FilePath -> Bool
+ }
+
+-- | Default configuration for a hakyll application
+--
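+-- Individual fields can be overridden using record update syntax, for example
+-- (a sketch):
+--
+-- > defaultHakyllConfiguration { destinationDirectory = "out" }
+--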
+defaultHakyllConfiguration :: HakyllConfiguration
+defaultHakyllConfiguration = HakyllConfiguration
+ { destinationDirectory = "_site"
+ , storeDirectory = "_cache"
+ , ignoreFile = ignoreFile'
+ }
+ where
+ ignoreFile' path
+ | "." `isPrefixOf` fileName = True
+ | "~" `isSuffixOf` fileName = True
+ | ".swp" `isSuffixOf` fileName = True
+ | otherwise = False
+ where
+ fileName = takeFileName path
diff --git a/src/Hakyll/Core/CopyFile.hs b/src/Hakyll/Core/CopyFile.hs
new file mode 100644
index 0000000..dbbaaa1
--- /dev/null
+++ b/src/Hakyll/Core/CopyFile.hs
@@ -0,0 +1,29 @@
+-- | Exports simple compilers to just copy files
+--
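+-- In the rules DSL, 'copyFileCompiler' is typically paired with 'idRoute'
+-- (a sketch):
+--
+-- > route "images/*" idRoute
+-- > compile "images/*" copyFileCompiler
+--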
+{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
+module Hakyll.Core.CopyFile
+ ( CopyFile (..)
+ , copyFileCompiler
+ ) where
+
+import Control.Arrow ((>>^))
+import System.Directory (copyFile)
+
+import Data.Typeable (Typeable)
+import Data.Binary (Binary)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Writable
+import Hakyll.Core.Compiler
+import Hakyll.Core.Identifier
+
+-- | Newtype wrapper around 'FilePath' which will copy the file directly
+--
+newtype CopyFile = CopyFile {unCopyFile :: FilePath}
+ deriving (Show, Eq, Ord, Binary, Typeable)
+
+instance Writable CopyFile where
+ write dst (CopyFile src) = copyFile src dst
+
+copyFileCompiler :: Compiler Resource CopyFile
+copyFileCompiler = getIdentifier >>^ CopyFile . toFilePath
diff --git a/src/Hakyll/Core/DirectedGraph.hs b/src/Hakyll/Core/DirectedGraph.hs
new file mode 100644
index 0000000..76a030b
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph.hs
@@ -0,0 +1,85 @@
+-- | Representation of a directed graph. In Hakyll, this is used for dependency
+-- tracking.
+--
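+-- For example, a two-node graph in which @a@ depends on @b@ could be built
+-- like this (a sketch, with nodes of type 'String'):
+--
+-- > fromList [("a", S.fromList ["b"]), ("b", S.empty)]
+--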
+module Hakyll.Core.DirectedGraph
+ ( DirectedGraph
+ , fromList
+ , member
+ , nodes
+ , neighbours
+ , reverse
+ , reachableNodes
+ , sanitize
+ ) where
+
+import Prelude hiding (reverse)
+import Data.Monoid (mconcat)
+import Data.Set (Set)
+import Data.Maybe (fromMaybe)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+import Hakyll.Core.DirectedGraph.Internal
+
+-- | Construction of directed graphs
+--
+fromList :: Ord a
+ => [(a, Set a)] -- ^ List of (node, reachable neighbours)
+ -> DirectedGraph a -- ^ Resulting directed graph
+fromList = DirectedGraph . M.fromList . map (\(t, d) -> (t, Node t d))
+
+-- | Check if a node lies in the given graph
+--
+member :: Ord a
+ => a -- ^ Node to check for
+ -> DirectedGraph a -- ^ Directed graph to check in
+ -> Bool -- ^ If the node lies in the graph
+member n = M.member n . unDirectedGraph
+
+-- | Get all nodes in the graph
+--
+nodes :: Ord a
+ => DirectedGraph a -- ^ Graph to get the nodes from
+ -> Set a -- ^ All nodes in the graph
+nodes = M.keysSet . unDirectedGraph
+
+-- | Get a set of reachable neighbours from a directed graph
+--
+neighbours :: Ord a
+ => a -- ^ Node to get the neighbours of
+ -> DirectedGraph a -- ^ Graph to search in
+ -> Set a -- ^ Set containing the neighbours
+neighbours x = fromMaybe S.empty . fmap nodeNeighbours
+ . M.lookup x . unDirectedGraph
+
+-- | Reverse a directed graph (i.e. flip all edges)
+--
+reverse :: Ord a
+ => DirectedGraph a
+ -> DirectedGraph a
+reverse = mconcat . map reverse' . M.toList . unDirectedGraph
+ where
+ reverse' (id', Node _ neighbours') = fromList $
+ zip (S.toList neighbours') $ repeat $ S.singleton id'
+
+-- | Find all reachable nodes from a given set of nodes in the directed graph
+--
+reachableNodes :: Ord a => Set a -> DirectedGraph a -> Set a
+reachableNodes set graph = reachable (setNeighbours set) set
+ where
+ reachable next visited
+ | S.null next = visited
+ | otherwise = reachable (sanitize' neighbours') (next `S.union` visited)
+ where
+ sanitize' = S.filter (`S.notMember` visited)
+ neighbours' = setNeighbours (sanitize' next)
+
+ setNeighbours = S.unions . map (`neighbours` graph) . S.toList
+
+-- | Remove all dangling pointers, i.e. references to nodes that do
+-- not actually exist in the graph.
+--
+sanitize :: Ord a => DirectedGraph a -> DirectedGraph a
+sanitize (DirectedGraph graph) = DirectedGraph $ M.map sanitize' graph
+ where
+ sanitize' (Node t n) = Node t $ S.filter (`M.member` graph) n
diff --git a/src/Hakyll/Core/DirectedGraph/DependencySolver.hs b/src/Hakyll/Core/DirectedGraph/DependencySolver.hs
new file mode 100644
index 0000000..54826ff
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/DependencySolver.hs
@@ -0,0 +1,70 @@
+-- | Given a dependency graph, this module provides a function that will
+-- generate an order in which the graph can be visited, so that all the
+-- dependencies of a given node have been visited before the node itself is
+-- visited.
+--
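+-- For example (a sketch, reusing 'fromList' from "Hakyll.Core.DirectedGraph"):
+--
+-- > solveDependencies (fromList [("a", S.fromList ["b"]), ("b", S.empty)])
+--
+-- yields @["b", "a"]@: the dependency @b@ is visited before @a@.
+--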
+module Hakyll.Core.DirectedGraph.DependencySolver
+ ( solveDependencies
+ ) where
+
+import Prelude
+import qualified Prelude as P
+import Data.Set (Set)
+import Data.Maybe (mapMaybe)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+import Hakyll.Core.DirectedGraph
+import Hakyll.Core.DirectedGraph.Internal
+
+-- | Solve a dependency graph. This function returns an order to run the
+-- different nodes
+--
+solveDependencies :: Ord a
+ => DirectedGraph a -- ^ Graph
+ -> [a] -- ^ Resulting plan
+solveDependencies = P.reverse . order [] [] S.empty
+
+-- | Produce a reversed order using a stack
+--
+order :: Ord a
+ => [a] -- ^ Temporary result
+ -> [Node a] -- ^ Backtrace stack
+ -> Set a -- ^ Items in the stack
+ -> DirectedGraph a -- ^ Graph
+ -> [a] -- ^ Ordered result
+order temp stack set graph@(DirectedGraph graph')
+ -- Empty graph - return our current result
+ | M.null graph' = temp
+ | otherwise = case stack of
+
+ -- Empty stack - pick a node, and add it to the stack
+ [] ->
+ let (tag, node) = M.findMin graph'
+ in order temp (node : stack) (S.insert tag set) graph
+
+ -- At least one item on the stack - continue using this item
+ (node : stackTail) ->
+ -- Check which dependencies are still in the graph
+ let tag = nodeTag node
+ deps = S.toList $ nodeNeighbours node
+ unsatisfied = mapMaybe (`M.lookup` graph') deps
+ in case unsatisfied of
+
+ -- All dependencies for node are satisfied, we can return it and
+ -- remove it from the graph
+ [] -> order (tag : temp) stackTail (S.delete tag set)
+ (DirectedGraph $ M.delete tag graph')
+
+ -- There is at least one dependency left. We need to solve that
+ -- one first...
+ (dep : _) -> if nodeTag dep `S.member` set
+ -- The dependency is already in our stack - cycle detected!
+ then cycleError
+ -- Continue with the dependency
+ else order temp (dep : node : stackTail)
+ (S.insert (nodeTag dep) set)
+ graph
+ where
+ cycleError = error $ "Hakyll.Core.DirectedGraph.DependencySolver.order: "
+ ++ "Cycle detected!" -- TODO: Dump cycle
diff --git a/src/Hakyll/Core/DirectedGraph/Dot.hs b/src/Hakyll/Core/DirectedGraph/Dot.hs
new file mode 100644
index 0000000..8289992
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/Dot.hs
@@ -0,0 +1,30 @@
+-- | Dump a directed graph in dot format. Used for debugging purposes
+--
+module Hakyll.Core.DirectedGraph.Dot
+ ( toDot
+ , writeDot
+ ) where
+
+import Hakyll.Core.DirectedGraph
+import qualified Data.Set as S
+
+-- | Convert a directed graph into dot format for debugging purposes
+--
+toDot :: Ord a
+ => (a -> String) -- ^ Convert nodes to dot names
+ -> DirectedGraph a -- ^ Graph to dump
+ -> String -- ^ Resulting string
+toDot showTag graph = unlines $ concat
+ [ return "digraph dependencies {"
+ , concatMap showNode (S.toList $ nodes graph)
+ , return "}"
+ ]
+ where
+ showNode node = map (showEdge node) $ S.toList $ neighbours node graph
+ showEdge x y = " \"" ++ showTag x ++ "\" -> \"" ++ showTag y ++ "\";"
+
+-- | Write out the @.dot@ file to a given file path. See 'toDot' for more
+-- information.
+--
+writeDot :: Ord a => FilePath -> (a -> String) -> DirectedGraph a -> IO ()
+writeDot path showTag = writeFile path . toDot showTag
diff --git a/src/Hakyll/Core/DirectedGraph/Internal.hs b/src/Hakyll/Core/DirectedGraph/Internal.hs
new file mode 100644
index 0000000..5b02ad6
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/Internal.hs
@@ -0,0 +1,43 @@
+-- | Internal structure of the DirectedGraph type. Not exported outside of the
+-- library.
+--
+module Hakyll.Core.DirectedGraph.Internal
+ ( Node (..)
+ , DirectedGraph (..)
+ ) where
+
+import Prelude hiding (reverse, filter)
+import Data.Monoid (Monoid, mempty, mappend)
+import Data.Set (Set)
+import Data.Map (Map)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+-- | A node in the directed graph
+--
+data Node a = Node
+ { nodeTag :: a -- ^ Tag identifying the node
+ , nodeNeighbours :: Set a -- ^ Edges starting at this node
+ } deriving (Show)
+
+-- | Append two nodes. Useful for joining graphs.
+--
+appendNodes :: Ord a => Node a -> Node a -> Node a
+appendNodes (Node t1 n1) (Node t2 n2)
+ | t1 /= t2 = error'
+ | otherwise = Node t1 (n1 `S.union` n2)
+ where
+ error' = error $ "Hakyll.Core.DirectedGraph.Internal.appendNodes: "
+ ++ "Appending differently tagged nodes"
+
+-- | Type used to represent a directed graph
+--
+newtype DirectedGraph a = DirectedGraph {unDirectedGraph :: Map a (Node a)}
+ deriving (Show)
+
+-- | Allow users to concatenate different graphs
+--
+instance Ord a => Monoid (DirectedGraph a) where
+ mempty = DirectedGraph M.empty
+ mappend (DirectedGraph m1) (DirectedGraph m2) = DirectedGraph $
+ M.unionWith appendNodes m1 m2
diff --git a/src/Hakyll/Core/Identifier.hs b/src/Hakyll/Core/Identifier.hs
new file mode 100644
index 0000000..16403e6
--- /dev/null
+++ b/src/Hakyll/Core/Identifier.hs
@@ -0,0 +1,59 @@
+-- | An identifier is a type used to uniquely identify a resource, target...
+--
+-- One can think of an identifier as something similar to a file path. An
+-- identifier is a path as well, with the different elements in the path
+-- separated by @/@ characters. Examples of identifiers are:
+--
+-- * @posts/foo.markdown@
+--
+-- * @index@
+--
+-- * @error/404@
+--
+-- The most important difference between an 'Identifier' and a file path is that
+-- the identifier for an item is not necessarily the file path.
+--
+-- For example, we could have an @index@ identifier, generated by Hakyll. The
+-- actual file path would be @index.html@, but we identify it using @index@.
+--
+-- @posts/foo.markdown@ could be an identifier of an item that is rendered to
+-- @posts/foo.html@. In this case, the identifier is the name of the source
+-- file of the page.
+--
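+-- Empty path elements are dropped when parsing, so for example:
+--
+-- > toFilePath (parseIdentifier "posts//foo.markdown") == "posts/foo.markdown"
+--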
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Identifier
+ ( Identifier (..)
+ , parseIdentifier
+ , toFilePath
+ ) where
+
+import Control.Arrow (second)
+import Data.Monoid (Monoid)
+
+import GHC.Exts (IsString, fromString)
+import System.FilePath (joinPath)
+
+-- | An identifier used to uniquely identify a value
+--
+newtype Identifier = Identifier {unIdentifier :: [String]}
+ deriving (Eq, Ord, Monoid)
+
+instance Show Identifier where
+ show = toFilePath
+
+instance IsString Identifier where
+ fromString = parseIdentifier
+
+-- | Parse an identifier from a string
+--
+parseIdentifier :: String -> Identifier
+parseIdentifier = Identifier . filter (not . null) . split'
+ where
+ split' [] = [[]]
+ split' str = let (pre, post) = second (drop 1) $ break (== '/') str
+ in pre : split' post
+
+-- | Convert an identifier to a relative 'FilePath'
+--
+toFilePath :: Identifier -> FilePath
+toFilePath = joinPath . unIdentifier
diff --git a/src/Hakyll/Core/Identifier/Pattern.hs b/src/Hakyll/Core/Identifier/Pattern.hs
new file mode 100644
index 0000000..7c88356
--- /dev/null
+++ b/src/Hakyll/Core/Identifier/Pattern.hs
@@ -0,0 +1,160 @@
+-- | Module providing pattern matching and capturing on 'Identifier's.
+--
+-- A very simple pattern could be, for example, @foo\/bar@. This pattern will
+-- only match the exact @foo\/bar@ identifier.
+--
+-- To match more than one identifier, there are different captures that one can
+-- use:
+--
+-- * @*@: matches exactly one element of an identifier;
+--
+-- * @**@: matches one or more elements of an identifier.
+--
+-- Some examples:
+--
+-- * @foo\/*@ will match @foo\/bar@ and @foo\/foo@, but not @foo\/bar\/qux@ nor
+-- @foo@;
+--
+-- * @**@ will match any non-empty identifier;
+--
+-- * @foo\/**@ will match @foo\/bar@ and @foo\/bar\/qux@, but not @bar\/foo@ nor
+-- @foo@;
+--
+-- A small warning: patterns are not globs. Using @foo\/*.markdown@ will not do
+-- what you probably intended, as it will only match the file which is literally
+-- called @foo\/*.markdown@. Remember that these captures only work on elements
+-- of identifiers as a whole; not on parts of these elements.
+--
+-- Furthermore, the 'match' function allows the user to access the elements
+-- captured by the @*@ and @**@ captures in the pattern.
+--
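+-- For example (a sketch, using the 'IsString' instances defined below with
+-- @OverloadedStrings@):
+--
+-- > match "posts/*" "posts/foo.markdown" == Just ["foo.markdown"]
+--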
+module Hakyll.Core.Identifier.Pattern
+ ( Pattern
+ , parsePattern
+ , match
+ , doesMatch
+ , matches
+ , fromCapture
+ , fromCaptureString
+ , fromCaptures
+ ) where
+
+import Data.List (intercalate)
+import Control.Monad (msum)
+import Data.Maybe (isJust)
+import Data.Monoid (mempty, mappend)
+
+import GHC.Exts (IsString, fromString)
+
+import Hakyll.Core.Identifier
+
+-- | One base element of a pattern
+--
+data PatternComponent = CaptureOne
+ | CaptureMany
+ | Literal String
+ deriving (Eq)
+
+instance Show PatternComponent where
+ show CaptureOne = "*"
+ show CaptureMany = "**"
+ show (Literal s) = s
+
+-- | Type that allows matching on identifiers
+--
+newtype Pattern = Pattern {unPattern :: [PatternComponent]}
+ deriving (Eq)
+
+instance Show Pattern where
+ show = intercalate "/" . map show . unPattern
+
+instance IsString Pattern where
+ fromString = parsePattern
+
+-- | Parse a pattern from a string
+--
+parsePattern :: String -> Pattern
+parsePattern = Pattern . map toPattern . unIdentifier . parseIdentifier
+ where
+ toPattern x | x == "*" = CaptureOne
+ | x == "**" = CaptureMany
+ | otherwise = Literal x
+
+-- | Match an identifier against a pattern, generating a list of captures
+--
+match :: Pattern -> Identifier -> Maybe [Identifier]
+match (Pattern p) (Identifier i) = fmap (map Identifier) $ match' p i
+
+-- | Check if an identifier matches a pattern
+--
+doesMatch :: Pattern -> Identifier -> Bool
+doesMatch p = isJust . match p
+
+-- | Given a list of identifiers, retain only those that match the given pattern
+--
+matches :: Pattern -> [Identifier] -> [Identifier]
+matches p = filter (doesMatch p)
+
+-- | Split a list at every possible point, generate a list of (init, tail) cases
+--
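+-- For example:
+--
+-- > splits "ab" == [("ab", ""), ("a", "b"), ("", "ab")]
+--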
+splits :: [a] -> [([a], [a])]
+splits ls = reverse $ splits' [] ls
+ where
+ splits' lx ly = (lx, ly) : case ly of
+ [] -> []
+ (y : ys) -> splits' (lx ++ [y]) ys
+
+-- | Internal version of 'match'
+--
+match' :: [PatternComponent] -> [String] -> Maybe [[String]]
+match' [] [] = Just [] -- An empty match
+match' [] _ = Nothing -- No match
+match' _ [] = Nothing -- No match
+match' (m : ms) (s : ss) = case m of
+ -- Take one string and one literal, fail on mismatch
+ Literal l -> if s == l then match' ms ss else Nothing
+ -- Take one string and one capture
+ CaptureOne -> fmap ([s] :) $ match' ms ss
+ -- Take one string, and one or many captures
+ CaptureMany ->
+ let take' (i, t) = fmap (i :) $ match' ms t
+ in msum $ map take' $ splits (s : ss)
+
+-- | Create an identifier from a pattern by filling in the captures with a given
+-- string
+--
+-- Example:
+--
+-- > fromCapture (parsePattern "tags/*") (parseIdentifier "foo")
+--
+-- Result:
+--
+-- > "tags/foo"
+--
+fromCapture :: Pattern -> Identifier -> Identifier
+fromCapture pattern = fromCaptures pattern . repeat
+
+-- | Simplified version of 'fromCapture' which takes a 'String' instead of an
+-- 'Identifier'
+--
+-- > fromCaptureString (parsePattern "tags/*") "foo"
+--
+-- Result:
+--
+-- > "tags/foo"
+--
+fromCaptureString :: Pattern -> String -> Identifier
+fromCaptureString pattern = fromCapture pattern . parseIdentifier
+
+-- | Create an identifier from a pattern by filling in the captures with the
+-- given list of strings
+--
+fromCaptures :: Pattern -> [Identifier] -> Identifier
+fromCaptures (Pattern []) _ = mempty
+fromCaptures (Pattern (m : ms)) [] = case m of
+ Literal l -> Identifier [l] `mappend` fromCaptures (Pattern ms) []
+ _ -> error $ "Hakyll.Core.Identifier.Pattern.fromCaptures: "
+ ++ "identifier list exhausted"
+fromCaptures (Pattern (m : ms)) ids@(i : is) = case m of
+ Literal l -> Identifier [l] `mappend` fromCaptures (Pattern ms) ids
+ _ -> i `mappend` fromCaptures (Pattern ms) is
diff --git a/src/Hakyll/Core/Logger.hs b/src/Hakyll/Core/Logger.hs
new file mode 100644
index 0000000..720dee0
--- /dev/null
+++ b/src/Hakyll/Core/Logger.hs
@@ -0,0 +1,90 @@
+-- | Produce pretty, thread-safe logs
+--
+{-# LANGUAGE BangPatterns #-}
+module Hakyll.Core.Logger
+ ( Logger
+ , makeLogger
+ , flushLogger
+ , section
+ , timed
+ , report
+ ) where
+
+import Control.Monad (forever)
+import Control.Monad.Trans (MonadIO, liftIO)
+import Control.Applicative ((<$>), (<*>))
+import Control.Concurrent (forkIO)
+import Control.Concurrent.Chan.Strict (Chan, newChan, readChan, writeChan)
+import Control.Concurrent.MVar.Strict (MVar, newEmptyMVar, takeMVar, putMVar)
+import Text.Printf (printf)
+
+import Data.Time (getCurrentTime, diffUTCTime)
+
+-- | Logger structure. Very complicated.
+--
+data Logger = Logger
+ { loggerChan :: Chan (Maybe String) -- Nothing marks the end
+ , loggerSync :: MVar () -- Used for sync on quit
+ }
+
+-- | Create a new logger
+--
+makeLogger :: IO Logger
+makeLogger = do
+ logger <- Logger <$> newChan <*> newEmptyMVar
+ _ <- forkIO $ loggerThread logger
+ return logger
+ where
+ loggerThread logger = forever $ do
+ msg <- readChan $ loggerChan logger
+ case msg of
+ -- Stop: sync
+ Nothing -> putMVar (loggerSync logger) ()
+ -- Print and continue
+ Just m -> putStrLn m
+
+-- | Flush the logger (blocks until flushed)
+--
+flushLogger :: Logger -> IO ()
+flushLogger logger = do
+ writeChan (loggerChan logger) Nothing
+ () <- takeMVar $ loggerSync logger
+ return ()
+
+-- | Send a raw message to the logger
+--
+message :: Logger -> String -> IO ()
+message logger = writeChan (loggerChan logger) . Just
+
+-- | Start a section in the log
+--
+section :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Section name
+ -> m () -- ^ No result
+section logger = liftIO . message logger
+
+-- | Execute a monadic action and log the duration
+--
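+-- For example (a sketch; 'loadTemplates' stands for any monadic action):
+--
+-- > timed logger "Loading templates" loadTemplates
+--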
+timed :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Message
+ -> m a -- ^ Action
+ -> m a -- ^ Timed and logged action
+timed logger msg action = do
+ start <- liftIO getCurrentTime
+ !result <- action
+ stop <- liftIO getCurrentTime
+ let diff = fromEnum $ diffUTCTime stop start
+ ms = diff `div` 10 ^ (9 :: Int)
+ formatted = printf " [%4dms] %s" ms msg
+ liftIO $ message logger formatted
+ return result
+
+-- | Log something at the same level as 'timed', but without the timing
+--
+report :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Message
+ -> m () -- ^ No result
+report logger msg = liftIO $ message logger $ " [ ] " ++ msg
diff --git a/src/Hakyll/Core/ResourceProvider.hs b/src/Hakyll/Core/ResourceProvider.hs
new file mode 100644
index 0000000..dcd4af0
--- /dev/null
+++ b/src/Hakyll/Core/ResourceProvider.hs
@@ -0,0 +1,75 @@
+-- | This module provides an API for resource providers. Resource providers
+-- allow Hakyll to get content from resources; the type of resource depends on
+-- the concrete instance.
+--
+-- A resource is represented by the 'Resource' type. This is basically just a
+-- newtype wrapper around 'Identifier' -- but it has an important effect: it
+-- guarantees that a resource with this identifier can be provided by one or
+-- more resource providers.
+--
+-- Therefore, it is not recommended to read files directly -- you should use the
+-- provided 'Resource' methods.
+--
+module Hakyll.Core.ResourceProvider
+ ( Resource (..)
+ , ResourceProvider (..)
+ , resourceExists
+ , resourceDigest
+ , resourceModified
+ ) where
+
+import Control.Monad ((<=<))
+import Data.Word (Word8)
+
+import qualified Data.ByteString.Lazy as LB
+import OpenSSL.Digest.ByteString.Lazy (digest)
+import OpenSSL.Digest (MessageDigest (MD5))
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Store
+
+-- | A resource
+--
+-- Invariant: the resource specified by the given identifier must exist
+--
+newtype Resource = Resource {unResource :: Identifier}
+ deriving (Eq, Show, Ord)
+
+-- | A value responsible for retrieving and listing resources
+--
+data ResourceProvider = ResourceProvider
+ { -- | A list of all resources this provider is able to provide
+ resourceList :: [Resource]
+ , -- | Retrieve a certain resource as string
+ resourceString :: Resource -> IO String
+ , -- | Retrieve a certain resource as lazy bytestring
+ resourceLazyByteString :: Resource -> IO LB.ByteString
+ }
+
+-- | Check if a given identifier has a resource
+--
+resourceExists :: ResourceProvider -> Identifier -> Bool
+resourceExists provider = flip elem $ map unResource $ resourceList provider
+
+-- | Retrieve a digest for a given resource
+--
+resourceDigest :: ResourceProvider -> Resource -> IO [Word8]
+resourceDigest provider = digest MD5 <=< resourceLazyByteString provider
+
+-- | Check if a resource was modified
+--
+resourceModified :: ResourceProvider -> Resource -> Store -> IO Bool
+resourceModified provider resource store = do
+ -- Get the latest seen digest from the store
+ lastDigest <- storeGet store itemName $ unResource resource
+ -- Calculate the digest for the resource
+ newDigest <- resourceDigest provider resource
+ -- Check digests
+ if Just newDigest == lastDigest
+ -- All is fine, not modified
+ then return False
+ -- Resource modified; store new digest
+ else do storeSet store itemName (unResource resource) newDigest
+ return True
+ where
+ itemName = "Hakyll.Core.ResourceProvider.resourceModified"
diff --git a/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs b/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs
new file mode 100644
index 0000000..0d89b21
--- /dev/null
+++ b/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs
@@ -0,0 +1,29 @@
+-- | A concrete 'ResourceProvider' that gets its resources from the filesystem
+--
+module Hakyll.Core.ResourceProvider.FileResourceProvider
+ ( fileResourceProvider
+ ) where
+
+import Control.Applicative ((<$>))
+
+import qualified Data.ByteString.Lazy as LB
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+import Hakyll.Core.Configuration
+
+-- | Create a filesystem-based 'ResourceProvider'
+--
+fileResourceProvider :: HakyllConfiguration -> IO ResourceProvider
+fileResourceProvider configuration = do
+ -- Retrieve a list of identifiers
+ list <- map parseIdentifier . filter (not . ignoreFile configuration) <$>
+ getRecursiveContents False "."
+
+ -- Construct a resource provider
+ return ResourceProvider
+ { resourceList = map Resource list
+ , resourceString = readFile . toFilePath . unResource
+ , resourceLazyByteString = LB.readFile . toFilePath . unResource
+ }
diff --git a/src/Hakyll/Core/Routes.hs b/src/Hakyll/Core/Routes.hs
new file mode 100644
index 0000000..fcab28d
--- /dev/null
+++ b/src/Hakyll/Core/Routes.hs
@@ -0,0 +1,136 @@
+-- | Once a target is compiled, the user usually wants to save it to the disk.
+-- This is where the 'Routes' type comes in; it determines where a certain
+-- target should be written.
+--
+-- Suppose we have an item @foo\/bar.markdown@. We can render this to
+-- @foo\/bar.html@ using:
+--
+-- > route "foo/bar.markdown" (setExtension ".html")
+--
+-- If we do not want to change the extension, we can use 'idRoute', the simplest
+-- route available:
+--
+-- > route "foo/bar.markdown" idRoute
+--
+-- That will route @foo\/bar.markdown@ to @foo\/bar.markdown@.
+--
+-- Note that the extension says nothing about the content! If you set the
+-- extension to @.html@, it is your own responsibility to ensure that the
+-- content is indeed HTML.
+--
+-- Finally, some special cases:
+--
+-- * If there is no route for an item, this item will not be routed, so it will
+-- not appear in your site directory.
+--
+-- * If an item matches multiple routes, the first rule will be chosen.
+--
+module Hakyll.Core.Routes
+ ( Routes
+ , runRoutes
+ , idRoute
+ , setExtension
+ , ifMatch
+ , customRoute
+ , gsubRoute
+ , composeRoutes
+ ) where
+
+import Data.Monoid (Monoid, mempty, mappend)
+import Control.Monad (mplus)
+import System.FilePath (replaceExtension)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Util.String
+
+-- | Type used for a route
+--
+newtype Routes = Routes {unRoutes :: Identifier -> Maybe FilePath}
+
+instance Monoid Routes where
+ mempty = Routes $ const Nothing
+ mappend (Routes f) (Routes g) = Routes $ \id' -> f id' `mplus` g id'
+
+-- | Apply a route to an identifier
+--
+runRoutes :: Routes -> Identifier -> Maybe FilePath
+runRoutes = unRoutes
+
+-- | A route that uses the identifier as filepath. For example, the target with
+-- ID @foo\/bar@ will be written to the file @foo\/bar@.
+--
+idRoute :: Routes
+idRoute = Routes $ Just . toFilePath
+
+-- | Set (or replace) the extension of a route.
+--
+-- Example:
+--
+-- > runRoutes (setExtension "html") "foo/bar"
+--
+-- Result:
+--
+-- > Just "foo/bar.html"
+--
+-- Example:
+--
+-- > runRoutes (setExtension "html") "posts/the-art-of-trolling.markdown"
+--
+-- Result:
+--
+-- > Just "posts/the-art-of-trolling.html"
+--
+setExtension :: String -> Routes
+setExtension extension = Routes $ fmap (`replaceExtension` extension)
+ . unRoutes idRoute
+
+-- | Modify a route: apply the route if the identifier matches the given
+-- pattern, fail otherwise.
+--
+ifMatch :: Pattern -> Routes -> Routes
+ifMatch pattern (Routes route) = Routes $ \id' ->
+ if doesMatch pattern id' then route id'
+ else Nothing
+
+-- | Create a custom route. This should almost always be used with 'ifMatch'.
+--
+customRoute :: (Identifier -> FilePath) -> Routes
+customRoute f = Routes $ Just . f
+
+-- | Create a gsub route
+--
+-- Example:
+--
+-- > runRoutes (gsubRoute "rss/" (const "")) "tags/rss/bar.xml"
+--
+-- Result:
+--
+-- > Just "tags/bar.xml"
+--
+gsubRoute :: String -- ^ Pattern
+ -> (String -> String) -- ^ Replacement
+ -> Routes -- ^ Resulting route
+gsubRoute pattern replacement = customRoute $
+ replaceAll pattern replacement . toFilePath
+
+-- | Compose routes so that @f `composeRoutes` g@ is more or less equivalent
+-- to @f >>> g@.
+--
+-- Example:
+--
+-- > let routes = gsubRoute "rss/" (const "") `composeRoutes` setExtension "xml"
+-- > in runRoutes routes "tags/rss/bar"
+--
+-- Result:
+--
+-- > Just "tags/bar.xml"
+--
+-- If the first route given fails, Hakyll will not apply the second route.
+--
+composeRoutes :: Routes -- ^ First route to apply
+ -> Routes -- ^ Second route to apply
+ -> Routes -- ^ Resulting route
+composeRoutes (Routes f) (Routes g) = Routes $ \i -> do
+ p <- f i
+ g $ parseIdentifier p
diff --git a/src/Hakyll/Core/Rules.hs b/src/Hakyll/Core/Rules.hs
new file mode 100644
index 0000000..eba3fb9
--- /dev/null
+++ b/src/Hakyll/Core/Rules.hs
@@ -0,0 +1,161 @@
+-- | This module provides a declarative DSL in which the user can specify the
+-- different rules used to run the compilers.
+--
+-- The convention is to just list all items in the 'RulesM' monad, routes and
+-- compilation rules.
+--
+-- A typical usage example would be:
+--
+-- > main = hakyll $ do
+-- > route "posts/*" (setExtension "html")
+-- > compile "posts/*" someCompiler
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings #-}
+module Hakyll.Core.Rules
+ ( RulesM
+ , Rules
+ , compile
+ , create
+ , route
+ , metaCompile
+ , metaCompileWith
+ ) where
+
+import Control.Applicative ((<$>))
+import Control.Monad.Writer (tell)
+import Control.Monad.Reader (ask)
+import Control.Arrow (second, (>>>), arr, (>>^))
+import Control.Monad.State (get, put)
+import Data.Monoid (mempty)
+import qualified Data.Set as S
+
+import Data.Typeable (Typeable)
+import Data.Binary (Binary)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.Util.Arrow
+
+-- | Add a route
+--
+tellRoute :: Routes -> Rules
+tellRoute route' = RulesM $ tell $ RuleSet route' mempty mempty
+
+-- | Add a number of compilers
+--
+tellCompilers :: (Binary a, Typeable a, Writable a)
+ => [(Identifier, Compiler () a)]
+ -> Rules
+tellCompilers compilers = RulesM $ tell $ RuleSet mempty compilers' mempty
+ where
+ compilers' = map (second boxCompiler) compilers
+ boxCompiler = (>>> arr compiledItem >>> arr CompileRule)
+
+-- | Add resources
+--
+tellResources :: [Resource]
+ -> Rules
+tellResources resources = RulesM $ tell $ RuleSet mempty mempty $ S.fromList resources
+
+-- | Add a compilation rule to the rules.
+--
+-- This instructs all resources matching the given pattern to be compiled using
+-- the given compiler. When no resources match the given pattern, nothing will
+-- happen. In this case, you might want to have a look at 'create'.
+--
+compile :: (Binary a, Typeable a, Writable a)
+ => Pattern -> Compiler Resource a -> Rules
+compile pattern compiler = RulesM $ do
+ identifiers <- matches pattern . map unResource . resourceList <$> ask
+ unRulesM $ do
+ tellCompilers $ flip map identifiers $ \identifier ->
+ (identifier, constA (Resource identifier) >>> compiler)
+ tellResources $ map Resource identifiers
+
+-- | Add a compilation rule
+--
+-- This sets a compiler for the given identifier. No resource is needed, since
+-- we are creating the item from scratch. This is useful if you want to create a
+-- page on your site that just takes content from other items -- but has no
+-- actual content itself.
+--
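+-- For example (a sketch; 'makeIndexPage' is a hypothetical function that
+-- builds a page from the required posts):
+--
+-- > create "index.html" $
+-- >     requireAll_ "posts/*" >>> arr makeIndexPage
+--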
+create :: (Binary a, Typeable a, Writable a)
+ => Identifier -> Compiler () a -> Rules
+create identifier compiler = tellCompilers [(identifier, compiler)]
+
+-- | Add a route.
+--
+-- This adds a route for all items matching the given pattern.
+--
+route :: Pattern -> Routes -> Rules
+route pattern route' = tellRoute $ ifMatch pattern route'
+
+-- | Apart from regular compilers, one is also able to specify metacompilers.
+-- Metacompilers are a special class of compilers: they are compilers which
+-- produce other compilers.
+--
+-- This is needed when the list of compilers depends on something we cannot know
+-- before actually running other compilers. The most typical example is when
+-- we have blog posts using tags.
+--
+-- Every post has a collection of tags. For example,
+--
+-- > post1: code, haskell
+-- > post2: code, random
+--
+-- Now, we want to create a list of posts for every tag. We cannot write this
+-- down in our 'Rules' DSL directly, since we don't know what tags the different
+-- posts will have -- we depend on information that will only be available when
+-- we are actually compiling the pages.
+--
+-- The solution is simple, using 'metaCompile', we can add a compiler that will
+-- parse the pages and produce the compilers needed for the different tag pages.
+--
+-- And indeed, we can see that the first argument to 'metaCompile' is a
+-- 'Compiler' which produces a list of ('Identifier', 'Compiler') pairs. The
+-- idea is simple: 'metaCompile' produces a list of compilers, and the
+-- corresponding identifiers.
+--
+-- For simple hakyll systems, there is no need for this construction. More
+-- formally, it is only needed when the content of one or more items determines
+-- which items must be rendered.
+--
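+-- A sketch for the tags example above (the helpers 'collectTags',
+-- 'tagIdentifier' and 'makeTagList' are hypothetical):
+--
+-- > metaCompile $ requireAll_ "posts/*"
+-- >     >>> arr (map (\t -> (tagIdentifier t, makeTagList t)) . collectTags)
+--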
+metaCompile :: (Binary a, Typeable a, Writable a)
+ => Compiler () [(Identifier, Compiler () a)]
+ -- ^ Compiler generating the other compilers
+ -> Rules
+ -- ^ Resulting rules
+metaCompile compiler = RulesM $ do
+ -- Create an identifier from the state
+ state <- get
+ let index = rulesMetaCompilerIndex state
+ id' = fromCaptureString "Hakyll.Core.Rules.metaCompile/*" (show index)
+
+ -- Update the state with a new identifier
+ put $ state {rulesMetaCompilerIndex = index + 1}
+
+    -- Fall back to 'metaCompileWith' with the generated identifier
+ unRulesM $ metaCompileWith id' compiler
+
+-- | Version of 'metaCompile' that allows you to specify a custom identifier for
+-- the metacompiler.
+--
+metaCompileWith :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -- ^ Identifier for this compiler
+ -> Compiler () [(Identifier, Compiler () a)]
+ -- ^ Compiler generating the other compilers
+ -> Rules
+ -- ^ Resulting rules
+metaCompileWith identifier compiler = RulesM $ tell $
+ RuleSet mempty compilers mempty
+ where
+ makeRule = MetaCompileRule . map (second box)
+    compilers = [(identifier, compiler >>> arr makeRule)]
+ box = (>>> fromDependency identifier >>^ CompileRule . compiledItem)
diff --git a/src/Hakyll/Core/Rules/Internal.hs b/src/Hakyll/Core/Rules/Internal.hs
new file mode 100644
index 0000000..2895257
--- /dev/null
+++ b/src/Hakyll/Core/Rules/Internal.hs
@@ -0,0 +1,75 @@
+-- | Internal rules module for types which are not exposed to the user
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Rules.Internal
+ ( CompileRule (..)
+ , RuleSet (..)
+ , RuleState (..)
+ , RulesM (..)
+ , Rules
+ , runRules
+ ) where
+
+import Control.Applicative (Applicative)
+import Control.Monad.Writer (WriterT, execWriterT)
+import Control.Monad.Reader (ReaderT, runReaderT)
+import Control.Monad.State (State, evalState)
+import Data.Monoid (Monoid, mempty, mappend)
+import Data.Set (Set)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.CompiledItem
+
+-- | Output of a compiler rule
+--
+-- * The compiler will produce a simple item. This is the most common case.
+--
+-- * The compiler will produce more compilers. These new compilers need to be
+-- added to the runtime if possible, since other items might depend upon them.
+--
+data CompileRule = CompileRule CompiledItem
+ | MetaCompileRule [(Identifier, Compiler () CompileRule)]
+
+-- | A collection of rules for the compilation process
+--
+data RuleSet = RuleSet
+ { -- | Routes used in the compilation structure
+ rulesRoutes :: Routes
+ , -- | Compilation rules
+ rulesCompilers :: [(Identifier, Compiler () CompileRule)]
+ , -- | A list of the used resources
+ rulesResources :: Set Resource
+ }
+
+instance Monoid RuleSet where
+ mempty = RuleSet mempty mempty mempty
+ mappend (RuleSet r1 c1 s1) (RuleSet r2 c2 s2) =
+ RuleSet (mappend r1 r2) (mappend c1 c2) (mappend s1 s2)
+
+-- | Rule state
+--
+data RuleState = RuleState
+ { rulesMetaCompilerIndex :: Int
+ } deriving (Show)
+
+-- | The monad used to compose rules
+--
+newtype RulesM a = RulesM
+ { unRulesM :: ReaderT ResourceProvider (WriterT RuleSet (State RuleState)) a
+ } deriving (Monad, Functor, Applicative)
+
+-- | Simplification of the RulesM type; usually, it will not return any
+-- result.
+--
+type Rules = RulesM ()
+
+-- | Run a Rules monad, resulting in a 'RuleSet'
+--
+runRules :: Rules -> ResourceProvider -> RuleSet
+runRules rules provider =
+ evalState (execWriterT $ runReaderT (unRulesM rules) provider) state
+ where
+ state = RuleState {rulesMetaCompilerIndex = 0}
diff --git a/src/Hakyll/Core/Run.hs b/src/Hakyll/Core/Run.hs
new file mode 100644
index 0000000..09864be
--- /dev/null
+++ b/src/Hakyll/Core/Run.hs
@@ -0,0 +1,207 @@
+-- | This is the module which binds it all together
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Run
+ ( run
+ ) where
+
+import Prelude hiding (reverse)
+import Control.Monad (filterM)
+import Control.Monad.Trans (liftIO)
+import Control.Applicative (Applicative, (<$>))
+import Control.Monad.Reader (ReaderT, runReaderT, ask)
+import Control.Monad.State.Strict (StateT, evalStateT, get, modify)
+import Control.Arrow ((&&&))
+import qualified Data.Map as M
+import Data.Monoid (mempty, mappend)
+import System.FilePath ((</>))
+import Data.Set (Set)
+import qualified Data.Set as S
+
+import Hakyll.Core.Routes
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+import Hakyll.Core.Compiler
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.ResourceProvider.FileResourceProvider
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.DirectedGraph
+import Hakyll.Core.DirectedGraph.DependencySolver
+import Hakyll.Core.Writable
+import Hakyll.Core.Store
+import Hakyll.Core.Configuration
+import Hakyll.Core.Logger
+
+-- | Run all rules needed, return the rule set used
+--
+run :: HakyllConfiguration -> Rules -> IO RuleSet
+run configuration rules = do
+ logger <- makeLogger
+
+ section logger "Initialising"
+ store <- timed logger "Creating store" $
+ makeStore $ storeDirectory configuration
+ provider <- timed logger "Creating provider" $
+ fileResourceProvider configuration
+
+ let ruleSet = runRules rules provider
+ compilers = rulesCompilers ruleSet
+
+ -- Extract the reader/state
+ reader = unRuntime $ addNewCompilers [] compilers
+ state' = runReaderT reader $ env logger ruleSet provider store
+
+ evalStateT state' state
+
+ -- Flush and return
+ flushLogger logger
+ return ruleSet
+ where
+ env logger ruleSet provider store = RuntimeEnvironment
+ { hakyllLogger = logger
+ , hakyllConfiguration = configuration
+ , hakyllRoutes = rulesRoutes ruleSet
+ , hakyllResourceProvider = provider
+ , hakyllStore = store
+ }
+
+ state = RuntimeState
+ { hakyllModified = S.empty
+ , hakyllGraph = mempty
+ }
+
+data RuntimeEnvironment = RuntimeEnvironment
+ { hakyllLogger :: Logger
+ , hakyllConfiguration :: HakyllConfiguration
+ , hakyllRoutes :: Routes
+ , hakyllResourceProvider :: ResourceProvider
+ , hakyllStore :: Store
+ }
+
+data RuntimeState = RuntimeState
+ { hakyllModified :: Set Identifier
+ , hakyllGraph :: DirectedGraph Identifier
+ }
+
+newtype Runtime a = Runtime
+ { unRuntime :: ReaderT RuntimeEnvironment (StateT RuntimeState IO) a
+ } deriving (Functor, Applicative, Monad)
+
+-- | Return a set of modified identifiers
+--
+modified :: ResourceProvider -- ^ Resource provider
+ -> Store -- ^ Store
+ -> [Identifier] -- ^ Identifiers to check
+ -> IO (Set Identifier) -- ^ Modified resources
+modified provider store ids = fmap S.fromList $ flip filterM ids $ \id' ->
+ if resourceExists provider id'
+ then resourceModified provider (Resource id') store
+ else return False
+
+-- | Add a number of compilers and continue using these compilers
+--
+addNewCompilers :: [(Identifier, Compiler () CompileRule)]
+ -- ^ Remaining compilers yet to be run
+ -> [(Identifier, Compiler () CompileRule)]
+ -- ^ Compilers to add
+ -> Runtime ()
+addNewCompilers oldCompilers newCompilers = Runtime $ do
+ -- Get some information
+ logger <- hakyllLogger <$> ask
+ section logger "Adding new compilers"
+ provider <- hakyllResourceProvider <$> ask
+ store <- hakyllStore <$> ask
+
+ let -- All compilers
+ compilers = oldCompilers ++ newCompilers
+
+ -- Get all dependencies for the compilers
+ dependencies = flip map compilers $ \(id', compiler) ->
+ let deps = runCompilerDependencies compiler id' provider
+ in (id', deps)
+
+ -- Create a compiler map (Id -> Compiler)
+ compilerMap = M.fromList compilers
+
+ -- Create the dependency graph
+ currentGraph = fromList dependencies
+
+ -- Find the old graph and append the new graph to it. This forms the
+ -- complete graph
+ completeGraph <- timed logger "Creating graph" $
+ mappend currentGraph . hakyllGraph <$> get
+
+ orderedCompilers <- timed logger "Solving dependencies" $ do
+ -- Check which items are up-to-date. This only needs to happen for the new
+ -- compilers
+ oldModified <- hakyllModified <$> get
+ newModified <- liftIO $ modified provider store $ map fst newCompilers
+
+ let modified' = oldModified `S.union` newModified
+
+ -- Find obsolete items. Every item that is reachable from a modified
+ -- item is considered obsolete. From these obsolete items, we are only
+ -- interested in ones that are in the current subgraph.
+ obsolete = S.filter (`member` currentGraph)
+ $ reachableNodes modified' $ reverse completeGraph
+
+ -- Solve the graph and retain only the obsolete items
+ ordered = filter (`S.member` obsolete) $ solveDependencies currentGraph
+
+ -- Update the state
+ modify $ updateState modified' completeGraph
+
+ -- Join the order with the compilers again
+ return $ map (id &&& (compilerMap M.!)) ordered
+
+ -- Now run the ordered list of compilers
+ unRuntime $ runCompilers orderedCompilers
+ where
+ -- Add the modified information for the new compilers
+ updateState modified' graph state = state
+ { hakyllModified = modified'
+ , hakyllGraph = graph
+ }
+
+runCompilers :: [(Identifier, Compiler () CompileRule)]
+ -- ^ Ordered list of compilers
+ -> Runtime ()
+ -- ^ No result
+runCompilers [] = return ()
+runCompilers ((id', compiler) : compilers) = Runtime $ do
+ -- Obtain information
+ logger <- hakyllLogger <$> ask
+ routes <- hakyllRoutes <$> ask
+ provider <- hakyllResourceProvider <$> ask
+ store <- hakyllStore <$> ask
+ modified' <- hakyllModified <$> get
+
+ section logger $ "Compiling " ++ show id'
+
+ let -- Check if the resource was modified
+ isModified = id' `S.member` modified'
+
+ -- Run the compiler
+ result <- timed logger "Total compile time" $ liftIO $
+ runCompiler compiler id' provider routes store isModified logger
+
+ case result of
+ -- Compile rule for one item, easy stuff
+ CompileRule compiled -> do
+ case runRoutes routes id' of
+ Nothing -> return ()
+ Just url -> timed logger ("Routing to " ++ url) $ do
+ destination <-
+ destinationDirectory . hakyllConfiguration <$> ask
+ let path = destination </> url
+ liftIO $ makeDirectories path
+ liftIO $ write path compiled
+
+ -- Continue for the remaining compilers
+ unRuntime $ runCompilers compilers
+
+ -- Metacompiler, slightly more complicated
+ MetaCompileRule newCompilers ->
+ -- Actually I was just kidding, it's not hard at all
+ unRuntime $ addNewCompilers compilers newCompilers
diff --git a/src/Hakyll/Core/Store.hs b/src/Hakyll/Core/Store.hs
new file mode 100644
index 0000000..12e33a7
--- /dev/null
+++ b/src/Hakyll/Core/Store.hs
@@ -0,0 +1,88 @@
+-- | A store for storing and retrieving items
+--
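+-- A short usage sketch (the directory, cache name and identifier are
+-- hypothetical; 'parseIdentifier' is assumed from "Hakyll.Core.Identifier"):
+--
+-- > example :: IO (Maybe String)
+-- > example = do
+-- >     store <- makeStore "_store"
+-- >     let ident = parseIdentifier "posts/foo.markdown"
+-- >     storeSet store "Example.cache" ident "some body"
+-- >     storeGet store "Example.cache" ident
+--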
+module Hakyll.Core.Store
+ ( Store
+ , makeStore
+ , storeSet
+ , storeGet
+ ) where
+
+import Control.Concurrent.MVar (MVar, newMVar, readMVar, modifyMVar_)
+import System.FilePath ((</>))
+import System.Directory (doesFileExist)
+import Data.Map (Map)
+import qualified Data.Map as M
+
+import Data.Binary (Binary, encodeFile, decodeFile)
+import Data.Typeable (Typeable)
+
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+
+-- | Data structure used for the store
+--
+data Store = Store
+ { -- | All items are stored on the filesystem
+ storeDirectory :: FilePath
+ , -- | And some items are also kept in-memory
+ storeMap :: MVar (Map FilePath CompiledItem)
+ }
+
+-- | Initialize the store
+--
+makeStore :: FilePath -> IO Store
+makeStore directory = do
+ mvar <- newMVar M.empty
+ return Store
+ { storeDirectory = directory
+ , storeMap = mvar
+ }
+
+-- | Auxiliary: add an item to the map
+--
+addToMap :: (Binary a, Typeable a, Writable a)
+ => Store -> FilePath -> a -> IO ()
+addToMap store path value =
+ modifyMVar_ (storeMap store) $ return . M.insert path (compiledItem value)
+
+-- | Create a path
+--
+makePath :: Store -> String -> Identifier -> FilePath
+makePath store name identifier =
+ storeDirectory store </> name </> toFilePath identifier </> ".hakyllstore"
+
+-- | Store an item
+--
+storeSet :: (Binary a, Typeable a, Writable a)
+ => Store -> String -> Identifier -> a -> IO ()
+storeSet store name identifier value = do
+ makeDirectories path
+ encodeFile path value
+ addToMap store path value
+ where
+ path = makePath store name identifier
+
+-- | Load an item
+--
+storeGet :: (Binary a, Typeable a, Writable a)
+ => Store -> String -> Identifier -> IO (Maybe a)
+storeGet store name identifier = do
+ -- First check the in-memory map
+ map' <- readMVar $ storeMap store
+ case M.lookup path map' of
+ -- Found in the in-memory map
+ Just c -> return $ Just $ unCompiledItem c
+ -- Not found in the map, try the filesystem
+ Nothing -> do
+ exists <- doesFileExist path
+ if not exists
+ -- Not found in the filesystem either
+ then return Nothing
+ -- Found in the filesystem
+ else do v <- decodeFile path
+ addToMap store path v
+ return $ Just v
+ where
+ path = makePath store name identifier
diff --git a/src/Hakyll/Core/UnixFilter.hs b/src/Hakyll/Core/UnixFilter.hs
new file mode 100644
index 0000000..ee4b6cd
--- /dev/null
+++ b/src/Hakyll/Core/UnixFilter.hs
@@ -0,0 +1,76 @@
+-- | A Compiler that supports unix filters.
+--
+module Hakyll.Core.UnixFilter
+ ( unixFilter
+ ) where
+
+import Control.Concurrent (forkIO)
+import System.IO (hPutStr, hClose, hGetContents)
+import System.Posix.Process (executeFile, forkProcess)
+import System.Posix.IO ( dupTo, createPipe, stdInput
+ , stdOutput, closeFd, fdToHandle
+ )
+
+import Hakyll.Core.Compiler
+
+-- | Use a unix filter as a compiler. For example, we could use the 'rev' program
+-- as a compiler.
+--
+-- > rev :: Compiler Resource String
+-- > rev = getResourceString >>> unixFilter "rev" []
+--
+-- A more realistic example: one can use this to call, for example, the sass
+-- compiler on CSS files. More information about sass can be found here:
+--
+-- <http://sass-lang.com/>
+--
+-- The code is fairly straightforward, given that we use @.scss@ for sass:
+--
+-- > route "style.scss" $ setExtension "css"
+-- > compile "style.scss" $
+-- > getResourceString >>> unixFilter "sass" ["-s", "--scss"]
+-- > >>> arr compressCss
+--
+unixFilter :: String -- ^ Program name
+ -> [String] -- ^ Program args
+ -> Compiler String String -- ^ Resulting compiler
+unixFilter programName args =
+ timedCompiler ("Executing external program " ++ programName) $
+ unsafeCompiler $ \input -> unixFilterIO programName args input
+
+-- | Internally used function
+--
+unixFilterIO :: String
+ -> [String]
+ -> String
+ -> IO String
+unixFilterIO programName args input = do
+ -- Create pipes
+ (stdinRead, stdinWrite) <- createPipe
+ (stdoutRead, stdoutWrite) <- createPipe
+
+ -- Fork the child
+ _ <- forkProcess $ do
+ -- Copy our pipes over the regular stdin/stdout
+ _ <- dupTo stdinRead stdInput
+ _ <- dupTo stdoutWrite stdOutput
+
+ -- Close the now unneeded file descriptors in the child
+ mapM_ closeFd [stdinWrite, stdoutRead, stdinRead, stdoutWrite]
+
+ -- Execute the program
+ _ <- executeFile programName True args Nothing
+ return ()
+
+    -- On the parent side, close the child's ends of the pipes.
+ mapM_ closeFd [stdinRead, stdoutWrite]
+
+ -- Write the input to the child pipe
+ _ <- forkIO $ do
+ stdinWriteHandle <- fdToHandle stdinWrite
+ hPutStr stdinWriteHandle input
+ hClose stdinWriteHandle
+
+ -- Receive the output from the child
+ stdoutReadHandle <- fdToHandle stdoutRead
+ hGetContents stdoutReadHandle
diff --git a/src/Hakyll/Core/Util/Arrow.hs b/src/Hakyll/Core/Util/Arrow.hs
new file mode 100644
index 0000000..1896e11
--- /dev/null
+++ b/src/Hakyll/Core/Util/Arrow.hs
@@ -0,0 +1,25 @@
+-- | Various arrow utility functions
+--
+module Hakyll.Core.Util.Arrow
+ ( constA
+ , sequenceA
+ , unitA
+ ) where
+
+import Control.Arrow (Arrow, (&&&), arr, (>>^))
+
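+-- | An arrow that ignores its input and always yields the given constant
+--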
+constA :: Arrow a
+ => c
+ -> a b c
+constA = arr . const
+
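+-- | Run a list of arrows on a common input, collecting the results in order.
+-- A sketch, using the function instance of 'Arrow':
+--
+-- > sequenceA [(+ 1), (* 2)] 10 -- yields [11, 20]
+--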
+sequenceA :: Arrow a
+ => [a b c]
+ -> a b [c]
+sequenceA = foldr reduce $ constA []
+  where
+    -- Cons each result onto the results of the remaining arrows, so the
+    -- output list preserves the order of the input list
+    reduce xa la = xa &&& la >>^ uncurry (:)
+
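+-- | An arrow that discards its input and yields the unit value
+--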
+unitA :: Arrow a
+ => a b ()
+unitA = constA ()
diff --git a/src/Hakyll/Core/Util/File.hs b/src/Hakyll/Core/Util/File.hs
new file mode 100644
index 0000000..9babc8b
--- /dev/null
+++ b/src/Hakyll/Core/Util/File.hs
@@ -0,0 +1,90 @@
+-- | A module containing various file utility functions
+--
+module Hakyll.Core.Util.File
+ ( makeDirectories
+ , getRecursiveContents
+ , isFileObsolete
+ , isFileInternal
+ ) where
+
+import Control.Applicative ((<$>))
+import System.Time (ClockTime)
+import Control.Monad (forM, filterM)
+import Data.List (isPrefixOf)
+import System.Directory ( createDirectoryIfMissing, doesDirectoryExist
+ , doesFileExist, getModificationTime
+ , getDirectoryContents
+ )
+import System.FilePath ( normalise, takeDirectory, splitPath
+ , dropTrailingPathSeparator, (</>)
+ )
+
+import Hakyll.Core.Configuration
+
+-- | Given a path to a file, try to make the path writable by creating
+-- all missing directories on the path.
+--
+makeDirectories :: FilePath -> IO ()
+makeDirectories = createDirectoryIfMissing True . takeDirectory
+
+-- | Get all contents of a directory. Note that files starting with a dot (.)
+-- will be ignored.
+--
+getRecursiveContents :: Bool -- ^ Include directories?
+ -> FilePath -- ^ Directory to search
+ -> IO [FilePath] -- ^ List of files found
+getRecursiveContents includeDirs topdir = do
+ topdirExists <- doesDirectoryExist topdir
+ if not topdirExists
+ then return []
+ else do
+ names <- filter isProper <$> getDirectoryContents topdir
+ paths <- forM names $ \name -> do
+ let path = normalise $ topdir </> name
+ isDirectory <- doesDirectoryExist path
+ if isDirectory then getRecursiveContents includeDirs path
+ else return [path]
+ return $ if includeDirs then topdir : concat paths
+ else concat paths
+ where
+ isProper = not . (== ".") . take 1
+
+-- | Check if a timestamp is obsolete compared to the timestamps of a number of
+-- files. When there are no files, it is never obsolete.
+--
+isObsolete :: ClockTime -- ^ The time to check.
+ -> [FilePath] -- ^ Dependencies of the cached file.
+ -> IO Bool
+isObsolete _ [] = return False
+isObsolete timeStamp depends = do
+ depends' <- filterM doesFileExist depends
+ dependsModified <- mapM getModificationTime depends'
+ return (timeStamp < maximum dependsModified)
+
+-- | Check if a file is obsolete, given its dependencies. When the file does
+-- not exist, it is always obsolete. Otherwise, it is obsolete if any of its
+-- dependencies has a more recent modification time than the file.
+--
+isFileObsolete :: FilePath -- ^ The cached file
+ -> [FilePath] -- ^ Dependencies of the cached file
+ -> IO Bool
+isFileObsolete file depends = do
+ exists <- doesFileExist file
+ if not exists
+ then return True
+ else do timeStamp <- getModificationTime file
+ isObsolete timeStamp depends
+
+-- | Check if a file is meant for Hakyll internal use, i.e. if it is located in
+-- the destination or store directory
+--
+isFileInternal :: HakyllConfiguration -- ^ Configuration
+ -> FilePath -- ^ File to check
+ -> Bool -- ^ If the given file is internal
+isFileInternal configuration file =
+ any (`isPrefixOf` split file) dirs
+ where
+ split = map dropTrailingPathSeparator . splitPath
+ dirs = map (split . ($ configuration)) [ destinationDirectory
+ , storeDirectory
+ ]
diff --git a/src/Hakyll/Core/Util/String.hs b/src/Hakyll/Core/Util/String.hs
new file mode 100644
index 0000000..7f75a36
--- /dev/null
+++ b/src/Hakyll/Core/Util/String.hs
@@ -0,0 +1,48 @@
+-- | Miscellaneous string manipulation functions.
+--
+module Hakyll.Core.Util.String
+ ( trim
+ , replaceAll
+ , splitAll
+ ) where
+
+import Data.Char (isSpace)
+import Data.Maybe (listToMaybe)
+
+import Text.Regex.PCRE ((=~~))
+
+-- | Trim a string (drop spaces, tabs and newlines at both sides).
+--
+trim :: String -> String
+trim = reverse . trim' . reverse . trim'
+ where
+ trim' = dropWhile isSpace
+
+-- | A simple (but inefficient) regex replace function
+--
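+-- A quick sketch of its behaviour:
+--
+-- > replaceAll "[0-9]+" (const "N") "page123.html" -- yields "pageN.html"
+--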
+replaceAll :: String -- ^ Pattern
+ -> (String -> String) -- ^ Replacement (called on capture)
+ -> String -- ^ Source string
+ -> String -- ^ Result
+replaceAll pattern f source = replaceAll' source
+ where
+ replaceAll' src = case listToMaybe (src =~~ pattern) of
+ Nothing -> src
+ Just (o, l) ->
+ let (before, tmp) = splitAt o src
+ (capture, after) = splitAt l tmp
+ in before ++ f capture ++ replaceAll' after
+
+-- | A simple regex split function. The resulting list will contain no empty
+-- strings.
+--
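+-- A sketch:
+--
+-- > splitAll "," "a, b,c" -- yields ["a", " b", "c"]
+--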
+splitAll :: String -- ^ Pattern
+ -> String -- ^ String to split
+ -> [String] -- ^ Result
+splitAll pattern = filter (not . null) . splitAll'
+ where
+ splitAll' src = case listToMaybe (src =~~ pattern) of
+ Nothing -> [src]
+ Just (o, l) ->
+ let (before, tmp) = splitAt o src
+ in before : splitAll' (drop l tmp)
diff --git a/src/Hakyll/Core/Writable.hs b/src/Hakyll/Core/Writable.hs
new file mode 100644
index 0000000..a3fd421
--- /dev/null
+++ b/src/Hakyll/Core/Writable.hs
@@ -0,0 +1,22 @@
+-- | Describes writable items; items that can be saved to the disk
+--
+{-# LANGUAGE FlexibleInstances #-}
+module Hakyll.Core.Writable
+ ( Writable (..)
+ ) where
+
+import Data.Word (Word8)
+
+import qualified Data.ByteString as SB
+
+-- | Describes an item that can be saved to the disk
+--
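+-- A sketch of an instance for a hypothetical custom type (assuming 'toUpper'
+-- from "Data.Char"):
+--
+-- > newtype UpperCase = UpperCase String
+-- >
+-- > instance Writable UpperCase where
+-- >     write p (UpperCase s) = writeFile p (map toUpper s)
+--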
+class Writable a where
+ -- | Save an item to the given filepath
+ write :: FilePath -> a -> IO ()
+
+instance Writable [Char] where
+ write = writeFile
+
+instance Writable [Word8] where
+ write p = SB.writeFile p . SB.pack
diff --git a/src/Hakyll/Main.hs b/src/Hakyll/Main.hs
new file mode 100644
index 0000000..04b4cea
--- /dev/null
+++ b/src/Hakyll/Main.hs
@@ -0,0 +1,113 @@
+-- | Module providing the main hakyll function and command-line argument parsing
+--
+module Hakyll.Main
+ ( hakyll
+ , hakyllWith
+ ) where
+
+import Control.Concurrent (forkIO)
+import Control.Monad (when)
+import System.Environment (getProgName, getArgs)
+import System.Directory (doesDirectoryExist, removeDirectoryRecursive)
+
+import Hakyll.Core.Configuration
+import Hakyll.Core.Run
+import Hakyll.Core.Rules
+import Hakyll.Core.Rules.Internal
+import Hakyll.Web.Preview.Poll
+import Hakyll.Web.Preview.Server
+
+-- | This is usually the function with which the user runs the hakyll compiler
+--
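+-- A minimal site sketch (the pattern literals assume the @OverloadedStrings@
+-- extension):
+--
+-- > {-# LANGUAGE OverloadedStrings #-}
+-- > main :: IO ()
+-- > main = hakyll $ do
+-- >     route   "css/*" idRoute
+-- >     compile "css/*" compressCssCompiler
+--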
+hakyll :: Rules -> IO ()
+hakyll = hakyllWith defaultHakyllConfiguration
+
+-- | A variant of 'hakyll' which allows the user to specify a custom
+-- configuration
+--
+hakyllWith :: HakyllConfiguration -> Rules -> IO ()
+hakyllWith configuration rules = do
+ args <- getArgs
+ case args of
+ ["build"] -> build configuration rules
+ ["clean"] -> clean configuration
+ ["help"] -> help
+ ["preview"] -> preview configuration rules 8000
+ ["preview", p] -> preview configuration rules (read p)
+ ["rebuild"] -> rebuild configuration rules
+ ["server"] -> server configuration 8000
+ ["server", p] -> server configuration (read p)
+ _ -> help
+
+-- | Build the site
+--
+build :: HakyllConfiguration -> Rules -> IO ()
+build configuration rules = do
+ _ <- run configuration rules
+ return ()
+
+-- | Remove the output directories
+--
+clean :: HakyllConfiguration -> IO ()
+clean configuration = do
+ remove $ destinationDirectory configuration
+ remove $ storeDirectory configuration
+ where
+ remove dir = do
+ putStrLn $ "Removing " ++ dir ++ "..."
+ exists <- doesDirectoryExist dir
+ when exists $ removeDirectoryRecursive dir
+
+-- | Show usage information.
+--
+help :: IO ()
+help = do
+ name <- getProgName
+ mapM_ putStrLn
+ [ "ABOUT"
+ , ""
+ , "This is a Hakyll site generator program. You should always"
+ , "run it from the project root directory."
+ , ""
+ , "USAGE"
+ , ""
+ , name ++ " build Generate the site"
+ , name ++ " clean Clean up and remove cache"
+ , name ++ " help Show this message"
+ , name ++ " preview [port] Run a server and autocompile"
+ , name ++ " rebuild Clean up and build again"
+ , name ++ " server [port] Run a local test server"
+ ]
+
+-- | Preview the site
+--
+preview :: HakyllConfiguration -> Rules -> Int -> IO ()
+preview configuration rules port = do
+ -- Build once, keep the rule set
+ ruleSet <- run configuration rules
+
+ -- Get the resource list and a callback for the preview poll
+ let resources = rulesResources ruleSet
+ callback = build configuration rules
+
+ -- Fork a thread polling for changes
+ _ <- forkIO $ previewPoll configuration resources callback
+
+ -- Run the server in the main thread
+ server configuration port
+
+-- | Rebuild the site
+--
+rebuild :: HakyllConfiguration -> Rules -> IO ()
+rebuild configuration rules = do
+ clean configuration
+ build configuration rules
+
+-- | Start a server
+--
+server :: HakyllConfiguration -> Int -> IO ()
+server configuration port = do
+ let destination = destinationDirectory configuration
+ staticServer destination preServeHook port
+ where
+ preServeHook _ = return ()
diff --git a/src/Hakyll/Web/CompressCss.hs b/src/Hakyll/Web/CompressCss.hs
new file mode 100644
index 0000000..2df08fd
--- /dev/null
+++ b/src/Hakyll/Web/CompressCss.hs
@@ -0,0 +1,51 @@
+-- | Module used for CSS compression. The compression is currently fairly
+-- simple, but typically reduces the number of bytes by about 25%.
+--
+module Hakyll.Web.CompressCss
+ ( compressCssCompiler
+ , compressCss
+ ) where
+
+import Data.Char (isSpace)
+import Data.List (isPrefixOf)
+import Control.Arrow ((>>^))
+
+import Hakyll.Core.Compiler
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Util.String
+
+-- | Compiler form of 'compressCss'
+--
+compressCssCompiler :: Compiler Resource String
+compressCssCompiler = getResourceString >>^ compressCss
+
+-- | Compress CSS to speed up your site.
+--
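+-- A sketch of the effect:
+--
+-- > compressCss "div { color : red; }" -- yields "div{color:red}"
+--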
+compressCss :: String -> String
+compressCss = compressSeparators
+ . stripComments
+ . compressWhitespace
+
+-- | Compresses certain forms of separators.
+--
+compressSeparators :: String -> String
+compressSeparators = replaceAll "; *}" (const "}")
+ . replaceAll " *([{};:]) *" (take 1 . dropWhile isSpace)
+ . replaceAll ";;*" (const ";")
+
+-- | Compresses all whitespace.
+--
+compressWhitespace :: String -> String
+compressWhitespace = replaceAll "[ \t\n][ \t\n]*" (const " ")
+
+-- | Function that strips CSS comments away.
+--
+stripComments :: String -> String
+stripComments [] = []
+stripComments str
+ | isPrefixOf "/*" str = stripComments $ eatComments $ drop 2 str
+ | otherwise = head str : stripComments (drop 1 str)
+ where
+ eatComments str' | null str' = []
+ | isPrefixOf "*/" str' = drop 2 str'
+ | otherwise = eatComments $ drop 1 str'
diff --git a/src/Hakyll/Web/Feed.hs b/src/Hakyll/Web/Feed.hs
new file mode 100644
index 0000000..85674c6
--- /dev/null
+++ b/src/Hakyll/Web/Feed.hs
@@ -0,0 +1,124 @@
+-- | A module that allows easy rendering of RSS and Atom feeds.
+--
+-- The main rendering functions (@renderRss@, @renderAtom@) all assume that
+-- you pass the list of items so that the most recent entry in the feed is the
+-- first item in the list.
+--
+-- Also note that the pages should have (at least) the following fields to
+-- produce a correct feed:
+--
+-- - @$title@: Title of the item
+--
+-- - @$description@: Description to appear in the feed
+--
+-- - @$url@: URL to the item - this is usually set automatically.
+--
+-- In addition, the posts should be named according to the rules for
+-- 'Hakyll.Web.Page.Metadata.renderDateField'.
+--
+module Hakyll.Web.Feed
+ ( FeedConfiguration (..)
+ , renderRss
+ , renderAtom
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow ((>>>), arr, (&&&))
+import Control.Monad ((<=<))
+import Data.Maybe (fromMaybe, listToMaybe)
+
+import Hakyll.Core.Compiler
+import Hakyll.Web.Page
+import Hakyll.Web.Page.Metadata
+import Hakyll.Web.Template
+import Hakyll.Web.Template.Read.Hakyll (readTemplate)
+import Hakyll.Web.Util.Url
+
+import Paths_hakyll
+
+-- | This is a data structure to keep the configuration of a feed.
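+--
+-- A sketch of a possible configuration (all values are, of course,
+-- hypothetical):
+--
+-- > myFeedConfiguration :: FeedConfiguration
+-- > myFeedConfiguration = FeedConfiguration
+-- >     { feedTitle       = "All posts"
+-- >     , feedDescription = "A blog about static site generation"
+-- >     , feedAuthorName  = "John Doe"
+-- >     , feedRoot        = "http://example.com"
+-- >     }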
+data FeedConfiguration = FeedConfiguration
+ { -- | Title of the feed.
+ feedTitle :: String
+ , -- | Description of the feed.
+ feedDescription :: String
+ , -- | Name of the feed author.
+ feedAuthorName :: String
+ , -- | Absolute root URL of the feed site (e.g. @http://jaspervdj.be@)
+ feedRoot :: String
+ }
+
+-- | This is an auxiliary function to create a listing that is, in fact, a feed.
+-- The items should be sorted by date, and the @$timestamp@ field should be set.
+--
+createFeed :: Template -- ^ Feed template
+ -> Template -- ^ Item template
+ -> String -- ^ URL of the feed
+ -> FeedConfiguration -- ^ Feed configuration
+ -> [Page String] -- ^ Items to include
+ -> String -- ^ Resulting feed
+createFeed feedTemplate itemTemplate url configuration items =
+ pageBody $ applyTemplate feedTemplate
+ $ setField "timestamp" timestamp
+ $ setField "title" (feedTitle configuration)
+ $ setField "description" (feedDescription configuration)
+             $ setField "authorName" (feedAuthorName configuration)
+ $ setField "root" (feedRoot configuration)
+ $ setField "url" url
+ $ fromBody body
+ where
+ -- Preprocess items
+ items' = flip map items $ applyTemplate itemTemplate
+ . setField "root" (feedRoot configuration)
+
+ -- Body: concatenated items
+ body = concat $ map pageBody items'
+
+ -- Take the first timestamp, which should be the most recent
+ timestamp = fromMaybe "Unknown" $ do
+ p <- listToMaybe items
+ return $ getField "timestamp" p
+
+
+-- | Abstract function to render any feed.
+--
+renderFeed :: FilePath -- ^ Feed template
+ -> FilePath -- ^ Item template
+ -> FeedConfiguration -- ^ Feed configuration
+ -> Compiler [Page String] String -- ^ Feed compiler
+renderFeed feedTemplate itemTemplate configuration =
+ id &&& getRoute >>> renderFeed'
+ where
+ -- Arrow rendering the feed from the items and the URL
+ renderFeed' = unsafeCompiler $ \(items, url) -> do
+ feedTemplate' <- loadTemplate feedTemplate
+ itemTemplate' <- loadTemplate itemTemplate
+ let url' = toUrl $ fromMaybe noUrl url
+ return $ createFeed feedTemplate' itemTemplate' url' configuration items
+
+ -- Auxiliary: load a template from a datafile
+ loadTemplate = fmap readTemplate . readFile <=< getDataFileName
+
+ -- URL is required to have a valid field
+ noUrl = error "Hakyll.Web.Feed.renderFeed: no route specified"
+
+-- | Render an RSS feed with a number of items.
+--
+renderRss :: FeedConfiguration -- ^ Feed configuration
+ -> Compiler [Page String] String -- ^ Feed compiler
+renderRss configuration = arr (map renderDate)
+ >>> renderFeed "templates/rss.xml" "templates/rss-item.xml" configuration
+ where
+ renderDate = renderDateField "timestamp" "%a, %d %b %Y %H:%M:%S UT"
+ "No date found."
+
+-- | Render an Atom feed with a number of items.
+--
+renderAtom :: FeedConfiguration -- ^ Feed configuration
+ -> Compiler [Page String] String -- ^ Feed compiler
+renderAtom configuration = arr (map renderDate)
+ >>> renderFeed "templates/atom.xml" "templates/atom-item.xml" configuration
+ where
+ renderDate = renderDateField "timestamp" "%Y-%m-%dT%H:%M:%SZ"
+ "No date found."
diff --git a/src/Hakyll/Web/FileType.hs b/src/Hakyll/Web/FileType.hs
new file mode 100644
index 0000000..cd1188a
--- /dev/null
+++ b/src/Hakyll/Web/FileType.hs
@@ -0,0 +1,55 @@
+-- | A module dealing with common file extensions and associated file types.
+--
+module Hakyll.Web.FileType
+ ( FileType (..)
+ , fileType
+ , getFileType
+ ) where
+
+import System.FilePath (takeExtension)
+import Control.Arrow ((>>^))
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler
+
+-- | Datatype to represent the different file types Hakyll can deal with by
+-- default
+--
+data FileType
+ = Html
+ | LaTeX
+ | LiterateHaskell FileType
+ | Markdown
+ | Rst
+ | PlainText
+ | Css
+ | Binary
+ deriving (Eq, Ord, Show, Read)
+
+-- | Get the file type for a certain file. The type is determined by extension.
+--
+fileType :: FilePath -> FileType
+fileType = fileType' . takeExtension
+ where
+ fileType' ".htm" = Html
+ fileType' ".html" = Html
+ fileType' ".lhs" = LiterateHaskell Markdown
+ fileType' ".markdown" = Markdown
+ fileType' ".md" = Markdown
+ fileType' ".mdn" = Markdown
+ fileType' ".mdown" = Markdown
+ fileType' ".mdwn" = Markdown
+ fileType' ".mkd" = Markdown
+ fileType' ".mkdwn" = Markdown
+ fileType' ".page" = Markdown
+ fileType' ".rst" = Rst
+ fileType' ".tex" = LaTeX
+ fileType' ".text" = PlainText
+ fileType' ".txt" = PlainText
+ fileType' ".css" = Css
+ fileType' _ = Binary -- Treat unknown files as binary
+
+-- | Get the file type for the current file
+--
+getFileType :: Compiler a FileType
+getFileType = getIdentifier >>^ fileType . toFilePath
diff --git a/src/Hakyll/Web/Page.hs b/src/Hakyll/Web/Page.hs
new file mode 100644
index 0000000..955e1a8
--- /dev/null
+++ b/src/Hakyll/Web/Page.hs
@@ -0,0 +1,124 @@
+-- | A page is a key-value mapping, representing a page on your site
+--
+-- A page is an important concept in Hakyll. It is a key-value mapping, and has
+-- one field with an arbitrary type. A 'Page' thus consists of
+--
+-- * a key-value mapping (of the type @Map String String@);
+--
+-- * a value (of the type @a@).
+--
+-- Usually, the value will be a 'String' as well, containing the body of the
+-- page.
+--
+-- Pages can be constructed using Haskell, but they are usually parsed from a
+-- file. The file format for pages is pretty straightforward.
+--
+-- > This is a simple page
+-- > consisting of two lines.
+--
+-- This is a valid page with two lines. If we load this in Hakyll, there would
+-- be no metadata, and the body would be the given text. Let's look at a page
+-- with some metadata.
+--
+-- > ---
+-- > title: Alice's Adventures in Wonderland
+-- > author: Lewis Caroll
+-- > year: 1865
+-- > ---
+-- >
+-- > Chapter I
+-- > =========
+-- >
+-- > Down the Rabbit-Hole
+-- > --------------------
+-- >
+-- > Alice was beginning to get very tired of sitting by her sister on the bank,
+-- > and of having nothing to do: once or twice she had peeped into the book her
+-- > sister was reading, but it had no pictures or conversations in it, "and
+-- > what is the use of a book," thought Alice "without pictures or
+-- > conversation?"
+-- >
+-- > ...
+--
+-- As you can see, we construct a metadata header in Hakyll using @---@. Then,
+-- we simply list all @key: value@ pairs, and end with @---@ again. This page
+-- contains three metadata fields and a body. The body is given in markdown
+-- format, which can be easily rendered to HTML by Hakyll, using pandoc.
+--
+{-# LANGUAGE DeriveDataTypeable #-}
+module Hakyll.Web.Page
+ ( Page (..)
+ , fromBody
+ , fromMap
+ , toMap
+ , readPageCompiler
+ , pageCompiler
+ , addDefaultFields
+ , sortByBaseName
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow (arr, (>>^), (&&&), (>>>))
+import System.FilePath (takeBaseName, takeDirectory)
+import qualified Data.Map as M
+import Data.List (sortBy)
+import Data.Ord (comparing)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler
+import Hakyll.Core.ResourceProvider
+import Hakyll.Web.Page.Internal
+import Hakyll.Web.Page.Read
+import Hakyll.Web.Page.Metadata
+import Hakyll.Web.Pandoc
+import Hakyll.Web.Template
+import Hakyll.Web.Util.Url
+
+-- | Create a page from a body, without metadata
+--
+fromBody :: a -> Page a
+fromBody = Page M.empty
+
+-- | Read a page (do not render it)
+--
+readPageCompiler :: Compiler Resource (Page String)
+readPageCompiler = getResourceString >>^ readPage
+
+-- | Read a page, add default fields, substitute fields and render using pandoc
+--
+pageCompiler :: Compiler Resource (Page String)
+pageCompiler = cached "Hakyll.Web.Page.pageCompiler" $
+ readPageCompiler >>> addDefaultFields >>> arr applySelf >>> pageRenderPandoc
+
+-- | Add a number of default metadata fields to a page. These fields include:
+--
+-- * @$url@
+--
+-- * @$category@
+--
+-- * @$title@
+--
+-- * @$path@
+--
+addDefaultFields :: Compiler (Page a) (Page a)
+addDefaultFields = (getRoute &&& id >>^ uncurry addRoute)
+ >>> (getIdentifier &&& id >>^ uncurry addIdentifier)
+ where
+ -- Add root and url, based on route
+ addRoute Nothing = id
+ addRoute (Just r) = setField "url" (toUrl r)
+
+ -- Add title and category, based on identifier
+ addIdentifier i = setField "title" (takeBaseName p)
+ . setField "category" (takeBaseName $ takeDirectory p)
+ . setField "path" p
+ where
+ p = toFilePath i
+
+-- | Sort posts based on the basename of the post. This is equivalent to a
+-- chronological sort, because of the @year-month-day-title.extension@ naming
+-- convention in Hakyll.
+--
+sortByBaseName :: [Page a] -> [Page a]
+sortByBaseName = sortBy $ comparing $ takeBaseName . getField "path"
diff --git a/src/Hakyll/Web/Page/Internal.hs b/src/Hakyll/Web/Page/Internal.hs
new file mode 100644
index 0000000..55067ed
--- /dev/null
+++ b/src/Hakyll/Web/Page/Internal.hs
@@ -0,0 +1,50 @@
+-- | Internal representation of the page datatype
+--
+{-# LANGUAGE DeriveDataTypeable #-}
+module Hakyll.Web.Page.Internal
+ ( Page (..)
+ , fromMap
+ , toMap
+ ) where
+
+import Control.Applicative ((<$>), (<*>))
+import Data.Monoid (Monoid, mempty, mappend)
+
+import Data.Map (Map)
+import Data.Binary (Binary, get, put)
+import Data.Typeable (Typeable)
+import qualified Data.Map as M
+
+import Hakyll.Core.Writable
+
+-- | Type used to represent pages
+--
+data Page a = Page
+ { pageMetadata :: Map String String
+ , pageBody :: a
+ } deriving (Eq, Show, Typeable)
+
+instance Monoid a => Monoid (Page a) where
+ mempty = Page M.empty mempty
+ mappend (Page m1 b1) (Page m2 b2) =
+ Page (M.union m1 m2) (mappend b1 b2)
+
+instance Functor Page where
+ fmap f (Page m b) = Page m (f b)
+
+instance Binary a => Binary (Page a) where
+ put (Page m b) = put m >> put b
+ get = Page <$> get <*> get
+
+instance Writable a => Writable (Page a) where
+ write p (Page _ b) = write p b
+
+-- | Create a metadata page, without a body
+--
+fromMap :: Monoid a => Map String String -> Page a
+fromMap m = Page m mempty
+
+-- | Convert a page to a map. The body will be placed in the @body@ key.
+--
+toMap :: Page String -> Map String String
+toMap (Page m b) = M.insert "body" b m
diff --git a/src/Hakyll/Web/Page/Metadata.hs b/src/Hakyll/Web/Page/Metadata.hs
new file mode 100644
index 0000000..72742e6
--- /dev/null
+++ b/src/Hakyll/Web/Page/Metadata.hs
@@ -0,0 +1,131 @@
+-- | Provides various functions to manipulate the metadata fields of a page
+--
+module Hakyll.Web.Page.Metadata
+ ( getField
+ , getFieldMaybe
+ , setField
+ , setFieldA
+ , renderField
+ , changeField
+ , copyField
+ , renderDateField
+ , renderDateFieldWith
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow (Arrow, (>>>), (***), arr)
+import Data.List (intercalate)
+import Data.Maybe (fromMaybe)
+import Data.Time.Clock (UTCTime)
+import Data.Time.Format (parseTime, formatTime)
+import qualified Data.Map as M
+import System.FilePath (takeFileName)
+import System.Locale (TimeLocale, defaultTimeLocale)
+
+import Hakyll.Web.Page.Internal
+import Hakyll.Core.Util.String
+
+-- | Get a metadata field. If the field does not exist, the empty string is
+-- returned.
+--
+getField :: String -- ^ Key
+ -> Page a -- ^ Page
+ -> String -- ^ Value
+getField key = fromMaybe "" . getFieldMaybe key
+
+-- | Get a field in a 'Maybe' wrapper
+--
+getFieldMaybe :: String -- ^ Key
+ -> Page a -- ^ Page
+ -> Maybe String -- ^ Value, if found
+getFieldMaybe key = M.lookup key . pageMetadata
+
+-- | Add a metadata field. If the field already exists, it is not overwritten.
+--
+setField :: String -- ^ Key
+ -> String -- ^ Value
+ -> Page a -- ^ Page to add it to
+ -> Page a -- ^ Resulting page
+setField k v (Page m b) = Page (M.insertWith (flip const) k v m) b
+
+-- | Arrow-based variant of 'setField'. Because of its type, this function is
+-- very useful together with the different 'require' functions.
+--
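+-- A sketch, assuming a @requireA@-style combinator from
+-- "Hakyll.Core.Compiler" and a hypothetical @sidebar.markdown@ page:
+--
+-- > requireA "sidebar.markdown" (setFieldA "sidebar" $ arr pageBody)
+--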
+setFieldA :: Arrow a
+ => String -- ^ Key
+ -> a x String -- ^ Value arrow
+ -> a (Page b, x) (Page b) -- ^ Resulting arrow
+setFieldA k v = id *** v >>> arr (uncurry $ flip $ setField k)
+
+-- | Do something with a metadata value, but keep the old value as well. If the
+-- key given is not present in the metadata, nothing will happen. If the source
+-- and destination keys are the same, the value will be changed (but you should
+-- use 'changeField' for this purpose).
+--
+renderField :: String -- ^ Key of which the value should be copied
+ -> String -- ^ Key the value should be copied to
+ -> (String -> String) -- ^ Function to apply on the value
+ -> Page a -- ^ Page on which this should be applied
+ -> Page a -- ^ Resulting page
+renderField src dst f page = case M.lookup src (pageMetadata page) of
+ Nothing -> page
+ Just value -> setField dst (f value) page
+
+-- | Change a metadata value.
+--
+-- > import Data.Char (toUpper)
+-- > changeField "title" (map toUpper)
+--
+-- Will put the title in UPPERCASE.
+--
+changeField :: String -- ^ Key to change.
+ -> (String -> String) -- ^ Function to apply on the value.
+ -> Page a -- ^ Page to change
+ -> Page a -- ^ Resulting page
+changeField key = renderField key key
+
+-- | Make a copy of a metadata field (put the value belonging to a certain key
+-- under some other key as well)
+--
+copyField :: String -- ^ Key to copy
+ -> String -- ^ Destination to copy to
+ -> Page a -- ^ Page on which this should be applied
+ -> Page a -- ^ Resulting page
+copyField src dst = renderField src dst id
+
+-- | When the metadata has a field called @path@ in a
+-- @folder/yyyy-mm-dd-title.extension@ format (the convention for pages),
+-- this function can render the date.
+--
+-- > renderDateField "date" "%B %e, %Y" "Date unknown"
+--
+-- Will render something like @January 28, 2010@.
+--
+renderDateField :: String -- ^ Key in which the rendered date should be placed
+ -> String -- ^ Format to use on the date
+ -> String -- ^ Default value, in case the date cannot be parsed
+ -> Page a -- ^ Page on which this should be applied
+ -> Page a -- ^ Resulting page
+renderDateField = renderDateFieldWith defaultTimeLocale
+
+-- | This is an extended version of 'renderDateField' that allows you to
+-- specify a time locale that is used for outputting the date. For more
+-- details, see 'renderDateField'.
+--
+renderDateFieldWith :: TimeLocale -- ^ Output time locale
+ -> String -- ^ Destination key
+ -> String -- ^ Format to use on the date
+ -> String -- ^ Default value
+ -> Page a -- ^ Target page
+ -> Page a -- ^ Resulting page
+renderDateFieldWith locale key format defaultValue =
+ renderField "path" key renderDate'
+ where
+ renderDate' filePath = fromMaybe defaultValue $ do
+ let dateString = intercalate "-" $ take 3
+ $ splitAll "-" $ takeFileName filePath
+ time <- parseTime defaultTimeLocale
+ "%Y-%m-%d"
+ dateString :: Maybe UTCTime
+ return $ formatTime locale format time
diff --git a/src/Hakyll/Web/Page/Read.hs b/src/Hakyll/Web/Page/Read.hs
new file mode 100644
index 0000000..cf39ddd
--- /dev/null
+++ b/src/Hakyll/Web/Page/Read.hs
@@ -0,0 +1,60 @@
+-- | Module providing a function to parse a page from a file
+--
+module Hakyll.Web.Page.Read
+ ( readPage
+ ) where
+
+import Control.Applicative ((<$>), (<*>))
+import Control.Arrow (second, (***))
+import Control.Monad.State (State, get, put, evalState)
+import Data.List (isPrefixOf)
+import Data.Map (Map)
+import qualified Data.Map as M
+
+import Hakyll.Web.Page.Internal
+import Hakyll.Core.Util.String
+
+-- | We're using a simple state monad as our parser
+--
+type LineParser = State [String]
+
+-- | Read the metadata section from a page
+--
+parseMetadata :: LineParser (Map String String)
+parseMetadata = get >>= \content -> case content of
+ -- No lines means no metadata
+ [] -> return M.empty
+ -- Check if the file begins with a delimiter
+ (l : ls) -> if not (isPossibleDelimiter l)
+ then -- No delimiter means no metadata
+ return M.empty
+ else do -- Break the metadata section
+ let (metadata, rest) = second (drop 1) $ break (== l) ls
+ -- Put the rest back
+ put rest
+ -- Parse the metadata
+ return $ M.fromList $ map parseMetadata' metadata
+ where
+ -- Check if a line can be a delimiter
+ isPossibleDelimiter = isPrefixOf "---"
+
+    -- Parse a "key: value" string to a (key, value) tuple
+ parseMetadata' = (trim *** trim . drop 1) . break (== ':')
+
+-- | Read the body section of a page
+--
+parseBody :: LineParser String
+parseBody = do
+ body <- get
+ put []
+ return $ unlines body
+
+-- | Read an entire page
+--
+parsePage :: LineParser (Page String)
+parsePage = Page <$> parseMetadata <*> parseBody
+
+-- | Read a page from a string
+--
+readPage :: String -> Page String
+readPage = evalState parsePage . lines
diff --git a/src/Hakyll/Web/Pandoc.hs b/src/Hakyll/Web/Pandoc.hs
new file mode 100644
index 0000000..f225997
--- /dev/null
+++ b/src/Hakyll/Web/Pandoc.hs
@@ -0,0 +1,110 @@
+-- | Module exporting pandoc bindings
+--
+module Hakyll.Web.Pandoc
+ ( -- * The basic building blocks
+ readPandoc
+ , readPandocWith
+ , writePandoc
+ , writePandocWith
+
+ -- * Functions working on pages/compilers
+ , pageReadPandoc
+ , pageReadPandocWith
+ , pageRenderPandoc
+ , pageRenderPandocWith
+
+ -- * Default options
+ , defaultHakyllParserState
+ , defaultHakyllWriterOptions
+ ) where
+
+import Prelude hiding (id)
+import Control.Applicative ((<$>))
+import Control.Arrow ((>>^), (&&&))
+import Control.Category (id)
+
+import Text.Pandoc
+
+import Hakyll.Core.Compiler
+import Hakyll.Web.FileType
+import Hakyll.Web.Page.Internal
+
+-- | Read a string using pandoc, with the default options
+--
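+-- A sketch of the round trip through pandoc (the exact output depends on the
+-- pandoc version):
+--
+-- > writePandoc $ readPandoc Markdown "# Hello"
+-- > -- yields something like "<h1 id=\"hello\">Hello</h1>"
+--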
+readPandoc :: FileType -- ^ File type, determines how parsing happens
+ -> String -- ^ String to read
+ -> Pandoc -- ^ Resulting document
+readPandoc = readPandocWith defaultHakyllParserState
+
+-- | Read a string using pandoc, with the supplied options
+--
+readPandocWith :: ParserState -- ^ Parser options
+ -> FileType -- ^ File type, determines how parsing happens
+ -> String -- ^ String to read
+ -> Pandoc -- ^ Resulting document
+readPandocWith state fileType' = case fileType' of
+ Html -> readHtml state
+ LaTeX -> readLaTeX state
+ LiterateHaskell t -> readPandocWith state {stateLiterateHaskell = True} t
+ Markdown -> readMarkdown state
+ Rst -> readRST state
+ t -> error $
+ "Hakyll.Web.readPandocWith: I don't know how to read " ++ show t
+
+-- | Write a document (as HTML) using pandoc, with the default options
+--
+writePandoc :: Pandoc -- ^ Document to write
+ -> String -- ^ Resulting HTML
+writePandoc = writePandocWith defaultHakyllWriterOptions
+
+-- | Write a document (as HTML) using pandoc, with the supplied options
+--
+writePandocWith :: WriterOptions -- ^ Writer options for pandoc
+ -> Pandoc -- ^ Document to write
+ -> String -- ^ Resulting HTML
+writePandocWith = writeHtmlString
+
+-- | Read the resource using pandoc
+--
+pageReadPandoc :: Compiler (Page String) (Page Pandoc)
+pageReadPandoc = pageReadPandocWith defaultHakyllParserState
+
+-- | Read the resource using pandoc
+--
+pageReadPandocWith :: ParserState -> Compiler (Page String) (Page Pandoc)
+pageReadPandocWith state =
+ id &&& getFileType >>^ pageReadPandocWith'
+ where
+ pageReadPandocWith' (p, t) = readPandocWith state t <$> p
+
+-- | Render the resource using pandoc
+--
+pageRenderPandoc :: Compiler (Page String) (Page String)
+pageRenderPandoc =
+ pageRenderPandocWith defaultHakyllParserState defaultHakyllWriterOptions
+
+-- | Render the resource using pandoc
+--
+pageRenderPandocWith :: ParserState
+ -> WriterOptions
+ -> Compiler (Page String) (Page String)
+pageRenderPandocWith state options =
+ pageReadPandocWith state >>^ fmap (writePandocWith options)
+
+-- | The default reader options for pandoc parsing in hakyll
+--
+defaultHakyllParserState :: ParserState
+defaultHakyllParserState = defaultParserState
+ { -- The following option causes pandoc to read smart typography, a nice
+ -- and free bonus.
+ stateSmart = True
+ }
+
+-- | The default writer options for pandoc rendering in hakyll
+--
+defaultHakyllWriterOptions :: WriterOptions
+defaultHakyllWriterOptions = defaultWriterOptions
+ { -- This option causes literate haskell to be written using '>' marks in
+ -- html, which I think is a good default.
+ writerLiterateHaskell = True
+ }
diff --git a/src/Hakyll/Web/Preview/Server.hs b/src/Hakyll/Web/Preview/Server.hs
new file mode 100644
index 0000000..c550b69
--- /dev/null
+++ b/src/Hakyll/Web/Preview/Server.hs
@@ -0,0 +1,72 @@
+-- | Implements a basic static file server for previewing the site
+--
+{-# LANGUAGE OverloadedStrings #-}
+module Hakyll.Web.Preview.Server
+ ( staticServer
+ ) where
+
+import Control.Monad.Trans (liftIO)
+import Control.Applicative ((<$>))
+import Codec.Binary.UTF8.String
+import System.FilePath ((</>))
+import System.Directory (doesFileExist)
+
+import qualified Data.ByteString as SB
+import Snap.Util.FileServe (serveFile)
+import Snap.Types (Snap, rqURI, getRequest, writeBS)
+import Snap.Http.Server ( httpServe, setAccessLog, setErrorLog, addListen
+ , ConfigListen (..), emptyConfig
+ )
+
+import Hakyll.Core.Util.String (replaceAll)
+
+-- | The first file in the list that actually exists is returned
+--
+findFile :: [FilePath] -> IO (Maybe FilePath)
+findFile [] = return Nothing
+findFile (x : xs) = do
+ exists <- doesFileExist x
+ if exists then return (Just x) else findFile xs
+
+-- | Serve a given directory
+--
+static :: FilePath -- ^ Directory to serve
+ -> (FilePath -> IO ()) -- ^ Pre-serve hook
+ -> Snap ()
+static directory preServe = do
+ -- Obtain the path
+ uri <- rqURI <$> getRequest
+ let filePath = replaceAll "\\?$" (const "") -- Remove trailing ?
+ $ replaceAll "#[^#]*$" (const "") -- Remove #section
+ $ replaceAll "^/" (const "") -- Remove leading /
+ $ decode $ SB.unpack uri
+
+ -- Try to find the requested file
+ r <- liftIO $ findFile $ map (directory </>) $
+ [ filePath
+ , filePath </> "index.htm"
+ , filePath </> "index.html"
+ ]
+
+ case r of
+ -- Not found, error
+ Nothing -> writeBS "Not found"
+ -- Found, serve
+ Just f -> do
+ liftIO $ preServe f
+ serveFile f
+
+-- | Main method, runs a static server in the given directory
+--
+staticServer :: FilePath -- ^ Directory to serve
+ -> (FilePath -> IO ()) -- ^ Pre-serve hook
+ -> Int -- ^ Port to listen on
+ -> IO () -- ^ Blocks forever
+staticServer directory preServe port =
+ httpServe config $ static directory preServe
+ where
+ -- Snap server config
+ config = addListen (ListenHttp "0.0.0.0" port)
+ $ setAccessLog Nothing
+ $ setErrorLog Nothing
+ $ emptyConfig
diff --git a/src/Hakyll/Web/RelativizeUrls.hs b/src/Hakyll/Web/RelativizeUrls.hs
new file mode 100644
index 0000000..2de4a0e
--- /dev/null
+++ b/src/Hakyll/Web/RelativizeUrls.hs
@@ -0,0 +1,62 @@
+-- | This module exposes a function which can relativize URLs on a webpage.
+--
+-- This means that one can deploy the resulting site on
+-- @http:\/\/example.com\/@, but also on @http:\/\/example.com\/some-folder\/@
+-- without having to change anything (simply copy over the files).
+--
+-- To use it, you should use absolute URLs from the site root everywhere. For
+-- example, use
+--
+-- > <img src="/images/lolcat.png" alt="Funny zomgroflcopter" />
+--
+-- in a blogpost. When running this through the relativize URLs compiler, this
+-- will result in the following (supposing your blogpost is located at
+-- @\/posts\/foo.html@):
+--
+-- > <img src="../images/lolcat.png" alt="Funny zomgroflcopter" />
+--
+module Hakyll.Web.RelativizeUrls
+ ( relativizeUrlsCompiler
+ , relativizeUrls
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow ((&&&), (>>^))
+import Data.List (isPrefixOf)
+import qualified Data.Set as S
+
+import Text.HTML.TagSoup
+
+import Hakyll.Core.Compiler
+import Hakyll.Web.Page
+import Hakyll.Web.Util.Url
+
+-- | Compiler form of 'relativizeUrls' which automatically picks the right root
+-- path
+--
+relativizeUrlsCompiler :: Compiler (Page String) (Page String)
+relativizeUrlsCompiler = getRoute &&& id >>^ uncurry relativize
+ where
+ relativize Nothing = id
+ relativize (Just r) = fmap (relativizeUrls $ toSiteRoot r)
+
+-- | Relativize URLs in HTML
+--
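+-- For example, a quick sketch:
+--
+-- > relativizeUrls ".." "<a href=\"/foo\">bar</a>"
+-- > -- yields "<a href=\"../foo\">bar</a>"
+--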
+relativizeUrls :: String -- ^ Path to the site root
+ -> String -- ^ HTML to relativize
+ -> String -- ^ Resulting HTML
+relativizeUrls root = renderTags . map relativizeUrls' . parseTags
+ where
+ relativizeUrls' (TagOpen s a) = TagOpen s $ map (relativizeUrlsAttrs root) a
+ relativizeUrls' x = x
+
+-- | Relativize URLs in attributes
+--
+relativizeUrlsAttrs :: String -- ^ Path to the site root
+ -> Attribute String -- ^ Attribute to relativize
+ -> Attribute String -- ^ Resulting attribute
+relativizeUrlsAttrs root (key, value)
+ | key `S.member` urls && "/" `isPrefixOf` value = (key, root ++ value)
+ | otherwise = (key, value)
+ where
+ urls = S.fromList ["src", "href"]
diff --git a/src/Hakyll/Web/Tags.hs b/src/Hakyll/Web/Tags.hs
new file mode 100644
index 0000000..211a06b
--- /dev/null
+++ b/src/Hakyll/Web/Tags.hs
@@ -0,0 +1,180 @@
+-- | Module containing some specialized functions to deal with tags.
+-- This module follows certain conventions; my advice is to stick with them if
+-- possible.
+--
+-- More concrete: all functions in this module assume that the tags are
+-- located in the @tags@ field, and separated by commas. An example file
+-- @foo.markdown@ could look like:
+--
+-- > ---
+-- > author: Philip K. Dick
+-- > title: Do androids dream of electric sheep?
+-- > tags: future, science fiction, humanoid
+-- > ---
+-- > The novel is set in a post-apocalyptic near future, where the Earth and
+-- > its populations have been damaged greatly by Nuclear...
+--
+-- All the following functions would work with such a format. In addition to
+-- tags, Hakyll also supports categories. The convention when using categories
+-- is to place pages in subdirectories.
+--
+-- For example, the page @posts\/coding\/2010-01-28-hakyll-categories.markdown@
+-- would be given the category @coding@.
+--
+-- Tags and categories are read using the @readTags@ and @readCategory@
+-- functions. This module only provides functions to work with tags:
+-- categories are represented as tags as well. This works because a category
+-- is simply a more restricted tag: a page can have only one category
+-- (instead of multiple tags).
+--
+{-# LANGUAGE DeriveDataTypeable, OverloadedStrings, Arrows #-}
+module Hakyll.Web.Tags
+ ( Tags (..)
+ , readTagsWith
+ , readTags
+ , readCategory
+ , renderTagCloud
+ , renderTagsField
+ , renderCategoryField
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Applicative ((<$>))
+import Data.Map (Map)
+import qualified Data.Map as M
+import Data.List (intersperse)
+import Control.Arrow (arr, (&&&), (>>>), (***), (<<^), returnA)
+import Data.Maybe (catMaybes, fromMaybe)
+import Data.Monoid (mconcat)
+
+import Data.Typeable (Typeable)
+import Data.Binary (Binary, get, put)
+import Text.Blaze.Renderer.String (renderHtml)
+import Text.Blaze ((!), toHtml, toValue)
+import qualified Text.Blaze.Html5 as H
+import qualified Text.Blaze.Html5.Attributes as A
+
+import Hakyll.Web.Page
+import Hakyll.Web.Page.Metadata
+import Hakyll.Web.Util.Url
+import Hakyll.Core.Writable
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler
+import Hakyll.Core.Util.String
+
+-- | Data about tags
+--
+data Tags a = Tags
+ { tagsMap :: Map String [Page a]
+ } deriving (Show, Typeable)
+
+instance Binary a => Binary (Tags a) where
+ get = Tags <$> get
+ put (Tags m) = put m
+
+instance Writable (Tags a) where
+ write _ _ = return ()
+
+-- | Obtain tags from a page
+--
+getTags :: Page a -> [String]
+getTags = map trim . splitAll "," . getField "tags"
+
+-- | Obtain categories from a page
+--
+getCategory :: Page a -> [String]
+getCategory = return . getField "category"
+
+-- | Higher-level function to read tags
+--
+readTagsWith :: (Page a -> [String]) -- ^ Function extracting tags from a page
+ -> [Page a] -- ^ Pages
+ -> Tags a -- ^ Resulting tags
+readTagsWith f pages = Tags
+ { tagsMap = foldl (M.unionWith (++)) M.empty (map readTagsWith' pages)
+ }
+ where
+ -- Create a tag map for one page
+ readTagsWith' page =
+ let tags = f page
+ in M.fromList $ zip tags $ repeat [page]
+
+-- | Read a tagmap using the @tags@ metadata field
+--
+readTags :: [Page a] -> Tags a
+readTags = readTagsWith getTags
+
+-- | Read a tagmap using the @category@ metadata field
+--
+readCategory :: [Page a] -> Tags a
+readCategory = readTagsWith getCategory
+
+-- | Render a tag cloud in HTML
+--
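+-- A sketch (assuming 'parseIdentifier' from "Hakyll.Core.Identifier" for the
+-- tag links):
+--
+-- > renderTagCloud (\t -> parseIdentifier $ "tags/" ++ t ++ ".html") 100 200
+--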
+renderTagCloud :: (String -> Identifier) -- ^ Produce a link for a tag
+ -> Double -- ^ Smallest font size, in percent
+ -> Double -- ^ Biggest font size, in percent
+ -> Compiler (Tags a) String -- ^ Tag cloud renderer
+renderTagCloud makeUrl minSize maxSize = proc (Tags tags) -> do
+ -- In tags' we create a list: [((tag, route), count)]
+ tags' <- mapCompiler ((id &&& (getRouteFor <<^ makeUrl)) *** arr length)
+ -< M.toList tags
+
+ let -- Absolute frequencies of the pages
+ freqs = map snd tags'
+
+ -- Find out the relative count of a tag: on a scale from 0 to 1
+ relative count = (fromIntegral count - min') / (1 + max' - min')
+
+ -- Show the relative size of one 'count' in percent
+ size count =
+ let size' = floor $ minSize + relative count * (maxSize - minSize)
+ in show (size' :: Int) ++ "%"
+
+ -- The minimum and maximum count found, as doubles
+ (min', max')
+ | null freqs = (0, 1)
+ | otherwise = (minimum &&& maximum) $ map fromIntegral freqs
+
+ -- Create a link for one item
+ makeLink ((tag, url), count) =
+ H.a ! A.style (toValue $ "font-size: " ++ size count)
+ ! A.href (toValue $ fromMaybe "/" url)
+ $ toHtml tag
+
+ -- Render and return the HTML
+ returnA -< renderHtml $ mconcat $ intersperse " " $ map makeLink tags'
+
+-- | Render tags with links
+--
+renderTagsFieldWith :: (Page a -> [String]) -- ^ Function to get the tags
+ -> String -- ^ Destination key
+ -> (String -> Identifier) -- ^ Create a link for a tag
+ -> Compiler (Page a) (Page a) -- ^ Resulting compiler
+renderTagsFieldWith tags destination makeUrl =
+ id &&& arr tags >>> setFieldA destination renderTags
+ where
+ -- Compiler creating a comma-separated HTML string for a list of tags
+ renderTags :: Compiler [String] String
+ renderTags = arr (map $ id &&& makeUrl)
+ >>> mapCompiler (id *** getRouteFor)
+ >>> arr (map $ uncurry renderLink)
+ >>> arr (renderHtml . mconcat . intersperse ", " . catMaybes)
+
+ -- Render one tag link
+ renderLink _ Nothing = Nothing
+ renderLink tag (Just filePath) = Just $
+ H.a ! A.href (toValue $ toUrl filePath) $ toHtml tag
+
+-- | Render tags with links
+--
+renderTagsField :: String -- ^ Destination key
+ -> (String -> Identifier) -- ^ Create a link for a tag
+ -> Compiler (Page a) (Page a) -- ^ Resulting compiler
+renderTagsField = renderTagsFieldWith getTags
+
+-- | Render the category in a link
+--
+renderCategoryField :: String -- ^ Destination key
+ -> (String -> Identifier) -- ^ Create a category link
+ -> Compiler (Page a) (Page a) -- ^ Resulting compiler
+renderCategoryField = renderTagsFieldWith getCategory
diff --git a/src/Hakyll/Web/Template.hs b/src/Hakyll/Web/Template.hs
new file mode 100644
index 0000000..9c49278
--- /dev/null
+++ b/src/Hakyll/Web/Template.hs
@@ -0,0 +1,109 @@
+-- | This module provides means for reading and applying 'Template's.
+--
+-- Templates are tools to convert data (pages) into a string. They are
+-- perfectly suited for laying out your site.
+--
+-- Let's look at an example template:
+--
+-- > <html>
+-- > <head>
+-- > <title>My crazy homepage - $title$</title>
+-- > </head>
+-- > <body>
+-- > <div id="header">
+-- > <h1>My crazy homepage - $title$</h1>
+-- > </div>
+-- > <div id="content">
+-- > $body$
+-- > </div>
+-- > <div id="footer">
+-- > By reading this you agree that I now own your soul
+-- > </div>
+-- > </body>
+-- > </html>
+--
+-- We can use this template to render a 'Page' which has a body and a @$title$@
+-- metadata field.
+--
+-- As you can see, the format is very simple -- @$key$@ is used to render the
+-- @$key$@ field from the page, everything else is literally copied. If you want
+-- to literally insert @\"$key$\"@ into your page (for example, when you're
+-- writing a Hakyll tutorial) you can use
+--
+-- > <p>
+-- > A literal $$key$$.
+-- > </p>
+--
+-- Because of its simplicity, these templates can be used for more than HTML:
+-- you could make, for example, CSS or JS templates as well.
+--
+-- In addition to the native format, Hakyll also supports hamlet templates. For
+-- more information on hamlet templates, please refer to:
+-- <http://hackage.haskell.org/package/hamlet>.
+--
+module Hakyll.Web.Template
+ ( Template
+ , applyTemplate
+ , applySelf
+ , templateCompiler
+ , templateCompilerWith
+ , applyTemplateCompiler
+ ) where
+
+import Control.Arrow
+import Data.Maybe (fromMaybe)
+import System.FilePath (takeExtension)
+import qualified Data.Map as M
+
+import Text.Hamlet (HamletSettings, defaultHamletSettings)
+
+import Hakyll.Core.Compiler
+import Hakyll.Core.Identifier
+import Hakyll.Core.ResourceProvider
+import Hakyll.Web.Template.Internal
+import Hakyll.Web.Template.Read
+import Hakyll.Web.Page.Internal
+
+-- | Substitutes @$identifiers@ in the given @Template@ by values from the
+-- given 'Page'. When a key is not found, it is left as-is. Escaped dollars
+-- (@$$@) are rendered as a single @$@.
+--
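+-- A sketch (assuming 'readTemplate' from "Hakyll.Web.Template.Read" and a
+-- page built with 'Hakyll.Web.Page.fromBody'):
+--
+-- > applyTemplate (readTemplate "Hello $name$!")
+-- >               (setField "name" "world" $ fromBody "")
+-- > -- yields a page whose body is "Hello world!"
+--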
+applyTemplate :: Template -> Page String -> Page String
+applyTemplate template page =
+ fmap (const $ substitute =<< unTemplate template) page
+ where
+ map' = toMap page
+ substitute (Chunk chunk) = chunk
+ substitute (Key key) = fromMaybe ("$" ++ key ++ "$") $ M.lookup key map'
+    substitute Escaped       = "$"
+
+-- | Apply a page as its own template. This is often useful to fill in
+-- certain keys like @$root$@ and @$url$@.
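+--
+-- A minimal sketch: if a page's body contains @$url$@, then
+--
+-- > applySelf page
+--
+-- replaces that occurrence with the page's own @url@ field.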
+--
+applySelf :: Page String -> Page String
+applySelf page = applyTemplate (readTemplate $ pageBody page) page
+
+-- | Read a template. If the extension of the file being compiled is
+-- @.hml@ or @.hamlet@, it is treated as a Hamlet template and parsed as such.
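+--
+-- Typical usage in a rules file (sketch; the pattern is illustrative):
+--
+-- > compile "templates/*" templateCompiler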
+--
+templateCompiler :: Compiler Resource Template
+templateCompiler = templateCompilerWith defaultHamletSettings
+
+-- | Version of 'templateCompiler' that enables custom settings.
+--
+templateCompilerWith :: HamletSettings -> Compiler Resource Template
+templateCompilerWith settings =
+ cached "Hakyll.Web.Template.templateCompilerWith" $
+ getIdentifier &&& getResourceString >>^ uncurry read'
+ where
+ read' identifier string =
+ if takeExtension (toFilePath identifier) `elem` [".hml", ".hamlet"]
+ -- Hamlet template
+ then readHamletTemplateWith settings string
+ -- Hakyll template
+ else readTemplate string
+
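+-- | Apply the template stored at the given identifier to the page being
+-- compiled.
+--
+-- A sketch of usage in a rules file ('pageCompiler' comes from
+-- "Hakyll.Web.Pandoc"; the file names are illustrative):
+--
+-- > compile "posts/*" $ pageCompiler
+-- >     >>> applyTemplateCompiler "templates/post.html"
+--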
+applyTemplateCompiler :: Identifier -- ^ Template
+ -> Compiler (Page String) (Page String) -- ^ Compiler
+applyTemplateCompiler identifier = require identifier (flip applyTemplate)
diff --git a/src/Hakyll/Web/Template/Internal.hs b/src/Hakyll/Web/Template/Internal.hs
new file mode 100644
index 0000000..d0e0859
--- /dev/null
+++ b/src/Hakyll/Web/Template/Internal.hs
@@ -0,0 +1,45 @@
+-- | Module containing the template data structure
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
+module Hakyll.Web.Template.Internal
+ ( Template (..)
+ , TemplateElement (..)
+ ) where
+
+import Control.Applicative ((<$>))
+
+import Data.Binary (Binary, get, getWord8, put, putWord8)
+import Data.Typeable (Typeable)
+
+import Hakyll.Core.Writable
+
+-- | Datatype used for template substitutions.
+--
+newtype Template = Template
+ { unTemplate :: [TemplateElement]
+ }
+ deriving (Show, Eq, Binary, Typeable)
+
+instance Writable Template where
+    -- Writing a template to the site makes no sense, so this is a no-op
+ write _ _ = return ()
+
+-- | Elements of a template.
+--
+data TemplateElement
+ = Chunk String
+ | Key String
+ | Escaped
+ deriving (Show, Eq, Typeable)
+
+instance Binary TemplateElement where
+ put (Chunk string) = putWord8 0 >> put string
+ put (Key key) = putWord8 1 >> put key
+    put Escaped        = putWord8 2
+
+ get = getWord8 >>= \tag -> case tag of
+ 0 -> Chunk <$> get
+ 1 -> Key <$> get
+ 2 -> return Escaped
+ _ -> error $ "Hakyll.Web.Template.Internal: "
+ ++ "Error reading cached template"
diff --git a/src/Hakyll/Web/Template/Read.hs b/src/Hakyll/Web/Template/Read.hs
new file mode 100644
index 0000000..421b7e9
--- /dev/null
+++ b/src/Hakyll/Web/Template/Read.hs
@@ -0,0 +1,10 @@
+-- | Re-exports the various template reading modules
+--
+module Hakyll.Web.Template.Read
+ ( readTemplate
+ , readHamletTemplate
+ , readHamletTemplateWith
+ ) where
+
+import Hakyll.Web.Template.Read.Hakyll
+import Hakyll.Web.Template.Read.Hamlet
diff --git a/src/Hakyll/Web/Template/Read/Hakyll.hs b/src/Hakyll/Web/Template/Read/Hakyll.hs
new file mode 100644
index 0000000..fecf772
--- /dev/null
+++ b/src/Hakyll/Web/Template/Read/Hakyll.hs
@@ -0,0 +1,35 @@
+-- | Read templates in Hakyll's native format
+--
+module Hakyll.Web.Template.Read.Hakyll
+ ( readTemplate
+ ) where
+
+import Data.List (isPrefixOf)
+import Data.Char (isAlphaNum)
+
+import Hakyll.Web.Template.Internal
+
+-- | Construct a @Template@ from a string.
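+--
+-- For example:
+--
+-- > readTemplate "Hello $name$, that will be $$5"
+--
+-- produces, in terms of 'TemplateElement':
+--
+-- > Template [Chunk "Hello ", Key "name", Chunk ", that will be ", Escaped, Chunk "5"]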
+--
+readTemplate :: String -> Template
+readTemplate = Template . readTemplate'
+ where
+ readTemplate' [] = []
+ readTemplate' string
+ | "$$" `isPrefixOf` string =
+ Escaped : readTemplate' (drop 2 string)
+ | "$" `isPrefixOf` string =
+ case readKey (drop 1 string) of
+ Just (key, rest) -> Key key : readTemplate' rest
+ Nothing -> Chunk "$" : readTemplate' (drop 1 string)
+ | otherwise =
+ let (chunk, rest) = break (== '$') string
+ in Chunk chunk : readTemplate' rest
+
+    -- Parse a key into (key, rest) if it's valid, and return
+    -- Nothing otherwise
+ readKey string =
+ let (key, rest) = span isAlphaNum string
+ in if not (null key) && "$" `isPrefixOf` rest
+ then Just (key, drop 1 rest)
+ else Nothing
diff --git a/src/Hakyll/Web/Template/Read/Hamlet.hs b/src/Hakyll/Web/Template/Read/Hamlet.hs
new file mode 100644
index 0000000..7b496de
--- /dev/null
+++ b/src/Hakyll/Web/Template/Read/Hamlet.hs
@@ -0,0 +1,46 @@
+-- | Read templates in the Hamlet format
+--
+{-# LANGUAGE MultiParamTypeClasses #-}
+module Hakyll.Web.Template.Read.Hamlet
+ ( readHamletTemplate
+ , readHamletTemplateWith
+ ) where
+
+import Text.Hamlet (HamletSettings (..), defaultHamletSettings)
+import Text.Hamlet.RT
+
+import Hakyll.Web.Template.Internal
+
+-- | Read a Hamlet template using the default settings
+--
+readHamletTemplate :: String -> Template
+readHamletTemplate = readHamletTemplateWith defaultHamletSettings
+
+-- | Read a Hamlet template using the specified settings
+--
+readHamletTemplateWith :: HamletSettings -> String -> Template
+readHamletTemplateWith settings string =
+ let result = parseHamletRT settings string
+ in case result of
+ Just hamlet -> fromHamletRT hamlet
+ Nothing -> error
+ "Hakyll.Web.Template.Read.Hamlet.readHamletTemplateWith: \
+ \Could not parse Hamlet file"
+
+-- | Convert a 'HamletRT' to a 'Template'
+--
+fromHamletRT :: HamletRT -- ^ Hamlet runtime template
+ -> Template -- ^ Hakyll template
+fromHamletRT (HamletRT sd) = Template $ map fromSimpleDoc sd
+ where
+ fromSimpleDoc :: SimpleDoc -> TemplateElement
+ fromSimpleDoc (SDRaw chunk) = Chunk chunk
+ fromSimpleDoc (SDVar [var]) = Key var
+ fromSimpleDoc (SDVar _) = error
+ "Hakyll.Web.Template.Read.Hamlet.fromHamletRT: \
+ \Hakyll does not support '.' in identifier names when using \
+ \hamlet templates."
+ fromSimpleDoc _ = error
+ "Hakyll.Web.Template.Read.Hamlet.fromHamletRT: \
+ \Only simple $key$ identifiers are allowed when using hamlet \
+ \templates."
diff --git a/src/Hakyll/Web/Util/Url.hs b/src/Hakyll/Web/Util/Url.hs
new file mode 100644
index 0000000..54a361e
--- /dev/null
+++ b/src/Hakyll/Web/Util/Url.hs
@@ -0,0 +1,30 @@
+-- | Miscellaneous URL manipulation functions.
+--
+module Hakyll.Web.Util.Url
+ ( toUrl
+ , toSiteRoot
+ ) where
+
+import System.FilePath (splitPath, takeDirectory, joinPath)
+
+-- | Convert a filepath to a URL starting from the site root
+--
+-- Example:
+--
+-- > toUrl "foo/bar.html"
+--
+-- Result:
+--
+-- > "/foo/bar.html"
+--
+toUrl :: FilePath -> String
+toUrl = ('/' :)
+
+-- | Get the relative URL to the site root for a given (absolute) URL
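+--
+-- Example:
+--
+-- > toSiteRoot "/foo/bar.html"
+--
+-- Result:
+--
+-- > ".."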
+--
+toSiteRoot :: String -> String
+toSiteRoot = emptyException . joinPath . map parent
+           . filter relevant . splitPath . takeDirectory
+  where
+    parent = const ".."
+    emptyException [] = "."
+    emptyException x  = x
+    -- Ignore "/" and "." so they aren't counted as extra directory levels
+    relevant "/" = False
+    relevant "." = False
+    relevant _   = True