summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorJasper Van der Jeugt <jaspervdj@gmail.com>2011-03-01 14:50:41 +0100
committerJasper Van der Jeugt <jaspervdj@gmail.com>2011-03-01 14:50:41 +0100
commit90b25105830d6e4b0943ab55f9317bd142533acf (patch)
tree6eefb80a8a84724e70539dd8fa449530f7b17fe0 /src
parent8ef5a3ed0307be5d34a9564d02af3ed494f8e228 (diff)
parent8b727b994d482d593046f9b01a5c40b97c166d62 (diff)
downloadhakyll-90b25105830d6e4b0943ab55f9317bd142533acf.tar.gz
Merge branch 'hakyll3'
Conflicts: hakyll.cabal src/Text/Hakyll/Tags.hs
Diffstat (limited to 'src')
-rw-r--r--src/Hakyll.hs55
-rw-r--r--src/Hakyll/Core/CompiledItem.hs45
-rw-r--r--src/Hakyll/Core/Compiler.hs333
-rw-r--r--src/Hakyll/Core/Compiler/Internal.hs146
-rw-r--r--src/Hakyll/Core/Configuration.hs44
-rw-r--r--src/Hakyll/Core/CopyFile.hs29
-rw-r--r--src/Hakyll/Core/DirectedGraph.hs85
-rw-r--r--src/Hakyll/Core/DirectedGraph/DependencySolver.hs70
-rw-r--r--src/Hakyll/Core/DirectedGraph/Dot.hs30
-rw-r--r--src/Hakyll/Core/DirectedGraph/Internal.hs43
-rw-r--r--src/Hakyll/Core/Identifier.hs59
-rw-r--r--src/Hakyll/Core/Identifier/Pattern.hs160
-rw-r--r--src/Hakyll/Core/Logger.hs90
-rw-r--r--src/Hakyll/Core/ResourceProvider.hs75
-rw-r--r--src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs29
-rw-r--r--src/Hakyll/Core/Routes.hs136
-rw-r--r--src/Hakyll/Core/Rules.hs161
-rw-r--r--src/Hakyll/Core/Rules/Internal.hs75
-rw-r--r--src/Hakyll/Core/Run.hs207
-rw-r--r--src/Hakyll/Core/Store.hs88
-rw-r--r--src/Hakyll/Core/UnixFilter.hs76
-rw-r--r--src/Hakyll/Core/Util/Arrow.hs25
-rw-r--r--src/Hakyll/Core/Util/File.hs90
-rw-r--r--src/Hakyll/Core/Util/String.hs48
-rw-r--r--src/Hakyll/Core/Writable.hs22
-rw-r--r--src/Hakyll/Main.hs113
-rw-r--r--src/Hakyll/Web/CompressCss.hs (renamed from src/Text/Hakyll/Internal/CompressCss.hs)31
-rw-r--r--src/Hakyll/Web/Feed.hs124
-rw-r--r--src/Hakyll/Web/FileType.hs55
-rw-r--r--src/Hakyll/Web/Page.hs124
-rw-r--r--src/Hakyll/Web/Page/Internal.hs50
-rw-r--r--src/Hakyll/Web/Page/Metadata.hs131
-rw-r--r--src/Hakyll/Web/Page/Read.hs60
-rw-r--r--src/Hakyll/Web/Pandoc.hs110
-rw-r--r--src/Hakyll/Web/Preview/Server.hs72
-rw-r--r--src/Hakyll/Web/RelativizeUrls.hs62
-rw-r--r--src/Hakyll/Web/Tags.hs180
-rw-r--r--src/Hakyll/Web/Template.hs109
-rw-r--r--src/Hakyll/Web/Template/Internal.hs45
-rw-r--r--src/Hakyll/Web/Template/Read.hs10
-rw-r--r--src/Hakyll/Web/Template/Read/Hakyll.hs35
-rw-r--r--src/Hakyll/Web/Template/Read/Hamlet.hs46
-rw-r--r--src/Hakyll/Web/Util/Url.hs30
-rw-r--r--src/Network/Hakyll/SimpleServer.hs215
-rw-r--r--src/Text/Hakyll.hs185
-rw-r--r--src/Text/Hakyll/Configurations/Static.hs59
-rw-r--r--src/Text/Hakyll/Context.hs16
-rw-r--r--src/Text/Hakyll/ContextManipulations.hs124
-rw-r--r--src/Text/Hakyll/CreateContext.hs114
-rw-r--r--src/Text/Hakyll/Feed.hs112
-rw-r--r--src/Text/Hakyll/File.hs196
-rw-r--r--src/Text/Hakyll/HakyllAction.hs98
-rw-r--r--src/Text/Hakyll/HakyllMonad.hs99
-rw-r--r--src/Text/Hakyll/Internal/Cache.hs53
-rw-r--r--src/Text/Hakyll/Internal/FileType.hs49
-rw-r--r--src/Text/Hakyll/Internal/Template.hs86
-rw-r--r--src/Text/Hakyll/Internal/Template/Hamlet.hs56
-rw-r--r--src/Text/Hakyll/Internal/Template/Template.hs34
-rw-r--r--src/Text/Hakyll/Page.hs108
-rw-r--r--src/Text/Hakyll/Paginate.hs94
-rw-r--r--src/Text/Hakyll/Pandoc.hs57
-rw-r--r--src/Text/Hakyll/Regex.hs77
-rw-r--r--src/Text/Hakyll/Render.hs126
-rw-r--r--src/Text/Hakyll/Tags.hs172
-rw-r--r--src/Text/Hakyll/Util.hs34
65 files changed, 3600 insertions, 2172 deletions
diff --git a/src/Hakyll.hs b/src/Hakyll.hs
new file mode 100644
index 0000000..0261044
--- /dev/null
+++ b/src/Hakyll.hs
@@ -0,0 +1,55 @@
+-- | Top-level module exporting all modules that are interesting for the user
+--
+module Hakyll
+ ( module Hakyll.Core.Compiler
+ , module Hakyll.Core.CopyFile
+ , module Hakyll.Core.Configuration
+ , module Hakyll.Core.Identifier
+ , module Hakyll.Core.Identifier.Pattern
+ , module Hakyll.Core.ResourceProvider
+ , module Hakyll.Core.Routes
+ , module Hakyll.Core.Rules
+ , module Hakyll.Core.UnixFilter
+ , module Hakyll.Core.Util.Arrow
+ , module Hakyll.Core.Util.File
+ , module Hakyll.Core.Util.String
+ , module Hakyll.Core.Writable
+ , module Hakyll.Main
+ , module Hakyll.Web.CompressCss
+ , module Hakyll.Web.Feed
+ , module Hakyll.Web.FileType
+ , module Hakyll.Web.Page
+ , module Hakyll.Web.Page.Metadata
+ , module Hakyll.Web.Page.Read
+ , module Hakyll.Web.Pandoc
+ , module Hakyll.Web.RelativizeUrls
+ , module Hakyll.Web.Tags
+ , module Hakyll.Web.Template
+ , module Hakyll.Web.Util.Url
+ ) where
+
+import Hakyll.Core.Compiler
+import Hakyll.Core.CopyFile
+import Hakyll.Core.Configuration
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Routes
+import Hakyll.Core.Rules
+import Hakyll.Core.UnixFilter
+import Hakyll.Core.Util.Arrow
+import Hakyll.Core.Util.File
+import Hakyll.Core.Util.String
+import Hakyll.Core.Writable
+import Hakyll.Main
+import Hakyll.Web.CompressCss
+import Hakyll.Web.Feed
+import Hakyll.Web.FileType
+import Hakyll.Web.Page
+import Hakyll.Web.Page.Metadata
+import Hakyll.Web.Page.Read
+import Hakyll.Web.Pandoc
+import Hakyll.Web.RelativizeUrls
+import Hakyll.Web.Tags
+import Hakyll.Web.Template
+import Hakyll.Web.Util.Url
diff --git a/src/Hakyll/Core/CompiledItem.hs b/src/Hakyll/Core/CompiledItem.hs
new file mode 100644
index 0000000..5dd0efc
--- /dev/null
+++ b/src/Hakyll/Core/CompiledItem.hs
@@ -0,0 +1,45 @@
+-- | A module containing a box datatype representing a compiled item. This
+-- item can be of any type, given that a few restrictions hold:
+--
+-- * we need a 'Typeable' instance to perform type-safe casts;
+--
+-- * we need a 'Binary' instance so we can serialize these items to the cache;
+--
+-- * we need a 'Writable' instance so the results can be saved.
+--
+{-# LANGUAGE ExistentialQuantification #-}
+module Hakyll.Core.CompiledItem
+ ( CompiledItem (..)
+ , compiledItem
+ , unCompiledItem
+ ) where
+
+import Data.Binary (Binary)
+import Data.Typeable (Typeable, cast)
+import Data.Maybe (fromMaybe)
+
+import Hakyll.Core.Writable
+
+-- | Box type for a compiled item
+--
+data CompiledItem = forall a. (Binary a, Typeable a, Writable a)
+ => CompiledItem a
+
+instance Writable CompiledItem where
+ write p (CompiledItem x) = write p x
+
+-- | Box a value into a 'CompiledItem'
+--
+compiledItem :: (Binary a, Typeable a, Writable a)
+ => a
+ -> CompiledItem
+compiledItem = CompiledItem
+
+-- | Unbox a value from a 'CompiledItem'
+--
+unCompiledItem :: (Binary a, Typeable a, Writable a)
+ => CompiledItem
+ -> a
+unCompiledItem (CompiledItem x) = fromMaybe error' $ cast x
+ where
+ error' = error "Hakyll.Core.CompiledItem.unCompiledItem: Unsupported type"
diff --git a/src/Hakyll/Core/Compiler.hs b/src/Hakyll/Core/Compiler.hs
new file mode 100644
index 0000000..e5da9b8
--- /dev/null
+++ b/src/Hakyll/Core/Compiler.hs
@@ -0,0 +1,333 @@
+-- | A Compiler manages targets and dependencies between targets
+--
+-- The most distinguishing property of a 'Compiler' is that it is an Arrow. A
+-- compiler of the type @Compiler a b@ is simply a compilation phase which takes
+-- an @a@ as input, and produces a @b@ as output.
+--
+-- Compilers are chained using the '>>>' arrow operation. If we have a compiler
+--
+-- > getResourceString :: Compiler Resource String
+--
+-- which reads the resource, and a compiler
+--
+-- > readPage :: Compiler String (Page String)
+--
+-- we can chain these two compilers to get a
+--
+-- > (getResourceString >>> readPage) :: Compiler Resource (Page String)
+--
+-- Most compilers can be created by combining smaller compilers using '>>>'.
+--
+-- More advanced constructions are also possible using arrow, and sometimes
+-- these are needed. For a good introduction to arrow, you can refer to
+--
+-- <http://en.wikibooks.org/wiki/Haskell/Understanding_arrows>
+--
+-- A construction worth writing a few paragraphs about here are the 'require'
+-- functions. Different variants of this function are exported here, but they
+-- all serve more or less the same goal.
+--
+-- When you use only '>>>' to chain your compilers, you get a linear pipeline --
+-- it is not possible to add extra items from other compilers along the way.
+-- This is where the 'require' functions come in.
+--
+-- This function allows you to reference other items, which are then added to
+-- the pipeline. Let's look at this crappy ASCII illustration which represents
+-- a pretty common scenario:
+--
+-- > read resource >>> pandoc render >>> layout >>> relativize URL's
+-- >
+-- > @templates/fancy.html@
+--
+-- We want to construct a pipeline of compilers to go from our resource to a
+-- proper webpage. However, the @layout@ compiler takes more than just the
+-- rendered page as input: it needs the @templates/fancy.html@ template as well.
+--
+-- This is an example of where we need the @require@ function. We can solve
+-- this using a construction that looks like:
+--
+-- > ... >>> pandoc render >>> require >>> layout >>> ...
+-- > |
+-- > @templates/fancy.html@ ------/
+--
+-- This illustration can help us understand the type signature of 'require'.
+--
+-- > require :: (Binary a, Typeable a, Writable a)
+-- > => Identifier
+-- > -> (b -> a -> c)
+-- > -> Compiler b c
+--
+-- Let's look at it in detail:
+--
+-- > (Binary a, Typeable a, Writable a)
+--
+-- These are constraints for the @a@ type. @a@ (the template) needs to have
+-- certain properties for it to be required.
+--
+-- > Identifier
+--
+-- This is simply @templates/fancy.html@: the 'Identifier' of the item we want
+-- to 'require', in other words, the name of the item we want to add to the
+-- pipeline somehow.
+--
+-- > (b -> a -> c)
+--
+-- This is a function given by the user, specifying /how/ the two items shall be
+-- merged. @b@ is the output of the previous compiler, and @a@ is the item we
+-- just required -- the template. This means @c@ will be the final output of the
+-- 'require' combinator.
+--
+-- > Compiler b c
+--
+-- Indeed, we have now constructed a compiler which takes a @b@ and produces a
+-- @c@. This means that we have a linear pipeline again, thanks to the 'require'
+-- function. So, the 'require' function actually helps to reduce to complexity
+-- of Hakyll applications!
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Compiler
+ ( Compiler
+ , runCompiler
+ , getIdentifier
+ , getRoute
+ , getRouteFor
+ , getResourceString
+ , fromDependency
+ , require_
+ , require
+ , requireA
+ , requireAll_
+ , requireAll
+ , requireAllA
+ , cached
+ , unsafeCompiler
+ , traceShowCompiler
+ , mapCompiler
+ , timedCompiler
+ , byExtension
+ ) where
+
+import Prelude hiding ((.), id)
+import Control.Arrow ((>>>), (&&&), arr)
+import Control.Applicative ((<$>))
+import Control.Monad.Reader (ask)
+import Control.Monad.Trans (liftIO)
+import Control.Category (Category, (.), id)
+import Data.Maybe (fromMaybe)
+import System.FilePath (takeExtension)
+
+import Data.Binary (Binary)
+import Data.Typeable (Typeable)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Store
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.Logger
+
+-- | Run a compiler, yielding the resulting target and it's dependencies. This
+-- version of 'runCompilerJob' also stores the result
+--
+runCompiler :: Compiler () CompileRule -- ^ Compiler to run
+ -> Identifier -- ^ Target identifier
+ -> ResourceProvider -- ^ Resource provider
+ -> Routes -- ^ Route
+ -> Store -- ^ Store
+ -> Bool -- ^ Was the resource modified?
+ -> Logger -- ^ Logger
+ -> IO CompileRule -- ^ Resulting item
+runCompiler compiler identifier provider routes store modified logger = do
+ -- Run the compiler job
+ result <-
+ runCompilerJob compiler identifier provider routes store modified logger
+
+ -- Inspect the result
+ case result of
+ -- In case we compiled an item, we will store a copy in the cache first,
+ -- before we return control. This makes sure the compiled item can later
+ -- be accessed by e.g. require.
+ CompileRule (CompiledItem x) ->
+ storeSet store "Hakyll.Core.Compiler.runCompiler" identifier x
+
+ -- Otherwise, we do nothing here
+ _ -> return ()
+
+ return result
+
+-- | Get the identifier of the item that is currently being compiled
+--
+getIdentifier :: Compiler a Identifier
+getIdentifier = fromJob $ const $ CompilerM $ compilerIdentifier <$> ask
+
+-- | Get the route we are using for this item
+--
+getRoute :: Compiler a (Maybe FilePath)
+getRoute = getIdentifier >>> getRouteFor
+
+-- | Get the route for a specified item
+--
+getRouteFor :: Compiler Identifier (Maybe FilePath)
+getRouteFor = fromJob $ \identifier -> CompilerM $ do
+ routes <- compilerRoutes <$> ask
+ return $ runRoutes routes identifier
+
+-- | Get the resource we are compiling as a string
+--
+getResourceString :: Compiler Resource String
+getResourceString = fromJob $ \resource -> CompilerM $ do
+ provider <- compilerResourceProvider <$> ask
+ liftIO $ resourceString provider resource
+
+-- | Auxiliary: get a dependency
+--
+getDependency :: (Binary a, Writable a, Typeable a)
+ => Identifier -> CompilerM a
+getDependency identifier = CompilerM $ do
+ store <- compilerStore <$> ask
+ fmap (fromMaybe error') $ liftIO $
+ storeGet store "Hakyll.Core.Compiler.runCompiler" identifier
+ where
+ error' = error $ "Hakyll.Core.Compiler.getDependency: "
+ ++ show identifier
+ ++ " not found in the cache, the cache might be corrupted or"
+ ++ " the item you are referring to might not exist"
+
+
+-- | Variant of 'require' which drops the current value
+--
+require_ :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> Compiler b a
+require_ identifier =
+ fromDependency identifier >>> fromJob (const $ getDependency identifier)
+
+-- | Require another target. Using this function ensures automatic handling of
+-- dependencies
+--
+require :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> (b -> a -> c)
+ -> Compiler b c
+require identifier = requireA identifier . arr . uncurry
+
+-- | Arrow-based variant of 'require'
+--
+requireA :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> Compiler (b, a) c
+ -> Compiler b c
+requireA identifier = (id &&& require_ identifier >>>)
+
+-- | Variant of 'requireAll' which drops the current value
+--
+requireAll_ :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> Compiler b [a]
+requireAll_ pattern = fromDependencies (const getDeps) >>> fromJob requireAll_'
+ where
+ getDeps = matches pattern . map unResource . resourceList
+ requireAll_' = const $ CompilerM $ do
+ deps <- getDeps . compilerResourceProvider <$> ask
+ mapM (unCompilerM . getDependency) deps
+
+-- | Require a number of targets. Using this function ensures automatic handling
+-- of dependencies
+--
+requireAll :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> (b -> [a] -> c)
+ -> Compiler b c
+requireAll pattern = requireAllA pattern . arr . uncurry
+
+-- | Arrow-based variant of 'requireAll'
+--
+requireAllA :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> Compiler (b, [a]) c
+ -> Compiler b c
+requireAllA pattern = (id &&& requireAll_ pattern >>>)
+
+cached :: (Binary a, Typeable a, Writable a)
+ => String
+ -> Compiler Resource a
+ -> Compiler Resource a
+cached name (Compiler d j) = Compiler d $ const $ CompilerM $ do
+ logger <- compilerLogger <$> ask
+ identifier <- compilerIdentifier <$> ask
+ store <- compilerStore <$> ask
+ modified <- compilerResourceModified <$> ask
+ report logger $ "Checking cache: " ++ if modified then "modified" else "OK"
+ if modified
+ then do v <- unCompilerM $ j $ Resource identifier
+ liftIO $ storeSet store name identifier v
+ return v
+ else do v <- liftIO $ storeGet store name identifier
+ case v of Just v' -> return v'
+ Nothing -> error'
+ where
+ error' = error "Hakyll.Core.Compiler.cached: Cache corrupt!"
+
+-- | Create an unsafe compiler from a function in IO
+--
+unsafeCompiler :: (a -> IO b) -- ^ Function to lift
+ -> Compiler a b -- ^ Resulting compiler
+unsafeCompiler f = fromJob $ CompilerM . liftIO . f
+
+-- | Compiler for debugging purposes
+--
+traceShowCompiler :: Show a => Compiler a a
+traceShowCompiler = fromJob $ \x -> CompilerM $ do
+ logger <- compilerLogger <$> ask
+ report logger $ show x
+ return x
+
+-- | Map over a compiler
+--
+mapCompiler :: Compiler a b
+ -> Compiler [a] [b]
+mapCompiler (Compiler d j) = Compiler d $ mapM j
+
+-- | Log and time a compiler
+--
+timedCompiler :: String -- ^ Message
+ -> Compiler a b -- ^ Compiler to time
+ -> Compiler a b -- ^ Resulting compiler
+timedCompiler msg (Compiler d j) = Compiler d $ \x -> CompilerM $ do
+ logger <- compilerLogger <$> ask
+ timed logger msg $ unCompilerM $ j x
+
+-- | Choose a compiler by extension
+--
+-- Example:
+--
+-- > route "css/*" $ setExtension "css"
+-- > compile "css/*" $ byExtension (error "Not a (S)CSS file")
+-- > [ (".css", compressCssCompiler)
+-- > , (".scss", sass)
+-- > ]
+--
+-- This piece of code will select the @compressCssCompiler@ for @.css@ files,
+-- and the @sass@ compiler (defined elsewhere) for @.scss@ files.
+--
+byExtension :: Compiler a b -- ^ Default compiler
+ -> [(String, Compiler a b)] -- ^ Choices
+ -> Compiler a b -- ^ Resulting compiler
+byExtension defaultCompiler choices = Compiler deps job
+ where
+ -- Lookup the compiler, give an error when it is not found
+ lookup' identifier =
+ let extension = takeExtension $ toFilePath identifier
+ in fromMaybe defaultCompiler $ lookup extension choices
+ -- Collect the dependencies of the choice
+ deps = do
+ identifier <- dependencyIdentifier <$> ask
+ compilerDependencies $ lookup' identifier
+ -- Collect the job of the choice
+ job x = CompilerM $ do
+ identifier <- compilerIdentifier <$> ask
+ unCompilerM $ compilerJob (lookup' identifier) x
diff --git a/src/Hakyll/Core/Compiler/Internal.hs b/src/Hakyll/Core/Compiler/Internal.hs
new file mode 100644
index 0000000..53df044
--- /dev/null
+++ b/src/Hakyll/Core/Compiler/Internal.hs
@@ -0,0 +1,146 @@
+-- | Internally used compiler module
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Compiler.Internal
+ ( Dependencies
+ , DependencyEnvironment (..)
+ , CompilerEnvironment (..)
+ , CompilerM (..)
+ , Compiler (..)
+ , runCompilerJob
+ , runCompilerDependencies
+ , fromJob
+ , fromDependencies
+ , fromDependency
+ ) where
+
+import Prelude hiding ((.), id)
+import Control.Applicative (Applicative, pure, (<*>), (<$>))
+import Control.Monad.Reader (ReaderT, Reader, ask, runReaderT, runReader)
+import Control.Monad ((<=<), liftM2)
+import Data.Set (Set)
+import qualified Data.Set as S
+import Control.Category (Category, (.), id)
+import Control.Arrow (Arrow, ArrowChoice, arr, first, left)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Store
+import Hakyll.Core.Routes
+import Hakyll.Core.Logger
+
+-- | A set of dependencies
+--
+type Dependencies = Set Identifier
+
+-- | Environment in which the dependency analyzer runs
+--
+data DependencyEnvironment = DependencyEnvironment
+ { -- | Target identifier
+ dependencyIdentifier :: Identifier
+ , -- | Resource provider
+ dependencyResourceProvider :: ResourceProvider
+ }
+
+-- | Environment in which a compiler runs
+--
+data CompilerEnvironment = CompilerEnvironment
+ { -- | Target identifier
+ compilerIdentifier :: Identifier
+ , -- | Resource provider
+ compilerResourceProvider :: ResourceProvider
+ , -- | Site routes
+ compilerRoutes :: Routes
+ , -- | Compiler store
+ compilerStore :: Store
+ , -- | Flag indicating if the underlying resource was modified
+ compilerResourceModified :: Bool
+ , -- | Logger
+ compilerLogger :: Logger
+ }
+
+-- | The compiler monad
+--
+newtype CompilerM a = CompilerM
+ { unCompilerM :: ReaderT CompilerEnvironment IO a
+ } deriving (Monad, Functor, Applicative)
+
+-- | The compiler arrow
+--
+data Compiler a b = Compiler
+ { compilerDependencies :: Reader DependencyEnvironment Dependencies
+ , compilerJob :: a -> CompilerM b
+ }
+
+instance Functor (Compiler a) where
+ fmap f ~(Compiler d j) = Compiler d $ fmap f . j
+
+instance Applicative (Compiler a) where
+ pure = Compiler (return S.empty) . const . return
+ ~(Compiler d1 f) <*> ~(Compiler d2 j) =
+ Compiler (liftM2 S.union d1 d2) $ \x -> f x <*> j x
+
+instance Category Compiler where
+ id = Compiler (return S.empty) return
+ ~(Compiler d1 j1) . ~(Compiler d2 j2) =
+ Compiler (liftM2 S.union d1 d2) (j1 <=< j2)
+
+instance Arrow Compiler where
+ arr f = Compiler (return S.empty) (return . f)
+ first ~(Compiler d j) = Compiler d $ \(x, y) -> do
+ x' <- j x
+ return (x', y)
+
+instance ArrowChoice Compiler where
+ left ~(Compiler d j) = Compiler d $ \e -> case e of
+ Left l -> Left <$> j l
+ Right r -> Right <$> return r
+
+-- | Run a compiler, yielding the resulting target and it's dependencies
+--
+runCompilerJob :: Compiler () a -- ^ Compiler to run
+ -> Identifier -- ^ Target identifier
+ -> ResourceProvider -- ^ Resource provider
+ -> Routes -- ^ Route
+ -> Store -- ^ Store
+ -> Bool -- ^ Was the resource modified?
+ -> Logger -- ^ Logger
+ -> IO a
+runCompilerJob compiler identifier provider route store modified logger =
+ runReaderT (unCompilerM $ compilerJob compiler ()) env
+ where
+ env = CompilerEnvironment
+ { compilerIdentifier = identifier
+ , compilerResourceProvider = provider
+ , compilerRoutes = route
+ , compilerStore = store
+ , compilerResourceModified = modified
+ , compilerLogger = logger
+ }
+
+runCompilerDependencies :: Compiler () a
+ -> Identifier
+ -> ResourceProvider
+ -> Dependencies
+runCompilerDependencies compiler identifier provider =
+ runReader (compilerDependencies compiler) env
+ where
+ env = DependencyEnvironment
+ { dependencyIdentifier = identifier
+ , dependencyResourceProvider = provider
+ }
+
+fromJob :: (a -> CompilerM b)
+ -> Compiler a b
+fromJob = Compiler (return S.empty)
+
+fromDependencies :: (Identifier -> ResourceProvider -> [Identifier])
+ -> Compiler b b
+fromDependencies collectDeps = flip Compiler return $ do
+ DependencyEnvironment identifier provider <- ask
+ return $ S.fromList $ collectDeps identifier provider
+
+-- | Wait until another compiler has finished before running this compiler
+--
+fromDependency :: Identifier -> Compiler a a
+fromDependency = fromDependencies . const . const . return
diff --git a/src/Hakyll/Core/Configuration.hs b/src/Hakyll/Core/Configuration.hs
new file mode 100644
index 0000000..242b68f
--- /dev/null
+++ b/src/Hakyll/Core/Configuration.hs
@@ -0,0 +1,44 @@
+-- | Exports a datastructure for the top-level hakyll configuration
+--
+module Hakyll.Core.Configuration
+ ( HakyllConfiguration (..)
+ , defaultHakyllConfiguration
+ ) where
+
+import System.FilePath (takeFileName)
+import Data.List (isPrefixOf, isSuffixOf)
+
+data HakyllConfiguration = HakyllConfiguration
+ { -- | Directory in which the output written
+ destinationDirectory :: FilePath
+ , -- | Directory where hakyll's internal store is kept
+ storeDirectory :: FilePath
+ , -- | Function to determine ignored files
+ --
+ -- In 'defaultHakyllConfiguration', the following files are ignored:
+ --
+ -- * files starting with a @.@
+ --
+ -- * files ending with a @~@
+ --
+ -- * files ending with @.swp@
+ --
+ ignoreFile :: FilePath -> Bool
+ }
+
+-- | Default configuration for a hakyll application
+--
+defaultHakyllConfiguration :: HakyllConfiguration
+defaultHakyllConfiguration = HakyllConfiguration
+ { destinationDirectory = "_site"
+ , storeDirectory = "_cache"
+ , ignoreFile = ignoreFile'
+ }
+ where
+ ignoreFile' path
+ | "." `isPrefixOf` fileName = True
+ | "~" `isSuffixOf` fileName = True
+ | ".swp" `isSuffixOf` fileName = True
+ | otherwise = False
+ where
+ fileName = takeFileName path
diff --git a/src/Hakyll/Core/CopyFile.hs b/src/Hakyll/Core/CopyFile.hs
new file mode 100644
index 0000000..dbbaaa1
--- /dev/null
+++ b/src/Hakyll/Core/CopyFile.hs
@@ -0,0 +1,29 @@
+-- | Exports simple compilers to just copy files
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
+module Hakyll.Core.CopyFile
+ ( CopyFile (..)
+ , copyFileCompiler
+ ) where
+
+import Control.Arrow ((>>^))
+import System.Directory (copyFile)
+
+import Data.Typeable (Typeable)
+import Data.Binary (Binary)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Writable
+import Hakyll.Core.Compiler
+import Hakyll.Core.Identifier
+
+-- | Newtype construct around 'FilePath' which will copy the file directly
+--
+newtype CopyFile = CopyFile {unCopyFile :: FilePath}
+ deriving (Show, Eq, Ord, Binary, Typeable)
+
+instance Writable CopyFile where
+ write dst (CopyFile src) = copyFile src dst
+
+copyFileCompiler :: Compiler Resource CopyFile
+copyFileCompiler = getIdentifier >>^ CopyFile . toFilePath
diff --git a/src/Hakyll/Core/DirectedGraph.hs b/src/Hakyll/Core/DirectedGraph.hs
new file mode 100644
index 0000000..76a030b
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph.hs
@@ -0,0 +1,85 @@
+-- | Representation of a directed graph. In Hakyll, this is used for dependency
+-- tracking.
+--
+module Hakyll.Core.DirectedGraph
+ ( DirectedGraph
+ , fromList
+ , member
+ , nodes
+ , neighbours
+ , reverse
+ , reachableNodes
+ , sanitize
+ ) where
+
+import Prelude hiding (reverse)
+import Data.Monoid (mconcat)
+import Data.Set (Set)
+import Data.Maybe (fromMaybe)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+import Hakyll.Core.DirectedGraph.Internal
+
+-- | Construction of directed graphs
+--
+fromList :: Ord a
+ => [(a, Set a)] -- ^ List of (node, reachable neighbours)
+ -> DirectedGraph a -- ^ Resulting directed graph
+fromList = DirectedGraph . M.fromList . map (\(t, d) -> (t, Node t d))
+
+-- | Check if a node lies in the given graph
+--
+member :: Ord a
+ => a -- ^ Node to check for
+ -> DirectedGraph a -- ^ Directed graph to check in
+ -> Bool -- ^ If the node lies in the graph
+member n = M.member n . unDirectedGraph
+
+-- | Get all nodes in the graph
+--
+nodes :: Ord a
+ => DirectedGraph a -- ^ Graph to get the nodes from
+ -> Set a -- ^ All nodes in the graph
+nodes = M.keysSet . unDirectedGraph
+
+-- | Get a set of reachable neighbours from a directed graph
+--
+neighbours :: Ord a
+ => a -- ^ Node to get the neighbours of
+ -> DirectedGraph a -- ^ Graph to search in
+ -> Set a -- ^ Set containing the neighbours
+neighbours x = fromMaybe S.empty . fmap nodeNeighbours
+ . M.lookup x . unDirectedGraph
+
+-- | Reverse a directed graph (i.e. flip all edges)
+--
+reverse :: Ord a
+ => DirectedGraph a
+ -> DirectedGraph a
+reverse = mconcat . map reverse' . M.toList . unDirectedGraph
+ where
+ reverse' (id', Node _ neighbours') = fromList $
+ zip (S.toList neighbours') $ repeat $ S.singleton id'
+
+-- | Find all reachable nodes from a given set of nodes in the directed graph
+--
+reachableNodes :: Ord a => Set a -> DirectedGraph a -> Set a
+reachableNodes set graph = reachable (setNeighbours set) set
+ where
+ reachable next visited
+ | S.null next = visited
+ | otherwise = reachable (sanitize' neighbours') (next `S.union` visited)
+ where
+ sanitize' = S.filter (`S.notMember` visited)
+ neighbours' = setNeighbours (sanitize' next)
+
+ setNeighbours = S.unions . map (`neighbours` graph) . S.toList
+
+-- | Remove all dangling pointers, i.e. references to notes that do
+-- not actually exist in the graph.
+--
+sanitize :: Ord a => DirectedGraph a -> DirectedGraph a
+sanitize (DirectedGraph graph) = DirectedGraph $ M.map sanitize' graph
+ where
+ sanitize' (Node t n) = Node t $ S.filter (`M.member` graph) n
diff --git a/src/Hakyll/Core/DirectedGraph/DependencySolver.hs b/src/Hakyll/Core/DirectedGraph/DependencySolver.hs
new file mode 100644
index 0000000..54826ff
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/DependencySolver.hs
@@ -0,0 +1,70 @@
+-- | Given a dependency graph, this module provides a function that will
+-- generate an order in which the graph can be visited, so that all the
+-- dependencies of a given node have been visited before the node itself is
+-- visited.
+--
+module Hakyll.Core.DirectedGraph.DependencySolver
+ ( solveDependencies
+ ) where
+
+import Prelude
+import qualified Prelude as P
+import Data.Set (Set)
+import Data.Maybe (mapMaybe)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+import Hakyll.Core.DirectedGraph
+import Hakyll.Core.DirectedGraph.Internal
+
+-- | Solve a dependency graph. This function returns an order to run the
+-- different nodes
+--
+solveDependencies :: Ord a
+ => DirectedGraph a -- ^ Graph
+ -> [a] -- ^ Resulting plan
+solveDependencies = P.reverse . order [] [] S.empty
+
+-- | Produce a reversed order using a stack
+--
+order :: Ord a
+ => [a] -- ^ Temporary result
+ -> [Node a] -- ^ Backtrace stack
+ -> Set a -- ^ Items in the stack
+ -> DirectedGraph a -- ^ Graph
+ -> [a] -- ^ Ordered result
+order temp stack set graph@(DirectedGraph graph')
+ -- Empty graph - return our current result
+ | M.null graph' = temp
+ | otherwise = case stack of
+
+ -- Empty stack - pick a node, and add it to the stack
+ [] ->
+ let (tag, node) = M.findMin graph'
+ in order temp (node : stack) (S.insert tag set) graph
+
+ -- At least one item on the stack - continue using this item
+ (node : stackTail) ->
+ -- Check which dependencies are still in the graph
+ let tag = nodeTag node
+ deps = S.toList $ nodeNeighbours node
+ unsatisfied = mapMaybe (`M.lookup` graph') deps
+ in case unsatisfied of
+
+ -- All dependencies for node are satisfied, we can return it and
+ -- remove it from the graph
+ [] -> order (tag : temp) stackTail (S.delete tag set)
+ (DirectedGraph $ M.delete tag graph')
+
+ -- There is at least one dependency left. We need to solve that
+ -- one first...
+ (dep : _) -> if nodeTag dep `S.member` set
+ -- The dependency is already in our stack - cycle detected!
+ then cycleError
+ -- Continue with the dependency
+ else order temp (dep : node : stackTail)
+ (S.insert (nodeTag dep) set)
+ graph
+ where
+ cycleError = error $ "Hakyll.Core.DirectedGraph.DependencySolver.order: "
+ ++ "Cycle detected!" -- TODO: Dump cycle
diff --git a/src/Hakyll/Core/DirectedGraph/Dot.hs b/src/Hakyll/Core/DirectedGraph/Dot.hs
new file mode 100644
index 0000000..8289992
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/Dot.hs
@@ -0,0 +1,30 @@
+-- | Dump a directed graph in dot format. Used for debugging purposes
+--
+module Hakyll.Core.DirectedGraph.Dot
+ ( toDot
+ , writeDot
+ ) where
+
+import Hakyll.Core.DirectedGraph
+import qualified Data.Set as S
+
+-- | Convert a directed graph into dot format for debugging purposes
+--
+toDot :: Ord a
+ => (a -> String) -- ^ Convert nodes to dot names
+ -> DirectedGraph a -- ^ Graph to dump
+ -> String -- ^ Resulting string
+toDot showTag graph = unlines $ concat
+ [ return "digraph dependencies {"
+ , concatMap showNode (S.toList $ nodes graph)
+ , return "}"
+ ]
+ where
+ showNode node = map (showEdge node) $ S.toList $ neighbours node graph
+ showEdge x y = " \"" ++ showTag x ++ "\" -> \"" ++ showTag y ++ "\";"
+
+-- | Write out the @.dot@ file to a given file path. See 'toDot' for more
+-- information.
+--
+writeDot :: Ord a => FilePath -> (a -> String) -> DirectedGraph a -> IO ()
+writeDot path showTag = writeFile path . toDot showTag
diff --git a/src/Hakyll/Core/DirectedGraph/Internal.hs b/src/Hakyll/Core/DirectedGraph/Internal.hs
new file mode 100644
index 0000000..5b02ad6
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/Internal.hs
@@ -0,0 +1,43 @@
+-- | Internal structure of the DirectedGraph type. Not exported outside of the
+-- library.
+--
+module Hakyll.Core.DirectedGraph.Internal
+ ( Node (..)
+ , DirectedGraph (..)
+ ) where
+
+import Prelude hiding (reverse, filter)
+import Data.Monoid (Monoid, mempty, mappend)
+import Data.Set (Set)
+import Data.Map (Map)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+-- | A node in the directed graph
+--
+data Node a = Node
+ { nodeTag :: a -- ^ Tag identifying the node
+ , nodeNeighbours :: Set a -- ^ Edges starting at this node
+ } deriving (Show)
+
+-- | Append two nodes. Useful for joining graphs.
+--
+appendNodes :: Ord a => Node a -> Node a -> Node a
+appendNodes (Node t1 n1) (Node t2 n2)
+ | t1 /= t2 = error'
+ | otherwise = Node t1 (n1 `S.union` n2)
+ where
+ error' = error $ "Hakyll.Core.DirectedGraph.Internal.appendNodes: "
+ ++ "Appending differently tagged nodes"
+
+-- | Type used to represent a directed graph
+--
+newtype DirectedGraph a = DirectedGraph {unDirectedGraph :: Map a (Node a)}
+ deriving (Show)
+
+-- | Allow users to concatenate different graphs
+--
+instance Ord a => Monoid (DirectedGraph a) where
+ mempty = DirectedGraph M.empty
+ mappend (DirectedGraph m1) (DirectedGraph m2) = DirectedGraph $
+ M.unionWith appendNodes m1 m2
diff --git a/src/Hakyll/Core/Identifier.hs b/src/Hakyll/Core/Identifier.hs
new file mode 100644
index 0000000..16403e6
--- /dev/null
+++ b/src/Hakyll/Core/Identifier.hs
@@ -0,0 +1,59 @@
+-- | An identifier is a type used to uniquely identify a resource, target...
+--
+-- One can think of an identifier as something similar to a file path. An
+-- identifier is a path as well, with the different elements in the path
+-- separated by @/@ characters. Examples of identifiers are:
+--
+-- * @posts/foo.markdown@
+--
+-- * @index@
+--
+-- * @error/404@
+--
+-- The most important difference between an 'Identifier' and a file path is that
+-- the identifier for an item is not necessarily the file path.
+--
+-- For example, we could have an @index@ identifier, generated by Hakyll. The
+-- actual file path would be @index.html@, but we identify it using @index@.
+--
+-- @posts/foo.markdown@ could be an identifier of an item that is rendered to
+-- @posts/foo.html@. In this case, the identifier is the name of the source
+-- file of the page.
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Identifier
+ ( Identifier (..)
+ , parseIdentifier
+ , toFilePath
+ ) where
+
+import Control.Arrow (second)
+import Data.Monoid (Monoid)
+
+import GHC.Exts (IsString, fromString)
+import System.FilePath (joinPath)
+
+-- | An identifier used to uniquely identify a value
+--
+newtype Identifier = Identifier {unIdentifier :: [String]}
+ deriving (Eq, Ord, Monoid)
+
+instance Show Identifier where
+ show = toFilePath
+
+instance IsString Identifier where
+ fromString = parseIdentifier
+
+-- | Parse an identifier from a string
+--
+parseIdentifier :: String -> Identifier
+parseIdentifier = Identifier . filter (not . null) . split'
+ where
+ split' [] = [[]]
+ split' str = let (pre, post) = second (drop 1) $ break (== '/') str
+ in pre : split' post
+
+-- | Convert an identifier to a relative 'FilePath'
+--
+toFilePath :: Identifier -> FilePath
+toFilePath = joinPath . unIdentifier
diff --git a/src/Hakyll/Core/Identifier/Pattern.hs b/src/Hakyll/Core/Identifier/Pattern.hs
new file mode 100644
index 0000000..7c88356
--- /dev/null
+++ b/src/Hakyll/Core/Identifier/Pattern.hs
@@ -0,0 +1,160 @@
+-- | Module providing pattern matching and capturing on 'Identifier's.
+--
+-- A very simple pattern could be, for example, @foo\/bar@. This pattern will
+-- only match the exact @foo\/bar@ identifier.
+--
+-- To match more than one identifier, there are different captures that one can
+-- use:
+--
+-- * @*@: matches exactly one element of an identifier;
+--
+-- * @**@: matches one or more elements of an identifier.
+--
+-- Some examples:
+--
+-- * @foo\/*@ will match @foo\/bar@ and @foo\/foo@, but not @foo\/bar\/qux@ nor
+-- @foo@;
+--
+-- * @**@ will match any non-empty identifier;
+--
+-- * @foo\/**@ will match @foo\/bar@ and @foo\/bar\/qux@, but not @bar\/foo@ nor
+-- @foo@;
+--
+-- A small warning: patterns are not globs. Using @foo\/*.markdown@ will not do
+-- what you probably intended, as it will only match the file which is literally
+-- called @foo\/*.markdown@. Remember that these captures only work on elements
+-- of identifiers as a whole; not on parts of these elements.
+--
+-- Furthermore, the 'match' function allows the user to get access to the
+-- elements captured by the capture elements in the pattern.
+--
+module Hakyll.Core.Identifier.Pattern
+ ( Pattern
+ , parsePattern
+ , match
+ , doesMatch
+ , matches
+ , fromCapture
+ , fromCaptureString
+ , fromCaptures
+ ) where
+
+import Data.List (intercalate)
+import Control.Monad (msum)
+import Data.Maybe (isJust)
+import Data.Monoid (mempty, mappend)
+
+import GHC.Exts (IsString, fromString)
+
+import Hakyll.Core.Identifier
+
+-- | One base element of a pattern
+--
+data PatternComponent = CaptureOne
+ | CaptureMany
+ | Literal String
+ deriving (Eq)
+
+instance Show PatternComponent where
+ show CaptureOne = "*"
+ show CaptureMany = "**"
+ show (Literal s) = s
+
+-- | Type that allows matching on identifiers
+--
+newtype Pattern = Pattern {unPattern :: [PatternComponent]}
+ deriving (Eq)
+
+instance Show Pattern where
+ show = intercalate "/" . map show . unPattern
+
+instance IsString Pattern where
+ fromString = parsePattern
+
+-- | Parse a pattern from a string
+--
+parsePattern :: String -> Pattern
+parsePattern = Pattern . map toPattern . unIdentifier . parseIdentifier
+ where
+ toPattern x | x == "*" = CaptureOne
+ | x == "**" = CaptureMany
+ | otherwise = Literal x
+
+-- | Match an identifier against a pattern, generating a list of captures
+--
+match :: Pattern -> Identifier -> Maybe [Identifier]
+match (Pattern p) (Identifier i) = fmap (map Identifier) $ match' p i
+
+-- | Check if an identifier matches a pattern
+--
+doesMatch :: Pattern -> Identifier -> Bool
+doesMatch p = isJust . match p
+
+-- | Given a list of identifiers, retain only those who match the given pattern
+--
+matches :: Pattern -> [Identifier] -> [Identifier]
+matches p = filter (doesMatch p)
+
+-- | Split a list at every possible point, generate a list of (init, tail) cases
+--
+splits :: [a] -> [([a], [a])]
+splits ls = reverse $ splits' [] ls
+ where
+ splits' lx ly = (lx, ly) : case ly of
+ [] -> []
+ (y : ys) -> splits' (lx ++ [y]) ys
+
+-- | Internal version of 'match'
+--
+match' :: [PatternComponent] -> [String] -> Maybe [[String]]
+match' [] [] = Just [] -- An empty match
+match' [] _ = Nothing -- No match
+match' _ [] = Nothing -- No match
+match' (m : ms) (s : ss) = case m of
+ -- Take one string and one literal, fail on mismatch
+ Literal l -> if s == l then match' ms ss else Nothing
+ -- Take one string and one capture
+ CaptureOne -> fmap ([s] :) $ match' ms ss
+ -- Take one string, and one or many captures
+ CaptureMany ->
+ let take' (i, t) = fmap (i :) $ match' ms t
+ in msum $ map take' $ splits (s : ss)
+
+-- | Create an identifier from a pattern by filling in the captures with a given
+-- string
+--
+-- Example:
+--
+-- > fromCapture (parsePattern "tags/*") (parseIdentifier "foo")
+--
+-- Result:
+--
+-- > "tags/foo"
+--
+fromCapture :: Pattern -> Identifier -> Identifier
+fromCapture pattern = fromCaptures pattern . repeat
+
+-- | Simplified version of 'fromCapture' which takes a 'String' instead of an
+-- 'Identifier'
+--
+-- > fromCaptureString (parsePattern "tags/*") "foo"
+--
+-- Result:
+--
+-- > "tags/foo"
+--
+fromCaptureString :: Pattern -> String -> Identifier
+fromCaptureString pattern = fromCapture pattern . parseIdentifier
+
+-- | Create an identifier from a pattern by filling in the captures with the
+-- given list of strings
+--
+fromCaptures :: Pattern -> [Identifier] -> Identifier
+fromCaptures (Pattern []) _ = mempty
+fromCaptures (Pattern (m : ms)) [] = case m of
+ Literal l -> Identifier [l] `mappend` fromCaptures (Pattern ms) []
+ _ -> error $ "Hakyll.Core.Identifier.Pattern.fromCaptures: "
+ ++ "identifier list exhausted"
+fromCaptures (Pattern (m : ms)) ids@(i : is) = case m of
+ Literal l -> Identifier [l] `mappend` fromCaptures (Pattern ms) ids
+ _ -> i `mappend` fromCaptures (Pattern ms) is
diff --git a/src/Hakyll/Core/Logger.hs b/src/Hakyll/Core/Logger.hs
new file mode 100644
index 0000000..720dee0
--- /dev/null
+++ b/src/Hakyll/Core/Logger.hs
@@ -0,0 +1,90 @@
+-- | Produce pretty, thread-safe logs
+--
+{-# LANGUAGE BangPatterns #-}
+module Hakyll.Core.Logger
+ ( Logger
+ , makeLogger
+ , flushLogger
+ , section
+ , timed
+ , report
+ ) where
+
+import Control.Monad (forever)
+import Control.Monad.Trans (MonadIO, liftIO)
+import Control.Applicative ((<$>), (<*>))
+import Control.Concurrent (forkIO)
+import Control.Concurrent.Chan.Strict (Chan, newChan, readChan, writeChan)
+import Control.Concurrent.MVar.Strict (MVar, newEmptyMVar, takeMVar, putMVar)
+import Text.Printf (printf)
+
+import Data.Time (getCurrentTime, diffUTCTime)
+
+-- | Logger structure. Very complicated.
+--
+data Logger = Logger
+ { loggerChan :: Chan (Maybe String) -- Nothing marks the end
+ , loggerSync :: MVar () -- Used for sync on quit
+ }
+
+-- | Create a new logger
+--
+makeLogger :: IO Logger
+makeLogger = do
+ logger <- Logger <$> newChan <*> newEmptyMVar
+ _ <- forkIO $ loggerThread logger
+ return logger
+ where
+ loggerThread logger = forever $ do
+ msg <- readChan $ loggerChan logger
+ case msg of
+ -- Stop: sync
+ Nothing -> putMVar (loggerSync logger) ()
+ -- Print and continue
+ Just m -> putStrLn m
+
+-- | Flush the logger (blocks until flushed)
+--
+flushLogger :: Logger -> IO ()
+flushLogger logger = do
+ writeChan (loggerChan logger) Nothing
+ () <- takeMVar $ loggerSync logger
+ return ()
+
+-- | Send a raw message to the logger
+--
+message :: Logger -> String -> IO ()
+message logger = writeChan (loggerChan logger) . Just
+
+-- | Start a section in the log
+--
+section :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Section name
+ -> m () -- ^ No result
+section logger = liftIO . message logger
+
+-- | Execute a monadic action and log the duration
+--
+timed :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Message
+ -> m a -- ^ Action
+ -> m a -- ^ Timed and logged action
+timed logger msg action = do
+ start <- liftIO getCurrentTime
+ !result <- action
+ stop <- liftIO getCurrentTime
+ let diff = fromEnum $ diffUTCTime stop start
+ ms = diff `div` 10 ^ (9 :: Int)
+ formatted = printf " [%4dms] %s" ms msg
+ liftIO $ message logger formatted
+ return result
+
+-- | Log something at the same level as 'timed', but without the timing
+--
+report :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Message
+ -> m () -- ^ No result
+report logger msg = liftIO $ message logger $ " [ ] " ++ msg
diff --git a/src/Hakyll/Core/ResourceProvider.hs b/src/Hakyll/Core/ResourceProvider.hs
new file mode 100644
index 0000000..dcd4af0
--- /dev/null
+++ b/src/Hakyll/Core/ResourceProvider.hs
@@ -0,0 +1,75 @@
+-- | This module provides an API for resource providers. Resource providers
+-- allow Hakyll to get content from resources; the type of resource depends on
+-- the concrete instance.
+--
+-- A resource is represented by the 'Resource' type. This is basically just a
+-- newtype wrapper around 'Identifier' -- but it has an important effect: it
+-- guarantees that a resource with this identifier can be provided by one or
+-- more resource providers.
+--
+-- Therefore, it is not recommended to read files directly -- you should use the
+-- provided 'Resource' methods.
+--
+module Hakyll.Core.ResourceProvider
+ ( Resource (..)
+ , ResourceProvider (..)
+ , resourceExists
+ , resourceDigest
+ , resourceModified
+ ) where
+
+import Control.Monad ((<=<))
+import Data.Word (Word8)
+
+import qualified Data.ByteString.Lazy as LB
+import OpenSSL.Digest.ByteString.Lazy (digest)
+import OpenSSL.Digest (MessageDigest (MD5))
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Store
+
+-- | A resource
+--
+-- Invariant: the resource specified by the given identifier must exist
+--
+newtype Resource = Resource {unResource :: Identifier}
+ deriving (Eq, Show, Ord)
+
+-- | A value responsible for retrieving and listing resources
+--
+data ResourceProvider = ResourceProvider
+ { -- | A list of all resources this provider is able to provide
+ resourceList :: [Resource]
+ , -- | Retrieve a certain resource as string
+ resourceString :: Resource -> IO String
+ , -- | Retrieve a certain resource as lazy bytestring
+ resourceLazyByteString :: Resource -> IO LB.ByteString
+ }
+
+-- | Check if a given identifier has a resource
+--
+resourceExists :: ResourceProvider -> Identifier -> Bool
+resourceExists provider = flip elem $ map unResource $ resourceList provider
+
+-- | Retrieve a digest for a given resource
+--
+resourceDigest :: ResourceProvider -> Resource -> IO [Word8]
+resourceDigest provider = digest MD5 <=< resourceLazyByteString provider
+
+-- | Check if a resource was modified
+--
+resourceModified :: ResourceProvider -> Resource -> Store -> IO Bool
+resourceModified provider resource store = do
+ -- Get the latest seen digest from the store
+ lastDigest <- storeGet store itemName $ unResource resource
+ -- Calculate the digest for the resource
+ newDigest <- resourceDigest provider resource
+ -- Check digests
+ if Just newDigest == lastDigest
+ -- All is fine, not modified
+ then return False
+ -- Resource modified; store new digest
+ else do storeSet store itemName (unResource resource) newDigest
+ return True
+ where
+ itemName = "Hakyll.Core.ResourceProvider.resourceModified"
diff --git a/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs b/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs
new file mode 100644
index 0000000..0d89b21
--- /dev/null
+++ b/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs
@@ -0,0 +1,29 @@
+-- | A concrete 'ResourceProvider' that gets its resources from the filesystem
+--
+module Hakyll.Core.ResourceProvider.FileResourceProvider
+ ( fileResourceProvider
+ ) where
+
+import Control.Applicative ((<$>))
+
+import qualified Data.ByteString.Lazy as LB
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+import Hakyll.Core.Configuration
+
+-- | Create a filesystem-based 'ResourceProvider'
+--
+fileResourceProvider :: HakyllConfiguration -> IO ResourceProvider
+fileResourceProvider configuration = do
+ -- Retrieve a list of identifiers
+ list <- map parseIdentifier . filter (not . ignoreFile configuration) <$>
+ getRecursiveContents False "."
+
+ -- Construct a resource provider
+ return ResourceProvider
+ { resourceList = map Resource list
+ , resourceString = readFile . toFilePath . unResource
+ , resourceLazyByteString = LB.readFile . toFilePath . unResource
+ }
diff --git a/src/Hakyll/Core/Routes.hs b/src/Hakyll/Core/Routes.hs
new file mode 100644
index 0000000..fcab28d
--- /dev/null
+++ b/src/Hakyll/Core/Routes.hs
@@ -0,0 +1,136 @@
+-- | Once a target is compiled, the user usually wants to save it to the disk.
+-- This is where the 'Routes' type comes in; it determines where a certain
+-- target should be written.
+--
+-- Suppose we have an item @foo\/bar.markdown@. We can render this to
+-- @foo\/bar.html@ using:
+--
+-- > route "foo/bar.markdown" (setExtension ".html")
+--
+-- If we do not want to change the extension, we can use 'idRoute', the simplest
+-- route available:
+--
+-- > route "foo/bar.markdown" idRoute
+--
+-- That will route @foo\/bar.markdown@ to @foo\/bar.markdown@.
+--
+-- Note that the extension says nothing about the content! If you set the
+-- extension to @.html@, it is your own responsibility to ensure that the
+-- content is indeed HTML.
+--
+-- Finally, some special cases:
+--
+-- * If there is no route for an item, this item will not be routed, so it will
+-- not appear in your site directory.
+--
+-- * If an item matches multiple routes, the first rule will be chosen.
+--
+module Hakyll.Core.Routes
+ ( Routes
+ , runRoutes
+ , idRoute
+ , setExtension
+ , ifMatch
+ , customRoute
+ , gsubRoute
+ , composeRoutes
+ ) where
+
+import Data.Monoid (Monoid, mempty, mappend)
+import Control.Monad (mplus)
+import System.FilePath (replaceExtension)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Util.String
+
+-- | Type used for a route
+--
+newtype Routes = Routes {unRoutes :: Identifier -> Maybe FilePath}
+
+instance Monoid Routes where
+ mempty = Routes $ const Nothing
+ mappend (Routes f) (Routes g) = Routes $ \id' -> f id' `mplus` g id'
+
+-- | Apply a route to an identifier
+--
+runRoutes :: Routes -> Identifier -> Maybe FilePath
+runRoutes = unRoutes
+
+-- | A route that uses the identifier as filepath. For example, the target with
+-- ID @foo\/bar@ will be written to the file @foo\/bar@.
+--
+idRoute :: Routes
+idRoute = Routes $ Just . toFilePath
+
+-- | Set (or replace) the extension of a route.
+--
+-- Example:
+--
+-- > runRoute (setExtension "html") "foo/bar"
+--
+-- Result:
+--
+-- > Just "foo/bar.html"
+--
+-- Example:
+--
+-- > runRoute (setExtension "html") "posts/the-art-of-trolling.markdown"
+--
+-- Result:
+--
+-- > Just "posts/the-art-of-trolling.html"
+--
+setExtension :: String -> Routes
+setExtension extension = Routes $ fmap (`replaceExtension` extension)
+ . unRoutes idRoute
+
+-- | Modify a route: apply the route if the identifier matches the given
+-- pattern, fail otherwise.
+--
+ifMatch :: Pattern -> Routes -> Routes
+ifMatch pattern (Routes route) = Routes $ \id' ->
+ if doesMatch pattern id' then route id'
+ else Nothing
+
+-- | Create a custom route. This should almost always be used with 'ifMatch'.
+--
+customRoute :: (Identifier -> FilePath) -> Routes
+customRoute f = Routes $ Just . f
+
+-- | Create a gsub route
+--
+-- Example:
+--
+-- > runRoutes (gsubRoute "rss/" (const "")) "tags/rss/bar.xml"
+--
+-- Result:
+--
+-- > Just "tags/bar.xml"
+--
+gsubRoute :: String -- ^ Pattern
+ -> (String -> String) -- ^ Replacement
+ -> Routes -- ^ Resulting route
+gsubRoute pattern replacement = customRoute $
+ replaceAll pattern replacement . toFilePath
+
+-- | Compose routes so that @f `composeRoutes` g@ is more or less equivalent
+-- to @f >>> g@.
+--
+-- Example:
+--
+-- > let routes = gsubRoute "rss/" (const "") `composeRoutes` setExtension "xml"
+-- > in runRoutes routes "tags/rss/bar"
+--
+-- Result:
+--
+-- > Just "tags/bar.xml"
+--
+-- If the first route given fails, Hakyll will not apply the second route.
+--
+composeRoutes :: Routes -- ^ First route to apply
+ -> Routes -- ^ Second route to apply
+ -> Routes -- ^ Resulting route
+composeRoutes (Routes f) (Routes g) = Routes $ \i -> do
+ p <- f i
+ g $ parseIdentifier p
diff --git a/src/Hakyll/Core/Rules.hs b/src/Hakyll/Core/Rules.hs
new file mode 100644
index 0000000..eba3fb9
--- /dev/null
+++ b/src/Hakyll/Core/Rules.hs
@@ -0,0 +1,161 @@
+-- | This module provides a declarative DSL in which the user can specify the
+-- different rules used to run the compilers.
+--
+-- The convention is to just list all items in the 'RulesM' monad, routes and
+-- compilation rules.
+--
+-- A typical usage example would be:
+--
+-- > main = hakyll $ do
+-- > route "posts/*" (setExtension "html")
+-- > compile "posts/*" someCompiler
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings #-}
+module Hakyll.Core.Rules
+ ( RulesM
+ , Rules
+ , compile
+ , create
+ , route
+ , metaCompile
+ , metaCompileWith
+ ) where
+
+import Control.Applicative ((<$>))
+import Control.Monad.Writer (tell)
+import Control.Monad.Reader (ask)
+import Control.Arrow (second, (>>>), arr, (>>^))
+import Control.Monad.State (get, put)
+import Data.Monoid (mempty)
+import qualified Data.Set as S
+
+import Data.Typeable (Typeable)
+import Data.Binary (Binary)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.Util.Arrow
+
+-- | Add a route
+--
+tellRoute :: Routes -> Rules
+tellRoute route' = RulesM $ tell $ RuleSet route' mempty mempty
+
+-- | Add a number of compilers
+--
+tellCompilers :: (Binary a, Typeable a, Writable a)
+ => [(Identifier, Compiler () a)]
+ -> Rules
+tellCompilers compilers = RulesM $ tell $ RuleSet mempty compilers' mempty
+ where
+ compilers' = map (second boxCompiler) compilers
+ boxCompiler = (>>> arr compiledItem >>> arr CompileRule)
+
+-- | Add resources
+--
+tellResources :: [Resource]
+ -> Rules
+tellResources resources = RulesM $ tell $ RuleSet mempty mempty $ S.fromList resources
+
+-- | Add a compilation rule to the rules.
+--
+-- This instructs all resources matching the given pattern to be compiled using
+-- the given compiler. When no resources match the given pattern, nothing will
+-- happen. In this case, you might want to have a look at 'create'.
+--
+compile :: (Binary a, Typeable a, Writable a)
+ => Pattern -> Compiler Resource a -> Rules
+compile pattern compiler = RulesM $ do
+ identifiers <- matches pattern . map unResource . resourceList <$> ask
+ unRulesM $ do
+ tellCompilers $ flip map identifiers $ \identifier ->
+ (identifier, constA (Resource identifier) >>> compiler)
+ tellResources $ map Resource identifiers
+
+-- | Add a compilation rule
+--
+-- This sets a compiler for the given identifier. No resource is needed, since
+-- we are creating the item from scratch. This is useful if you want to create a
+-- page on your site that just takes content from other items -- but has no
+-- actual content itself.
+--
+create :: (Binary a, Typeable a, Writable a)
+ => Identifier -> Compiler () a -> Rules
+create identifier compiler = tellCompilers [(identifier, compiler)]
+
+-- | Add a route.
+--
+-- This adds a route for all items matching the given pattern.
+--
+route :: Pattern -> Routes -> Rules
+route pattern route' = tellRoute $ ifMatch pattern route'
+
+-- | Apart from regular compilers, one is also able to specify metacompilers.
+-- Metacompilers are a special class of compilers: they are compilers which
+-- produce other compilers.
+--
+-- This is needed when the list of compilers depends on something we cannot know
+-- before actually running other compilers. The most typical example is if we
+-- have a blogpost using tags.
+--
+-- Every post has a collection of tags. For example,
+--
+-- > post1: code, haskell
+-- > post2: code, random
+--
+-- Now, we want to create a list of posts for every tag. We cannot write this
+-- down in our 'Rules' DSL directly, since we don't know what tags the different
+-- posts will have -- we depend on information that will only be available when
+-- we are actually compiling the pages.
+--
+-- The solution is simple, using 'metaCompile', we can add a compiler that will
+-- parse the pages and produce the compilers needed for the different tag pages.
+--
+-- And indeed, we can see that the first argument to 'metaCompile' is a
+-- 'Compiler' which produces a list of ('Identifier', 'Compiler') pairs. The
+-- idea is simple: 'metaCompile' produces a list of compilers, and the
+-- corresponding identifiers.
+--
+-- For simple Hakyll systems, there is no need for this construction. More
+-- formally, it is only needed when the content of one or more items determines
+-- which items must be rendered.
+--
+metaCompile :: (Binary a, Typeable a, Writable a)
+ => Compiler () [(Identifier, Compiler () a)]
+ -- ^ Compiler generating the other compilers
+ -> Rules
+ -- ^ Resulting rules
+metaCompile compiler = RulesM $ do
+ -- Create an identifier from the state
+ state <- get
+ let index = rulesMetaCompilerIndex state
+ id' = fromCaptureString "Hakyll.Core.Rules.metaCompile/*" (show index)
+
+ -- Update the state with a new identifier
+ put $ state {rulesMetaCompilerIndex = index + 1}
+
+ -- Fallback to 'metaCompileWith' with now known identifier
+ unRulesM $ metaCompileWith id' compiler
+
+-- | Version of 'metaCompile' that allows you to specify a custom identifier for
+-- the metacompiler.
+--
+metaCompileWith :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -- ^ Identifier for this compiler
+ -> Compiler () [(Identifier, Compiler () a)]
+ -- ^ Compiler generating the other compilers
+ -> Rules
+ -- ^ Resulting rules
+metaCompileWith identifier compiler = RulesM $ tell $
+ RuleSet mempty compilers mempty
+ where
+ makeRule = MetaCompileRule . map (second box)
+ compilers = [(identifier, compiler >>> arr makeRule )]
+ box = (>>> fromDependency identifier >>^ CompileRule . compiledItem)
diff --git a/src/Hakyll/Core/Rules/Internal.hs b/src/Hakyll/Core/Rules/Internal.hs
new file mode 100644
index 0000000..2895257
--- /dev/null
+++ b/src/Hakyll/Core/Rules/Internal.hs
@@ -0,0 +1,75 @@
+-- | Internal rules module for types which are not exposed to the user
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Rules.Internal
+ ( CompileRule (..)
+ , RuleSet (..)
+ , RuleState (..)
+ , RulesM (..)
+ , Rules
+ , runRules
+ ) where
+
+import Control.Applicative (Applicative)
+import Control.Monad.Writer (WriterT, execWriterT)
+import Control.Monad.Reader (ReaderT, runReaderT)
+import Control.Monad.State (State, evalState)
+import Data.Monoid (Monoid, mempty, mappend)
+import Data.Set (Set)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.CompiledItem
+
+-- | Output of a compiler rule
+--
+-- * The compiler will produce a simple item. This is the most common case.
+--
+-- * The compiler will produce more compilers. These new compilers need to be
+-- added to the runtime if possible, since other items might depend upon them.
+--
+data CompileRule = CompileRule CompiledItem
+ | MetaCompileRule [(Identifier, Compiler () CompileRule)]
+
+-- | A collection of rules for the compilation process
+--
+data RuleSet = RuleSet
+ { -- | Routes used in the compilation structure
+ rulesRoutes :: Routes
+ , -- | Compilation rules
+ rulesCompilers :: [(Identifier, Compiler () CompileRule)]
+ , -- | A list of the used resources
+ rulesResources :: Set Resource
+ }
+
+instance Monoid RuleSet where
+ mempty = RuleSet mempty mempty mempty
+ mappend (RuleSet r1 c1 s1) (RuleSet r2 c2 s2) =
+ RuleSet (mappend r1 r2) (mappend c1 c2) (mappend s1 s2)
+
+-- | Rule state
+--
+data RuleState = RuleState
+ { rulesMetaCompilerIndex :: Int
+ } deriving (Show)
+
+-- | The monad used to compose rules
+--
+newtype RulesM a = RulesM
+ { unRulesM :: ReaderT ResourceProvider (WriterT RuleSet (State RuleState)) a
+ } deriving (Monad, Functor, Applicative)
+
+-- | Simplification of the RulesM type; usually, it will not return any
+-- result.
+--
+type Rules = RulesM ()
+
+-- | Run a Rules monad, resulting in a 'RuleSet'
+--
+runRules :: Rules -> ResourceProvider -> RuleSet
+runRules rules provider =
+ evalState (execWriterT $ runReaderT (unRulesM rules) provider) state
+ where
+ state = RuleState {rulesMetaCompilerIndex = 0}
diff --git a/src/Hakyll/Core/Run.hs b/src/Hakyll/Core/Run.hs
new file mode 100644
index 0000000..09864be
--- /dev/null
+++ b/src/Hakyll/Core/Run.hs
@@ -0,0 +1,207 @@
+-- | This is the module which binds it all together
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Run
+ ( run
+ ) where
+
+import Prelude hiding (reverse)
+import Control.Monad (filterM)
+import Control.Monad.Trans (liftIO)
+import Control.Applicative (Applicative, (<$>))
+import Control.Monad.Reader (ReaderT, runReaderT, ask)
+import Control.Monad.State.Strict (StateT, evalStateT, get, modify)
+import Control.Arrow ((&&&))
+import qualified Data.Map as M
+import Data.Monoid (mempty, mappend)
+import System.FilePath ((</>))
+import Data.Set (Set)
+import qualified Data.Set as S
+
+import Hakyll.Core.Routes
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+import Hakyll.Core.Compiler
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.ResourceProvider.FileResourceProvider
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.DirectedGraph
+import Hakyll.Core.DirectedGraph.DependencySolver
+import Hakyll.Core.Writable
+import Hakyll.Core.Store
+import Hakyll.Core.Configuration
+import Hakyll.Core.Logger
+
+-- | Run all rules needed, return the rule set used
+--
+run :: HakyllConfiguration -> Rules -> IO RuleSet
+run configuration rules = do
+ logger <- makeLogger
+
+ section logger "Initialising"
+ store <- timed logger "Creating store" $
+ makeStore $ storeDirectory configuration
+ provider <- timed logger "Creating provider" $
+ fileResourceProvider configuration
+
+ let ruleSet = runRules rules provider
+ compilers = rulesCompilers ruleSet
+
+ -- Extract the reader/state
+ reader = unRuntime $ addNewCompilers [] compilers
+ state' = runReaderT reader $ env logger ruleSet provider store
+
+ evalStateT state' state
+
+ -- Flush and return
+ flushLogger logger
+ return ruleSet
+ where
+ env logger ruleSet provider store = RuntimeEnvironment
+ { hakyllLogger = logger
+ , hakyllConfiguration = configuration
+ , hakyllRoutes = rulesRoutes ruleSet
+ , hakyllResourceProvider = provider
+ , hakyllStore = store
+ }
+
+ state = RuntimeState
+ { hakyllModified = S.empty
+ , hakyllGraph = mempty
+ }
+
+data RuntimeEnvironment = RuntimeEnvironment
+ { hakyllLogger :: Logger
+ , hakyllConfiguration :: HakyllConfiguration
+ , hakyllRoutes :: Routes
+ , hakyllResourceProvider :: ResourceProvider
+ , hakyllStore :: Store
+ }
+
+data RuntimeState = RuntimeState
+ { hakyllModified :: Set Identifier
+ , hakyllGraph :: DirectedGraph Identifier
+ }
+
+newtype Runtime a = Runtime
+ { unRuntime :: ReaderT RuntimeEnvironment (StateT RuntimeState IO) a
+ } deriving (Functor, Applicative, Monad)
+
+-- | Return a set of modified identifiers
+--
+modified :: ResourceProvider -- ^ Resource provider
+ -> Store -- ^ Store
+ -> [Identifier] -- ^ Identifiers to check
+ -> IO (Set Identifier) -- ^ Modified resources
+modified provider store ids = fmap S.fromList $ flip filterM ids $ \id' ->
+ if resourceExists provider id'
+ then resourceModified provider (Resource id') store
+ else return False
+
+-- | Add a number of compilers and continue using these compilers
+--
+addNewCompilers :: [(Identifier, Compiler () CompileRule)]
+ -- ^ Remaining compilers yet to be run
+ -> [(Identifier, Compiler () CompileRule)]
+ -- ^ Compilers to add
+ -> Runtime ()
+addNewCompilers oldCompilers newCompilers = Runtime $ do
+ -- Get some information
+ logger <- hakyllLogger <$> ask
+ section logger "Adding new compilers"
+ provider <- hakyllResourceProvider <$> ask
+ store <- hakyllStore <$> ask
+
+ let -- All compilers
+ compilers = oldCompilers ++ newCompilers
+
+ -- Get all dependencies for the compilers
+ dependencies = flip map compilers $ \(id', compiler) ->
+ let deps = runCompilerDependencies compiler id' provider
+ in (id', deps)
+
+ -- Create a compiler map (Id -> Compiler)
+ compilerMap = M.fromList compilers
+
+ -- Create the dependency graph
+ currentGraph = fromList dependencies
+
+ -- Find the old graph and append the new graph to it. This forms the
+ -- complete graph
+ completeGraph <- timed logger "Creating graph" $
+ mappend currentGraph . hakyllGraph <$> get
+
+ orderedCompilers <- timed logger "Solving dependencies" $ do
+ -- Check which items are up-to-date. This only needs to happen for the new
+ -- compilers
+ oldModified <- hakyllModified <$> get
+ newModified <- liftIO $ modified provider store $ map fst newCompilers
+
+ let modified' = oldModified `S.union` newModified
+
+ -- Find obsolete items. Every item that is reachable from a modified
+ -- item is considered obsolete. From these obsolete items, we are only
+ -- interested in ones that are in the current subgraph.
+ obsolete = S.filter (`member` currentGraph)
+ $ reachableNodes modified' $ reverse completeGraph
+
+ -- Solve the graph and retain only the obsolete items
+ ordered = filter (`S.member` obsolete) $ solveDependencies currentGraph
+
+ -- Update the state
+ modify $ updateState modified' completeGraph
+
+ -- Join the order with the compilers again
+ return $ map (id &&& (compilerMap M.!)) ordered
+
+ -- Now run the ordered list of compilers
+ unRuntime $ runCompilers orderedCompilers
+ where
+ -- Add the modified information for the new compilers
+ updateState modified' graph state = state
+ { hakyllModified = modified'
+ , hakyllGraph = graph
+ }
+
+runCompilers :: [(Identifier, Compiler () CompileRule)]
+ -- ^ Ordered list of compilers
+ -> Runtime ()
+ -- ^ No result
+runCompilers [] = return ()
+runCompilers ((id', compiler) : compilers) = Runtime $ do
+ -- Obtain information
+ logger <- hakyllLogger <$> ask
+ routes <- hakyllRoutes <$> ask
+ provider <- hakyllResourceProvider <$> ask
+ store <- hakyllStore <$> ask
+ modified' <- hakyllModified <$> get
+
+ section logger $ "Compiling " ++ show id'
+
+ let -- Check if the resource was modified
+ isModified = id' `S.member` modified'
+
+ -- Run the compiler
+ result <- timed logger "Total compile time" $ liftIO $
+ runCompiler compiler id' provider routes store isModified logger
+
+ case result of
+ -- Compile rule for one item, easy stuff
+ CompileRule compiled -> do
+ case runRoutes routes id' of
+ Nothing -> return ()
+ Just url -> timed logger ("Routing to " ++ url) $ do
+ destination <-
+ destinationDirectory . hakyllConfiguration <$> ask
+ let path = destination </> url
+ liftIO $ makeDirectories path
+ liftIO $ write path compiled
+
+ -- Continue for the remaining compilers
+ unRuntime $ runCompilers compilers
+
+ -- Metacompiler, slightly more complicated
+ MetaCompileRule newCompilers ->
+ -- Actually I was just kidding, it's not hard at all
+ unRuntime $ addNewCompilers compilers newCompilers
diff --git a/src/Hakyll/Core/Store.hs b/src/Hakyll/Core/Store.hs
new file mode 100644
index 0000000..12e33a7
--- /dev/null
+++ b/src/Hakyll/Core/Store.hs
@@ -0,0 +1,88 @@
+-- | A store for storing and retrieving items
+--
+module Hakyll.Core.Store
+ ( Store
+ , makeStore
+ , storeSet
+ , storeGet
+ ) where
+
+import Control.Concurrent.MVar (MVar, newMVar, readMVar, modifyMVar_)
+import System.FilePath ((</>))
+import System.Directory (doesFileExist)
+import Data.Map (Map)
+import qualified Data.Map as M
+
+import Data.Binary (Binary, encodeFile, decodeFile)
+import Data.Typeable (Typeable)
+
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+
-- | Data structure used for the store
--
data Store = Store
    { -- | All items are stored on the filesystem
      storeDirectory :: FilePath
    , -- | And some items are also kept in-memory, as a cache over the
      -- filesystem copy
      storeMap :: MVar (Map FilePath CompiledItem)
    }
+
-- | Initialize a store rooted at the given directory, with an empty
-- in-memory cache
--
makeStore :: FilePath -> IO Store
makeStore directory = do
    inMemory <- newMVar M.empty
    return $ Store directory inMemory
+
-- | Auxiliary: record an item in the in-memory cache under its store path
--
addToMap :: (Binary a, Typeable a, Writable a)
         => Store -> FilePath -> a -> IO ()
addToMap store path value = modifyMVar_ (storeMap store) insert
  where
    insert m = return $ M.insert path (compiledItem value) m
+
-- | Build the on-disk path for an item: @store-dir/name/identifier/.hakyllstore@
--
makePath :: Store -> String -> Identifier -> FilePath
makePath store name identifier = foldl1 (</>)
    [storeDirectory store, name, toFilePath identifier, ".hakyllstore"]
+
-- | Store an item: persist it to disk and cache it in memory
--
storeSet :: (Binary a, Typeable a, Writable a)
         => Store -> String -> Identifier -> a -> IO ()
storeSet store name identifier value = do
    let path = makePath store name identifier
    makeDirectories path
    encodeFile path value
    addToMap store path value
+
-- | Load an item: first try the in-memory cache, then fall back to the
-- filesystem (caching what was read for next time)
--
storeGet :: (Binary a, Typeable a, Writable a)
         => Store -> String -> Identifier -> IO (Maybe a)
storeGet store name identifier = do
    -- First check the in-memory map
    map' <- readMVar $ storeMap store
    case M.lookup path map' of
        -- Found in the in-memory map
        -- NOTE(review): 'unCompiledItem' presumably fails if the cached type
        -- differs from the requested one -- confirm against CompiledItem
        Just c -> return $ Just $ unCompiledItem c
        -- Not found in the map, try the filesystem
        Nothing -> do
            exists <- doesFileExist path
            if not exists
                -- Not found in the filesystem either
                then return Nothing
                -- Found in the filesystem
                -- NOTE(review): 'decodeFile' decodes lazily, so the file may
                -- not be fully read/closed here -- verify this cannot leak
                -- handles under heavy use
                else do v <- decodeFile path
                        addToMap store path v
                        return $ Just v
  where
    path = makePath store name identifier
diff --git a/src/Hakyll/Core/UnixFilter.hs b/src/Hakyll/Core/UnixFilter.hs
new file mode 100644
index 0000000..ee4b6cd
--- /dev/null
+++ b/src/Hakyll/Core/UnixFilter.hs
@@ -0,0 +1,76 @@
+-- | A Compiler that supports unix filters.
+--
+module Hakyll.Core.UnixFilter
+ ( unixFilter
+ ) where
+
+import Control.Concurrent (forkIO)
+import System.IO (hPutStr, hClose, hGetContents)
+import System.Posix.Process (executeFile, forkProcess)
+import System.Posix.IO ( dupTo, createPipe, stdInput
+ , stdOutput, closeFd, fdToHandle
+ )
+
+import Hakyll.Core.Compiler
+
+-- | Use a unix filter as compiler. For example, we could use the 'rev' program
+-- as a compiler.
+--
+-- > rev :: Compiler Resource String
+-- > rev = getResourceString >>> unixFilter "rev" []
+--
+-- A more realistic example: one can use this to call, for example, the sass
+-- compiler on CSS files. More information about sass can be found here:
+--
+-- <http://sass-lang.com/>
+--
+-- The code is fairly straightforward, given that we use @.scss@ for sass:
+--
+-- > route "style.scss" $ setExtension "css"
+-- > compile "style.scss" $
+-- > getResourceString >>> unixFilter "sass" ["-s", "--scss"]
+-- > >>> arr compressCss
+--
unixFilter :: String                  -- ^ Program name
           -> [String]                -- ^ Program args
           -> Compiler String String  -- ^ Resulting compiler
unixFilter programName args =
    timedCompiler ("Executing external program " ++ programName) $
        unsafeCompiler $ unixFilterIO programName args
+
-- | Internally used function: run an external program, feeding it the given
-- string on stdin and returning what it writes to stdout
--
unixFilterIO :: String     -- ^ Program name
             -> [String]   -- ^ Program arguments
             -> String     -- ^ Input for the program's stdin
             -> IO String  -- ^ The program's stdout
unixFilterIO programName args input = do
    -- Create pipes
    (stdinRead, stdinWrite) <- createPipe
    (stdoutRead, stdoutWrite) <- createPipe

    -- Fork the child
    _ <- forkProcess $ do
        -- Copy our pipes over the regular stdin/stdout
        _ <- dupTo stdinRead stdInput
        _ <- dupTo stdoutWrite stdOutput

        -- Close the now unneeded file descriptors in the child
        mapM_ closeFd [stdinWrite, stdoutRead, stdinRead, stdoutWrite]

        -- Execute the program (True = search the PATH)
        _ <- executeFile programName True args Nothing
        return ()

    -- On the parent side, close the client-side FDs.
    mapM_ closeFd [stdinRead, stdoutWrite]

    -- Write the input to the child pipe from a separate thread
    _ <- forkIO $ do
        stdinWriteHandle <- fdToHandle stdinWrite
        hPutStr stdinWriteHandle input
        hClose stdinWriteHandle

    -- Receive the output from the child
    -- NOTE(review): 'hGetContents' reads lazily and the child's exit status
    -- is never waited on here -- verify zombie processes are reaped elsewhere
    stdoutReadHandle <- fdToHandle stdoutRead
    hGetContents stdoutReadHandle
diff --git a/src/Hakyll/Core/Util/Arrow.hs b/src/Hakyll/Core/Util/Arrow.hs
new file mode 100644
index 0000000..1896e11
--- /dev/null
+++ b/src/Hakyll/Core/Util/Arrow.hs
@@ -0,0 +1,25 @@
+-- | Various arrow utility functions
+--
+module Hakyll.Core.Util.Arrow
+ ( constA
+ , sequenceA
+ , unitA
+ ) where
+
+import Control.Arrow (Arrow, (&&&), arr, (>>^))
+
-- | An arrow that ignores its input and always yields the given value
--
constA :: Arrow a
       => c
       -> a b c
constA x = arr (\_ -> x)
+
-- | Run a list of arrows on the same input, collecting the results in order.
--
-- Fixed: the previous version used @foldl@ with a flipped reducer, which
-- returned the results in /reverse/ order. A right fold preserves the order
-- of the input list. The redundant 'arr' around the pure cons function is
-- also gone ('>>^' already takes a plain function).
--
sequenceA :: Arrow a
          => [a b c]
          -> a b [c]
sequenceA = foldr reduce (arr (const []))
  where
    -- Cons the head result onto the results of the remaining arrows
    reduce xa la = xa &&& la >>^ uncurry (:)
+
-- | An arrow that discards its input and yields unit
--
unitA :: Arrow a
      => a b ()
unitA = arr (\_ -> ())
diff --git a/src/Hakyll/Core/Util/File.hs b/src/Hakyll/Core/Util/File.hs
new file mode 100644
index 0000000..9babc8b
--- /dev/null
+++ b/src/Hakyll/Core/Util/File.hs
@@ -0,0 +1,90 @@
+-- | A module containing various file utility functions
+--
+module Hakyll.Core.Util.File
+ ( makeDirectories
+ , getRecursiveContents
+ , isFileObsolete
+ , isFileInternal
+ ) where
+
+import Control.Applicative ((<$>))
+import System.Time (ClockTime)
+import Control.Monad (forM, filterM)
+import Data.List (isPrefixOf)
+import System.Directory ( createDirectoryIfMissing, doesDirectoryExist
+ , doesFileExist, getModificationTime
+ , getDirectoryContents
+ )
+import System.FilePath ( normalise, takeDirectory, splitPath
+ , dropTrailingPathSeparator, (</>)
+ )
+
+import Hakyll.Core.Configuration
+
-- | Given a path to a file, try to make the path writable by making
-- all directories on the path.
--
makeDirectories :: FilePath -> IO ()
makeDirectories path = createDirectoryIfMissing True directory
  where
    directory = takeDirectory path
+
-- | Get all contents of a directory, recursively. Note that entries starting
-- with a dot (.) -- including @.@ and @..@ -- are ignored.
--
getRecursiveContents :: Bool            -- ^ Include directories?
                     -> FilePath        -- ^ Directory to search
                     -> IO [FilePath]   -- ^ List of files found
getRecursiveContents includeDirs topdir = do
    exists <- doesDirectoryExist topdir
    if not exists
        then return []
        else do
            entries <- filter isProper <$> getDirectoryContents topdir
            found <- forM entries $ \entry -> do
                let path = normalise $ topdir </> entry
                isDirectory <- doesDirectoryExist path
                if isDirectory
                    then getRecursiveContents includeDirs path
                    else return [path]
            let children = concat found
            return $ if includeDirs then topdir : children else children
  where
    -- Keep only entries that do not start with a dot
    isProper = not . (== ".") . take 1
+
-- | Check if a timestamp is obsolete compared to the timestamps of a number of
-- files. When there are no files (or none of them exist), it is never
-- obsolete.
--
isObsolete :: ClockTime   -- ^ The time to check.
           -> [FilePath]  -- ^ Dependencies of the cached file.
           -> IO Bool
isObsolete _ [] = return False
isObsolete timeStamp depends = do
    existing <- filterM doesFileExist depends
    if null existing
        -- Fixed: previously 'maximum' was applied to the (possibly empty)
        -- list of modification times, crashing when every dependency had
        -- been deleted.
        then return False
        else do
            times <- mapM getModificationTime existing
            return (timeStamp < maximum times)
+
-- | Check if a file is obsolete, given its dependencies. When the file does
-- not exist, it is always obsolete. Otherwise, it is obsolete if any of its
-- dependencies has a more recent modification time than the file.
--
isFileObsolete :: FilePath    -- ^ The cached file
               -> [FilePath]  -- ^ Dependencies of the cached file
               -> IO Bool
isFileObsolete file depends = do
    exists <- doesFileExist file
    if exists
        then do timeStamp <- getModificationTime file
                isObsolete timeStamp depends
        else return True
+
-- | Check if a file is meant for Hakyll internal use, i.e. if it is located in
-- the destination or store directory
--
isFileInternal :: HakyllConfiguration  -- ^ Configuration
               -> FilePath             -- ^ File to check
               -> Bool                 -- ^ If the given file is internal
isFileInternal configuration file = any (`isPrefixOf` fileParts) internalDirs
  where
    -- Compare path segment lists, not raw strings, so "_sitefoo" does not
    -- count as living inside "_site"
    segments = map dropTrailingPathSeparator . splitPath
    fileParts = segments file
    internalDirs = [ segments $ destinationDirectory configuration
                   , segments $ storeDirectory configuration
                   ]
diff --git a/src/Hakyll/Core/Util/String.hs b/src/Hakyll/Core/Util/String.hs
new file mode 100644
index 0000000..7f75a36
--- /dev/null
+++ b/src/Hakyll/Core/Util/String.hs
@@ -0,0 +1,48 @@
+-- | Miscellaneous string manipulation functions.
+--
+module Hakyll.Core.Util.String
+ ( trim
+ , replaceAll
+ , splitAll
+ ) where
+
+import Data.Char (isSpace)
+import Data.Maybe (listToMaybe)
+
+import Text.Regex.PCRE ((=~~))
+
-- | Trim a string (drop spaces, tabs and newlines at both sides).
--
trim :: String -> String
trim = stripFront . reverse . stripFront . reverse
  where
    -- Drop leading whitespace; applied to the reversed string this strips
    -- the trailing whitespace instead
    stripFront = dropWhile isSpace
+
-- | A simple (but inefficient) regex replace function
--
-- NOTE(review): a pattern that can match the empty string would make no
-- progress here and loop forever -- confirm callers never pass one.
--
replaceAll :: String              -- ^ Pattern
           -> (String -> String)  -- ^ Replacement (called on capture)
           -> String              -- ^ Source string
           -> String              -- ^ Result
replaceAll pattern f source = replaceAll' source
  where
    -- Replace the first match, then recurse on the text after it
    replaceAll' src = case listToMaybe (src =~~ pattern) of
        Nothing -> src
        Just (o, l) ->
            let (before, tmp) = splitAt o src
                (capture, after) = splitAt l tmp
            in before ++ f capture ++ replaceAll' after
+
-- | A simple regex split function. The resulting list will contain no empty
-- strings.
--
splitAll :: String    -- ^ Pattern
         -> String    -- ^ String to split
         -> [String]  -- ^ Result
splitAll pattern = filter (not . null) . splitAll'
  where
    -- Emit the text before the first match, then recurse on the text after
    -- the match; when nothing matches, the remainder is the last chunk
    splitAll' src = case listToMaybe (src =~~ pattern) of
        Nothing -> [src]
        Just (o, l) ->
            let (before, tmp) = splitAt o src
            in before : splitAll' (drop l tmp)
diff --git a/src/Hakyll/Core/Writable.hs b/src/Hakyll/Core/Writable.hs
new file mode 100644
index 0000000..a3fd421
--- /dev/null
+++ b/src/Hakyll/Core/Writable.hs
@@ -0,0 +1,22 @@
+-- | Describes writable items; items that can be saved to the disk
+--
+{-# LANGUAGE FlexibleInstances #-}
+module Hakyll.Core.Writable
+ ( Writable (..)
+ ) where
+
+import Data.Word (Word8)
+
+import qualified Data.ByteString as SB
+
-- | Describes an item that can be saved to the disk
--
class Writable a where
    -- | Save an item to the given filepath
    write :: FilePath -> a -> IO ()

-- | Strings are written as plain text files
instance Writable [Char] where
    write = writeFile

-- | Byte lists are packed into a strict 'SB.ByteString' and written verbatim
instance Writable [Word8] where
    write p = SB.writeFile p . SB.pack
diff --git a/src/Hakyll/Main.hs b/src/Hakyll/Main.hs
new file mode 100644
index 0000000..04b4cea
--- /dev/null
+++ b/src/Hakyll/Main.hs
@@ -0,0 +1,113 @@
+-- | Module providing the main hakyll function and command-line argument parsing
+--
+module Hakyll.Main
+ ( hakyll
+ , hakyllWith
+ ) where
+
+import Control.Concurrent (forkIO)
+import Control.Monad (when)
+import System.Environment (getProgName, getArgs)
+import System.Directory (doesDirectoryExist, removeDirectoryRecursive)
+
+import Hakyll.Core.Configuration
+import Hakyll.Core.Run
+import Hakyll.Core.Rules
+import Hakyll.Core.Rules.Internal
+import Hakyll.Web.Preview.Poll
+import Hakyll.Web.Preview.Server
+
-- | This usually is the function with which the user runs the hakyll compiler
--
hakyll :: Rules -> IO ()
hakyll rules = hakyllWith defaultHakyllConfiguration rules
+
-- | A variant of 'hakyll' which allows the user to specify a custom
-- configuration
--
hakyllWith :: HakyllConfiguration -> Rules -> IO ()
hakyllWith configuration rules = do
    args <- getArgs
    case args of
        ["build"]      -> build configuration rules
        ["clean"]      -> clean configuration
        ["help"]       -> help
        ["preview"]    -> preview configuration rules 8000
        ["preview", p] -> withPort p $ preview configuration rules
        ["rebuild"]    -> rebuild configuration rules
        ["server"]     -> server configuration 8000
        ["server", p]  -> withPort p $ server configuration
        _              -> help
  where
    -- Run the continuation with a parsed port number. Fixed: this previously
    -- used the partial 'read', which crashed with "Prelude.read: no parse"
    -- on a malformed port; now we show the usage text instead.
    withPort str continue = case reads str of
        [(port, "")] -> continue port
        _            -> help
+
-- | Build the site once, discarding the resulting rule set
--
build :: HakyllConfiguration -> Rules -> IO ()
build configuration rules = run configuration rules >> return ()
+
-- | Remove the output directories (destination and store)
--
clean :: HakyllConfiguration -> IO ()
clean configuration = mapM_ removeDir
    [ destinationDirectory configuration
    , storeDirectory configuration
    ]
  where
    -- Announce, then delete the directory if it actually exists
    removeDir dir = do
        putStrLn $ "Removing " ++ dir ++ "..."
        exists <- doesDirectoryExist dir
        when exists $ removeDirectoryRecursive dir
+
-- | Show usage information.
--
help :: IO ()
help = do
    name <- getProgName
    putStr $ unlines
        [ "ABOUT"
        , ""
        , "This is a Hakyll site generator program. You should always"
        , "run it from the project root directory."
        , ""
        , "USAGE"
        , ""
        , name ++ " build           Generate the site"
        , name ++ " clean           Clean up and remove cache"
        , name ++ " help            Show this message"
        , name ++ " preview [port]  Run a server and autocompile"
        , name ++ " rebuild         Clean up and build again"
        , name ++ " server [port]   Run a local test server"
        ]
+
-- | Preview the site: build it once, then serve it while rebuilding whenever
-- a resource changes
--
preview :: HakyllConfiguration -> Rules -> Int -> IO ()
preview configuration rules port = do
    -- Build once, keep the rule set
    ruleSet <- run configuration rules

    -- Get the resource list and a callback for the preview poll
    let resources = rulesResources ruleSet
        callback = build configuration rules

    -- Fork a thread polling for changes
    -- NOTE(review): the polling thread is never stopped explicitly; it dies
    -- with the process when the server thread exits
    _ <- forkIO $ previewPoll configuration resources callback

    -- Run the server in the main thread
    server configuration port
+
-- | Rebuild the site from scratch: clean first, then build
--
rebuild :: HakyllConfiguration -> Rules -> IO ()
rebuild configuration rules =
    clean configuration >> build configuration rules
+
-- | Start a static file server for the destination directory
--
server :: HakyllConfiguration -> Int -> IO ()
server configuration port = staticServer destination noHook port
  where
    destination = destinationDirectory configuration
    -- No work to do before serving a file
    noHook _ = return ()
diff --git a/src/Text/Hakyll/Internal/CompressCss.hs b/src/Hakyll/Web/CompressCss.hs
index 4a78791..2df08fd 100644
--- a/src/Text/Hakyll/Internal/CompressCss.hs
+++ b/src/Hakyll/Web/CompressCss.hs
@@ -1,30 +1,45 @@
-- | Module used for CSS compression. The compression is currently in a simple
--- state, but would typically reduce the number of bytes by about 25%.
-module Text.Hakyll.Internal.CompressCss
- ( compressCss
+-- state, but would typically reduce the number of bytes by about 25%.
+--
+module Hakyll.Web.CompressCss
+ ( compressCssCompiler
+ , compressCss
) where
+import Data.Char (isSpace)
import Data.List (isPrefixOf)
+import Control.Arrow ((>>^))
-import Text.Hakyll.Regex (substituteRegex)
+import Hakyll.Core.Compiler
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Util.String
+
+-- | Compiler form of 'compressCss'
+--
+compressCssCompiler :: Compiler Resource String
+compressCssCompiler = getResourceString >>^ compressCss
-- | Compress CSS to speed up your site.
+--
compressCss :: String -> String
compressCss = compressSeparators
. stripComments
. compressWhitespace
-- | Compresses certain forms of separators.
+--
compressSeparators :: String -> String
-compressSeparators = substituteRegex "; *}" "}"
- . substituteRegex " *([{};:]) *" "\\1"
- . substituteRegex ";;*" ";"
+compressSeparators = replaceAll "; *}" (const "}")
+ . replaceAll " *([{};:]) *" (take 1 . dropWhile isSpace)
+ . replaceAll ";;*" (const ";")
-- | Compresses all whitespace.
+--
compressWhitespace :: String -> String
-compressWhitespace = substituteRegex "[ \t\n][ \t\n]*" " "
+compressWhitespace = replaceAll "[ \t\n][ \t\n]*" (const " ")
-- | Function that strips CSS comments away.
+--
stripComments :: String -> String
stripComments [] = []
stripComments str
diff --git a/src/Hakyll/Web/Feed.hs b/src/Hakyll/Web/Feed.hs
new file mode 100644
index 0000000..85674c6
--- /dev/null
+++ b/src/Hakyll/Web/Feed.hs
@@ -0,0 +1,124 @@
+-- | A Module that allows easy rendering of RSS feeds.
+--
+-- The main rendering functions (@renderRss@, @renderAtom@) all assume that
+-- you pass the list of items so that the most recent entry in the feed is the
+-- first item in the list.
+--
+-- Also note that the pages should have (at least) the following fields to
+-- produce a correct feed:
+--
+-- - @$title@: Title of the item
+--
+-- - @$description@: Description to appear in the feed
+--
+-- - @$url@: URL to the item - this is usually set automatically.
+--
+-- In addition, the posts should be named according to the rules for
+-- 'Hakyll.Page.Metadata.renderDateField'.
+--
+module Hakyll.Web.Feed
+ ( FeedConfiguration (..)
+ , renderRss
+ , renderAtom
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow ((>>>), arr, (&&&))
+import Control.Monad ((<=<))
+import Data.Maybe (fromMaybe, listToMaybe)
+
+import Hakyll.Core.Compiler
+import Hakyll.Web.Page
+import Hakyll.Web.Page.Metadata
+import Hakyll.Web.Template
+import Hakyll.Web.Template.Read.Hakyll (readTemplate)
+import Hakyll.Web.Util.Url
+
+import Paths_hakyll
+
-- | This is a data structure to keep the configuration of a feed. It is
-- consumed by 'renderRss' and 'renderAtom'.
data FeedConfiguration = FeedConfiguration
    { -- | Title of the feed.
      feedTitle :: String
    , -- | Description of the feed.
      feedDescription :: String
    , -- | Name of the feed author.
      feedAuthorName :: String
    , -- | Absolute root URL of the feed site (e.g. @http://jaspervdj.be@)
      feedRoot :: String
    }
+
-- | This is an auxiliary function to create a listing that is, in fact, a feed.
-- The items should be sorted on date. The @$timestamp@ field should be set.
--
createFeed :: Template           -- ^ Feed template
           -> Template           -- ^ Item template
           -> String             -- ^ URL of the feed
           -> FeedConfiguration  -- ^ Feed configuration
           -> [Page String]      -- ^ Items to include
           -> String             -- ^ Resulting feed
createFeed feedTemplate itemTemplate url configuration items =
    pageBody $ applyTemplate feedTemplate
             $ setField "timestamp" timestamp
             $ setField "title" (feedTitle configuration)
             $ setField "description" (feedDescription configuration)
             -- Fixed: this previously (and wrongly) filled the author name
             -- from 'feedDescription'
             $ setField "authorName" (feedAuthorName configuration)
             $ setField "root" (feedRoot configuration)
             $ setField "url" url
             $ fromBody body
  where
    -- Preprocess items: every item also needs the feed root
    items' = flip map items $ applyTemplate itemTemplate
                            . setField "root" (feedRoot configuration)

    -- Body: concatenated items
    body = concatMap pageBody items'

    -- Take the first timestamp, which should be the most recent
    timestamp = fromMaybe "Unknown" $
        fmap (getField "timestamp") (listToMaybe items)
+
+
-- | Abstract function to render any feed.
--
renderFeed :: FilePath            -- ^ Feed template
           -> FilePath            -- ^ Item template
           -> FeedConfiguration   -- ^ Feed configuration
           -> Compiler [Page String] String  -- ^ Feed compiler
renderFeed feedTemplate itemTemplate configuration =
    id &&& getRoute >>> renderFeed'
  where
    -- Arrow rendering the feed from the items and the URL
    renderFeed' = unsafeCompiler $ \(items, url) -> do
        feedTemplate' <- loadTemplate feedTemplate
        itemTemplate' <- loadTemplate itemTemplate
        let url' = toUrl $ fromMaybe noUrl url
        return $ createFeed feedTemplate' itemTemplate' url' configuration items

    -- Auxiliary: load a template from a datafile (resolved via Paths_hakyll)
    loadTemplate = fmap readTemplate . readFile <=< getDataFileName

    -- URL is required to have a valid field; rendering a feed without a
    -- route is a programming error, hence 'error'
    noUrl = error "Hakyll.Web.Feed.renderFeed: no route specified"
+
-- | Render an RSS feed with a number of items.
--
renderRss :: FeedConfiguration            -- ^ Feed configuration
          -> Compiler [Page String] String  -- ^ Feed compiler
renderRss configuration =
    arr (map stampDate) >>>
    renderFeed "templates/rss.xml" "templates/rss-item.xml" configuration
  where
    -- RFC-822 style timestamps, as RSS expects
    stampDate = renderDateField "timestamp" "%a, %d %b %Y %H:%M:%S UT"
                                "No date found."
+
-- | Render an Atom feed with a number of items.
--
renderAtom :: FeedConfiguration            -- ^ Feed configuration
           -> Compiler [Page String] String  -- ^ Feed compiler
renderAtom configuration =
    arr (map stampDate) >>>
    renderFeed "templates/atom.xml" "templates/atom-item.xml" configuration
  where
    -- ISO-8601 timestamps, as Atom expects
    stampDate = renderDateField "timestamp" "%Y-%m-%dT%H:%M:%SZ"
                                "No date found."
diff --git a/src/Hakyll/Web/FileType.hs b/src/Hakyll/Web/FileType.hs
new file mode 100644
index 0000000..cd1188a
--- /dev/null
+++ b/src/Hakyll/Web/FileType.hs
@@ -0,0 +1,55 @@
+-- | A module dealing with common file extensions and associated file types.
+--
+module Hakyll.Web.FileType
+ ( FileType (..)
+ , fileType
+ , getFileType
+ ) where
+
+import System.FilePath (takeExtension)
+import Control.Arrow ((>>^))
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler
+
-- | Datatype to represent the different file types Hakyll can deal with by
-- default
--
data FileType
    = Html
    | LaTeX
    | LiterateHaskell FileType  -- ^ Inner markup type (e.g. 'Markdown' for @.lhs@)
    | Markdown
    | Rst
    | PlainText
    | Css
    | Binary                    -- ^ Fallback for unrecognized extensions
    deriving (Eq, Ord, Show, Read)
+
-- | Get the file type for a certain file. The type is determined by extension.
--
fileType :: FilePath -> FileType
fileType path = maybe Binary (\t -> t) $ lookup (takeExtension path) known
  where
    -- Extension table; anything not listed is treated as binary
    known =
        [ (".htm",      Html)
        , (".html",     Html)
        , (".lhs",      LiterateHaskell Markdown)
        , (".markdown", Markdown)
        , (".md",       Markdown)
        , (".mdn",      Markdown)
        , (".mdown",    Markdown)
        , (".mdwn",     Markdown)
        , (".mkd",      Markdown)
        , (".mkdwn",    Markdown)
        , (".page",     Markdown)
        , (".rst",      Rst)
        , (".tex",      LaTeX)
        , (".text",     PlainText)
        , (".txt",      PlainText)
        , (".css",      Css)
        ]
+
-- | Get the file type for the current file
--
getFileType :: Compiler a FileType
getFileType = getIdentifier >>^ typeOfIdentifier
  where
    typeOfIdentifier = fileType . toFilePath
diff --git a/src/Hakyll/Web/Page.hs b/src/Hakyll/Web/Page.hs
new file mode 100644
index 0000000..955e1a8
--- /dev/null
+++ b/src/Hakyll/Web/Page.hs
@@ -0,0 +1,124 @@
+-- | A page is a key-value mapping, representing a page on your site
+--
+-- A page is an important concept in Hakyll. It is a key-value mapping, and has
+-- one field with an arbitrary type. A 'Page' thus consists of
+--
+-- * a key-value mapping (of the type @Map String String@);
+--
+-- * a value (of the type @a@).
+--
+-- Usually, the value will be a 'String' as well, and the value will be the body
+-- of the page.
+--
+-- Pages can be constructed using Haskell, but they are usually parsed from a
+-- file. The file format for pages is pretty straightforward.
+--
+-- > This is a simple page
+-- > consisting of two lines.
+--
+-- This is a valid page with two lines. If we load this in Hakyll, there would
+-- be no metadata, and the body would be the given text. Let's look at a page
+-- with some metadata.
+--
+-- > ---
+-- > title: Alice's Adventures in Wonderland
+-- > author: Lewis Caroll
+-- > year: 1865
+-- > ---
+-- >
+-- > Chapter I
+-- > =========
+-- >
+-- > Down the Rabbit-Hole
+-- > --------------------
+-- >
+-- > Alice was beginning to get very tired of sitting by her sister on the bank,
+-- > and of having nothing to do: once or twice she had peeped into the book her
+-- > sister was reading, but it had no pictures or conversations in it, "and
+-- > what is the use of a book," thought Alice "without pictures or
+-- > conversation?"
+-- >
+-- > ...
+--
+-- As you can see, we construct a metadata header in Hakyll using @---@. Then,
+-- we simply list all @key: value@ pairs, and end with @---@ again. This page
+-- contains three metadata fields and a body. The body is given in markdown
+-- format, which can be easily rendered to HTML by Hakyll, using pandoc.
+--
+{-# LANGUAGE DeriveDataTypeable #-}
+module Hakyll.Web.Page
+ ( Page (..)
+ , fromBody
+ , fromMap
+ , toMap
+ , readPageCompiler
+ , pageCompiler
+ , addDefaultFields
+ , sortByBaseName
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow (arr, (>>^), (&&&), (>>>))
+import System.FilePath (takeBaseName, takeDirectory)
+import qualified Data.Map as M
+import Data.List (sortBy)
+import Data.Ord (comparing)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler
+import Hakyll.Core.ResourceProvider
+import Hakyll.Web.Page.Internal
+import Hakyll.Web.Page.Read
+import Hakyll.Web.Page.Metadata
+import Hakyll.Web.Pandoc
+import Hakyll.Web.Template
+import Hakyll.Web.Util.Url
+
-- | Create a page from a body, without metadata
--
fromBody :: a -> Page a
fromBody body = Page M.empty body
+
-- | Read a page (do not render it)
--
readPageCompiler :: Compiler Resource (Page String)
readPageCompiler = getResourceString >>> arr readPage
+
-- | Read a page, add default fields, substitute fields and render using pandoc
--
-- The result is cached in the store under the key
-- @"Hakyll.Web.Page.pageCompiler"@.
--
pageCompiler :: Compiler Resource (Page String)
pageCompiler = cached "Hakyll.Web.Page.pageCompiler" $
    readPageCompiler >>> addDefaultFields >>> arr applySelf >>> pageRenderPandoc
+
-- | Add a number of default metadata fields to a page. These fields include:
--
-- * @$url@
--
-- * @$category@
--
-- * @$title@
--
-- * @$path@
--
-- Note that 'setField' never overwrites an existing value, so metadata the
-- page already carries takes precedence over these defaults.
--
addDefaultFields :: Compiler (Page a) (Page a)
addDefaultFields = (getRoute &&& id >>^ uncurry addRoute)
    >>> (getIdentifier &&& id >>^ uncurry addIdentifier)
  where
    -- Add root and url, based on route
    addRoute Nothing = id
    addRoute (Just r) = setField "url" (toUrl r)

    -- Add title and category, based on identifier
    addIdentifier i = setField "title" (takeBaseName p)
                    . setField "category" (takeBaseName $ takeDirectory p)
                    . setField "path" p
      where
        p = toFilePath i
+
-- | Sort posts based on the basename of the post. This is equivalent to a
-- chronological sort, because of the @year-month-day-title.extension@ naming
-- convention in Hakyll.
--
sortByBaseName :: [Page a] -> [Page a]
sortByBaseName = sortBy (comparing baseName)
  where
    baseName = takeBaseName . getField "path"
diff --git a/src/Hakyll/Web/Page/Internal.hs b/src/Hakyll/Web/Page/Internal.hs
new file mode 100644
index 0000000..55067ed
--- /dev/null
+++ b/src/Hakyll/Web/Page/Internal.hs
@@ -0,0 +1,50 @@
+-- | Internal representation of the page datatype
+--
+{-# LANGUAGE DeriveDataTypeable #-}
+module Hakyll.Web.Page.Internal
+ ( Page (..)
+ , fromMap
+ , toMap
+ ) where
+
+import Control.Applicative ((<$>), (<*>))
+import Data.Monoid (Monoid, mempty, mappend)
+
+import Data.Map (Map)
+import Data.Binary (Binary, get, put)
+import Data.Typeable (Typeable)
+import qualified Data.Map as M
+
+import Hakyll.Core.Writable
+
-- | Type used to represent pages: a metadata key-value mapping plus a body
--
data Page a = Page
    { pageMetadata :: Map String String
    , pageBody :: a
    } deriving (Eq, Show, Typeable)

-- | Metadata maps are merged left-biased ('M.union'); bodies are appended
instance Monoid a => Monoid (Page a) where
    mempty = Page M.empty mempty
    mappend (Page m1 b1) (Page m2 b2) =
        Page (M.union m1 m2) (mappend b1 b2)

-- | Mapping over a page touches only the body, never the metadata
instance Functor Page where
    fmap f (Page m b) = Page m (f b)

-- | Serialization order: metadata first, then the body
instance Binary a => Binary (Page a) where
    put (Page m b) = put m >> put b
    get = Page <$> get <*> get

-- | Writing a page to disk writes only its body; metadata is dropped
instance Writable a => Writable (Page a) where
    write p (Page _ b) = write p b
+
-- | Create a metadata page, without a body
--
fromMap :: Monoid a => Map String String -> Page a
fromMap metadata = Page metadata mempty
+
-- | Convert a page to a map. The body will be placed in the @body@ key.
--
toMap :: Page String -> Map String String
toMap page = M.insert "body" (pageBody page) (pageMetadata page)
diff --git a/src/Hakyll/Web/Page/Metadata.hs b/src/Hakyll/Web/Page/Metadata.hs
new file mode 100644
index 0000000..72742e6
--- /dev/null
+++ b/src/Hakyll/Web/Page/Metadata.hs
@@ -0,0 +1,131 @@
+-- | Provides various functions to manipulate the metadata fields of a page
+--
+module Hakyll.Web.Page.Metadata
+ ( getField
+ , getFieldMaybe
+ , setField
+ , setFieldA
+ , renderField
+ , changeField
+ , copyField
+ , renderDateField
+ , renderDateFieldWith
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow (Arrow, (>>>), (***), arr)
+import Data.List (intercalate)
+import Data.Maybe (fromMaybe)
+import Data.Time.Clock (UTCTime)
+import Data.Time.Format (parseTime, formatTime)
+import qualified Data.Map as M
+import System.FilePath (takeFileName)
+import System.Locale (TimeLocale, defaultTimeLocale)
+
+import Hakyll.Web.Page.Internal
+import Hakyll.Core.Util.String
+
-- | Get a metadata field. If the field does not exist, the empty string is
-- returned.
--
getField :: String  -- ^ Key
         -> Page a  -- ^ Page
         -> String  -- ^ Value
getField key page = fromMaybe "" (getFieldMaybe key page)
+
-- | Get a field in a 'Maybe' wrapper
--
getFieldMaybe :: String        -- ^ Key
              -> Page a        -- ^ Page
              -> Maybe String  -- ^ Value, if found
getFieldMaybe key page = M.lookup key (pageMetadata page)
+
-- | Add a metadata field. If the field already exists, it is not overwritten.
--
setField :: String  -- ^ Key
         -> String  -- ^ Value
         -> Page a  -- ^ Page to add it to
         -> Page a  -- ^ Resulting page
setField k v (Page m b) = Page (M.insertWith keepOld k v m) b
  where
    -- 'insertWith' calls this as (new, old); we always keep the old value
    keepOld _ old = old
+
-- | Arrow-based variant of 'setField'. Because of its type, this function is
-- very usable together with the different 'require' functions.
--
setFieldA :: Arrow a
          => String                  -- ^ Key
          -> a x String              -- ^ Value arrow
          -> a (Page b, x) (Page b)  -- ^ Resulting arrow
setFieldA k v = id *** v >>> arr (\(page, value) -> setField k value page)
+
-- | Do something with a metadata value, but keep the old value as well. If the
-- key given is not present in the metadata, nothing will happen. If the source
-- and destination keys are the same, the value will be changed (but you should
-- use 'changeField' for this purpose).
--
renderField :: String              -- ^ Key of which the value should be copied
            -> String              -- ^ Key the value should be copied to
            -> (String -> String)  -- ^ Function to apply on the value
            -> Page a              -- ^ Page on which this should be applied
            -> Page a              -- ^ Resulting page
renderField src dst f page = maybe page apply (getFieldMaybe src page)
  where
    apply value = setField dst (f value) page
+
-- | Change a metadata value.
--
-- > import Data.Char (toUpper)
-- > changeField "title" (map toUpper)
--
-- Will put the title in UPPERCASE.
--
changeField :: String              -- ^ Key to change.
            -> (String -> String)  -- ^ Function to apply on the value.
            -> Page a              -- ^ Page to change
            -> Page a              -- ^ Resulting page
changeField key f = renderField key key f
+
-- | Make a copy of a metadata field (put the value belonging to a certain key
-- under some other key as well)
--
copyField :: String  -- ^ Key to copy
          -> String  -- ^ Destination to copy to
          -> Page a  -- ^ Page on which this should be applied
          -> Page a  -- ^ Resulting page
copyField src dst = renderField src dst (\value -> value)
+
-- | When the metadata has a field called @path@ in a
-- @folder/yyyy-mm-dd-title.extension@ format (the convention for pages),
-- this function can render the date.
--
-- > renderDateField "date" "%B %e, %Y" "Date unknown"
--
-- Will render something like @January 2, 2010@.
--
renderDateField :: String  -- ^ Key in which the rendered date should be placed
                -> String  -- ^ Format to use on the date
                -> String  -- ^ Default value, in case the date cannot be parsed
                -> Page a  -- ^ Page on which this should be applied
                -> Page a  -- ^ Resulting page
renderDateField = renderDateFieldWith defaultTimeLocale
+
-- | This is an extended version of 'renderDateField' that allows you to
-- specify a time locale that is used for outputting the date. For more
-- details, see 'renderDateField'.
--
renderDateFieldWith :: TimeLocale  -- ^ Output time locale
                    -> String      -- ^ Destination key
                    -> String      -- ^ Format to use on the date
                    -> String      -- ^ Default value
                    -> Page a      -- ^ Target page
                    -> Page a      -- ^ Resulting page
renderDateFieldWith locale key format defaultValue =
    renderField "path" key renderDate'
  where
    -- Parse the @yyyy-mm-dd@ prefix of the file name. Parsing deliberately
    -- uses 'defaultTimeLocale', since the input format is purely numeric;
    -- only the output formatting uses the given 'locale'.
    renderDate' filePath = fromMaybe defaultValue $ do
        let dateString = intercalate "-" $ take 3
                       $ splitAll "-" $ takeFileName filePath
        time <- parseTime defaultTimeLocale
                          "%Y-%m-%d"
                          dateString :: Maybe UTCTime
        return $ formatTime locale format time
diff --git a/src/Hakyll/Web/Page/Read.hs b/src/Hakyll/Web/Page/Read.hs
new file mode 100644
index 0000000..cf39ddd
--- /dev/null
+++ b/src/Hakyll/Web/Page/Read.hs
@@ -0,0 +1,60 @@
+-- | Module providing a function to parse a page from a file
+--
+module Hakyll.Web.Page.Read
+ ( readPage
+ ) where
+
+import Control.Applicative ((<$>), (<*>))
+import Control.Arrow (second, (***))
+import Control.Monad.State (State, get, put, evalState)
+import Data.List (isPrefixOf)
+import Data.Map (Map)
+import qualified Data.Map as M
+
+import Hakyll.Web.Page.Internal
+import Hakyll.Core.Util.String
+
+-- | We're using a simple state monad as parser
+--
+type LineParser = State [String]
+
+-- | Read the metadata section from a page
+--
+parseMetadata :: LineParser (Map String String)
+parseMetadata = get >>= \content -> case content of
+ -- No lines means no metadata
+ [] -> return M.empty
+ -- Check if the file begins with a delimiter
+ (l : ls) -> if not (isPossibleDelimiter l)
+ then -- No delimiter means no metadata
+ return M.empty
+ else do -- Break the metadata section
+ let (metadata, rest) = second (drop 1) $ break (== l) ls
+ -- Put the rest back
+ put rest
+ -- Parse the metadata
+ return $ M.fromList $ map parseMetadata' metadata
+ where
+ -- Check if a line can be a delimiter
+ isPossibleDelimiter = isPrefixOf "---"
+
+ -- Parse a "key: value" string to a (key, value) tuple
+ parseMetadata' = (trim *** trim . drop 1) . break (== ':')
+
+-- | Read the body section of a page
+--
+parseBody :: LineParser String
+parseBody = do
+ body <- get
+ put []
+ return $ unlines body
+
+-- | Read an entire page
+--
+parsePage :: LineParser (Page String)
+parsePage = Page <$> parseMetadata <*> parseBody
+
+-- | Read a page from a string
+--
+readPage :: String -> Page String
+readPage = evalState parsePage . lines
diff --git a/src/Hakyll/Web/Pandoc.hs b/src/Hakyll/Web/Pandoc.hs
new file mode 100644
index 0000000..f225997
--- /dev/null
+++ b/src/Hakyll/Web/Pandoc.hs
@@ -0,0 +1,110 @@
+-- | Module exporting pandoc bindings
+--
+module Hakyll.Web.Pandoc
+ ( -- * The basic building blocks
+ readPandoc
+ , readPandocWith
+ , writePandoc
+ , writePandocWith
+
+ -- * Functions working on pages/compilers
+ , pageReadPandoc
+ , pageReadPandocWith
+ , pageRenderPandoc
+ , pageRenderPandocWith
+
+ -- * Default options
+ , defaultHakyllParserState
+ , defaultHakyllWriterOptions
+ ) where
+
+import Prelude hiding (id)
+import Control.Applicative ((<$>))
+import Control.Arrow ((>>^), (&&&))
+import Control.Category (id)
+
+import Text.Pandoc
+
+import Hakyll.Core.Compiler
+import Hakyll.Web.FileType
+import Hakyll.Web.Page.Internal
+
+-- | Read a string using pandoc, with the default options
+--
+readPandoc :: FileType -- ^ File type, determines how parsing happens
+ -> String -- ^ String to read
+ -> Pandoc -- ^ Resulting document
+readPandoc = readPandocWith defaultHakyllParserState
+
+-- | Read a string using pandoc, with the supplied options
+--
+readPandocWith :: ParserState -- ^ Parser options
+ -> FileType -- ^ File type, determines how parsing happens
+ -> String -- ^ String to read
+ -> Pandoc -- ^ Resulting document
+readPandocWith state fileType' = case fileType' of
+ Html -> readHtml state
+ LaTeX -> readLaTeX state
+ LiterateHaskell t -> readPandocWith state {stateLiterateHaskell = True} t
+ Markdown -> readMarkdown state
+ Rst -> readRST state
+ t -> error $
+ "Hakyll.Web.readPandocWith: I don't know how to read " ++ show t
+
+-- | Write a document (as HTML) using pandoc, with the default options
+--
+writePandoc :: Pandoc -- ^ Document to write
+ -> String -- ^ Resulting HTML
+writePandoc = writePandocWith defaultHakyllWriterOptions
+
+-- | Write a document (as HTML) using pandoc, with the supplied options
+--
+writePandocWith :: WriterOptions -- ^ Writer options for pandoc
+ -> Pandoc -- ^ Document to write
+ -> String -- ^ Resulting HTML
+writePandocWith = writeHtmlString
+
+-- | Read the resource using pandoc
+--
+pageReadPandoc :: Compiler (Page String) (Page Pandoc)
+pageReadPandoc = pageReadPandocWith defaultHakyllParserState
+
+-- | Read the resource using pandoc
+--
+pageReadPandocWith :: ParserState -> Compiler (Page String) (Page Pandoc)
+pageReadPandocWith state =
+ id &&& getFileType >>^ pageReadPandocWith'
+ where
+ pageReadPandocWith' (p, t) = readPandocWith state t <$> p
+
+-- | Render the resource using pandoc
+--
+pageRenderPandoc :: Compiler (Page String) (Page String)
+pageRenderPandoc =
+ pageRenderPandocWith defaultHakyllParserState defaultHakyllWriterOptions
+
+-- | Render the resource using pandoc
+--
+pageRenderPandocWith :: ParserState
+ -> WriterOptions
+ -> Compiler (Page String) (Page String)
+pageRenderPandocWith state options =
+ pageReadPandocWith state >>^ fmap (writePandocWith options)
+
+-- | The default reader options for pandoc parsing in hakyll
+--
+defaultHakyllParserState :: ParserState
+defaultHakyllParserState = defaultParserState
+ { -- The following option causes pandoc to read smart typography, a nice
+ -- and free bonus.
+ stateSmart = True
+ }
+
+-- | The default writer options for pandoc rendering in hakyll
+--
+defaultHakyllWriterOptions :: WriterOptions
+defaultHakyllWriterOptions = defaultWriterOptions
+ { -- This option causes literate haskell to be written using '>' marks in
+ -- html, which I think is a good default.
+ writerLiterateHaskell = True
+ }
diff --git a/src/Hakyll/Web/Preview/Server.hs b/src/Hakyll/Web/Preview/Server.hs
new file mode 100644
index 0000000..c550b69
--- /dev/null
+++ b/src/Hakyll/Web/Preview/Server.hs
@@ -0,0 +1,72 @@
+-- | Implements a basic static file server for previewing purposes
+--
+{-# LANGUAGE OverloadedStrings #-}
+module Hakyll.Web.Preview.Server
+ ( staticServer
+ ) where
+
+import Control.Monad.Trans (liftIO)
+import Control.Applicative ((<$>))
+import Codec.Binary.UTF8.String
+import System.FilePath ((</>))
+import System.Directory (doesFileExist)
+
+import qualified Data.ByteString as SB
+import Snap.Util.FileServe (serveFile)
+import Snap.Types (Snap, rqURI, getRequest, writeBS)
+import Snap.Http.Server ( httpServe, setAccessLog, setErrorLog, addListen
+ , ConfigListen (..), emptyConfig
+ )
+
+import Hakyll.Core.Util.String (replaceAll)
+
+-- | The first file in the list that actually exists is returned
+--
+findFile :: [FilePath] -> IO (Maybe FilePath)
+findFile [] = return Nothing
+findFile (x : xs) = do
+ exists <- doesFileExist x
+ if exists then return (Just x) else findFile xs
+
+-- | Serve a given directory
+--
+static :: FilePath -- ^ Directory to serve
+ -> (FilePath -> IO ()) -- ^ Pre-serve hook
+ -> Snap ()
+static directory preServe = do
+ -- Obtain the path
+ uri <- rqURI <$> getRequest
+ let filePath = replaceAll "\\?$" (const "") -- Remove trailing ?
+ $ replaceAll "#[^#]*$" (const "") -- Remove #section
+ $ replaceAll "^/" (const "") -- Remove leading /
+ $ decode $ SB.unpack uri
+
+ -- Try to find the requested file
+ r <- liftIO $ findFile $ map (directory </>) $
+ [ filePath
+ , filePath </> "index.htm"
+ , filePath </> "index.html"
+ ]
+
+ case r of
+ -- Not found, error
+ Nothing -> writeBS "Not found"
+ -- Found, serve
+ Just f -> do
+ liftIO $ preServe f
+ serveFile f
+
+-- | Main method, runs a static server in the given directory
+--
+staticServer :: FilePath -- ^ Directory to serve
+ -> (FilePath -> IO ()) -- ^ Pre-serve hook
+ -> Int -- ^ Port to listen on
+ -> IO () -- ^ Blocks forever
+staticServer directory preServe port =
+ httpServe config $ static directory preServe
+ where
+ -- Snap server config
+ config = addListen (ListenHttp "0.0.0.0" port)
+ $ setAccessLog Nothing
+ $ setErrorLog Nothing
+ $ emptyConfig
diff --git a/src/Hakyll/Web/RelativizeUrls.hs b/src/Hakyll/Web/RelativizeUrls.hs
new file mode 100644
index 0000000..2de4a0e
--- /dev/null
+++ b/src/Hakyll/Web/RelativizeUrls.hs
@@ -0,0 +1,62 @@
+-- | This module exposes a function which can relativize URLs on a webpage.
+--
+-- This means that one can deploy the resulting site on
+-- @http:\/\/example.com\/@, but also on @http:\/\/example.com\/some-folder\/@
+-- without having to change anything (simply copy over the files).
+--
+-- To use it, you should use absolute URLs from the site root everywhere. For
+-- example, use
+--
+-- > <img src="/images/lolcat.png" alt="Funny zomgroflcopter" />
+--
+-- in a blogpost. When running this through the relativize URLs module, this
+-- will result in (suppose your blogpost is located at @\/posts\/foo.html@):
+--
+-- > <img src="../images/lolcat.png" alt="Funny zomgroflcopter" />
+--
+module Hakyll.Web.RelativizeUrls
+ ( relativizeUrlsCompiler
+ , relativizeUrls
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow ((&&&), (>>^))
+import Data.List (isPrefixOf)
+import qualified Data.Set as S
+
+import Text.HTML.TagSoup
+
+import Hakyll.Core.Compiler
+import Hakyll.Web.Page
+import Hakyll.Web.Util.Url
+
+-- | Compiler form of 'relativizeUrls' which automatically picks the right root
+-- path
+--
+relativizeUrlsCompiler :: Compiler (Page String) (Page String)
+relativizeUrlsCompiler = getRoute &&& id >>^ uncurry relativize
+ where
+ relativize Nothing = id
+ relativize (Just r) = fmap (relativizeUrls $ toSiteRoot r)
+
+-- | Relativize URLs in HTML
+--
+relativizeUrls :: String -- ^ Path to the site root
+ -> String -- ^ HTML to relativize
+ -> String -- ^ Resulting HTML
+relativizeUrls root = renderTags . map relativizeUrls' . parseTags
+ where
+ relativizeUrls' (TagOpen s a) = TagOpen s $ map (relativizeUrlsAttrs root) a
+ relativizeUrls' x = x
+
+-- | Relativize URLs in attributes
+--
+relativizeUrlsAttrs :: String -- ^ Path to the site root
+ -> Attribute String -- ^ Attribute to relativize
+ -> Attribute String -- ^ Resulting attribute
+relativizeUrlsAttrs root (key, value)
+ | key `S.member` urls && "/" `isPrefixOf` value = (key, root ++ value)
+ | otherwise = (key, value)
+ where
+ urls = S.fromList ["src", "href"]
diff --git a/src/Hakyll/Web/Tags.hs b/src/Hakyll/Web/Tags.hs
new file mode 100644
index 0000000..211a06b
--- /dev/null
+++ b/src/Hakyll/Web/Tags.hs
@@ -0,0 +1,180 @@
+-- | Module containing some specialized functions to deal with tags.
+-- This module follows certain conventions. My advice is to stick with them if
+-- possible.
+--
+-- More concrete: all functions in this module assume that the tags are
+-- located in the @tags@ field, and separated by commas. An example file
+-- @foo.markdown@ could look like:
+--
+-- > ---
+-- > author: Philip K. Dick
+-- > title: Do androids dream of electric sheep?
+-- > tags: future, science fiction, humanoid
+-- > ---
+-- > The novel is set in a post-apocalyptic near future, where the Earth and
+-- > its populations have been damaged greatly by Nuclear...
+--
+-- All the following functions would work with such a format. In addition to
+-- tags, Hakyll also supports categories. The convention when using categories
+-- is to place pages in subdirectories.
+--
+-- An example: the page @posts\/coding\/2010-01-28-hakyll-categories.markdown@
+-- belongs to the category @coding@. Tags or categories are read using the
+-- @readTags@ and @readCategory@ functions.
+-- categories are represented as tags. This is perfectly possible: categories
+-- only have an additional restriction that a page can only have one category
+-- (instead of multiple tags).
+--
+{-# LANGUAGE DeriveDataTypeable, OverloadedStrings, Arrows #-}
+module Hakyll.Web.Tags
+ ( Tags (..)
+ , readTagsWith
+ , readTags
+ , readCategory
+ , renderTagCloud
+ , renderTagsField
+ , renderCategoryField
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Applicative ((<$>))
+import Data.Map (Map)
+import qualified Data.Map as M
+import Data.List (intersperse)
+import Control.Arrow (arr, (&&&), (>>>), (***), (<<^), returnA)
+import Data.Maybe (catMaybes, fromMaybe)
+import Data.Monoid (mconcat)
+
+import Data.Typeable (Typeable)
+import Data.Binary (Binary, get, put)
+import Text.Blaze.Renderer.String (renderHtml)
+import Text.Blaze ((!), toHtml, toValue)
+import qualified Text.Blaze.Html5 as H
+import qualified Text.Blaze.Html5.Attributes as A
+
+import Hakyll.Web.Page
+import Hakyll.Web.Page.Metadata
+import Hakyll.Web.Util.Url
+import Hakyll.Core.Writable
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler
+import Hakyll.Core.Util.String
+
+-- | Data about tags
+--
+data Tags a = Tags
+ { tagsMap :: Map String [Page a]
+ } deriving (Show, Typeable)
+
+instance Binary a => Binary (Tags a) where
+ get = Tags <$> get
+ put (Tags m) = put m
+
+instance Writable (Tags a) where
+ write _ _ = return ()
+
+-- | Obtain tags from a page
+--
+getTags :: Page a -> [String]
+getTags = map trim . splitAll "," . getField "tags"
+
+-- | Obtain categories from a page
+--
+getCategory :: Page a -> [String]
+getCategory = return . getField "category"
+
+-- | Higher-level function to read tags
+--
+readTagsWith :: (Page a -> [String]) -- ^ Function extracting tags from a page
+ -> [Page a] -- ^ Pages
+ -> Tags a -- ^ Resulting tags
+readTagsWith f pages = Tags
+ { tagsMap = foldl (M.unionWith (++)) M.empty (map readTagsWith' pages)
+ }
+ where
+ -- Create a tag map for one page
+ readTagsWith' page =
+ let tags = f page
+ in M.fromList $ zip tags $ repeat [page]
+
+-- | Read a tagmap using the @tags@ metadata field
+--
+readTags :: [Page a] -> Tags a
+readTags = readTagsWith getTags
+
+-- | Read a tagmap using the @category@ metadata field
+--
+readCategory :: [Page a] -> Tags a
+readCategory = readTagsWith getCategory
+
+-- | Render a tag cloud in HTML
+--
+renderTagCloud :: (String -> Identifier) -- ^ Produce a link for a tag
+ -> Double -- ^ Smallest font size, in percent
+ -> Double -- ^ Biggest font size, in percent
+ -> Compiler (Tags a) String -- ^ Tag cloud renderer
+renderTagCloud makeUrl minSize maxSize = proc (Tags tags) -> do
+ -- In tags' we create a list: [((tag, route), count)]
+ tags' <- mapCompiler ((id &&& (getRouteFor <<^ makeUrl)) *** arr length)
+ -< M.toList tags
+
+ let -- Absolute frequencies of the pages
+ freqs = map snd tags'
+
+ -- Find out the relative count of a tag: on a scale from 0 to 1
+ relative count = (fromIntegral count - min') / (1 + max' - min')
+
+ -- Show the relative size of one 'count' in percent
+ size count =
+ let size' = floor $ minSize + relative count * (maxSize - minSize)
+ in show (size' :: Int) ++ "%"
+
+ -- The minimum and maximum count found, as doubles
+ (min', max')
+ | null freqs = (0, 1)
+ | otherwise = (minimum &&& maximum) $ map fromIntegral freqs
+
+ -- Create a link for one item
+ makeLink ((tag, url), count) =
+ H.a ! A.style (toValue $ "font-size: " ++ size count)
+ ! A.href (toValue $ fromMaybe "/" url)
+ $ toHtml tag
+
+ -- Render and return the HTML
+ returnA -< renderHtml $ mconcat $ intersperse " " $ map makeLink tags'
+
+-- | Render tags with links
+--
+renderTagsFieldWith :: (Page a -> [String]) -- ^ Function to get the tags
+ -> String -- ^ Destination key
+ -> (String -> Identifier) -- ^ Create a link for a tag
+ -> Compiler (Page a) (Page a) -- ^ Resulting compiler
+renderTagsFieldWith tags destination makeUrl =
+ id &&& arr tags >>> setFieldA destination renderTags
+ where
+ -- Compiler creating a comma-separated HTML string for a list of tags
+ renderTags :: Compiler [String] String
+ renderTags = arr (map $ id &&& makeUrl)
+ >>> mapCompiler (id *** getRouteFor)
+ >>> arr (map $ uncurry renderLink)
+ >>> arr (renderHtml . mconcat . intersperse ", " . catMaybes)
+
+ -- Render one tag link
+ renderLink _ Nothing = Nothing
+ renderLink tag (Just filePath) = Just $
+ H.a ! A.href (toValue $ toUrl filePath) $ toHtml tag
+
+-- | Render tags with links
+--
+renderTagsField :: String -- ^ Destination key
+ -> (String -> Identifier) -- ^ Create a link for a tag
+ -> Compiler (Page a) (Page a) -- ^ Resulting compiler
+renderTagsField = renderTagsFieldWith getTags
+
+-- | Render the category in a link
+--
+renderCategoryField :: String -- ^ Destination key
+ -> (String -> Identifier) -- ^ Create a category link
+ -> Compiler (Page a) (Page a) -- ^ Resulting compiler
+renderCategoryField = renderTagsFieldWith getCategory
diff --git a/src/Hakyll/Web/Template.hs b/src/Hakyll/Web/Template.hs
new file mode 100644
index 0000000..9c49278
--- /dev/null
+++ b/src/Hakyll/Web/Template.hs
@@ -0,0 +1,109 @@
+-- | This module provides means for reading and applying 'Template's.
+--
+-- Templates are tools to convert data (pages) into a string. They are
+-- perfectly suited for laying out your site.
+--
+-- Let's look at an example template:
+--
+-- > <html>
+-- > <head>
+-- > <title>My crazy homepage - $title$</title>
+-- > </head>
+-- > <body>
+-- > <div id="header">
+-- > <h1>My crazy homepage - $title$</h1>
+-- > </div>
+-- > <div id="content">
+-- > $body$
+-- > </div>
+-- > <div id="footer">
+-- > By reading this you agree that I now own your soul
+-- > </div>
+-- > </body>
+-- > </html>
+--
+-- We can use this template to render a 'Page' which has a body and a @$title$@
+-- metadata field.
+--
+-- As you can see, the format is very simple -- @$key$@ is used to render the
+-- @$key$@ field from the page, everything else is literally copied. If you want
+-- to literally insert @\"$key$\"@ into your page (for example, when you're
+-- writing a Hakyll tutorial) you can use
+--
+-- > <p>
+-- > A literal $$key$$.
+-- > </p>
+--
+-- Because of its simplicity, these templates can be used for more than HTML:
+-- you could make, for example, CSS or JS templates as well.
+--
+-- In addition to the native format, Hakyll also supports hamlet templates. For
+-- more information on hamlet templates, please refer to:
+-- <http://hackage.haskell.org/package/hamlet>.
+--
+module Hakyll.Web.Template
+ ( Template
+ , applyTemplate
+ , applySelf
+ , templateCompiler
+ , templateCompilerWith
+ , applyTemplateCompiler
+ ) where
+
+import Control.Arrow
+import Data.Maybe (fromMaybe)
+import System.FilePath (takeExtension)
+import qualified Data.Map as M
+
+import Text.Hamlet (HamletSettings, defaultHamletSettings)
+
+import Hakyll.Core.Compiler
+import Hakyll.Core.Identifier
+import Hakyll.Core.ResourceProvider
+import Hakyll.Web.Template.Internal
+import Hakyll.Web.Template.Read
+import Hakyll.Web.Page.Internal
+
+-- | Substitutes @$identifiers@ in the given @Template@ by values from the
+-- given 'Page'. When a key is not found, it is left as it is; escaped
+-- dollars (@$$@) are rendered as a single @$@.
+--
+applyTemplate :: Template -> Page String -> Page String
+applyTemplate template page =
+ fmap (const $ substitute =<< unTemplate template) page
+ where
+ map' = toMap page
+ substitute (Chunk chunk) = chunk
+ substitute (Key key) = fromMaybe ("$" ++ key ++ "$") $ M.lookup key map'
+ substitute (Escaped) = "$"
+
+-- | Apply a page as its own template. This is often very useful to fill in
+-- certain keys like @$root@ and @$url@.
+--
+applySelf :: Page String -> Page String
+applySelf page = applyTemplate (readTemplate $ pageBody page) page
+
+-- | Read a template. If the extension of the file we're compiling is
+-- @.hml@ or @.hamlet@, it will be considered as a Hamlet template, and parsed
+-- as such.
+--
+templateCompiler :: Compiler Resource Template
+templateCompiler = templateCompilerWith defaultHamletSettings
+
+-- | Version of 'templateCompiler' that enables custom settings.
+--
+templateCompilerWith :: HamletSettings -> Compiler Resource Template
+templateCompilerWith settings =
+ cached "Hakyll.Web.Template.templateCompilerWith" $
+ getIdentifier &&& getResourceString >>^ uncurry read'
+ where
+ read' identifier string =
+ if takeExtension (toFilePath identifier) `elem` [".hml", ".hamlet"]
+ -- Hamlet template
+ then readHamletTemplateWith settings string
+ -- Hakyll template
+ else readTemplate string
+
+applyTemplateCompiler :: Identifier -- ^ Template
+ -> Compiler (Page String) (Page String) -- ^ Compiler
+applyTemplateCompiler identifier = require identifier (flip applyTemplate)
diff --git a/src/Hakyll/Web/Template/Internal.hs b/src/Hakyll/Web/Template/Internal.hs
new file mode 100644
index 0000000..d0e0859
--- /dev/null
+++ b/src/Hakyll/Web/Template/Internal.hs
@@ -0,0 +1,45 @@
+-- | Module containing the template data structure
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
+module Hakyll.Web.Template.Internal
+ ( Template (..)
+ , TemplateElement (..)
+ ) where
+
+import Control.Applicative ((<$>))
+
+import Data.Binary (Binary, get, getWord8, put, putWord8)
+import Data.Typeable (Typeable)
+
+import Hakyll.Core.Writable
+
+-- | Datatype used for template substitutions.
+--
+newtype Template = Template
+ { unTemplate :: [TemplateElement]
+ }
+ deriving (Show, Eq, Binary, Typeable)
+
+instance Writable Template where
+ -- Writing a template is impossible
+ write _ _ = return ()
+
+-- | Elements of a template.
+--
+data TemplateElement
+ = Chunk String
+ | Key String
+ | Escaped
+ deriving (Show, Eq, Typeable)
+
+instance Binary TemplateElement where
+ put (Chunk string) = putWord8 0 >> put string
+ put (Key key) = putWord8 1 >> put key
+ put (Escaped) = putWord8 2
+
+ get = getWord8 >>= \tag -> case tag of
+ 0 -> Chunk <$> get
+ 1 -> Key <$> get
+ 2 -> return Escaped
+ _ -> error $ "Hakyll.Web.Template.Internal: "
+ ++ "Error reading cached template"
diff --git a/src/Hakyll/Web/Template/Read.hs b/src/Hakyll/Web/Template/Read.hs
new file mode 100644
index 0000000..421b7e9
--- /dev/null
+++ b/src/Hakyll/Web/Template/Read.hs
@@ -0,0 +1,10 @@
+-- | Re-exports all different template reading modules
+--
+module Hakyll.Web.Template.Read
+ ( readTemplate
+ , readHamletTemplate
+ , readHamletTemplateWith
+ ) where
+
+import Hakyll.Web.Template.Read.Hakyll
+import Hakyll.Web.Template.Read.Hamlet
diff --git a/src/Hakyll/Web/Template/Read/Hakyll.hs b/src/Hakyll/Web/Template/Read/Hakyll.hs
new file mode 100644
index 0000000..fecf772
--- /dev/null
+++ b/src/Hakyll/Web/Template/Read/Hakyll.hs
@@ -0,0 +1,35 @@
+-- | Read templates in Hakyll's native format
+--
+module Hakyll.Web.Template.Read.Hakyll
+ ( readTemplate
+ ) where
+
+import Data.List (isPrefixOf)
+import Data.Char (isAlphaNum)
+
+import Hakyll.Web.Template.Internal
+
+-- | Construct a @Template@ from a string.
+--
+readTemplate :: String -> Template
+readTemplate = Template . readTemplate'
+ where
+ readTemplate' [] = []
+ readTemplate' string
+ | "$$" `isPrefixOf` string =
+ Escaped : readTemplate' (drop 2 string)
+ | "$" `isPrefixOf` string =
+ case readKey (drop 1 string) of
+ Just (key, rest) -> Key key : readTemplate' rest
+ Nothing -> Chunk "$" : readTemplate' (drop 1 string)
+ | otherwise =
+ let (chunk, rest) = break (== '$') string
+ in Chunk chunk : readTemplate' rest
+
+ -- Parse a key into (key, rest) if it's valid, and return
+ -- Nothing otherwise
+ readKey string =
+ let (key, rest) = span isAlphaNum string
+ in if not (null key) && "$" `isPrefixOf` rest
+ then Just (key, drop 1 rest)
+ else Nothing
diff --git a/src/Hakyll/Web/Template/Read/Hamlet.hs b/src/Hakyll/Web/Template/Read/Hamlet.hs
new file mode 100644
index 0000000..7b496de
--- /dev/null
+++ b/src/Hakyll/Web/Template/Read/Hamlet.hs
@@ -0,0 +1,46 @@
+-- | Read templates in the hamlet format
+--
+{-# LANGUAGE MultiParamTypeClasses #-}
+module Hakyll.Web.Template.Read.Hamlet
+ ( readHamletTemplate
+ , readHamletTemplateWith
+ ) where
+
+import Text.Hamlet (HamletSettings (..), defaultHamletSettings)
+import Text.Hamlet.RT
+
+import Hakyll.Web.Template.Internal
+
+-- | Read a hamlet template using the default settings
+--
+readHamletTemplate :: String -> Template
+readHamletTemplate = readHamletTemplateWith defaultHamletSettings
+
+-- | Read a hamlet template using the specified settings
+--
+readHamletTemplateWith :: HamletSettings -> String -> Template
+readHamletTemplateWith settings string =
+ let result = parseHamletRT settings string
+ in case result of
+ Just hamlet -> fromHamletRT hamlet
+ Nothing -> error
+ "Hakyll.Web.Template.Read.Hamlet.readHamletTemplateWith: \
+ \Could not parse Hamlet file"
+
+-- | Convert a 'HamletRT' to a 'Template'
+--
+fromHamletRT :: HamletRT -- ^ Hamlet runtime template
+ -> Template -- ^ Hakyll template
+fromHamletRT (HamletRT sd) = Template $ map fromSimpleDoc sd
+ where
+ fromSimpleDoc :: SimpleDoc -> TemplateElement
+ fromSimpleDoc (SDRaw chunk) = Chunk chunk
+ fromSimpleDoc (SDVar [var]) = Key var
+ fromSimpleDoc (SDVar _) = error
+ "Hakyll.Web.Template.Read.Hamlet.fromHamletRT: \
+ \Hakyll does not support '.' in identifier names when using \
+ \hamlet templates."
+ fromSimpleDoc _ = error
+ "Hakyll.Web.Template.Read.Hamlet.fromHamletRT: \
+ \Only simple $key$ identifiers are allowed when using hamlet \
+ \templates."
diff --git a/src/Hakyll/Web/Util/Url.hs b/src/Hakyll/Web/Util/Url.hs
new file mode 100644
index 0000000..54a361e
--- /dev/null
+++ b/src/Hakyll/Web/Util/Url.hs
@@ -0,0 +1,30 @@
+-- | Miscellaneous URL manipulation functions.
+--
+module Hakyll.Web.Util.Url
+ ( toUrl
+ , toSiteRoot
+ ) where
+
+import System.FilePath (splitPath, takeDirectory, joinPath)
+
+-- | Convert a filepath to an URL starting from the site root
+--
+-- Example:
+--
+-- > toUrl "foo/bar.html"
+--
+-- Result:
+--
+-- > "/foo/bar.html"
+--
+toUrl :: FilePath -> String
+toUrl = ('/' :)
+
+-- | Get the relative url to the site root, for a given (absolute) url
+--
+toSiteRoot :: String -> String
+toSiteRoot = emptyException . joinPath . map parent . splitPath . takeDirectory
+ where
+ parent = const ".."
+ emptyException [] = "."
+ emptyException x = x
diff --git a/src/Network/Hakyll/SimpleServer.hs b/src/Network/Hakyll/SimpleServer.hs
deleted file mode 100644
index 4eef689..0000000
--- a/src/Network/Hakyll/SimpleServer.hs
+++ /dev/null
@@ -1,215 +0,0 @@
--- | Module containing a small, simple http file server for testing and preview
--- purposes.
-module Network.Hakyll.SimpleServer
- ( simpleServer
- ) where
-
-import Prelude hiding (log)
-import Control.Monad (forever)
-import Control.Monad.Reader (ReaderT, runReaderT, ask, liftIO)
-import Network
-import System.IO
-import System.Directory (doesFileExist, doesDirectoryExist)
-import Control.Concurrent (forkIO)
-import Control.Concurrent.Chan (Chan, newChan, readChan, writeChan)
-import System.FilePath (takeExtension)
-import qualified Data.Map as M
-import Data.List (intercalate)
-
-import Text.Hakyll.Util
-import Text.Hakyll.Regex
-
--- | Function to log from a chan.
-log :: Chan String -> IO ()
-log logChan = forever (readChan logChan >>= hPutStrLn stderr)
-
--- | General server configuration.
-data ServerConfig = ServerConfig { documentRoot :: FilePath
- , portNumber :: PortNumber
- , logChannel :: Chan String
- }
-
--- | Custom monad stack.
-type Server = ReaderT ServerConfig IO
-
--- | Simple representation of a HTTP request.
-data Request = Request { requestMethod :: String
- , requestURI :: String
- , requestVersion :: String
- } deriving (Ord, Eq)
-
-instance Show Request where
- show request = requestMethod request ++ " "
- ++ requestURI request ++ " "
- ++ requestVersion request
-
--- | Read a HTTP request from a 'Handle'. For now, this will ignore the request
--- headers and body.
-readRequest :: Handle -> Server Request
-readRequest handle = do
- requestLine <- liftIO $ hGetLine handle
- let [method, uri, version] = map trim $ splitRegex " " requestLine
- request = Request { requestMethod = method
- , requestURI = uri
- , requestVersion = version
- }
- return request
-
--- | Simple representation of the HTTP response we send back.
-data Response = Response { responseVersion :: String
- , responseStatusCode :: Int
- , responsePhrase :: String
- , responseHeaders :: M.Map String String
- , responseBody :: String
- } deriving (Ord, Eq)
-
-instance Show Response where
- show response = responseVersion response ++ " "
- ++ show (responseStatusCode response) ++ " "
- ++ responsePhrase response
-
--- | A default response.
-defaultResponse :: Response
-defaultResponse = Response { responseVersion = "HTTP/1.1"
- , responseStatusCode = 0
- , responsePhrase = ""
- , responseHeaders = M.empty
- , responseBody = ""
- }
-
--- | Create a response for a given HTTP request.
-createResponse :: Request -> Server Response
-createResponse request
- | requestMethod request == "GET" = createGetResponse request
- | otherwise = return $ createErrorResponse 501 "Not Implemented"
-
--- | Create a simple error response.
-createErrorResponse :: Int -- ^ Error code.
- -> String -- ^ Error phrase.
- -> Response -- ^ Result.
-createErrorResponse statusCode phrase = defaultResponse
- { responseStatusCode = statusCode
- , responsePhrase = phrase
- , responseHeaders = M.singleton "Content-Type" "text/html"
- , responseBody =
- "<html> <head> <title>" ++ show statusCode ++ "</title> </head>"
- ++ "<body> <h1>" ++ show statusCode ++ "</h1>\n"
- ++ "<p>" ++ phrase ++ "</p> </body> </html>"
- }
-
--- | Create a simple get response.
-createGetResponse :: Request -> Server Response
-createGetResponse request = do
- -- Construct the complete fileName of the requested resource.
- config <- ask
- let -- Drop everything after a '?'.
- uri = takeWhile ((/=) '?') $ requestURI request
- log' = writeChan (logChannel config)
- isDirectory <- liftIO $ doesDirectoryExist $ documentRoot config ++ uri
- let fileName =
- documentRoot config ++ if isDirectory then uri ++ "/index.html"
- else uri
-
- create200 = do
- h <- openBinaryFile fileName ReadMode
- contentLength <- hFileSize h
- body <- hGetContents h
- let mimeHeader = getMIMEHeader fileName
- headers = ("Content-Length", show contentLength) : mimeHeader
- return $ defaultResponse
- { responseStatusCode = 200
- , responsePhrase = "OK"
- , responseHeaders = responseHeaders defaultResponse
- `M.union` M.fromList headers
- , responseBody = body
- }
-
- -- Called when an error occurs during the creation of a 200 response.
- create500 e = do
- log' $ "Internal Error: " ++ show e
- return $ createErrorResponse 500 "Internal Server Error"
-
- -- Send back the page if found.
- exists <- liftIO $ doesFileExist fileName
- if exists
- then liftIO $ catch create200 create500
- else do liftIO $ log' $ "Not Found: " ++ fileName
- return $ createErrorResponse 404 "Not Found"
-
--- | Get the mime header for a certain filename. This is based on the extension
--- of the given 'FilePath'.
-getMIMEHeader :: FilePath -> [(String, String)]
-getMIMEHeader fileName =
- case result of (Just x) -> [("Content-Type", x)]
- Nothing -> []
- where
- result = lookup (takeExtension fileName) [ (".css", "text/css")
- , (".gif", "image/gif")
- , (".htm", "text/html")
- , (".html", "text/html")
- , (".jpeg", "image/jpeg")
- , (".jpg", "image/jpeg")
- , (".js", "text/javascript")
- , (".png", "image/png")
- , (".txt", "text/plain")
- , (".xml", "text/xml")
- ]
-
--- | Respond to an incoming request.
-respond :: Handle -> Server ()
-respond handle = do
- -- Read the request and create a response.
- request <- readRequest handle
- response <- createResponse request
-
- -- Generate some output.
- config <- ask
- liftIO $ writeChan (logChannel config)
- $ show request ++ " => " ++ show response
-
- -- Send the response back to the handle.
- liftIO $ putResponse response
- where
- putResponse response = do hPutStr handle $ intercalate " "
- [ responseVersion response
- , show $ responseStatusCode response
- , responsePhrase response
- ]
- hPutStr handle "\r\n"
- mapM_ putHeader
- (M.toList $ responseHeaders response)
- hPutStr handle "\r\n"
- hPutStr handle $ responseBody response
- hPutStr handle "\r\n"
- hClose handle
-
- putHeader (key, value) =
- hPutStr handle $ key ++ ": " ++ value ++ "\r\n"
-
--- | Start a simple http server on the given 'PortNumber', serving the given
--- directory.
---
-simpleServer :: PortNumber -- ^ Port to listen on.
- -> FilePath -- ^ Root directory to serve.
- -> IO () -- ^ Optional pre-respond action.
- -> IO ()
-simpleServer port root preRespond = do
- -- Channel to send logs to
- logChan <- newChan
-
- let config = ServerConfig { documentRoot = root
- , portNumber = port
- , logChannel = logChan
- }
-
- -- When a client connects, respond in a separate thread.
- listen socket = do (handle, _, _) <- accept socket
- preRespond
- forkIO (runReaderT (respond handle) config)
-
- -- Handle logging in a separate thread
- _ <- forkIO (log logChan)
-
- writeChan logChan $ "Starting hakyll server on port " ++ show port ++ "..."
- socket <- listenOn (PortNumber port)
- forever (listen socket)
diff --git a/src/Text/Hakyll.hs b/src/Text/Hakyll.hs
deleted file mode 100644
index b0fe479..0000000
--- a/src/Text/Hakyll.hs
+++ /dev/null
@@ -1,185 +0,0 @@
--- | This is the main Hakyll module, exporting the important @hakyll@ function.
---
--- Most configurations would use this @hakyll@ function more or less as the
--- main function:
---
--- > main = hakyll $ do
--- > directory css "css"
--- > directory static "images"
---
-module Text.Hakyll
- ( defaultHakyllConfiguration
- , hakyll
- , hakyllWithConfiguration
- , runDefaultHakyll
-
- , module Text.Hakyll.Context
- , module Text.Hakyll.ContextManipulations
- , module Text.Hakyll.CreateContext
- , module Text.Hakyll.File
- , module Text.Hakyll.HakyllMonad
- , module Text.Hakyll.Regex
- , module Text.Hakyll.Render
- , module Text.Hakyll.HakyllAction
- , module Text.Hakyll.Paginate
- , module Text.Hakyll.Page
- , module Text.Hakyll.Pandoc
- , module Text.Hakyll.Util
- , module Text.Hakyll.Tags
- , module Text.Hakyll.Feed
- , module Text.Hakyll.Configurations.Static
- ) where
-
-import Control.Concurrent (forkIO, threadDelay)
-import Control.Monad.Reader (runReaderT, liftIO, ask)
-import Control.Monad (when)
-import Data.Monoid (mempty)
-import System.Environment (getArgs, getProgName)
-import System.Directory (doesDirectoryExist, removeDirectoryRecursive)
-import System.Time (getClockTime)
-
-import Text.Pandoc
-import Text.Hamlet (defaultHamletSettings)
-
-import Network.Hakyll.SimpleServer (simpleServer)
-import Text.Hakyll.Context
-import Text.Hakyll.ContextManipulations
-import Text.Hakyll.CreateContext
-import Text.Hakyll.File
-import Text.Hakyll.HakyllMonad
-import Text.Hakyll.Regex
-import Text.Hakyll.Render
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Paginate
-import Text.Hakyll.Page
-import Text.Hakyll.Pandoc
-import Text.Hakyll.Util
-import Text.Hakyll.Tags
-import Text.Hakyll.Feed
-import Text.Hakyll.Configurations.Static
-
--- | The default reader options for pandoc parsing.
---
-defaultPandocParserState :: ParserState
-defaultPandocParserState = defaultParserState
- { -- The following option causes pandoc to read smart typography, a nice
- -- and free bonus.
- stateSmart = True
- }
-
--- | The default writer options for pandoc rendering.
---
-defaultPandocWriterOptions :: WriterOptions
-defaultPandocWriterOptions = defaultWriterOptions
- { -- This option causes literate haskell to be written using '>' marks in
- -- html, which I think is a good default.
- writerLiterateHaskell = True
- }
-
--- | The default hakyll configuration.
---
-defaultHakyllConfiguration :: String -- ^ Absolute site URL.
- -> HakyllConfiguration -- ^ Default config.
-defaultHakyllConfiguration absoluteUrl' = HakyllConfiguration
- { absoluteUrl = absoluteUrl'
- , additionalContext = mempty
- , siteDirectory = "_site"
- , cacheDirectory = "_cache"
- , enableIndexUrl = False
- , previewMode = BuildOnRequest
- , pandocParserState = defaultPandocParserState
- , pandocWriterOptions = defaultPandocWriterOptions
- , hamletSettings = defaultHamletSettings
- }
-
--- | Main function to run Hakyll with the default configuration. The
--- absolute URL is only used in certain cases, for example RSS feeds et
--- cetera.
---
-hakyll :: String -- ^ Absolute URL of your site. Used in certain cases.
- -> Hakyll () -- ^ You code.
- -> IO ()
-hakyll absolute = hakyllWithConfiguration configuration
- where
- configuration = defaultHakyllConfiguration absolute
-
--- | Main function to run hakyll with a custom configuration.
---
-hakyllWithConfiguration :: HakyllConfiguration -> Hakyll () -> IO ()
-hakyllWithConfiguration configuration buildFunction = do
- args <- getArgs
- let f = case args of ["build"] -> buildFunction
- ["clean"] -> clean
- ["preview", p] -> preview (read p)
- ["preview"] -> preview defaultPort
- ["rebuild"] -> clean >> buildFunction
- ["server", p] -> server (read p) (return ())
- ["server"] -> server defaultPort (return ())
- _ -> help
- runReaderT f configuration
- where
- preview port = case previewMode configuration of
- BuildOnRequest -> server port buildFunction
- BuildOnInterval -> do
- let pIO = runReaderT (previewThread buildFunction) configuration
- _ <- liftIO $ forkIO pIO
- server port (return ())
-
- defaultPort = 8000
-
--- | A preview thread that periodically recompiles the site.
---
-previewThread :: Hakyll () -- ^ Build function
- -> Hakyll () -- ^ Result
-previewThread buildFunction = run =<< liftIO getClockTime
- where
- delay = 1000000
- run time = do liftIO $ threadDelay delay
- contents <- getRecursiveContents "."
- valid <- isMoreRecent time contents
- when valid buildFunction
- run =<< liftIO getClockTime
-
--- | Clean up directories.
---
-clean :: Hakyll ()
-clean = do askHakyll siteDirectory >>= remove'
- askHakyll cacheDirectory >>= remove'
- where
- remove' dir = liftIO $ do putStrLn $ "Removing " ++ dir ++ "..."
- exists <- doesDirectoryExist dir
- when exists $ removeDirectoryRecursive dir
-
--- | Show usage information.
---
-help :: Hakyll ()
-help = liftIO $ do
- name <- getProgName
- putStrLn $ "This is a Hakyll site generator program. You should always\n"
- ++ "run it from the project root directory.\n"
- ++ "\n"
- ++ "Usage:\n"
- ++ name ++ " build Generate the site.\n"
- ++ name ++ " clean Clean up and remove cache.\n"
- ++ name ++ " help Show this message.\n"
- ++ name ++ " preview [port] Run a server and autocompile.\n"
- ++ name ++ " rebuild Clean up and build again.\n"
- ++ name ++ " server [port] Run a local test server.\n"
-
--- | Start a server at the given port number.
---
-server :: Integer -- ^ Port number to serve on.
- -> Hakyll () -- ^ Pre-respond action.
- -> Hakyll ()
-server port preRespond = do
- configuration <- ask
- root <- askHakyll siteDirectory
- let preRespondIO = runReaderT preRespond configuration
- liftIO $ simpleServer (fromIntegral port) root preRespondIO
-
--- | Run a Hakyll action with default settings. This is mostly aimed at testing
--- code.
---
-runDefaultHakyll :: Hakyll a -> IO a
-runDefaultHakyll f =
- runReaderT f $ defaultHakyllConfiguration "http://example.com"
diff --git a/src/Text/Hakyll/Configurations/Static.hs b/src/Text/Hakyll/Configurations/Static.hs
deleted file mode 100644
index 5a2c1be..0000000
--- a/src/Text/Hakyll/Configurations/Static.hs
+++ /dev/null
@@ -1,59 +0,0 @@
--- | Module for a simple static configuration of a website.
---
--- The configuration works like this:
---
--- * The @templates/@ directory should contain one template.
---
--- * Renderable files in the directory tree are rendered using this template.
---
--- * The @static/@ directory is copied entirely (if it exists).
---
--- * All files in the @css/@ directory are compressed.
---
-module Text.Hakyll.Configurations.Static
- ( staticConfiguration
- ) where
-
-import Control.Applicative ((<$>))
-import Control.Monad (filterM, forM_)
-
-import Text.Hakyll.File ( getRecursiveContents, inDirectory, inHakyllDirectory
- , directory )
-import Text.Hakyll.Internal.FileType (isRenderableFile)
-import Text.Hakyll.HakyllMonad (Hakyll, logHakyll)
-import Text.Hakyll.Render (renderChain, css, static)
-import Text.Hakyll.CreateContext (createPage)
-
--- | A simple configuration for an entirely static website.
---
-staticConfiguration :: Hakyll ()
-staticConfiguration = do
- -- Find all files not in _site or _cache.
- files <- filterM isRenderableFile' =<< getRecursiveContents "."
-
- -- Find a main template to use
- mainTemplate <- take 1 <$> getRecursiveContents templateDir
- logHakyll $ case mainTemplate of [] -> "Using no template"
- (x : _) -> "Using template " ++ x
-
- -- Render all files using this template
- forM_ files $ renderChain mainTemplate . createPage
-
- -- Render a static directory
- directory static staticDir
-
- -- Render a css directory
- directory css cssDir
- where
- -- A file should have a renderable extension and not be in a hakyll
- -- directory, and not in a special directory.
- isRenderableFile' file = do
- inHakyllDirectory' <- inHakyllDirectory file
- return $ isRenderableFile file
- && not (any (inDirectory file) [templateDir, cssDir, staticDir])
- && not inHakyllDirectory'
-
- -- Directories
- templateDir = "templates"
- cssDir = "css"
- staticDir = "static"
diff --git a/src/Text/Hakyll/Context.hs b/src/Text/Hakyll/Context.hs
deleted file mode 100644
index 9045a65..0000000
--- a/src/Text/Hakyll/Context.hs
+++ /dev/null
@@ -1,16 +0,0 @@
--- | This (quite small) module exports the datatype used for contexts. A
--- @Context@ is a simple key-value mapping. You can render these @Context@s
--- with templates, and manipulate them in various ways.
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-module Text.Hakyll.Context
- ( Context (..)
- ) where
-
-import Data.Monoid (Monoid)
-import Data.Map (Map)
-import Data.Binary (Binary)
-
--- | Datatype used for key-value mappings.
-newtype Context = Context { -- | Extract the context.
- unContext :: Map String String
- } deriving (Show, Monoid, Binary)
diff --git a/src/Text/Hakyll/ContextManipulations.hs b/src/Text/Hakyll/ContextManipulations.hs
deleted file mode 100644
index 1c26f72..0000000
--- a/src/Text/Hakyll/ContextManipulations.hs
+++ /dev/null
@@ -1,124 +0,0 @@
--- | This module exports a number of functions that produce @HakyllAction@s to
--- manipulate @Context@s.
-module Text.Hakyll.ContextManipulations
- ( renderValue
- , changeValue
- , changeUrl
- , copyValue
- , renderDate
- , renderDateWithLocale
- , changeExtension
- , renderBody
- , takeBody
- ) where
-
-import Control.Monad (liftM)
-import Control.Arrow (arr)
-import System.Locale (TimeLocale, defaultTimeLocale)
-import System.FilePath (takeFileName, addExtension, dropExtension)
-import Data.Time.Format (parseTime, formatTime)
-import Data.Time.Clock (UTCTime)
-import Data.Maybe (fromMaybe)
-import qualified Data.Map as M
-
-import Text.Hakyll.Regex (substituteRegex)
-import Text.Hakyll.HakyllAction (HakyllAction (..))
-import Text.Hakyll.Context (Context (..))
-
--- | Do something with a value in a @Context@, but keep the old value as well.
--- If the key given is not present in the @Context@, nothing will happen.
---
-renderValue :: String -- ^ Key of which the value should be copied.
- -> String -- ^ Key the value should be copied to.
- -> (String -> String) -- ^ Function to apply on the value.
- -> HakyllAction Context Context
-renderValue source destination f = arr $ \(Context context) -> Context $
- case M.lookup source context of
- Nothing -> context
- (Just value) -> M.insert destination (f value) context
-
--- | Change a value in a @Context@.
---
--- > import Data.Char (toUpper)
--- > changeValue "title" (map toUpper)
---
--- Will put the title in UPPERCASE.
-changeValue :: String -- ^ Key to change.
- -> (String -> String) -- ^ Function to apply on the value.
- -> HakyllAction Context Context
-changeValue key = renderValue key key
-
--- | Change the URL of a page. This requires a special function, so dependency
--- handling can happen correctly.
---
-changeUrl :: (String -> String) -- ^ Function to change URL with.
- -> HakyllAction Context Context -- ^ Resulting action.
-changeUrl f = let action = changeValue "url" f
- in action {actionUrl = Right $ liftM f}
-
--- | Copy a value from one key to another in a @Context@.
-copyValue :: String -- ^ Source key.
- -> String -- ^ Destination key.
- -> HakyllAction Context Context
-copyValue source destination = renderValue source destination id
-
--- | When the context has a key called @path@ in a
--- @folder/yyyy-mm-dd-title.extension@ format (the convention for pages),
--- this function can render the date.
---
--- > renderDate "date" "%B %e, %Y" "Date unknown"
---
--- Will render something like @January 32, 2010@.
---
-renderDate :: String -- ^ Key in which the rendered date should be placed.
- -> String -- ^ Format to use on the date.
- -> String -- ^ Default key, in case the date cannot be parsed.
- -> HakyllAction Context Context
-renderDate = renderDateWithLocale defaultTimeLocale
-
--- | This is an extended version of 'renderDate' that allows you to specify a
--- time locale that is used for outputting the date. For more details, see
--- 'renderDate'.
---
-renderDateWithLocale :: TimeLocale -- ^ Output time locale.
- -> String -- ^ Destination key.
- -> String -- ^ Format to use on the date.
- -> String -- ^ Default key.
- -> HakyllAction Context Context
-renderDateWithLocale locale key format defaultValue =
- renderValue "path" key renderDate'
- where
- renderDate' filePath = fromMaybe defaultValue $ do
- let dateString = substituteRegex "^([0-9]*-[0-9]*-[0-9]*).*" "\\1"
- (takeFileName filePath)
- time <- parseTime defaultTimeLocale
- "%Y-%m-%d"
- dateString :: Maybe UTCTime
- return $ formatTime locale format time
-
--- | Change the extension of a file. This is only needed when you want to
--- render, for example, mardown to @.php@ files instead of @.html@ files.
---
--- > changeExtension "php"
---
--- Will render @test.markdown@ to @test.php@ instead of @test.html@.
-changeExtension :: String -- ^ Extension to change to.
- -> HakyllAction Context Context
-changeExtension extension = changeValue "url" changeExtension'
- where
- changeExtension' = flip addExtension extension . dropExtension
-
--- | Change the body of a file using a certain manipulation.
---
--- > import Data.Char (toUpper)
--- > renderBody (map toUpper)
---
--- Will put the entire body of the page in UPPERCASE.
-renderBody :: (String -> String)
- -> HakyllAction Context Context
-renderBody = renderValue "body" "body"
-
--- | Get the resulting body text from a context
---
-takeBody :: HakyllAction Context String
-takeBody = arr $ fromMaybe "" . M.lookup "body" . unContext
diff --git a/src/Text/Hakyll/CreateContext.hs b/src/Text/Hakyll/CreateContext.hs
deleted file mode 100644
index 6a0e321..0000000
--- a/src/Text/Hakyll/CreateContext.hs
+++ /dev/null
@@ -1,114 +0,0 @@
--- | A module that provides different ways to create a @Context@. These
--- functions all use the @HakyllAction@ arrow, so they produce values of the
--- type @HakyllAction () Context@.
-module Text.Hakyll.CreateContext
- ( createPage
- , createCustomPage
- , createListing
- , addField
- , combine
- , combineWithUrl
- ) where
-
-import Prelude hiding (id)
-
-import qualified Data.Map as M
-import Control.Arrow (second, arr, (&&&), (***))
-import Control.Monad (liftM2)
-import Control.Applicative ((<$>))
-import Control.Arrow ((>>>))
-import Control.Category (id)
-
-import Text.Hakyll.Context
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Render
-import Text.Hakyll.Page
-import Text.Hakyll.Pandoc
-import Text.Hakyll.Internal.Cache
-
--- | Create a @Context@ from a page file stored on the disk. This is probably
--- the most common way to create a @Context@.
-createPage :: FilePath -> HakyllAction () Context
-createPage path = cacheAction "pages" $ readPageAction path >>> renderAction
-
--- | Create a custom page @Context@.
---
--- The association list given maps keys to values for substitution. Note
--- that as value, you can either give a @String@ or a
--- @HakyllAction () String@. The latter is preferred for more complex data,
--- since it allows dependency checking. A @String@ is obviously more simple
--- to use in some cases.
---
-createCustomPage :: FilePath
- -> [(String, Either String (HakyllAction () String))]
- -> HakyllAction () Context
-createCustomPage url association = HakyllAction
- { actionDependencies = dataDependencies
- , actionUrl = Left $ return url
- , actionFunction = \_ -> Context . M.fromList <$> assoc'
- }
- where
- mtuple (a, b) = b >>= \b' -> return (a, b')
- toHakyllString = second (either return runHakyllAction)
- assoc' = mapM (mtuple . toHakyllString) $ ("url", Left url) : association
- dataDependencies = map snd association >>= getDependencies
- getDependencies (Left _) = []
- getDependencies (Right x) = actionDependencies x
-
--- | A @createCustomPage@ function specialized in creating listings.
---
--- This function creates a listing of a certain list of @Context@s. Every
--- item in the list is created by applying the given template to every
--- renderable. You can also specify additional context to be included in the
--- @CustomPage@.
-createListing :: FilePath -- ^ Destination of the page.
- -> [FilePath] -- ^ Templates to render items with.
- -> [HakyllAction () Context] -- ^ Renderables in the list.
- -> [(String, Either String (HakyllAction () String))]
- -> HakyllAction () Context
-createListing url templates renderables additional =
- createCustomPage url context
- where
- context = ("body", Right concatenation) : additional
- concatenation = renderAndConcat templates renderables
-
--- | Add a field to a 'Context'.
---
-addField :: String -- ^ Key
- -> Either String (HakyllAction () String) -- ^ Value
- -> HakyllAction Context Context -- ^ Result
-addField key value = arr (const ()) &&& id
- >>> value' *** id
- >>> arr (uncurry insert)
- where
- value' = arr (const ()) >>> either (arr . const) id value
- insert v = Context . M.insert key v . unContext
-
--- | Combine two @Context@s. The url will always be taken from the first
--- @Renderable@. Also, if a `$key` is present in both renderables, the
--- value from the first @Context@ will be taken as well.
---
--- You can see this as a this as a @union@ between two mappings.
-combine :: HakyllAction () Context -> HakyllAction () Context
- -> HakyllAction () Context
-combine x y = HakyllAction
- { actionDependencies = actionDependencies x ++ actionDependencies y
- , actionUrl = actionUrl x
- , actionFunction = \_ ->
- Context <$> liftM2 (M.union) (unContext <$> runHakyllAction x)
- (unContext <$> runHakyllAction y)
- }
-
--- | Combine two @Context@s and set a custom URL. This behaves like @combine@,
--- except that for the @url@ field, the given URL is always chosen.
-combineWithUrl :: FilePath
- -> HakyllAction () Context
- -> HakyllAction () Context
- -> HakyllAction () Context
-combineWithUrl url x y = combine'
- { actionUrl = Left $ return url
- , actionFunction = \_ ->
- Context . M.insert "url" url . unContext <$> runHakyllAction combine'
- }
- where
- combine' = combine x y
diff --git a/src/Text/Hakyll/Feed.hs b/src/Text/Hakyll/Feed.hs
deleted file mode 100644
index be8d023..0000000
--- a/src/Text/Hakyll/Feed.hs
+++ /dev/null
@@ -1,112 +0,0 @@
--- | A Module that allows easy rendering of RSS feeds. If you use this module,
--- you must make sure you set the `absoluteUrl` field in the main Hakyll
--- configuration.
---
--- Apart from that, the main rendering functions (@renderRss@, @renderAtom@)
--- all assume that you pass the list of items so that the most recent entry
--- in the feed is the first item in the list.
---
--- Also note that the @Context@s should have (at least) the following
--- fields to produce a correct feed:
---
--- - @$title@: Title of the item.
---
--- - @$description@: Description to appear in the feed.
---
--- - @$url@: URL to the item - this is usually set automatically.
---
--- Furthermore, the feed will be generated, but will be incorrect (it won't
--- validate) if an empty list is passed.
---
-module Text.Hakyll.Feed
- ( FeedConfiguration (..)
- , renderRss
- , renderAtom
- ) where
-
-import Control.Arrow ((>>>), second)
-import Control.Monad.Reader (liftIO)
-import Data.Maybe (fromMaybe)
-import qualified Data.Map as M
-
-import Text.Hakyll.Context (Context (..))
-import Text.Hakyll.CreateContext (createListing)
-import Text.Hakyll.ContextManipulations (renderDate)
-import Text.Hakyll.HakyllMonad (Hakyll)
-import Text.Hakyll.Render (render, renderChain)
-import Text.Hakyll.HakyllAction
-
-import Paths_hakyll
-
--- | This is a data structure to keep the configuration of a feed.
-data FeedConfiguration = FeedConfiguration
- { -- | Url of the feed (relative to site root). For example, @rss.xml@.
- feedUrl :: String
- , -- | Title of the feed.
- feedTitle :: String
- , -- | Description of the feed.
- feedDescription :: String
- , -- | Name of the feed author.
- feedAuthorName :: String
- }
-
--- | This is an auxiliary function to create a listing that is, in fact, a feed.
--- The items should be sorted on date.
-createFeed :: FeedConfiguration -- ^ Feed configuration.
- -> [HakyllAction () Context] -- ^ Items to include.
- -> FilePath -- ^ Feed template.
- -> FilePath -- ^ Item template.
- -> HakyllAction () Context
-createFeed configuration renderables template itemTemplate =
- listing >>> render template
- where
- listing = createListing (feedUrl configuration)
- [itemTemplate] renderables additional
-
- additional = map (second $ Left . ($ configuration))
- [ ("title", feedTitle)
- , ("description", feedDescription)
- , ("authorName", feedAuthorName)
- ] ++ updated
-
- -- Take the first timestamp, which should be the most recent.
- updated = let action = createHakyllAction $ return . fromMaybe "foo"
- . M.lookup "timestamp" . unContext
- toTuple r = ("timestamp", Right $ r >>> action)
- in map toTuple $ take 1 renderables
-
-
--- | Abstract function to render any feed.
-renderFeed :: FeedConfiguration -- ^ Feed configuration.
- -> [HakyllAction () Context] -- ^ Items to include in the feed.
- -> FilePath -- ^ Feed template.
- -> FilePath -- ^ Item template.
- -> Hakyll ()
-renderFeed configuration renderables template itemTemplate = do
- template' <- liftIO $ getDataFileName template
- itemTemplate' <- liftIO $ getDataFileName itemTemplate
- let renderFeed' = createFeed configuration renderables
- template' itemTemplate'
- renderChain [] renderFeed'
-
--- | Render an RSS feed with a number of items.
-renderRss :: FeedConfiguration -- ^ Feed configuration.
- -> [HakyllAction () Context] -- ^ Items to include in the feed.
- -> Hakyll ()
-renderRss configuration renderables =
- renderFeed configuration (map (>>> renderRssDate) renderables)
- "templates/rss.xml" "templates/rss-item.xml"
- where
- renderRssDate = renderDate "timestamp" "%a, %d %b %Y %H:%M:%S UT"
- "No date found."
-
--- | Render an Atom feed with a number of items.
-renderAtom :: FeedConfiguration -- ^ Feed configuration.
- -> [HakyllAction () Context] -- ^ Items to include in the feed.
- -> Hakyll ()
-renderAtom configuration renderables =
- renderFeed configuration (map (>>> renderAtomDate) renderables)
- "templates/atom.xml" "templates/atom-item.xml"
- where
- renderAtomDate = renderDate "timestamp" "%Y-%m-%dT%H:%M:%SZ"
- "No date found."
diff --git a/src/Text/Hakyll/File.hs b/src/Text/Hakyll/File.hs
deleted file mode 100644
index 167ece7..0000000
--- a/src/Text/Hakyll/File.hs
+++ /dev/null
@@ -1,196 +0,0 @@
--- | A module containing various function for manipulating and examinating
--- files and directories.
-module Text.Hakyll.File
- ( toDestination
- , toCache
- , toUrl
- , toRoot
- , inDirectory
- , inHakyllDirectory
- , removeSpaces
- , makeDirectories
- , getRecursiveContents
- , sortByBaseName
- , havingExtension
- , directory
- , isMoreRecent
- , isFileMoreRecent
- ) where
-
-import System.Directory
-import Control.Applicative ((<$>))
-import System.FilePath
-import System.Time (ClockTime)
-import Control.Monad
-import Data.List (isPrefixOf, sortBy)
-import Data.Ord (comparing)
-import Control.Monad.Reader (liftIO)
-
-import Text.Hakyll.HakyllMonad
-import Text.Hakyll.Internal.FileType (isRenderableFile)
-
--- | Auxiliary function to remove pathSeparators form the start. We don't deal
--- with absolute paths here. We also remove $root from the start.
-removeLeadingSeparator :: FilePath -> FilePath
-removeLeadingSeparator [] = []
-removeLeadingSeparator path
- | head path' `elem` pathSeparators = drop 1 path'
- | otherwise = path'
- where
- path' = if "$root" `isPrefixOf` path then drop 5 path
- else path
-
--- | Convert a relative URL to a filepath in the destination
--- (default: @_site@).
-toDestination :: FilePath -> Hakyll FilePath
-toDestination url = do dir <- askHakyll siteDirectory
- toFilePath dir url
-
--- | Convert a relative URL to a filepath in the cache
--- (default: @_cache@).
-toCache :: FilePath -> Hakyll FilePath
-toCache path = do dir <- askHakyll cacheDirectory
- toFilePath dir path
-
--- | Implementation of toDestination/toCache
---
-toFilePath :: String -- ^ Directory (site or cache)
- -> String -- ^ URL
- -> Hakyll FilePath -- ^ Resulting file path
-toFilePath dir url = do
- enableIndexUrl' <- askHakyll enableIndexUrl
- let destination = if enableIndexUrl' && separatorEnd
- then dir </> noSeparator </> "index.html"
- else dir </> noSeparator
- return destination
- where
- noSeparator = removeLeadingSeparator url
- separatorEnd = not (null url) && last url == '/'
-
--- | Get the url for a given page. For most extensions, this would be the path
--- itself. It's only for rendered extensions (@.markdown@, @.rst@, @.lhs@ this
--- function returns a path with a @.html@ extension instead.
-toUrl :: FilePath -> Hakyll FilePath
-toUrl path = do enableIndexUrl' <- askHakyll enableIndexUrl
- -- If the file does not have a renderable extension, like for
- -- example favicon.ico, we don't have to change it at all.
- return $ if not (isRenderableFile path)
- then path
- -- If index url's are enabled, we create pick it
- -- unless the page is an index already.
- else if enableIndexUrl' && not isIndex
- then indexUrl
- else withSimpleHtmlExtension
- where
- isIndex = dropExtension (takeFileName path) == "index"
- withSimpleHtmlExtension = flip addExtension ".html" $ dropExtension path
- indexUrl = dropExtension path ++ "/"
-
-
--- | Get the relative url to the site root, for a given (absolute) url
-toRoot :: FilePath -> FilePath
-toRoot = emptyException . joinPath . map parent . splitPath
- . takeDirectory . removeLeadingSeparator
- where
- parent = const ".."
- emptyException [] = "."
- emptyException x = x
-
--- | Check if a file is in a given directory.
---
-inDirectory :: FilePath -- ^ File path
- -> FilePath -- ^ Directory
- -> Bool -- ^ Result
-inDirectory path dir = case splitDirectories path of
- [] -> False
- (x : _) -> x == dir
-
--- | Check if a file is in a Hakyll directory. With a Hakyll directory, we mean
--- a directory that should be "ignored" such as the @_site@ or @_cache@
--- directory.
---
--- Example:
---
--- > inHakyllDirectory "_cache/pages/index.html"
---
--- Result:
---
--- > True
---
-inHakyllDirectory :: FilePath -> Hakyll Bool
-inHakyllDirectory path =
- or <$> mapM (liftM (inDirectory path) . askHakyll)
- [siteDirectory, cacheDirectory]
-
--- | Swaps spaces for '-'.
-removeSpaces :: FilePath -> FilePath
-removeSpaces = map swap
- where
- swap ' ' = '-'
- swap x = x
-
--- | Given a path to a file, try to make the path writable by making
--- all directories on the path.
-makeDirectories :: FilePath -> Hakyll ()
-makeDirectories path = liftIO $ createDirectoryIfMissing True dir
- where
- dir = takeDirectory path
-
--- | Get all contents of a directory. Note that files starting with a dot (.)
--- will be ignored.
---
-getRecursiveContents :: FilePath -> Hakyll [FilePath]
-getRecursiveContents topdir = do
- topdirExists <- liftIO $ doesDirectoryExist topdir
- if topdirExists
- then do names <- liftIO $ getDirectoryContents topdir
- let properNames = filter isProper names
- paths <- forM properNames $ \name -> do
- let path = topdir </> name
- isDirectory <- liftIO $ doesDirectoryExist path
- if isDirectory
- then getRecursiveContents path
- else return [normalise path]
- return (concat paths)
- else return []
- where
- isProper = not . (== '.') . head
-
--- | Sort a list of filenames on the basename.
-sortByBaseName :: [FilePath] -> [FilePath]
-sortByBaseName = sortBy compareBaseName
- where
- compareBaseName = comparing takeFileName
-
--- | A filter that takes all file names with a given extension. Prefix the
--- extension with a dot:
---
--- > havingExtension ".markdown" [ "index.markdown"
--- > , "style.css"
--- > ] == ["index.markdown"]
-havingExtension :: String -> [FilePath] -> [FilePath]
-havingExtension extension = filter ((==) extension . takeExtension)
-
--- | Perform a Hakyll action on every file in a given directory.
-directory :: (FilePath -> Hakyll ()) -> FilePath -> Hakyll ()
-directory action dir = getRecursiveContents dir >>= mapM_ action
-
--- | Check if a timestamp is newer then a number of given files.
-isMoreRecent :: ClockTime -- ^ The time to check.
- -> [FilePath] -- ^ Dependencies of the cached file.
- -> Hakyll Bool
-isMoreRecent _ [] = return True
-isMoreRecent timeStamp depends = do
- dependsModified <- liftIO $ mapM getModificationTime depends
- return (timeStamp >= maximum dependsModified)
-
--- | Check if a file is newer then a number of given files.
-isFileMoreRecent :: FilePath -- ^ The cached file.
- -> [FilePath] -- ^ Dependencies of the cached file.
- -> Hakyll Bool
-isFileMoreRecent file depends = do
- exists <- liftIO $ doesFileExist file
- if not exists
- then return False
- else do timeStamp <- liftIO $ getModificationTime file
- isMoreRecent timeStamp depends
diff --git a/src/Text/Hakyll/HakyllAction.hs b/src/Text/Hakyll/HakyllAction.hs
deleted file mode 100644
index 491f1f1..0000000
--- a/src/Text/Hakyll/HakyllAction.hs
+++ /dev/null
@@ -1,98 +0,0 @@
--- | This is the module which exports @HakyllAction@.
-module Text.Hakyll.HakyllAction
- ( HakyllAction (..)
- , createHakyllAction
- , createSimpleHakyllAction
- , createFileHakyllAction
- , chain
- , runHakyllAction
- , runHakyllActionIfNeeded
- ) where
-
-import Control.Arrow
-import Control.Category
-import Control.Monad ((<=<), unless)
-import Prelude hiding ((.), id)
-
-import Text.Hakyll.File (toDestination, isFileMoreRecent)
-import Text.Hakyll.HakyllMonad
-
--- | Type used for rendering computations that carry along dependencies.
-data HakyllAction a b = HakyllAction
- { -- | Dependencies of the @HakyllAction@.
- actionDependencies :: [FilePath]
- , -- | URL pointing to the result of this @HakyllAction@.
- actionUrl :: Either (Hakyll FilePath)
- (Hakyll FilePath -> Hakyll FilePath)
- , -- | The actual render function.
- actionFunction :: a -> Hakyll b
- }
-
--- | Create a @HakyllAction@ from a function.
-createHakyllAction :: (a -> Hakyll b) -- ^ Function to execute.
- -> HakyllAction a b
-createHakyllAction f = id { actionFunction = f }
-
--- | Create a @HakyllAction@ from a simple @Hakyll@ value.
-createSimpleHakyllAction :: Hakyll b -- ^ Hakyll value to pass on.
- -> HakyllAction () b
-createSimpleHakyllAction = createHakyllAction . const
-
--- | Create a @HakyllAction@ that operates on one file.
-createFileHakyllAction :: FilePath -- ^ File to operate on.
- -> Hakyll b -- ^ Value to pass on.
- -> HakyllAction () b -- ^ The resulting action.
-createFileHakyllAction path action = HakyllAction
- { actionDependencies = [path]
- , actionUrl = Left $ return path
- , actionFunction = const action
- }
-
--- | Run a @HakyllAction@ now.
-runHakyllAction :: HakyllAction () a -- ^ Render action to run.
- -> Hakyll a -- ^ Result of the action.
-runHakyllAction action = actionFunction action ()
-
--- | Run a @HakyllAction@, but only when it is out-of-date. At this point, the
--- @actionUrl@ field must be set.
-runHakyllActionIfNeeded :: HakyllAction () () -- ^ Action to run.
- -> Hakyll () -- ^ Empty result.
-runHakyllActionIfNeeded action = do
- url <- case actionUrl action of
- Left u -> u
- Right _ -> error "No url when checking dependencies."
- destination <- toDestination url
- valid <- isFileMoreRecent destination $ actionDependencies action
- unless valid $ do logHakyll $ "Rendering " ++ destination
- runHakyllAction action
-
--- | Chain a number of @HakyllAction@ computations.
-chain :: [HakyllAction a a] -- ^ Actions to chain.
- -> HakyllAction a a -- ^ Resulting action.
-chain [] = id
-chain list = foldl1 (>>>) list
-
-instance Category HakyllAction where
- id = HakyllAction
- { actionDependencies = []
- , actionUrl = Right id
- , actionFunction = return
- }
-
- x . y = HakyllAction
- { actionDependencies = actionDependencies x ++ actionDependencies y
- , actionUrl = case actionUrl x of
- Left ux -> Left ux
- Right fx -> case actionUrl y of
- Left uy -> Left (fx uy)
- Right fy -> Right (fx . fy)
- , actionFunction = actionFunction x <=< actionFunction y
- }
-
-instance Arrow HakyllAction where
- arr f = id { actionFunction = return . f }
-
- first x = x
- { actionFunction = \(y, z) -> do y' <- actionFunction x y
- return (y', z)
- }
diff --git a/src/Text/Hakyll/HakyllMonad.hs b/src/Text/Hakyll/HakyllMonad.hs
deleted file mode 100644
index f51cf2c..0000000
--- a/src/Text/Hakyll/HakyllMonad.hs
+++ /dev/null
@@ -1,99 +0,0 @@
--- | Module describing the Hakyll monad stack.
-module Text.Hakyll.HakyllMonad
- ( HakyllConfiguration (..)
- , PreviewMode (..)
- , Hakyll
- , askHakyll
- , getAdditionalContext
- , logHakyll
- , forkHakyllWait
- , concurrentHakyll
- ) where
-
-import Control.Monad.Trans (liftIO)
-import Control.Concurrent.MVar (MVar, putMVar, newEmptyMVar, readMVar)
-import Control.Monad.Reader (ReaderT, ask, runReaderT)
-import Control.Monad (liftM, forM, forM_)
-import qualified Data.Map as M
-import System.IO (hPutStrLn, stderr)
-
-import Text.Pandoc (ParserState, WriterOptions)
-import Text.Hamlet (HamletSettings)
-
-import Text.Hakyll.Context (Context (..))
-
--- | Our custom monad stack.
---
-type Hakyll = ReaderT HakyllConfiguration IO
-
--- | Preview mode.
---
-data PreviewMode = BuildOnRequest
- | BuildOnInterval
- deriving (Show, Eq, Ord)
-
--- | Hakyll global configuration type.
---
-data HakyllConfiguration = HakyllConfiguration
- { -- | Absolute URL of the site.
- absoluteUrl :: String
- , -- | An additional context to use when rendering. This additional context
- -- is used globally.
- additionalContext :: Context
- , -- | Directory where the site is placed.
- siteDirectory :: FilePath
- , -- | Directory for cache files.
- cacheDirectory :: FilePath
- , -- | Enable index links.
- enableIndexUrl :: Bool
- , -- | The preview mode used
- previewMode :: PreviewMode
- , -- | Pandoc parsing options
- pandocParserState :: ParserState
- , -- | Pandoc writer options
- pandocWriterOptions :: WriterOptions
- , -- | Hamlet settings (if you use hamlet for templates)
- hamletSettings :: HamletSettings
- }
-
--- | Simplified @ask@ function for the Hakyll monad stack.
---
--- Usage would typically be something like:
---
--- > doSomething :: a -> b -> Hakyll c
--- > doSomething arg1 arg2 = do
--- > siteDirectory' <- askHakyll siteDirectory
--- > ...
---
-askHakyll :: (HakyllConfiguration -> a) -> Hakyll a
-askHakyll = flip liftM ask
-
--- | Obtain the globally available, additional context.
---
-getAdditionalContext :: HakyllConfiguration -> Context
-getAdditionalContext configuration =
- let (Context c) = additionalContext configuration
- in Context $ M.insert "absolute" (absoluteUrl configuration) c
-
--- | Write some log information.
---
-logHakyll :: String -> Hakyll ()
-logHakyll = liftIO . hPutStrLn stderr
-
--- | Perform a concurrent hakyll action. Returns an MVar you can wait on
---
-forkHakyllWait :: Hakyll () -> Hakyll (MVar ())
-forkHakyllWait action = do
- mvar <- liftIO newEmptyMVar
- config <- ask
- liftIO $ do
- runReaderT action config
- putMVar mvar ()
- return mvar
-
--- | Perform a number of concurrent hakyll actions, and waits for them to finish
---
-concurrentHakyll :: [Hakyll ()] -> Hakyll ()
-concurrentHakyll actions = do
- mvars <- forM actions forkHakyllWait
- forM_ mvars (liftIO . readMVar)
diff --git a/src/Text/Hakyll/Internal/Cache.hs b/src/Text/Hakyll/Internal/Cache.hs
deleted file mode 100644
index b83d9af..0000000
--- a/src/Text/Hakyll/Internal/Cache.hs
+++ /dev/null
@@ -1,53 +0,0 @@
-module Text.Hakyll.Internal.Cache
- ( storeInCache
- , getFromCache
- , isCacheMoreRecent
- , cacheAction
- ) where
-
-import Control.Monad ((<=<))
-import Control.Monad.Reader (liftIO)
-import Data.Binary
-import System.FilePath ((</>))
-
-import Text.Hakyll.File
-import Text.Hakyll.HakyllMonad (Hakyll)
-import Text.Hakyll.HakyllAction
-
--- | We can store all datatypes instantiating @Binary@ to the cache. The cache
--- directory is specified by the @HakyllConfiguration@, usually @_cache@.
-storeInCache :: (Binary a) => a -> FilePath -> Hakyll ()
-storeInCache value path = do
- cachePath <- toCache path
- makeDirectories cachePath
- liftIO $ encodeFile cachePath value
-
--- | Get a value from the cache. The filepath given should not be located in the
--- cache. This function performs a timestamp check on the filepath and the
--- filepath in the cache, and only returns the cached value when it is still
--- up-to-date.
-getFromCache :: (Binary a) => FilePath -> Hakyll a
-getFromCache = liftIO . decodeFile <=< toCache
-
--- | Check if a file in the cache is more recent than a number of other files.
-isCacheMoreRecent :: FilePath -> [FilePath] -> Hakyll Bool
-isCacheMoreRecent file depends = toCache file >>= flip isFileMoreRecent depends
-
--- | Cache an entire arrow
---
-cacheAction :: Binary a
- => String
- -> HakyllAction () a
- -> HakyllAction () a
-cacheAction key action = action { actionFunction = const cacheFunction }
- where
- cacheFunction = do
- -- Construct a filename
- fileName <- fmap (key </>) $ either id (const $ return "unknown")
- $ actionUrl action
- -- Check the cache
- cacheOk <- isCacheMoreRecent fileName $ actionDependencies action
- if cacheOk then getFromCache fileName
- else do result <- actionFunction action ()
- storeInCache result fileName
- return result
diff --git a/src/Text/Hakyll/Internal/FileType.hs b/src/Text/Hakyll/Internal/FileType.hs
deleted file mode 100644
index 689c77f..0000000
--- a/src/Text/Hakyll/Internal/FileType.hs
+++ /dev/null
@@ -1,49 +0,0 @@
--- | A module dealing with file extensions and associated file types.
-module Text.Hakyll.Internal.FileType
- ( FileType (..)
- , getFileType
- , isRenderable
- , isRenderableFile
- ) where
-
-import System.FilePath (takeExtension)
-
--- | Datatype to represent the different file types Hakyll can deal with.
-data FileType = Html
- | LaTeX
- | LiterateHaskellMarkdown
- | Markdown
- | ReStructuredText
- | Text
- | UnknownFileType
- deriving (Eq, Ord, Show, Read)
-
--- | Get the file type for a certain file. The type is determined by extension.
-getFileType :: FilePath -> FileType
-getFileType = getFileType' . takeExtension
- where
- getFileType' ".htm" = Html
- getFileType' ".html" = Html
- getFileType' ".lhs" = LiterateHaskellMarkdown
- getFileType' ".markdown" = Markdown
- getFileType' ".md" = Markdown
- getFileType' ".mdn" = Markdown
- getFileType' ".mdown" = Markdown
- getFileType' ".mdwn" = Markdown
- getFileType' ".mkd" = Markdown
- getFileType' ".mkdwn" = Markdown
- getFileType' ".page" = Markdown
- getFileType' ".rst" = ReStructuredText
- getFileType' ".tex" = LaTeX
- getFileType' ".text" = Text
- getFileType' ".txt" = Text
- getFileType' _ = UnknownFileType
-
--- | Check if a certain @FileType@ is renderable.
-isRenderable :: FileType -> Bool
-isRenderable UnknownFileType = False
-isRenderable _ = True
-
--- | Check if a certain file is renderable.
-isRenderableFile :: FilePath -> Bool
-isRenderableFile = isRenderable . getFileType
diff --git a/src/Text/Hakyll/Internal/Template.hs b/src/Text/Hakyll/Internal/Template.hs
deleted file mode 100644
index cd6a3bd..0000000
--- a/src/Text/Hakyll/Internal/Template.hs
+++ /dev/null
@@ -1,86 +0,0 @@
-module Text.Hakyll.Internal.Template
- ( Template (..)
- , fromString
- , readTemplate
- , substitute
- , regularSubstitute
- , finalSubstitute
- ) where
-
-import Control.Arrow ((>>>))
-import Control.Applicative ((<$>))
-import Data.List (isPrefixOf)
-import Data.Char (isAlphaNum)
-import Data.Maybe (fromMaybe)
-import System.FilePath ((</>))
-import qualified Data.Map as M
-
-import Text.Hakyll.Context (Context (..))
-import Text.Hakyll.HakyllMonad (Hakyll)
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Pandoc
-import Text.Hakyll.Internal.Cache
-import Text.Hakyll.Page
-import Text.Hakyll.ContextManipulations
-import Text.Hakyll.Internal.Template.Template
-import Text.Hakyll.Internal.Template.Hamlet
-
--- | Construct a @Template@ from a string.
---
-fromString :: String -> Template
-fromString = Template . fromString'
- where
- fromString' [] = []
- fromString' string
- | "$$" `isPrefixOf` string =
- EscapeCharacter : (fromString' $ drop 2 string)
- | "$" `isPrefixOf` string =
- let (key, rest) = span isAlphaNum $ drop 1 string
- in Identifier key : fromString' rest
- | otherwise =
- let (chunk, rest) = break (== '$') string
- in Chunk chunk : fromString' rest
-
--- | Read a @Template@ from a file. This function might fetch the @Template@
--- from the cache, if available.
-readTemplate :: FilePath -> Hakyll Template
-readTemplate path = do
- isCacheMoreRecent' <- isCacheMoreRecent fileName [path]
- if isCacheMoreRecent'
- then getFromCache fileName
- else do
- template <- if isHamletRTFile path
- then readHamletTemplate
- else readDefaultTemplate
- storeInCache template fileName
- return template
- where
- fileName = "templates" </> path
- readDefaultTemplate = do
- body <- runHakyllAction $ readPageAction path
- >>> renderAction
- >>> takeBody
- return $ fromString body
-
- readHamletTemplate = fromHamletRT <$> readHamletRT path
-
--- | Substitutes @$identifiers@ in the given @Template@ by values from the given
--- "Context". When a key is not found, it is left as it is. You can specify
--- the characters used to replace escaped dollars (@$$@) here.
-substitute :: String -> Template -> Context -> String
-substitute escaper template context = substitute' =<< unTemplate template
- where
- substitute' (Chunk chunk) = chunk
- substitute' (Identifier key) =
- fromMaybe ('$' : key) $ M.lookup key $ unContext context
- substitute' (EscapeCharacter) = escaper
-
--- | @substitute@ for use during a chain. This will leave escaped characters as
--- they are.
-regularSubstitute :: Template -> Context -> String
-regularSubstitute = substitute "$$"
-
--- | @substitute@ for the end of a chain (just before writing). This renders
--- escaped characters.
-finalSubstitute :: Template -> Context -> String
-finalSubstitute = substitute "$"
diff --git a/src/Text/Hakyll/Internal/Template/Hamlet.hs b/src/Text/Hakyll/Internal/Template/Hamlet.hs
deleted file mode 100644
index 458ab35..0000000
--- a/src/Text/Hakyll/Internal/Template/Hamlet.hs
+++ /dev/null
@@ -1,56 +0,0 @@
--- | Support for Hamlet templates in Hakyll.
---
-module Text.Hakyll.Internal.Template.Hamlet
- ( isHamletRTFile
- , readHamletRT
- , fromHamletRT
- ) where
-
-import Control.Exception (try)
-import Control.Monad.Trans (liftIO)
-import System.FilePath (takeExtension)
-
-import Text.Hamlet.RT
-
-import Text.Hakyll.Internal.Template.Template
-import Text.Hakyll.HakyllMonad (Hakyll, askHakyll, hamletSettings, logHakyll)
-
--- | Determine if a file is a hamlet template by extension.
---
-isHamletRTFile :: FilePath -> Bool
-isHamletRTFile fileName = takeExtension fileName `elem` [".hamlet", ".hml"]
-
--- | Read a 'HamletRT' by file name.
---
-readHamletRT :: FilePath -- ^ Filename of the template
- -> Hakyll HamletRT -- ^ Resulting hamlet template
-readHamletRT fileName = do
- settings <- askHakyll hamletSettings
- string <- liftIO $ readFile fileName
- result <- liftIO $ try $ parseHamletRT settings string
- case result of
- Left (HamletParseException s) -> error' s
- Left (HamletUnsupportedDocException d) -> error' $ show d
- Left (HamletRenderException s) -> error' s
- Right x -> return x
- where
- error' s = do
- logHakyll $ "Parse of hamlet file " ++ fileName ++ " failed."
- logHakyll s
- error "Parse failed."
-
--- | Convert a 'HamletRT' to a 'Template'
---
-fromHamletRT :: HamletRT -- ^ Hamlet runtime template
- -> Template -- ^ Hakyll template
-fromHamletRT (HamletRT sd) = Template $ map fromSimpleDoc sd
- where
- fromSimpleDoc :: SimpleDoc -> TemplateElement
- fromSimpleDoc (SDRaw chunk) = Chunk chunk
- fromSimpleDoc (SDVar [var]) = Identifier var
- fromSimpleDoc (SDVar _) =
- error "Hakyll does not support '.' in identifier names when using \
- \hamlet templates."
- fromSimpleDoc _ =
- error "Only simple $key$ identifiers are allowed when using hamlet \
- \templates."
diff --git a/src/Text/Hakyll/Internal/Template/Template.hs b/src/Text/Hakyll/Internal/Template/Template.hs
deleted file mode 100644
index 49373fd..0000000
--- a/src/Text/Hakyll/Internal/Template/Template.hs
+++ /dev/null
@@ -1,34 +0,0 @@
--- | Module containing the template data structure.
---
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-module Text.Hakyll.Internal.Template.Template
- ( Template (..)
- , TemplateElement (..)
- ) where
-
-import Control.Applicative ((<$>))
-
-import Data.Binary (Binary, get, getWord8, put, putWord8)
-
--- | Datatype used for template substitutions.
---
-newtype Template = Template { unTemplate :: [TemplateElement] }
- deriving (Show, Eq, Binary)
-
--- | Elements of a template.
---
-data TemplateElement = Chunk String
- | Identifier String
- | EscapeCharacter
- deriving (Show, Eq)
-
-instance Binary TemplateElement where
- put (Chunk string) = putWord8 0 >> put string
- put (Identifier key) = putWord8 1 >> put key
- put (EscapeCharacter) = putWord8 2
-
- get = getWord8 >>= \tag ->
- case tag of 0 -> Chunk <$> get
- 1 -> Identifier <$> get
- 2 -> return EscapeCharacter
- _ -> error "Error reading cached template"
diff --git a/src/Text/Hakyll/Page.hs b/src/Text/Hakyll/Page.hs
deleted file mode 100644
index f2b5fde..0000000
--- a/src/Text/Hakyll/Page.hs
+++ /dev/null
@@ -1,108 +0,0 @@
--- | A module for dealing with @Page@s. This module is mostly internally used.
-module Text.Hakyll.Page
- ( PageSection (..)
- , readPage
- , readPageAction
- ) where
-
-import Data.List (isPrefixOf)
-import Data.Char (isSpace)
-import Control.Monad.Reader (liftIO)
-import System.FilePath
-import Control.Monad.State (State, evalState, get, put)
-
-import Text.Hakyll.File
-import Text.Hakyll.HakyllMonad
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Regex (substituteRegex, matchesRegex)
-import Text.Hakyll.Util (trim)
-
--- | A page is first parsed into a number of page sections. A page section
--- consists of:
---
--- * A key
---
--- * A value
---
--- * A 'Bool' flag, indicating if the value is applicable for rendering
---
-data PageSection = PageSection {unPageSection :: (String, String, Bool)}
- deriving (Show)
-
--- | Split a page into sections.
---
-splitAtDelimiters :: [String] -> State (Maybe String) [[String]]
-splitAtDelimiters [] = return []
-splitAtDelimiters ls@(x:xs) = do
- delimiter <- get
- if not (isDelimiter delimiter x)
- then return [ls]
- else do let proper = takeWhile (== '-') x
- (content, rest) = break (isDelimiter $ Just proper) xs
- put $ Just proper
- rest' <- splitAtDelimiters rest
- return $ (x : content) : rest'
- where
- isDelimiter old = case old of
- Nothing -> isPossibleDelimiter
- (Just d) -> (== d) . takeWhile (== '-')
-
--- | Check if the given string is a metadata delimiter.
-isPossibleDelimiter :: String -> Bool
-isPossibleDelimiter = isPrefixOf "---"
-
--- | Read one section of a page.
---
-readSection :: Bool -- ^ If this section is the first section in the page.
- -> [String] -- ^ Lines in the section.
- -> [PageSection] -- ^ Key-values extracted.
-readSection _ [] = []
-readSection isFirst ls
- | not isDelimiter' = [body ls]
- | isNamedDelimiter = readSectionMetaData ls
- | isFirst = readSimpleMetaData (drop 1 ls)
- | otherwise = [body (drop 1 ls)]
- where
- isDelimiter' = isPossibleDelimiter (head ls)
- isNamedDelimiter = head ls `matchesRegex` "^----* *[a-zA-Z0-9][a-zA-Z0-9]*"
- body ls' = PageSection ("body", unlines ls', True)
-
- readSimpleMetaData = map readPair . filter (not . all isSpace)
- readPair = trimPair . break (== ':')
- trimPair (key, value) = PageSection (trim key, trim (drop 1 value), False)
-
- readSectionMetaData [] = []
- readSectionMetaData (header:value) =
- let key = substituteRegex "[^a-zA-Z0-9]" "" header
- in [PageSection (key, unlines value, True)]
-
--- | Read a page from a file. Metadata is supported.
---
-readPage :: FilePath -> Hakyll [PageSection]
-readPage path = do
- let sectionFunctions = map readSection $ True : repeat False
-
- -- Read file.
- contents <- liftIO $ readFile path
- url <- toUrl path
- let sections = evalState (splitAtDelimiters $ lines contents) Nothing
- sectionsData = concat $ zipWith ($) sectionFunctions sections
-
- -- Note that url, path etc. are listed first, which means can be overwritten
- -- by section data
- return $ PageSection ("url", url, False)
- : PageSection ("path", path, False)
- : PageSection ("title", takeBaseName path, False)
- : (category ++ sectionsData)
- where
- category = let dirs = splitDirectories $ takeDirectory path
- in [PageSection ("category", last dirs, False) | not (null dirs)]
-
--- | Read a page from a file. Metadata is supported.
---
-readPageAction :: FilePath -> HakyllAction () [PageSection]
-readPageAction path = HakyllAction
- { actionDependencies = [path]
- , actionUrl = Left $ toUrl path
- , actionFunction = const $ readPage path
- }
diff --git a/src/Text/Hakyll/Paginate.hs b/src/Text/Hakyll/Paginate.hs
deleted file mode 100644
index 04194ca..0000000
--- a/src/Text/Hakyll/Paginate.hs
+++ /dev/null
@@ -1,94 +0,0 @@
--- | Module aimed to paginate web pages.
---
-module Text.Hakyll.Paginate
- ( PaginateConfiguration (..)
- , defaultPaginateConfiguration
- , paginate
- ) where
-
-import Control.Applicative ((<$>))
-
-import Text.Hakyll.Context (Context)
-import Text.Hakyll.CreateContext
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Util (link)
-
--- | A configuration for a pagination.
---
-data PaginateConfiguration = PaginateConfiguration
- { -- | Label for the link to the previous page.
- previousLabel :: String
- , -- | Label for the link to the next page.
- nextLabel :: String
- , -- | Label for the link to the first page.
- firstLabel :: String
- , -- | Label for the link to the last page.
- lastLabel :: String
- }
-
--- | A simple default configuration for pagination.
---
-defaultPaginateConfiguration :: PaginateConfiguration
-defaultPaginateConfiguration = PaginateConfiguration
- { previousLabel = "Previous"
- , nextLabel = "Next"
- , firstLabel = "First"
- , lastLabel = "Last"
- }
-
--- | The most important function for pagination. This function operates on a
--- list of @Context@s (the pages), and basically just adds fields to them
--- by combining them with a custom page.
---
--- The following metadata fields will be added:
---
--- - @$previous@: A link to the previous page.
---
--- - @$next@: A link to the next page.
---
--- - @$first@: A link to the first page.
---
--- - @$last@: A link to the last page.
---
--- - @$index@: 1-based index of the current page.
---
--- - @$length@: Total number of pages.
---
--- When @$previous@ or @$next@ are not available, they will be just a label
--- without a link. The same goes for when we are on the first or last page for
--- @$first@ and @$last@.
---
-paginate :: PaginateConfiguration
- -> [HakyllAction () Context]
- -> [HakyllAction () Context]
-paginate configuration renderables = paginate' Nothing renderables (1 :: Int)
- where
- -- Create a link with a given label, taken from the configuration.
- linkWithLabel f r = Right $ case actionUrl r of
- Left l -> createSimpleHakyllAction $
- link (f configuration) . ("$root/" ++) <$> l
- Right _ -> error "No link found for pagination."
-
- -- The main function that creates combined renderables by recursing over
- -- the list of items.
- paginate' _ [] _ = []
- paginate' maybePrev (x:xs) index =
- let (previous, first) = case maybePrev of
- (Just r) -> ( linkWithLabel previousLabel r
- , linkWithLabel firstLabel (head renderables) )
- Nothing -> ( Left $ previousLabel configuration
- , Left $ firstLabel configuration )
- (next, last') = case xs of
- (n:_) -> ( linkWithLabel nextLabel n
- , linkWithLabel lastLabel (last renderables) )
- [] -> ( Left $ nextLabel configuration
- , Left $ lastLabel configuration )
- customPage = createCustomPage ""
- [ ("previous", previous)
- , ("next", next)
- , ("first", first)
- , ("last", last')
- , ("index", Left $ show index)
- , ("length", Left $ show $ length renderables)
- ]
- in (x `combine` customPage) : paginate' (Just x) xs (index + 1)
diff --git a/src/Text/Hakyll/Pandoc.hs b/src/Text/Hakyll/Pandoc.hs
deleted file mode 100644
index c0dec77..0000000
--- a/src/Text/Hakyll/Pandoc.hs
+++ /dev/null
@@ -1,57 +0,0 @@
--- | Module exporting a pandoc arrow
---
-module Text.Hakyll.Pandoc
- ( renderAction
- , renderActionWith
- ) where
-
-import Data.Maybe (fromMaybe)
-import qualified Data.Map as M
-import Control.Arrow (second, (>>>), arr, (&&&))
-
-import Text.Pandoc
-
-import Text.Hakyll.Internal.FileType
-import Text.Hakyll.Page
-import Text.Hakyll.HakyllMonad
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Context
-
--- | Get a render function for a given extension.
---
-getRenderFunction :: HakyllAction FileType (String -> String)
-getRenderFunction = createHakyllAction $ \fileType -> case fileType of
- Html -> return id
- Text -> return id
- UnknownFileType -> return id
- _ -> do parserState <- askHakyll pandocParserState
- writerOptions <- askHakyll pandocWriterOptions
- return $ writeHtmlString writerOptions
- . readFunction fileType (readOptions parserState fileType)
- where
- readFunction ReStructuredText = readRST
- readFunction LaTeX = readLaTeX
- readFunction Markdown = readMarkdown
- readFunction LiterateHaskellMarkdown = readMarkdown
- readFunction t = error $ "Cannot render " ++ show t
-
- readOptions options LiterateHaskellMarkdown = options
- { stateLiterateHaskell = True }
- readOptions options _ = options
-
--- | An action that renders the list of page sections to a context using pandoc
---
-renderAction :: HakyllAction [PageSection] Context
-renderAction = (arr id &&& (getFileType' >>> getRenderFunction))
- >>> renderActionWith
- where
- getFileType' = arr $ getFileType . fromMaybe "unknown" . lookup "path"
- . map (\(x, y, _) -> (x, y)) . map unPageSection
-
--- | An action to render pages, offering just a little more flexibility
---
-renderActionWith :: HakyllAction ([PageSection], String -> String) Context
-renderActionWith = createHakyllAction $ \(sections, render') -> return $
- Context $ M.fromList $ map (renderTriple render' . unPageSection) sections
- where
- renderTriple render' (k, v, r) = second (if r then render' else id) (k, v)
diff --git a/src/Text/Hakyll/Regex.hs b/src/Text/Hakyll/Regex.hs
deleted file mode 100644
index ba7ee46..0000000
--- a/src/Text/Hakyll/Regex.hs
+++ /dev/null
@@ -1,77 +0,0 @@
--- | A module that exports a simple regex interface. This code is mostly copied
--- from the regex-compat package at hackage. I decided to write this module
--- because I want to abstract the regex package used.
-module Text.Hakyll.Regex
- ( splitRegex
- , substituteRegex
- , matchesRegex
- ) where
-
-import Text.Regex.TDFA
-
--- | Match a regular expression against a string, returning more information
--- about the match.
-matchRegexAll :: Regex -> String -> Maybe (String, String, String, [String])
-matchRegexAll = matchM
-
--- | Replaces every occurance of the given regexp with the replacement string.
-subRegex :: Regex -- ^ Search pattern
- -> String -- ^ Input string
- -> String -- ^ Replacement text
- -> String -- ^ Output string
-subRegex _ "" _ = ""
-subRegex regexp inp replacement =
- let -- bre matches a backslash then capture either a backslash or some digits
- bre = makeRegex "\\\\(\\\\|[0-9]+)"
- lookup' _ [] _ = []
- lookup' [] _ _ = []
- lookup' match' repl groups =
- case matchRegexAll bre repl of
- Nothing -> repl
- Just (lead, _, trail, bgroups) ->
- let newval =
- if head bgroups == "\\"
- then "\\"
- else let index :: Int
- index = read (head bgroups) - 1
- in if index == -1
- then match'
- else groups !! index
- in lead ++ newval ++ lookup' match' trail groups
- in case matchRegexAll regexp inp of
- Nothing -> inp
- Just (lead, match', trail, groups) ->
- lead ++ lookup' match' replacement groups
- ++ subRegex regexp trail replacement
-
--- | Splits a string based on a regular expression. The regular expression
--- should identify one delimiter.
-splitRegex' :: Regex -> String -> [String]
-splitRegex' _ [] = []
-splitRegex' delim strIn = loop strIn where
- loop str = case matchOnceText delim str of
- Nothing -> [str]
- Just (firstline, _, remainder) ->
- if null remainder
- then [firstline,""]
- else firstline : loop remainder
-
--- | Split a list at a certain element.
-splitRegex :: String -> String -> [String]
-splitRegex pattern = filter (not . null)
- . splitRegex' (makeRegex pattern)
-
--- | Substitute a regex. Simplified interface. This function performs a global
--- substitution.
-substituteRegex :: String -- ^ Pattern to replace (regex).
- -> String -- ^ Replacement string.
- -> String -- ^ Input string.
- -> String -- ^ Result.
-substituteRegex pattern replacement string =
- subRegex (makeRegex pattern) string replacement
-
--- | Simple regex matching.
-matchesRegex :: String -- ^ Input string.
- -> String -- ^ Pattern to match.
- -> Bool
-matchesRegex = (=~)
diff --git a/src/Text/Hakyll/Render.hs b/src/Text/Hakyll/Render.hs
deleted file mode 100644
index aa3ef8c..0000000
--- a/src/Text/Hakyll/Render.hs
+++ /dev/null
@@ -1,126 +0,0 @@
--- | Module containing rendering functions. All these functions are used to
--- render files to the @_site@ directory.
-module Text.Hakyll.Render
- ( render
- , renderAndConcat
- , renderChain
- , static
- , css
- , writePage
- ) where
-
-import Control.Arrow ((>>>))
-import Control.Applicative ((<$>))
-import Control.Monad.Reader (liftIO)
-import System.Directory (copyFile)
-import Data.Maybe (fromMaybe)
-import qualified Data.Map as M
-
-import Text.Hakyll.Context (Context (..))
-import Text.Hakyll.HakyllMonad (Hakyll, askHakyll, getAdditionalContext)
-import Text.Hakyll.File
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.ContextManipulations
-import Text.Hakyll.Internal.CompressCss
-import Text.Hakyll.Internal.Template
-
--- | A pure render function - used internally.
-pureRender :: Template -- ^ Template to use for rendering.
- -> Context -- ^ Renderable object to render with given template.
- -> Context -- ^ The body of the result will contain the render.
-pureRender template (Context c) =
- -- Ignore $root when substituting here. We will only replace that in the
- -- final render (just before writing).
- let contextIgnoringRoot = Context $ M.insert "root" "$root" c
- body = regularSubstitute template $ contextIgnoringRoot
- in Context $ M.insert "body" body c
-
--- | This is the most simple render action. You render a @Context@ with a
--- template, and get back the result.
-render :: FilePath -- ^ Template to use for rendering.
- -> HakyllAction Context Context -- ^ The render computation.
-render templatePath = HakyllAction
- { actionDependencies = [templatePath]
- , actionUrl = Right id
- , actionFunction = \context ->
- flip pureRender context <$> readTemplate templatePath
- }
-
--- | Render each @Context@ with the given templates, then concatenate the
--- result. So, basically this function:
---
--- - Takes every @Context@.
---
--- - Renders every @Context@ with all given templates. This is comparable
--- with a renderChain action.
---
--- - Concatenates the result and returns that as a @String@.
-renderAndConcat :: [FilePath]
- -> [HakyllAction () Context]
- -> HakyllAction () String
-renderAndConcat templatePaths renderables = HakyllAction
- { actionDependencies = renders >>= actionDependencies
- , actionUrl = Right id
- , actionFunction = actionFunction'
- }
- where
- render' = chain (map render templatePaths)
- renders = map (>>> render') renderables
-
- actionFunction' _ = do
- contexts <- mapM (runHakyllAction . (>>> takeBody)) renders
- return $ concat contexts
-
--- | Chain a render action for a page with a number of templates. This will
--- also write the result to the site destination. This is the preferred way
--- to do general rendering.
---
--- > renderChain [ "templates/notice.html"
--- > , "templates/default.html"
--- > ] $ createPagePath "warning.html"
---
--- This code will first render @warning.html@ using @templates/notice.html@,
--- and will then render the result with @templates/default.html@.
-renderChain :: [FilePath]
- -> HakyllAction () Context
- -> Hakyll ()
-renderChain templatePaths initial =
- runHakyllActionIfNeeded renderChainWith'
- where
- renderChainWith' = initial >>> chain' >>> writePage
- chain' = chain $ map render templatePaths
-
--- | Mark a certain file as static, so it will just be copied when the site is
--- generated.
-static :: FilePath -> Hakyll ()
-static source = runHakyllActionIfNeeded static'
- where
- static' = createFileHakyllAction source $ do
- destination <- toDestination source
- makeDirectories destination
- liftIO $ copyFile source destination
-
--- | Render a css file, compressing it.
-css :: FilePath -> Hakyll ()
-css source = runHakyllActionIfNeeded css'
- where
- css' = createFileHakyllAction source $ do
- contents <- liftIO $ readFile source
- destination <- toDestination source
- makeDirectories destination
- liftIO $ writeFile destination (compressCss contents)
-
--- | Write a page to the site destination. Final action after render
--- chains and such.
-writePage :: HakyllAction Context ()
-writePage = createHakyllAction $ \(Context initialContext) -> do
- additionalContext' <- unContext <$> askHakyll getAdditionalContext
- let url = fromMaybe (error "No url defined at write time.")
- (M.lookup "url" initialContext)
- body = fromMaybe "" (M.lookup "body" initialContext)
- let context = additionalContext' `M.union` M.singleton "root" (toRoot url)
- destination <- toDestination url
- makeDirectories destination
- -- Substitute $root here, just before writing.
- liftIO $ writeFile destination $ finalSubstitute (fromString body)
- (Context context)
diff --git a/src/Text/Hakyll/Tags.hs b/src/Text/Hakyll/Tags.hs
deleted file mode 100644
index d427aa5..0000000
--- a/src/Text/Hakyll/Tags.hs
+++ /dev/null
@@ -1,172 +0,0 @@
--- | Module containing some specialized functions to deal with tags.
--- This Module follows certain conventions. Stick with them.
---
--- More concrete: all functions in this module assume that the tags are
--- located in the @tags@ field, and separated by commas. An example file
--- @foo.markdown@ could look like:
---
--- > ---
--- > author: Philip K. Dick
--- > title: Do androids dream of electric sheep?
--- > tags: future, science fiction, humanoid
--- > ---
--- > The novel is set in a post-apocalyptic near future, where the Earth and
--- > its populations have been damaged greatly by Nuclear...
---
--- All the following functions would work with such a format. In addition to
--- tags, Hakyll also supports categories. The convention when using categories
--- is to place pages in subdirectories.
---
--- An example, the page @posts\/coding\/2010-01-28-hakyll-categories.markdown@
--- Tags or categories are read using the @readTagMap@ and @readCategoryMap@
--- functions. Because categories are implemented using tags - categories can
--- be seen as tags, with the restriction that a page can only have one
--- category - all functions for tags also work with categories.
---
--- When reading a @TagMap@ (which is also used for category maps) using the
--- @readTagMap@ or @readCategoryMap@ function, you also have to give a unique
--- identifier to it. This identifier is simply for caching reasons, so Hakyll
--- can tell different maps apart; it has no other use.
---
-module Text.Hakyll.Tags
- ( TagMap
- , readTagMap
- , readCategoryMap
- , withTagMap
- , renderTagCloud
- , renderTagLinks
- ) where
-
-import qualified Data.Map as M
-import Data.List (intercalate)
-import Data.Maybe (fromMaybe, maybeToList)
-import Control.Arrow (second, (>>>))
-import Control.Applicative ((<$>))
-import System.FilePath
-
-import Text.Blaze.Renderer.String (renderHtml)
-import Text.Blaze.Html5 ((!), string, stringValue)
-import qualified Text.Blaze.Html5 as H
-import qualified Text.Blaze.Html5.Attributes as A
-
-import Text.Hakyll.Context (Context (..))
-import Text.Hakyll.ContextManipulations (changeValue)
-import Text.Hakyll.CreateContext (createPage)
-import Text.Hakyll.HakyllMonad (Hakyll)
-import Text.Hakyll.Regex
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Util
-import Text.Hakyll.Internal.Cache
-
--- | Type for a tag map.
---
--- This is a map associating tags or categories to the appropriate pages
--- using that tag or category. In the case of categories, each path will only
--- appear under one category - this is not the case with tags.
-type TagMap = M.Map String [HakyllAction () Context]
-
--- | Read a tag map. This is an internally used function that can be used for
--- tags as well as for categories.
-readMap :: (Context -> [String]) -- ^ Function to get tags from a context.
- -> String -- ^ Unique identifier for the tagmap.
- -> [FilePath]
- -> HakyllAction () TagMap
-readMap getTagsFunction identifier paths = HakyllAction
- { actionDependencies = paths
- , actionUrl = Right id
- , actionFunction = actionFunction'
- }
- where
- fileName = "tagmaps" </> identifier
-
- actionFunction' _ = do
- isCacheMoreRecent' <- isCacheMoreRecent fileName paths
- assocMap <- if isCacheMoreRecent'
- then M.fromAscList <$> getFromCache fileName
- else do assocMap' <- readTagMap'
- storeInCache (M.toAscList assocMap') fileName
- return assocMap'
- return $ M.map (map createPage) assocMap
-
- -- TODO: preserve order
- readTagMap' :: Hakyll (M.Map String [FilePath])
- readTagMap' = do
- pairs' <- concat <$> mapM pairs paths
- return $ M.fromListWith (flip (++)) pairs'
-
- -- | Read a page, and return an association list where every tag is
- -- associated with some paths. Of course, this will always be just one
- -- @FilePath@ here.
- pairs :: FilePath -> Hakyll [(String, [FilePath])]
- pairs path = do
- context <- runHakyllAction $ createPage path
- let tags = getTagsFunction context
- return $ map (\tag -> (tag, [path])) tags
-
--- | Read a @TagMap@, using the @tags@ metadata field.
-readTagMap :: String -- ^ Unique identifier for the map.
- -> [FilePath] -- ^ Paths to get tags from.
- -> HakyllAction () TagMap
-readTagMap = readMap getTagsFunction
- where
- getTagsFunction = map trim . splitRegex ","
- . fromMaybe [] . M.lookup "tags" . unContext
-
--- | Read a @TagMap@, using the subdirectories the pages are placed in.
-readCategoryMap :: String -- ^ Unique identifier for the map.
- -> [FilePath] -- ^ Paths to get tags from.
- -> HakyllAction () TagMap
-readCategoryMap = readMap $ maybeToList . M.lookup "category" . unContext
-
--- | Perform a @Hakyll@ action on every item in the tag map.
---
-withTagMap :: HakyllAction () TagMap
- -> (String -> [HakyllAction () Context] -> Hakyll ())
- -> Hakyll ()
-withTagMap tagMap function = runHakyllAction (tagMap >>> action)
- where
- action = createHakyllAction (mapM_ (uncurry function) . M.toList)
-
--- | Render a tag cloud.
-renderTagCloud :: (String -> String) -- ^ Function to produce an url for a tag.
- -> Float -- ^ Smallest font size, in percent.
- -> Float -- ^ Biggest font size, in percent.
- -> HakyllAction TagMap String
-renderTagCloud urlFunction minSize maxSize = createHakyllAction renderTagCloud'
- where
- renderTagCloud' tagMap =
- return $ intercalate " " $ map (renderTag tagMap) (tagCount tagMap)
-
- renderTag tagMap (tag, count) = renderHtml $
- H.a ! A.style (stringValue $ "font-size: " ++ sizeTag tagMap count)
- ! A.href (stringValue $ urlFunction tag)
- $ string tag
-
- sizeTag tagMap count = show (size' :: Int) ++ "%"
- where
- size' = floor $ minSize + relative tagMap count * (maxSize - minSize)
-
- minCount = minimum . map snd . tagCount
- maxCount = maximum . map snd . tagCount
- relative tagMap count = (count - minCount tagMap) /
- (1 + maxCount tagMap - minCount tagMap)
-
- tagCount = map (second $ fromIntegral . length) . M.toList
-
--- | Render all tags to links.
---
--- On your site, it is nice if you can display the tags on a page, but
--- naturally, most people would expect these to be clickable.
---
--- So, this function takes a function to produce a URL for a given tag, and
--- applies it to all tags.
---
--- Note that it is your own responsibility to ensure a page with such a URL
--- exists.
-renderTagLinks :: (String -> String) -- ^ Function to produce an url for a tag.
- -> HakyllAction Context Context
-renderTagLinks urlFunction = changeValue "tags" renderTagLinks'
- where
- renderTagLinks' = intercalate ", "
- . map ((\t -> link t $ urlFunction t) . trim)
- . splitRegex ","
diff --git a/src/Text/Hakyll/Util.hs b/src/Text/Hakyll/Util.hs
deleted file mode 100644
index e032c52..0000000
--- a/src/Text/Hakyll/Util.hs
+++ /dev/null
@@ -1,34 +0,0 @@
--- | Miscellaneous text manipulation functions.
-module Text.Hakyll.Util
- ( trim
- , stripHtml
- , link
- ) where
-
-import Data.Char (isSpace)
-
-import Text.Blaze.Html5 ((!), string, stringValue, a)
-import Text.Blaze.Html5.Attributes (href)
-import Text.Blaze.Renderer.String (renderHtml)
-
--- | Trim a string (drop spaces, tabs and newlines at both sides).
-trim :: String -> String
-trim = reverse . trim' . reverse . trim'
- where
- trim' = dropWhile isSpace
-
--- | Strip html tags from the given string.
-stripHtml :: String -> String
-stripHtml [] = []
-stripHtml str = let (beforeTag, rest) = break (== '<') str
- (_, afterTag) = break (== '>') rest
- in beforeTag ++ stripHtml (drop 1 afterTag)
-
--- | Make a HTML link.
---
--- > link "foo" "bar.html" == "<a href='bar.html'>foo</a>"
-link :: String -- ^ Link text.
- -> String -- ^ Link destination.
- -> String
-link text destination = renderHtml $ a ! href (stringValue destination)
- $ string text