path: root/src/Hakyll/Core/Compiler.hs
author    Jasper Van der Jeugt <m@jaspervdj.be>  2012-11-13 17:31:03 +0100
committer Jasper Van der Jeugt <m@jaspervdj.be>  2012-11-13 17:31:03 +0100
commit    f0af2a3b79ea7eea3f521f79fd903f9023ec85df (patch)
tree      bbc460b65ab52879c616dffce1bb32fe8d8df2ac /src/Hakyll/Core/Compiler.hs
parent    d2e913f42434841c584b97ae9d5417ff2737c0ce (diff)
download  hakyll-f0af2a3b79ea7eea3f521f79fd903f9023ec85df.tar.gz
WIP
Diffstat (limited to 'src/Hakyll/Core/Compiler.hs')
-rw-r--r--  src/Hakyll/Core/Compiler.hs  408
1 file changed, 76 insertions(+), 332 deletions(-)
diff --git a/src/Hakyll/Core/Compiler.hs b/src/Hakyll/Core/Compiler.hs
index ef9b03c..e59506f 100644
--- a/src/Hakyll/Core/Compiler.hs
+++ b/src/Hakyll/Core/Compiler.hs
@@ -1,97 +1,8 @@
--- | A Compiler manages targets and dependencies between targets
---
--- The most distinguishing property of a 'Compiler' is that it is an Arrow. A
--- compiler of the type @Compiler a b@ is simply a compilation phase which takes
--- an @a@ as input, and produces a @b@ as output.
---
--- Compilers are chained using the '>>>' arrow operation. If we have a compiler
---
--- > getResourceString :: Compiler Resource String
---
--- which reads the resource, and a compiler
---
--- > readPage :: Compiler String (Page String)
---
--- we can chain these two compilers to get a
---
--- > (getResourceString >>> readPage) :: Compiler Resource (Page String)
---
--- Most compilers can be created by combining smaller compilers using '>>>'.
---
--- More advanced constructions are also possible using arrows, and sometimes
--- these are needed. For a good introduction to arrows, you can refer to
---
--- <http://en.wikibooks.org/wiki/Haskell/Understanding_arrows>
---
--- A construction worth writing a few paragraphs about here is the family of
--- 'require' functions. Different variants of this function are exported here,
--- but they all serve more or less the same goal.
---
--- When you use only '>>>' to chain your compilers, you get a linear pipeline --
--- it is not possible to add extra items from other compilers along the way.
--- This is where the 'require' functions come in.
---
--- These functions allow you to reference other items, which are then added to
--- the pipeline. Let's look at this crappy ASCII illustration which represents
--- a pretty common scenario:
---
--- > read resource >>> pandoc render >>> layout >>> relativize URLs
--- >
--- > @templates/fancy.html@
---
--- We want to construct a pipeline of compilers to go from our resource to a
--- proper webpage. However, the @layout@ compiler takes more than just the
--- rendered page as input: it needs the @templates/fancy.html@ template as well.
---
--- This is an example of where we need the @require@ function. We can solve
--- this using a construction that looks like:
---
--- > ... >>> pandoc render >>> require >>> layout >>> ...
--- > |
--- > @templates/fancy.html@ ------/
---
--- This illustration can help us understand the type signature of 'require'.
---
--- > require :: (Binary a, Typeable a, Writable a)
--- > => Identifier a
--- > -> (b -> a -> c)
--- > -> Compiler b c
---
--- Let's look at it in detail:
---
--- > (Binary a, Typeable a, Writable a)
---
--- These are constraints for the @a@ type. @a@ (the template) needs to have
--- certain properties for it to be required.
---
--- > Identifier a
---
--- This is simply @templates/fancy.html@: the 'Identifier' of the item we want
--- to 'require', in other words, the name of the item we want to add to the
--- pipeline somehow.
---
--- > (b -> a -> c)
---
--- This is a function given by the user, specifying /how/ the two items shall be
--- merged. @b@ is the output of the previous compiler, and @a@ is the item we
--- just required -- the template. This means @c@ will be the final output of the
--- 'require' combinator.
---
--- > Compiler b c
---
--- Indeed, we have now constructed a compiler which takes a @b@ and produces a
--- @c@. This means that we have a linear pipeline again, thanks to the 'require'
--- function. So, the 'require' function actually helps to reduce the complexity
--- of Hakyll applications!
---
--- Note that require will fetch a previously compiled item: in our example, an
--- item of type @a@. It is /very/ important that the compiler which produced
--- this value produced the right type as well!
---
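A minimal sketch of the pipeline this removed documentation walks through, assuming the signatures quoted above are in scope and an OverloadedStrings-style literal for the 'Identifier'; the merge function is left as a parameter because its concrete type depends on what the template's compiler produced:

    {-# LANGUAGE OverloadedStrings #-}
    import Control.Arrow ((>>>))

    -- Sketch only: chain the phases from the illustration above and splice
    -- in @templates/fancy.html@ via 'require'. The merge function is user
    -- supplied, matching the (b -> a -> c) argument documented above.
    fancyPage :: (Binary t, Typeable t, Writable t)
              => (Page String -> t -> Page String)
              -> Compiler Resource (Page String)
    fancyPage merge =
        getResourceString
            >>> readPage
            >>> require "templates/fancy.html" merge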
-{-# LANGUAGE GeneralizedNewtypeDeriving, ScopedTypeVariables #-}
+--------------------------------------------------------------------------------
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE ScopedTypeVariables #-}
module Hakyll.Core.Compiler
( Compiler
- , runCompiler
, getIdentifier
, getRoute
, getRouteFor
@@ -99,95 +10,52 @@ module Hakyll.Core.Compiler
, getResourceString
, getResourceLBS
, getResourceWith
- , fromDependency
- , require_
, require
- , requireA
- , requireAll_
, requireAll
- , requireAllA
, cached
, unsafeCompiler
- , traceShowCompiler
+ , logCompiler
, timedCompiler
- , byPattern
- , byExtension
) where
-import Prelude hiding ((.), id)
-import Control.Arrow ((>>>), (&&&), arr, first)
-import Control.Applicative ((<$>), (*>))
-import Control.Exception (SomeException, handle)
-import Control.Monad.Reader (ask)
-import Control.Monad.Trans (liftIO)
-import Control.Monad.Error (throwError)
-import Control.Category (Category, (.), id)
-import Data.List (find)
-import System.Environment (getProgName)
-import System.FilePath (takeExtension)
-import Data.Binary (Binary)
-import Data.Typeable (Typeable)
-import Data.ByteString.Lazy (ByteString)
-
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-import Hakyll.Core.CompiledItem
-import Hakyll.Core.Writable
-import Hakyll.Core.ResourceProvider
-import Hakyll.Core.Compiler.Internal
-import Hakyll.Core.Store (Store)
-import Hakyll.Core.Routes
-import Hakyll.Core.Logger
-import qualified Hakyll.Core.Store as Store
-
--- | Run a compiler, yielding the resulting target and its dependencies. This
--- version of 'runCompilerJob' also stores the result
---
-runCompiler :: Compiler CompiledItem -- ^ Compiler to run
- -> Identifier () -- ^ Target identifier
- -> ResourceProvider -- ^ Resource provider
- -> [Identifier ()] -- ^ Universe
- -> Routes -- ^ Route
- -> Store -- ^ Store
- -> Bool -- ^ Was the resource modified?
- -> Logger -- ^ Logger
- -> IO (Throwing CompiledItem) -- ^ Resulting item
-runCompiler compiler id' provider universe routes store modified logger = do
- -- Run the compiler job
- result <- handle (\(e :: SomeException) -> return $ Left $ show e) $
- runCompilerJob compiler id' provider universe routes store modified
- logger
+--------------------------------------------------------------------------------
+import Control.Applicative ((<$>))
+import Data.Binary (Binary)
+import Data.ByteString.Lazy (ByteString)
+import Data.Typeable (Typeable)
+import Prelude hiding (id, (.))
+import System.Environment (getProgName)
- -- Inspect the result
- case result of
- -- In case we compiled an item, we will store a copy in the cache first,
- -- before we return control. This makes sure the compiled item can later
- -- be accessed by e.g. require.
- Right (CompiledItem x) ->
- Store.set store ["Hakyll.Core.Compiler.runCompiler", show id'] x
- -- Otherwise, we do nothing here
- _ -> return ()
+--------------------------------------------------------------------------------
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Compiler.Require
+import Hakyll.Core.Identifier
+import Hakyll.Core.Logger
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Routes
+import qualified Hakyll.Core.Store as Store
+import Hakyll.Core.Writable
- return result
+--------------------------------------------------------------------------------
-- | Get the identifier of the item that is currently being compiled
---
-getIdentifier :: Compiler (Identifier b)
-getIdentifier = fromJob $ const $ CompilerM $
- castIdentifier . compilerIdentifier <$> ask
+getIdentifier :: Compiler Identifier
+getIdentifier = compilerIdentifier <$> compilerAsk
+
+--------------------------------------------------------------------------------
-- | Get the route we are using for this item
---
getRoute :: Compiler (Maybe FilePath)
-getRoute = getIdentifier >>> getRouteFor
+getRoute = getIdentifier >>= getRouteFor
+
+--------------------------------------------------------------------------------
-- | Get the route for a specified item
---
-getRouteFor :: Compiler (Identifier a -> Maybe FilePath)
-getRouteFor = fromJob $ \identifier -> CompilerM $ do
- routes <- compilerRoutes <$> ask
+getRouteFor :: Identifier -> Compiler (Maybe FilePath)
+getRouteFor identifier = do
+ routes <- compilerRoutes <$> compilerAsk
return $ runRoutes routes identifier
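Because the reworked 'Compiler' is used monadically here, these accessors compose with plain do-notation; a small sketch (not part of the diff) that turns the current route into a root-relative path:

    currentPath :: Compiler FilePath
    currentPath = do
        mroute <- getRoute              -- Maybe FilePath for the current item
        return $ case mroute of
            Nothing    -> "#"           -- the item has no route
            Just route -> '/' : route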
@@ -197,7 +65,6 @@ getResourceBody :: Compiler String
getResourceBody = getResourceWith resourceBody
-
--------------------------------------------------------------------------------
-- | Get the resource we are compiling as a string
getResourceString :: Compiler String
@@ -213,188 +80,65 @@ getResourceLBS = getResourceWith $ const resourceLBS
--------------------------------------------------------------------------------
-- | Overloadable function for 'getResourceString' and 'getResourceLBS'
-getResourceWith :: (ResourceProvider -> Identifier a -> IO b) -> Compiler b
-getResourceWith reader = fromJob $ \_ -> CompilerM $ do
- provider <- compilerResourceProvider <$> ask
- r <- compilerIdentifier <$> ask
- let filePath = toFilePath r
- if resourceExists provider r
- then liftIO $ reader provider $ castIdentifier r
- else throwError $ error' filePath
- where
- error' id' = "Hakyll.Core.Compiler.getResourceWith: resource "
- ++ show id' ++ " not found"
-
--- | Auxiliary: get a dependency
---
-getDependency :: (Binary a, Writable a, Typeable a)
- => Identifier a -> CompilerM a
-getDependency id' = CompilerM $ do
- store <- compilerStore <$> ask
- result <- liftIO $
- Store.get store ["Hakyll.Core.Compiler.runCompiler", show id']
- case result of
- Store.NotFound -> throwError notFound
- Store.WrongType e r -> throwError $ wrongType e r
- Store.Found x -> return x
- where
- notFound =
- "Hakyll.Core.Compiler.getDependency: " ++ show id' ++ " was " ++
- "not found in the cache, the cache might be corrupted or " ++
- "the item you are referring to might not exist"
- wrongType e r =
- "Hakyll.Core.Compiler.getDependency: " ++ show id' ++ " was found " ++
- "in the cache, but does not have the right type: expected " ++ show e ++
- " but got " ++ show r
-
--- | Variant of 'require' which drops the current value
---
-require_ :: (Binary a, Typeable a, Writable a)
- => Identifier a
- -> Compiler a
-require_ identifier =
- fromDependency identifier >>> fromJob (const $ getDependency identifier)
-
--- | Require another target. Using this function ensures automatic handling of
--- dependencies
---
-{-
-require :: (Binary a, Typeable a, Writable a)
- => Identifier a
- -> (b -> a -> c)
- -> Compiler b c
-require identifier = requireA identifier . arr . uncurry
-
--- | Arrow-based variant of 'require'
---
-requireA :: (Binary a, Typeable a, Writable a)
- => Identifier a
- -> Compiler (b, a) c
- -> Compiler b c
-requireA identifier = (id &&& require_ identifier >>>)
--}
-
--- | Variant of 'requireAll' which drops the current value
---
-requireAll_ :: (Binary a, Typeable a, Writable a)
- => Pattern a
- -> Compiler [a]
-requireAll_ pattern = fromDependencies (const getDeps) *> fromJob requireAll_'
+getResourceWith :: (ResourceProvider -> Identifier -> IO a) -> Compiler a
+getResourceWith reader = do
+ provider <- compilerProvider <$> compilerAsk
+ id' <- compilerIdentifier <$> compilerAsk
+ let filePath = toFilePath id'
+ if resourceExists provider id'
+ then compilerUnsafeIO $ reader provider id'
+ else compilerThrow $ error' filePath
where
- getDeps = map castIdentifier . filterMatches pattern . map castIdentifier
- requireAll_' = const $ CompilerM $ do
- deps <- getDeps . compilerUniverse <$> ask
- mapM (unCompilerM . getDependency) deps
+ error' fp = "Hakyll.Core.Compiler.getResourceWith: resource " ++
+ show fp ++ " not found"
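The reader argument makes 'getResourceWith' the overloadable building block; a sketch of a custom accessor (not part of the diff) that ignores the provider and inspects the file on disk directly:

    import qualified Data.ByteString.Lazy as BL

    -- Sketch only: the size in bytes of the resource being compiled.
    getResourceSize :: Compiler Integer
    getResourceSize = getResourceWith $ \_provider identifier ->
        fmap (fromIntegral . BL.length) (BL.readFile (toFilePath identifier))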
--- | Require a number of targets. Using this function ensures automatic handling
--- of dependencies
---
-{-
-requireAll :: (Binary a, Typeable a, Writable a)
- => Pattern a
- -> (b -> [a] -> c)
- -> Compiler b c
-requireAll pattern = requireAllA pattern . arr . uncurry
-
--- | Arrow-based variant of 'requireAll'
---
-requireAllA :: (Binary a, Typeable a, Writable a)
- => Pattern a
- -> Compiler (b, [a]) c
- -> Compiler b c
-requireAllA pattern = (id &&& requireAll_ pattern >>>)
--}
+--------------------------------------------------------------------------------
cached :: (Binary a, Typeable a, Writable a)
=> String
-> Compiler a
-> Compiler a
-cached name (Compiler d j) = Compiler d $ CompilerM $ do
- logger <- compilerLogger <$> ask
- identifier <- castIdentifier . compilerIdentifier <$> ask
- store <- compilerStore <$> ask
- modified <- compilerResourceModified <$> ask
- progName <- liftIO getProgName
- report logger $ "Checking cache: " ++ if modified then "modified" else "OK"
+cached name compiler = do
+ logger <- compilerLogger <$> compilerAsk
+ id' <- compilerIdentifier <$> compilerAsk
+ store <- compilerStore <$> compilerAsk
+ provider <- compilerProvider <$> compilerAsk
+ modified <- compilerUnsafeIO $ resourceModified provider id'
+ compilerUnsafeIO $ report logger $
+ "Checking cache: " ++ if modified then "modified" else "OK"
if modified
- then do v <- unCompilerM $ j ()
- liftIO $ Store.set store [name, show identifier] v
- return v
- else do v <- liftIO $ Store.get store [name, show identifier]
- case v of Store.Found v' -> return v'
- _ -> throwError (error' progName)
+ then do
+ x <- compiler
+ compilerUnsafeIO $ Store.set store [name, show id'] x
+ return x
+ else do
+ x <- compilerUnsafeIO $ Store.get store [name, show id']
+ progName <- compilerUnsafeIO getProgName
+ case x of Store.Found x' -> return x'
+ _ -> compilerThrow (error' progName)
where
error' progName =
"Hakyll.Core.Compiler.cached: Cache corrupt! " ++
"Try running: " ++ progName ++ " clean"
--- | Create an unsafe compiler from a function in IO
---
-unsafeCompiler :: (a -> IO b) -- ^ Function to lift
- -> Compiler (a -> b) -- ^ Resulting compiler
-unsafeCompiler f = fromJob $ CompilerM . liftIO . f
--- | Compiler for debugging purposes
---
-traceShowCompiler :: Show a => Compiler a a
-traceShowCompiler = fromJob $ \x -> CompilerM $ do
- logger <- compilerLogger <$> ask
- report logger $ show x
- return x
+--------------------------------------------------------------------------------
+unsafeCompiler :: IO a -> Compiler a
+unsafeCompiler = compilerUnsafeIO
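For instance (a sketch, not part of the diff), lifting a plain IO action that reads a file Hakyll does not track as a dependency; the path is hypothetical:

    buildInfo :: Compiler String
    buildInfo = unsafeCompiler (readFile "build-info.txt")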
--- | Log and time a compiler
---
-timedCompiler :: String -- ^ Message
- -> Compiler a b -- ^ Compiler to time
- -> Compiler a b -- ^ Resulting compiler
-timedCompiler msg (Compiler d j) = Compiler d $ \x -> CompilerM $ do
- logger <- compilerLogger <$> ask
- timed logger msg $ unCompilerM $ j x
--- | Choose a compiler by identifier
---
--- For example, assume that most content files need to be compiled
--- normally, but a select few need an extra step in the pipeline:
---
--- > compile $ pageCompiler >>> byPattern id
--- > [ ("projects.md", addProjectListCompiler)
--- > , ("sitemap.md", addSiteMapCompiler)
--- > ]
---
-byPattern :: Compiler a b -- ^ Default compiler
- -> [(Pattern (), Compiler a b)] -- ^ Choices
- -> Compiler a b -- ^ Resulting compiler
-byPattern defaultCompiler choices = Compiler deps job
- where
- -- Lookup the compiler, give an error when it is not found
- lookup' identifier = maybe defaultCompiler snd $
- find (\(p, _) -> matches p identifier) choices
- -- Collect the dependencies of the choice
- deps = do
- identifier <- castIdentifier . dependencyIdentifier <$> ask
- compilerDependencies $ lookup' identifier
- -- Collect the job of the choice
- job x = CompilerM $ do
- identifier <- castIdentifier . compilerIdentifier <$> ask
- unCompilerM $ compilerJob (lookup' identifier) x
+--------------------------------------------------------------------------------
+-- | Compiler for debugging purposes
+logCompiler :: String -> Compiler ()
+logCompiler msg = do
+ logger <- compilerLogger <$> compilerAsk
+ compilerUnsafeIO $ report logger msg
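A log line can now simply be interleaved with other monadic steps; a sketch (not part of the diff):

    tracedBody :: Compiler String
    tracedBody = do
        logCompiler "reading the resource body"
        getResourceBody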
--- | Choose a compiler by extension
---
--- Example:
---
--- > match "css/*" $ do
--- > route $ setExtension "css"
--- > compile $ byExtension (error "Not a (S)CSS file")
--- > [ (".css", compressCssCompiler)
--- > , (".scss", sass)
--- > ]
---
--- This piece of code will select the @compressCssCompiler@ for @.css@ files,
--- and the @sass@ compiler (defined elsewhere) for @.scss@ files.
---
-byExtension :: Compiler a b -- ^ Default compiler
- -> [(String, Compiler a b)] -- ^ Choices
- -> Compiler a b -- ^ Resulting compiler
-byExtension defaultCompiler = byPattern defaultCompiler . map (first extPattern)
- where
- extPattern c = predicate $ (== c) . takeExtension . toFilePath
+
+--------------------------------------------------------------------------------
+-- | Log and time a compiler
+timedCompiler :: String -- ^ Message
+ -> Compiler a -- ^ Compiler to time
+ -> Compiler a -- ^ Resulting compiler
+timedCompiler msg compiler = Compiler $ \r ->
+ timed (compilerLogger r) msg $ unCompiler compiler r
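A sketch of usage (not part of the diff); the message and the elapsed time are reported through the Logger carried in the compiler's environment:

    timedBody :: Compiler String
    timedBody = timedCompiler "Reading resource" getResourceString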