summaryrefslogtreecommitdiff
path: root/src/Text/Hakyll
diff options
context:
space:
mode:
authorJasper Van der Jeugt <jaspervdj@gmail.com>2010-01-19 16:34:41 +0100
committerJasper Van der Jeugt <jaspervdj@gmail.com>2010-01-19 16:34:41 +0100
commit68facf83ec0c223b9c033cc632041d45732ab0e7 (patch)
tree8a04a6a5ae5f7fd52537c062deecfaf7d6aaa59d /src/Text/Hakyll
parentfe77089955dbf1002496e34824da13bfaf64a898 (diff)
downloadhakyll-68facf83ec0c223b9c033cc632041d45732ab0e7.tar.gz
Removed page caching.
I am removing page caching for the following reasons (in no particular order): - It is very error-prone, because the pandoc output might interfere with the Hakyll page layout (and thus pages would be read back incorrectly). - Timestamping is a much better method of saving compilation time. - It would also sometimes mangle Unicode. - Hakyll is aimed at small to medium sites, so this caching would not bring a huge speedup anyway. Note: timestamp checking is still in place!
Diffstat (limited to 'src/Text/Hakyll')
-rw-r--r--src/Text/Hakyll/File.hs5
-rw-r--r--src/Text/Hakyll/Page.hs49
2 files changed, 9 insertions, 45 deletions
diff --git a/src/Text/Hakyll/File.hs b/src/Text/Hakyll/File.hs
index 1becb46..af40500 100644
--- a/src/Text/Hakyll/File.hs
+++ b/src/Text/Hakyll/File.hs
@@ -2,7 +2,6 @@
-- files and directories.
module Text.Hakyll.File
( toDestination
- , toCache
, toURL
, toRoot
, removeSpaces
@@ -36,10 +35,6 @@ removeLeadingSeparator path
toDestination :: FilePath -> FilePath
toDestination path = "_site" </> removeLeadingSeparator path
--- | Convert a relative filepath to a filepath in the cache (@_cache@).
-toCache :: FilePath -> FilePath
-toCache path = "_cache" </> removeLeadingSeparator path
-
-- | Get the url for a given page.
toURL :: FilePath -> FilePath
toURL path = if takeExtension path `elem` [ ".markdown"
diff --git a/src/Text/Hakyll/Page.hs b/src/Text/Hakyll/Page.hs
index d120443..dfd0583 100644
--- a/src/Text/Hakyll/Page.hs
+++ b/src/Text/Hakyll/Page.hs
@@ -4,15 +4,15 @@ module Text.Hakyll.Page
, getValue
, getBody
, readPage
+ , splitAtDelimiters
) where
import qualified Data.Map as M
-import qualified Data.List as L
+import Data.List (isPrefixOf)
import Data.Char (isSpace)
import Data.Maybe (fromMaybe)
import Control.Parallel.Strategies (rdeepseq, ($|))
import Control.Monad.Reader (liftIO)
-import Control.Monad (unless)
import System.FilePath (takeExtension)
import System.IO
@@ -81,33 +81,7 @@ splitAtDelimiters ls@(x:xs)
-- | Check if the given string is a metadata delimiter.
isDelimiter :: String -> Bool
-isDelimiter = L.isPrefixOf "---"
-
--- | Used for caching of files.
-cachePage :: Page -> Hakyll ()
-cachePage page@(Page mapping) = do
- makeDirectories destination
- liftIO writePageToCache
- where
- (sectionMetaData, simpleMetaData) = M.partition (elem '\n')
- (M.delete "body" mapping)
-
- writePageToCache = do
- handle <- openFile destination WriteMode
- hPutStrLn handle "---"
- mapM_ (writePair handle) $ M.toList simpleMetaData
- mapM_ (writeSection handle) $ M.toList sectionMetaData
- hPutStrLn handle "---"
- hPutStrLn handle $ getBody page
- hClose handle
-
- writePair h (k, v) = do hPutStr h $ k ++ ": " ++ v
- hPutStrLn h ""
-
- writeSection h (k, v) = do hPutStrLn h $ "--- " ++ k
- hPutStrLn h v
-
- destination = toCache $ getURL page
+isDelimiter = isPrefixOf "---"
-- | Read one section of a page.
readSection :: (String -> String) -- ^ Render function.
@@ -122,7 +96,7 @@ readSection renderFunction isFirst ls
| otherwise = body (tail ls)
where
isDelimiter' = isDelimiter (head ls)
- isNamedDelimiter = head ls `matchesRegex` "----* *[a-zA-Z0-9][a-zA-Z0-9]*"
+ isNamedDelimiter = head ls `matchesRegex` "^----* *[a-zA-Z0-9][a-zA-Z0-9]*"
body ls' = [("body", renderFunction $ unlines ls')]
readSimpleMetaData = map readPair . filter (not . all isSpace)
@@ -137,11 +111,8 @@ readSection renderFunction isFirst ls
-- | Read a page from a file. Metadata is supported, and if the filename
-- has a @.markdown@ extension, it will be rendered using pandoc.
readPage :: FilePath -> Hakyll Page
-readPage pagePath = do
- -- Check cache.
- getFromCache <- isCacheValid cacheFile [pagePath]
- let path = if getFromCache then cacheFile else pagePath
- renderFunction = getRenderFunction $ takeExtension path
+readPage path = do
+ let renderFunction = getRenderFunction $ takeExtension path
sectionFunctions = map (readSection renderFunction)
(True : repeat False)
@@ -149,21 +120,19 @@ readPage pagePath = do
handle <- liftIO $ openFile path ReadMode
sections <- fmap (splitAtDelimiters . lines )
(liftIO $ hGetContents handle)
+ liftIO $ print sections
let context = concat $ zipWith ($) sectionFunctions sections
page = fromContext $ M.fromList $
[ ("url", url)
- , ("path", pagePath)
+ , ("path", path)
] ++ context
seq (($|) id rdeepseq context) $ liftIO $ hClose handle
- -- Cache if needed
- unless getFromCache $ cachePage page
return page
where
- url = toURL pagePath
- cacheFile = toCache url
+ url = toURL path
-- Make pages renderable.
instance Renderable Page where