author     Jasper Van der Jeugt <m@jaspervdj.be>    2017-06-19 11:57:23 +0200
committer  Jasper Van der Jeugt <m@jaspervdj.be>    2017-06-19 11:57:23 +0200
commit     67ecff7ad383640bc73d64edc2506c7cc648a134 (patch)
tree       6d328e43c3ab86c29a2d775fabaa23618c16fb51 /src/Hakyll/Web
parent     2df3209bafa08e6b77ee4a8598fc503269513527 (diff)
download   hakyll-67ecff7ad383640bc73d64edc2506c7cc648a134.tar.gz
Move src/ to lib/, put Init.hs in src/
Diffstat (limited to 'src/Hakyll/Web')
-rw-r--r--  src/Hakyll/Web/CompressCss.hs                  86
-rw-r--r--  src/Hakyll/Web/Feed.hs                        135
-rw-r--r--  src/Hakyll/Web/Html.hs                        184
-rw-r--r--  src/Hakyll/Web/Html/RelativizeUrls.hs          52
-rw-r--r--  src/Hakyll/Web/Paginate.hs                    153
-rw-r--r--  src/Hakyll/Web/Pandoc.hs                      164
-rw-r--r--  src/Hakyll/Web/Pandoc/Biblio.hs               115
-rw-r--r--  src/Hakyll/Web/Pandoc/Binary.hs                32
-rw-r--r--  src/Hakyll/Web/Pandoc/FileType.hs              74
-rw-r--r--  src/Hakyll/Web/Redirect.hs                     87
-rw-r--r--  src/Hakyll/Web/Tags.hs                        344
-rw-r--r--  src/Hakyll/Web/Template.hs                    154
-rw-r--r--  src/Hakyll/Web/Template/Context.hs            379
-rw-r--r--  src/Hakyll/Web/Template/Internal.hs           203
-rw-r--r--  src/Hakyll/Web/Template/Internal/Element.hs   298
-rw-r--r--  src/Hakyll/Web/Template/Internal/Trim.hs       95
-rw-r--r--  src/Hakyll/Web/Template/List.hs                91
17 files changed, 0 insertions, 2646 deletions
diff --git a/src/Hakyll/Web/CompressCss.hs b/src/Hakyll/Web/CompressCss.hs
deleted file mode 100644
index 9f61534..0000000
--- a/src/Hakyll/Web/CompressCss.hs
+++ /dev/null
@@ -1,86 +0,0 @@
---------------------------------------------------------------------------------
--- | Module used for CSS compression. The compression is currently rather
--- simple, but typically reduces the number of bytes by about 25%.
-module Hakyll.Web.CompressCss
- ( compressCssCompiler
- , compressCss
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.List (isPrefixOf)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Item
-
-
---------------------------------------------------------------------------------
--- | Compiler form of 'compressCss'
-compressCssCompiler :: Compiler (Item String)
-compressCssCompiler = fmap compressCss <$> getResourceString
-
-
---------------------------------------------------------------------------------
--- | Compress CSS to speed up your site.
-compressCss :: String -> String
-compressCss = compressSeparators . stripComments . compressWhitespace
-
-
---------------------------------------------------------------------------------
--- | Compresses certain forms of separators.
-compressSeparators :: String -> String
-compressSeparators [] = []
-compressSeparators str
- | isConstant = head str : retainConstants compressSeparators (head str) (drop 1 str)
- | stripFirst = compressSeparators (drop 1 str)
- | stripSecond = compressSeparators (head str : (drop 2 str))
- | otherwise = head str : compressSeparators (drop 1 str)
- where
- isConstant = or $ map (isOfPrefix str) ["\"", "'"]
- stripFirst = or $ map (isOfPrefix str) $ [";;", ";}"] ++ (map (\c -> " " ++ c) separators)
- stripSecond = or $ map (isOfPrefix str) $ map (\c -> c ++ " ") separators
- separators = [" ", "{", "}", ":", ";", ",", ">", "+", "!"]
-
---------------------------------------------------------------------------------
--- | Compresses all whitespace.
-compressWhitespace :: String -> String
-compressWhitespace [] = []
-compressWhitespace str
- | isConstant = head str : retainConstants compressWhitespace (head str) (drop 1 str)
- | replaceOne = compressWhitespace (' ' : (drop 1 str))
- | replaceTwo = compressWhitespace (' ' : (drop 2 str))
- | otherwise = head str : compressWhitespace (drop 1 str)
- where
- isConstant = or $ map (isOfPrefix str) ["\"", "'"]
- replaceOne = or $ map (isOfPrefix str) ["\t", "\n", "\r"]
- replaceTwo = or $ map (isOfPrefix str) [" \t", " \n", " \r", " "]
-
---------------------------------------------------------------------------------
--- | Function that strips CSS comments away.
-stripComments :: String -> String
-stripComments [] = []
-stripComments str
- | isConstant = head str : retainConstants stripComments (head str) (drop 1 str)
- | isPrefixOf "/*" str = stripComments $ eatComments $ drop 2 str
- | otherwise = head str : stripComments (drop 1 str)
- where
- isConstant = or $ map (isOfPrefix str) ["\"", "'"]
- eatComments str'
- | null str' = []
- | isPrefixOf "*/" str' = drop 2 str'
- | otherwise = eatComments $ drop 1 str'
-
---------------------------------------------------------------------------------
--- | Helper function to handle string constants correctly.
-retainConstants :: (String -> String) -> Char -> String -> String
-retainConstants f delim str
- | null str = []
- | isPrefixOf [delim] str = head str : f (drop 1 str)
- | otherwise = head str : retainConstants f delim (drop 1 str)
-
---------------------------------------------------------------------------------
--- | Helper function to check whether the first string starts with the second.
-isOfPrefix :: String -> String -> Bool
-isOfPrefix = flip isPrefixOf
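A minimal usage sketch for this module, assuming the conventional site.hs layout
where stylesheets live under css/ (the pattern is an assumption, not part of the
module itself):

-- Route the stylesheets unchanged and compress them on the way.
match "css/*" $ do
    route   idRoute
    compile compressCssCompiler

-- compressCss can also be applied to CSS strings directly, e.g.
-- compressCss "a {  color : red ; }  /* note */" == "a{color:red}"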
diff --git a/src/Hakyll/Web/Feed.hs b/src/Hakyll/Web/Feed.hs
deleted file mode 100644
index 6c6fa76..0000000
--- a/src/Hakyll/Web/Feed.hs
+++ /dev/null
@@ -1,135 +0,0 @@
---------------------------------------------------------------------------------
--- | A module that allows easy rendering of RSS and Atom feeds.
---
--- The main rendering functions (@renderRss@, @renderAtom@) all assume that
--- you pass the list of items so that the most recent entry in the feed is the
--- first item in the list.
---
--- Also note that the context should have (at least) the following fields to
--- produce a correct feed:
---
--- - @$title$@: Title of the item
---
--- - @$description$@: Description to appear in the feed
---
--- - @$url$@: URL to the item - this is usually set automatically.
---
--- In addition, the posts should be named according to the rules for
--- 'Hakyll.Web.Template.Context.dateField'
-module Hakyll.Web.Feed
- ( FeedConfiguration (..)
- , renderRss
- , renderAtom
- ) where
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Compiler.Internal
-import Hakyll.Core.Item
-import Hakyll.Core.Util.String (replaceAll)
-import Hakyll.Web.Template
-import Hakyll.Web.Template.Context
-import Hakyll.Web.Template.List
-
-
---------------------------------------------------------------------------------
-import Paths_hakyll
-
-
---------------------------------------------------------------------------------
--- | This is a data structure to keep the configuration of a feed.
-data FeedConfiguration = FeedConfiguration
- { -- | Title of the feed.
- feedTitle :: String
- , -- | Description of the feed.
- feedDescription :: String
- , -- | Name of the feed author.
- feedAuthorName :: String
- , -- | Email of the feed author.
- feedAuthorEmail :: String
- , -- | Absolute root URL of the feed site (e.g. @http://jaspervdj.be@)
- feedRoot :: String
- } deriving (Show, Eq)
-
-
---------------------------------------------------------------------------------
--- | Abstract function to render any feed.
-renderFeed :: FilePath -- ^ Feed template
- -> FilePath -- ^ Item template
- -> FeedConfiguration -- ^ Feed configuration
- -> Context String -- ^ Context for the items
- -> [Item String] -- ^ Input items
- -> Compiler (Item String) -- ^ Resulting item
-renderFeed feedPath itemPath config itemContext items = do
- feedTpl <- loadTemplate feedPath
- itemTpl <- loadTemplate itemPath
-
- protectedItems <- mapM (applyFilter protectCDATA) items
- body <- makeItem =<< applyTemplateList itemTpl itemContext' protectedItems
- applyTemplate feedTpl feedContext body
- where
- applyFilter :: (Monad m,Functor f) => (String -> String) -> f String -> m (f String)
- applyFilter tr str = return $ fmap tr str
- protectCDATA :: String -> String
- protectCDATA = replaceAll "]]>" (const "]]&gt;")
- -- Auxiliary: load a template from a datafile
- loadTemplate path = do
- file <- compilerUnsafeIO $ getDataFileName path
- unsafeReadTemplateFile file
-
- itemContext' = mconcat
- [ itemContext
- , constField "root" (feedRoot config)
- , constField "authorName" (feedAuthorName config)
- , constField "authorEmail" (feedAuthorEmail config)
- ]
-
- feedContext = mconcat
- [ bodyField "body"
- , constField "title" (feedTitle config)
- , constField "description" (feedDescription config)
- , constField "authorName" (feedAuthorName config)
- , constField "authorEmail" (feedAuthorEmail config)
- , constField "root" (feedRoot config)
- , urlField "url"
- , updatedField
- , missingField
- ]
-
- -- Take the first "updated" field from all items -- this should be the most
- -- recent.
- updatedField = field "updated" $ \_ -> case items of
- [] -> return "Unknown"
- (x : _) -> unContext itemContext' "updated" [] x >>= \cf -> case cf of
- ListField _ _ -> fail "Hakyll.Web.Feed.renderFeed: Internal error"
- StringField s -> return s
-
-
---------------------------------------------------------------------------------
--- | Render an RSS feed with a number of items.
-renderRss :: FeedConfiguration -- ^ Feed configuration
- -> Context String -- ^ Item context
- -> [Item String] -- ^ Feed items
- -> Compiler (Item String) -- ^ Resulting feed
-renderRss config context = renderFeed
- "templates/rss.xml" "templates/rss-item.xml" config
- (makeItemContext "%a, %d %b %Y %H:%M:%S UT" context)
-
-
---------------------------------------------------------------------------------
--- | Render an Atom feed with a number of items.
-renderAtom :: FeedConfiguration -- ^ Feed configuration
- -> Context String -- ^ Item context
- -> [Item String] -- ^ Feed items
- -> Compiler (Item String) -- ^ Resulting feed
-renderAtom config context = renderFeed
- "templates/atom.xml" "templates/atom-item.xml" config
- (makeItemContext "%Y-%m-%dT%H:%M:%SZ" context)
-
-
---------------------------------------------------------------------------------
--- | Copies @$updated$@ from @$published$@ if it is not already set.
-makeItemContext :: String -> Context a -> Context a
-makeItemContext fmt context = mconcat
- [dateField "published" fmt, context, dateField "updated" fmt]
diff --git a/src/Hakyll/Web/Html.hs b/src/Hakyll/Web/Html.hs
deleted file mode 100644
index 6b7ec88..0000000
--- a/src/Hakyll/Web/Html.hs
+++ /dev/null
@@ -1,184 +0,0 @@
---------------------------------------------------------------------------------
--- | Provides utilities to manipulate HTML pages
-module Hakyll.Web.Html
- ( -- * Generic
- withTags
-
- -- * Headers
- , demoteHeaders
-
- -- * Url manipulation
- , getUrls
- , withUrls
- , toUrl
- , toSiteRoot
- , isExternal
-
- -- * Stripping/escaping
- , stripTags
- , escapeHtml
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.Char (digitToInt, intToDigit,
- isDigit, toLower)
-import Data.List (isPrefixOf)
-import qualified Data.Set as S
-import System.FilePath.Posix (joinPath, splitPath,
- takeDirectory)
-import Text.Blaze.Html (toHtml)
-import Text.Blaze.Html.Renderer.String (renderHtml)
-import qualified Text.HTML.TagSoup as TS
-import Network.URI (isUnreserved, escapeURIString)
-
-
---------------------------------------------------------------------------------
--- | Map over all tags in the document
-withTags :: (TS.Tag String -> TS.Tag String) -> String -> String
-withTags f = renderTags' . map f . parseTags'
-
-
---------------------------------------------------------------------------------
--- | Map every @h1@ to an @h2@, @h2@ to @h3@, etc.
-demoteHeaders :: String -> String
-demoteHeaders = withTags $ \tag -> case tag of
- TS.TagOpen t a -> TS.TagOpen (demote t) a
- TS.TagClose t -> TS.TagClose (demote t)
- t -> t
- where
- demote t@['h', n]
- | isDigit n = ['h', intToDigit (min 6 $ digitToInt n + 1)]
- | otherwise = t
- demote t = t
-
-
---------------------------------------------------------------------------------
-isUrlAttribute :: String -> Bool
-isUrlAttribute = (`elem` ["src", "href", "data", "poster"])
-
-
---------------------------------------------------------------------------------
-getUrls :: [TS.Tag String] -> [String]
-getUrls tags = [v | TS.TagOpen _ as <- tags, (k, v) <- as, isUrlAttribute k]
-
-
---------------------------------------------------------------------------------
--- | Apply a function to each URL on a webpage
-withUrls :: (String -> String) -> String -> String
-withUrls f = withTags tag
- where
- tag (TS.TagOpen s a) = TS.TagOpen s $ map attr a
- tag x = x
- attr (k, v) = (k, if isUrlAttribute k then f v else v)
-
-
---------------------------------------------------------------------------------
--- | Customized TagSoup renderer. The default TagSoup renderer escapes CSS
--- within style tags, and doesn't properly minimize.
-renderTags' :: [TS.Tag String] -> String
-renderTags' = TS.renderTagsOptions TS.RenderOptions
- { TS.optRawTag = (`elem` ["script", "style"]) . map toLower
- , TS.optMinimize = (`S.member` minimize) . map toLower
- , TS.optEscape = id
- }
- where
- -- A list of elements which must be minimized
- minimize = S.fromList
- [ "area", "br", "col", "embed", "hr", "img", "input", "meta", "link"
- , "param"
- ]
-
-
---------------------------------------------------------------------------------
--- | Customized TagSoup parser: do not decode any entities.
-parseTags' :: String -> [TS.Tag String]
-parseTags' = TS.parseTagsOptions (TS.parseOptions :: TS.ParseOptions String)
- { TS.optEntityData = \(str, b) -> [TS.TagText $ "&" ++ str ++ [';' | b]]
- , TS.optEntityAttrib = \(str, b) -> ("&" ++ str ++ [';' | b], [])
- }
-
-
---------------------------------------------------------------------------------
--- | Convert a filepath to an URL starting from the site root
---
--- Example:
---
--- > toUrl "foo/bar.html"
---
--- Result:
---
--- > "/foo/bar.html"
---
--- This also sanitizes the URL, e.g. converting spaces into '%20'
-toUrl :: FilePath -> String
-toUrl url = case url of
- ('/' : xs) -> '/' : sanitize xs
- xs -> '/' : sanitize xs
- where
- -- Everything but unreserved characters should be escaped: we are
- -- sanitising the path, so reserved characters which have a meaning in a
- -- URI do not appear. `/` is special-cased because it has a meaning in
- -- FilePaths as well as in URIs.
- sanitize = escapeURIString (\c -> c == '/' || isUnreserved c)
-
-
---------------------------------------------------------------------------------
--- | Get the relative url to the site root, for a given (absolute) url
-toSiteRoot :: String -> String
-toSiteRoot = emptyException . joinPath . map parent
- . filter relevant . splitPath . takeDirectory
- where
- parent = const ".."
- emptyException [] = "."
- emptyException x = x
- relevant "." = False
- relevant "/" = False
- relevant "./" = False
- relevant _ = True
-
-
---------------------------------------------------------------------------------
--- | Check if an URL links to an external HTTP(S) source
-isExternal :: String -> Bool
-isExternal url = any (flip isPrefixOf url) ["http://", "https://", "//"]
-
-
---------------------------------------------------------------------------------
--- | Strip all HTML tags from a string
---
--- Example:
---
--- > stripTags "<p>foo</p>"
---
--- Result:
---
--- > "foo"
---
--- This also works for incomplete tags
---
--- Example:
---
--- > stripTags "<p>foo</p"
---
--- Result:
---
--- > "foo"
-stripTags :: String -> String
-stripTags [] = []
-stripTags ('<' : xs) = stripTags $ drop 1 $ dropWhile (/= '>') xs
-stripTags (x : xs) = x : stripTags xs
-
-
---------------------------------------------------------------------------------
--- | HTML-escape a string
---
--- Example:
---
--- > escapeHtml "Me & Dean"
---
--- Result:
---
--- > "Me &amp; Dean"
-escapeHtml :: String -> String
-escapeHtml = renderHtml . toHtml
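Some example evaluations of the helpers above, derived from the definitions in
this file (GHCi-style):

ghci> toUrl "foo bar.html"
"/foo%20bar.html"
ghci> toSiteRoot "/posts/2017/foo.html"
"../.."
ghci> demoteHeaders "<h1>Title</h1>"
"<h2>Title</h2>"
ghci> isExternal "//cdn.example.com/jquery.js"
True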
diff --git a/src/Hakyll/Web/Html/RelativizeUrls.hs b/src/Hakyll/Web/Html/RelativizeUrls.hs
deleted file mode 100644
index 33b0c2c..0000000
--- a/src/Hakyll/Web/Html/RelativizeUrls.hs
+++ /dev/null
@@ -1,52 +0,0 @@
---------------------------------------------------------------------------------
--- | This module exposes a function which can relativize URLs on a webpage.
---
--- This means that one can deploy the resulting site on
--- @http:\/\/example.com\/@, but also on @http:\/\/example.com\/some-folder\/@
--- without having to change anything (simply copy over the files).
---
--- To use it, you should use absolute URLs from the site root everywhere. For
--- example, use
---
--- > <img src="/images/lolcat.png" alt="Funny zomgroflcopter" />
---
--- in a blogpost. When this page is run through the relativize URLs module, the
--- result will be (supposing your blogpost is located at @\/posts\/foo.html@):
---
--- > <img src="../images/lolcat.png" alt="Funny zomgroflcopter" />
-module Hakyll.Web.Html.RelativizeUrls
- ( relativizeUrls
- , relativizeUrlsWith
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.List (isPrefixOf)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Item
-import Hakyll.Web.Html
-
-
---------------------------------------------------------------------------------
--- | Compiler form of 'relativizeUrlsWith' which automatically picks the right
--- root path
-relativizeUrls :: Item String -> Compiler (Item String)
-relativizeUrls item = do
- route <- getRoute $ itemIdentifier item
- return $ case route of
- Nothing -> item
- Just r -> fmap (relativizeUrlsWith $ toSiteRoot r) item
-
-
---------------------------------------------------------------------------------
--- | Relativize URLs in HTML
-relativizeUrlsWith :: String -- ^ Path to the site root
- -> String -- ^ HTML to relativize
- -> String -- ^ Resulting HTML
-relativizeUrlsWith root = withUrls rel
- where
- isRel x = "/" `isPrefixOf` x && not ("//" `isPrefixOf` x)
- rel x = if isRel x then root ++ x else x
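A typical way to hook this into a compilation pipeline; postCtx and the template
path are placeholders:

match "posts/*" $ do
    route $ setExtension "html"
    compile $ pandocCompiler
        >>= loadAndApplyTemplate "templates/post.html" postCtx
        >>= relativizeUrls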
diff --git a/src/Hakyll/Web/Paginate.hs b/src/Hakyll/Web/Paginate.hs
deleted file mode 100644
index dd058f6..0000000
--- a/src/Hakyll/Web/Paginate.hs
+++ /dev/null
@@ -1,153 +0,0 @@
---------------------------------------------------------------------------------
-{-# LANGUAGE OverloadedStrings #-}
-module Hakyll.Web.Paginate
- ( PageNumber
- , Paginate (..)
- , buildPaginateWith
- , paginateEvery
- , paginateRules
- , paginateContext
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Applicative (empty)
-import Control.Monad (forM_, forM)
-import qualified Data.Map as M
-import qualified Data.Set as S
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-import Hakyll.Core.Item
-import Hakyll.Core.Metadata
-import Hakyll.Core.Rules
-import Hakyll.Web.Html
-import Hakyll.Web.Template.Context
-
-
---------------------------------------------------------------------------------
-type PageNumber = Int
-
-
---------------------------------------------------------------------------------
--- | Data about paginators
-data Paginate = Paginate
- { paginateMap :: M.Map PageNumber [Identifier]
- , paginateMakeId :: PageNumber -> Identifier
- , paginateDependency :: Dependency
- }
-
-
---------------------------------------------------------------------------------
-paginateNumPages :: Paginate -> Int
-paginateNumPages = M.size . paginateMap
-
-
---------------------------------------------------------------------------------
-paginateEvery :: Int -> [a] -> [[a]]
-paginateEvery n = go
- where
- go [] = []
- go xs = let (y, ys) = splitAt n xs in y : go ys
-
-
---------------------------------------------------------------------------------
-buildPaginateWith
- :: MonadMetadata m
- => ([Identifier] -> m [[Identifier]]) -- ^ Group items into pages
- -> Pattern -- ^ Select items to paginate
- -> (PageNumber -> Identifier) -- ^ Identifiers for the pages
- -> m Paginate
-buildPaginateWith grouper pattern makeId = do
- ids <- getMatches pattern
- idGroups <- grouper ids
- let idsSet = S.fromList ids
- return Paginate
- { paginateMap = M.fromList (zip [1 ..] idGroups)
- , paginateMakeId = makeId
- , paginateDependency = PatternDependency pattern idsSet
- }
-
-
---------------------------------------------------------------------------------
-paginateRules :: Paginate -> (PageNumber -> Pattern -> Rules ()) -> Rules ()
-paginateRules paginator rules =
- forM_ (M.toList $ paginateMap paginator) $ \(idx, identifiers) ->
- rulesExtraDependencies [paginateDependency paginator] $
- create [paginateMakeId paginator idx] $
- rules idx $ fromList identifiers
-
-
---------------------------------------------------------------------------------
--- | Get the identifier for a certain page by passing in the page number.
-paginatePage :: Paginate -> PageNumber -> Maybe Identifier
-paginatePage pag pageNumber
- | pageNumber < 1 = Nothing
- | pageNumber > (paginateNumPages pag) = Nothing
- | otherwise = Just $ paginateMakeId pag pageNumber
-
-
---------------------------------------------------------------------------------
--- | A default paginate context which provides the following keys:
---
---
--- * @firstPageNum@
--- * @firstPageUrl@
--- * @previousPageNum@
--- * @previousPageUrl@
--- * @nextPageNum@
--- * @nextPageUrl@
--- * @lastPageNum@
--- * @lastPageUrl@
--- * @currentPageNum@
--- * @currentPageUrl@
--- * @numPages@
--- * @allPages@
-paginateContext :: Paginate -> PageNumber -> Context a
-paginateContext pag currentPage = mconcat
- [ field "firstPageNum" $ \_ -> otherPage 1 >>= num
- , field "firstPageUrl" $ \_ -> otherPage 1 >>= url
- , field "previousPageNum" $ \_ -> otherPage (currentPage - 1) >>= num
- , field "previousPageUrl" $ \_ -> otherPage (currentPage - 1) >>= url
- , field "nextPageNum" $ \_ -> otherPage (currentPage + 1) >>= num
- , field "nextPageUrl" $ \_ -> otherPage (currentPage + 1) >>= url
- , field "lastPageNum" $ \_ -> otherPage lastPage >>= num
- , field "lastPageUrl" $ \_ -> otherPage lastPage >>= url
- , field "currentPageNum" $ \i -> thisPage i >>= num
- , field "currentPageUrl" $ \i -> thisPage i >>= url
- , constField "numPages" $ show $ paginateNumPages pag
- , Context $ \k _ i -> case k of
- "allPages" -> do
- let ctx =
- field "isCurrent" (\n -> if fst (itemBody n) == currentPage then return "true" else empty) `mappend`
- field "num" (num . itemBody) `mappend`
- field "url" (url . itemBody)
-
- list <- forM [1 .. lastPage] $
- \n -> if n == currentPage then thisPage i else otherPage n
- items <- mapM makeItem list
- return $ ListField ctx items
- _ -> do
- empty
-
- ]
- where
- lastPage = paginateNumPages pag
-
- thisPage i = return (currentPage, itemIdentifier i)
- otherPage n
- | n == currentPage = fail $ "This is the current page: " ++ show n
- | otherwise = case paginatePage pag n of
- Nothing -> fail $ "No such page: " ++ show n
- Just i -> return (n, i)
-
- num :: (Int, Identifier) -> Compiler String
- num = return . show . fst
-
- url :: (Int, Identifier) -> Compiler String
- url (n, i) = getRoute i >>= \mbR -> case mbR of
- Just r -> return $ toUrl r
- Nothing -> fail $ "No URL for page: " ++ show n
diff --git a/src/Hakyll/Web/Pandoc.hs b/src/Hakyll/Web/Pandoc.hs
deleted file mode 100644
index eec0a8a..0000000
--- a/src/Hakyll/Web/Pandoc.hs
+++ /dev/null
@@ -1,164 +0,0 @@
---------------------------------------------------------------------------------
--- | Module exporting convenient pandoc bindings
-module Hakyll.Web.Pandoc
- ( -- * The basic building blocks
- readPandoc
- , readPandocWith
- , writePandoc
- , writePandocWith
- , renderPandoc
- , renderPandocWith
-
- -- * Derived compilers
- , pandocCompiler
- , pandocCompilerWith
- , pandocCompilerWithTransform
- , pandocCompilerWithTransformM
-
- -- * Default options
- , defaultHakyllReaderOptions
- , defaultHakyllWriterOptions
- ) where
-
-
---------------------------------------------------------------------------------
-import qualified Data.Set as S
-import Text.Pandoc
-import Text.Pandoc.Error (PandocError (..))
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Item
-import Hakyll.Web.Pandoc.FileType
-
-
---------------------------------------------------------------------------------
--- | Read a string using pandoc, with the default options
-readPandoc
- :: Item String -- ^ String to read
- -> Compiler (Item Pandoc) -- ^ Resulting document
-readPandoc = readPandocWith defaultHakyllReaderOptions
-
-
---------------------------------------------------------------------------------
--- | Read a string using pandoc, with the supplied options
-readPandocWith
- :: ReaderOptions -- ^ Parser options
- -> Item String -- ^ String to read
- -> Compiler (Item Pandoc) -- ^ Resulting document
-readPandocWith ropt item =
- case traverse (reader ropt (itemFileType item)) item of
- Left (ParseFailure err) -> fail $
- "Hakyll.Web.Pandoc.readPandocWith: parse failed: " ++ err
- Left (ParsecError _ err) -> fail $
- "Hakyll.Web.Pandoc.readPandocWith: parse failed: " ++ show err
- Right item' -> return item'
- where
- reader ro t = case t of
- DocBook -> readDocBook ro
- Html -> readHtml ro
- LaTeX -> readLaTeX ro
- LiterateHaskell t' -> reader (addExt ro Ext_literate_haskell) t'
- Markdown -> readMarkdown ro
- MediaWiki -> readMediaWiki ro
- OrgMode -> readOrg ro
- Rst -> readRST ro
- Textile -> readTextile ro
- _ -> error $
- "Hakyll.Web.readPandocWith: I don't know how to read a file of " ++
- "the type " ++ show t ++ " for: " ++ show (itemIdentifier item)
-
- addExt ro e = ro {readerExtensions = S.insert e $ readerExtensions ro}
-
-
---------------------------------------------------------------------------------
--- | Write a document (as HTML) using pandoc, with the default options
-writePandoc :: Item Pandoc -- ^ Document to write
- -> Item String -- ^ Resulting HTML
-writePandoc = writePandocWith defaultHakyllWriterOptions
-
-
---------------------------------------------------------------------------------
--- | Write a document (as HTML) using pandoc, with the supplied options
-writePandocWith :: WriterOptions -- ^ Writer options for pandoc
- -> Item Pandoc -- ^ Document to write
- -> Item String -- ^ Resulting HTML
-writePandocWith wopt = fmap $ writeHtmlString wopt
-
-
---------------------------------------------------------------------------------
--- | Render the resource using pandoc
-renderPandoc :: Item String -> Compiler (Item String)
-renderPandoc =
- renderPandocWith defaultHakyllReaderOptions defaultHakyllWriterOptions
-
-
---------------------------------------------------------------------------------
--- | Render the resource using pandoc
-renderPandocWith
- :: ReaderOptions -> WriterOptions -> Item String -> Compiler (Item String)
-renderPandocWith ropt wopt item =
- writePandocWith wopt <$> readPandocWith ropt item
-
-
---------------------------------------------------------------------------------
--- | Read and render a page using pandoc
-pandocCompiler :: Compiler (Item String)
-pandocCompiler =
- pandocCompilerWith defaultHakyllReaderOptions defaultHakyllWriterOptions
-
-
---------------------------------------------------------------------------------
--- | A version of 'pandocCompiler' which allows you to specify your own pandoc
--- options
-pandocCompilerWith :: ReaderOptions -> WriterOptions -> Compiler (Item String)
-pandocCompilerWith ropt wopt =
- cached "Hakyll.Web.Pandoc.pandocCompilerWith" $
- pandocCompilerWithTransform ropt wopt id
-
-
---------------------------------------------------------------------------------
--- | An extension of 'pandocCompilerWith' which allows you to specify a custom
--- pandoc transformation for the content
-pandocCompilerWithTransform :: ReaderOptions -> WriterOptions
- -> (Pandoc -> Pandoc)
- -> Compiler (Item String)
-pandocCompilerWithTransform ropt wopt f =
- pandocCompilerWithTransformM ropt wopt (return . f)
-
-
---------------------------------------------------------------------------------
--- | Similar to 'pandocCompilerWithTransform', but the transformation
--- function is monadic. This is useful when you want the pandoc
--- transformation to use the 'Compiler' information such as routes,
--- metadata, etc
-pandocCompilerWithTransformM :: ReaderOptions -> WriterOptions
- -> (Pandoc -> Compiler Pandoc)
- -> Compiler (Item String)
-pandocCompilerWithTransformM ropt wopt f =
- writePandocWith wopt <$>
- (traverse f =<< readPandocWith ropt =<< getResourceBody)
-
-
---------------------------------------------------------------------------------
--- | The default reader options for pandoc parsing in hakyll
-defaultHakyllReaderOptions :: ReaderOptions
-defaultHakyllReaderOptions = def
- { -- The following option causes pandoc to read smart typography, a nice
- -- and free bonus.
- readerSmart = True
- }
-
-
---------------------------------------------------------------------------------
--- | The default writer options for pandoc rendering in hakyll
-defaultHakyllWriterOptions :: WriterOptions
-defaultHakyllWriterOptions = def
- { -- This option causes literate haskell to be written using '>' marks in
- -- html, which I think is a good default.
- writerExtensions = S.insert Ext_literate_haskell (writerExtensions def)
- , -- We want to have highlighting by default, to be compatible with earlier
- -- Hakyll releases
- writerHighlight = True
- }
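A sketch of the transform hooks; the Emph-to-Strong rewrite is a made-up example
and uses walk from pandoc's Text.Pandoc.Walk, while the pattern and route are
placeholders:

import Text.Pandoc.Walk (walk)

-- Hypothetical transformation: render every emphasis as strong emphasis.
emphToStrong :: Pandoc -> Pandoc
emphToStrong = walk $ \inl -> case inl of
    Emph xs -> Strong xs
    other   -> other

match "posts/*" $ do
    route $ setExtension "html"
    compile $ pandocCompilerWithTransform
        defaultHakyllReaderOptions defaultHakyllWriterOptions emphToStrong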
diff --git a/src/Hakyll/Web/Pandoc/Biblio.hs b/src/Hakyll/Web/Pandoc/Biblio.hs
deleted file mode 100644
index dfe6d93..0000000
--- a/src/Hakyll/Web/Pandoc/Biblio.hs
+++ /dev/null
@@ -1,115 +0,0 @@
---------------------------------------------------------------------------------
--- | Wraps pandoc's bibliography handling
---
--- In order to add a bibliography, you will need a bibliography file (e.g.
--- @.bib@) and a CSL file (@.csl@). Both need to be compiled with their
--- respective compilers ('biblioCompiler' and 'cslCompiler'). Then, you can
--- refer to these files when you use 'readPandocBiblio'. This function also
--- takes the reader options for completeness -- you can use
--- 'defaultHakyllReaderOptions' if you're unsure.
--- 'pandocBiblioCompiler' is a convenience wrapper which works like 'pandocCompiler',
--- but also takes paths to compiled bibliography and csl files.
-{-# LANGUAGE Arrows #-}
-{-# LANGUAGE DeriveDataTypeable #-}
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-module Hakyll.Web.Pandoc.Biblio
- ( CSL
- , cslCompiler
- , Biblio (..)
- , biblioCompiler
- , readPandocBiblio
- , pandocBiblioCompiler
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad (liftM, replicateM)
-import Data.Binary (Binary (..))
-import Data.Default (def)
-import Data.Typeable (Typeable)
-import Hakyll.Core.Compiler
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-import Hakyll.Core.Writable
-import Hakyll.Web.Pandoc
-import Hakyll.Web.Pandoc.Binary ()
-import qualified Text.CSL as CSL
-import Text.CSL.Pandoc (processCites)
-import Text.Pandoc (Pandoc, ReaderOptions (..))
-
-
---------------------------------------------------------------------------------
-data CSL = CSL
- deriving (Show, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Binary CSL where
- put CSL = return ()
- get = return CSL
-
-
---------------------------------------------------------------------------------
-instance Writable CSL where
- -- Shouldn't be written.
- write _ _ = return ()
-
-
---------------------------------------------------------------------------------
-cslCompiler :: Compiler (Item CSL)
-cslCompiler = makeItem CSL
-
-
---------------------------------------------------------------------------------
-newtype Biblio = Biblio [CSL.Reference]
- deriving (Show, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Binary Biblio where
- -- Ugly.
- get = do
- len <- get
- Biblio <$> replicateM len get
- put (Biblio rs) = put (length rs) >> mapM_ put rs
-
-
---------------------------------------------------------------------------------
-instance Writable Biblio where
- -- Shouldn't be written.
- write _ _ = return ()
-
-
---------------------------------------------------------------------------------
-biblioCompiler :: Compiler (Item Biblio)
-biblioCompiler = do
- filePath <- toFilePath <$> getUnderlying
- makeItem =<< unsafeCompiler (Biblio <$> CSL.readBiblioFile filePath)
-
-
---------------------------------------------------------------------------------
-readPandocBiblio :: ReaderOptions
- -> Item CSL
- -> Item Biblio
- -> (Item String)
- -> Compiler (Item Pandoc)
-readPandocBiblio ropt csl biblio item = do
- -- Parse CSL file, if given
- style <- unsafeCompiler $ CSL.readCSLFile Nothing . toFilePath . itemIdentifier $ csl
-
- -- We need to know the citation keys and add them *before* actually parsing
- -- the page. If we don't do this, pandoc won't even consider them
- -- citations!
- let Biblio refs = itemBody biblio
- pandoc <- itemBody <$> readPandocWith ropt item
- let pandoc' = processCites style refs pandoc
-
- return $ fmap (const pandoc') item
-
---------------------------------------------------------------------------------
-pandocBiblioCompiler :: String -> String -> Compiler (Item String)
-pandocBiblioCompiler cslFileName bibFileName = do
- csl <- load $ fromFilePath cslFileName
- bib <- load $ fromFilePath bibFileName
- liftM writePandoc
- (getResourceBody >>= readPandocBiblio def csl bib)
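A usage sketch following the workflow described at the top of this file; the
csl/, bib/ and template paths are illustrative:

match "csl/*.csl" $ compile cslCompiler
match "bib/*.bib" $ compile biblioCompiler

match "posts/*" $ do
    route $ setExtension "html"
    compile $ pandocBiblioCompiler "csl/chicago.csl" "bib/refs.bib"
        >>= loadAndApplyTemplate "templates/post.html" defaultContext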
diff --git a/src/Hakyll/Web/Pandoc/Binary.hs b/src/Hakyll/Web/Pandoc/Binary.hs
deleted file mode 100644
index 3c5b5a3..0000000
--- a/src/Hakyll/Web/Pandoc/Binary.hs
+++ /dev/null
@@ -1,32 +0,0 @@
-{-# OPTIONS_GHC -fno-warn-orphans #-}
-{-# LANGUAGE DeriveGeneric #-}
-module Hakyll.Web.Pandoc.Binary where
-
-import Data.Binary (Binary (..))
-
-import qualified Text.CSL as CSL
-import qualified Text.CSL.Reference as REF
-import qualified Text.CSL.Style as STY
-import Text.Pandoc
-
---------------------------------------------------------------------------------
--- orphans
-
-instance Binary Alignment
-instance Binary Block
-instance Binary CSL.Reference
-instance Binary Citation
-instance Binary CitationMode
-instance Binary Format
-instance Binary Inline
-instance Binary ListNumberDelim
-instance Binary ListNumberStyle
-instance Binary MathType
-instance Binary QuoteType
-instance Binary REF.CLabel
-instance Binary REF.CNum
-instance Binary REF.Literal
-instance Binary REF.RefDate
-instance Binary REF.RefType
-instance Binary STY.Agent
-instance Binary STY.Formatted
diff --git a/src/Hakyll/Web/Pandoc/FileType.hs b/src/Hakyll/Web/Pandoc/FileType.hs
deleted file mode 100644
index 3636e41..0000000
--- a/src/Hakyll/Web/Pandoc/FileType.hs
+++ /dev/null
@@ -1,74 +0,0 @@
---------------------------------------------------------------------------------
--- | A module dealing with pandoc file extensions and associated file types
-module Hakyll.Web.Pandoc.FileType
- ( FileType (..)
- , fileType
- , itemFileType
- ) where
-
-
---------------------------------------------------------------------------------
-import System.FilePath (splitExtension)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-
-
---------------------------------------------------------------------------------
--- | Datatype to represent the different file types Hakyll can deal with by
--- default
-data FileType
- = Binary
- | Css
- | DocBook
- | Html
- | LaTeX
- | LiterateHaskell FileType
- | Markdown
- | MediaWiki
- | OrgMode
- | PlainText
- | Rst
- | Textile
- deriving (Eq, Ord, Show, Read)
-
-
---------------------------------------------------------------------------------
--- | Get the file type for a certain file. The type is determined by extension.
-fileType :: FilePath -> FileType
-fileType = uncurry fileType' . splitExtension
- where
- fileType' _ ".css" = Css
- fileType' _ ".dbk" = DocBook
- fileType' _ ".htm" = Html
- fileType' _ ".html" = Html
- fileType' f ".lhs" = LiterateHaskell $ case fileType f of
- -- If no extension is given, default to Markdown + LiterateHaskell
- Binary -> Markdown
- -- Otherwise, LaTeX + LiterateHaskell or whatever the user specified
- x -> x
- fileType' _ ".markdown" = Markdown
- fileType' _ ".mediawiki" = MediaWiki
- fileType' _ ".md" = Markdown
- fileType' _ ".mdn" = Markdown
- fileType' _ ".mdown" = Markdown
- fileType' _ ".mdwn" = Markdown
- fileType' _ ".mkd" = Markdown
- fileType' _ ".mkdwn" = Markdown
- fileType' _ ".org" = OrgMode
- fileType' _ ".page" = Markdown
- fileType' _ ".rst" = Rst
- fileType' _ ".tex" = LaTeX
- fileType' _ ".text" = PlainText
- fileType' _ ".textile" = Textile
- fileType' _ ".txt" = PlainText
- fileType' _ ".wiki" = MediaWiki
- fileType' _ _ = Binary -- Treat unknown files as binary
-
-
---------------------------------------------------------------------------------
--- | Get the file type for the current file
-itemFileType :: Item a -> FileType
-itemFileType = fileType . toFilePath . itemIdentifier
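Example evaluations of fileType, derived from the extension table above
(GHCi-style):

ghci> fileType "posts/hello.markdown"
Markdown
ghci> fileType "code/Tutorial.lhs"
LiterateHaskell Markdown
ghci> fileType "code/Tutorial.tex.lhs"
LiterateHaskell LaTeX
ghci> fileType "images/logo.png"
Binary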
diff --git a/src/Hakyll/Web/Redirect.hs b/src/Hakyll/Web/Redirect.hs
deleted file mode 100644
index 4760cff..0000000
--- a/src/Hakyll/Web/Redirect.hs
+++ /dev/null
@@ -1,87 +0,0 @@
--- | Module used for generating HTML redirect pages. This allows renaming pages
--- to avoid breaking existing links without requiring server-side support for
--- formal 301 redirect status codes.
-module Hakyll.Web.Redirect
- ( Redirect (..)
- , createRedirects
- ) where
-
-import Control.Applicative ((<$>))
-import Control.Monad (forM_)
-import Data.Binary (Binary (..))
-import Hakyll.Core.Compiler
-import Hakyll.Core.Identifier
-import Hakyll.Core.Routes
-import Hakyll.Core.Rules
-import Hakyll.Core.Writable (Writable (..))
-
--- | This function exposes a higher-level interface compared to using the
--- 'Redirect' type manually.
---
--- This creates, using a database mapping broken URLs to working ones, HTML
--- files which perform HTML META tag redirects (since, as a static site, we
--- can't use web-server-level 301 redirects, and using JS is gross).
---
--- This is useful for sending people using old URLs to renamed versions, dealing
--- with common typos etc, and will increase site traffic. Such broken URLs can
--- be found by looking at server logs or by using Google Webmaster Tools.
--- Broken URLs must be valid Haskell strings, non-URL-escaped valid POSIX
--- filenames, and relative links, since they will be defined in a @hakyll.hs@
--- and during generation, written to disk with the filename corresponding to the
--- broken URLs. (Target URLs can be absolute or relative, but should be
--- URL-escaped.) So broken incoming links like <http://www.gwern.net/foo/> which
--- should be <http://www.gwern.net/foobar> cannot be fixed (since you cannot
--- create a HTML file named @"foo/"@ on disk, as that would be a directory).
---
--- An example of a valid association list would be:
---
--- > brokenLinks =
--- > [ ("projects.html", "http://github.com/gwern")
--- > , ("/Black-market archive", "Black-market%20archives")
--- > ]
---
--- In which case the functionality can then be used in `main` with a line like:
---
--- > version "redirects" $ createRedirects brokenLinks
---
--- The 'version' is recommended to separate these items from your other pages.
---
--- The on-disk files can then be uploaded with HTML mimetypes
--- (either explicitly by generating and uploading them separately, by
--- auto-detection of the filetype, or an upload tool defaulting to HTML
--- mimetype, such as calling @s3cmd@ with @--default-mime-type=text/html@) and
--- will redirect browsers and search engines going to the old/broken URLs.
---
--- See also <https://groups.google.com/d/msg/hakyll/sWc6zxfh-uM/fUpZPsFNDgAJ>.
-createRedirects :: [(Identifier, String)] -> Rules ()
-createRedirects redirects =
- forM_ redirects $ \(ident, to) ->
- create [ident] $ do
- route idRoute
- compile $ makeItem $! Redirect to
-
--- | This datatype can be used directly if you want a lower-level interface to
--- generate redirects. For example, if you want to redirect @foo.html@ to
--- @bar.jpg@, you can use:
---
--- > create ["foo.html"] $ do
--- > route idRoute
--- > compile $ makeItem $ Redirect "bar.jpg"
-data Redirect = Redirect
- { redirectTo :: String
- } deriving (Eq, Ord, Show)
-
-instance Binary Redirect where
- put (Redirect to) = put to
- get = Redirect <$> get
-
-instance Writable Redirect where
- write path = write path . fmap redirectToHtml
-
-redirectToHtml :: Redirect -> String
-redirectToHtml (Redirect working) =
- "<!DOCTYPE html><html><head><meta charset=\"utf-8\"/><meta name=\"generator\" content=\"hakyll\"/>" ++
- "<meta http-equiv=\"refresh\" content=\"0; url=" ++ working ++
- "\"><link rel=\"canonical\" href=\"" ++ working ++
- "\"><title>Permanent Redirect</title></head><body><p>The page has moved to: <a href=\"" ++ working ++
- "\">this page</a></p></body></html>"
diff --git a/src/Hakyll/Web/Tags.hs b/src/Hakyll/Web/Tags.hs
deleted file mode 100644
index 88119c2..0000000
--- a/src/Hakyll/Web/Tags.hs
+++ /dev/null
@@ -1,344 +0,0 @@
---------------------------------------------------------------------------------
--- | This module contains some specialized functions to deal with tags. It
--- assumes you follow some conventions.
---
--- We support two types of tags: tags and categories.
---
--- To use default tags, use 'buildTags'. Tags are placed in a comma-separated
--- metadata field like this:
---
--- > ---
--- > author: Philip K. Dick
--- > title: Do androids dream of electric sheep?
--- > tags: future, science fiction, humanoid
--- > ---
--- > The novel is set in a post-apocalyptic near future, where the Earth and
--- > its populations have been damaged greatly by Nuclear...
---
--- To use categories, use the 'buildCategories' function. Categories are
--- determined by the directory a page is in, for example, the post
---
--- > posts/coding/2010-01-28-hakyll-categories.markdown
---
--- will receive the @coding@ category.
---
--- Advanced users may implement custom systems using 'buildTagsWith' if desired.
---
--- In the above example, we would want to create a page which lists all pages in
--- the @coding@ category, for example, with the 'Identifier':
---
--- > tags/coding.html
---
--- This is where the first parameter of 'buildTags' and 'buildCategories' comes
--- in. In the above case, we used the function:
---
--- > fromCapture "tags/*.html" :: String -> Identifier
---
--- The 'tagsRules' function lets you generate such a page for each tag in the
--- 'Rules' monad.
-{-# LANGUAGE Arrows #-}
-{-# LANGUAGE DeriveDataTypeable #-}
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-{-# LANGUAGE OverloadedStrings #-}
-module Hakyll.Web.Tags
- ( Tags (..)
- , getTags
- , buildTagsWith
- , buildTags
- , buildCategories
- , tagsRules
- , renderTags
- , renderTagCloud
- , renderTagCloudWith
- , tagCloudField
- , tagCloudFieldWith
- , renderTagList
- , tagsField
- , tagsFieldWith
- , categoryField
- , sortTagsBy
- , caseInsensitiveTags
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Arrow ((&&&))
-import Control.Monad (foldM, forM, forM_, mplus)
-import Data.Char (toLower)
-import Data.List (intercalate, intersperse,
- sortBy)
-import qualified Data.Map as M
-import Data.Maybe (catMaybes, fromMaybe)
-import Data.Ord (comparing)
-import qualified Data.Set as S
-import System.FilePath (takeBaseName, takeDirectory)
-import Text.Blaze.Html (toHtml, toValue, (!))
-import Text.Blaze.Html.Renderer.String (renderHtml)
-import qualified Text.Blaze.Html5 as H
-import qualified Text.Blaze.Html5.Attributes as A
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Dependencies
-import Hakyll.Core.Identifier
-import Hakyll.Core.Identifier.Pattern
-import Hakyll.Core.Item
-import Hakyll.Core.Metadata
-import Hakyll.Core.Rules
-import Hakyll.Core.Util.String
-import Hakyll.Web.Html
-import Hakyll.Web.Template.Context
-
-
---------------------------------------------------------------------------------
--- | Data about tags
-data Tags = Tags
- { tagsMap :: [(String, [Identifier])]
- , tagsMakeId :: String -> Identifier
- , tagsDependency :: Dependency
- }
-
-
---------------------------------------------------------------------------------
--- | Obtain tags from a page in the default way: parse them from the @tags@
--- metadata field. This can either be a list or a comma-separated string.
-getTags :: MonadMetadata m => Identifier -> m [String]
-getTags identifier = do
- metadata <- getMetadata identifier
- return $ fromMaybe [] $
- (lookupStringList "tags" metadata) `mplus`
- (map trim . splitAll "," <$> lookupString "tags" metadata)
-
-
---------------------------------------------------------------------------------
--- | Obtain categories from a page.
-getCategory :: MonadMetadata m => Identifier -> m [String]
-getCategory = return . return . takeBaseName . takeDirectory . toFilePath
-
-
---------------------------------------------------------------------------------
--- | Higher-order function to read tags
-buildTagsWith :: MonadMetadata m
- => (Identifier -> m [String])
- -> Pattern
- -> (String -> Identifier)
- -> m Tags
-buildTagsWith f pattern makeId = do
- ids <- getMatches pattern
- tagMap <- foldM addTags M.empty ids
- let set' = S.fromList ids
- return $ Tags (M.toList tagMap) makeId (PatternDependency pattern set')
- where
- -- Create a tag map for one page
- addTags tagMap id' = do
- tags <- f id'
- let tagMap' = M.fromList $ zip tags $ repeat [id']
- return $ M.unionWith (++) tagMap tagMap'
-
-
---------------------------------------------------------------------------------
-buildTags :: MonadMetadata m => Pattern -> (String -> Identifier) -> m Tags
-buildTags = buildTagsWith getTags
-
-
---------------------------------------------------------------------------------
-buildCategories :: MonadMetadata m => Pattern -> (String -> Identifier)
- -> m Tags
-buildCategories = buildTagsWith getCategory
-
-
---------------------------------------------------------------------------------
-tagsRules :: Tags -> (String -> Pattern -> Rules ()) -> Rules ()
-tagsRules tags rules =
- forM_ (tagsMap tags) $ \(tag, identifiers) ->
- rulesExtraDependencies [tagsDependency tags] $
- create [tagsMakeId tags tag] $
- rules tag $ fromList identifiers
-
-
---------------------------------------------------------------------------------
--- | Render tags in HTML (the flexible higher-order function)
-renderTags :: (String -> String -> Int -> Int -> Int -> String)
- -- ^ Produce a tag item: tag, url, count, min count, max count
- -> ([String] -> String)
- -- ^ Join items
- -> Tags
- -- ^ Input tags
- -> Compiler String
-renderTags makeHtml concatHtml tags = do
- -- In tags' we create a list: [((tag, route), count)]
- tags' <- forM (tagsMap tags) $ \(tag, ids) -> do
- route' <- getRoute $ tagsMakeId tags tag
- return ((tag, route'), length ids)
-
- -- TODO: We actually need to tell a dependency here!
-
- let -- Absolute frequencies of the pages
- freqs = map snd tags'
-
- -- The minimum and maximum count found
- (min', max')
- | null freqs = (0, 1)
- | otherwise = (minimum &&& maximum) freqs
-
- -- Create a link for one item
- makeHtml' ((tag, url), count) =
- makeHtml tag (toUrl $ fromMaybe "/" url) count min' max'
-
- -- Render and return the HTML
- return $ concatHtml $ map makeHtml' tags'
-
-
---------------------------------------------------------------------------------
--- | Render a tag cloud in HTML
-renderTagCloud :: Double
- -- ^ Smallest font size, in percent
- -> Double
- -- ^ Biggest font size, in percent
- -> Tags
- -- ^ Input tags
- -> Compiler String
- -- ^ Rendered cloud
-renderTagCloud = renderTagCloudWith makeLink (intercalate " ")
- where
- makeLink minSize maxSize tag url count min' max' =
- -- Show the relative size of one 'count' in percent
- let diff = 1 + fromIntegral max' - fromIntegral min'
- relative = (fromIntegral count - fromIntegral min') / diff
- size = floor $ minSize + relative * (maxSize - minSize) :: Int
- in renderHtml $
- H.a ! A.style (toValue $ "font-size: " ++ show size ++ "%")
- ! A.href (toValue url)
- $ toHtml tag
-
-
---------------------------------------------------------------------------------
--- | Render a tag cloud in HTML
-renderTagCloudWith :: (Double -> Double ->
- String -> String -> Int -> Int -> Int -> String)
- -- ^ Render a single tag link
- -> ([String] -> String)
- -- ^ Concatenate links
- -> Double
- -- ^ Smallest font size, in percent
- -> Double
- -- ^ Biggest font size, in percent
- -> Tags
- -- ^ Input tags
- -> Compiler String
- -- ^ Rendered cloud
-renderTagCloudWith makeLink cat minSize maxSize =
- renderTags (makeLink minSize maxSize) cat
-
-
---------------------------------------------------------------------------------
--- | Render a tag cloud in HTML as a context
-tagCloudField :: String
- -- ^ Destination key
- -> Double
- -- ^ Smallest font size, in percent
- -> Double
- -- ^ Biggest font size, in percent
- -> Tags
- -- ^ Input tags
- -> Context a
- -- ^ Context
-tagCloudField key minSize maxSize tags =
- field key $ \_ -> renderTagCloud minSize maxSize tags
-
-
---------------------------------------------------------------------------------
--- | Render a tag cloud in HTML as a context
-tagCloudFieldWith :: String
- -- ^ Destination key
- -> (Double -> Double ->
- String -> String -> Int -> Int -> Int -> String)
- -- ^ Render a single tag link
- -> ([String] -> String)
- -- ^ Concatenate links
- -> Double
- -- ^ Smallest font size, in percent
- -> Double
- -- ^ Biggest font size, in percent
- -> Tags
- -- ^ Input tags
- -> Context a
- -- ^ Context
-tagCloudFieldWith key makeLink cat minSize maxSize tags =
- field key $ \_ -> renderTagCloudWith makeLink cat minSize maxSize tags
-
-
---------------------------------------------------------------------------------
--- | Render a simple tag list in HTML, with the tag count next to the item
--- TODO: Maybe produce a Context here
-renderTagList :: Tags -> Compiler (String)
-renderTagList = renderTags makeLink (intercalate ", ")
- where
- makeLink tag url count _ _ = renderHtml $
- H.a ! A.href (toValue url) $ toHtml (tag ++ " (" ++ show count ++ ")")
-
-
---------------------------------------------------------------------------------
--- | Render tags as links, using custom functions to get the tags and to
--- render each link
-tagsFieldWith :: (Identifier -> Compiler [String])
- -- ^ Get the tags
- -> (String -> (Maybe FilePath) -> Maybe H.Html)
- -- ^ Render link for one tag
- -> ([H.Html] -> H.Html)
- -- ^ Concatenate tag links
- -> String
- -- ^ Destination field
- -> Tags
- -- ^ Tags structure
- -> Context a
- -- ^ Resulting context
-tagsFieldWith getTags' renderLink cat key tags = field key $ \item -> do
- tags' <- getTags' $ itemIdentifier item
- links <- forM tags' $ \tag -> do
- route' <- getRoute $ tagsMakeId tags tag
- return $ renderLink tag route'
-
- return $ renderHtml $ cat $ catMaybes $ links
-
-
---------------------------------------------------------------------------------
--- | Render tags with links
-tagsField :: String -- ^ Destination key
- -> Tags -- ^ Tags
- -> Context a -- ^ Context
-tagsField =
- tagsFieldWith getTags simpleRenderLink (mconcat . intersperse ", ")
-
-
---------------------------------------------------------------------------------
--- | Render the category in a link
-categoryField :: String -- ^ Destination key
- -> Tags -- ^ Tags
- -> Context a -- ^ Context
-categoryField =
- tagsFieldWith getCategory simpleRenderLink (mconcat . intersperse ", ")
-
-
---------------------------------------------------------------------------------
--- | Render one tag link
-simpleRenderLink :: String -> (Maybe FilePath) -> Maybe H.Html
-simpleRenderLink _ Nothing = Nothing
-simpleRenderLink tag (Just filePath) =
- Just $ H.a ! A.href (toValue $ toUrl filePath) $ toHtml tag
-
-
---------------------------------------------------------------------------------
--- | Sort tags using the supplied function. The first element of the tuple
--- passed to the comparing function is the actual tag name.
-sortTagsBy :: ((String, [Identifier]) -> (String, [Identifier]) -> Ordering)
- -> Tags -> Tags
-sortTagsBy f t = t {tagsMap = sortBy f (tagsMap t)}
-
-
---------------------------------------------------------------------------------
--- | Sample sorting function that compares tags case insensitively.
-caseInsensitiveTags :: (String, [Identifier]) -> (String, [Identifier])
- -> Ordering
-caseInsensitiveTags = comparing $ map toLower . fst
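A sketch of the conventional wiring in site.hs; postCtx and the template path
are assumptions:

tags <- buildTags "posts/*" (fromCapture "tags/*.html")

tagsRules tags $ \tag pat -> do
    route idRoute
    compile $ do
        posts <- recentFirst =<< loadAll pat
        let ctx = constField "title" ("Posts tagged \"" ++ tag ++ "\"")
                  `mappend` listField "posts" postCtx (return posts)
                  `mappend` defaultContext
        makeItem "" >>= loadAndApplyTemplate "templates/tag.html" ctx

-- Make $tags$ available when rendering a single post:
postCtxWithTags :: Tags -> Context String
postCtxWithTags tags = tagsField "tags" tags `mappend` postCtx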
diff --git a/src/Hakyll/Web/Template.hs b/src/Hakyll/Web/Template.hs
deleted file mode 100644
index 2a9684b..0000000
--- a/src/Hakyll/Web/Template.hs
+++ /dev/null
@@ -1,154 +0,0 @@
--- | This module provides means for reading and applying 'Template's.
---
--- Templates are tools to convert items into a string. They are perfectly suited
--- for laying out your site.
---
--- Let's look at an example template:
---
--- > <html>
--- > <head>
--- > <title>My crazy homepage - $title$</title>
--- > </head>
--- > <body>
--- > <div id="header">
--- > <h1>My crazy homepage - $title$</h1>
--- > </div>
--- > <div id="content">
--- > $body$
--- > </div>
--- > <div id="footer">
--- > By reading this you agree that I now own your soul
--- > </div>
--- > </body>
--- > </html>
---
--- As you can see, the format is very simple -- @$key$@ is used to render the
--- @$key$@ field from the page, everything else is literally copied. If you want
--- to literally insert @\"$key$\"@ into your page (for example, when you're
--- writing a Hakyll tutorial) you can use
---
--- > <p>
--- > A literal $$key$$.
--- > </p>
---
--- Because of its simplicity, these templates can be used for more than HTML:
--- you could make, for example, CSS or JS templates as well.
---
--- Apart from interpolating @$key$@s from the 'Context' you can also
--- use the following macros:
---
--- * @$if(key)$@
---
--- > $if(key)$
--- > <b> Defined </b>
--- > $else$
--- > <b> Non-defined </b>
--- > $endif$
---
--- This example will print @Defined@ if @key@ is defined in the
--- context and @Non-defined@ otherwise. The @$else$@ clause is
--- optional.
---
--- * @$for(key)$@
---
--- The @for@ macro is used for enumerating 'Context' elements that are
--- lists, i.e. constructed using the 'listField' function. Assume that
--- in a context we have an element @listField \"key\" c itms@. Then
--- the snippet
---
--- > $for(key)$
--- > $x$
--- > $sep$,
--- > $endfor$
---
--- would, for each item @i@ in 'itms', look up @$x$@ in the context @c@
--- with item @i@, interpolate it, and join the resulting list with
--- @,@.
---
--- Another concrete example one may consider is the following. Given the
--- context
---
--- > listField "things" (field "thing" (return . itemBody))
--- > (sequence [makeItem "fruits", makeItem "vegetables"])
---
--- and a template
---
--- > I like
--- > $for(things)$
--- > fresh $thing$$sep$, and
--- > $endfor$
---
--- the resulting page would look like
---
--- > <p>
--- > I like
--- >
--- > fresh fruits, and
--- >
--- > fresh vegetables
--- > </p>
---
--- The @$sep$@ part can be omitted. Usually, you can get by using the
--- 'applyListTemplate' and 'applyJoinListTemplate' functions.
---
--- * @$partial(path)$@
---
--- Loads a template located in a separate file and interpolates it
--- under the current context.
---
--- Assuming that the file @test.html@ contains
---
--- > <b>$key$</b>
---
--- The result of rendering
---
--- > <p>
--- > $partial("test.html")$
--- > </p>
---
--- is the same as the result of rendering
---
--- > <p>
--- > <b>$key$</b>
--- > </p>
---
--- That is, calling @$partial$@ is equivalent to just copying and pasting
--- template code.
---
--- In the examples above you can see that the outputs contain a lot of leftover
--- whitespace that you may wish to remove. Using @'$-'@ or @'-$'@ instead of
--- @'$'@ in a macro strips all whitespace to the left or right of that clause
--- respectively. Given the context
---
--- > listField "counts" (field "count" (return . itemBody))
--- > (sequence [makeItem "3", makeItem "2", makeItem "1"])
---
--- and a template
---
--- > <p>
--- > $for(counts)-$
--- > $count$
--- > $-sep$...
--- > $-endfor$
--- > </p>
---
--- the resulting page would look like
---
--- > <p>
--- > 3...2...1
--- > </p>
---
-module Hakyll.Web.Template
- ( Template
- , templateBodyCompiler
- , templateCompiler
- , applyTemplate
- , loadAndApplyTemplate
- , applyAsTemplate
- , readTemplate
- , unsafeReadTemplateFile
- ) where
-
-
---------------------------------------------------------------------------------
-import Hakyll.Web.Template.Internal
diff --git a/src/Hakyll/Web/Template/Context.hs b/src/Hakyll/Web/Template/Context.hs
deleted file mode 100644
index b6c7994..0000000
--- a/src/Hakyll/Web/Template/Context.hs
+++ /dev/null
@@ -1,379 +0,0 @@
---------------------------------------------------------------------------------
-{-# LANGUAGE CPP #-}
-{-# LANGUAGE ExistentialQuantification #-}
-module Hakyll.Web.Template.Context
- ( ContextField (..)
- , Context (..)
- , field
- , boolField
- , constField
- , listField
- , listFieldWith
- , functionField
- , mapContext
-
- , defaultContext
- , bodyField
- , metadataField
- , urlField
- , pathField
- , titleField
- , snippetField
- , dateField
- , dateFieldWith
- , getItemUTC
- , getItemModificationTime
- , modificationTimeField
- , modificationTimeFieldWith
- , teaserField
- , teaserFieldWithSeparator
- , missingField
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Applicative (Alternative (..))
-import Control.Monad (msum)
-import Data.List (intercalate)
-import Data.Time.Clock (UTCTime (..))
-import Data.Time.Format (formatTime)
-import qualified Data.Time.Format as TF
-import Data.Time.Locale.Compat (TimeLocale, defaultTimeLocale)
-import Hakyll.Core.Compiler
-import Hakyll.Core.Compiler.Internal
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-import Hakyll.Core.Metadata
-import Hakyll.Core.Provider
-import Hakyll.Core.Util.String (needlePrefix, splitAll)
-import Hakyll.Web.Html
-import System.FilePath (splitDirectories, takeBaseName)
-
-
---------------------------------------------------------------------------------
--- | Mostly for internal usage
-data ContextField
- = StringField String
- | forall a. ListField (Context a) [Item a]
-
-
---------------------------------------------------------------------------------
--- | The 'Context' monoid. Please note that the order in which you
--- compose the items is important. For example in
---
--- > field "A" f1 <> field "A" f2
---
--- the first field takes precedence. This is especially
--- important when composing with
--- 'metadataField' (or 'defaultContext'). If you want the metadata fields
--- to take precedence over your own, put 'metadataField' on the left:
---
--- @
--- 'metadataField' \<\> field \"date\" fDate
--- @
---
-newtype Context a = Context
- { unContext :: String -> [String] -> Item a -> Compiler ContextField
- }
-
-
---------------------------------------------------------------------------------
-instance Monoid (Context a) where
- mempty = missingField
- mappend (Context f) (Context g) = Context $ \k a i -> f k a i <|> g k a i
-
-
---------------------------------------------------------------------------------
-field' :: String -> (Item a -> Compiler ContextField) -> Context a
-field' key value = Context $ \k _ i -> if k == key then value i else empty
-
-
---------------------------------------------------------------------------------
--- | Constructs a new field in the 'Context'.
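---
--- For example, a hypothetical field exposing the item's body reversed:
---
--- > reversedBodyField :: Context String
--- > reversedBodyField = field "reversed" (return . reverse . itemBody)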
-field
- :: String -- ^ Key
- -> (Item a -> Compiler String) -- ^ Function that constructs a value based
- -- on the item
- -> Context a
-field key value = field' key (fmap StringField . value)
-
-
---------------------------------------------------------------------------------
--- | Creates a 'field' to use with the @$if()$@ template macro.
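---
--- For instance, a hypothetical flag that is only set for items under
--- @posts/@ (assuming @isPrefixOf@ from "Data.List" is in scope):
---
--- > boolField "isPost" $ \i ->
--- >     "posts/" `isPrefixOf` toFilePath (itemIdentifier i)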
-boolField
- :: String
- -> (Item a -> Bool)
- -> Context a
-boolField name f = field name (\i -> if f i
- then pure (error $ unwords ["no string value for bool field:",name])
- else empty)
-
-
---------------------------------------------------------------------------------
--- | Creates a 'field' that does not depend on the 'Item'.
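---
--- For example (both the key and the value are arbitrary):
---
--- > constField "siteName" "My Hakyll Site"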
-constField :: String -> String -> Context a
-constField key = field key . const . return
-
-
---------------------------------------------------------------------------------
-listField :: String -> Context a -> Compiler [Item a] -> Context b
-listField key c xs = listFieldWith key c (const xs)
-
-
---------------------------------------------------------------------------------
-listFieldWith
- :: String -> Context a -> (Item b -> Compiler [Item a]) -> Context b
-listFieldWith key c f = field' key $ fmap (ListField c) . f
-
-
---------------------------------------------------------------------------------
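--- | Creates a field that takes arguments when called from a template, as in
--- @$key(\"arg\", ...)$@. A hypothetical sketch (assuming @toUpper@ from
--- "Data.Char" is in scope):
---
--- > functionField "shout" $ \args _ -> case args of
--- >     [s] -> return (map toUpper s)
--- >     _   -> fail "shout: expected exactly one argument"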
-functionField :: String -> ([String] -> Item a -> Compiler String) -> Context a
-functionField name value = Context $ \k args i ->
- if k == name
- then StringField <$> value args i
- else empty
-
-
---------------------------------------------------------------------------------
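--- | Applies a function to every string field of a 'Context' (list fields
--- cannot be mapped over). For example, upper-casing a title (assuming
--- @toUpper@ from "Data.Char" is in scope):
---
--- > mapContext (map toUpper) (titleField "title")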
-mapContext :: (String -> String) -> Context a -> Context a
-mapContext f (Context c) = Context $ \k a i -> do
- fld <- c k a i
- case fld of
- StringField str -> return $ StringField (f str)
- ListField _ _ -> fail $
- "Hakyll.Web.Template.Context.mapContext: " ++
- "can't map over a ListField!"
-
---------------------------------------------------------------------------------
--- | A context that allows snippet inclusion. In the processed file, use it as:
---
--- > ...
--- > $snippet("path/to/snippet/")$
--- > ...
---
--- The contents of the included file will not be interpolated.
---
-snippetField :: Context String
-snippetField = functionField "snippet" f
- where
- f [contentsPath] _ = loadBody (fromFilePath contentsPath)
-    f _ i = error $
-        "Wrong number of arguments to function 'snippet()' in item " ++
-        show (itemIdentifier i)
-
---------------------------------------------------------------------------------
--- | A context that contains (in that order)
---
--- 1. A @$body$@ field
---
--- 2. Metadata fields
---
--- 3. A @$url$@ 'urlField'
---
--- 4. A @$path$@ 'pathField'
---
--- 5. A @$title$@ 'titleField'
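---
--- In practice you usually extend it; for example (the extra date field is
--- only illustrative):
---
--- > postCtx :: Context String
--- > postCtx = dateField "date" "%B %e, %Y" <> defaultContext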
-defaultContext :: Context String
-defaultContext =
- bodyField "body" `mappend`
- metadataField `mappend`
- urlField "url" `mappend`
- pathField "path" `mappend`
- titleField "title" `mappend`
- missingField
-
-
---------------------------------------------------------------------------------
-teaserSeparator :: String
-teaserSeparator = "<!--more-->"
-
-
---------------------------------------------------------------------------------
--- | Constructs a 'field' that contains the body of the item.
-bodyField :: String -> Context String
-bodyField key = field key $ return . itemBody
-
-
---------------------------------------------------------------------------------
--- | Map any field to its metadata value, if present
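---
--- For example, if an item's metadata header contains
---
--- > ---
--- > author: Jane
--- > ---
---
--- then @$author$@ renders as @Jane@ wherever this context is in scope.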
-metadataField :: Context a
-metadataField = Context $ \k _ i -> do
- value <- getMetadataField (itemIdentifier i) k
- maybe empty (return . StringField) value
-
-
---------------------------------------------------------------------------------
--- | Absolute URL of the resulting item
-urlField :: String -> Context a
-urlField key = field key $
- fmap (maybe empty toUrl) . getRoute . itemIdentifier
-
-
---------------------------------------------------------------------------------
--- | Filepath of the underlying file of the item
-pathField :: String -> Context a
-pathField key = field key $ return . toFilePath . itemIdentifier
-
-
---------------------------------------------------------------------------------
--- | This title 'field' uses the basename of the underlying file; in
--- 'defaultContext' it acts as a fallback when no @title@ metadata is set.
-titleField :: String -> Context a
-titleField = mapContext takeBaseName . pathField
-
-
---------------------------------------------------------------------------------
--- | When the metadata has a field called @published@ or @date@ in one of the
--- following formats, this function can render the date.
---
--- * @Mon, 06 Sep 2010 00:01:00 +0000@
---
--- * @Mon, 06 Sep 2010 00:01:00 UTC@
---
--- * @Mon, 06 Sep 2010 00:01:00@
---
--- * @2010-09-06T00:01:00+0000@
---
--- * @2010-09-06T00:01:00Z@
---
--- * @2010-09-06T00:01:00@
---
--- * @2010-09-06 00:01:00+0000@
---
--- * @2010-09-06 00:01:00@
---
--- * @September 06, 2010 00:01 AM@
---
--- The following date-only formats are supported too (a time of @00:00:00@ is
--- assumed):
---
--- * @2010-09-06@
---
--- * @September 06, 2010@
---
--- Alternatively, when the date cannot be parsed from the metadata and the
--- item's file path follows the @folder/yyyy-mm-dd-title.extension@ convention
--- (the convention for pages), this function can render the date from the
--- path. The pattern matches any file or directory name that begins with
--- @yyyy-mm-dd@, for example @folder//yyyy-mm-dd-title//dist//main.extension@.
--- In case of multiple matches, the rightmost one is used.
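---
--- For example, to render the date in a long form such as
--- @September 6, 2010@:
---
--- > dateField "date" "%B %e, %Y"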
-
-dateField :: String -- ^ Key in which the rendered date should be placed
- -> String -- ^ Format to use on the date
- -> Context a -- ^ Resulting context
-dateField = dateFieldWith defaultTimeLocale
-
-
---------------------------------------------------------------------------------
--- | This is an extended version of 'dateField' that allows you to
--- specify a time locale that is used for outputting the date. For more
--- details, see 'dateField'.
-dateFieldWith :: TimeLocale -- ^ Output time locale
- -> String -- ^ Destination key
- -> String -- ^ Format to use on the date
- -> Context a -- ^ Resulting context
-dateFieldWith locale key format = field key $ \i -> do
- time <- getItemUTC locale $ itemIdentifier i
- return $ formatTime locale format time
-
-
---------------------------------------------------------------------------------
--- | Parser to try to extract and parse the time from the @published@
--- field or from the filename. See 'dateField' for more information.
--- Exported for user convenience.
-getItemUTC :: MonadMetadata m
- => TimeLocale -- ^ Output time locale
- -> Identifier -- ^ Input page
- -> m UTCTime -- ^ Parsed UTCTime
-getItemUTC locale id' = do
- metadata <- getMetadata id'
- let tryField k fmt = lookupString k metadata >>= parseTime' fmt
- paths = splitDirectories $ toFilePath id'
-
- maybe empty' return $ msum $
- [tryField "published" fmt | fmt <- formats] ++
- [tryField "date" fmt | fmt <- formats] ++
- [parseTime' "%Y-%m-%d" $ intercalate "-" $ take 3 $ splitAll "-" fnCand | fnCand <- reverse paths]
- where
- empty' = fail $ "Hakyll.Web.Template.Context.getItemUTC: " ++
- "could not parse time for " ++ show id'
- parseTime' = parseTimeM True locale
- formats =
- [ "%a, %d %b %Y %H:%M:%S %Z"
- , "%Y-%m-%dT%H:%M:%S%Z"
- , "%Y-%m-%d %H:%M:%S%Z"
- , "%Y-%m-%d"
- , "%B %e, %Y %l:%M %p"
- , "%B %e, %Y"
- , "%b %d, %Y"
- ]
-
-
---------------------------------------------------------------------------------
--- | Get the time on which the actual file was last modified. This only works if
--- there actually is an underlying file, of course.
-getItemModificationTime
- :: Identifier
- -> Compiler UTCTime
-getItemModificationTime identifier = do
- provider <- compilerProvider <$> compilerAsk
- return $ resourceModificationTime provider identifier
-
-
---------------------------------------------------------------------------------
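--- | A 'field' with the last modification time of the item's underlying file,
--- rendered with the given format string. For example:
---
--- > modificationTimeField "lastmod" "%Y-%m-%d"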
-modificationTimeField :: String -- ^ Key
- -> String -- ^ Format
-                      -> Context a -- ^ Resulting context
-modificationTimeField = modificationTimeFieldWith defaultTimeLocale
-
-
---------------------------------------------------------------------------------
-modificationTimeFieldWith :: TimeLocale -- ^ Time output locale
- -> String -- ^ Key
- -> String -- ^ Format
- -> Context a -- ^ Resulting context
-modificationTimeFieldWith locale key fmt = field key $ \i -> do
- mtime <- getItemModificationTime $ itemIdentifier i
- return $ formatTime locale fmt mtime
-
-
---------------------------------------------------------------------------------
--- | A context with a "teaser" key which contains a teaser of the item.
--- The item is loaded from the given snapshot (which should be saved
--- in the user code before any templates are applied).
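---
--- A typical pairing (the snapshot name @\"content\"@ is only a convention):
---
--- > compile $ pandocCompiler
--- >     >>= saveSnapshot "content"
---
--- and, in the context used for listings:
---
--- > teaserField "teaser" "content"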
-teaserField :: String -- ^ Key to use
- -> Snapshot -- ^ Snapshot to load
- -> Context String -- ^ Resulting context
-teaserField = teaserFieldWithSeparator teaserSeparator
-
-
---------------------------------------------------------------------------------
--- | A context with a "teaser" key which contains a teaser of the item, defined as
--- the snapshot content before the teaser separator. The item is loaded from the
--- given snapshot (which should be saved in the user code before any templates
--- are applied).
-teaserFieldWithSeparator :: String -- ^ Separator to use
- -> String -- ^ Key to use
- -> Snapshot -- ^ Snapshot to load
- -> Context String -- ^ Resulting context
-teaserFieldWithSeparator separator key snapshot = field key $ \item -> do
- body <- itemBody <$> loadSnapshot (itemIdentifier item) snapshot
- case needlePrefix separator body of
- Nothing -> fail $
- "Hakyll.Web.Template.Context: no teaser defined for " ++
- show (itemIdentifier item)
- Just t -> return t
-
-
---------------------------------------------------------------------------------
-missingField :: Context a
-missingField = Context $ \k _ i -> fail $
- "Missing field $" ++ k ++ "$ in context for item " ++
- show (itemIdentifier i)
-
-parseTimeM :: Bool -> TimeLocale -> String -> String -> Maybe UTCTime
-#if MIN_VERSION_time(1,5,0)
-parseTimeM = TF.parseTimeM
-#else
-parseTimeM _ = TF.parseTime
-#endif
diff --git a/src/Hakyll/Web/Template/Internal.hs b/src/Hakyll/Web/Template/Internal.hs
deleted file mode 100644
index d0e4d47..0000000
--- a/src/Hakyll/Web/Template/Internal.hs
+++ /dev/null
@@ -1,203 +0,0 @@
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-{-# LANGUAGE ScopedTypeVariables #-}
-module Hakyll.Web.Template.Internal
- ( Template (..)
- , template
- , templateBodyCompiler
- , templateCompiler
- , applyTemplate
- , applyTemplate'
- , loadAndApplyTemplate
- , applyAsTemplate
- , readTemplate
- , unsafeReadTemplateFile
-
- , module Hakyll.Web.Template.Internal.Element
- , module Hakyll.Web.Template.Internal.Trim
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad.Except (MonadError (..))
-import Data.Binary (Binary)
-import Data.List (intercalate)
-import Data.Typeable (Typeable)
-import GHC.Exts (IsString (..))
-import Prelude hiding (id)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-import Hakyll.Core.Writable
-import Hakyll.Web.Template.Context
-import Hakyll.Web.Template.Internal.Element
-import Hakyll.Web.Template.Internal.Trim
-
-
---------------------------------------------------------------------------------
--- | Datatype used for template substitutions.
-newtype Template = Template
- { unTemplate :: [TemplateElement]
- } deriving (Show, Eq, Binary, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Writable Template where
- -- Writing a template is impossible
- write _ _ = return ()
-
-
---------------------------------------------------------------------------------
-instance IsString Template where
- fromString = readTemplate
-
-
---------------------------------------------------------------------------------
--- | Wrap the constructor to ensure trim is called.
-template :: [TemplateElement] -> Template
-template = Template . trim
-
-
---------------------------------------------------------------------------------
-readTemplate :: String -> Template
-readTemplate = Template . trim . readTemplateElems
-
---------------------------------------------------------------------------------
--- | Read a template, without metadata header
-templateBodyCompiler :: Compiler (Item Template)
-templateBodyCompiler = cached "Hakyll.Web.Template.templateBodyCompiler" $ do
- item <- getResourceBody
- file <- getResourceFilePath
- return $ fmap (template . readTemplateElemsFile file) item
-
---------------------------------------------------------------------------------
--- | Read complete file contents as a template
-templateCompiler :: Compiler (Item Template)
-templateCompiler = cached "Hakyll.Web.Template.templateCompiler" $ do
- item <- getResourceString
- file <- getResourceFilePath
- return $ fmap (template . readTemplateElemsFile file) item
-
-
---------------------------------------------------------------------------------
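--- | Interpolates a 'Template' using the given 'Context', replacing the body
--- of the given item with the rendered result. A typical use, where @item@ is
--- the page being laid out and the template name is illustrative:
---
--- > tpl <- loadBody "templates/default.html"
--- > applyTemplate tpl defaultContext item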
-applyTemplate :: Template -- ^ Template
- -> Context a -- ^ Context
- -> Item a -- ^ Page
- -> Compiler (Item String) -- ^ Resulting item
-applyTemplate tpl context item = do
- body <- applyTemplate' (unTemplate tpl) context item
- return $ itemSetBody body item
-
-
---------------------------------------------------------------------------------
-applyTemplate'
- :: forall a.
- [TemplateElement] -- ^ Unwrapped Template
- -> Context a -- ^ Context
- -> Item a -- ^ Page
- -> Compiler String -- ^ Resulting item
-applyTemplate' tes context x = go tes
- where
- context' :: String -> [String] -> Item a -> Compiler ContextField
- context' = unContext (context `mappend` missingField)
-
- go = fmap concat . mapM applyElem
-
- trimError = error $ "Hakyll.Web.Template.applyTemplate: template not " ++
- "fully trimmed."
-
- ---------------------------------------------------------------------------
-
- applyElem :: TemplateElement -> Compiler String
-
- applyElem TrimL = trimError
-
- applyElem TrimR = trimError
-
- applyElem (Chunk c) = return c
-
- applyElem (Expr e) = applyExpr e >>= getString e
-
- applyElem Escaped = return "$"
-
- applyElem (If e t mf) = (applyExpr e >> go t) `catchError` handler
- where
- handler _ = case mf of
- Nothing -> return ""
- Just f -> go f
-
- applyElem (For e b s) = applyExpr e >>= \cf -> case cf of
- StringField _ -> fail $
- "Hakyll.Web.Template.applyTemplateWith: expected ListField but " ++
- "got StringField for expr " ++ show e
- ListField c xs -> do
- sep <- maybe (return "") go s
- bs <- mapM (applyTemplate' b c) xs
- return $ intercalate sep bs
-
- applyElem (Partial e) = do
- p <- applyExpr e >>= getString e
- Template tpl' <- loadBody (fromFilePath p)
- applyTemplate' tpl' context x
-
- ---------------------------------------------------------------------------
-
- applyExpr :: TemplateExpr -> Compiler ContextField
-
- applyExpr (Ident (TemplateKey k)) = context' k [] x
-
- applyExpr (Call (TemplateKey k) args) = do
- args' <- mapM (\e -> applyExpr e >>= getString e) args
- context' k args' x
-
- applyExpr (StringLiteral s) = return (StringField s)
-
- ----------------------------------------------------------------------------
-
- getString _ (StringField s) = return s
- getString e (ListField _ _) = fail $
- "Hakyll.Web.Template.applyTemplateWith: expected StringField but " ++
- "got ListField for expr " ++ show e
-
-
---------------------------------------------------------------------------------
--- | The following pattern is so common:
---
--- > tpl <- loadBody "templates/foo.html"
--- > someCompiler
--- > >>= applyTemplate tpl context
---
--- that we have a single function which does exactly this:
---
--- > someCompiler
--- > >>= loadAndApplyTemplate "templates/foo.html" context
-loadAndApplyTemplate :: Identifier -- ^ Template identifier
- -> Context a -- ^ Context
- -> Item a -- ^ Page
- -> Compiler (Item String) -- ^ Resulting item
-loadAndApplyTemplate identifier context item = do
- tpl <- loadBody identifier
- applyTemplate tpl context item
-
-
---------------------------------------------------------------------------------
--- | It is also possible that you want to substitute @$key$@s within the body of
--- an item. This function does that by interpreting the item body as a template,
--- and then applying it to itself.
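---
--- A common use is the site's index page (the file name is illustrative):
---
--- > match "index.html" $ do
--- >     route idRoute
--- >     compile $ getResourceBody >>= applyAsTemplate defaultContext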
-applyAsTemplate :: Context String -- ^ Context
- -> Item String -- ^ Item and template
- -> Compiler (Item String) -- ^ Resulting item
-applyAsTemplate context item =
- let tpl = template $ readTemplateElemsFile file (itemBody item)
- file = toFilePath $ itemIdentifier item
- in applyTemplate tpl context item
-
-
---------------------------------------------------------------------------------
-unsafeReadTemplateFile :: FilePath -> Compiler Template
-unsafeReadTemplateFile file = do
- tpl <- unsafeCompiler $ readFile file
- pure $ template $ readTemplateElemsFile file tpl
-
diff --git a/src/Hakyll/Web/Template/Internal/Element.hs b/src/Hakyll/Web/Template/Internal/Element.hs
deleted file mode 100644
index f564355..0000000
--- a/src/Hakyll/Web/Template/Internal/Element.hs
+++ /dev/null
@@ -1,298 +0,0 @@
---------------------------------------------------------------------------------
--- | Module containing the elements used in a template. A template is generally
--- just a list of these elements.
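---
--- For instance, the template text @\"Hello $name$!\"@ corresponds roughly to
---
--- > [Chunk "Hello ", Expr (Ident "name"), Chunk "!"]
---
--- (written using the 'IsString' instance of 'TemplateKey').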
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-module Hakyll.Web.Template.Internal.Element
- ( TemplateKey (..)
- , TemplateExpr (..)
- , TemplateElement (..)
- , templateElems
- , readTemplateElems
- , readTemplateElemsFile
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Applicative ((<|>))
-import Control.Monad (void)
-import Data.Binary (Binary, get, getWord8, put, putWord8)
-import Data.List (intercalate)
-import Data.Maybe (isJust)
-import Data.Typeable (Typeable)
-import GHC.Exts (IsString (..))
-import qualified Text.Parsec as P
-import qualified Text.Parsec.String as P
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Util.Parser
-
-
---------------------------------------------------------------------------------
-newtype TemplateKey = TemplateKey String
- deriving (Binary, Show, Eq, Typeable)
-
-
---------------------------------------------------------------------------------
-instance IsString TemplateKey where
- fromString = TemplateKey
-
-
---------------------------------------------------------------------------------
--- | Elements of a template.
-data TemplateElement
- = Chunk String
- | Expr TemplateExpr
- | Escaped
- -- expr, then, else
- | If TemplateExpr [TemplateElement] (Maybe [TemplateElement])
- -- expr, body, separator
- | For TemplateExpr [TemplateElement] (Maybe [TemplateElement])
- -- filename
- | Partial TemplateExpr
- | TrimL
- | TrimR
- deriving (Show, Eq, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Binary TemplateElement where
- put (Chunk string) = putWord8 0 >> put string
- put (Expr e) = putWord8 1 >> put e
- put Escaped = putWord8 2
- put (If e t f) = putWord8 3 >> put e >> put t >> put f
- put (For e b s) = putWord8 4 >> put e >> put b >> put s
- put (Partial e) = putWord8 5 >> put e
- put TrimL = putWord8 6
- put TrimR = putWord8 7
-
- get = getWord8 >>= \tag -> case tag of
- 0 -> Chunk <$> get
- 1 -> Expr <$> get
- 2 -> pure Escaped
- 3 -> If <$> get <*> get <*> get
- 4 -> For <$> get <*> get <*> get
- 5 -> Partial <$> get
- 6 -> pure TrimL
- 7 -> pure TrimR
- _ -> error "Hakyll.Web.Template.Internal: Error reading cached template"
-
-
---------------------------------------------------------------------------------
--- | Expression in a template
-data TemplateExpr
- = Ident TemplateKey
- | Call TemplateKey [TemplateExpr]
- | StringLiteral String
- deriving (Eq, Typeable)
-
-
---------------------------------------------------------------------------------
-instance Show TemplateExpr where
- show (Ident (TemplateKey k)) = k
- show (Call (TemplateKey k) as) =
- k ++ "(" ++ intercalate ", " (map show as) ++ ")"
- show (StringLiteral s) = show s
-
-
---------------------------------------------------------------------------------
-instance Binary TemplateExpr where
- put (Ident k) = putWord8 0 >> put k
- put (Call k as) = putWord8 1 >> put k >> put as
- put (StringLiteral s) = putWord8 2 >> put s
-
- get = getWord8 >>= \tag -> case tag of
- 0 -> Ident <$> get
- 1 -> Call <$> get <*> get
- 2 -> StringLiteral <$> get
- _ -> error "Hakyll.Web.Template.Internal: Error reading cached template"
-
-
---------------------------------------------------------------------------------
-readTemplateElems :: String -> [TemplateElement]
-readTemplateElems = readTemplateElemsFile "{literal}"
-
-
---------------------------------------------------------------------------------
-readTemplateElemsFile :: FilePath -> String -> [TemplateElement]
-readTemplateElemsFile file input = case P.parse templateElems file input of
- Left err -> error $ "Cannot parse template: " ++ show err
- Right t -> t
-
-
---------------------------------------------------------------------------------
-templateElems :: P.Parser [TemplateElement]
-templateElems = mconcat <$> P.many (P.choice [ lift chunk
- , lift escaped
- , conditional
- , for
- , partial
- , expr
- ])
- where lift = fmap (:[])
-
-
---------------------------------------------------------------------------------
-chunk :: P.Parser TemplateElement
-chunk = Chunk <$> P.many1 (P.noneOf "$")
-
-
---------------------------------------------------------------------------------
-expr :: P.Parser [TemplateElement]
-expr = P.try $ do
- trimLExpr <- trimOpen
- e <- expr'
- trimRExpr <- trimClose
- return $ [TrimL | trimLExpr] ++ [Expr e] ++ [TrimR | trimRExpr]
-
-
---------------------------------------------------------------------------------
-expr' :: P.Parser TemplateExpr
-expr' = stringLiteral <|> call <|> ident
-
-
---------------------------------------------------------------------------------
-escaped :: P.Parser TemplateElement
-escaped = Escaped <$ P.try (P.string "$$")
-
-
---------------------------------------------------------------------------------
-trimOpen :: P.Parser Bool
-trimOpen = do
- void $ P.char '$'
- trimLIf <- P.optionMaybe $ P.try (P.char '-')
- pure $ isJust trimLIf
-
-
---------------------------------------------------------------------------------
-trimClose :: P.Parser Bool
-trimClose = do
- trimIfR <- P.optionMaybe $ P.try (P.char '-')
- void $ P.char '$'
- pure $ isJust trimIfR
-
-
---------------------------------------------------------------------------------
-conditional :: P.Parser [TemplateElement]
-conditional = P.try $ do
- -- if
- trimLIf <- trimOpen
- void $ P.string "if("
- e <- expr'
- void $ P.char ')'
- trimRIf <- trimClose
- -- then
- thenBranch <- templateElems
- -- else
- elseParse <- opt "else"
- -- endif
- trimLEnd <- trimOpen
- void $ P.string "endif"
- trimREnd <- trimClose
-
- -- As else is optional we need to sort out where any Trim_s need to go.
- let (thenBody, elseBody) = maybe (thenNoElse, Nothing) thenElse elseParse
- where thenNoElse =
- [TrimR | trimRIf] ++ thenBranch ++ [TrimL | trimLEnd]
-
- thenElse (trimLElse, elseBranch, trimRElse) = (thenB, elseB)
- where thenB = [TrimR | trimRIf]
- ++ thenBranch
- ++ [TrimL | trimLElse]
-
- elseB = Just $ [TrimR | trimRElse]
- ++ elseBranch
- ++ [TrimL | trimLEnd]
-
- pure $ [TrimL | trimLIf] ++ [If e thenBody elseBody] ++ [TrimR | trimREnd]
-
-
---------------------------------------------------------------------------------
-for :: P.Parser [TemplateElement]
-for = P.try $ do
- -- for
- trimLFor <- trimOpen
- void $ P.string "for("
- e <- expr'
- void $ P.char ')'
- trimRFor <- trimClose
- -- body
- bodyBranch <- templateElems
- -- sep
- sepParse <- opt "sep"
- -- endfor
- trimLEnd <- trimOpen
- void $ P.string "endfor"
- trimREnd <- trimClose
-
- -- As sep is optional we need to sort out where any Trim_s need to go.
- let (forBody, sepBody) = maybe (forNoSep, Nothing) forSep sepParse
- where forNoSep =
- [TrimR | trimRFor] ++ bodyBranch ++ [TrimL | trimLEnd]
-
- forSep (trimLSep, sepBranch, trimRSep) = (forB, sepB)
- where forB = [TrimR | trimRFor]
- ++ bodyBranch
- ++ [TrimL | trimLSep]
-
- sepB = Just $ [TrimR | trimRSep]
- ++ sepBranch
- ++ [TrimL | trimLEnd]
-
- pure $ [TrimL | trimLFor] ++ [For e forBody sepBody] ++ [TrimR | trimREnd]
-
-
---------------------------------------------------------------------------------
-partial :: P.Parser [TemplateElement]
-partial = P.try $ do
- trimLPart <- trimOpen
- void $ P.string "partial("
- e <- expr'
- void $ P.char ')'
- trimRPart <- trimClose
-
- pure $ [TrimL | trimLPart] ++ [Partial e] ++ [TrimR | trimRPart]
-
-
---------------------------------------------------------------------------------
-ident :: P.Parser TemplateExpr
-ident = P.try $ Ident <$> key
-
-
---------------------------------------------------------------------------------
-call :: P.Parser TemplateExpr
-call = P.try $ do
- f <- key
- void $ P.char '('
- P.spaces
- as <- P.sepBy expr' (P.spaces >> P.char ',' >> P.spaces)
- P.spaces
- void $ P.char ')'
- return $ Call f as
-
-
---------------------------------------------------------------------------------
-stringLiteral :: P.Parser TemplateExpr
-stringLiteral = do
- void $ P.char '\"'
- str <- P.many $ do
- x <- P.noneOf "\""
- if x == '\\' then P.anyChar else return x
- void $ P.char '\"'
- return $ StringLiteral str
-
-
---------------------------------------------------------------------------------
-key :: P.Parser TemplateKey
-key = TemplateKey <$> metadataKey
-
-
---------------------------------------------------------------------------------
-opt :: String -> P.Parser (Maybe (Bool, [TemplateElement], Bool))
-opt clause = P.optionMaybe $ P.try $ do
- trimL <- trimOpen
- void $ P.string clause
- trimR <- trimClose
- branch <- templateElems
- pure (trimL, branch, trimR)
-
diff --git a/src/Hakyll/Web/Template/Internal/Trim.hs b/src/Hakyll/Web/Template/Internal/Trim.hs
deleted file mode 100644
index e416ff2..0000000
--- a/src/Hakyll/Web/Template/Internal/Trim.hs
+++ /dev/null
@@ -1,95 +0,0 @@
---------------------------------------------------------------------------------
--- | Module for trimming whitespace from templates.
-module Hakyll.Web.Template.Internal.Trim
- ( trim
- ) where
-
-
---------------------------------------------------------------------------------
-import Data.Char (isSpace)
-import Data.List (dropWhileEnd)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Web.Template.Internal.Element
-
-
---------------------------------------------------------------------------------
-trim :: [TemplateElement] -> [TemplateElement]
-trim = cleanse . canonicalize
-
-
---------------------------------------------------------------------------------
--- | Apply the Trim nodes to the Chunks.
-cleanse :: [TemplateElement] -> [TemplateElement]
-cleanse = recurse cleanse . process
- where process [] = []
- process (TrimR:Chunk str:ts) = let str' = dropWhile isSpace str
- in if null str'
- then process ts
- -- Might need to TrimL.
- else process $ Chunk str':ts
-
- process (Chunk str:TrimL:ts) = let str' = dropWhileEnd isSpace str
- in if null str'
- then process ts
- else Chunk str':process ts
-
- process (t:ts) = t:process ts
-
---------------------------------------------------------------------------------
--- | Enforce the invariant that:
---
--- * Every 'TrimL' has a 'Chunk' to its left.
--- * Every 'TrimR' has a 'Chunk' to its right.
---
-canonicalize :: [TemplateElement] -> [TemplateElement]
-canonicalize = go
- where go t = let t' = redundant . swap $ dedupe t
- in if t == t' then t else go t'
-
-
---------------------------------------------------------------------------------
--- | Remove the 'TrimR' and 'TrimL's that are no-ops.
-redundant :: [TemplateElement] -> [TemplateElement]
-redundant = recurse redundant . process
- where -- Remove the leading 'TrimL's.
- process (TrimL:ts) = process ts
- -- Remove trailing 'TrimR's.
- process ts = foldr trailing [] ts
- where trailing TrimR [] = []
- trailing x xs = x:xs
-
-
---------------------------------------------------------------------------------
--- | Swap each 'TrimR' that directly precedes a 'TrimL', e.g.
---
--- >>> swap [TrimR, TrimL]
--- [TrimL, TrimR]
-swap :: [TemplateElement] -> [TemplateElement]
-swap = recurse swap . process
- where process [] = []
- process (TrimR:TrimL:ts) = TrimL:process (TrimR:ts)
- process (t:ts) = t:process ts
-
-
---------------------------------------------------------------------------------
--- | Remove 'TrimR' and 'TrimL' duplication.
-dedupe :: [TemplateElement] -> [TemplateElement]
-dedupe = recurse dedupe . process
- where process [] = []
- process (TrimR:TrimR:ts) = process (TrimR:ts)
- process (TrimL:TrimL:ts) = process (TrimL:ts)
- process (t:ts) = t:process ts
-
-
---------------------------------------------------------------------------------
--- | @'recurse' f t@ applies f to every '[TemplateElement]' in t.
-recurse :: ([TemplateElement] -> [TemplateElement])
- -> [TemplateElement]
- -> [TemplateElement]
-recurse _ [] = []
-recurse f (x:xs) = process x:recurse f xs
- where process y = case y of
- If e tb eb -> If e (f tb) (f <$> eb)
- For e t s -> For e (f t) (f <$> s)
- _ -> y
-
diff --git a/src/Hakyll/Web/Template/List.hs b/src/Hakyll/Web/Template/List.hs
deleted file mode 100644
index 4d769fc..0000000
--- a/src/Hakyll/Web/Template/List.hs
+++ /dev/null
@@ -1,91 +0,0 @@
---------------------------------------------------------------------------------
--- | Provides an easy way to combine several items in a list. The applications
--- are obvious:
---
--- * A post list on a blog
---
--- * An image list in a gallery
---
--- * A sitemap
-{-# LANGUAGE TupleSections #-}
-module Hakyll.Web.Template.List
- ( applyTemplateList
- , applyJoinTemplateList
- , chronological
- , recentFirst
- , sortChronological
- , sortRecentFirst
- ) where
-
-
---------------------------------------------------------------------------------
-import Control.Monad (liftM)
-import Data.List (intersperse, sortBy)
-import Data.Ord (comparing)
-import Data.Time.Locale.Compat (defaultTimeLocale)
-
-
---------------------------------------------------------------------------------
-import Hakyll.Core.Compiler
-import Hakyll.Core.Identifier
-import Hakyll.Core.Item
-import Hakyll.Core.Metadata
-import Hakyll.Web.Template
-import Hakyll.Web.Template.Context
-
-
---------------------------------------------------------------------------------
--- | Generate a string of a listing of pages, after applying a template to each
--- page.
-applyTemplateList :: Template
- -> Context a
- -> [Item a]
- -> Compiler String
-applyTemplateList = applyJoinTemplateList ""
-
-
---------------------------------------------------------------------------------
--- | Join a listing of pages with a string in between, after applying a template
--- to each page.
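---
--- For example, where @itemTpl@ and @itemCtx@ are whatever template and
--- context you use for a single item:
---
--- > applyJoinTemplateList ", " itemTpl itemCtx items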
-applyJoinTemplateList :: String
- -> Template
- -> Context a
- -> [Item a]
- -> Compiler String
-applyJoinTemplateList delimiter tpl context items = do
- items' <- mapM (applyTemplate tpl context) items
- return $ concat $ intersperse delimiter $ map itemBody items'
-
-
---------------------------------------------------------------------------------
--- | Sort pages chronologically. Uses the same method as 'dateField' for
--- extracting the date.
-chronological :: MonadMetadata m => [Item a] -> m [Item a]
-chronological =
- sortByM $ getItemUTC defaultTimeLocale . itemIdentifier
- where
- sortByM :: (Monad m, Ord k) => (a -> m k) -> [a] -> m [a]
- sortByM f xs = liftM (map fst . sortBy (comparing snd)) $
- mapM (\x -> liftM (x,) (f x)) xs
-
-
---------------------------------------------------------------------------------
--- | The reverse of 'chronological'.
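---
--- For example, in a compiler for a post listing (the pattern is
--- illustrative):
---
--- > posts <- recentFirst =<< loadAll "posts/*"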
-recentFirst :: MonadMetadata m => [Item a] -> m [Item a]
-recentFirst = liftM reverse . chronological
-
-
---------------------------------------------------------------------------------
--- | Version of 'chronological' which doesn't need the actual items.
-sortChronological
- :: MonadMetadata m => [Identifier] -> m [Identifier]
-sortChronological ids =
- liftM (map itemIdentifier) $ chronological [Item i () | i <- ids]
-
-
---------------------------------------------------------------------------------
--- | Version of 'recentFirst' which doesn't need the actual items.
-sortRecentFirst
- :: MonadMetadata m => [Identifier] -> m [Identifier]
-sortRecentFirst ids =
- liftM (map itemIdentifier) $ recentFirst [Item i () | i <- ids]