summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.ghci2
-rw-r--r--README.markdown7
-rw-r--r--data/templates/atom-item.xml10
-rw-r--r--data/templates/atom.xml14
-rw-r--r--data/templates/rss-item.xml10
-rw-r--r--data/templates/rss.xml12
-rw-r--r--examples/brochure/hakyll.hs31
-rw-r--r--examples/brochure/templates/default.html16
-rw-r--r--examples/hakyll/changelog.markdown4
-rw-r--r--examples/hakyll/css/default.css4
-rw-r--r--examples/hakyll/examples/brochure.zipbin3672 -> 3689 bytes
-rw-r--r--examples/hakyll/hakyll.hs77
-rw-r--r--examples/hakyll/images/arrow-composition.pngbin16066 -> 0 bytes
-rw-r--r--examples/hakyll/images/tutorial8-categories.pngbin9377 -> 0 bytes
-rw-r--r--examples/hakyll/images/tutorial8-tags.pngbin8631 -> 0 bytes
-rw-r--r--examples/hakyll/index.markdown20
-rw-r--r--examples/hakyll/sidebar.markdown13
-rw-r--r--examples/hakyll/templates/default.html14
-rw-r--r--examples/hakyll/tutorial.markdown236
-rw-r--r--examples/hakyll/tutorials/part01.markdown235
-rw-r--r--examples/hakyll/tutorials/part02.markdown91
-rw-r--r--examples/hakyll/tutorials/part03.markdown174
-rw-r--r--examples/hakyll/tutorials/part04.markdown60
-rw-r--r--examples/hakyll/tutorials/part05.markdown157
-rw-r--r--examples/hakyll/tutorials/part06.markdown84
-rw-r--r--examples/hakyll/tutorials/part07.markdown221
-rw-r--r--examples/hakyll/tutorials/part08.markdown97
-rw-r--r--examples/hakyll/tutorials/part09.markdown104
-rw-r--r--hakyll.cabal126
-rw-r--r--src-inotify/Hakyll/Web/Preview/Poll.hs51
-rw-r--r--src-interval/Hakyll/Web/Preview/Poll.hs36
-rw-r--r--src/Hakyll.hs55
-rw-r--r--src/Hakyll/Core/CompiledItem.hs45
-rw-r--r--src/Hakyll/Core/Compiler.hs333
-rw-r--r--src/Hakyll/Core/Compiler/Internal.hs146
-rw-r--r--src/Hakyll/Core/Configuration.hs44
-rw-r--r--src/Hakyll/Core/CopyFile.hs29
-rw-r--r--src/Hakyll/Core/DirectedGraph.hs85
-rw-r--r--src/Hakyll/Core/DirectedGraph/DependencySolver.hs70
-rw-r--r--src/Hakyll/Core/DirectedGraph/Dot.hs30
-rw-r--r--src/Hakyll/Core/DirectedGraph/Internal.hs43
-rw-r--r--src/Hakyll/Core/Identifier.hs59
-rw-r--r--src/Hakyll/Core/Identifier/Pattern.hs160
-rw-r--r--src/Hakyll/Core/Logger.hs90
-rw-r--r--src/Hakyll/Core/ResourceProvider.hs75
-rw-r--r--src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs29
-rw-r--r--src/Hakyll/Core/Routes.hs136
-rw-r--r--src/Hakyll/Core/Rules.hs161
-rw-r--r--src/Hakyll/Core/Rules/Internal.hs75
-rw-r--r--src/Hakyll/Core/Run.hs207
-rw-r--r--src/Hakyll/Core/Store.hs88
-rw-r--r--src/Hakyll/Core/UnixFilter.hs76
-rw-r--r--src/Hakyll/Core/Util/Arrow.hs25
-rw-r--r--src/Hakyll/Core/Util/File.hs90
-rw-r--r--src/Hakyll/Core/Util/String.hs48
-rw-r--r--src/Hakyll/Core/Writable.hs22
-rw-r--r--src/Hakyll/Main.hs113
-rw-r--r--src/Hakyll/Web/CompressCss.hs (renamed from src/Text/Hakyll/Internal/CompressCss.hs)31
-rw-r--r--src/Hakyll/Web/Feed.hs124
-rw-r--r--src/Hakyll/Web/FileType.hs55
-rw-r--r--src/Hakyll/Web/Page.hs124
-rw-r--r--src/Hakyll/Web/Page/Internal.hs50
-rw-r--r--src/Hakyll/Web/Page/Metadata.hs131
-rw-r--r--src/Hakyll/Web/Page/Read.hs60
-rw-r--r--src/Hakyll/Web/Pandoc.hs110
-rw-r--r--src/Hakyll/Web/Preview/Server.hs72
-rw-r--r--src/Hakyll/Web/RelativizeUrls.hs62
-rw-r--r--src/Hakyll/Web/Tags.hs180
-rw-r--r--src/Hakyll/Web/Template.hs109
-rw-r--r--src/Hakyll/Web/Template/Internal.hs45
-rw-r--r--src/Hakyll/Web/Template/Read.hs10
-rw-r--r--src/Hakyll/Web/Template/Read/Hakyll.hs35
-rw-r--r--src/Hakyll/Web/Template/Read/Hamlet.hs46
-rw-r--r--src/Hakyll/Web/Util/Url.hs30
-rw-r--r--src/Network/Hakyll/SimpleServer.hs215
-rw-r--r--src/Text/Hakyll.hs185
-rw-r--r--src/Text/Hakyll/Configurations/Static.hs59
-rw-r--r--src/Text/Hakyll/Context.hs16
-rw-r--r--src/Text/Hakyll/ContextManipulations.hs124
-rw-r--r--src/Text/Hakyll/CreateContext.hs114
-rw-r--r--src/Text/Hakyll/Feed.hs112
-rw-r--r--src/Text/Hakyll/File.hs196
-rw-r--r--src/Text/Hakyll/HakyllAction.hs98
-rw-r--r--src/Text/Hakyll/HakyllMonad.hs99
-rw-r--r--src/Text/Hakyll/Internal/Cache.hs53
-rw-r--r--src/Text/Hakyll/Internal/FileType.hs49
-rw-r--r--src/Text/Hakyll/Internal/Template.hs86
-rw-r--r--src/Text/Hakyll/Internal/Template/Hamlet.hs56
-rw-r--r--src/Text/Hakyll/Internal/Template/Template.hs34
-rw-r--r--src/Text/Hakyll/Page.hs108
-rw-r--r--src/Text/Hakyll/Paginate.hs94
-rw-r--r--src/Text/Hakyll/Pandoc.hs57
-rw-r--r--src/Text/Hakyll/Regex.hs77
-rw-r--r--src/Text/Hakyll/Render.hs126
-rw-r--r--src/Text/Hakyll/Tags.hs172
-rw-r--r--src/Text/Hakyll/Util.hs34
-rw-r--r--tests/CompressCss.hs42
-rw-r--r--tests/File.hs62
-rw-r--r--tests/Hakyll/Core/DirectedGraph/Tests.hs36
-rw-r--r--tests/Hakyll/Core/Identifier/Tests.hs22
-rw-r--r--tests/Hakyll/Core/Routes/Tests.hs24
-rw-r--r--tests/Hakyll/Web/Page/Tests.hs32
-rw-r--r--tests/Hakyll/Web/RelativizeUrls/Tests.hs20
-rw-r--r--tests/Hakyll/Web/Template/Tests.hs44
-rw-r--r--tests/Main.hs22
-rw-r--r--tests/Page.hs92
-rw-r--r--tests/Regex.hs24
-rw-r--r--tests/Template.hs70
-rw-r--r--tests/TestSuite.hs26
-rw-r--r--tests/TestSuite/Util.hs16
-rw-r--r--tests/Util.hs54
111 files changed, 4356 insertions, 3908 deletions
diff --git a/.ghci b/.ghci
index fd1deff..a42ffe2 100644
--- a/.ghci
+++ b/.ghci
@@ -1 +1 @@
-:set -isrc -itests -idist/build/autogen
+:set -isrc -isrc-inotify -itests -idist/build/autogen
diff --git a/README.markdown b/README.markdown
index e74f452..42cef0d 100644
--- a/README.markdown
+++ b/README.markdown
@@ -1,11 +1,6 @@
# Hakyll
-Hakyll is a simple static site generator library in Haskell. It is mostly
-inspired by [Jekyll](http://github.com/mojombo/jekyll), but I like to
-believe it is simpler. An example site where it is used is
-[my personal homepage](http://jaspervdj.be) of which
-[the source code](http://github.com/jaspervdj/jaspervdj) is available here on
-github as well, as a reference.
+Hakyll is a static site generator library in Haskell.
## Installation
diff --git a/data/templates/atom-item.xml b/data/templates/atom-item.xml
index 10d9b79..e9185f2 100644
--- a/data/templates/atom-item.xml
+++ b/data/templates/atom-item.xml
@@ -1,7 +1,7 @@
<entry>
- <title>$title</title>
- <link href="$absolute/$url" />
- <id>$absolute/$url</id>
- <updated>$timestamp</updated>
- <summary type="html"><![CDATA[$description]]></summary>
+ <title>$title$</title>
+ <link href="$root$$url$" />
+ <id>$root$$url$</id>
+ <updated>$timestamp$</updated>
+ <summary type="html"><![CDATA[$description$]]></summary>
</entry>
diff --git a/data/templates/atom.xml b/data/templates/atom.xml
index a0b30ef..5a7c0cf 100644
--- a/data/templates/atom.xml
+++ b/data/templates/atom.xml
@@ -1,12 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
- <title>$title</title>
- <link href="$absolute/$url" rel="self" />
- <link href="$absolute" />
- <id>$absolute/$url</id>
+ <title>$title$</title>
+ <link href="$root$$url$" rel="self" />
+ <link href="$root$" />
+ <id>$root$$url$</id>
<author>
- <name>$authorName</name>
+ <name>$authorName$</name>
</author>
- <updated>$timestamp</updated>
- $body
+ <updated>$timestamp$</updated>
+ $body$
</feed>
diff --git a/data/templates/rss-item.xml b/data/templates/rss-item.xml
index e04864e..f8f9fcf 100644
--- a/data/templates/rss-item.xml
+++ b/data/templates/rss-item.xml
@@ -1,7 +1,7 @@
<item>
- <title>$title</title>
- <link>$absolute/$url</link>
- <description><![CDATA[$description]]></description>
- <pubDate>$timestamp</pubDate>
- <guid>$absolute/$url</guid>
+ <title>$title$</title>
+ <link>$root$$url$</link>
+ <description><![CDATA[$description$]]></description>
+ <pubDate>$timestamp$</pubDate>
+ <guid>$root$$url$</guid>
</item>
diff --git a/data/templates/rss.xml b/data/templates/rss.xml
index bbdfc7e..d2f3915 100644
--- a/data/templates/rss.xml
+++ b/data/templates/rss.xml
@@ -1,12 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
- <title>$title</title>
- <link>$absolute</link>
- <description><![CDATA[$description]]></description>
- <atom:link href="$absolute/$url" rel="self"
+ <title>$title$</title>
+ <link>$root$</link>
+ <description><![CDATA[$description$]]></description>
+ <atom:link href="$root$$url$" rel="self"
type="application/rss+xml" />
- <lastBuildDate>$timestamp</lastBuildDate>
- $body
+ <lastBuildDate>$timestamp$</lastBuildDate>
+ $body$
</channel>
</rss>
diff --git a/examples/brochure/hakyll.hs b/examples/brochure/hakyll.hs
index a74bee1..8040bdf 100644
--- a/examples/brochure/hakyll.hs
+++ b/examples/brochure/hakyll.hs
@@ -1,13 +1,18 @@
-import Text.Hakyll (hakyll)
-import Text.Hakyll.File (directory)
-import Text.Hakyll.Render (css, static, renderChain)
-import Text.Hakyll.CreateContext (createPage)
-
-main = hakyll "http://example.com" $ do
- directory css "css"
- render "about.rst"
- render "index.markdown"
- render "code.lhs"
- where
- render = renderChain ["templates/default.html"]
- . createPage
+{-# LANGUAGE OverloadedStrings #-}
+import Control.Arrow ((>>>))
+import Control.Monad (forM_)
+
+import Hakyll
+
+main :: IO ()
+main = hakyll $ do
+ route "css/*" idRoute
+ compile "css/*" compressCssCompiler
+
+ compile "templates/*" templateCompiler
+
+ forM_ ["about.rst", "index.markdown", "code.lhs"] $ \page -> do
+ route page $ setExtension "html"
+ compile page $ readPageCompiler
+ >>> applyTemplateCompiler "templates/default.html"
+ >>> relativizeUrlsCompiler
diff --git a/examples/brochure/templates/default.html b/examples/brochure/templates/default.html
index 794d449..8fb75cb 100644
--- a/examples/brochure/templates/default.html
+++ b/examples/brochure/templates/default.html
@@ -4,18 +4,18 @@
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
- <title>MyAweSomeCompany - $title</title>
- <link rel="stylesheet" type="text/css" href="$root/css/default.css" />
- <link rel="stylesheet" type="text/css" href="$root/css/syntax.css" />
+ <title>MyAweSomeCompany - $title$</title>
+ <link rel="stylesheet" type="text/css" href="/css/default.css" />
+ <link rel="stylesheet" type="text/css" href="/css/syntax.css" />
</head>
<body>
- <h1>MyAweSomeCompany - $title</h1>
+ <h1>MyAweSomeCompany - $title$</h1>
<div id="navigation">
- <a href="$root/index.html">Home</a>
- <a href="$root/about.html">About</a>
- <a href="$root/code.html">Code</a>
+ <a href="/index.html">Home</a>
+ <a href="/about.html">About</a>
+ <a href="/code.html">Code</a>
</div>
- $body
+ $body$
</body>
</html>
diff --git a/examples/hakyll/changelog.markdown b/examples/hakyll/changelog.markdown
index 92c60fb..1a4cd6c 100644
--- a/examples/hakyll/changelog.markdown
+++ b/examples/hakyll/changelog.markdown
@@ -2,6 +2,10 @@
title: Changelog
---
+## Hakyll 3
+
+- Complete rewrite
+
## Hakyll 2.4.1
- Add a number of utility functions
diff --git a/examples/hakyll/css/default.css b/examples/hakyll/css/default.css
index 0e9462c..f895f48 100644
--- a/examples/hakyll/css/default.css
+++ b/examples/hakyll/css/default.css
@@ -75,6 +75,10 @@ h3 {
text-transform: uppercase;
}
+h1 a, h2 a, h3 a {
+ text-decoration: none;
+}
+
div.column {
width: 50%;
float: left;
diff --git a/examples/hakyll/examples/brochure.zip b/examples/hakyll/examples/brochure.zip
index bcdfc67..619d9a9 100644
--- a/examples/hakyll/examples/brochure.zip
+++ b/examples/hakyll/examples/brochure.zip
Binary files differ
diff --git a/examples/hakyll/hakyll.hs b/examples/hakyll/hakyll.hs
index ca4cd6e..c4f339c 100644
--- a/examples/hakyll/hakyll.hs
+++ b/examples/hakyll/hakyll.hs
@@ -1,39 +1,50 @@
-import Text.Hakyll
-import Text.Hakyll.Render
-import Text.Hakyll.File
-import Text.Hakyll.CreateContext
-import Control.Monad.Reader (liftIO)
-import System.Directory
-import Control.Monad (mapM_, forM_, liftM)
-import Data.List (sort)
+{-# LANGUAGE OverloadedStrings #-}
+import Hakyll
+import Control.Monad (forM_)
+import Control.Arrow ((>>>), arr)
+import Text.Pandoc
-main = hakyll "http://jaspervdj.be/hakyll" $ do
- directory css "css"
- directory static "images"
- directory static "examples"
- directory static "reference"
+main :: IO ()
+main = hakyll $ do
+ route "css/*" idRoute
+ compile "css/*" compressCssCompiler
- tutorials <- liftM sort $ getRecursiveContents "tutorials"
- let tutorialPage = createListing "tutorials.html"
- ["templates/tutorialitem.html"]
- (map createPage tutorials)
- [("title", Left "Tutorials")]
- renderChain ["templates/tutorials.html", "templates/default.html"]
- (withSidebar tutorialPage)
+ -- Static directories
+ forM_ ["images/*", "examples/*", "reference/*"] $ \f -> do
+ route f idRoute
+ compile f copyFileCompiler
- mapM_ (render' ["templates/default.html"]) $
- [ "about.markdown"
- , "index.markdown"
- , "philosophy.markdown"
- , "reference.markdown"
- , "changelog.markdown"
- ]
+ -- Pages
+ forM_ pages $ \p -> do
+ route p $ setExtension "html"
+ compile p $ pageCompiler
+ >>> requireA "sidebar.markdown" (setFieldA "sidebar" $ arr pageBody)
+ >>> applyTemplateCompiler "templates/default.html"
+ >>> relativizeUrlsCompiler
- forM_ tutorials $ render' [ "templates/tutorial.html"
- , "templates/default.html"
- ]
+ -- Tutorial
+ route "tutorial.markdown" $ setExtension "html"
+ compile "tutorial.markdown" $ readPageCompiler
+ >>> pageRenderPandocWith defaultHakyllParserState withToc
+ >>> requireA "sidebar.markdown" (setFieldA "sidebar" $ arr pageBody)
+ >>> applyTemplateCompiler "templates/default.html"
+ >>> relativizeUrlsCompiler
+ -- Sidebar
+ compile "sidebar.markdown" pageCompiler
+
+ -- Templates
+ compile "templates/*" templateCompiler
where
- render' templates = renderChain templates . withSidebar . createPage
- withSidebar a = a `combine` createPage "sidebar.markdown"
-
+ withToc = defaultHakyllWriterOptions
+ { writerTableOfContents = True
+ , writerTemplate = "<h2>Table of contents</h2>\n$toc$\n$body$"
+ , writerStandalone = True
+ }
+
+ pages = [ "about.markdown"
+ , "changelog.markdown"
+ , "index.markdown"
+ , "philosophy.markdown"
+ , "reference.markdown"
+ ]
diff --git a/examples/hakyll/images/arrow-composition.png b/examples/hakyll/images/arrow-composition.png
deleted file mode 100644
index 947561a..0000000
--- a/examples/hakyll/images/arrow-composition.png
+++ /dev/null
Binary files differ
diff --git a/examples/hakyll/images/tutorial8-categories.png b/examples/hakyll/images/tutorial8-categories.png
deleted file mode 100644
index 0567917..0000000
--- a/examples/hakyll/images/tutorial8-categories.png
+++ /dev/null
Binary files differ
diff --git a/examples/hakyll/images/tutorial8-tags.png b/examples/hakyll/images/tutorial8-tags.png
deleted file mode 100644
index d8ae73e..0000000
--- a/examples/hakyll/images/tutorial8-tags.png
+++ /dev/null
Binary files differ
diff --git a/examples/hakyll/index.markdown b/examples/hakyll/index.markdown
index 18c3352..911f2f0 100644
--- a/examples/hakyll/index.markdown
+++ b/examples/hakyll/index.markdown
@@ -2,6 +2,12 @@
title: Home
---
+## Hakyll 3
+
+Hakyll 3 has been released, and it can be installed by running
+`cabal install hakyll`. For a limited time (but as long as is necessary) you can
+access the old site and documentation [here](/hakyll2).
+
## Overview
Hakyll is a [Haskell](http://haskell.org) library for generating static sites,
@@ -34,10 +40,10 @@ Some sites written in Hakyll:
## Getting Started
-You can get the latest version from hackage using `cabal install hakyll`. I
-have written a few [tutorials](tutorials.html), and reading them is highly
-recommended if you want to get started with hakyll. Also, there are a few
-examples available in
-[the github repo](http://github.com/jaspervdj/Hakyll/tree/master/examples/),
-including this site. For any questions you might have, there is also a
-[google discussion group](http://groups.google.com/group/hakyll).
+You can get the latest version from hackage using `cabal install hakyll`. Then,
+you can:
+
+- read the [tutorial](/tutorial.html);
+- mail the [google discussion group](http://groups.google.com/group/hakyll);
+- ask questions on the IRC channel: `#hakyll` on
+ [freenode](http://freenode.net/).
diff --git a/examples/hakyll/sidebar.markdown b/examples/hakyll/sidebar.markdown
index 9444c58..fb40b5d 100644
--- a/examples/hakyll/sidebar.markdown
+++ b/examples/hakyll/sidebar.markdown
@@ -1,9 +1,8 @@
---- sidebar
## Navigation
-[home]($root/index.html)
-[philosophy]($root/philosophy.html)
-[about]($root/about.html)
-[tutorials]($root/tutorials.html)
-[reference]($root/reference.html)
-[changelog]($root/changelog.html)
+[home](/index.html)
+[philosophy](/philosophy.html)
+[about](/about.html)
+[tutorial](/tutorial.html)
+[reference](/reference.html)
+[changelog](/changelog.html)
diff --git a/examples/hakyll/templates/default.html b/examples/hakyll/templates/default.html
index 2e94f72..139564b 100644
--- a/examples/hakyll/templates/default.html
+++ b/examples/hakyll/templates/default.html
@@ -4,11 +4,11 @@
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
- <title>Hakyll - $title</title>
+ <title>Hakyll - $title$</title>
<!-- Stylesheets. -->
- <link rel="stylesheet" type="text/css" href="$root/css/default.css" />
- <link rel="stylesheet" type="text/css" href="$root/css/syntax.css" />
+ <link rel="stylesheet" type="text/css" href="/css/default.css" />
+ <link rel="stylesheet" type="text/css" href="/css/syntax.css" />
<!-- Metadata. -->
<meta name="keywords" content="hakyll,static site generator,static,site,generator,haskell,blog"/>
@@ -17,16 +17,16 @@
<body>
<div id="main">
<div id="header">
- <img src="$root/images/lambda.png" alt="lambda" />
- <h1>Hakyll - $title</h1>
+ <img src="/images/lambda.png" alt="lambda" />
+ <h1>Hakyll - $title$</h1>
</div>
<!-- Sidebar. -->
<div id="sidebar">
- $sidebar
+ $sidebar$
</div>
<div id="content">
- $body
+ $body$
</div>
<div id="footer">
diff --git a/examples/hakyll/tutorial.markdown b/examples/hakyll/tutorial.markdown
new file mode 100644
index 0000000..9ee6066
--- /dev/null
+++ b/examples/hakyll/tutorial.markdown
@@ -0,0 +1,236 @@
+---
+title: Tutorial
+---
+
+Why static websites?
+--------------------
+
+Modern web frameworks make it easy to create huge dynamic websites. Why would
+anyone still care about a static website?
+
+- Static websites are fast, because it's simply files served directly from the
+ hard disk.
+- Static websites are secure. Nobody has ever found an SQL injection in static
+ pages.
+- Static websites are easy to deploy. Just copy them to your webhost using
+ (S)FTP/rsync/scp and you are done. They work on all webhosts: no CGI or extra
+ modules needed for the web server.
+
+Why Hakyll?
+-----------
+
+Hakyll is a [Haskell] library meant for creating small-to-medium sized static
+websites. It is a powerful publishing tool, precisely because of the power of
+Haskell. By using the awesome [pandoc] library, it is able to create your
+website from a large variety of input formats.
+
+[Haskell]: http://haskell.org/
+[pandoc]: http://johnmacfarlane.net/pandoc/
+
+Features include:
+
+- easy templating system;
+- a simple HTTP server for previewing and compiling your website on the go;
+- powerful syntax highlighting;
+- modules for common items such as tags and feeds;
+- easily extensible.
+
+A simple brochure site
+----------------------
+
+### The two layers
+
+Hakyll consists of two important layers:
+
+- A top-level declarative eDSL, used to describe the relations between the
+ different items,
+- A lower-level filter-like eDSL built on Arrows.
+
+Both layers are used in the configuration file of your website. This
+configuration file is conventionally called `hakyll.hs` and placed at the root
+of your website directory.
+
+### The Rules DSL
+
+The Rules DSL is probably the simpler one. Let's look at a very simple example
+of a `hakyll.hs`. This piece of code might look a little confusing, but don't
+worry, we'll walk through it in detail.
+
+~~~~~{.haskell}
+{-# LANGUAGE OverloadedStrings #-}
+import Control.Arrow ((>>>))
+import Control.Monad (forM_)
+
+import Hakyll
+
+main :: IO ()
+main = hakyll $ do
+ route "css/*" idRoute
+ compile "css/*" compressCssCompiler
+
+ compile "templates/*" templateCompiler
+
+ forM_ ["about.rst", "index.markdown", "code.lhs"] $ \page -> do
+ route page $ setExtension "html"
+ compile page $ readPageCompiler
+ >>> applyTemplateCompiler "templates/default.html"
+ >>> relativizeUrlsCompiler
+~~~~~
+
+This is enough code to create a small brochure site! You can find all code
+and files necessary to build this site [right here](/examples/brochure.zip)
+-- feel free to play around with it!
+
+To create your site, compile and run your `hakyll.hs`:
+
+ [jasper@phoenix] ghc --make hakyll.hs
+ [jasper@phoenix] ./hakyll preview
+
+Alternatively,
+
+ [jasper@phoenix] runghc hakyll.hs preview
+
+Our code begins with a number of imports. Nothing out of the ordinary here, but
+do note that we use the `OverloadedStrings` extension for conciseness.
+
+~~~~~{.haskell}
+{-# LANGUAGE OverloadedStrings #-}
+import Control.Arrow ((>>>))
+import Control.Monad (forM_)
+
+import Hakyll
+~~~~~
+
+Our entry point is simply a `main` function with the type `IO ()` -- as in every
+other Haskell application. However, we directly wrap it in a `hakyll` function,
+which marks our declarative eDSL. Inside this function, we no longer operate in
+the `IO` monad, we operate in the pure `RulesM` monad.
+
+~~~~~{.haskell}
+main :: IO ()
+main = hakyll $ do
+~~~~~
+
+The `RulesM` monad is composed of a few functions. A first important one is
+`route`: this creates a new rule for routing items. This rule is applied to all
+items it matches -- and matching is done using the `"css/*"` [pattern].
+`idRoute` simply means that an item will be routed to it's own filename. For
+example, `css/screen.css` will be routed to `css/screen.css` -- not very
+exciting.
+
+Note that a [Pattern] matches an [Identifier], it doesn't match filenames.
+
+[Pattern]: /reference/Hakyll-Core-Identifier-Pattern.html
+[Identifier]: /reference/Hakyll-Core-Identifier.html
+
+~~~~~{.haskell}
+route "css/*" idRoute
+~~~~~
+
+Apart from specifying where the items should go (using `route`), we also have to
+specify *how* they need to be compiled. This is done using the `compile`
+function. It takes a `Compiler` as its second argument. These compilers can
+consist of very complicated constructions, but Hakyll also provides a number of
+good default compilers. The `compressCssCompiler` compiler will simply compress
+the CSS found in the files.
+
+~~~~~{.haskell}
+compile "css/*" compressCssCompiler
+~~~~~
+
+Next, we're going to render some pages. We're going to style the results a
+little, so we're going to need a [Template]. We simply compile a template using
+the `templateCompiler` compiler, it's good enough in most cases.
+
+[Template]: /reference/Hakyll-Web-Template.html
+
+We don't use a route for these templates, after all, we don't want to route them
+anywhere, we just want to use them to style our pages a little.
+
+~~~~~{.haskell}
+compile "templates/*" templateCompiler
+~~~~~
+
+We can conclude that some rules do not *directly* add an output page on our
+site. In this case, we compile the template so it is available to the compiler
+later[^1].
+
+[^1]: Actually, since the rules DSL is declarative, we could also add the
+ template compile rule at the bottom -- this would make no difference.
+
+Now, it's time to actually render our pages. We use the `forM_` monad combinator
+so we can describe all files at once (instead of compiling all three files
+manually).
+
+~~~~~{.haskell}
+forM_ ["about.rst", "index.markdown", "code.lhs"] $ \page -> do
+~~~~~
+
+The pages all have different extensions. In our website, we only want to see
+`.html` files. Hakyll provides a route to do just that:
+
+~~~~~{.haskell}
+route page $ setExtension "html"
+~~~~~
+
+The [Rules] reference page has a complete listing of the API used.
+
+[Rules]: /reference/Hakyll-Core-Rules.html
+
+The compilation of our pages is slightly more complicated: we're using another
+DSL there.
+
+### The Compiler DSL
+
+The gist of it is that the `Compiler a b` type has two parameters -- it is an
+Arrow, and we can chain compilers using the `>>>` operator. The [Compiler]
+reference page has some more readable information on this subject.
+
+[Compiler]: /reference/Hakyll-Core-Compiler.html
+
+~~~~~{.haskell}
+compile page $ pageCompiler
+ >>> applyTemplateCompiler "templates/default.html"
+ >>> relativizeUrlsCompiler
+~~~~~
+
+Note that we can only use `applyTemplateCompiler` with
+`"templates/default.html"` because we compiled `"templates/default.html"`. If we
+didn't list a rule for that item, the compilation would fail (Hakyll would not
+find the template).
+
+Now, let's look at the concrete compiler:
+
+- `pageCompiler` starts by reading the [Page], parsing it, and rendering it
+ using [pandoc].
+- `applyTemplateCompiler` applies a [Template] which we have already loaded.
+- `relativizeUrlsCompiler` will [relativize] the URLs so we have a site we can
+ deploy everywhere.
+
+[Page]: /reference/Hakyll-Web-Page.html
+[relativize]: /reference/Hakyll-Web-RelativizeUrls.html
+
+Various tips and tricks
+-----------------------
+
+### Syntax highlighting
+
+Syntax highlighting is enabled by default in Hakyll. However, you also need to
+enable it in pandoc. If no syntax highlighting shows up, try
+
+ [jasper@phoenix] cabal install --reinstall -fhighlighting pandoc
+
+### When to rebuild
+
+If you execute a `./hakyll build`, Hakyll will build your site incrementally.
+This means it will be very fast, but it will not pick up _all_ changes.
+
+- In case you edited `hakyll.hs`, you first want to compile it again.
+- It is generally recommended to do a `./hakyll rebuild` before you deploy your
+ site.
+
+After rebuilding your site, all files will look as "modified" to the filesystem.
+This means that when you upload your site, it will usually transfer all files --
+this can generate more traffic than necessary, since it is possible that some
+files were not actually modified. If you use `rsync`, you can counter this using
+the `--checksum` option.
diff --git a/examples/hakyll/tutorials/part01.markdown b/examples/hakyll/tutorials/part01.markdown
deleted file mode 100644
index f96993a..0000000
--- a/examples/hakyll/tutorials/part01.markdown
+++ /dev/null
@@ -1,235 +0,0 @@
----
-title: Quickstart
-what: explains how to create a simple brochure site
----
-
-## Getting started
-
-First, make sure you have Hakyll installed. The recommended way to do this is
-through [hackage] using [cabal-install]. This tutorial also assumes you have a
-basic knowledge of Haskell.
-
-[hackage]: http://hackage.haskell.org/
-[cabal-install]: http://www.haskell.org/haskellwiki/Cabal-Install
-
-~~~~~
-[jasper@alice ~]$ cabal install hakyll
-~~~~~
-
-## Building a simple static site
-
-As an example to get started with, we're going to develop a so called
-"Brochure Site" for an imaginary company. The first step is to create a
-directory for our new site.
-
-~~~~~
-[jasper@alice Sites]$ mkdir brochure
-[jasper@alice Sites]$ cd brochure/
-[jasper@alice brochure]$
-~~~~~
-
-I have a [zip file] with the files we need for this
-tutorial available. Please unzip it in the brochure directory we just created.
-We'll first have a look at what we're going to create (because we're curious
-and all that).
-
-[zip file]: $root/examples/brochure.zip
-
-~~~~~
-[jasper@alice brochure]$ ghc --make hakyll.hs
-[1 of 1] Compiling Main ( hakyll.hs, hakyll.o )
-Linking hakyll ...
-[jasper@alice brochure]$ ./hakyll preview
-Starting hakyll server on port 8000...
-~~~~~
-
-If you now point your browser at [localhost:8000] you should see our simple
-brochure site.
-
-[localhost:8000]: http://localhost:8000/
-
-## hakyll.hs
-
-The main configuration file of a Hakyll site is traditionally called
-`hakyll.hs`. It is nothing special, just a small Haskell program. There is no
-magic going on.
-
-~~~~~{.haskell}
-import Text.Hakyll (hakyll)
-import Control.Monad.Trans (liftIO)
-main = hakyll "http://example.com" $ do
- liftIO $ putStrLn "I'm in your computer, generating your site!"
-~~~~~
-
-Note how we wrap everything in the `hakyll` function. This is useful because
-it will generate a very nice main function. We also pass the full site URL to
-the `hakyll` function. If you don't have an URL for your site yet, it doesn't
-really matter for now; just fill in anything then. The URL is only used for
-certain specific purposes where a full URL is needed, such as rendering RSS
-feeds.
-
-## Context
-
-Let's look at one of the most important types in Hakyll.
-
-~~~~~{.haskell}
-type Context = Map String String
-~~~~~
-
-A `Context` is a key-value mapping, used to represent pieces of information.
-One way to write such a `Context`, is a page.
-
-## Pages
-
-Another important concept in Hakyll is pages. Pages are text files that can be
-written in markdown, html, rst... basically anything Pandoc supports.
-Furthermore, they can also contain some metadata. The metadata is placed in the
-file header and surrounded by `---` lines. Each line should contain a
-`key: value` pair. Let's have a look at the `index.markdown` page.
-
- ---
- title: About
- ---
- Nullam imperdiet sodales orci vitae molestie.
- Nunc quam orci, pharetra a rhoncus vitae,
- eleifend id felis. Suspendisse potenti...
-
-This contains one `key: value` pair, namely `title: About`. The rest of the
-file is treated as markdown by pandoc. If you want to know more about
-markdown, I think [this](http://daringfireball.net/projects/markdown/syntax)
-is a pretty good page.
-
-## Templates
-
-Another concept are the so-called templates. Templates are text files (usually
-html files) containing a number of keys. The syntax for these keys is
-`$identifier`. Our example site contains one template, namely
-`templates/default.html`. Let's have a better look at that.
-
-~~~~~{.html}
-<html>
- <head>
- <title>MyAweSomeCompany - $title</title>
- <link rel="stylesheet" type="text/css"
- href="$$root/css/default.css" />
- <link rel="stylesheet" type="text/css"
- href="$$root/css/syntax.css" />
- </head>
- <body>
- <h1>MyAweSomeCompany - $title</h1>
- <div id="navigation">
- <a href="$$root/index.html">Home</a>
- <a href="$$root/about.html">About</a>
- <a href="$$root/code.html">Code</a>
- </div>
-
- $body
- </body>
-</html>
-~~~~~
-
-We can see how our `Page` would fit in. When we render the page we saw using
-this template, `$title` would be replaced by `About`, and `$body` would be
-replaced by the body of the about page. `body` is the traditional name for the
-body of any page - that is the convention in Hakyll. Also note that in this
-case, `$body` would be replaced by a chunk of html - the result of the
-markdown-to-html conversion.
-
-## The $$root key
-
-There are a few "special" keys in Hakyll: one of them is the $$root key. What
-is so special about it? Well, internally, it is treated differently - but this
-should not concern you. The thing is that it is the only key you can also use
-in __Pages__.
-
-It will be substituted by a relative url part (like `..` or `../..`) so it
-points to the root directory of your site. It is recommended to use this
-whenever you need it, it can save you some time from messing with absolute
-and relative URL's.
-
-## Putting it all together
-
-Now, we'll render the page using the `renderChain` function. This function
-takes a list of templates and a `Context`. In our case, we only have one
-template, and our `Context` is the about page we just saw - we can load that
-using the `createPage` function.
-
-~~~~~{.haskell}
-import Text.Hakyll (hakyll)
-import Text.Hakyll.Render (renderChain)
-import Text.Hakyll.CreateContext (createPage)
-main = hakyll "http://example.com" $ do
- renderChain ["templates/default.html"]
- (createPage "index.markdown")
-~~~~~
-
-Or, to render all our three pages:
-
-~~~~~{.haskell}
-import Text.Hakyll (hakyll)
-import Text.Hakyll.Render (renderChain)
-import Text.Hakyll.CreateContext (createPage)
-main = hakyll "http://example.com" $ do
- render "about.rst"
- render "index.markdown"
- render "code.lhs"
- where render = renderChain ["templates/default.html"]
- . createPage
-~~~~~
-
-As you can see, we can render a variety of formats. This will create the
-following files:
-
-~~~~~
-_site/about.html
-_site/index.html
-_site/code.html
-~~~~~
-
-## CSS, images and other static files
-
-Now, we also have a css file we would like to have in the `_site` directory.
-Static files can be rendered using the `static` function in Hakyll. We could
-use:
-
-~~~~~{.haskell}
-import Text.Hakyll (hakyll)
-import Text.Hakyll.Render (static)
-main = hakyll "http://example.com" $ do
- static "css/default.css"
-~~~~~
-
-This would work, but let's not forget that Hakyll also has css compression. If
-we want to use that, we would use `css` instead of `static`.
-
-~~~~~{.haskell}
-import Text.Hakyll (hakyll)
-import Text.Hakyll.Render (css)
-main = hakyll "http://example.com" $ do
- css "css/default.css"
-~~~~~
-
-If we were to create another css file, we would have to add a line to our
-`hakyll.hs` configuration file. This is pretty stupid, because the whole
-directory `css` contains only css files. That's why Hakyll has a `directory`
-function, which will execute a given function on an entire directory. So,
-our example would become:
-
-~~~~~{.haskell}
-import Text.Hakyll (hakyll)
-import Text.Hakyll.Render (css)
-import Text.Hakyll.File (directory)
-main = hakyll "http://example.com" $ do
- directory css "css"
-~~~~~
-
-## Deploying
-
-To setup your site, simply copy the contents of `_site` to your hosting provider
-using your favorite piece of software.
-
-## The gist of it
-
-- You render "pages" with "templates".
-- The most common render function is `renderChain`.
-- Hakyll also deals with static files and css.
diff --git a/examples/hakyll/tutorials/part02.markdown b/examples/hakyll/tutorials/part02.markdown
deleted file mode 100644
index 3df3c1f..0000000
--- a/examples/hakyll/tutorials/part02.markdown
+++ /dev/null
@@ -1,91 +0,0 @@
----
-title: Arrows: A Crash Course
-what: illustrates how Arrows are used in hakyll
----
-
-## Do I really need to know this stuff?
-
-Maybe. You don't need it when you only use the basic Hakyll functions, but
-Arrows are used a lot in the Hakyll code, and a lot of the more advanced
-features make use of Arrows. Besides, it's a pretty interesting subject.
-
-## What is an "Arrow"
-
-Arrows are comparable with monads. In fact, monads are a subset of arrows.
-Arrows allow you to represent a "computation". This is all pretty vague, so
-let's skip directly to the Arrows used in Hakyll.
-
-## HakyllAction
-
-The Arrow used throughout Hakyll is called `HakyllAction`. Arrows have two
-type parameters, so it's actually `HakyllAction a b`. You can think of `a`
-as the input for our action, and `b` is the corresponding output. Let's look
-at the type of `createPage`:
-
-~~~~~{.haskell}
-createPage :: FilePath -> HakyllAction () Context
-~~~~~
-
-So, you give `createPage` a `FilePath`, and it creates a `HakyllAction` that
-produces a `Context` out of thin air. Now, we want to render the `Context` we
-just loaded with a template. The type of the `render` function is:
-
-~~~~~{.haskell}
-render :: FilePath -> HakyllAction Context Context
-~~~~~
-
-We pass the file name of a template to the `render` function, and we get a
-`HakyllAction` that creates a `Context` from another `Context`. The result
-of the `render` operation (so basically the rendered template) will be placed
-in the `$body` field of the new `Context`. But we still haven't saved our
-result, so let's do that using the `writePage` function.
-
-~~~~~{.haskell}
-writePage :: HakyllAction Context ()
-~~~~~
-
-This function writes our result and returns nothing.
-
-## Composition
-
-Now, let's look at the big picture.
-
-![Arrow illustration]($root/images/arrow-composition.png)
-
-If these were regular functions, we could've composed them using the `.`
-operator. Since they're arrows, we'll have to use the `>>>` operator.
-
-~~~~~{.haskell}
-test :: HakyllAction () ()
-test = createPage "test.markdown"
- >>> render "template.html"
- >>> writePage
-~~~~~
-
-Now, we only have to execute our test.
-
-~~~~~{.haskell}
-runHakyllActionIfNeeded test
-~~~~~
-
-## Also, the point emerges
-
-The `runHakyllActionIfNeeded` suggests why we use arrows. `HakyllAction` is more
-than just a function, it also tracks dependencies. This causes Hakyll to only
-execute our functions when it is really needed. In this particular case, `test`
-would only be executed if either `test.markdown` or `template.html` were
-recently changed.
-
-## So what's renderChain then?
-
-Now, we have done pretty much the same as we did with the `renderChain` function
-in the first part. That's right, the `renderChain` is more or less implemented
-like this. So, you will probably use `renderChain` in most cases, but it's
-handy if you know how it works.
-
-## The gist of it
-
-- Arrows really aren't complicated.
-- Compose them using `>>>`.
-- `HakyllAction` tracks dependencies for you. Use it.
-- In most cases, you will just use `renderChain`.
diff --git a/examples/hakyll/tutorials/part03.markdown b/examples/hakyll/tutorials/part03.markdown
deleted file mode 100644
index 0ba633e..0000000
--- a/examples/hakyll/tutorials/part03.markdown
+++ /dev/null
@@ -1,174 +0,0 @@
----
-title: How to write pages
-what: elaborates a little on writing pages
----
-
-## The structure of a Page
-
-The most important thing to realize is that a page is reduced to a `Context`,
-and therefore is just a key-value mapping. Another example:
-
- ---
- title: About
- author: Mia Wallace
- ---
- Hello there! This is
- a simple about page.
-
-This will produce the following mapping:
-
-- `$title`: About
-- `$author`: Mia Wallace
-- `$body`: Hello there! This is a simple about page.
-
-`$body` is the traditional name for the main body part of a page. If the page
-has a `.markdown` extension for example, this would also be rendered by pandoc.
-But pages are more flexible. The following is also a valid page:
-
- Hello there! This is
- a simple about page.
-
-This will produce one key-value pair:
-
-- `$body`: Hello there! This is a simple about page.
-
-But Hakyll can do more than this. You can add extra sections, apart from the
-body, and even leave out the body.
-
- ---
- author: Vincent Vega
-
- --- prelude
- A small introduction goes here. I can write *markdown*
- here, by the way. Well, assuming this page has a
- `.markdown` extension.
-
- --- main
- I can write some more things here.
-
- ---
- The body comes last, and is optional.
-
-This will produce the following:
-
-- `$author`: Vincent Vega
-- `$prelude`: A small introduction goes here. I can write *markdown* here, by the
- way. Well, assuming this page has a `.markdown` extension.
-- `$main`: I can write some more things here.
-- `$body`: The body comes last, and is optional.
-
-The example from this tutorial (we will see later) uses this to build a
-three-column system for the website, separating content from layout.
-
-## Combining Contexts
-
-Now that you know that pages, and `Context`s in general, are basically nothing
-more than key-value mappings, it is time to abuse this fact. There is another
-way to create a `Context`, called `combine`.
-
-The type signature of the `combine` function does a pretty good job at
-explaining it:
-
-~~~~~{.haskell}
-combine :: HakyllAction () Context
- -> HakyllAction () Context
- -> HakyllAction () Context
-~~~~~
-
-This means we can take two `Context` values and combine them. This is
-basically a `Map.union`: the result will contain all keys from both `Context`s,
-with their corresponding values. If a key is present in both `Context`s, the
-value from the first argument will be chosen. This is, for example, almost
-always the case with the `$url` field (since almost all `Context`s have an url
-in Hakyll).
-
-Combining two `Context`s, but overriding the `$url` is quite common, so there is
-another function that helps us here:
-
-~~~~~{.haskell}
-combineWithUrl :: FilePath
- -> HakyllAction () Context
- -> HakyllAction () Context
- -> HakyllAction () Context
-~~~~~
-
-## The example
-
-Now that we have the tools, we'll get on to the example. This time, we'll
-be making a more advanced brochure site. Here [is a zip file] containing the
-source code for the tutorial.
-
-[is a zip file]: $root/examples/morepages.zip
-
-Every page consists of three sections, originally named `section1`, `section2`
-and `section3`. So our pages look more or less like this:
-
- ---
- title: About
-
- --- section1
- ## Mattis
- Nullam imperdiet sodales orci vitae molestie. Nunc...
-
- --- section2
- ## Orci
- Vivamus eget mauris sit amet nulla laoreet lobortis.
- Nulla in...
-
- --- section3
- ## Augue
- In urna ante, pulvinar et imperdiet nec, fermentum ac...
-
-The cool thing is we do not have to specify how these will be laid out. In our
-template, we decide to use a simple three column system:
-
-~~~~~{.html}
-<div class="column"> $section1 </div>
-<div class="column"> $section2 </div>
-<div class="column"> $section3 </div>
-~~~~~
-
-The columns are then floated using css. So far so good, but what if we wanted
-an additional text block on every page? An easy solution would be to add this
-to the template, but then our layout-content separation idea will be broken
-again. So we simply add to the template:
-
-~~~~~{.html}
-<div class="footer"> $footer </div>
-~~~~~
-
-And now we will use `combine` to put the footer on every page - so we need to
-add the footer page to every `Context`. We write a small auxiliary function
-that combines a given `Context` with the footer:
-
-~~~~~{.haskell}
-withFooter = flip combine $ createPage "footer.markdown"
-~~~~~
-
-Note that we use `flip` here - we want `footer.markdown` to be our second
-argument. That is because Hakyll will take the `$url` from the first `Context`,
-so all pages would be rendered to `footer.html` - obviously not what we want.
-Now, where we previously wrote:
-
-~~~~~{.haskell}
-render "about.markdown"
-where render = renderChain ["templates/default.html"]
- . createPage
-~~~~~
-
-We simply have to add our footer:
-
-~~~~~{.haskell}
-render "about.markdown"
-where render = renderChain ["templates/default.html"]
- . withFooter
- . createPage
-~~~~~
-
-And now every page will include the footer.
-
-## The gist of it
-
-- Pages are just key-value mappings.
-- You can have multiple sections in every page.
-- Combine pages using the `combine` function.
diff --git a/examples/hakyll/tutorials/part04.markdown b/examples/hakyll/tutorials/part04.markdown
deleted file mode 100644
index 22fc846..0000000
--- a/examples/hakyll/tutorials/part04.markdown
+++ /dev/null
@@ -1,60 +0,0 @@
----
-title: How to write templates
-what: more information on template writing
----
-
-## Simple templates
-
-Simple templates are simply HTML files, with `$identifiers`. An example:
-
-~~~~~{.html}
-<html>
- <head>
- <title>$title</title>
- </head>
- <body>
- $body
- </body>
-</html>
-~~~~~
-
-## Markup in templates
-
-Most of the examples in these tutorials use HTML for templates. However, since
-Hakyll 2.2, it is possible to use other markup languages in your templates.
-use an appropriate extension, and Hakyll will pick it up. For example, you could
-write your `templates/post.markdown` template as:
-
- # $title
-
- _On $date_
-
- $body
-
-__Warning__: you shouldn't use markdown for your "root" template, as these
-templates will never insert things like the doctype for you -- so you always
-need at least one top-level HTML template.
-
-## Hamlet templates
-
-From Hakyll 2.3 onwards, it is possible to use [hamlet] templates. You can find
-more information about hamlet on that website. Usage is fairly simple -- since
-pages are strictly key-value mappings, only `$variable$` control is supported in
-hamlet templates. As an example, here is the template that can be used for the
-brochure site, but in hamlet:
-
- !!!
- %html
- %head
- %title MyAweSomeCompany - $$title$
- %body
- %h1 MyAweSomeCompany - $$title$
- #navigation
- %a!href="$$root$/index.html" Home
- %a!href="$$root$/about.html" About
- %a!href="$$root$/code.html" Code
- $body$
-
-Hakyll will recognise hamlet templates automatically by the `.hamlet` extension.
-
-[hamlet]: http://docs.yesodweb.com/hamlet/
diff --git a/examples/hakyll/tutorials/part05.markdown b/examples/hakyll/tutorials/part05.markdown
deleted file mode 100644
index be71714..0000000
--- a/examples/hakyll/tutorials/part05.markdown
+++ /dev/null
@@ -1,157 +0,0 @@
----
-title: Creating a Blog
-what: creates a simple blog
----
-
-## Creating a simple blog with Hakyll
-
-After we created a simple brochure site, we're going to try something more
-advanced: we are going to create a simple blog system.
-
-A [zip file] containing the source for this tutorial is also available.
-
-[zip file]: $root/examples/simpleblog.zip
-
-Blogs, as you probably know, are composed of posts. In Hakyll, we're going
-to use simple pages for posts. All posts are located in the `posts`
-directory. But we're not going to use the `directory` command here - you will
-see why later. First, some trivial things like css.
-
-~~~~~{.haskell}
-main = hakyll "http://example.com" $ do
- directory css "css"
-~~~~~
-
-## Finding the posts
-
-`Text.Hakyll.File` contains a handy function `getRecursiveContents`, which will
-provide us with all the blog posts. The blog posts have a
-`yyyy-mm-dd-title.extension` naming scheme. This is just a simple trick so we
-can sort them easily (sorting on filename implies sorting on date). You could of
-course name them whatever you want, but it's always a good idea to stick to the
-conventions. They contain some metadata, too:
-
- title: A first post
- author: Julius Caesar
- date: November 5, 2009
- ---
- Lorem ipsum dolor sit amet, consectetur adipiscing elit.
- Vivamus pretium leo adipiscing lectus iaculis lobortis.
- Vivamus scelerisque velit dignissim metus...
-
-Now, we find the posts and sort them reversed, so the most recent post will
-become the first item in the list:
-
-~~~~~{.haskell}
-postPaths <- liftM (reverse . sort) $ getRecursiveContents "posts"
-~~~~~
-
-Our `postPaths` value is now of the type `[FilePath]`. We want to be able to
-render all posts, so we pass them to the `createPage` function.
-
-~~~~~{.haskell}
-let postPages = map createPage postPaths
-~~~~~
-
-We have two templates we want to render our posts with: first we would like to
-render them using `templates/post.html`, and we want to render the result
-using `templates/default.html`. This can be done with the `renderChain`
-function:
-
-~~~~~{.haskell}
-mapM_ (renderChain [ "templates/post.html"
- , "templates/default.html"
- ]) postPages
-~~~~~
-
-Remember that the `renderChain` works by rendering the item using the first
-template, creating a new page with the render result in the `$body` field, and
-so on until it has been rendered with all templates.
-
-Now, we have the posts rendered. What is left is to generate some kind of index
-page with links to those posts. We want one general list showing all posts, and
-we want to show a few recent posts on the index page.
-
-## Creating listings.
-
-`createPage` is the easiest way of reading a `Context`. But in this case, we
-want something more custom, so we'll use the `createCustomPage` function. This
-allows us to create a more specific `Context`.
-
-~~~~~{.haskell}
-createCustomPage :: FilePath
- -> [(String, Either String (HakyllAction () String))]
- -> HakyllAction () Context
-~~~~~
-
-The first argument is the `url` of the page to generate. For our index page,
-this will be, `index.html`. The second argument is obviously our `key: value`
-mapping. But why the `Either`? This, once again, is about dependency handling.
-The idea is that you can choose which type to use for the value:
-
-- `String`: Simply a `String`.
-- `HakyllAction () String`: Here, you can give an `HakyllAction` Arrow action
- that can produce a String. However - this action _will not be executed_ when
- the file in `_site` is up-to-date.
-
-However, in this specific case - a list of posts - there is an easier, and more
-high-level approach than `createCustomPage`[^1]. Let's look at the type
-signature of `createListing`:
-
-~~~~~{.haskell}
-createListing :: FilePath
- -> [FilePath]
- -> [HakyllAction () Context]
- -> [(String, Either String (HakyllAction () String))]
- -> HakyllAction () Context
-~~~~~
-
-[^1]: Since Hakyll-1.3 onwards.
-
-The first argument is the destination url. For our blog, this is of course
-`index.html`. The second argument is a list of templates to render _each_ `Context`
-with. We use only `templates/postitem.html` here. This is, as you can see, a
-simple template:
-
-~~~~~{.html}
-<li>
- <a href="$$root/$url">$title</a>
- - <em>$date</em> - by <em>$author</em>
-</li>
-~~~~~
-
-We then give a list of `Context`s to render. For our index, these are the 3 last
-posts. The last argument of the `createListing` functions lets you specify
-additional key-value pairs, like in `createCustomPage`. We use this to set the
-title of our page. So, we create our index page using:
-
-~~~~~{.haskell}
-let index = createListing "index.html"
- ["templates/postitem.html"]
- (take 3 postPages)
- [("title", Left "Home")]
-~~~~~
-
-The result of this will be a `HakyllAction () Context`. This `Context`'s `$body`
-will contain a concatenation of all the 3 posts, rendered with the
-`templates/postitem.html` template.
-
-Now, we only have to render it: first using the `index.html` template - which
-adds some more information to our index - then using the
-`templates/default.html` template.
-
-~~~~~{.haskell}
-renderChain ["index.html", "templates/default.html"] index
-~~~~~
-
-Note that the `index.html` in the `renderChain` list is also a template. Now,
-you might want to take your time to read the `index.html` template and the other
-files in the zip so you understand what is going on here.
-
-## The gist of it
-
-- You can find blogposts using `getRecursiveContents`.
-- The convention is to call them `yyyy-mm-dd-rest-of-title.extension`. This
- allows us to sort them easily.
-- You can use `createCustomPage` or `createListing` to create custom pages and
- simple listings.
diff --git a/examples/hakyll/tutorials/part06.markdown b/examples/hakyll/tutorials/part06.markdown
deleted file mode 100644
index d64dc55..0000000
--- a/examples/hakyll/tutorials/part06.markdown
+++ /dev/null
@@ -1,84 +0,0 @@
----
-title: Creating feeds
-what: adds an rss feed to the simple blog
----
-
-## Adding Feeds
-
-In this tutorial, we're going to add an RSS feed to the blog we wrote in the
-previous part. Here is a [zip file] containing the source.
-
-[zip file]: $root/examples/feedblog.zip
-
-You will be glad to hear that Hakyll has native support for RSS as well as Atom
-feeds[^1]. This simplifies our object a lot.
-
-[^1]: Since Hakyll-2.0
-
-This is the first time that the absolute URL of your site, which you have to
-give to the `hakyll` function, actually matters. That's because the
-specifications of those feed formats dictate that you need their full URLs.
-
-## Creating a configuration
-
-The first thing to do is creating a configuration for your feed. You could
-place this code outside of your main function, as is done in the example.
-
-~~~~~{.haskell}
-myFeedConfiguration = FeedConfiguration
- { feedUrl = "rss.xml"
- , feedTitle = "SimpleBlog RSS feed."
- , feedDescription = "Simple demo of a feed created with Hakyll."
- , feedAuthorName = "Jasper Van der Jeugt"
- }
-~~~~~
-
-Note that we enter the url of the feed in our configuration. So the function
-to render our feed only takes two arguments, the configuration and a list of
-items to put in the feed. Let's put the three most recent posts in our feed.
-
-~~~~~{.haskell}
-renderRss myFeedConfiguration (take 3 postPages)
-~~~~~
-
-## But it's not that easy
-
-If you look at our generated RSS feed (build the site), you will see
-`$description` tags appearing in our final render. We don't want that! How
-did they get there in the first place?
-
-To render feeds, Hakyll expects a number of fields in the renderables you put
-in the feed. They are:
-
-- `$title`: Title of the item. This is set in our posts, since we use a `title`
- metadata field.
-- `$url`: Url of the item. This is automatically set by Hakyll, so you shouldn't
- worry about it.
-- `$description`: A description of our item to appear in the feed reader.
-
-The latter is obviously the problem: we don't have a description in our posts.
-In fact, we would like to copy the `$body` key to the `$description` key, so
-people can read the full post in their feed readers.
-
-## Where arrows come in
-
-The `Text.Hakyll.ContextManipulations` module contains a number of simple
-functions that create Arrows for us. One of these functions is `copyValue`,
-which takes a source and a destination key. So, we need to pass our
-items through this Arrow first.
-
-~~~~~{.haskell}
-renderRss myFeedConfiguration $
- map (>>> copyValue "body" "description") (take 3 postPages)
-~~~~~
-
-And that's that, now our feed gets rendered properly. Exercise for the reader
-is to add an Atom feed[^2].
-
-[^2]: Hint: look around in the [reference]($root/reference.html).
-
-## The gist of it
-
-- Hakyll has native support for RSS and Atom feeds.
-- The items must contain `$title` and `$description` fields.
-- Arrows can be used to copy values in a `Context`.
diff --git a/examples/hakyll/tutorials/part07.markdown b/examples/hakyll/tutorials/part07.markdown
deleted file mode 100644
index d017a1e..0000000
--- a/examples/hakyll/tutorials/part07.markdown
+++ /dev/null
@@ -1,221 +0,0 @@
----
-title: Tags and manipulations
-what: enhances our blog with tags and explains context manipulations.
----
-
-## Context manipulations
-
-Here, have [a zip file]($root/examples/tagblog.zip) for this tutorial.
-
-You probably remember that `Context` objects are just key-value mappings. We can
-render those with templates, and then the `$key`'s in the template get
-substituted by the appropriate values. This is a rather flexible system, but
-there are limitations. Some of these limitations can be solved using
-_context manipulations_.
-
-Like rendering actions, _context manipulations_ are also simply
-`HakyllAction Context Context` arrows. The `Text.Hakyll.ContextManipulations`
-contains some functions to easily construct easy variants.
-
-One of the most general functions is the `renderValue` function. Let's have a
-look at its type.
-
-~~~~~{.haskell}
-renderValue :: String
- -> String
- -> (String -> String)
- -> HakyllAction Context Context
-~~~~~
-
-This is the preferred way of creating context manipulations. The first argument
-is the `key` to manipulate. The second argument is the `key` where the new value
-should be placed. If this is the same as the first argument, it will be
-replaced. The third argument is the function to manipulate the `value` with.
-
-As a simple example, let's write a function that puts the `$title` in uppercase.
-
-~~~~~{.haskell}
-import Data.Char (toUpper)
-
-titleUpper :: HakyllAction Context Context
-titleUpper = renderValue "title" "title" $ map toUpper
-~~~~~
-
-Because the destination `key` is the same as the source `key`, we can also use
-the `changeValue` function here.
-
-~~~~~{.haskell}
-titleUpper = changeValue "title" $ map toUpper
-~~~~~
-
-For further reading, refer to the `Text.Hakyll.ContextManipulations`
-documentation.
-
-## Applying Context Manipulations
-
-Because we're dealing with Arrows again, we can use `>>>` to apply our
-manipulations. For example, we could use our title manipulation like this:
-
-~~~~~{.haskell}
-renderChain ["templates/default.html"]
- (createPage "index.markdown" >>> titleUpper)
-~~~~~
-
-## Rendering dates
-
-As you remember, in our previous blog, all posts had a file name like
-`posts/yyyy-mm-dd-title.extension`, as is the Hakyll convention. But they also
-had a metadata field `date`, containing a human-readable date. This is not very
-D.R.Y., of course! Hakyll has a specialized `renderValue` function to deal with
-dates encoded in paths: `renderDate`.
-
-~~~~~{.haskell}
-postManipulation :: HakyllAction Context Context
-postManipulation = renderDate "date" "%B %e, %Y" "Unknown date"
-~~~~~
-
-That manipulation will:
-- Read the date from the file name the post was loaded from.
-- Parse the date and render it in a `%B %e, %Y` format. This is a
- `Month day, Year` format.
-- Put the result in the `date` metadata field.
-- If the date could not be parsed, it will put `"Unknown date"` in the `date`
- metadata field.
-
-So, we can throw away our `date: ` lines from our posts, and still use `$date`
-in our templates.
-
-## Abstracting the post list
-
-Now, we're going to render tags. This is also done using context manipulations.
-Hakyll has a specialized module to deal with tags, provided by
-`Text.Hakyll.Tags`. This module assumes tags are comma separated, and placed in
-the `tags` metadata field.
-
- ---
- title: A third post
- author: Publius Ovidius Naso
- tags: epic fail, ovidius
- ---
- Pellentesque tempor blandit elit, vel...
-
-But first things first. We need to render a post list for every tag. We already
-had some code to render a list of all posts. We're just going to abstract this
-code into a more general function:
-
-~~~~~{.haskell}
-renderPostList url title posts = do
- let list = createListingWith url ["templates/postitem.html"]
- posts [("title", Left title)]
- renderChain ["posts.html", "templates/default.html"] list
-~~~~~
-
-Our "render all posts" action can now be written as:
-
-~~~~~{.haskell}
-renderPostList "posts.html" "All posts" renderablePosts
-~~~~~
-
-## Tag links
-
-We want to display the tags for our post under the title. But if we use the
-`$tags` key in a template, we will just have the plain tags - they will not be
-clickable. We can again solve this with a `ContextManipulation`. We have a
-function that produces an url for a given tag:
-
-~~~~~{.haskell}
-tagToUrl tag = "$root/tags/" ++ removeSpaces tag ++ ".html"
-~~~~~
-
-`removeSpaces` is an auxiliary function from `Text.Hakyll.File`. Now, there is
-a specialized `renderValue` function for creating linked tags called
-`renderTagLinks`. This function simply takes a function that produces an url
-for a given tag - the function we just wrote. Let's extend our
-`postManipulation`.
-
-~~~~~{.haskell}
-postManipulation :: HakyllAction Context Context
-postManipulation = renderDate "date" "%B %e, %Y" "Unknown date"
- >>> renderTagLinks tagToUrl
-~~~~~
-
-We apply this manipulation when we load the tags.
-
-~~~~~{.haskell}
-let renderablePosts =
- map ((>>> postManipulation) . createPage) postPaths
-~~~~~
-
-So, the `renderTagLinks` function replaces the `$tags` value from
-`epic fail, random` to `<a href="$root/tags/epic-fail.html">epic fail</a>, ...`.
-If we click a tag, we get a `404`. That's because we haven't generated the
-post lists for every tag.
-
-## The Tag Map
-
-Hakyll provides a function called `readTagMap`. Let's inspect its type.
-
-~~~~~{.haskell}
-type TagMap = Map String [HakyllAction () Context]
-readTagMap :: String -> [FilePath] -> HakyllAction () TagMap
-~~~~~
-
-You give it a list of paths, and it creates a map that, for every tag, holds
-a number of posts. We can easily use this to render a post list for every tag.
-The first argument given is an "identifier", unique to this tag map. Hakyll
-needs this so it can cache the tags.
-
-~~~~~{.haskell}
-let tagMap = readTagMap "postTags" postPaths
-~~~~~
-
-When we have the `TagMap`, we need to render a post list for every tag.
-There is a function in Hakyll designed more or less for this purpose:
-`withTagMap`. This takes a `TagMap` and an action to execute for every tag and
-its associated posts. We pass a small function to it that we create ourselves[^1]:
-
-[^1]: Exercise for the reader: why do we use `>>> postManipulation` again here?
-
-~~~~~{.haskell}
-let renderListForTag tag posts =
- renderPostList (tagToUrl tag)
- ("Posts tagged " ++ tag)
- (map (>>> postManipulation) posts)
-withTagMap tagMap renderListForTag
-~~~~~
-
-There we go. We now have clickable tags, and a post list for every tag.
-
-## A Tag Cloud
-
-A tag cloud is a commonly found thing on blogs. Hakyll also provides code to
-generate a tag cloud. Let's have a look at the `renderTagCloud` function.
-
-~~~~~{.haskell}
-renderTagCloud :: (String -> String)
- -> Float
- -> Float
- -> HakyllAction TagMap String
-~~~~~
-
-The first argument is, once again, a function to create an url for a given tag.
-Then, we give a minimum and a maximum font size in percent, and we get a tag
-cloud Arrow back. We can add this to our index:
-
-~~~~~{.haskell}
-let tagCloud = tagMap >>> renderTagCloud tagToUrl 100 200
- index = createListing "index.html"
- ["templates/postitem.html"]
- (take 3 renderablePosts)
- [ ("title", Left "Home")
- , ("tagcloud", Right tagCloud)
- ]
-renderChain ["index.html", "templates/default.html"] index
-~~~~~
-
-## The gist of it
-
-- There's some handy, simple functions in `Text.Hakyll.ContextManipulations`.
-- Separate tags by commas and put them in the `$tags` field.
-- Use `withTagMap` to render a list for every tag.
-- Hakyll can also create tag clouds.
diff --git a/examples/hakyll/tutorials/part08.markdown b/examples/hakyll/tutorials/part08.markdown
deleted file mode 100644
index ec27153..0000000
--- a/examples/hakyll/tutorials/part08.markdown
+++ /dev/null
@@ -1,97 +0,0 @@
----
-title: Interlude
-what: gives some various tips and tricks about Hakyll (quite handy, read this!)
----
-
-## Syntax-highlighting
-
-Pandoc (which Hakyll uses as a backend) offers powerful syntax highlighting.
-To enable this, Pandoc needs to be compiled with highlighting support. If this
-is not the case, you can fix this using:
-
-~~~~~
-[jasper@alice ~]$ cabal install --reinstall -fhighlighting pandoc
-~~~~~
-
-## Auto-compilation
-
-Hakyll features a simple _auto-compilation_ mode. This is invoked by running
-
-~~~~~
-[jasper@alice ~]$ ./hakyll preview
-Starting hakyll server on port 8000...
-~~~~~
-
-Now, Hakyll will recompile your site when you refresh in your browser. This will
-not update your site automatically when `hakyll.hs` changes. So if you make any
-changes to the configuration file, you'll have to compile it again, and then you
-can enter `preview` mode again.
-
-If you use a custom `HakyllConfiguration`, you can select your custom
-`PreviewMode`:
-
-- `BuildOnRequest`: rebuild site when the preview server receives a request
- (default).
-- `BuildOnInterval`: build when you change files.
-
-## When to rebuild
-
-If you execute a `./hakyll build`, Hakyll will build your site incrementally.
-This means it will be very fast, but it will not pick up _all_ changes.
-
-- In case you edited `hakyll.hs`, you first want to compile it again.
-- It is generally recommended to do a `./hakyll rebuild` before you deploy your
- site.
-
-After rebuilding your site, all files will look as "modified" to the filesystem.
-This means that when you upload your site, it will usually transfer all files --
-this can generate more traffic than necessary, since it is possible that some
-files were not actually modified. If you use `rsync`, you can counter this using
-the `--checksum` option.
-
-## Pretty URL's
-
-There is an option in Hakyll to produce pretty URL's, which is disabled by
-default because it can be confusing when you're first introduced to Hakyll.
-
-It can be enabled this way:
-
-~~~~~{.haskell}
-import Text.Hakyll
-import Text.Hakyll.HakyllMonad
-
-myConfig :: HakyllConfiguration
-myConfig = (defaultHakyllConfiguration "http://jaspervdj.be")
- { enableIndexUrl = True
- }
-
-main = hakyllWithConfiguration myConfig $ do
- -- Further code here
-~~~~~
-
-The effect will be that the internal `toUrl` function will behave differently.
-A few examples:
-
-- `about.html` will be rendered to `about/index.html`.
-- `posts/2010-02-16-a-post.markdown` will be rendered to
- `posts/2010-02-16-a-post/index.html`.
-- However, `index.markdown` will still be rendered to `index.html`. Likewise,
- `posts/index.html` would be rendered to `posts.index.html`.
-
-The benefit of this is simply prettier URL's. That is, if you consider
-`example.com/about` prettier than `example.com/about.html`.
-
-## Default values
-
-At some point, you might want to use a number of global key-value pairs, for
-example, `$author`. There are two possible ways to achieve this.
-
-- There is an option in `HakyllConfiguration` supporting this, called
- `additionalContext`. For an example on how to use `HakyllConfiguration`, see
- the pretty URL's section above.
-
-- Another option is to use a `defaults.markdown` file, simply containing some
- metadata, and then `combine` this file with other pages. The advantage is
- that autocompilation mode will pick up changes in this file[^1].
-
-[^1]: Original idea by zenzike.
diff --git a/examples/hakyll/tutorials/part09.markdown b/examples/hakyll/tutorials/part09.markdown
deleted file mode 100644
index 4cc1d43..0000000
--- a/examples/hakyll/tutorials/part09.markdown
+++ /dev/null
@@ -1,104 +0,0 @@
----
-title: CategoryBlog
-what: explains how to use categories instead of tags
----
-
-## Categories
-
-Most people familiar with "tags" will also know the concept "categories".
-
-![Tags illustration]($root/images/tutorial8-tags.png)
-
-In fact, tags are harder to implement because they have to be represented as a
-many-to-many relation, and categories are a simple 1-to-many relation.
-
-![Tags illustration]($root/images/tutorial8-categories.png)
-
-This is also the reason you can "simulate" categories using tags. In this
-tutorial we will adapt the blog to use categories instead of tags. Here is
-[a zip file]($root/examples/categoryblog.zip) containing the files used in this
-tutorial.
-
-## About category support
-
-Categories are simpler, but they are usually used in custom ways. That's why
-Hakyll provides less "standard" functions to deal with them. But this gives us
-another chance to learn some of the things we can do with Hakyll.
-
-## Reading Categories
-
-Tags are located in the `tags` metadata field. Since one post can only belong
-in one category, a different approach was chosen here. The category of a post
-is determined by the subfolder it is in. Here you see the directory layout for
-our posts using categories:
-
- posts
- |-- coding
- | |-- 2009-11-05-a-first-post.markdown
- | |-- 2009-11-28-a-third-post.markdown
- | `-- 2009-12-04-this-blog-aint-dead.markdown
- `-- random
- |-- 2009-11-10-another-post.markdown
- `-- 2009-12-23-almost-christmas.markdown
-
-Because we find all our posts in different subdirectories, sorting them is a
-little harder: we still want them sorted by date, so it boils down to sorting
-them by "base name". I hope it does not surprise you Hakyll provides a function
-for that:
-
-~~~~~{.haskell}
-postPaths <- liftM (reverse . sortByBaseName)
- (getRecursiveContents "posts")
-~~~~~
-
-We reverse them again, because we want the most recent posts first. Now, we can
-use the `readCategoryMap` function instead of `readTagMap`, which has the same
-signature, but assigns categories based on the folders the posts are in.
-
-~~~~~{.haskell}
-categoryMap <- readCategoryMap "categoryMap" renderablePosts
-~~~~~
-
-The rest of the `hakyll.hs` is very similar to the one in the previous
-tutorial, except we want to render a category list instead of a tag cloud.
-
-## Rendering a category list
-
-Because rendering a category list is quite easy, and it would be hard to
-write a "general" function for this, hakyll does not provide such a function --
-but it is not hard to write. First, we write an auxiliary function that produces
-a list item for one category:
-
-~~~~~{.haskell}
-categoryListItem category posts =
- "<li>" ++ link category (categoryToUrl category)
- ++ " - " ++ show (length posts) ++ " items.</li>"
-~~~~~
-
-This is nothing more that some basic string concatenation to create a `li` HTML
-element. The function that applies this on every element in the `TagMap` is more
-interesting:
-
-~~~~~{.haskell}
-categoryList :: HakyllAction TagMap String
-categoryList = arr $ uncurry categoryListItem <=< toList
-~~~~~
-
-This function might seem a little harder to understand if you are not familiar
-with the `<=<` operator -- but it's just right-to-left monad composition in the
-list monad. `uncurry categoryListItem <=< toList` is a pure function we want to
-execute on the `TagMap`. But this is not possible in Hakyll[^1]. We need to make
-an arrow of this function. The `arr` function solves this problem easily.
-
-[^1]: This is a feature, not a bug. It helps dependency handling.
-
-We then add this to our index page, and we are done. Feel free to hack around
-with the source code. If you still have questions, feel free to ask them at the
-[google discussion group](http://groups.google.com/group/hakyll).
-
-## The gist of it
-
-- Hakyll supports categories as well as tags.
-- Tags are actually a generalization of categories.
-- Use `readCategoryMap` to read categories.
-- You need to write some custom functions to render category lists etc.
diff --git a/hakyll.cabal b/hakyll.cabal
index cedd451..b4a533f 100644
--- a/hakyll.cabal
+++ b/hakyll.cabal
@@ -1,5 +1,5 @@
Name: hakyll
-Version: 2.4.3
+Version: 3.0.0.3
Synopsis: A simple static site generator library.
Description: A simple static site generator library, mainly aimed at
@@ -17,6 +17,8 @@ Data-Files: templates/atom.xml
templates/atom-item.xml
templates/rss.xml
templates/rss-item.xml
+extra-source-files: src-inotify/Hakyll/Web/Preview/Poll.hs
+ src-interval/Hakyll/Web/Preview/Poll.hs
build-type: Simple
@@ -24,45 +26,85 @@ source-repository head
type: git
location: git://github.com/jaspervdj/Hakyll.git
+flag inotify
+ description: Use the inotify bindings for the preview server. Better, but
+ only works on Linux.
+ default: False
+
library
- ghc-options: -Wall
- hs-source-dirs: src
- build-depends: base >= 4 && < 5,
- filepath == 1.*,
- directory == 1.*,
- containers == 0.*,
- pandoc == 1.*,
- regex-base >= 0.93,
- regex-tdfa >= 1.1,
- network == 2.*,
- mtl >= 1,
- old-locale == 1.*,
- old-time == 1.*,
- time >= 1.1,
- binary >= 0.5,
- hamlet >= 0.4.2,
- blaze-html >= 0.4
- exposed-modules: Network.Hakyll.SimpleServer
- Text.Hakyll
- Text.Hakyll.Context
- Text.Hakyll.ContextManipulations
- Text.Hakyll.CreateContext
- Text.Hakyll.File
- Text.Hakyll.HakyllMonad
- Text.Hakyll.Regex
- Text.Hakyll.Render
- Text.Hakyll.HakyllAction
- Text.Hakyll.Paginate
- Text.Hakyll.Page
- Text.Hakyll.Pandoc
- Text.Hakyll.Util
- Text.Hakyll.Tags
- Text.Hakyll.Feed
- Text.Hakyll.Configurations.Static
- other-modules: Paths_hakyll
- Text.Hakyll.Internal.Cache
- Text.Hakyll.Internal.CompressCss
- Text.Hakyll.Internal.FileType
- Text.Hakyll.Internal.Template
- Text.Hakyll.Internal.Template.Template
- Text.Hakyll.Internal.Template.Hamlet
+ ghc-options: -Wall
+ hs-source-dirs: src
+
+ if flag(inotify)
+ hs-source-dirs: src-inotify
+ build-depends: hinotify >= 0.3
+ else
+ hs-source-dirs: src-interval
+
+ build-depends: base >= 4 && < 5,
+ filepath == 1.*,
+ directory == 1.*,
+ containers == 0.*,
+ pandoc == 1.*,
+ regex-base >= 0.93,
+ regex-pcre >= 0.93,
+ mtl >= 1,
+ old-locale == 1.*,
+ old-time == 1.*,
+ time >= 1.1,
+ binary >= 0.5,
+ hamlet >= 0.7,
+ blaze-html >= 0.4,
+ snap-server >= 0.4,
+ snap-core >= 0.4,
+ bytestring >= 0.9,
+ utf8-string >= 0.3,
+ tagsoup >= 0.12,
+ hopenssl >= 1.4,
+ unix >= 2.4,
+ strict-concurrency >= 0.2
+ exposed-modules: Hakyll
+ Hakyll.Main
+ Hakyll.Web.Util.Url
+ Hakyll.Web.Preview.Server
+ Hakyll.Web.Preview.Poll
+ Hakyll.Web.CompressCss
+ Hakyll.Web.Template
+ Hakyll.Web.Feed
+ Hakyll.Web.Tags
+ Hakyll.Web.Pandoc
+ Hakyll.Web.FileType
+ Hakyll.Web.Page
+ Hakyll.Web.Template.Read
+ Hakyll.Web.RelativizeUrls
+ Hakyll.Web.Page.Read
+ Hakyll.Web.Page.Metadata
+ Hakyll.Core.ResourceProvider.FileResourceProvider
+ Hakyll.Core.Configuration
+ Hakyll.Core.Identifier.Pattern
+ Hakyll.Core.UnixFilter
+ Hakyll.Core.Util.Arrow
+ Hakyll.Core.Util.File
+ Hakyll.Core.Util.String
+ Hakyll.Core.ResourceProvider
+ Hakyll.Core.CompiledItem
+ Hakyll.Core.Compiler
+ Hakyll.Core.CopyFile
+ Hakyll.Core.Run
+ Hakyll.Core.Store
+ Hakyll.Core.Writable
+ Hakyll.Core.Identifier
+ Hakyll.Core.DirectedGraph.Dot
+ Hakyll.Core.DirectedGraph.DependencySolver
+ Hakyll.Core.DirectedGraph
+ Hakyll.Core.Rules
+ Hakyll.Core.Routes
+ Hakyll.Core.Logger
+ other-modules: Paths_hakyll
+ Hakyll.Web.Template.Read.Hakyll
+ Hakyll.Web.Template.Read.Hamlet
+ Hakyll.Web.Template.Internal
+ Hakyll.Web.Page.Internal
+ Hakyll.Core.Compiler.Internal
+ Hakyll.Core.DirectedGraph.Internal
+ Hakyll.Core.Rules.Internal
diff --git a/src-inotify/Hakyll/Web/Preview/Poll.hs b/src-inotify/Hakyll/Web/Preview/Poll.hs
new file mode 100644
index 0000000..686f045
--- /dev/null
+++ b/src-inotify/Hakyll/Web/Preview/Poll.hs
@@ -0,0 +1,51 @@
+-- | Filesystem polling with an inotify backend. Works only on linux.
+--
+module Hakyll.Web.Preview.Poll
+ ( previewPoll
+ ) where
+
+import Control.Monad (forM_, when)
+import Data.Set (Set)
+import qualified Data.Set as S
+import System.FilePath (takeDirectory, (</>))
+import Data.List (isPrefixOf)
+
+import System.INotify
+
+import Hakyll.Core.Configuration
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+
+-- | Calls the given callback when the directory tree changes
+--
+previewPoll :: HakyllConfiguration -- ^ Configuration
+ -> Set Resource -- ^ Resources to watch
+ -> IO () -- ^ Action called when something changes
+ -> IO () -- ^ Can block forever
+previewPoll _ resources callback = do
+ -- Initialize inotify
+ inotify <- initINotify
+
+ let -- A set of file paths
+ paths = S.map (toFilePath . unResource) resources
+
+ -- A list of directories. Run it through a set so we have every
+ -- directory only once.
+ directories = S.toList $ S.map (notEmpty . takeDirectory) paths
+
+ -- Problem: we can't add a watcher for "". So we make sure a directory
+ -- name is not empty
+ notEmpty "" = "."
+ notEmpty x = x
+
+ -- Execute the callback when path is known
+ ifResource path =
+ let path' = if "./" `isPrefixOf` path then drop 2 path else path
+ in when (path' `S.member` paths) callback
+
+ -- Add a watcher for every directory
+ forM_ directories $ \directory -> do
+ _ <- addWatch inotify [Modify] directory $ \e -> case e of
+ (Modified _ (Just p)) -> ifResource $ directory </> p
+ _ -> return ()
+ return ()
diff --git a/src-interval/Hakyll/Web/Preview/Poll.hs b/src-interval/Hakyll/Web/Preview/Poll.hs
new file mode 100644
index 0000000..ec6df0c
--- /dev/null
+++ b/src-interval/Hakyll/Web/Preview/Poll.hs
@@ -0,0 +1,36 @@
+-- | Interval-based implementation of preview polling, for the platforms which
+-- are not supported by inotify.
+--
+module Hakyll.Web.Preview.Poll
+ ( previewPoll
+ ) where
+
+import Control.Applicative ((<$>))
+import Control.Concurrent (threadDelay)
+import Control.Monad (when)
+import System.Time (getClockTime)
+import Data.Set (Set)
+import qualified Data.Set as S
+import System.Directory (getModificationTime)
+
+import Hakyll.Core.Configuration
+import Hakyll.Core.Identifier
+import Hakyll.Core.ResourceProvider
+
+-- | A preview thread that periodically recompiles the site.
+--
+previewPoll :: HakyllConfiguration -- ^ Configuration
+ -> Set Resource -- ^ Resources to watch
+ -> IO () -- ^ Action called when something changes
+ -> IO () -- ^ Can block forever
+previewPoll _ resources callback = do
+ let files = map (toFilePath . unResource) $ S.toList resources
+ time <- getClockTime
+ loop files time
+ where
+ delay = 1000000
+ loop files time = do
+ threadDelay delay
+ modified <- any (time <) <$> mapM getModificationTime files
+ when modified callback
+ loop files =<< getClockTime
diff --git a/src/Hakyll.hs b/src/Hakyll.hs
new file mode 100644
index 0000000..0261044
--- /dev/null
+++ b/src/Hakyll.hs
@@ -0,0 +1,55 @@
+-- | Top-level module exporting all modules that are interesting for the user
+--
+module Hakyll
+ ( module Hakyll.Core.Compiler
+ , module Hakyll.Core.CopyFile
+ , module Hakyll.Core.Configuration
+ , module Hakyll.Core.Identifier
+ , module Hakyll.Core.Identifier.Pattern
+ , module Hakyll.Core.ResourceProvider
+ , module Hakyll.Core.Routes
+ , module Hakyll.Core.Rules
+ , module Hakyll.Core.UnixFilter
+ , module Hakyll.Core.Util.Arrow
+ , module Hakyll.Core.Util.File
+ , module Hakyll.Core.Util.String
+ , module Hakyll.Core.Writable
+ , module Hakyll.Main
+ , module Hakyll.Web.CompressCss
+ , module Hakyll.Web.Feed
+ , module Hakyll.Web.FileType
+ , module Hakyll.Web.Page
+ , module Hakyll.Web.Page.Metadata
+ , module Hakyll.Web.Page.Read
+ , module Hakyll.Web.Pandoc
+ , module Hakyll.Web.RelativizeUrls
+ , module Hakyll.Web.Tags
+ , module Hakyll.Web.Template
+ , module Hakyll.Web.Util.Url
+ ) where
+
+import Hakyll.Core.Compiler
+import Hakyll.Core.CopyFile
+import Hakyll.Core.Configuration
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Routes
+import Hakyll.Core.Rules
+import Hakyll.Core.UnixFilter
+import Hakyll.Core.Util.Arrow
+import Hakyll.Core.Util.File
+import Hakyll.Core.Util.String
+import Hakyll.Core.Writable
+import Hakyll.Main
+import Hakyll.Web.CompressCss
+import Hakyll.Web.Feed
+import Hakyll.Web.FileType
+import Hakyll.Web.Page
+import Hakyll.Web.Page.Metadata
+import Hakyll.Web.Page.Read
+import Hakyll.Web.Pandoc
+import Hakyll.Web.RelativizeUrls
+import Hakyll.Web.Tags
+import Hakyll.Web.Template
+import Hakyll.Web.Util.Url
diff --git a/src/Hakyll/Core/CompiledItem.hs b/src/Hakyll/Core/CompiledItem.hs
new file mode 100644
index 0000000..5dd0efc
--- /dev/null
+++ b/src/Hakyll/Core/CompiledItem.hs
@@ -0,0 +1,45 @@
+-- | A module containing a box datatype representing a compiled item. This
+-- item can be of any type, given that a few restrictions hold:
+--
+-- * we need a 'Typeable' instance to perform type-safe casts;
+--
+-- * we need a 'Binary' instance so we can serialize these items to the cache;
+--
+-- * we need a 'Writable' instance so the results can be saved.
+--
+{-# LANGUAGE ExistentialQuantification #-}
+module Hakyll.Core.CompiledItem
+ ( CompiledItem (..)
+ , compiledItem
+ , unCompiledItem
+ ) where
+
+import Data.Binary (Binary)
+import Data.Typeable (Typeable, cast)
+import Data.Maybe (fromMaybe)
+
+import Hakyll.Core.Writable
+
+-- | Box type for a compiled item
+--
+data CompiledItem = forall a. (Binary a, Typeable a, Writable a)
+ => CompiledItem a
+
+instance Writable CompiledItem where
+ write p (CompiledItem x) = write p x
+
+-- | Box a value into a 'CompiledItem'
+--
+compiledItem :: (Binary a, Typeable a, Writable a)
+ => a
+ -> CompiledItem
+compiledItem = CompiledItem
+
+-- | Unbox a value from a 'CompiledItem'
+--
+unCompiledItem :: (Binary a, Typeable a, Writable a)
+ => CompiledItem
+ -> a
+unCompiledItem (CompiledItem x) = fromMaybe error' $ cast x
+ where
+ error' = error "Hakyll.Core.CompiledItem.unCompiledItem: Unsupported type"
diff --git a/src/Hakyll/Core/Compiler.hs b/src/Hakyll/Core/Compiler.hs
new file mode 100644
index 0000000..e5da9b8
--- /dev/null
+++ b/src/Hakyll/Core/Compiler.hs
@@ -0,0 +1,333 @@
+-- | A Compiler manages targets and dependencies between targets
+--
+-- The most distinguishing property of a 'Compiler' is that it is an Arrow. A
+-- compiler of the type @Compiler a b@ is simply a compilation phase which takes
+-- an @a@ as input, and produces a @b@ as output.
+--
+-- Compilers are chained using the '>>>' arrow operation. If we have a compiler
+--
+-- > getResourceString :: Compiler Resource String
+--
+-- which reads the resource, and a compiler
+--
+-- > readPage :: Compiler String (Page String)
+--
+-- we can chain these two compilers to get a
+--
+-- > (getResourceString >>> readPage) :: Compiler Resource (Page String)
+--
+-- Most compilers can be created by combining smaller compilers using '>>>'.
+--
+-- More advanced constructions are also possible using arrow, and sometimes
+-- these are needed. For a good introduction to arrow, you can refer to
+--
+-- <http://en.wikibooks.org/wiki/Haskell/Understanding_arrows>
+--
+-- A construction worth writing a few paragraphs about here is the 'require'
+-- functions. Different variants of this function are exported here, but they
+-- all serve more or less the same goal.
+--
+-- When you use only '>>>' to chain your compilers, you get a linear pipeline --
+-- it is not possible to add extra items from other compilers along the way.
+-- This is where the 'require' functions come in.
+--
+-- This function allows you to reference other items, which are then added to
+-- the pipeline. Let's look at this crappy ASCII illustration which represents
+-- a pretty common scenario:
+--
+-- > read resource >>> pandoc render >>> layout >>> relativize URL's
+-- >
+-- > @templates/fancy.html@
+--
+-- We want to construct a pipeline of compilers to go from our resource to a
+-- proper webpage. However, the @layout@ compiler takes more than just the
+-- rendered page as input: it needs the @templates/fancy.html@ template as well.
+--
+-- This is an example of where we need the @require@ function. We can solve
+-- this using a construction that looks like:
+--
+-- > ... >>> pandoc render >>> require >>> layout >>> ...
+-- > |
+-- > @templates/fancy.html@ ------/
+--
+-- This illustration can help us understand the type signature of 'require'.
+--
+-- > require :: (Binary a, Typeable a, Writable a)
+-- > => Identifier
+-- > -> (b -> a -> c)
+-- > -> Compiler b c
+--
+-- Let's look at it in detail:
+--
+-- > (Binary a, Typeable a, Writable a)
+--
+-- These are constraints for the @a@ type. @a@ (the template) needs to have
+-- certain properties for it to be required.
+--
+-- > Identifier
+--
+-- This is simply @templates/fancy.html@: the 'Identifier' of the item we want
+-- to 'require', in other words, the name of the item we want to add to the
+-- pipeline somehow.
+--
+-- > (b -> a -> c)
+--
+-- This is a function given by the user, specifying /how/ the two items shall be
+-- merged. @b@ is the output of the previous compiler, and @a@ is the item we
+-- just required -- the template. This means @c@ will be the final output of the
+-- 'require' combinator.
+--
+-- > Compiler b c
+--
+-- Indeed, we have now constructed a compiler which takes a @b@ and produces a
+-- @c@. This means that we have a linear pipeline again, thanks to the 'require'
+-- function. So, the 'require' function actually helps to reduce the complexity
+-- of Hakyll applications!
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Compiler
+ ( Compiler
+ , runCompiler
+ , getIdentifier
+ , getRoute
+ , getRouteFor
+ , getResourceString
+ , fromDependency
+ , require_
+ , require
+ , requireA
+ , requireAll_
+ , requireAll
+ , requireAllA
+ , cached
+ , unsafeCompiler
+ , traceShowCompiler
+ , mapCompiler
+ , timedCompiler
+ , byExtension
+ ) where
+
+import Prelude hiding ((.), id)
+import Control.Arrow ((>>>), (&&&), arr)
+import Control.Applicative ((<$>))
+import Control.Monad.Reader (ask)
+import Control.Monad.Trans (liftIO)
+import Control.Category (Category, (.), id)
+import Data.Maybe (fromMaybe)
+import System.FilePath (takeExtension)
+
+import Data.Binary (Binary)
+import Data.Typeable (Typeable)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Store
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.Logger
+
+-- | Run a compiler, yielding the resulting target and its dependencies. This
+-- version of 'runCompilerJob' also stores the result
+--
+runCompiler :: Compiler () CompileRule -- ^ Compiler to run
+ -> Identifier -- ^ Target identifier
+ -> ResourceProvider -- ^ Resource provider
+ -> Routes -- ^ Route
+ -> Store -- ^ Store
+ -> Bool -- ^ Was the resource modified?
+ -> Logger -- ^ Logger
+ -> IO CompileRule -- ^ Resulting item
+runCompiler compiler identifier provider routes store modified logger = do
+ -- Run the compiler job
+ result <-
+ runCompilerJob compiler identifier provider routes store modified logger
+
+ -- Inspect the result
+ case result of
+ -- In case we compiled an item, we will store a copy in the cache first,
+ -- before we return control. This makes sure the compiled item can later
+ -- be accessed by e.g. require.
+ CompileRule (CompiledItem x) ->
+ storeSet store "Hakyll.Core.Compiler.runCompiler" identifier x
+
+ -- Otherwise, we do nothing here
+ _ -> return ()
+
+ return result
+
+-- | Get the identifier of the item that is currently being compiled
+--
+getIdentifier :: Compiler a Identifier
+getIdentifier = fromJob $ const $ CompilerM $ compilerIdentifier <$> ask
+
+-- | Get the route we are using for this item
+--
+getRoute :: Compiler a (Maybe FilePath)
+getRoute = getIdentifier >>> getRouteFor
+
+-- | Get the route for a specified item
+--
+getRouteFor :: Compiler Identifier (Maybe FilePath)
+getRouteFor = fromJob $ \identifier -> CompilerM $ do
+ routes <- compilerRoutes <$> ask
+ return $ runRoutes routes identifier
+
+-- | Get the resource we are compiling as a string
+--
+getResourceString :: Compiler Resource String
+getResourceString = fromJob $ \resource -> CompilerM $ do
+ provider <- compilerResourceProvider <$> ask
+ liftIO $ resourceString provider resource
+
+-- | Auxiliary: get a dependency
+--
+getDependency :: (Binary a, Writable a, Typeable a)
+ => Identifier -> CompilerM a
+getDependency identifier = CompilerM $ do
+ store <- compilerStore <$> ask
+ fmap (fromMaybe error') $ liftIO $
+ storeGet store "Hakyll.Core.Compiler.runCompiler" identifier
+ where
+ error' = error $ "Hakyll.Core.Compiler.getDependency: "
+ ++ show identifier
+ ++ " not found in the cache, the cache might be corrupted or"
+ ++ " the item you are referring to might not exist"
+
+
+-- | Variant of 'require' which drops the current value
+--
+require_ :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> Compiler b a
+require_ identifier =
+ fromDependency identifier >>> fromJob (const $ getDependency identifier)
+
+-- | Require another target. Using this function ensures automatic handling of
+-- dependencies
+--
+require :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> (b -> a -> c)
+ -> Compiler b c
+require identifier = requireA identifier . arr . uncurry
+
+-- | Arrow-based variant of 'require'
+--
+requireA :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -> Compiler (b, a) c
+ -> Compiler b c
+requireA identifier = (id &&& require_ identifier >>>)
+
+-- | Variant of 'requireAll' which drops the current value
+--
+requireAll_ :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> Compiler b [a]
+requireAll_ pattern = fromDependencies (const getDeps) >>> fromJob requireAll_'
+ where
+ getDeps = matches pattern . map unResource . resourceList
+ requireAll_' = const $ CompilerM $ do
+ deps <- getDeps . compilerResourceProvider <$> ask
+ mapM (unCompilerM . getDependency) deps
+
+-- | Require a number of targets. Using this function ensures automatic handling
+-- of dependencies
+--
+requireAll :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> (b -> [a] -> c)
+ -> Compiler b c
+requireAll pattern = requireAllA pattern . arr . uncurry
+
+-- | Arrow-based variant of 'requireAll'
+--
+requireAllA :: (Binary a, Typeable a, Writable a)
+ => Pattern
+ -> Compiler (b, [a]) c
+ -> Compiler b c
+requireAllA pattern = (id &&& requireAll_ pattern >>>)
+
+cached :: (Binary a, Typeable a, Writable a)
+ => String
+ -> Compiler Resource a
+ -> Compiler Resource a
+cached name (Compiler d j) = Compiler d $ const $ CompilerM $ do
+ logger <- compilerLogger <$> ask
+ identifier <- compilerIdentifier <$> ask
+ store <- compilerStore <$> ask
+ modified <- compilerResourceModified <$> ask
+ report logger $ "Checking cache: " ++ if modified then "modified" else "OK"
+ if modified
+ then do v <- unCompilerM $ j $ Resource identifier
+ liftIO $ storeSet store name identifier v
+ return v
+ else do v <- liftIO $ storeGet store name identifier
+ case v of Just v' -> return v'
+ Nothing -> error'
+ where
+ error' = error "Hakyll.Core.Compiler.cached: Cache corrupt!"
+
+-- | Create an unsafe compiler from a function in IO
+--
+unsafeCompiler :: (a -> IO b) -- ^ Function to lift
+ -> Compiler a b -- ^ Resulting compiler
+unsafeCompiler f = fromJob $ CompilerM . liftIO . f
+
+-- | Compiler for debugging purposes
+--
+traceShowCompiler :: Show a => Compiler a a
+traceShowCompiler = fromJob $ \x -> CompilerM $ do
+ logger <- compilerLogger <$> ask
+ report logger $ show x
+ return x
+
+-- | Map over a compiler
+--
+mapCompiler :: Compiler a b
+ -> Compiler [a] [b]
+mapCompiler (Compiler d j) = Compiler d $ mapM j
+
+-- | Log and time a compiler
+--
+timedCompiler :: String -- ^ Message
+ -> Compiler a b -- ^ Compiler to time
+ -> Compiler a b -- ^ Resulting compiler
+timedCompiler msg (Compiler d j) = Compiler d $ \x -> CompilerM $ do
+ logger <- compilerLogger <$> ask
+ timed logger msg $ unCompilerM $ j x
+
+-- | Choose a compiler by extension
+--
+-- Example:
+--
+-- > route "css/*" $ setExtension "css"
+-- > compile "css/*" $ byExtension (error "Not a (S)CSS file")
+-- > [ (".css", compressCssCompiler)
+-- > , (".scss", sass)
+-- > ]
+--
+-- This piece of code will select the @compressCssCompiler@ for @.css@ files,
+-- and the @sass@ compiler (defined elsewhere) for @.scss@ files.
+--
+byExtension :: Compiler a b -- ^ Default compiler
+ -> [(String, Compiler a b)] -- ^ Choices
+ -> Compiler a b -- ^ Resulting compiler
+byExtension defaultCompiler choices = Compiler deps job
+ where
+ -- Lookup the compiler, give an error when it is not found
+ lookup' identifier =
+ let extension = takeExtension $ toFilePath identifier
+ in fromMaybe defaultCompiler $ lookup extension choices
+ -- Collect the dependencies of the choice
+ deps = do
+ identifier <- dependencyIdentifier <$> ask
+ compilerDependencies $ lookup' identifier
+ -- Collect the job of the choice
+ job x = CompilerM $ do
+ identifier <- compilerIdentifier <$> ask
+ unCompilerM $ compilerJob (lookup' identifier) x
diff --git a/src/Hakyll/Core/Compiler/Internal.hs b/src/Hakyll/Core/Compiler/Internal.hs
new file mode 100644
index 0000000..53df044
--- /dev/null
+++ b/src/Hakyll/Core/Compiler/Internal.hs
@@ -0,0 +1,146 @@
+-- | Internally used compiler module
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Compiler.Internal
+ ( Dependencies
+ , DependencyEnvironment (..)
+ , CompilerEnvironment (..)
+ , CompilerM (..)
+ , Compiler (..)
+ , runCompilerJob
+ , runCompilerDependencies
+ , fromJob
+ , fromDependencies
+ , fromDependency
+ ) where
+
+import Prelude hiding ((.), id)
+import Control.Applicative (Applicative, pure, (<*>), (<$>))
+import Control.Monad.Reader (ReaderT, Reader, ask, runReaderT, runReader)
+import Control.Monad ((<=<), liftM2)
+import Data.Set (Set)
+import qualified Data.Set as S
+import Control.Category (Category, (.), id)
+import Control.Arrow (Arrow, ArrowChoice, arr, first, left)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Store
+import Hakyll.Core.Routes
+import Hakyll.Core.Logger
+
+-- | A set of dependencies
+--
+type Dependencies = Set Identifier
+
+-- | Environment in which the dependency analyzer runs
+--
+data DependencyEnvironment = DependencyEnvironment
+ { -- | Target identifier
+ dependencyIdentifier :: Identifier
+ , -- | Resource provider
+ dependencyResourceProvider :: ResourceProvider
+ }
+
+-- | Environment in which a compiler runs
+--
+data CompilerEnvironment = CompilerEnvironment
+ { -- | Target identifier
+ compilerIdentifier :: Identifier
+ , -- | Resource provider
+ compilerResourceProvider :: ResourceProvider
+ , -- | Site routes
+ compilerRoutes :: Routes
+ , -- | Compiler store
+ compilerStore :: Store
+ , -- | Flag indicating if the underlying resource was modified
+ compilerResourceModified :: Bool
+ , -- | Logger
+ compilerLogger :: Logger
+ }
+
+-- | The compiler monad
+--
+newtype CompilerM a = CompilerM
+ { unCompilerM :: ReaderT CompilerEnvironment IO a
+ } deriving (Monad, Functor, Applicative)
+
+-- | The compiler arrow
+--
+data Compiler a b = Compiler
+ { compilerDependencies :: Reader DependencyEnvironment Dependencies
+ , compilerJob :: a -> CompilerM b
+ }
+
+instance Functor (Compiler a) where
+ fmap f ~(Compiler d j) = Compiler d $ fmap f . j
+
+instance Applicative (Compiler a) where
+ pure = Compiler (return S.empty) . const . return
+ ~(Compiler d1 f) <*> ~(Compiler d2 j) =
+ Compiler (liftM2 S.union d1 d2) $ \x -> f x <*> j x
+
+instance Category Compiler where
+ id = Compiler (return S.empty) return
+ ~(Compiler d1 j1) . ~(Compiler d2 j2) =
+ Compiler (liftM2 S.union d1 d2) (j1 <=< j2)
+
+instance Arrow Compiler where
+ arr f = Compiler (return S.empty) (return . f)
+ first ~(Compiler d j) = Compiler d $ \(x, y) -> do
+ x' <- j x
+ return (x', y)
+
+instance ArrowChoice Compiler where
+ left ~(Compiler d j) = Compiler d $ \e -> case e of
+ Left l -> Left <$> j l
+ Right r -> Right <$> return r
+
+-- | Run a compiler, yielding the resulting target and its dependencies
+--
+runCompilerJob :: Compiler () a -- ^ Compiler to run
+ -> Identifier -- ^ Target identifier
+ -> ResourceProvider -- ^ Resource provider
+ -> Routes -- ^ Route
+ -> Store -- ^ Store
+ -> Bool -- ^ Was the resource modified?
+ -> Logger -- ^ Logger
+ -> IO a
+runCompilerJob compiler identifier provider route store modified logger =
+ runReaderT (unCompilerM $ compilerJob compiler ()) env
+ where
+ env = CompilerEnvironment
+ { compilerIdentifier = identifier
+ , compilerResourceProvider = provider
+ , compilerRoutes = route
+ , compilerStore = store
+ , compilerResourceModified = modified
+ , compilerLogger = logger
+ }
+
+runCompilerDependencies :: Compiler () a
+ -> Identifier
+ -> ResourceProvider
+ -> Dependencies
+runCompilerDependencies compiler identifier provider =
+ runReader (compilerDependencies compiler) env
+ where
+ env = DependencyEnvironment
+ { dependencyIdentifier = identifier
+ , dependencyResourceProvider = provider
+ }
+
+fromJob :: (a -> CompilerM b)
+ -> Compiler a b
+fromJob = Compiler (return S.empty)
+
+fromDependencies :: (Identifier -> ResourceProvider -> [Identifier])
+ -> Compiler b b
+fromDependencies collectDeps = flip Compiler return $ do
+ DependencyEnvironment identifier provider <- ask
+ return $ S.fromList $ collectDeps identifier provider
+
+-- | Wait until another compiler has finished before running this compiler
+--
+fromDependency :: Identifier -> Compiler a a
+fromDependency = fromDependencies . const . const . return
diff --git a/src/Hakyll/Core/Configuration.hs b/src/Hakyll/Core/Configuration.hs
new file mode 100644
index 0000000..242b68f
--- /dev/null
+++ b/src/Hakyll/Core/Configuration.hs
@@ -0,0 +1,44 @@
+-- | Exports a datastructure for the top-level hakyll configuration
+--
+module Hakyll.Core.Configuration
+ ( HakyllConfiguration (..)
+ , defaultHakyllConfiguration
+ ) where
+
+import System.FilePath (takeFileName)
+import Data.List (isPrefixOf, isSuffixOf)
+
+data HakyllConfiguration = HakyllConfiguration
+ { -- | Directory in which the output is written
+ destinationDirectory :: FilePath
+ , -- | Directory where hakyll's internal store is kept
+ storeDirectory :: FilePath
+ , -- | Function to determine ignored files
+ --
+ -- In 'defaultHakyllConfiguration', the following files are ignored:
+ --
+ -- * files starting with a @.@
+ --
+ -- * files ending with a @~@
+ --
+ -- * files ending with @.swp@
+ --
+ ignoreFile :: FilePath -> Bool
+ }
+
+-- | Default configuration for a hakyll application
+--
+defaultHakyllConfiguration :: HakyllConfiguration
+defaultHakyllConfiguration = HakyllConfiguration
+ { destinationDirectory = "_site"
+ , storeDirectory = "_cache"
+ , ignoreFile = ignoreFile'
+ }
+ where
+ ignoreFile' path
+ | "." `isPrefixOf` fileName = True
+ | "~" `isSuffixOf` fileName = True
+ | ".swp" `isSuffixOf` fileName = True
+ | otherwise = False
+ where
+ fileName = takeFileName path
diff --git a/src/Hakyll/Core/CopyFile.hs b/src/Hakyll/Core/CopyFile.hs
new file mode 100644
index 0000000..dbbaaa1
--- /dev/null
+++ b/src/Hakyll/Core/CopyFile.hs
@@ -0,0 +1,29 @@
+-- | Exports simple compilers to just copy files
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
+module Hakyll.Core.CopyFile
+ ( CopyFile (..)
+ , copyFileCompiler
+ ) where
+
+import Control.Arrow ((>>^))
+import System.Directory (copyFile)
+
+import Data.Typeable (Typeable)
+import Data.Binary (Binary)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Writable
+import Hakyll.Core.Compiler
+import Hakyll.Core.Identifier
+
+-- | Newtype construct around 'FilePath' which will copy the file directly
+--
+newtype CopyFile = CopyFile {unCopyFile :: FilePath}
+ deriving (Show, Eq, Ord, Binary, Typeable)
+
+instance Writable CopyFile where
+ write dst (CopyFile src) = copyFile src dst
+
+copyFileCompiler :: Compiler Resource CopyFile
+copyFileCompiler = getIdentifier >>^ CopyFile . toFilePath
diff --git a/src/Hakyll/Core/DirectedGraph.hs b/src/Hakyll/Core/DirectedGraph.hs
new file mode 100644
index 0000000..76a030b
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph.hs
@@ -0,0 +1,85 @@
+-- | Representation of a directed graph. In Hakyll, this is used for dependency
+-- tracking.
+--
+module Hakyll.Core.DirectedGraph
+ ( DirectedGraph
+ , fromList
+ , member
+ , nodes
+ , neighbours
+ , reverse
+ , reachableNodes
+ , sanitize
+ ) where
+
+import Prelude hiding (reverse)
+import Data.Monoid (mconcat)
+import Data.Set (Set)
+import Data.Maybe (fromMaybe)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+import Hakyll.Core.DirectedGraph.Internal
+
+-- | Construction of directed graphs
+--
+fromList :: Ord a
+ => [(a, Set a)] -- ^ List of (node, reachable neighbours)
+ -> DirectedGraph a -- ^ Resulting directed graph
+fromList = DirectedGraph . M.fromList . map (\(t, d) -> (t, Node t d))
+
+-- | Check if a node lies in the given graph
+--
+member :: Ord a
+ => a -- ^ Node to check for
+ -> DirectedGraph a -- ^ Directed graph to check in
+ -> Bool -- ^ If the node lies in the graph
+member n = M.member n . unDirectedGraph
+
+-- | Get all nodes in the graph
+--
+nodes :: Ord a
+ => DirectedGraph a -- ^ Graph to get the nodes from
+ -> Set a -- ^ All nodes in the graph
+nodes = M.keysSet . unDirectedGraph
+
+-- | Get a set of reachable neighbours from a directed graph
+--
+neighbours :: Ord a
+ => a -- ^ Node to get the neighbours of
+ -> DirectedGraph a -- ^ Graph to search in
+ -> Set a -- ^ Set containing the neighbours
+neighbours x = fromMaybe S.empty . fmap nodeNeighbours
+ . M.lookup x . unDirectedGraph
+
+-- | Reverse a directed graph (i.e. flip all edges)
+--
+reverse :: Ord a
+ => DirectedGraph a
+ -> DirectedGraph a
+reverse = mconcat . map reverse' . M.toList . unDirectedGraph
+ where
+ reverse' (id', Node _ neighbours') = fromList $
+ zip (S.toList neighbours') $ repeat $ S.singleton id'
+
+-- | Find all reachable nodes from a given set of nodes in the directed graph
+--
+reachableNodes :: Ord a => Set a -> DirectedGraph a -> Set a
+reachableNodes set graph = reachable (setNeighbours set) set
+ where
+ reachable next visited
+ | S.null next = visited
+ | otherwise = reachable (sanitize' neighbours') (next `S.union` visited)
+ where
+ sanitize' = S.filter (`S.notMember` visited)
+ neighbours' = setNeighbours (sanitize' next)
+
+ setNeighbours = S.unions . map (`neighbours` graph) . S.toList
+
+-- | Remove all dangling pointers, i.e. references to nodes that do
+-- not actually exist in the graph.
+--
+sanitize :: Ord a => DirectedGraph a -> DirectedGraph a
+sanitize (DirectedGraph graph) = DirectedGraph $ M.map sanitize' graph
+ where
+ sanitize' (Node t n) = Node t $ S.filter (`M.member` graph) n
diff --git a/src/Hakyll/Core/DirectedGraph/DependencySolver.hs b/src/Hakyll/Core/DirectedGraph/DependencySolver.hs
new file mode 100644
index 0000000..54826ff
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/DependencySolver.hs
@@ -0,0 +1,70 @@
+-- | Given a dependency graph, this module provides a function that will
+-- generate an order in which the graph can be visited, so that all the
+-- dependencies of a given node have been visited before the node itself is
+-- visited.
+--
+module Hakyll.Core.DirectedGraph.DependencySolver
+ ( solveDependencies
+ ) where
+
+import Prelude
+import qualified Prelude as P
+import Data.Set (Set)
+import Data.Maybe (mapMaybe)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+import Hakyll.Core.DirectedGraph
+import Hakyll.Core.DirectedGraph.Internal
+
+-- | Solve a dependency graph. This function returns an order to run the
+-- different nodes
+--
+solveDependencies :: Ord a
+ => DirectedGraph a -- ^ Graph
+ -> [a] -- ^ Resulting plan
+solveDependencies = P.reverse . order [] [] S.empty
+
+-- | Produce a reversed order using a stack
+--
+order :: Ord a
+ => [a] -- ^ Temporary result
+ -> [Node a] -- ^ Backtrace stack
+ -> Set a -- ^ Items in the stack
+ -> DirectedGraph a -- ^ Graph
+ -> [a] -- ^ Ordered result
+order temp stack set graph@(DirectedGraph graph')
+ -- Empty graph - return our current result
+ | M.null graph' = temp
+ | otherwise = case stack of
+
+ -- Empty stack - pick a node, and add it to the stack
+ [] ->
+ let (tag, node) = M.findMin graph'
+ in order temp (node : stack) (S.insert tag set) graph
+
+ -- At least one item on the stack - continue using this item
+ (node : stackTail) ->
+ -- Check which dependencies are still in the graph
+ let tag = nodeTag node
+ deps = S.toList $ nodeNeighbours node
+ unsatisfied = mapMaybe (`M.lookup` graph') deps
+ in case unsatisfied of
+
+ -- All dependencies for node are satisfied, we can return it and
+ -- remove it from the graph
+ [] -> order (tag : temp) stackTail (S.delete tag set)
+ (DirectedGraph $ M.delete tag graph')
+
+ -- There is at least one dependency left. We need to solve that
+ -- one first...
+ (dep : _) -> if nodeTag dep `S.member` set
+ -- The dependency is already in our stack - cycle detected!
+ then cycleError
+ -- Continue with the dependency
+ else order temp (dep : node : stackTail)
+ (S.insert (nodeTag dep) set)
+ graph
+ where
+ cycleError = error $ "Hakyll.Core.DirectedGraph.DependencySolver.order: "
+ ++ "Cycle detected!" -- TODO: Dump cycle
diff --git a/src/Hakyll/Core/DirectedGraph/Dot.hs b/src/Hakyll/Core/DirectedGraph/Dot.hs
new file mode 100644
index 0000000..8289992
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/Dot.hs
@@ -0,0 +1,30 @@
+-- | Dump a directed graph in dot format. Used for debugging purposes
+--
+module Hakyll.Core.DirectedGraph.Dot
+ ( toDot
+ , writeDot
+ ) where
+
+import Hakyll.Core.DirectedGraph
+import qualified Data.Set as S
+
+-- | Convert a directed graph into dot format for debugging purposes
+--
+toDot :: Ord a
+ => (a -> String) -- ^ Convert nodes to dot names
+ -> DirectedGraph a -- ^ Graph to dump
+ -> String -- ^ Resulting string
+toDot showTag graph = unlines $ concat
+ [ return "digraph dependencies {"
+ , concatMap showNode (S.toList $ nodes graph)
+ , return "}"
+ ]
+ where
+ showNode node = map (showEdge node) $ S.toList $ neighbours node graph
+ showEdge x y = " \"" ++ showTag x ++ "\" -> \"" ++ showTag y ++ "\";"
+
+-- | Write out the @.dot@ file to a given file path. See 'toDot' for more
+-- information.
+--
+writeDot :: Ord a => FilePath -> (a -> String) -> DirectedGraph a -> IO ()
+writeDot path showTag = writeFile path . toDot showTag
diff --git a/src/Hakyll/Core/DirectedGraph/Internal.hs b/src/Hakyll/Core/DirectedGraph/Internal.hs
new file mode 100644
index 0000000..5b02ad6
--- /dev/null
+++ b/src/Hakyll/Core/DirectedGraph/Internal.hs
@@ -0,0 +1,43 @@
+-- | Internal structure of the DirectedGraph type. Not exported outside of the
+-- library.
+--
+module Hakyll.Core.DirectedGraph.Internal
+ ( Node (..)
+ , DirectedGraph (..)
+ ) where
+
+import Prelude hiding (reverse, filter)
+import Data.Monoid (Monoid, mempty, mappend)
+import Data.Set (Set)
+import Data.Map (Map)
+import qualified Data.Map as M
+import qualified Data.Set as S
+
+-- | A node in the directed graph
+--
+data Node a = Node
+ { nodeTag :: a -- ^ Tag identifying the node
+ , nodeNeighbours :: Set a -- ^ Edges starting at this node
+ } deriving (Show)
+
+-- | Append two nodes. Useful for joining graphs.
+--
+appendNodes :: Ord a => Node a -> Node a -> Node a
+appendNodes (Node t1 n1) (Node t2 n2)
+ | t1 /= t2 = error'
+ | otherwise = Node t1 (n1 `S.union` n2)
+ where
+ error' = error $ "Hakyll.Core.DirectedGraph.Internal.appendNodes: "
+ ++ "Appending differently tagged nodes"
+
+-- | Type used to represent a directed graph
+--
+newtype DirectedGraph a = DirectedGraph {unDirectedGraph :: Map a (Node a)}
+ deriving (Show)
+
+-- | Allow users to concatenate different graphs
+--
+instance Ord a => Monoid (DirectedGraph a) where
+ mempty = DirectedGraph M.empty
+ mappend (DirectedGraph m1) (DirectedGraph m2) = DirectedGraph $
+ M.unionWith appendNodes m1 m2
diff --git a/src/Hakyll/Core/Identifier.hs b/src/Hakyll/Core/Identifier.hs
new file mode 100644
index 0000000..16403e6
--- /dev/null
+++ b/src/Hakyll/Core/Identifier.hs
@@ -0,0 +1,59 @@
+-- | An identifier is a type used to uniquely identify a resource, target...
+--
+-- One can think of an identifier as something similar to a file path. An
+-- identifier is a path as well, with the different elements in the path
+-- separated by @/@ characters. Examples of identifiers are:
+--
+-- * @posts/foo.markdown@
+--
+-- * @index@
+--
+-- * @error/404@
+--
+-- The most important difference between an 'Identifier' and a file path is that
+-- the identifier for an item is not necessarily the file path.
+--
+-- For example, we could have an @index@ identifier, generated by Hakyll. The
+-- actual file path would be @index.html@, but we identify it using @index@.
+--
+-- @posts/foo.markdown@ could be an identifier of an item that is rendered to
+-- @posts/foo.html@. In this case, the identifier is the name of the source
+-- file of the page.
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Identifier
+ ( Identifier (..)
+ , parseIdentifier
+ , toFilePath
+ ) where
+
+import Control.Arrow (second)
+import Data.Monoid (Monoid)
+
+import GHC.Exts (IsString, fromString)
+import System.FilePath (joinPath)
+
+-- | An identifier used to uniquely identify a value
+--
+newtype Identifier = Identifier {unIdentifier :: [String]}
+ deriving (Eq, Ord, Monoid)
+
+instance Show Identifier where
+ show = toFilePath
+
+instance IsString Identifier where
+ fromString = parseIdentifier
+
+-- | Parse an identifier from a string
+--
+parseIdentifier :: String -> Identifier
+parseIdentifier = Identifier . filter (not . null) . split'
+ where
+ split' [] = [[]]
+ split' str = let (pre, post) = second (drop 1) $ break (== '/') str
+ in pre : split' post
+
+-- | Convert an identifier to a relative 'FilePath'
+--
+toFilePath :: Identifier -> FilePath
+toFilePath = joinPath . unIdentifier
diff --git a/src/Hakyll/Core/Identifier/Pattern.hs b/src/Hakyll/Core/Identifier/Pattern.hs
new file mode 100644
index 0000000..7c88356
--- /dev/null
+++ b/src/Hakyll/Core/Identifier/Pattern.hs
@@ -0,0 +1,160 @@
+-- | Module providing pattern matching and capturing on 'Identifier's.
+--
+-- A very simple pattern could be, for example, @foo\/bar@. This pattern will
+-- only match the exact @foo\/bar@ identifier.
+--
+-- To match more than one identifier, there are different captures that one can
+-- use:
+--
+-- * @*@: matches exactly one element of an identifier;
+--
+-- * @**@: matches one or more elements of an identifier.
+--
+-- Some examples:
+--
+-- * @foo\/*@ will match @foo\/bar@ and @foo\/foo@, but not @foo\/bar\/qux@ nor
+-- @foo@;
+--
+-- * @**@ will match any non-empty identifier;
+--
+-- * @foo\/**@ will match @foo\/bar@ and @foo\/bar\/qux@, but not @bar\/foo@ nor
+-- @foo@;
+--
+-- A small warning: patterns are not globs. Using @foo\/*.markdown@ will not do
+-- what you probably intended, as it will only match the file which is literally
+-- called @foo\/*.markdown@. Remember that these captures only work on elements
+-- of identifiers as a whole; not on parts of these elements.
+--
+-- Furthermore, the 'match' function allows the user to get access to the
+-- elements captured by the capture elements in the pattern.
+--
+module Hakyll.Core.Identifier.Pattern
+ ( Pattern
+ , parsePattern
+ , match
+ , doesMatch
+ , matches
+ , fromCapture
+ , fromCaptureString
+ , fromCaptures
+ ) where
+
+import Data.List (intercalate)
+import Control.Monad (msum)
+import Data.Maybe (isJust)
+import Data.Monoid (mempty, mappend)
+
+import GHC.Exts (IsString, fromString)
+
+import Hakyll.Core.Identifier
+
+-- | One base element of a pattern
+--
+data PatternComponent = CaptureOne
+ | CaptureMany
+ | Literal String
+ deriving (Eq)
+
+instance Show PatternComponent where
+ show CaptureOne = "*"
+ show CaptureMany = "**"
+ show (Literal s) = s
+
+-- | Type that allows matching on identifiers
+--
+newtype Pattern = Pattern {unPattern :: [PatternComponent]}
+ deriving (Eq)
+
+instance Show Pattern where
+ show = intercalate "/" . map show . unPattern
+
+instance IsString Pattern where
+ fromString = parsePattern
+
+-- | Parse a pattern from a string
+--
+parsePattern :: String -> Pattern
+parsePattern = Pattern . map toPattern . unIdentifier . parseIdentifier
+ where
+ toPattern x | x == "*" = CaptureOne
+ | x == "**" = CaptureMany
+ | otherwise = Literal x
+
+-- | Match an identifier against a pattern, generating a list of captures
+--
+match :: Pattern -> Identifier -> Maybe [Identifier]
+match (Pattern p) (Identifier i) = fmap (map Identifier) $ match' p i
+
+-- | Check if an identifier matches a pattern
+--
+doesMatch :: Pattern -> Identifier -> Bool
+doesMatch p = isJust . match p
+
+-- | Given a list of identifiers, retain only those who match the given pattern
+--
+matches :: Pattern -> [Identifier] -> [Identifier]
+matches p = filter (doesMatch p)
+
+-- | Split a list at every possible point, generate a list of (init, tail) cases
+--
+splits :: [a] -> [([a], [a])]
+splits ls = reverse $ splits' [] ls
+ where
+ splits' lx ly = (lx, ly) : case ly of
+ [] -> []
+ (y : ys) -> splits' (lx ++ [y]) ys
+
+-- | Internal version of 'match'
+--
+match' :: [PatternComponent] -> [String] -> Maybe [[String]]
+match' [] [] = Just [] -- An empty match
+match' [] _ = Nothing -- No match
+match' _ [] = Nothing -- No match
+match' (m : ms) (s : ss) = case m of
+ -- Take one string and one literal, fail on mismatch
+ Literal l -> if s == l then match' ms ss else Nothing
+ -- Take one string and one capture
+ CaptureOne -> fmap ([s] :) $ match' ms ss
+ -- Take one string, and one or many captures
+ CaptureMany ->
+ let take' (i, t) = fmap (i :) $ match' ms t
+ in msum $ map take' $ splits (s : ss)
+
+-- | Create an identifier from a pattern by filling in the captures with a given
+-- string
+--
+-- Example:
+--
+-- > fromCapture (parsePattern "tags/*") (parseIdentifier "foo")
+--
+-- Result:
+--
+-- > "tags/foo"
+--
+fromCapture :: Pattern -> Identifier -> Identifier
+fromCapture pattern = fromCaptures pattern . repeat
+
+-- | Simplified version of 'fromCapture' which takes a 'String' instead of an
+-- 'Identifier'
+--
+-- > fromCaptureString (parsePattern "tags/*") "foo"
+--
+-- Result:
+--
+-- > "tags/foo"
+--
+fromCaptureString :: Pattern -> String -> Identifier
+fromCaptureString pattern = fromCapture pattern . parseIdentifier
+
+-- | Create an identifier from a pattern by filling in the captures with the
+-- given list of strings
+--
+fromCaptures :: Pattern -> [Identifier] -> Identifier
+fromCaptures (Pattern []) _ = mempty
+fromCaptures (Pattern (m : ms)) [] = case m of
+ Literal l -> Identifier [l] `mappend` fromCaptures (Pattern ms) []
+ _ -> error $ "Hakyll.Core.Identifier.Pattern.fromCaptures: "
+ ++ "identifier list exhausted"
+fromCaptures (Pattern (m : ms)) ids@(i : is) = case m of
+ Literal l -> Identifier [l] `mappend` fromCaptures (Pattern ms) ids
+ _ -> i `mappend` fromCaptures (Pattern ms) is
diff --git a/src/Hakyll/Core/Logger.hs b/src/Hakyll/Core/Logger.hs
new file mode 100644
index 0000000..720dee0
--- /dev/null
+++ b/src/Hakyll/Core/Logger.hs
@@ -0,0 +1,90 @@
+-- | Produce pretty, thread-safe logs
+--
+{-# LANGUAGE BangPatterns #-}
+module Hakyll.Core.Logger
+ ( Logger
+ , makeLogger
+ , flushLogger
+ , section
+ , timed
+ , report
+ ) where
+
+import Control.Monad (forever)
+import Control.Monad.Trans (MonadIO, liftIO)
+import Control.Applicative ((<$>), (<*>))
+import Control.Concurrent (forkIO)
+import Control.Concurrent.Chan.Strict (Chan, newChan, readChan, writeChan)
+import Control.Concurrent.MVar.Strict (MVar, newEmptyMVar, takeMVar, putMVar)
+import Text.Printf (printf)
+
+import Data.Time (getCurrentTime, diffUTCTime)
+
+-- | Logger structure. Very complicated.
+--
+data Logger = Logger
+ { loggerChan :: Chan (Maybe String) -- Nothing marks the end
+ , loggerSync :: MVar () -- Used for sync on quit
+ }
+
+-- | Create a new logger
+--
+makeLogger :: IO Logger
+makeLogger = do
+ logger <- Logger <$> newChan <*> newEmptyMVar
+ _ <- forkIO $ loggerThread logger
+ return logger
+ where
+ loggerThread logger = forever $ do
+ msg <- readChan $ loggerChan logger
+ case msg of
+ -- Stop: sync
+ Nothing -> putMVar (loggerSync logger) ()
+ -- Print and continue
+ Just m -> putStrLn m
+
+-- | Flush the logger (blocks until flushed)
+--
+flushLogger :: Logger -> IO ()
+flushLogger logger = do
+ writeChan (loggerChan logger) Nothing
+ () <- takeMVar $ loggerSync logger
+ return ()
+
+-- | Send a raw message to the logger
+--
+message :: Logger -> String -> IO ()
+message logger = writeChan (loggerChan logger) . Just
+
+-- | Start a section in the log
+--
+section :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Section name
+ -> m () -- ^ No result
+section logger = liftIO . message logger
+
+-- | Execute a monadic action and log the duration
+--
+timed :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Message
+ -> m a -- ^ Action
+ -> m a -- ^ Timed and logged action
+timed logger msg action = do
+ start <- liftIO getCurrentTime
+ !result <- action
+ stop <- liftIO getCurrentTime
+ let diff = fromEnum $ diffUTCTime stop start
+ ms = diff `div` 10 ^ (9 :: Int)
+ formatted = printf " [%4dms] %s" ms msg
+ liftIO $ message logger formatted
+ return result
+
+-- | Log something at the same level as 'timed', but without the timing
+--
+report :: MonadIO m
+ => Logger -- ^ Logger
+ -> String -- ^ Message
+ -> m () -- ^ No result
+report logger msg = liftIO $ message logger $ " [ ] " ++ msg
diff --git a/src/Hakyll/Core/ResourceProvider.hs b/src/Hakyll/Core/ResourceProvider.hs
new file mode 100644
index 0000000..dcd4af0
--- /dev/null
+++ b/src/Hakyll/Core/ResourceProvider.hs
@@ -0,0 +1,75 @@
+-- | This module provides an API for resource providers. Resource providers
+-- allow Hakyll to get content from resources; the type of resource depends on
+-- the concrete instance.
+--
+-- A resource is represented by the 'Resource' type. This is basically just a
+-- newtype wrapper around 'Identifier' -- but it has an important effect: it
+-- guarantees that a resource with this identifier can be provided by one or
+-- more resource providers.
+--
+-- Therefore, it is not recommended to read files directly -- you should use the
+-- provided 'Resource' methods.
+--
+module Hakyll.Core.ResourceProvider
+ ( Resource (..)
+ , ResourceProvider (..)
+ , resourceExists
+ , resourceDigest
+ , resourceModified
+ ) where
+
+import Control.Monad ((<=<))
+import Data.Word (Word8)
+
+import qualified Data.ByteString.Lazy as LB
+import OpenSSL.Digest.ByteString.Lazy (digest)
+import OpenSSL.Digest (MessageDigest (MD5))
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Store
+
+-- | A resource
+--
+-- Invariant: the resource specified by the given identifier must exist
+--
+newtype Resource = Resource {unResource :: Identifier}
+ deriving (Eq, Show, Ord)
+
+-- | A value responsible for retrieving and listing resources
+--
+data ResourceProvider = ResourceProvider
+ { -- | A list of all resources this provider is able to provide
+ resourceList :: [Resource]
+ , -- | Retrieve a certain resource as string
+ resourceString :: Resource -> IO String
+ , -- | Retrieve a certain resource as lazy bytestring
+ resourceLazyByteString :: Resource -> IO LB.ByteString
+ }
+
+-- | Check if a given identifier has a resource
+--
+resourceExists :: ResourceProvider -> Identifier -> Bool
+resourceExists provider = flip elem $ map unResource $ resourceList provider
+
+-- | Retrieve a digest for a given resource
+--
+resourceDigest :: ResourceProvider -> Resource -> IO [Word8]
+resourceDigest provider = digest MD5 <=< resourceLazyByteString provider
+
+-- | Check if a resource was modified
+--
+resourceModified :: ResourceProvider -> Resource -> Store -> IO Bool
+resourceModified provider resource store = do
+ -- Get the latest seen digest from the store
+ lastDigest <- storeGet store itemName $ unResource resource
+ -- Calculate the digest for the resource
+ newDigest <- resourceDigest provider resource
+ -- Check digests
+ if Just newDigest == lastDigest
+ -- All is fine, not modified
+ then return False
+ -- Resource modified; store new digest
+ else do storeSet store itemName (unResource resource) newDigest
+ return True
+ where
+ itemName = "Hakyll.Core.ResourceProvider.resourceModified"
diff --git a/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs b/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs
new file mode 100644
index 0000000..0d89b21
--- /dev/null
+++ b/src/Hakyll/Core/ResourceProvider/FileResourceProvider.hs
@@ -0,0 +1,29 @@
+-- | A concrete 'ResourceProvider' that gets its resources from the filesystem
+--
+module Hakyll.Core.ResourceProvider.FileResourceProvider
+ ( fileResourceProvider
+ ) where
+
+import Control.Applicative ((<$>))
+
+import qualified Data.ByteString.Lazy as LB
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+import Hakyll.Core.Configuration
+
+-- | Create a filesystem-based 'ResourceProvider'
+--
+fileResourceProvider :: HakyllConfiguration -> IO ResourceProvider
+fileResourceProvider configuration = do
+ -- Retrieve a list of identifiers
+ list <- map parseIdentifier . filter (not . ignoreFile configuration) <$>
+ getRecursiveContents False "."
+
+ -- Construct a resource provider
+ return ResourceProvider
+ { resourceList = map Resource list
+ , resourceString = readFile . toFilePath . unResource
+ , resourceLazyByteString = LB.readFile . toFilePath . unResource
+ }
diff --git a/src/Hakyll/Core/Routes.hs b/src/Hakyll/Core/Routes.hs
new file mode 100644
index 0000000..fcab28d
--- /dev/null
+++ b/src/Hakyll/Core/Routes.hs
@@ -0,0 +1,136 @@
+-- | Once a target is compiled, the user usually wants to save it to the disk.
+-- This is where the 'Routes' type comes in; it determines where a certain
+-- target should be written.
+--
+-- Suppose we have an item @foo\/bar.markdown@. We can render this to
+-- @foo\/bar.html@ using:
+--
+-- > route "foo/bar.markdown" (setExtension ".html")
+--
+-- If we do not want to change the extension, we can use 'idRoute', the simplest
+-- route available:
+--
+-- > route "foo/bar.markdown" idRoute
+--
+-- That will route @foo\/bar.markdown@ to @foo\/bar.markdown@.
+--
+-- Note that the extension says nothing about the content! If you set the
+-- extension to @.html@, it is your own responsibility to ensure that the
+-- content is indeed HTML.
+--
+-- Finally, some special cases:
+--
+-- * If there is no route for an item, this item will not be routed, so it will
+-- not appear in your site directory.
+--
+-- * If an item matches multiple routes, the first rule will be chosen.
+--
+module Hakyll.Core.Routes
+ ( Routes
+ , runRoutes
+ , idRoute
+ , setExtension
+ , ifMatch
+ , customRoute
+ , gsubRoute
+ , composeRoutes
+ ) where
+
+import Data.Monoid (Monoid, mempty, mappend)
+import Control.Monad (mplus)
+import System.FilePath (replaceExtension)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Util.String
+
+-- | Type used for a route
+--
+newtype Routes = Routes {unRoutes :: Identifier -> Maybe FilePath}
+
+instance Monoid Routes where
+ mempty = Routes $ const Nothing
+ mappend (Routes f) (Routes g) = Routes $ \id' -> f id' `mplus` g id'
+
+-- | Apply a route to an identifier
+--
+runRoutes :: Routes -> Identifier -> Maybe FilePath
+runRoutes = unRoutes
+
+-- | A route that uses the identifier as filepath. For example, the target with
+-- ID @foo\/bar@ will be written to the file @foo\/bar@.
+--
+idRoute :: Routes
+idRoute = Routes $ Just . toFilePath
+
+-- | Set (or replace) the extension of a route.
+--
+-- Example:
+--
+-- > runRoute (setExtension "html") "foo/bar"
+--
+-- Result:
+--
+-- > Just "foo/bar.html"
+--
+-- Example:
+--
+-- > runRoute (setExtension "html") "posts/the-art-of-trolling.markdown"
+--
+-- Result:
+--
+-- > Just "posts/the-art-of-trolling.html"
+--
+setExtension :: String -> Routes
+setExtension extension = Routes $ fmap (`replaceExtension` extension)
+ . unRoutes idRoute
+
+-- | Modify a route: apply the route if the identifier matches the given
+-- pattern, fail otherwise.
+--
+ifMatch :: Pattern -> Routes -> Routes
+ifMatch pattern (Routes route) = Routes $ \id' ->
+ if doesMatch pattern id' then route id'
+ else Nothing
+
+-- | Create a custom route. This should almost always be used with 'ifMatch'.
+--
+customRoute :: (Identifier -> FilePath) -> Routes
+customRoute f = Routes $ Just . f
+
+-- | Create a gsub route
+--
+-- Example:
+--
+-- > runRoutes (gsubRoute "rss/" (const "")) "tags/rss/bar.xml"
+--
+-- Result:
+--
+-- > Just "tags/bar.xml"
+--
+gsubRoute :: String -- ^ Pattern
+ -> (String -> String) -- ^ Replacement
+ -> Routes -- ^ Resulting route
+gsubRoute pattern replacement = customRoute $
+ replaceAll pattern replacement . toFilePath
+
+-- | Compose routes so that @f `composeRoutes` g@ is more or less equivalent
+-- to @f >>> g@.
+--
+-- Example:
+--
+-- > let routes = gsubRoute "rss/" (const "") `composeRoutes` setExtension "xml"
+-- > in runRoutes routes "tags/rss/bar"
+--
+-- Result:
+--
+-- > Just "tags/bar.xml"
+--
+-- If the first route given fails, Hakyll will not apply the second route.
+--
+composeRoutes :: Routes -- ^ First route to apply
+ -> Routes -- ^ Second route to apply
+ -> Routes -- ^ Resulting route
+composeRoutes (Routes f) (Routes g) = Routes $ \i -> do
+ p <- f i
+ g $ parseIdentifier p
diff --git a/src/Hakyll/Core/Rules.hs b/src/Hakyll/Core/Rules.hs
new file mode 100644
index 0000000..eba3fb9
--- /dev/null
+++ b/src/Hakyll/Core/Rules.hs
@@ -0,0 +1,161 @@
+-- | This module provides a declarative DSL in which the user can specify the
+-- different rules used to run the compilers.
+--
+-- The convention is to just list all items in the 'RulesM' monad, routes and
+-- compilation rules.
+--
+-- A typical usage example would be:
+--
+-- > main = hakyll $ do
+-- > route "posts/*" (setExtension "html")
+-- > compile "posts/*" someCompiler
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings #-}
+module Hakyll.Core.Rules
+ ( RulesM
+ , Rules
+ , compile
+ , create
+ , route
+ , metaCompile
+ , metaCompileWith
+ ) where
+
+import Control.Applicative ((<$>))
+import Control.Monad.Writer (tell)
+import Control.Monad.Reader (ask)
+import Control.Arrow (second, (>>>), arr, (>>^))
+import Control.Monad.State (get, put)
+import Data.Monoid (mempty)
+import qualified Data.Set as S
+
+import Data.Typeable (Typeable)
+import Data.Binary (Binary)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Identifier.Pattern
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.Util.Arrow
+
+-- | Add a route
+--
+tellRoute :: Routes -> Rules
+tellRoute route' = RulesM $ tell $ RuleSet route' mempty mempty
+
+-- | Add a number of compilers
+--
+tellCompilers :: (Binary a, Typeable a, Writable a)
+ => [(Identifier, Compiler () a)]
+ -> Rules
+tellCompilers compilers = RulesM $ tell $ RuleSet mempty compilers' mempty
+ where
+ compilers' = map (second boxCompiler) compilers
+ boxCompiler = (>>> arr compiledItem >>> arr CompileRule)
+
+-- | Add resources
+--
+tellResources :: [Resource]
+ -> Rules
+tellResources resources = RulesM $ tell $ RuleSet mempty mempty $ S.fromList resources
+
+-- | Add a compilation rule to the rules.
+--
+-- This instructs all resources matching the given pattern to be compiled using
+-- the given compiler. When no resources match the given pattern, nothing will
+-- happen. In this case, you might want to have a look at 'create'.
+--
+compile :: (Binary a, Typeable a, Writable a)
+ => Pattern -> Compiler Resource a -> Rules
+compile pattern compiler = RulesM $ do
+ identifiers <- matches pattern . map unResource . resourceList <$> ask
+ unRulesM $ do
+ tellCompilers $ flip map identifiers $ \identifier ->
+ (identifier, constA (Resource identifier) >>> compiler)
+ tellResources $ map Resource identifiers
+
+-- | Add a compilation rule
+--
+-- This sets a compiler for the given identifier. No resource is needed, since
+-- we are creating the item from scratch. This is useful if you want to create a
+-- page on your site that just takes content from other items -- but has no
+-- actual content itself.
+--
+create :: (Binary a, Typeable a, Writable a)
+ => Identifier -> Compiler () a -> Rules
+create identifier compiler = tellCompilers [(identifier, compiler)]
+
+-- | Add a route.
+--
+-- This adds a route for all items matching the given pattern.
+--
+route :: Pattern -> Routes -> Rules
+route pattern route' = tellRoute $ ifMatch pattern route'
+
+-- | Apart from regular compilers, one is also able to specify metacompilers.
+-- Metacompilers are a special class of compilers: they are compilers which
+-- produce other compilers.
+--
+-- This is needed when the list of compilers depends on something we cannot know
+-- before actually running other compilers. The most typical example is if we
+-- have a blogpost using tags.
+--
+-- Every post has a collection of tags. For example,
+--
+-- > post1: code, haskell
+-- > post2: code, random
+--
+-- Now, we want to create a list of posts for every tag. We cannot write this
+-- down in our 'Rules' DSL directly, since we don't know what tags the different
+-- posts will have -- we depend on information that will only be available when
+-- we are actually compiling the pages.
+--
+-- The solution is simple, using 'metaCompile', we can add a compiler that will
+-- parse the pages and produce the compilers needed for the different tag pages.
+--
+-- And indeed, we can see that the first argument to 'metaCompile' is a
+-- 'Compiler' which produces a list of ('Identifier', 'Compiler') pairs. The
+-- idea is simple: 'metaCompile' produces a list of compilers, and the
+-- corresponding identifiers.
+--
+-- For simple hakyll systems, there is no need for this construction. More
+-- formally, it is only needed when the content of one or more items determines
+-- which items must be rendered.
+--
+metaCompile :: (Binary a, Typeable a, Writable a)
+ => Compiler () [(Identifier, Compiler () a)]
+ -- ^ Compiler generating the other compilers
+ -> Rules
+ -- ^ Resulting rules
+metaCompile compiler = RulesM $ do
+ -- Create an identifier from the state
+ state <- get
+ let index = rulesMetaCompilerIndex state
+ id' = fromCaptureString "Hakyll.Core.Rules.metaCompile/*" (show index)
+
+ -- Update the state with a new identifier
+ put $ state {rulesMetaCompilerIndex = index + 1}
+
+ -- Fallback to 'metaCompileWith' with now known identifier
+ unRulesM $ metaCompileWith id' compiler
+
+-- | Version of 'metaCompile' that allows you to specify a custom identifier for
+-- the metacompiler.
+--
+metaCompileWith :: (Binary a, Typeable a, Writable a)
+ => Identifier
+ -- ^ Identifier for this compiler
+ -> Compiler () [(Identifier, Compiler () a)]
+ -- ^ Compiler generating the other compilers
+ -> Rules
+ -- ^ Resulting rules
+metaCompileWith identifier compiler = RulesM $ tell $
+ RuleSet mempty compilers mempty
+ where
+ makeRule = MetaCompileRule . map (second box)
+ compilers = [(identifier, compiler >>> arr makeRule )]
+ box = (>>> fromDependency identifier >>^ CompileRule . compiledItem)
diff --git a/src/Hakyll/Core/Rules/Internal.hs b/src/Hakyll/Core/Rules/Internal.hs
new file mode 100644
index 0000000..2895257
--- /dev/null
+++ b/src/Hakyll/Core/Rules/Internal.hs
@@ -0,0 +1,75 @@
+-- | Internal rules module for types which are not exposed to the user
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Rules.Internal
+ ( CompileRule (..)
+ , RuleSet (..)
+ , RuleState (..)
+ , RulesM (..)
+ , Rules
+ , runRules
+ ) where
+
+import Control.Applicative (Applicative)
+import Control.Monad.Writer (WriterT, execWriterT)
+import Control.Monad.Reader (ReaderT, runReaderT)
+import Control.Monad.State (State, evalState)
+import Data.Monoid (Monoid, mempty, mappend)
+import Data.Set (Set)
+
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.Routes
+import Hakyll.Core.CompiledItem
+
+-- | Output of a compiler rule
+--
+-- * The compiler will produce a simple item. This is the most common case.
+--
+-- * The compiler will produce more compilers. These new compilers need to be
+-- added to the runtime if possible, since other items might depend upon them.
+--
+data CompileRule = CompileRule CompiledItem
+ | MetaCompileRule [(Identifier, Compiler () CompileRule)]
+
+-- | A collection of rules for the compilation process
+--
+data RuleSet = RuleSet
+ { -- | Routes used in the compilation structure
+ rulesRoutes :: Routes
+ , -- | Compilation rules
+ rulesCompilers :: [(Identifier, Compiler () CompileRule)]
+ , -- | A list of the used resources
+ rulesResources :: Set Resource
+ }
+
+instance Monoid RuleSet where
+ mempty = RuleSet mempty mempty mempty
+ mappend (RuleSet r1 c1 s1) (RuleSet r2 c2 s2) =
+ RuleSet (mappend r1 r2) (mappend c1 c2) (mappend s1 s2)
+
+-- | Rule state
+--
+data RuleState = RuleState
+ { rulesMetaCompilerIndex :: Int
+ } deriving (Show)
+
+-- | The monad used to compose rules
+--
+newtype RulesM a = RulesM
+ { unRulesM :: ReaderT ResourceProvider (WriterT RuleSet (State RuleState)) a
+ } deriving (Monad, Functor, Applicative)
+
+-- | Simplification of the RulesM type; usually, it will not return any
+-- result.
+--
+type Rules = RulesM ()
+
+-- | Run a Rules monad, resulting in a 'RuleSet'
+--
+runRules :: Rules -> ResourceProvider -> RuleSet
+runRules rules provider =
+ evalState (execWriterT $ runReaderT (unRulesM rules) provider) state
+ where
+ state = RuleState {rulesMetaCompilerIndex = 0}
diff --git a/src/Hakyll/Core/Run.hs b/src/Hakyll/Core/Run.hs
new file mode 100644
index 0000000..09864be
--- /dev/null
+++ b/src/Hakyll/Core/Run.hs
@@ -0,0 +1,207 @@
+-- | This is the module which binds it all together
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Hakyll.Core.Run
+ ( run
+ ) where
+
+import Prelude hiding (reverse)
+import Control.Monad (filterM)
+import Control.Monad.Trans (liftIO)
+import Control.Applicative (Applicative, (<$>))
+import Control.Monad.Reader (ReaderT, runReaderT, ask)
+import Control.Monad.State.Strict (StateT, evalStateT, get, modify)
+import Control.Arrow ((&&&))
+import qualified Data.Map as M
+import Data.Monoid (mempty, mappend)
+import System.FilePath ((</>))
+import Data.Set (Set)
+import qualified Data.Set as S
+
+import Hakyll.Core.Routes
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+import Hakyll.Core.Compiler
+import Hakyll.Core.Compiler.Internal
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.ResourceProvider.FileResourceProvider
+import Hakyll.Core.Rules.Internal
+import Hakyll.Core.DirectedGraph
+import Hakyll.Core.DirectedGraph.DependencySolver
+import Hakyll.Core.Writable
+import Hakyll.Core.Store
+import Hakyll.Core.Configuration
+import Hakyll.Core.Logger
+
+-- | Run all rules needed, return the rule set used
+--
+run :: HakyllConfiguration -> Rules -> IO RuleSet
+run configuration rules = do
+ logger <- makeLogger
+
+ section logger "Initialising"
+ store <- timed logger "Creating store" $
+ makeStore $ storeDirectory configuration
+ provider <- timed logger "Creating provider" $
+ fileResourceProvider configuration
+
+ let ruleSet = runRules rules provider
+ compilers = rulesCompilers ruleSet
+
+ -- Extract the reader/state
+ reader = unRuntime $ addNewCompilers [] compilers
+ state' = runReaderT reader $ env logger ruleSet provider store
+
+ evalStateT state' state
+
+ -- Flush and return
+ flushLogger logger
+ return ruleSet
+ where
+ env logger ruleSet provider store = RuntimeEnvironment
+ { hakyllLogger = logger
+ , hakyllConfiguration = configuration
+ , hakyllRoutes = rulesRoutes ruleSet
+ , hakyllResourceProvider = provider
+ , hakyllStore = store
+ }
+
+ state = RuntimeState
+ { hakyllModified = S.empty
+ , hakyllGraph = mempty
+ }
+
+data RuntimeEnvironment = RuntimeEnvironment
+ { hakyllLogger :: Logger
+ , hakyllConfiguration :: HakyllConfiguration
+ , hakyllRoutes :: Routes
+ , hakyllResourceProvider :: ResourceProvider
+ , hakyllStore :: Store
+ }
+
+data RuntimeState = RuntimeState
+ { hakyllModified :: Set Identifier
+ , hakyllGraph :: DirectedGraph Identifier
+ }
+
+newtype Runtime a = Runtime
+ { unRuntime :: ReaderT RuntimeEnvironment (StateT RuntimeState IO) a
+ } deriving (Functor, Applicative, Monad)
+
+-- | Return a set of modified identifiers
+--
+modified :: ResourceProvider -- ^ Resource provider
+ -> Store -- ^ Store
+ -> [Identifier] -- ^ Identifiers to check
+ -> IO (Set Identifier) -- ^ Modified resources
+modified provider store ids = fmap S.fromList $ flip filterM ids $ \id' ->
+ if resourceExists provider id'
+ then resourceModified provider (Resource id') store
+ else return False
+
+-- | Add a number of compilers and continue using these compilers
+--
+addNewCompilers :: [(Identifier, Compiler () CompileRule)]
+ -- ^ Remaining compilers yet to be run
+ -> [(Identifier, Compiler () CompileRule)]
+ -- ^ Compilers to add
+ -> Runtime ()
+addNewCompilers oldCompilers newCompilers = Runtime $ do
+ -- Get some information
+ logger <- hakyllLogger <$> ask
+ section logger "Adding new compilers"
+ provider <- hakyllResourceProvider <$> ask
+ store <- hakyllStore <$> ask
+
+ let -- All compilers
+ compilers = oldCompilers ++ newCompilers
+
+ -- Get all dependencies for the compilers
+ dependencies = flip map compilers $ \(id', compiler) ->
+ let deps = runCompilerDependencies compiler id' provider
+ in (id', deps)
+
+ -- Create a compiler map (Id -> Compiler)
+ compilerMap = M.fromList compilers
+
+ -- Create the dependency graph
+ currentGraph = fromList dependencies
+
+ -- Find the old graph and append the new graph to it. This forms the
+ -- complete graph
+ completeGraph <- timed logger "Creating graph" $
+ mappend currentGraph . hakyllGraph <$> get
+
+ orderedCompilers <- timed logger "Solving dependencies" $ do
+ -- Check which items are up-to-date. This only needs to happen for the new
+ -- compilers
+ oldModified <- hakyllModified <$> get
+ newModified <- liftIO $ modified provider store $ map fst newCompilers
+
+ let modified' = oldModified `S.union` newModified
+
+ -- Find obsolete items. Every item that is reachable from a modified
+ -- item is considered obsolete. From these obsolete items, we are only
+ -- interested in ones that are in the current subgraph.
+ obsolete = S.filter (`member` currentGraph)
+ $ reachableNodes modified' $ reverse completeGraph
+
+ -- Solve the graph and retain only the obsolete items
+ ordered = filter (`S.member` obsolete) $ solveDependencies currentGraph
+
+ -- Update the state
+ modify $ updateState modified' completeGraph
+
+ -- Join the order with the compilers again
+ return $ map (id &&& (compilerMap M.!)) ordered
+
+ -- Now run the ordered list of compilers
+ unRuntime $ runCompilers orderedCompilers
+ where
+ -- Add the modified information for the new compilers
+ updateState modified' graph state = state
+ { hakyllModified = modified'
+ , hakyllGraph = graph
+ }
+
+runCompilers :: [(Identifier, Compiler () CompileRule)]
+ -- ^ Ordered list of compilers
+ -> Runtime ()
+ -- ^ No result
+runCompilers [] = return ()
+runCompilers ((id', compiler) : compilers) = Runtime $ do
+ -- Obtain information
+ logger <- hakyllLogger <$> ask
+ routes <- hakyllRoutes <$> ask
+ provider <- hakyllResourceProvider <$> ask
+ store <- hakyllStore <$> ask
+ modified' <- hakyllModified <$> get
+
+ section logger $ "Compiling " ++ show id'
+
+ let -- Check if the resource was modified
+ isModified = id' `S.member` modified'
+
+ -- Run the compiler
+ result <- timed logger "Total compile time" $ liftIO $
+ runCompiler compiler id' provider routes store isModified logger
+
+ case result of
+ -- Compile rule for one item, easy stuff
+ CompileRule compiled -> do
+ case runRoutes routes id' of
+ Nothing -> return ()
+ Just url -> timed logger ("Routing to " ++ url) $ do
+ destination <-
+ destinationDirectory . hakyllConfiguration <$> ask
+ let path = destination </> url
+ liftIO $ makeDirectories path
+ liftIO $ write path compiled
+
+ -- Continue for the remaining compilers
+ unRuntime $ runCompilers compilers
+
+ -- Metacompiler, slightly more complicated
+ MetaCompileRule newCompilers ->
+ -- Actually I was just kidding, it's not hard at all
+ unRuntime $ addNewCompilers compilers newCompilers
diff --git a/src/Hakyll/Core/Store.hs b/src/Hakyll/Core/Store.hs
new file mode 100644
index 0000000..12e33a7
--- /dev/null
+++ b/src/Hakyll/Core/Store.hs
@@ -0,0 +1,88 @@
+-- | A store for storing and retrieving items
+--
+module Hakyll.Core.Store
+ ( Store
+ , makeStore
+ , storeSet
+ , storeGet
+ ) where
+
+import Control.Concurrent.MVar (MVar, newMVar, readMVar, modifyMVar_)
+import System.FilePath ((</>))
+import System.Directory (doesFileExist)
+import Data.Map (Map)
+import qualified Data.Map as M
+
+import Data.Binary (Binary, encodeFile, decodeFile)
+import Data.Typeable (Typeable)
+
+import Hakyll.Core.CompiledItem
+import Hakyll.Core.Writable
+import Hakyll.Core.Identifier
+import Hakyll.Core.Util.File
+
+-- | Data structure used for the store
+--
+data Store = Store
+ { -- | All items are stored on the filesystem
+ storeDirectory :: FilePath
+ , -- | And some items are also kept in-memory
+ storeMap :: MVar (Map FilePath CompiledItem)
+ }
+
+-- | Initialize the store
+--
+makeStore :: FilePath -> IO Store
+makeStore directory = do
+ mvar <- newMVar M.empty
+ return Store
+ { storeDirectory = directory
+ , storeMap = mvar
+ }
+
+-- | Auxiliary: add an item to the map
+--
+addToMap :: (Binary a, Typeable a, Writable a)
+ => Store -> FilePath -> a -> IO ()
+addToMap store path value =
+ modifyMVar_ (storeMap store) $ return . M.insert path (compiledItem value)
+
+-- | Create a path
+--
+makePath :: Store -> String -> Identifier -> FilePath
+makePath store name identifier =
+ storeDirectory store </> name </> toFilePath identifier </> ".hakyllstore"
+
+-- | Store an item
+--
+storeSet :: (Binary a, Typeable a, Writable a)
+ => Store -> String -> Identifier -> a -> IO ()
+storeSet store name identifier value = do
+ makeDirectories path
+ encodeFile path value
+ addToMap store path value
+ where
+ path = makePath store name identifier
+
+-- | Load an item
+--
+storeGet :: (Binary a, Typeable a, Writable a)
+ => Store -> String -> Identifier -> IO (Maybe a)
+storeGet store name identifier = do
+ -- First check the in-memory map
+ map' <- readMVar $ storeMap store
+ case M.lookup path map' of
+ -- Found in the in-memory map
+ Just c -> return $ Just $ unCompiledItem c
+ -- Not found in the map, try the filesystem
+ Nothing -> do
+ exists <- doesFileExist path
+ if not exists
+ -- Not found in the filesystem either
+ then return Nothing
+ -- Found in the filesystem
+ else do v <- decodeFile path
+ addToMap store path v
+ return $ Just v
+ where
+ path = makePath store name identifier
diff --git a/src/Hakyll/Core/UnixFilter.hs b/src/Hakyll/Core/UnixFilter.hs
new file mode 100644
index 0000000..ee4b6cd
--- /dev/null
+++ b/src/Hakyll/Core/UnixFilter.hs
@@ -0,0 +1,76 @@
+-- | A Compiler that supports unix filters.
+--
+module Hakyll.Core.UnixFilter
+ ( unixFilter
+ ) where
+
+import Control.Concurrent (forkIO)
+import System.IO (hPutStr, hClose, hGetContents)
+import System.Posix.Process (executeFile, forkProcess)
+import System.Posix.IO ( dupTo, createPipe, stdInput
+ , stdOutput, closeFd, fdToHandle
+ )
+
+import Hakyll.Core.Compiler
+
+-- | Use a unix filter as compiler. For example, we could use the 'rev' program
+-- as a compiler.
+--
+-- > rev :: Compiler Resource String
+-- > rev = getResourceString >>> unixFilter "rev" []
+--
+-- A more realistic example: one can use this to call, for example, the sass
+-- compiler on CSS files. More information about sass can be found here:
+--
+-- <http://sass-lang.com/>
+--
+-- The code is fairly straightforward, given that we use @.scss@ for sass:
+--
+-- > route "style.scss" $ setExtension "css"
+-- > compile "style.scss" $
+-- > getResourceString >>> unixFilter "sass" ["-s", "--scss"]
+-- > >>> arr compressCss
+--
+unixFilter :: String -- ^ Program name
+ -> [String] -- ^ Program args
+ -> Compiler String String -- ^ Resulting compiler
+unixFilter programName args =
+ timedCompiler ("Executing external program " ++ programName) $
+ unsafeCompiler $ \input -> unixFilterIO programName args input
+
+-- | Internally used function
+--
+unixFilterIO :: String
+ -> [String]
+ -> String
+ -> IO String
+unixFilterIO programName args input = do
+ -- Create pipes
+ (stdinRead, stdinWrite) <- createPipe
+ (stdoutRead, stdoutWrite) <- createPipe
+
+ -- Fork the child
+ _ <- forkProcess $ do
+ -- Copy our pipes over the regular stdin/stdout
+ _ <- dupTo stdinRead stdInput
+ _ <- dupTo stdoutWrite stdOutput
+
+ -- Close the now unneeded file descriptors in the child
+ mapM_ closeFd [stdinWrite, stdoutRead, stdinRead, stdoutWrite]
+
+ -- Execute the program
+ _ <- executeFile programName True args Nothing
+ return ()
+
+ -- On the parent side, close the client-side FDs.
+ mapM_ closeFd [stdinRead, stdoutWrite]
+
+ -- Write the input to the child pipe
+ _ <- forkIO $ do
+ stdinWriteHandle <- fdToHandle stdinWrite
+ hPutStr stdinWriteHandle input
+ hClose stdinWriteHandle
+
+ -- Receive the output from the child
+ stdoutReadHandle <- fdToHandle stdoutRead
+ hGetContents stdoutReadHandle
diff --git a/src/Hakyll/Core/Util/Arrow.hs b/src/Hakyll/Core/Util/Arrow.hs
new file mode 100644
index 0000000..1896e11
--- /dev/null
+++ b/src/Hakyll/Core/Util/Arrow.hs
@@ -0,0 +1,25 @@
+-- | Various arrow utility functions
+--
+module Hakyll.Core.Util.Arrow
+ ( constA
+ , sequenceA
+ , unitA
+ ) where
+
+import Control.Arrow (Arrow, (&&&), arr, (>>^))
+
+constA :: Arrow a
+ => c
+ -> a b c
+constA = arr . const
+
+sequenceA :: Arrow a
+ => [a b c]
+ -> a b [c]
+sequenceA = foldl reduce $ constA []
+ where
+ reduce la xa = xa &&& la >>^ arr (uncurry (:))
+
+unitA :: Arrow a
+ => a b ()
+unitA = constA ()
diff --git a/src/Hakyll/Core/Util/File.hs b/src/Hakyll/Core/Util/File.hs
new file mode 100644
index 0000000..9babc8b
--- /dev/null
+++ b/src/Hakyll/Core/Util/File.hs
@@ -0,0 +1,90 @@
+-- | A module containing various file utility functions
+--
+module Hakyll.Core.Util.File
+ ( makeDirectories
+ , getRecursiveContents
+ , isFileObsolete
+ , isFileInternal
+ ) where
+
+import Control.Applicative ((<$>))
+import System.Time (ClockTime)
+import Control.Monad (forM, filterM)
+import Data.List (isPrefixOf)
+import System.Directory ( createDirectoryIfMissing, doesDirectoryExist
+ , doesFileExist, getModificationTime
+ , getDirectoryContents
+ )
+import System.FilePath ( normalise, takeDirectory, splitPath
+ , dropTrailingPathSeparator, (</>)
+ )
+
+import Hakyll.Core.Configuration
+
+-- | Given a path to a file, try to make the path writable by making
+-- all directories on the path.
+--
+makeDirectories :: FilePath -> IO ()
+makeDirectories = createDirectoryIfMissing True . takeDirectory
+
+-- | Get all contents of a directory. Note that files starting with a dot (.)
+-- will be ignored.
+--
+getRecursiveContents :: Bool -- ^ Include directories?
+ -> FilePath -- ^ Directory to search
+ -> IO [FilePath] -- ^ List of files found
+getRecursiveContents includeDirs topdir = do
+ topdirExists <- doesDirectoryExist topdir
+ if not topdirExists
+ then return []
+ else do
+ names <- filter isProper <$> getDirectoryContents topdir
+ paths <- forM names $ \name -> do
+ let path = normalise $ topdir </> name
+ isDirectory <- doesDirectoryExist path
+ if isDirectory then getRecursiveContents includeDirs path
+ else return [path]
+ return $ if includeDirs then topdir : concat paths
+ else concat paths
+ where
+ isProper = not . (== ".") . take 1
+
+-- | Check if a timestamp is obsolete compared to the timestamps of a number of
+-- files. When there are no files, it is never obsolete.
+--
+isObsolete :: ClockTime -- ^ The time to check.
+ -> [FilePath] -- ^ Dependencies of the cached file.
+ -> IO Bool
+isObsolete _ [] = return False
+isObsolete timeStamp depends = do
+ depends' <- filterM doesFileExist depends
+ dependsModified <- mapM getModificationTime depends'
+ return (timeStamp < maximum dependsModified)
+
+-- | Check if a file is obsolete, given its dependencies. When the file does
+-- not exist, it is always obsolete. Otherwise, it is obsolete if any of its
+-- dependencies has a more recent modification time than the file.
+--
+isFileObsolete :: FilePath -- ^ The cached file
+ -> [FilePath] -- ^ Dependencies of the cached file
+ -> IO Bool
+isFileObsolete file depends = do
+ exists <- doesFileExist file
+ if not exists
+ then return True
+ else do timeStamp <- getModificationTime file
+ isObsolete timeStamp depends
+
+-- | Check if a file is meant for Hakyll internal use, i.e. if it is located in
+-- the destination or store directory
+--
+isFileInternal :: HakyllConfiguration -- ^ Configuration
+ -> FilePath -- ^ File to check
+ -> Bool -- ^ If the given file is internal
+isFileInternal configuration file =
+ any (`isPrefixOf` split file) dirs
+ where
+ split = map dropTrailingPathSeparator . splitPath
+ dirs = map (split . ($ configuration)) [ destinationDirectory
+ , storeDirectory
+ ]
diff --git a/src/Hakyll/Core/Util/String.hs b/src/Hakyll/Core/Util/String.hs
new file mode 100644
index 0000000..7f75a36
--- /dev/null
+++ b/src/Hakyll/Core/Util/String.hs
@@ -0,0 +1,48 @@
+-- | Miscellaneous string manipulation functions.
+--
+module Hakyll.Core.Util.String
+ ( trim
+ , replaceAll
+ , splitAll
+ ) where
+
+import Data.Char (isSpace)
+import Data.Maybe (listToMaybe)
+
+import Text.Regex.PCRE ((=~~))
+
+-- | Trim a string (drop spaces, tabs and newlines at both sides).
+--
+trim :: String -> String
+trim = reverse . trim' . reverse . trim'
+ where
+ trim' = dropWhile isSpace
+
+-- | A simple (but inefficient) regex replace function
+--
+replaceAll :: String -- ^ Pattern
+ -> (String -> String) -- ^ Replacement (called on capture)
+ -> String -- ^ Source string
+ -> String -- ^ Result
+replaceAll pattern f source = replaceAll' source
+ where
+ replaceAll' src = case listToMaybe (src =~~ pattern) of
+ Nothing -> src
+ Just (o, l) ->
+ let (before, tmp) = splitAt o src
+ (capture, after) = splitAt l tmp
+ in before ++ f capture ++ replaceAll' after
+
+-- | A simple regex split function. The resulting list will contain no empty
+-- strings.
+--
+splitAll :: String -- ^ Pattern
+ -> String -- ^ String to split
+ -> [String] -- ^ Result
+splitAll pattern = filter (not . null) . splitAll'
+ where
+ splitAll' src = case listToMaybe (src =~~ pattern) of
+ Nothing -> [src]
+ Just (o, l) ->
+ let (before, tmp) = splitAt o src
+ in before : splitAll' (drop l tmp)
diff --git a/src/Hakyll/Core/Writable.hs b/src/Hakyll/Core/Writable.hs
new file mode 100644
index 0000000..a3fd421
--- /dev/null
+++ b/src/Hakyll/Core/Writable.hs
@@ -0,0 +1,22 @@
+-- | Describes writable items; items that can be saved to the disk
+--
+{-# LANGUAGE FlexibleInstances #-}
+module Hakyll.Core.Writable
+ ( Writable (..)
+ ) where
+
+import Data.Word (Word8)
+
+import qualified Data.ByteString as SB
+
+-- | Describes an item that can be saved to the disk
+--
+class Writable a where
+ -- | Save an item to the given filepath
+ write :: FilePath -> a -> IO ()
+
+instance Writable [Char] where
+ write = writeFile
+
+instance Writable [Word8] where
+ write p = SB.writeFile p . SB.pack
diff --git a/src/Hakyll/Main.hs b/src/Hakyll/Main.hs
new file mode 100644
index 0000000..04b4cea
--- /dev/null
+++ b/src/Hakyll/Main.hs
@@ -0,0 +1,113 @@
+-- | Module providing the main hakyll function and command-line argument parsing
+--
+module Hakyll.Main
+ ( hakyll
+ , hakyllWith
+ ) where
+
+import Control.Concurrent (forkIO)
+import Control.Monad (when)
+import System.Environment (getProgName, getArgs)
+import System.Directory (doesDirectoryExist, removeDirectoryRecursive)
+
+import Hakyll.Core.Configuration
+import Hakyll.Core.Run
+import Hakyll.Core.Rules
+import Hakyll.Core.Rules.Internal
+import Hakyll.Web.Preview.Poll
+import Hakyll.Web.Preview.Server
+
+-- | This usually is the function with which the user runs the hakyll compiler
+--
+hakyll :: Rules -> IO ()
+hakyll = hakyllWith defaultHakyllConfiguration
+
+-- | A variant of 'hakyll' which allows the user to specify a custom
+-- configuration
+--
+hakyllWith :: HakyllConfiguration -> Rules -> IO ()
+hakyllWith configuration rules = do
+ args <- getArgs
+ case args of
+ ["build"] -> build configuration rules
+ ["clean"] -> clean configuration
+ ["help"] -> help
+ ["preview"] -> preview configuration rules 8000
+ ["preview", p] -> preview configuration rules (read p)
+ ["rebuild"] -> rebuild configuration rules
+ ["server"] -> server configuration 8000
+ ["server", p] -> server configuration (read p)
+ _ -> help
+
+-- | Build the site
+--
+build :: HakyllConfiguration -> Rules -> IO ()
+build configuration rules = do
+ _ <- run configuration rules
+ return ()
+
+-- | Remove the output directories
+--
+clean :: HakyllConfiguration -> IO ()
+clean configuration = do
+ remove $ destinationDirectory configuration
+ remove $ storeDirectory configuration
+ where
+ remove dir = do
+ putStrLn $ "Removing " ++ dir ++ "..."
+ exists <- doesDirectoryExist dir
+ when exists $ removeDirectoryRecursive dir
+
+-- | Show usage information.
+--
+help :: IO ()
+help = do
+ name <- getProgName
+ mapM_ putStrLn
+ [ "ABOUT"
+ , ""
+ , "This is a Hakyll site generator program. You should always"
+ , "run it from the project root directory."
+ , ""
+ , "USAGE"
+ , ""
+ , name ++ " build Generate the site"
+ , name ++ " clean Clean up and remove cache"
+ , name ++ " help Show this message"
+ , name ++ " preview [port] Run a server and autocompile"
+ , name ++ " rebuild Clean up and build again"
+ , name ++ " server [port] Run a local test server"
+ ]
+
+-- | Preview the site
+--
+preview :: HakyllConfiguration -> Rules -> Int -> IO ()
+preview configuration rules port = do
+ -- Build once, keep the rule set
+ ruleSet <- run configuration rules
+
+ -- Get the resource list and a callback for the preview poll
+ let resources = rulesResources ruleSet
+ callback = build configuration rules
+
+ -- Fork a thread polling for changes
+ _ <- forkIO $ previewPoll configuration resources callback
+
+ -- Run the server in the main thread
+ server configuration port
+
+-- | Rebuild the site
+--
+rebuild :: HakyllConfiguration -> Rules -> IO ()
+rebuild configuration rules = do
+ clean configuration
+ build configuration rules
+
+-- | Start a server
+--
+server :: HakyllConfiguration -> Int -> IO ()
+server configuration port = do
+ let destination = destinationDirectory configuration
+ staticServer destination preServeHook port
+ where
+ preServeHook _ = return ()
diff --git a/src/Text/Hakyll/Internal/CompressCss.hs b/src/Hakyll/Web/CompressCss.hs
index 4a78791..2df08fd 100644
--- a/src/Text/Hakyll/Internal/CompressCss.hs
+++ b/src/Hakyll/Web/CompressCss.hs
@@ -1,30 +1,45 @@
-- | Module used for CSS compression. The compression is currently in a simple
--- state, but would typically reduce the number of bytes by about 25%.
-module Text.Hakyll.Internal.CompressCss
- ( compressCss
+-- state, but would typically reduce the number of bytes by about 25%.
+--
+module Hakyll.Web.CompressCss
+ ( compressCssCompiler
+ , compressCss
) where
+import Data.Char (isSpace)
import Data.List (isPrefixOf)
+import Control.Arrow ((>>^))
-import Text.Hakyll.Regex (substituteRegex)
+import Hakyll.Core.Compiler
+import Hakyll.Core.ResourceProvider
+import Hakyll.Core.Util.String
+
+-- | Compiler form of 'compressCss'
+--
+compressCssCompiler :: Compiler Resource String
+compressCssCompiler = getResourceString >>^ compressCss
-- | Compress CSS to speed up your site.
+--
compressCss :: String -> String
compressCss = compressSeparators
. stripComments
. compressWhitespace
-- | Compresses certain forms of separators.
+--
compressSeparators :: String -> String
-compressSeparators = substituteRegex "; *}" "}"
- . substituteRegex " *([{};:]) *" "\\1"
- . substituteRegex ";;*" ";"
+compressSeparators = replaceAll "; *}" (const "}")
+ . replaceAll " *([{};:]) *" (take 1 . dropWhile isSpace)
+ . replaceAll ";;*" (const ";")
-- | Compresses all whitespace.
+--
compressWhitespace :: String -> String
-compressWhitespace = substituteRegex "[ \t\n][ \t\n]*" " "
+compressWhitespace = replaceAll "[ \t\n][ \t\n]*" (const " ")
-- | Function that strips CSS comments away.
+--
stripComments :: String -> String
stripComments [] = []
stripComments str
diff --git a/src/Hakyll/Web/Feed.hs b/src/Hakyll/Web/Feed.hs
new file mode 100644
index 0000000..85674c6
--- /dev/null
+++ b/src/Hakyll/Web/Feed.hs
@@ -0,0 +1,124 @@
+-- | A Module that allows easy rendering of RSS feeds.
+--
+-- The main rendering functions (@renderRss@, @renderAtom@) all assume that
+-- you pass the list of items so that the most recent entry in the feed is the
+-- first item in the list.
+--
+-- Also note that the pages should have (at least) the following fields to
+-- produce a correct feed:
+--
+-- - @$title@: Title of the item
+--
+-- - @$description@: Description to appear in the feed
+--
+-- - @$url@: URL to the item - this is usually set automatically.
+--
+-- In addition, the posts should be named according to the rules for
+-- 'Hakyll.Page.Metadata.renderDateField'.
+--
+module Hakyll.Web.Feed
+ ( FeedConfiguration (..)
+ , renderRss
+ , renderAtom
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow ((>>>), arr, (&&&))
+import Control.Monad ((<=<))
+import Data.Maybe (fromMaybe, listToMaybe)
+
+import Hakyll.Core.Compiler
+import Hakyll.Web.Page
+import Hakyll.Web.Page.Metadata
+import Hakyll.Web.Template
+import Hakyll.Web.Template.Read.Hakyll (readTemplate)
+import Hakyll.Web.Util.Url
+
+import Paths_hakyll
+
+-- | This is a data structure to keep the configuration of a feed.
+data FeedConfiguration = FeedConfiguration
+ { -- | Title of the feed.
+ feedTitle :: String
+ , -- | Description of the feed.
+ feedDescription :: String
+ , -- | Name of the feed author.
+ feedAuthorName :: String
+ , -- | Absolute root URL of the feed site (e.g. @http://jaspervdj.be@)
+ feedRoot :: String
+ }
+
+-- | This is an auxiliary function to create a listing that is, in fact, a feed.
+-- The items should be sorted on date. The @$timestamp@ field should be set.
+--
+createFeed :: Template -- ^ Feed template
+ -> Template -- ^ Item template
+ -> String -- ^ URL of the feed
+ -> FeedConfiguration -- ^ Feed configuration
+ -> [Page String] -- ^ Items to include
+ -> String -- ^ Resulting feed
+createFeed feedTemplate itemTemplate url configuration items =
+ pageBody $ applyTemplate feedTemplate
+ $ setField "timestamp" timestamp
+ $ setField "title" (feedTitle configuration)
+ $ setField "description" (feedDescription configuration)
+        $ setField "authorName"  (feedAuthorName configuration)
+ $ setField "root" (feedRoot configuration)
+ $ setField "url" url
+ $ fromBody body
+ where
+ -- Preprocess items
+ items' = flip map items $ applyTemplate itemTemplate
+ . setField "root" (feedRoot configuration)
+
+ -- Body: concatenated items
+ body = concat $ map pageBody items'
+
+ -- Take the first timestamp, which should be the most recent
+ timestamp = fromMaybe "Unknown" $ do
+ p <- listToMaybe items
+ return $ getField "timestamp" p
+
+
+-- | Abstract function to render any feed.
+--
+renderFeed :: FilePath -- ^ Feed template
+ -> FilePath -- ^ Item template
+ -> FeedConfiguration -- ^ Feed configuration
+ -> Compiler [Page String] String -- ^ Feed compiler
+renderFeed feedTemplate itemTemplate configuration =
+ id &&& getRoute >>> renderFeed'
+ where
+ -- Arrow rendering the feed from the items and the URL
+ renderFeed' = unsafeCompiler $ \(items, url) -> do
+ feedTemplate' <- loadTemplate feedTemplate
+ itemTemplate' <- loadTemplate itemTemplate
+ let url' = toUrl $ fromMaybe noUrl url
+ return $ createFeed feedTemplate' itemTemplate' url' configuration items
+
+ -- Auxiliary: load a template from a datafile
+ loadTemplate = fmap readTemplate . readFile <=< getDataFileName
+
+ -- URL is required to have a valid field
+ noUrl = error "Hakyll.Web.Feed.renderFeed: no route specified"
+
+-- | Render an RSS feed with a number of items.
+--
+renderRss :: FeedConfiguration -- ^ Feed configuration
+ -> Compiler [Page String] String -- ^ Feed compiler
+renderRss configuration = arr (map renderDate)
+ >>> renderFeed "templates/rss.xml" "templates/rss-item.xml" configuration
+ where
+ renderDate = renderDateField "timestamp" "%a, %d %b %Y %H:%M:%S UT"
+ "No date found."
+
+-- | Render an Atom feed with a number of items.
+--
+renderAtom :: FeedConfiguration -- ^ Feed configuration
+ -> Compiler [Page String] String -- ^ Feed compiler
+renderAtom configuration = arr (map renderDate)
+ >>> renderFeed "templates/atom.xml" "templates/atom-item.xml" configuration
+ where
+ renderDate = renderDateField "timestamp" "%Y-%m-%dT%H:%M:%SZ"
+ "No date found."
diff --git a/src/Hakyll/Web/FileType.hs b/src/Hakyll/Web/FileType.hs
new file mode 100644
index 0000000..cd1188a
--- /dev/null
+++ b/src/Hakyll/Web/FileType.hs
@@ -0,0 +1,55 @@
+-- | A module dealing with common file extensions and associated file types.
+--
+module Hakyll.Web.FileType
+ ( FileType (..)
+ , fileType
+ , getFileType
+ ) where
+
+import System.FilePath (takeExtension)
+import Control.Arrow ((>>^))
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler
+
+-- | Datatype to represent the different file types Hakyll can deal with by
+-- default
+--
+data FileType
+ = Html
+ | LaTeX
+ | LiterateHaskell FileType
+ | Markdown
+ | Rst
+ | PlainText
+ | Css
+ | Binary
+ deriving (Eq, Ord, Show, Read)
+
+-- | Get the file type for a certain file. The type is determined by extension.
+--
+fileType :: FilePath -> FileType
+fileType = fileType' . takeExtension
+ where
+ fileType' ".htm" = Html
+ fileType' ".html" = Html
+ fileType' ".lhs" = LiterateHaskell Markdown
+ fileType' ".markdown" = Markdown
+ fileType' ".md" = Markdown
+ fileType' ".mdn" = Markdown
+ fileType' ".mdown" = Markdown
+ fileType' ".mdwn" = Markdown
+ fileType' ".mkd" = Markdown
+ fileType' ".mkdwn" = Markdown
+ fileType' ".page" = Markdown
+ fileType' ".rst" = Rst
+ fileType' ".tex" = LaTeX
+ fileType' ".text" = PlainText
+ fileType' ".txt" = PlainText
+ fileType' ".css" = Css
+ fileType' _ = Binary -- Treat unknown files as binary
+
+-- | Get the file type for the current file
+--
+getFileType :: Compiler a FileType
+getFileType = getIdentifier >>^ fileType . toFilePath
diff --git a/src/Hakyll/Web/Page.hs b/src/Hakyll/Web/Page.hs
new file mode 100644
index 0000000..955e1a8
--- /dev/null
+++ b/src/Hakyll/Web/Page.hs
@@ -0,0 +1,124 @@
+-- | A page is a key-value mapping, representing a page on your site
+--
+-- A page is an important concept in Hakyll. It is a key-value mapping, and has
+-- one field with an arbitrary type. A 'Page' thus consists of
+--
+-- * a key-value mapping (of the type @Map String String@);
+--
+-- * a value (of the type @a@).
+--
+-- Usually, the value will be a 'String' as well, and it will be the body
+-- of the page.
+--
+-- Pages can be constructed using Haskell, but they are usually parsed from a
+-- file. The file format for pages is pretty straightforward.
+--
+-- > This is a simple page
+-- > consisting of two lines.
+--
+-- This is a valid page with two lines. If we load this in Hakyll, there would
+-- be no metadata, and the body would be the given text. Let's look at a page
+-- with some metadata.
+--
+-- > ---
+-- > title: Alice's Adventures in Wonderland
+-- > author: Lewis Carroll
+-- > year: 1865
+-- > ---
+-- >
+-- > Chapter I
+-- > =========
+-- >
+-- > Down the Rabbit-Hole
+-- > --------------------
+-- >
+-- > Alice was beginning to get very tired of sitting by her sister on the bank,
+-- > and of having nothing to do: once or twice she had peeped into the book her
+-- > sister was reading, but it had no pictures or conversations in it, "and
+-- > what is the use of a book," thought Alice "without pictures or
+-- > conversation?"
+-- >
+-- > ...
+--
+-- As you can see, we construct a metadata header in Hakyll using @---@. Then,
+-- we simply list all @key: value@ pairs, and end with @---@ again. This page
+-- contains three metadata fields and a body. The body is given in markdown
+-- format, which can be easily rendered to HTML by Hakyll, using pandoc.
+--
+{-# LANGUAGE DeriveDataTypeable #-}
+module Hakyll.Web.Page
+ ( Page (..)
+ , fromBody
+ , fromMap
+ , toMap
+ , readPageCompiler
+ , pageCompiler
+ , addDefaultFields
+ , sortByBaseName
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow (arr, (>>^), (&&&), (>>>))
+import System.FilePath (takeBaseName, takeDirectory)
+import qualified Data.Map as M
+import Data.List (sortBy)
+import Data.Ord (comparing)
+
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler
+import Hakyll.Core.ResourceProvider
+import Hakyll.Web.Page.Internal
+import Hakyll.Web.Page.Read
+import Hakyll.Web.Page.Metadata
+import Hakyll.Web.Pandoc
+import Hakyll.Web.Template
+import Hakyll.Web.Util.Url
+
+-- | Create a page from a body, without metadata
+--
+fromBody :: a -> Page a
+fromBody = Page M.empty
+
+-- | Read a page (do not render it)
+--
+readPageCompiler :: Compiler Resource (Page String)
+readPageCompiler = getResourceString >>^ readPage
+
+-- | Read a page, add default fields, substitute fields and render using pandoc
+--
+pageCompiler :: Compiler Resource (Page String)
+pageCompiler = cached "Hakyll.Web.Page.pageCompiler" $
+ readPageCompiler >>> addDefaultFields >>> arr applySelf >>> pageRenderPandoc
+
+-- | Add a number of default metadata fields to a page. These fields include:
+--
+-- * @$url@
+--
+-- * @$category@
+--
+-- * @$title@
+--
+-- * @$path@
+--
+addDefaultFields :: Compiler (Page a) (Page a)
+addDefaultFields = (getRoute &&& id >>^ uncurry addRoute)
+ >>> (getIdentifier &&& id >>^ uncurry addIdentifier)
+ where
+ -- Add root and url, based on route
+ addRoute Nothing = id
+ addRoute (Just r) = setField "url" (toUrl r)
+
+ -- Add title and category, based on identifier
+ addIdentifier i = setField "title" (takeBaseName p)
+ . setField "category" (takeBaseName $ takeDirectory p)
+ . setField "path" p
+ where
+ p = toFilePath i
+
+-- | Sort posts based on the basename of the post. This is equivalent to a
+-- chronological sort, because of the @year-month-day-title.extension@ naming
+-- convention in Hakyll.
+--
+sortByBaseName :: [Page a] -> [Page a]
+sortByBaseName = sortBy $ comparing $ takeBaseName . getField "path"
diff --git a/src/Hakyll/Web/Page/Internal.hs b/src/Hakyll/Web/Page/Internal.hs
new file mode 100644
index 0000000..55067ed
--- /dev/null
+++ b/src/Hakyll/Web/Page/Internal.hs
@@ -0,0 +1,50 @@
+-- | Internal representation of the page datatype
+--
+{-# LANGUAGE DeriveDataTypeable #-}
+module Hakyll.Web.Page.Internal
+ ( Page (..)
+ , fromMap
+ , toMap
+ ) where
+
+import Control.Applicative ((<$>), (<*>))
+import Data.Monoid (Monoid, mempty, mappend)
+
+import Data.Map (Map)
+import Data.Binary (Binary, get, put)
+import Data.Typeable (Typeable)
+import qualified Data.Map as M
+
+import Hakyll.Core.Writable
+
+-- | Type used to represent pages
+--
+data Page a = Page
+ { pageMetadata :: Map String String
+ , pageBody :: a
+ } deriving (Eq, Show, Typeable)
+
+instance Monoid a => Monoid (Page a) where
+ mempty = Page M.empty mempty
+ mappend (Page m1 b1) (Page m2 b2) =
+ Page (M.union m1 m2) (mappend b1 b2)
+
+instance Functor Page where
+ fmap f (Page m b) = Page m (f b)
+
+instance Binary a => Binary (Page a) where
+ put (Page m b) = put m >> put b
+ get = Page <$> get <*> get
+
+instance Writable a => Writable (Page a) where
+ write p (Page _ b) = write p b
+
+-- | Create a metadata page, without a body
+--
+fromMap :: Monoid a => Map String String -> Page a
+fromMap m = Page m mempty
+
+-- | Convert a page to a map. The body will be placed in the @body@ key.
+--
+toMap :: Page String -> Map String String
+toMap (Page m b) = M.insert "body" b m
diff --git a/src/Hakyll/Web/Page/Metadata.hs b/src/Hakyll/Web/Page/Metadata.hs
new file mode 100644
index 0000000..72742e6
--- /dev/null
+++ b/src/Hakyll/Web/Page/Metadata.hs
@@ -0,0 +1,131 @@
+-- | Provides various functions to manipulate the metadata fields of a page
+--
+module Hakyll.Web.Page.Metadata
+ ( getField
+ , getFieldMaybe
+ , setField
+ , setFieldA
+ , renderField
+ , changeField
+ , copyField
+ , renderDateField
+ , renderDateFieldWith
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow (Arrow, (>>>), (***), arr)
+import Data.List (intercalate)
+import Data.Maybe (fromMaybe)
+import Data.Time.Clock (UTCTime)
+import Data.Time.Format (parseTime, formatTime)
+import qualified Data.Map as M
+import System.FilePath (takeFileName)
+import System.Locale (TimeLocale, defaultTimeLocale)
+
+import Hakyll.Web.Page.Internal
+import Hakyll.Core.Util.String
+
+-- | Get a metadata field. If the field does not exist, the empty string is
+-- returned.
+--
+getField :: String -- ^ Key
+ -> Page a -- ^ Page
+ -> String -- ^ Value
+getField key = fromMaybe "" . getFieldMaybe key
+
+-- | Get a field in a 'Maybe' wrapper
+--
+getFieldMaybe :: String -- ^ Key
+ -> Page a -- ^ Page
+ -> Maybe String -- ^ Value, if found
+getFieldMaybe key = M.lookup key . pageMetadata
+
+-- | Add a metadata field. If the field already exists, it is not overwritten.
+--
+setField :: String -- ^ Key
+ -> String -- ^ Value
+ -> Page a -- ^ Page to add it to
+ -> Page a -- ^ Resulting page
+setField k v (Page m b) = Page (M.insertWith (flip const) k v m) b
+
+-- | Arrow-based variant of 'setField'. Because of its type, this function is
+-- very usable together with the different 'require' functions.
+--
+setFieldA :: Arrow a
+ => String -- ^ Key
+ -> a x String -- ^ Value arrow
+ -> a (Page b, x) (Page b) -- ^ Resulting arrow
+setFieldA k v = id *** v >>> arr (uncurry $ flip $ setField k)
+
+-- | Do something with a metadata value, but keep the old value as well. If the
+-- key given is not present in the metadata, nothing will happen. If the source
+-- and destination keys are the same, the value will be changed (but you should
+-- use 'changeField' for this purpose).
+--
+renderField :: String -- ^ Key of which the value should be copied
+ -> String -- ^ Key the value should be copied to
+ -> (String -> String) -- ^ Function to apply on the value
+ -> Page a -- ^ Page on which this should be applied
+ -> Page a -- ^ Resulting page
+renderField src dst f page = case M.lookup src (pageMetadata page) of
+ Nothing -> page
+ Just value -> setField dst (f value) page
+
+-- | Change a metadata value.
+--
+-- > import Data.Char (toUpper)
+-- > changeField "title" (map toUpper)
+--
+-- Will put the title in UPPERCASE.
+--
+changeField :: String -- ^ Key to change.
+ -> (String -> String) -- ^ Function to apply on the value.
+ -> Page a -- ^ Page to change
+ -> Page a -- ^ Resulting page
+changeField key = renderField key key
+
+-- | Make a copy of a metadata field (put the value belonging to a certain key
+-- under some other key as well)
+--
+copyField :: String -- ^ Key to copy
+ -> String -- ^ Destination to copy to
+ -> Page a -- ^ Page on which this should be applied
+ -> Page a -- ^ Resulting page
+copyField src dst = renderField src dst id
+
+-- | When the metadata has a field called @path@ in a
+-- @folder/yyyy-mm-dd-title.extension@ format (the convention for pages),
+-- this function can render the date.
+--
+-- > renderDateField "date" "%B %e, %Y" "Date unknown"
+--
+-- Will render something like @January 28, 2010@.
+--
+renderDateField :: String -- ^ Key in which the rendered date should be placed
+ -> String -- ^ Format to use on the date
+ -> String -- ^ Default value, in case the date cannot be parsed
+ -> Page a -- ^ Page on which this should be applied
+ -> Page a -- ^ Resulting page
+renderDateField = renderDateFieldWith defaultTimeLocale
+
+-- | This is an extended version of 'renderDateField' that allows you to
+-- specify a time locale that is used for outputting the date. For more
+-- details, see 'renderDateField'.
+--
+renderDateFieldWith :: TimeLocale -- ^ Output time locale
+ -> String -- ^ Destination key
+ -> String -- ^ Format to use on the date
+ -> String -- ^ Default value
+ -> Page a -- ^ Target page
+ -> Page a -- ^ Resulting page
+renderDateFieldWith locale key format defaultValue =
+ renderField "path" key renderDate'
+ where
+ renderDate' filePath = fromMaybe defaultValue $ do
+ let dateString = intercalate "-" $ take 3
+ $ splitAll "-" $ takeFileName filePath
+ time <- parseTime defaultTimeLocale
+ "%Y-%m-%d"
+ dateString :: Maybe UTCTime
+ return $ formatTime locale format time
diff --git a/src/Hakyll/Web/Page/Read.hs b/src/Hakyll/Web/Page/Read.hs
new file mode 100644
index 0000000..cf39ddd
--- /dev/null
+++ b/src/Hakyll/Web/Page/Read.hs
@@ -0,0 +1,60 @@
+-- | Module providing a function to parse a page from a file
+--
+module Hakyll.Web.Page.Read
+ ( readPage
+ ) where
+
+import Control.Applicative ((<$>), (<*>))
+import Control.Arrow (second, (***))
+import Control.Monad.State (State, get, put, evalState)
+import Data.List (isPrefixOf)
+import Data.Map (Map)
+import qualified Data.Map as M
+
+import Hakyll.Web.Page.Internal
+import Hakyll.Core.Util.String
+
+-- | We're using a simple state monad as parser
+--
+type LineParser = State [String]
+
+-- | Read the metadata section from a page
+--
+parseMetadata :: LineParser (Map String String)
+parseMetadata = get >>= \content -> case content of
+ -- No lines means no metadata
+ [] -> return M.empty
+ -- Check if the file begins with a delimiter
+ (l : ls) -> if not (isPossibleDelimiter l)
+ then -- No delimiter means no metadata
+ return M.empty
+ else do -- Break the metadata section
+ let (metadata, rest) = second (drop 1) $ break (== l) ls
+ -- Put the rest back
+ put rest
+ -- Parse the metadata
+ return $ M.fromList $ map parseMetadata' metadata
+ where
+ -- Check if a line can be a delimiter
+ isPossibleDelimiter = isPrefixOf "---"
+
+ -- Parse a "key: value" string to a (key, value) tuple
+ parseMetadata' = (trim *** trim . drop 1) . break (== ':')
+
+-- | Read the body section of a page
+--
+parseBody :: LineParser String
+parseBody = do
+ body <- get
+ put []
+ return $ unlines body
+
+-- | Read an entire page
+--
+parsePage :: LineParser (Page String)
+parsePage = Page <$> parseMetadata <*> parseBody
+
+-- | Read a page from a string
+--
+readPage :: String -> Page String
+readPage = evalState parsePage . lines
diff --git a/src/Hakyll/Web/Pandoc.hs b/src/Hakyll/Web/Pandoc.hs
new file mode 100644
index 0000000..f225997
--- /dev/null
+++ b/src/Hakyll/Web/Pandoc.hs
@@ -0,0 +1,110 @@
+-- | Module exporting pandoc bindings
+--
+module Hakyll.Web.Pandoc
+ ( -- * The basic building blocks
+ readPandoc
+ , readPandocWith
+ , writePandoc
+ , writePandocWith
+
+ -- * Functions working on pages/compilers
+ , pageReadPandoc
+ , pageReadPandocWith
+ , pageRenderPandoc
+ , pageRenderPandocWith
+
+ -- * Default options
+ , defaultHakyllParserState
+ , defaultHakyllWriterOptions
+ ) where
+
+import Prelude hiding (id)
+import Control.Applicative ((<$>))
+import Control.Arrow ((>>^), (&&&))
+import Control.Category (id)
+
+import Text.Pandoc
+
+import Hakyll.Core.Compiler
+import Hakyll.Web.FileType
+import Hakyll.Web.Page.Internal
+
+-- | Read a string using pandoc, with the default options
+--
+readPandoc :: FileType -- ^ File type, determines how parsing happens
+ -> String -- ^ String to read
+ -> Pandoc -- ^ Resulting document
+readPandoc = readPandocWith defaultHakyllParserState
+
+-- | Read a string using pandoc, with the supplied options
+--
+readPandocWith :: ParserState -- ^ Parser options
+ -> FileType -- ^ File type, determines how parsing happens
+ -> String -- ^ String to read
+ -> Pandoc -- ^ Resulting document
+readPandocWith state fileType' = case fileType' of
+ Html -> readHtml state
+ LaTeX -> readLaTeX state
+ LiterateHaskell t -> readPandocWith state {stateLiterateHaskell = True} t
+ Markdown -> readMarkdown state
+ Rst -> readRST state
+ t -> error $
+ "Hakyll.Web.readPandocWith: I don't know how to read " ++ show t
+
+-- | Write a document (as HTML) using pandoc, with the default options
+--
+writePandoc :: Pandoc -- ^ Document to write
+ -> String -- ^ Resulting HTML
+writePandoc = writePandocWith defaultHakyllWriterOptions
+
+-- | Write a document (as HTML) using pandoc, with the supplied options
+--
+writePandocWith :: WriterOptions -- ^ Writer options for pandoc
+ -> Pandoc -- ^ Document to write
+ -> String -- ^ Resulting HTML
+writePandocWith = writeHtmlString
+
+-- | Read the resource using pandoc
+--
+pageReadPandoc :: Compiler (Page String) (Page Pandoc)
+pageReadPandoc = pageReadPandocWith defaultHakyllParserState
+
+-- | Read the resource using pandoc
+--
+pageReadPandocWith :: ParserState -> Compiler (Page String) (Page Pandoc)
+pageReadPandocWith state =
+ id &&& getFileType >>^ pageReadPandocWith'
+ where
+ pageReadPandocWith' (p, t) = readPandocWith state t <$> p
+
+-- | Render the resource using pandoc
+--
+pageRenderPandoc :: Compiler (Page String) (Page String)
+pageRenderPandoc =
+ pageRenderPandocWith defaultHakyllParserState defaultHakyllWriterOptions
+
+-- | Render the resource using pandoc
+--
+pageRenderPandocWith :: ParserState
+ -> WriterOptions
+ -> Compiler (Page String) (Page String)
+pageRenderPandocWith state options =
+ pageReadPandocWith state >>^ fmap (writePandocWith options)
+
+-- | The default reader options for pandoc parsing in hakyll
+--
+defaultHakyllParserState :: ParserState
+defaultHakyllParserState = defaultParserState
+ { -- The following option causes pandoc to read smart typography, a nice
+ -- and free bonus.
+ stateSmart = True
+ }
+
+-- | The default writer options for pandoc rendering in hakyll
+--
+defaultHakyllWriterOptions :: WriterOptions
+defaultHakyllWriterOptions = defaultWriterOptions
+ { -- This option causes literate haskell to be written using '>' marks in
+ -- html, which I think is a good default.
+ writerLiterateHaskell = True
+ }
diff --git a/src/Hakyll/Web/Preview/Server.hs b/src/Hakyll/Web/Preview/Server.hs
new file mode 100644
index 0000000..c550b69
--- /dev/null
+++ b/src/Hakyll/Web/Preview/Server.hs
@@ -0,0 +1,72 @@
+-- | Implements a basic static file server for previewing purposes
+--
+{-# LANGUAGE OverloadedStrings #-}
+module Hakyll.Web.Preview.Server
+ ( staticServer
+ ) where
+
+import Control.Monad.Trans (liftIO)
+import Control.Applicative ((<$>))
+import Codec.Binary.UTF8.String
+import System.FilePath ((</>))
+import System.Directory (doesFileExist)
+
+import qualified Data.ByteString as SB
+import Snap.Util.FileServe (serveFile)
+import Snap.Types (Snap, rqURI, getRequest, writeBS)
+import Snap.Http.Server ( httpServe, setAccessLog, setErrorLog, addListen
+ , ConfigListen (..), emptyConfig
+ )
+
+import Hakyll.Core.Util.String (replaceAll)
+
+-- | The first file in the list that actually exists is returned
+--
+findFile :: [FilePath] -> IO (Maybe FilePath)
+findFile [] = return Nothing
+findFile (x : xs) = do
+ exists <- doesFileExist x
+ if exists then return (Just x) else findFile xs
+
+-- | Serve a given directory
+--
+static :: FilePath -- ^ Directory to serve
+ -> (FilePath -> IO ()) -- ^ Pre-serve hook
+ -> Snap ()
+static directory preServe = do
+ -- Obtain the path
+ uri <- rqURI <$> getRequest
+ let filePath = replaceAll "\\?$" (const "") -- Remove trailing ?
+ $ replaceAll "#[^#]*$" (const "") -- Remove #section
+ $ replaceAll "^/" (const "") -- Remove leading /
+ $ decode $ SB.unpack uri
+
+ -- Try to find the requested file
+ r <- liftIO $ findFile $ map (directory </>) $
+ [ filePath
+ , filePath </> "index.htm"
+ , filePath </> "index.html"
+ ]
+
+ case r of
+ -- Not found, error
+ Nothing -> writeBS "Not found"
+ -- Found, serve
+ Just f -> do
+ liftIO $ preServe f
+ serveFile f
+
+-- | Main method, runs a static server in the given directory
+--
+staticServer :: FilePath -- ^ Directory to serve
+ -> (FilePath -> IO ()) -- ^ Pre-serve hook
+ -> Int -- ^ Port to listen on
+ -> IO () -- ^ Blocks forever
+staticServer directory preServe port =
+ httpServe config $ static directory preServe
+ where
+ -- Snap server config
+ config = addListen (ListenHttp "0.0.0.0" port)
+ $ setAccessLog Nothing
+ $ setErrorLog Nothing
+ $ emptyConfig
diff --git a/src/Hakyll/Web/RelativizeUrls.hs b/src/Hakyll/Web/RelativizeUrls.hs
new file mode 100644
index 0000000..2de4a0e
--- /dev/null
+++ b/src/Hakyll/Web/RelativizeUrls.hs
@@ -0,0 +1,62 @@
+-- | This module exposes a function which can relativize URL's on a webpage.
+--
+-- This means that one can deploy the resulting site on
+-- @http:\/\/example.com\/@, but also on @http:\/\/example.com\/some-folder\/@
+-- without having to change anything (simply copy over the files).
+--
+-- To use it, you should use absolute URL's from the site root everywhere. For
+-- example, use
+--
+-- > <img src="/images/lolcat.png" alt="Funny zomgroflcopter" />
+--
+-- in a blogpost. When running this through the relativize URL's module, this
+-- will result in (suppose your blogpost is located at @\/posts\/foo.html@):
+--
+-- > <img src="../images/lolcat.png" alt="Funny zomgroflcopter" />
+--
+module Hakyll.Web.RelativizeUrls
+ ( relativizeUrlsCompiler
+ , relativizeUrls
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Arrow ((&&&), (>>^))
+import Data.List (isPrefixOf)
+import qualified Data.Set as S
+
+import Text.HTML.TagSoup
+
+import Hakyll.Core.Compiler
+import Hakyll.Web.Page
+import Hakyll.Web.Util.Url
+
+-- | Compiler form of 'relativizeUrls' which automatically picks the right root
+-- path
+--
+relativizeUrlsCompiler :: Compiler (Page String) (Page String)
+relativizeUrlsCompiler = getRoute &&& id >>^ uncurry relativize
+ where
+ relativize Nothing = id
+ relativize (Just r) = fmap (relativizeUrls $ toSiteRoot r)
+
+-- | Relativize URL's in HTML
+--
+relativizeUrls :: String -- ^ Path to the site root
+ -> String -- ^ HTML to relativize
+ -> String -- ^ Resulting HTML
+relativizeUrls root = renderTags . map relativizeUrls' . parseTags
+ where
+ relativizeUrls' (TagOpen s a) = TagOpen s $ map (relativizeUrlsAttrs root) a
+ relativizeUrls' x = x
+
+-- | Relativize URL's in attributes
+--
+relativizeUrlsAttrs :: String -- ^ Path to the site root
+ -> Attribute String -- ^ Attribute to relativize
+ -> Attribute String -- ^ Resulting attribute
+relativizeUrlsAttrs root (key, value)
+ | key `S.member` urls && "/" `isPrefixOf` value = (key, root ++ value)
+ | otherwise = (key, value)
+ where
+ urls = S.fromList ["src", "href"]
diff --git a/src/Hakyll/Web/Tags.hs b/src/Hakyll/Web/Tags.hs
new file mode 100644
index 0000000..211a06b
--- /dev/null
+++ b/src/Hakyll/Web/Tags.hs
@@ -0,0 +1,180 @@
+-- | Module containing some specialized functions to deal with tags.
+-- This module follows certain conventions. My advice is to stick with them if
+-- possible.
+--
+-- More concrete: all functions in this module assume that the tags are
+-- located in the @tags@ field, and separated by commas. An example file
+-- @foo.markdown@ could look like:
+--
+-- > ---
+-- > author: Philip K. Dick
+-- > title: Do androids dream of electric sheep?
+-- > tags: future, science fiction, humanoid
+-- > ---
+-- > The novel is set in a post-apocalyptic near future, where the Earth and
+-- > its populations have been damaged greatly by Nuclear...
+--
+-- All the following functions would work with such a format. In addition to
+-- tags, Hakyll also supports categories. The convention when using categories
+-- is to place pages in subdirectories.
+--
+-- An example: the page @posts\/coding\/2010-01-28-hakyll-categories.markdown@
+-- belongs to the @coding@ category. Tags or categories are read using the
+-- @readTags@ and @readCategory@ functions. This module only provides functions to work with tags:
+-- categories are represented as tags. This is perfectly possible: categories
+-- only have an additional restriction that a page can only have one category
+-- (instead of multiple tags).
+--
+{-# LANGUAGE DeriveDataTypeable, OverloadedStrings, Arrows #-}
+module Hakyll.Web.Tags
+ ( Tags (..)
+ , readTagsWith
+ , readTags
+ , readCategory
+ , renderTagCloud
+ , renderTagsField
+ , renderCategoryField
+ ) where
+
+import Prelude hiding (id)
+import Control.Category (id)
+import Control.Applicative ((<$>))
+import Data.Map (Map)
+import qualified Data.Map as M
+import Data.List (intersperse)
+import Control.Arrow (arr, (&&&), (>>>), (***), (<<^), returnA)
+import Data.Maybe (catMaybes, fromMaybe)
+import Data.Monoid (mconcat)
+
+import Data.Typeable (Typeable)
+import Data.Binary (Binary, get, put)
+import Text.Blaze.Renderer.String (renderHtml)
+import Text.Blaze ((!), toHtml, toValue)
+import qualified Text.Blaze.Html5 as H
+import qualified Text.Blaze.Html5.Attributes as A
+
+import Hakyll.Web.Page
+import Hakyll.Web.Page.Metadata
+import Hakyll.Web.Util.Url
+import Hakyll.Core.Writable
+import Hakyll.Core.Identifier
+import Hakyll.Core.Compiler
+import Hakyll.Core.Util.String
+
+-- | Data about tags
+--
+data Tags a = Tags
+ { tagsMap :: Map String [Page a]
+ } deriving (Show, Typeable)
+
+instance Binary a => Binary (Tags a) where
+ get = Tags <$> get
+ put (Tags m) = put m
+
+instance Writable (Tags a) where
+ write _ _ = return ()
+
+-- | Obtain tags from a page
+--
+getTags :: Page a -> [String]
+getTags = map trim . splitAll "," . getField "tags"
+
+-- | Obtain categories from a page
+--
+getCategory :: Page a -> [String]
+getCategory = return . getField "category"
+
+-- | Higher-level function to read tags
+--
+readTagsWith :: (Page a -> [String]) -- ^ Function extracting tags from a page
+ -> [Page a] -- ^ Pages
+ -> Tags a -- ^ Resulting tags
+readTagsWith f pages = Tags
+ { tagsMap = foldl (M.unionWith (++)) M.empty (map readTagsWith' pages)
+ }
+ where
+ -- Create a tag map for one page
+ readTagsWith' page =
+ let tags = f page
+ in M.fromList $ zip tags $ repeat [page]
+
+-- | Read a tagmap using the @tags@ metadata field
+--
+readTags :: [Page a] -> Tags a
+readTags = readTagsWith getTags
+
+-- | Read a tagmap using the @category@ metadata field
+--
+readCategory :: [Page a] -> Tags a
+readCategory = readTagsWith getCategory
+
+-- | Render a tag cloud in HTML
+--
+renderTagCloud :: (String -> Identifier) -- ^ Produce a link for a tag
+ -> Double -- ^ Smallest font size, in percent
+ -> Double -- ^ Biggest font size, in percent
+ -> Compiler (Tags a) String -- ^ Tag cloud renderer
+renderTagCloud makeUrl minSize maxSize = proc (Tags tags) -> do
+ -- In tags' we create a list: [((tag, route), count)]
+ tags' <- mapCompiler ((id &&& (getRouteFor <<^ makeUrl)) *** arr length)
+ -< M.toList tags
+
+ let -- Absolute frequencies of the pages
+ freqs = map snd tags'
+
+ -- Find out the relative count of a tag: on a scale from 0 to 1
+ relative count = (fromIntegral count - min') / (1 + max' - min')
+
+ -- Show the relative size of one 'count' in percent
+ size count =
+ let size' = floor $ minSize + relative count * (maxSize - minSize)
+ in show (size' :: Int) ++ "%"
+
+ -- The minimum and maximum count found, as doubles
+ (min', max')
+ | null freqs = (0, 1)
+ | otherwise = (minimum &&& maximum) $ map fromIntegral freqs
+
+ -- Create a link for one item
+ makeLink ((tag, url), count) =
+ H.a ! A.style (toValue $ "font-size: " ++ size count)
+ ! A.href (toValue $ fromMaybe "/" url)
+ $ toHtml tag
+
+ -- Render and return the HTML
+ returnA -< renderHtml $ mconcat $ intersperse " " $ map makeLink tags'
+
+-- | Render tags with links
+--
+renderTagsFieldWith :: (Page a -> [String]) -- ^ Function to get the tags
+ -> String -- ^ Destination key
+ -> (String -> Identifier) -- ^ Create a link for a tag
+ -> Compiler (Page a) (Page a) -- ^ Resulting compiler
+renderTagsFieldWith tags destination makeUrl =
+ id &&& arr tags >>> setFieldA destination renderTags
+ where
+ -- Compiler creating a comma-separated HTML string for a list of tags
+ renderTags :: Compiler [String] String
+ renderTags = arr (map $ id &&& makeUrl)
+ >>> mapCompiler (id *** getRouteFor)
+ >>> arr (map $ uncurry renderLink)
+ >>> arr (renderHtml . mconcat . intersperse ", " . catMaybes)
+
+ -- Render one tag link
+ renderLink _ Nothing = Nothing
+ renderLink tag (Just filePath) = Just $
+ H.a ! A.href (toValue $ toUrl filePath) $ toHtml tag
+
+-- | Render tags with links
+--
+renderTagsField :: String -- ^ Destination key
+ -> (String -> Identifier) -- ^ Create a link for a tag
+ -> Compiler (Page a) (Page a) -- ^ Resulting compiler
+renderTagsField = renderTagsFieldWith getTags
+
+-- | Render the category in a link
+--
+renderCategoryField :: String -- ^ Destination key
+ -> (String -> Identifier) -- ^ Create a category link
+ -> Compiler (Page a) (Page a) -- ^ Resulting compiler
+renderCategoryField = renderTagsFieldWith getCategory
diff --git a/src/Hakyll/Web/Template.hs b/src/Hakyll/Web/Template.hs
new file mode 100644
index 0000000..9c49278
--- /dev/null
+++ b/src/Hakyll/Web/Template.hs
@@ -0,0 +1,109 @@
+-- | This module provides means for reading and applying 'Template's.
+--
+-- Templates are tools to convert data (pages) into a string. They are
+-- perfectly suited for laying out your site.
+--
+-- Let's look at an example template:
+--
+-- > <html>
+-- > <head>
+-- > <title>My crazy homepage - $title$</title>
+-- > </head>
+-- > <body>
+-- > <div id="header">
+-- > <h1>My crazy homepage - $title$</h1>
+-- > </div>
+-- > <div id="content">
+-- > $body$
+-- > </div>
+-- > <div id="footer">
+-- > By reading this you agree that I now own your soul
+-- > </div>
+-- > </body>
+-- > </html>
+--
+-- We can use this template to render a 'Page' which has a body and a @$title$@
+-- metadata field.
+--
+-- As you can see, the format is very simple -- @$key$@ is used to render the
+-- @$key$@ field from the page, everything else is literally copied. If you want
+-- to literally insert @\"$key$\"@ into your page (for example, when you're
+-- writing a Hakyll tutorial) you can use
+--
+-- > <p>
+-- > A literal $$key$$.
+-- > </p>
+--
+-- Because of its simplicity, these templates can be used for more than HTML:
+-- you could make, for example, CSS or JS templates as well.
+--
+-- In addition to the native format, Hakyll also supports hamlet templates. For
+-- more information on hamlet templates, please refer to:
+-- <http://hackage.haskell.org/package/hamlet>.
+--
+module Hakyll.Web.Template
+ ( Template
+ , applyTemplate
+ , applySelf
+ , templateCompiler
+ , templateCompilerWith
+ , applyTemplateCompiler
+ ) where
+
+import Control.Arrow
+import Data.Maybe (fromMaybe)
+import System.FilePath (takeExtension)
+import qualified Data.Map as M
+
+import Text.Hamlet (HamletSettings, defaultHamletSettings)
+
+import Hakyll.Core.Compiler
+import Hakyll.Core.Identifier
+import Hakyll.Core.ResourceProvider
+import Hakyll.Web.Template.Internal
+import Hakyll.Web.Template.Read
+import Hakyll.Web.Page.Internal
+
+-- | Substitutes @$identifiers@ in the given @Template@ by values from the given
+-- "Page". When a key is not found, it is left as it is. You can specify
+-- the characters used to replace escaped dollars (@$$@) here.
+--
+applyTemplate :: Template -> Page String -> Page String
+applyTemplate template page =
+ fmap (const $ substitute =<< unTemplate template) page
+ where
+ map' = toMap page
+ substitute (Chunk chunk) = chunk
+ substitute (Key key) = fromMaybe ("$" ++ key ++ "$") $ M.lookup key map'
+ substitute (Escaped) = "$"
+
+-- | Apply a page as its own template. This is often very useful to fill in
+-- certain keys like @$root@ and @$url@.
+--
+applySelf :: Page String -> Page String
+applySelf page = applyTemplate (readTemplate $ pageBody page) page
+
+-- | Read a template. If the extension of the file we're compiling is
+-- @.hml@ or @.hamlet@, it will be considered as a Hamlet template, and parsed
+-- as such.
+--
+templateCompiler :: Compiler Resource Template
+templateCompiler = templateCompilerWith defaultHamletSettings
+
+-- | Version of 'templateCompiler' that enables custom settings.
+--
+templateCompilerWith :: HamletSettings -> Compiler Resource Template
+templateCompilerWith settings =
+ cached "Hakyll.Web.Template.templateCompilerWith" $
+ getIdentifier &&& getResourceString >>^ uncurry read'
+ where
+ read' identifier string =
+ if takeExtension (toFilePath identifier) `elem` [".hml", ".hamlet"]
+ -- Hamlet template
+ then readHamletTemplateWith settings string
+ -- Hakyll template
+ else readTemplate string
+
+applyTemplateCompiler :: Identifier -- ^ Template
+ -> Compiler (Page String) (Page String) -- ^ Compiler
+applyTemplateCompiler identifier = require identifier (flip applyTemplate)
diff --git a/src/Hakyll/Web/Template/Internal.hs b/src/Hakyll/Web/Template/Internal.hs
new file mode 100644
index 0000000..d0e0859
--- /dev/null
+++ b/src/Hakyll/Web/Template/Internal.hs
@@ -0,0 +1,45 @@
+-- | Module containing the template data structure
+--
+{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
+module Hakyll.Web.Template.Internal
+ ( Template (..)
+ , TemplateElement (..)
+ ) where
+
+import Control.Applicative ((<$>))
+
+import Data.Binary (Binary, get, getWord8, put, putWord8)
+import Data.Typeable (Typeable)
+
+import Hakyll.Core.Writable
+
+-- | Datatype used for template substitutions.
+--
+newtype Template = Template
+ { unTemplate :: [TemplateElement]
+ }
+ deriving (Show, Eq, Binary, Typeable)
+
+instance Writable Template where
+ -- Writing a template is impossible
+ write _ _ = return ()
+
+-- | Elements of a template.
+--
+data TemplateElement
+ = Chunk String
+ | Key String
+ | Escaped
+ deriving (Show, Eq, Typeable)
+
+instance Binary TemplateElement where
+ put (Chunk string) = putWord8 0 >> put string
+ put (Key key) = putWord8 1 >> put key
+ put (Escaped) = putWord8 2
+
+ get = getWord8 >>= \tag -> case tag of
+ 0 -> Chunk <$> get
+ 1 -> Key <$> get
+ 2 -> return Escaped
+ _ -> error $ "Hakyll.Web.Template.Internal: "
+ ++ "Error reading cached template"
diff --git a/src/Hakyll/Web/Template/Read.hs b/src/Hakyll/Web/Template/Read.hs
new file mode 100644
index 0000000..421b7e9
--- /dev/null
+++ b/src/Hakyll/Web/Template/Read.hs
@@ -0,0 +1,10 @@
+-- | Re-exports all different template reading modules
+--
+module Hakyll.Web.Template.Read
+ ( readTemplate
+ , readHamletTemplate
+ , readHamletTemplateWith
+ ) where
+
+import Hakyll.Web.Template.Read.Hakyll
+import Hakyll.Web.Template.Read.Hamlet
diff --git a/src/Hakyll/Web/Template/Read/Hakyll.hs b/src/Hakyll/Web/Template/Read/Hakyll.hs
new file mode 100644
index 0000000..fecf772
--- /dev/null
+++ b/src/Hakyll/Web/Template/Read/Hakyll.hs
@@ -0,0 +1,35 @@
+-- | Read templates in Hakyll's native format
+--
+module Hakyll.Web.Template.Read.Hakyll
+ ( readTemplate
+ ) where
+
+import Data.List (isPrefixOf)
+import Data.Char (isAlphaNum)
+
+import Hakyll.Web.Template.Internal
+
+-- | Construct a @Template@ from a string.
+--
+readTemplate :: String -> Template
+readTemplate = Template . readTemplate'
+ where
+ readTemplate' [] = []
+ readTemplate' string
+ | "$$" `isPrefixOf` string =
+ Escaped : readTemplate' (drop 2 string)
+ | "$" `isPrefixOf` string =
+ case readKey (drop 1 string) of
+ Just (key, rest) -> Key key : readTemplate' rest
+ Nothing -> Chunk "$" : readTemplate' (drop 1 string)
+ | otherwise =
+ let (chunk, rest) = break (== '$') string
+ in Chunk chunk : readTemplate' rest
+
+ -- Parse an key into (key, rest) if it's valid, and return
+ -- Nothing otherwise
+ readKey string =
+ let (key, rest) = span isAlphaNum string
+ in if not (null key) && "$" `isPrefixOf` rest
+ then Just (key, drop 1 rest)
+ else Nothing
diff --git a/src/Hakyll/Web/Template/Read/Hamlet.hs b/src/Hakyll/Web/Template/Read/Hamlet.hs
new file mode 100644
index 0000000..7b496de
--- /dev/null
+++ b/src/Hakyll/Web/Template/Read/Hamlet.hs
@@ -0,0 +1,46 @@
+-- | Read templates in the hamlet format
+--
+{-# LANGUAGE MultiParamTypeClasses #-}
+module Hakyll.Web.Template.Read.Hamlet
+ ( readHamletTemplate
+ , readHamletTemplateWith
+ ) where
+
+import Text.Hamlet (HamletSettings (..), defaultHamletSettings)
+import Text.Hamlet.RT
+
+import Hakyll.Web.Template.Internal
+
+-- | Read a hamlet template using the default settings
+--
+readHamletTemplate :: String -> Template
+readHamletTemplate = readHamletTemplateWith defaultHamletSettings
+
+-- | Read a hamlet template using the specified settings
+--
+readHamletTemplateWith :: HamletSettings -> String -> Template
+readHamletTemplateWith settings string =
+ let result = parseHamletRT settings string
+ in case result of
+ Just hamlet -> fromHamletRT hamlet
+ Nothing -> error
+ "Hakyll.Web.Template.Read.Hamlet.readHamletTemplateWith: \
+ \Could not parse Hamlet file"
+
+-- | Convert a 'HamletRT' to a 'Template'
+--
+fromHamletRT :: HamletRT -- ^ Hamlet runtime template
+ -> Template -- ^ Hakyll template
+fromHamletRT (HamletRT sd) = Template $ map fromSimpleDoc sd
+ where
+ fromSimpleDoc :: SimpleDoc -> TemplateElement
+ fromSimpleDoc (SDRaw chunk) = Chunk chunk
+ fromSimpleDoc (SDVar [var]) = Key var
+ fromSimpleDoc (SDVar _) = error
+ "Hakyll.Web.Template.Read.Hamlet.fromHamletRT: \
+ \Hakyll does not support '.' in identifier names when using \
+ \hamlet templates."
+ fromSimpleDoc _ = error
+ "Hakyll.Web.Template.Read.Hamlet.fromHamletRT: \
+ \Only simple $key$ identifiers are allowed when using hamlet \
+ \templates."
diff --git a/src/Hakyll/Web/Util/Url.hs b/src/Hakyll/Web/Util/Url.hs
new file mode 100644
index 0000000..54a361e
--- /dev/null
+++ b/src/Hakyll/Web/Util/Url.hs
@@ -0,0 +1,30 @@
+-- | Miscellaneous URL manipulation functions.
+--
+module Hakyll.Web.Util.Url
+ ( toUrl
+ , toSiteRoot
+ ) where
+
+import System.FilePath (splitPath, takeDirectory, joinPath)
+
+-- | Convert a filepath to an URL starting from the site root
+--
+-- Example:
+--
+-- > toUrl "foo/bar.html"
+--
+-- Result:
+--
+-- > "/foo/bar.html"
+--
+toUrl :: FilePath -> String
+toUrl = ('/' :)
+
+-- | Get the relative url to the site root, for a given (absolute) url
+--
+toSiteRoot :: String -> String
+toSiteRoot = emptyException . joinPath . map parent . splitPath . takeDirectory
+ where
+ parent = const ".."
+ emptyException [] = "."
+ emptyException x = x
diff --git a/src/Network/Hakyll/SimpleServer.hs b/src/Network/Hakyll/SimpleServer.hs
deleted file mode 100644
index 4eef689..0000000
--- a/src/Network/Hakyll/SimpleServer.hs
+++ /dev/null
@@ -1,215 +0,0 @@
--- | Module containing a small, simple http file server for testing and preview
--- purposes.
-module Network.Hakyll.SimpleServer
- ( simpleServer
- ) where
-
-import Prelude hiding (log)
-import Control.Monad (forever)
-import Control.Monad.Reader (ReaderT, runReaderT, ask, liftIO)
-import Network
-import System.IO
-import System.Directory (doesFileExist, doesDirectoryExist)
-import Control.Concurrent (forkIO)
-import Control.Concurrent.Chan (Chan, newChan, readChan, writeChan)
-import System.FilePath (takeExtension)
-import qualified Data.Map as M
-import Data.List (intercalate)
-
-import Text.Hakyll.Util
-import Text.Hakyll.Regex
-
--- | Function to log from a chan.
-log :: Chan String -> IO ()
-log logChan = forever (readChan logChan >>= hPutStrLn stderr)
-
--- | General server configuration.
-data ServerConfig = ServerConfig { documentRoot :: FilePath
- , portNumber :: PortNumber
- , logChannel :: Chan String
- }
-
--- | Custom monad stack.
-type Server = ReaderT ServerConfig IO
-
--- | Simple representation of a HTTP request.
-data Request = Request { requestMethod :: String
- , requestURI :: String
- , requestVersion :: String
- } deriving (Ord, Eq)
-
-instance Show Request where
- show request = requestMethod request ++ " "
- ++ requestURI request ++ " "
- ++ requestVersion request
-
--- | Read a HTTP request from a 'Handle'. For now, this will ignore the request
--- headers and body.
-readRequest :: Handle -> Server Request
-readRequest handle = do
- requestLine <- liftIO $ hGetLine handle
- let [method, uri, version] = map trim $ splitRegex " " requestLine
- request = Request { requestMethod = method
- , requestURI = uri
- , requestVersion = version
- }
- return request
-
--- | Simple representation of the HTTP response we send back.
-data Response = Response { responseVersion :: String
- , responseStatusCode :: Int
- , responsePhrase :: String
- , responseHeaders :: M.Map String String
- , responseBody :: String
- } deriving (Ord, Eq)
-
-instance Show Response where
- show response = responseVersion response ++ " "
- ++ show (responseStatusCode response) ++ " "
- ++ responsePhrase response
-
--- | A default response.
-defaultResponse :: Response
-defaultResponse = Response { responseVersion = "HTTP/1.1"
- , responseStatusCode = 0
- , responsePhrase = ""
- , responseHeaders = M.empty
- , responseBody = ""
- }
-
--- | Create a response for a given HTTP request.
-createResponse :: Request -> Server Response
-createResponse request
- | requestMethod request == "GET" = createGetResponse request
- | otherwise = return $ createErrorResponse 501 "Not Implemented"
-
--- | Create a simple error response.
-createErrorResponse :: Int -- ^ Error code.
- -> String -- ^ Error phrase.
- -> Response -- ^ Result.
-createErrorResponse statusCode phrase = defaultResponse
- { responseStatusCode = statusCode
- , responsePhrase = phrase
- , responseHeaders = M.singleton "Content-Type" "text/html"
- , responseBody =
- "<html> <head> <title>" ++ show statusCode ++ "</title> </head>"
- ++ "<body> <h1>" ++ show statusCode ++ "</h1>\n"
- ++ "<p>" ++ phrase ++ "</p> </body> </html>"
- }
-
--- | Create a simple get response.
-createGetResponse :: Request -> Server Response
-createGetResponse request = do
- -- Construct the complete fileName of the requested resource.
- config <- ask
- let -- Drop everything after a '?'.
- uri = takeWhile ((/=) '?') $ requestURI request
- log' = writeChan (logChannel config)
- isDirectory <- liftIO $ doesDirectoryExist $ documentRoot config ++ uri
- let fileName =
- documentRoot config ++ if isDirectory then uri ++ "/index.html"
- else uri
-
- create200 = do
- h <- openBinaryFile fileName ReadMode
- contentLength <- hFileSize h
- body <- hGetContents h
- let mimeHeader = getMIMEHeader fileName
- headers = ("Content-Length", show contentLength) : mimeHeader
- return $ defaultResponse
- { responseStatusCode = 200
- , responsePhrase = "OK"
- , responseHeaders = responseHeaders defaultResponse
- `M.union` M.fromList headers
- , responseBody = body
- }
-
- -- Called when an error occurs during the creation of a 200 response.
- create500 e = do
- log' $ "Internal Error: " ++ show e
- return $ createErrorResponse 500 "Internal Server Error"
-
- -- Send back the page if found.
- exists <- liftIO $ doesFileExist fileName
- if exists
- then liftIO $ catch create200 create500
- else do liftIO $ log' $ "Not Found: " ++ fileName
- return $ createErrorResponse 404 "Not Found"
-
--- | Get the mime header for a certain filename. This is based on the extension
--- of the given 'FilePath'.
-getMIMEHeader :: FilePath -> [(String, String)]
-getMIMEHeader fileName =
- case result of (Just x) -> [("Content-Type", x)]
- Nothing -> []
- where
- result = lookup (takeExtension fileName) [ (".css", "text/css")
- , (".gif", "image/gif")
- , (".htm", "text/html")
- , (".html", "text/html")
- , (".jpeg", "image/jpeg")
- , (".jpg", "image/jpeg")
- , (".js", "text/javascript")
- , (".png", "image/png")
- , (".txt", "text/plain")
- , (".xml", "text/xml")
- ]
-
--- | Respond to an incoming request.
-respond :: Handle -> Server ()
-respond handle = do
- -- Read the request and create a response.
- request <- readRequest handle
- response <- createResponse request
-
- -- Generate some output.
- config <- ask
- liftIO $ writeChan (logChannel config)
- $ show request ++ " => " ++ show response
-
- -- Send the response back to the handle.
- liftIO $ putResponse response
- where
- putResponse response = do hPutStr handle $ intercalate " "
- [ responseVersion response
- , show $ responseStatusCode response
- , responsePhrase response
- ]
- hPutStr handle "\r\n"
- mapM_ putHeader
- (M.toList $ responseHeaders response)
- hPutStr handle "\r\n"
- hPutStr handle $ responseBody response
- hPutStr handle "\r\n"
- hClose handle
-
- putHeader (key, value) =
- hPutStr handle $ key ++ ": " ++ value ++ "\r\n"
-
--- | Start a simple http server on the given 'PortNumber', serving the given
--- directory.
---
-simpleServer :: PortNumber -- ^ Port to listen on.
- -> FilePath -- ^ Root directory to serve.
- -> IO () -- ^ Optional pre-respond action.
- -> IO ()
-simpleServer port root preRespond = do
- -- Channel to send logs to
- logChan <- newChan
-
- let config = ServerConfig { documentRoot = root
- , portNumber = port
- , logChannel = logChan
- }
-
- -- When a client connects, respond in a separate thread.
- listen socket = do (handle, _, _) <- accept socket
- preRespond
- forkIO (runReaderT (respond handle) config)
-
- -- Handle logging in a separate thread
- _ <- forkIO (log logChan)
-
- writeChan logChan $ "Starting hakyll server on port " ++ show port ++ "..."
- socket <- listenOn (PortNumber port)
- forever (listen socket)
diff --git a/src/Text/Hakyll.hs b/src/Text/Hakyll.hs
deleted file mode 100644
index b0fe479..0000000
--- a/src/Text/Hakyll.hs
+++ /dev/null
@@ -1,185 +0,0 @@
--- | This is the main Hakyll module, exporting the important @hakyll@ function.
---
--- Most configurations would use this @hakyll@ function more or less as the
--- main function:
---
--- > main = hakyll $ do
--- > directory css "css"
--- > directory static "images"
---
-module Text.Hakyll
- ( defaultHakyllConfiguration
- , hakyll
- , hakyllWithConfiguration
- , runDefaultHakyll
-
- , module Text.Hakyll.Context
- , module Text.Hakyll.ContextManipulations
- , module Text.Hakyll.CreateContext
- , module Text.Hakyll.File
- , module Text.Hakyll.HakyllMonad
- , module Text.Hakyll.Regex
- , module Text.Hakyll.Render
- , module Text.Hakyll.HakyllAction
- , module Text.Hakyll.Paginate
- , module Text.Hakyll.Page
- , module Text.Hakyll.Pandoc
- , module Text.Hakyll.Util
- , module Text.Hakyll.Tags
- , module Text.Hakyll.Feed
- , module Text.Hakyll.Configurations.Static
- ) where
-
-import Control.Concurrent (forkIO, threadDelay)
-import Control.Monad.Reader (runReaderT, liftIO, ask)
-import Control.Monad (when)
-import Data.Monoid (mempty)
-import System.Environment (getArgs, getProgName)
-import System.Directory (doesDirectoryExist, removeDirectoryRecursive)
-import System.Time (getClockTime)
-
-import Text.Pandoc
-import Text.Hamlet (defaultHamletSettings)
-
-import Network.Hakyll.SimpleServer (simpleServer)
-import Text.Hakyll.Context
-import Text.Hakyll.ContextManipulations
-import Text.Hakyll.CreateContext
-import Text.Hakyll.File
-import Text.Hakyll.HakyllMonad
-import Text.Hakyll.Regex
-import Text.Hakyll.Render
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Paginate
-import Text.Hakyll.Page
-import Text.Hakyll.Pandoc
-import Text.Hakyll.Util
-import Text.Hakyll.Tags
-import Text.Hakyll.Feed
-import Text.Hakyll.Configurations.Static
-
--- | The default reader options for pandoc parsing.
---
-defaultPandocParserState :: ParserState
-defaultPandocParserState = defaultParserState
- { -- The following option causes pandoc to read smart typography, a nice
- -- and free bonus.
- stateSmart = True
- }
-
--- | The default writer options for pandoc rendering.
---
-defaultPandocWriterOptions :: WriterOptions
-defaultPandocWriterOptions = defaultWriterOptions
- { -- This option causes literate haskell to be written using '>' marks in
- -- html, which I think is a good default.
- writerLiterateHaskell = True
- }
-
--- | The default hakyll configuration.
---
-defaultHakyllConfiguration :: String -- ^ Absolute site URL.
- -> HakyllConfiguration -- ^ Default config.
-defaultHakyllConfiguration absoluteUrl' = HakyllConfiguration
- { absoluteUrl = absoluteUrl'
- , additionalContext = mempty
- , siteDirectory = "_site"
- , cacheDirectory = "_cache"
- , enableIndexUrl = False
- , previewMode = BuildOnRequest
- , pandocParserState = defaultPandocParserState
- , pandocWriterOptions = defaultPandocWriterOptions
- , hamletSettings = defaultHamletSettings
- }
-
--- | Main function to run Hakyll with the default configuration. The
--- absolute URL is only used in certain cases, for example RSS feeds et
--- cetera.
---
-hakyll :: String -- ^ Absolute URL of your site. Used in certain cases.
- -> Hakyll () -- ^ You code.
- -> IO ()
-hakyll absolute = hakyllWithConfiguration configuration
- where
- configuration = defaultHakyllConfiguration absolute
-
--- | Main function to run hakyll with a custom configuration.
---
-hakyllWithConfiguration :: HakyllConfiguration -> Hakyll () -> IO ()
-hakyllWithConfiguration configuration buildFunction = do
- args <- getArgs
- let f = case args of ["build"] -> buildFunction
- ["clean"] -> clean
- ["preview", p] -> preview (read p)
- ["preview"] -> preview defaultPort
- ["rebuild"] -> clean >> buildFunction
- ["server", p] -> server (read p) (return ())
- ["server"] -> server defaultPort (return ())
- _ -> help
- runReaderT f configuration
- where
- preview port = case previewMode configuration of
- BuildOnRequest -> server port buildFunction
- BuildOnInterval -> do
- let pIO = runReaderT (previewThread buildFunction) configuration
- _ <- liftIO $ forkIO pIO
- server port (return ())
-
- defaultPort = 8000
-
--- | A preview thread that periodically recompiles the site.
---
-previewThread :: Hakyll () -- ^ Build function
- -> Hakyll () -- ^ Result
-previewThread buildFunction = run =<< liftIO getClockTime
- where
- delay = 1000000
- run time = do liftIO $ threadDelay delay
- contents <- getRecursiveContents "."
- valid <- isMoreRecent time contents
- when valid buildFunction
- run =<< liftIO getClockTime
-
--- | Clean up directories.
---
-clean :: Hakyll ()
-clean = do askHakyll siteDirectory >>= remove'
- askHakyll cacheDirectory >>= remove'
- where
- remove' dir = liftIO $ do putStrLn $ "Removing " ++ dir ++ "..."
- exists <- doesDirectoryExist dir
- when exists $ removeDirectoryRecursive dir
-
--- | Show usage information.
---
-help :: Hakyll ()
-help = liftIO $ do
- name <- getProgName
- putStrLn $ "This is a Hakyll site generator program. You should always\n"
- ++ "run it from the project root directory.\n"
- ++ "\n"
- ++ "Usage:\n"
- ++ name ++ " build Generate the site.\n"
- ++ name ++ " clean Clean up and remove cache.\n"
- ++ name ++ " help Show this message.\n"
- ++ name ++ " preview [port] Run a server and autocompile.\n"
- ++ name ++ " rebuild Clean up and build again.\n"
- ++ name ++ " server [port] Run a local test server.\n"
-
--- | Start a server at the given port number.
---
-server :: Integer -- ^ Port number to serve on.
- -> Hakyll () -- ^ Pre-respond action.
- -> Hakyll ()
-server port preRespond = do
- configuration <- ask
- root <- askHakyll siteDirectory
- let preRespondIO = runReaderT preRespond configuration
- liftIO $ simpleServer (fromIntegral port) root preRespondIO
-
--- | Run a Hakyll action with default settings. This is mostly aimed at testing
--- code.
---
-runDefaultHakyll :: Hakyll a -> IO a
-runDefaultHakyll f =
- runReaderT f $ defaultHakyllConfiguration "http://example.com"
diff --git a/src/Text/Hakyll/Configurations/Static.hs b/src/Text/Hakyll/Configurations/Static.hs
deleted file mode 100644
index 5a2c1be..0000000
--- a/src/Text/Hakyll/Configurations/Static.hs
+++ /dev/null
@@ -1,59 +0,0 @@
--- | Module for a simple static configuration of a website.
---
--- The configuration works like this:
---
--- * The @templates/@ directory should contain one template.
---
--- * Renderable files in the directory tree are rendered using this template.
---
--- * The @static/@ directory is copied entirely (if it exists).
---
--- * All files in the @css/@ directory are compressed.
---
-module Text.Hakyll.Configurations.Static
- ( staticConfiguration
- ) where
-
-import Control.Applicative ((<$>))
-import Control.Monad (filterM, forM_)
-
-import Text.Hakyll.File ( getRecursiveContents, inDirectory, inHakyllDirectory
- , directory )
-import Text.Hakyll.Internal.FileType (isRenderableFile)
-import Text.Hakyll.HakyllMonad (Hakyll, logHakyll)
-import Text.Hakyll.Render (renderChain, css, static)
-import Text.Hakyll.CreateContext (createPage)
-
--- | A simple configuration for an entirely static website.
---
-staticConfiguration :: Hakyll ()
-staticConfiguration = do
- -- Find all files not in _site or _cache.
- files <- filterM isRenderableFile' =<< getRecursiveContents "."
-
- -- Find a main template to use
- mainTemplate <- take 1 <$> getRecursiveContents templateDir
- logHakyll $ case mainTemplate of [] -> "Using no template"
- (x : _) -> "Using template " ++ x
-
- -- Render all files using this template
- forM_ files $ renderChain mainTemplate . createPage
-
- -- Render a static directory
- directory static staticDir
-
- -- Render a css directory
- directory css cssDir
- where
- -- A file should have a renderable extension and not be in a hakyll
- -- directory, and not in a special directory.
- isRenderableFile' file = do
- inHakyllDirectory' <- inHakyllDirectory file
- return $ isRenderableFile file
- && not (any (inDirectory file) [templateDir, cssDir, staticDir])
- && not inHakyllDirectory'
-
- -- Directories
- templateDir = "templates"
- cssDir = "css"
- staticDir = "static"
diff --git a/src/Text/Hakyll/Context.hs b/src/Text/Hakyll/Context.hs
deleted file mode 100644
index 9045a65..0000000
--- a/src/Text/Hakyll/Context.hs
+++ /dev/null
@@ -1,16 +0,0 @@
--- | This (quite small) module exports the datatype used for contexts. A
--- @Context@ is a simple key-value mapping. You can render these @Context@s
--- with templates, and manipulate them in various ways.
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-module Text.Hakyll.Context
- ( Context (..)
- ) where
-
-import Data.Monoid (Monoid)
-import Data.Map (Map)
-import Data.Binary (Binary)
-
--- | Datatype used for key-value mappings.
-newtype Context = Context { -- | Extract the context.
- unContext :: Map String String
- } deriving (Show, Monoid, Binary)
diff --git a/src/Text/Hakyll/ContextManipulations.hs b/src/Text/Hakyll/ContextManipulations.hs
deleted file mode 100644
index 1c26f72..0000000
--- a/src/Text/Hakyll/ContextManipulations.hs
+++ /dev/null
@@ -1,124 +0,0 @@
--- | This module exports a number of functions that produce @HakyllAction@s to
--- manipulate @Context@s.
-module Text.Hakyll.ContextManipulations
- ( renderValue
- , changeValue
- , changeUrl
- , copyValue
- , renderDate
- , renderDateWithLocale
- , changeExtension
- , renderBody
- , takeBody
- ) where
-
-import Control.Monad (liftM)
-import Control.Arrow (arr)
-import System.Locale (TimeLocale, defaultTimeLocale)
-import System.FilePath (takeFileName, addExtension, dropExtension)
-import Data.Time.Format (parseTime, formatTime)
-import Data.Time.Clock (UTCTime)
-import Data.Maybe (fromMaybe)
-import qualified Data.Map as M
-
-import Text.Hakyll.Regex (substituteRegex)
-import Text.Hakyll.HakyllAction (HakyllAction (..))
-import Text.Hakyll.Context (Context (..))
-
--- | Do something with a value in a @Context@, but keep the old value as well.
--- If the key given is not present in the @Context@, nothing will happen.
---
-renderValue :: String -- ^ Key of which the value should be copied.
- -> String -- ^ Key the value should be copied to.
- -> (String -> String) -- ^ Function to apply on the value.
- -> HakyllAction Context Context
-renderValue source destination f = arr $ \(Context context) -> Context $
- case M.lookup source context of
- Nothing -> context
- (Just value) -> M.insert destination (f value) context
-
--- | Change a value in a @Context@.
---
--- > import Data.Char (toUpper)
--- > changeValue "title" (map toUpper)
---
--- Will put the title in UPPERCASE.
-changeValue :: String -- ^ Key to change.
- -> (String -> String) -- ^ Function to apply on the value.
- -> HakyllAction Context Context
-changeValue key = renderValue key key
-
--- | Change the URL of a page. This requires a special function, so dependency
--- handling can happen correctly.
---
-changeUrl :: (String -> String) -- ^ Function to change URL with.
- -> HakyllAction Context Context -- ^ Resulting action.
-changeUrl f = let action = changeValue "url" f
- in action {actionUrl = Right $ liftM f}
-
--- | Copy a value from one key to another in a @Context@.
-copyValue :: String -- ^ Source key.
- -> String -- ^ Destination key.
- -> HakyllAction Context Context
-copyValue source destination = renderValue source destination id
-
--- | When the context has a key called @path@ in a
--- @folder/yyyy-mm-dd-title.extension@ format (the convention for pages),
--- this function can render the date.
---
--- > renderDate "date" "%B %e, %Y" "Date unknown"
---
--- Will render something like @January 32, 2010@.
---
-renderDate :: String -- ^ Key in which the rendered date should be placed.
- -> String -- ^ Format to use on the date.
- -> String -- ^ Default key, in case the date cannot be parsed.
- -> HakyllAction Context Context
-renderDate = renderDateWithLocale defaultTimeLocale
-
--- | This is an extended version of 'renderDate' that allows you to specify a
--- time locale that is used for outputting the date. For more details, see
--- 'renderDate'.
---
-renderDateWithLocale :: TimeLocale -- ^ Output time locale.
- -> String -- ^ Destination key.
- -> String -- ^ Format to use on the date.
- -> String -- ^ Default key.
- -> HakyllAction Context Context
-renderDateWithLocale locale key format defaultValue =
- renderValue "path" key renderDate'
- where
- renderDate' filePath = fromMaybe defaultValue $ do
- let dateString = substituteRegex "^([0-9]*-[0-9]*-[0-9]*).*" "\\1"
- (takeFileName filePath)
- time <- parseTime defaultTimeLocale
- "%Y-%m-%d"
- dateString :: Maybe UTCTime
- return $ formatTime locale format time
-
--- | Change the extension of a file. This is only needed when you want to
--- render, for example, mardown to @.php@ files instead of @.html@ files.
---
--- > changeExtension "php"
---
--- Will render @test.markdown@ to @test.php@ instead of @test.html@.
-changeExtension :: String -- ^ Extension to change to.
- -> HakyllAction Context Context
-changeExtension extension = changeValue "url" changeExtension'
- where
- changeExtension' = flip addExtension extension . dropExtension
-
--- | Change the body of a file using a certain manipulation.
---
--- > import Data.Char (toUpper)
--- > renderBody (map toUpper)
---
--- Will put the entire body of the page in UPPERCASE.
-renderBody :: (String -> String)
- -> HakyllAction Context Context
-renderBody = renderValue "body" "body"
-
--- | Get the resulting body text from a context
---
-takeBody :: HakyllAction Context String
-takeBody = arr $ fromMaybe "" . M.lookup "body" . unContext
diff --git a/src/Text/Hakyll/CreateContext.hs b/src/Text/Hakyll/CreateContext.hs
deleted file mode 100644
index 6a0e321..0000000
--- a/src/Text/Hakyll/CreateContext.hs
+++ /dev/null
@@ -1,114 +0,0 @@
--- | A module that provides different ways to create a @Context@. These
--- functions all use the @HakyllAction@ arrow, so they produce values of the
--- type @HakyllAction () Context@.
-module Text.Hakyll.CreateContext
- ( createPage
- , createCustomPage
- , createListing
- , addField
- , combine
- , combineWithUrl
- ) where
-
-import Prelude hiding (id)
-
-import qualified Data.Map as M
-import Control.Arrow (second, arr, (&&&), (***))
-import Control.Monad (liftM2)
-import Control.Applicative ((<$>))
-import Control.Arrow ((>>>))
-import Control.Category (id)
-
-import Text.Hakyll.Context
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Render
-import Text.Hakyll.Page
-import Text.Hakyll.Pandoc
-import Text.Hakyll.Internal.Cache
-
--- | Create a @Context@ from a page file stored on the disk. This is probably
--- the most common way to create a @Context@.
-createPage :: FilePath -> HakyllAction () Context
-createPage path = cacheAction "pages" $ readPageAction path >>> renderAction
-
--- | Create a custom page @Context@.
---
--- The association list given maps keys to values for substitution. Note
--- that as value, you can either give a @String@ or a
--- @HakyllAction () String@. The latter is preferred for more complex data,
--- since it allows dependency checking. A @String@ is obviously more simple
--- to use in some cases.
---
-createCustomPage :: FilePath
- -> [(String, Either String (HakyllAction () String))]
- -> HakyllAction () Context
-createCustomPage url association = HakyllAction
- { actionDependencies = dataDependencies
- , actionUrl = Left $ return url
- , actionFunction = \_ -> Context . M.fromList <$> assoc'
- }
- where
- mtuple (a, b) = b >>= \b' -> return (a, b')
- toHakyllString = second (either return runHakyllAction)
- assoc' = mapM (mtuple . toHakyllString) $ ("url", Left url) : association
- dataDependencies = map snd association >>= getDependencies
- getDependencies (Left _) = []
- getDependencies (Right x) = actionDependencies x
-
--- | A @createCustomPage@ function specialized in creating listings.
---
--- This function creates a listing of a certain list of @Context@s. Every
--- item in the list is created by applying the given template to every
--- renderable. You can also specify additional context to be included in the
--- @CustomPage@.
-createListing :: FilePath -- ^ Destination of the page.
- -> [FilePath] -- ^ Templates to render items with.
- -> [HakyllAction () Context] -- ^ Renderables in the list.
- -> [(String, Either String (HakyllAction () String))]
- -> HakyllAction () Context
-createListing url templates renderables additional =
- createCustomPage url context
- where
- context = ("body", Right concatenation) : additional
- concatenation = renderAndConcat templates renderables
-
--- | Add a field to a 'Context'.
---
-addField :: String -- ^ Key
- -> Either String (HakyllAction () String) -- ^ Value
- -> HakyllAction Context Context -- ^ Result
-addField key value = arr (const ()) &&& id
- >>> value' *** id
- >>> arr (uncurry insert)
- where
- value' = arr (const ()) >>> either (arr . const) id value
- insert v = Context . M.insert key v . unContext
-
--- | Combine two @Context@s. The url will always be taken from the first
--- @Renderable@. Also, if a `$key` is present in both renderables, the
--- value from the first @Context@ will be taken as well.
---
--- You can see this as a this as a @union@ between two mappings.
-combine :: HakyllAction () Context -> HakyllAction () Context
- -> HakyllAction () Context
-combine x y = HakyllAction
- { actionDependencies = actionDependencies x ++ actionDependencies y
- , actionUrl = actionUrl x
- , actionFunction = \_ ->
- Context <$> liftM2 (M.union) (unContext <$> runHakyllAction x)
- (unContext <$> runHakyllAction y)
- }
-
--- | Combine two @Context@s and set a custom URL. This behaves like @combine@,
--- except that for the @url@ field, the given URL is always chosen.
-combineWithUrl :: FilePath
- -> HakyllAction () Context
- -> HakyllAction () Context
- -> HakyllAction () Context
-combineWithUrl url x y = combine'
- { actionUrl = Left $ return url
- , actionFunction = \_ ->
- Context . M.insert "url" url . unContext <$> runHakyllAction combine'
- }
- where
- combine' = combine x y
diff --git a/src/Text/Hakyll/Feed.hs b/src/Text/Hakyll/Feed.hs
deleted file mode 100644
index be8d023..0000000
--- a/src/Text/Hakyll/Feed.hs
+++ /dev/null
@@ -1,112 +0,0 @@
--- | A Module that allows easy rendering of RSS feeds. If you use this module,
--- you must make sure you set the `absoluteUrl` field in the main Hakyll
--- configuration.
---
--- Apart from that, the main rendering functions (@renderRss@, @renderAtom@)
--- all assume that you pass the list of items so that the most recent entry
--- in the feed is the first item in the list.
---
--- Also note that the @Context@s should have (at least) the following
--- fields to produce a correct feed:
---
--- - @$title@: Title of the item.
---
--- - @$description@: Description to appear in the feed.
---
--- - @$url@: URL to the item - this is usually set automatically.
---
--- Furthermore, the feed will be generated, but will be incorrect (it won't
--- validate) if an empty list is passed.
---
-module Text.Hakyll.Feed
- ( FeedConfiguration (..)
- , renderRss
- , renderAtom
- ) where
-
-import Control.Arrow ((>>>), second)
-import Control.Monad.Reader (liftIO)
-import Data.Maybe (fromMaybe)
-import qualified Data.Map as M
-
-import Text.Hakyll.Context (Context (..))
-import Text.Hakyll.CreateContext (createListing)
-import Text.Hakyll.ContextManipulations (renderDate)
-import Text.Hakyll.HakyllMonad (Hakyll)
-import Text.Hakyll.Render (render, renderChain)
-import Text.Hakyll.HakyllAction
-
-import Paths_hakyll
-
--- | This is a data structure to keep the configuration of a feed.
-data FeedConfiguration = FeedConfiguration
- { -- | Url of the feed (relative to site root). For example, @rss.xml@.
- feedUrl :: String
- , -- | Title of the feed.
- feedTitle :: String
- , -- | Description of the feed.
- feedDescription :: String
- , -- | Name of the feed author.
- feedAuthorName :: String
- }
-
--- | This is an auxiliary function to create a listing that is, in fact, a feed.
--- The items should be sorted on date.
-createFeed :: FeedConfiguration -- ^ Feed configuration.
- -> [HakyllAction () Context] -- ^ Items to include.
- -> FilePath -- ^ Feed template.
- -> FilePath -- ^ Item template.
- -> HakyllAction () Context
-createFeed configuration renderables template itemTemplate =
- listing >>> render template
- where
- listing = createListing (feedUrl configuration)
- [itemTemplate] renderables additional
-
- additional = map (second $ Left . ($ configuration))
- [ ("title", feedTitle)
- , ("description", feedDescription)
- , ("authorName", feedAuthorName)
- ] ++ updated
-
- -- Take the first timestamp, which should be the most recent.
- updated = let action = createHakyllAction $ return . fromMaybe "foo"
- . M.lookup "timestamp" . unContext
- toTuple r = ("timestamp", Right $ r >>> action)
- in map toTuple $ take 1 renderables
-
-
--- | Abstract function to render any feed.
-renderFeed :: FeedConfiguration -- ^ Feed configuration.
- -> [HakyllAction () Context] -- ^ Items to include in the feed.
- -> FilePath -- ^ Feed template.
- -> FilePath -- ^ Item template.
- -> Hakyll ()
-renderFeed configuration renderables template itemTemplate = do
- template' <- liftIO $ getDataFileName template
- itemTemplate' <- liftIO $ getDataFileName itemTemplate
- let renderFeed' = createFeed configuration renderables
- template' itemTemplate'
- renderChain [] renderFeed'
-
--- | Render an RSS feed with a number of items.
-renderRss :: FeedConfiguration -- ^ Feed configuration.
- -> [HakyllAction () Context] -- ^ Items to include in the feed.
- -> Hakyll ()
-renderRss configuration renderables =
- renderFeed configuration (map (>>> renderRssDate) renderables)
- "templates/rss.xml" "templates/rss-item.xml"
- where
- renderRssDate = renderDate "timestamp" "%a, %d %b %Y %H:%M:%S UT"
- "No date found."
-
--- | Render an Atom feed with a number of items.
-renderAtom :: FeedConfiguration -- ^ Feed configuration.
- -> [HakyllAction () Context] -- ^ Items to include in the feed.
- -> Hakyll ()
-renderAtom configuration renderables =
- renderFeed configuration (map (>>> renderAtomDate) renderables)
- "templates/atom.xml" "templates/atom-item.xml"
- where
- renderAtomDate = renderDate "timestamp" "%Y-%m-%dT%H:%M:%SZ"
- "No date found."
diff --git a/src/Text/Hakyll/File.hs b/src/Text/Hakyll/File.hs
deleted file mode 100644
index 167ece7..0000000
--- a/src/Text/Hakyll/File.hs
+++ /dev/null
@@ -1,196 +0,0 @@
--- | A module containing various function for manipulating and examinating
--- files and directories.
-module Text.Hakyll.File
- ( toDestination
- , toCache
- , toUrl
- , toRoot
- , inDirectory
- , inHakyllDirectory
- , removeSpaces
- , makeDirectories
- , getRecursiveContents
- , sortByBaseName
- , havingExtension
- , directory
- , isMoreRecent
- , isFileMoreRecent
- ) where
-
-import System.Directory
-import Control.Applicative ((<$>))
-import System.FilePath
-import System.Time (ClockTime)
-import Control.Monad
-import Data.List (isPrefixOf, sortBy)
-import Data.Ord (comparing)
-import Control.Monad.Reader (liftIO)
-
-import Text.Hakyll.HakyllMonad
-import Text.Hakyll.Internal.FileType (isRenderableFile)
-
--- | Auxiliary function to remove pathSeparators form the start. We don't deal
--- with absolute paths here. We also remove $root from the start.
-removeLeadingSeparator :: FilePath -> FilePath
-removeLeadingSeparator [] = []
-removeLeadingSeparator path
- | head path' `elem` pathSeparators = drop 1 path'
- | otherwise = path'
- where
- path' = if "$root" `isPrefixOf` path then drop 5 path
- else path
-
--- | Convert a relative URL to a filepath in the destination
--- (default: @_site@).
-toDestination :: FilePath -> Hakyll FilePath
-toDestination url = do dir <- askHakyll siteDirectory
- toFilePath dir url
-
--- | Convert a relative URL to a filepath in the cache
--- (default: @_cache@).
-toCache :: FilePath -> Hakyll FilePath
-toCache path = do dir <- askHakyll cacheDirectory
- toFilePath dir path
-
--- | Implementation of toDestination/toCache
---
-toFilePath :: String -- ^ Directory (site or cache)
- -> String -- ^ URL
- -> Hakyll FilePath -- ^ Resulting file path
-toFilePath dir url = do
- enableIndexUrl' <- askHakyll enableIndexUrl
- let destination = if enableIndexUrl' && separatorEnd
- then dir </> noSeparator </> "index.html"
- else dir </> noSeparator
- return destination
- where
- noSeparator = removeLeadingSeparator url
- separatorEnd = not (null url) && last url == '/'
-
--- | Get the url for a given page. For most extensions, this would be the path
--- itself. It's only for rendered extensions (@.markdown@, @.rst@, @.lhs@ this
--- function returns a path with a @.html@ extension instead.
-toUrl :: FilePath -> Hakyll FilePath
-toUrl path = do enableIndexUrl' <- askHakyll enableIndexUrl
- -- If the file does not have a renderable extension, like for
- -- example favicon.ico, we don't have to change it at all.
- return $ if not (isRenderableFile path)
- then path
- -- If index url's are enabled, we create pick it
- -- unless the page is an index already.
- else if enableIndexUrl' && not isIndex
- then indexUrl
- else withSimpleHtmlExtension
- where
- isIndex = dropExtension (takeFileName path) == "index"
- withSimpleHtmlExtension = flip addExtension ".html" $ dropExtension path
- indexUrl = dropExtension path ++ "/"
-
-
--- | Get the relative url to the site root, for a given (absolute) url
-toRoot :: FilePath -> FilePath
-toRoot = emptyException . joinPath . map parent . splitPath
- . takeDirectory . removeLeadingSeparator
- where
- parent = const ".."
- emptyException [] = "."
- emptyException x = x
-
--- | Check if a file is in a given directory.
---
-inDirectory :: FilePath -- ^ File path
- -> FilePath -- ^ Directory
- -> Bool -- ^ Result
-inDirectory path dir = case splitDirectories path of
- [] -> False
- (x : _) -> x == dir
-
--- | Check if a file is in a Hakyll directory. With a Hakyll directory, we mean
--- a directory that should be "ignored" such as the @_site@ or @_cache@
--- directory.
---
--- Example:
---
--- > inHakyllDirectory "_cache/pages/index.html"
---
--- Result:
---
--- > True
---
-inHakyllDirectory :: FilePath -> Hakyll Bool
-inHakyllDirectory path =
- or <$> mapM (liftM (inDirectory path) . askHakyll)
- [siteDirectory, cacheDirectory]
-
--- | Swaps spaces for '-'.
-removeSpaces :: FilePath -> FilePath
-removeSpaces = map swap
- where
- swap ' ' = '-'
- swap x = x
-
--- | Given a path to a file, try to make the path writable by making
--- all directories on the path.
-makeDirectories :: FilePath -> Hakyll ()
-makeDirectories path = liftIO $ createDirectoryIfMissing True dir
- where
- dir = takeDirectory path
-
--- | Get all contents of a directory. Note that files starting with a dot (.)
--- will be ignored.
---
-getRecursiveContents :: FilePath -> Hakyll [FilePath]
-getRecursiveContents topdir = do
- topdirExists <- liftIO $ doesDirectoryExist topdir
- if topdirExists
- then do names <- liftIO $ getDirectoryContents topdir
- let properNames = filter isProper names
- paths <- forM properNames $ \name -> do
- let path = topdir </> name
- isDirectory <- liftIO $ doesDirectoryExist path
- if isDirectory
- then getRecursiveContents path
- else return [normalise path]
- return (concat paths)
- else return []
- where
- isProper = not . (== '.') . head
-
--- | Sort a list of filenames on the basename.
-sortByBaseName :: [FilePath] -> [FilePath]
-sortByBaseName = sortBy compareBaseName
- where
- compareBaseName = comparing takeFileName
-
--- | A filter that takes all file names with a given extension. Prefix the
--- extension with a dot:
---
--- > havingExtension ".markdown" [ "index.markdown"
--- > , "style.css"
--- > ] == ["index.markdown"]
-havingExtension :: String -> [FilePath] -> [FilePath]
-havingExtension extension = filter ((==) extension . takeExtension)
-
--- | Perform a Hakyll action on every file in a given directory.
-directory :: (FilePath -> Hakyll ()) -> FilePath -> Hakyll ()
-directory action dir = getRecursiveContents dir >>= mapM_ action
-
--- | Check if a timestamp is newer then a number of given files.
-isMoreRecent :: ClockTime -- ^ The time to check.
- -> [FilePath] -- ^ Dependencies of the cached file.
- -> Hakyll Bool
-isMoreRecent _ [] = return True
-isMoreRecent timeStamp depends = do
- dependsModified <- liftIO $ mapM getModificationTime depends
- return (timeStamp >= maximum dependsModified)
-
--- | Check if a file is newer then a number of given files.
-isFileMoreRecent :: FilePath -- ^ The cached file.
- -> [FilePath] -- ^ Dependencies of the cached file.
- -> Hakyll Bool
-isFileMoreRecent file depends = do
- exists <- liftIO $ doesFileExist file
- if not exists
- then return False
- else do timeStamp <- liftIO $ getModificationTime file
- isMoreRecent timeStamp depends
diff --git a/src/Text/Hakyll/HakyllAction.hs b/src/Text/Hakyll/HakyllAction.hs
deleted file mode 100644
index 491f1f1..0000000
--- a/src/Text/Hakyll/HakyllAction.hs
+++ /dev/null
@@ -1,98 +0,0 @@
--- | This is the module which exports @HakyllAction@.
-module Text.Hakyll.HakyllAction
- ( HakyllAction (..)
- , createHakyllAction
- , createSimpleHakyllAction
- , createFileHakyllAction
- , chain
- , runHakyllAction
- , runHakyllActionIfNeeded
- ) where
-
-import Control.Arrow
-import Control.Category
-import Control.Monad ((<=<), unless)
-import Prelude hiding ((.), id)
-
-import Text.Hakyll.File (toDestination, isFileMoreRecent)
-import Text.Hakyll.HakyllMonad
-
--- | Type used for rendering computations that carry along dependencies.
-data HakyllAction a b = HakyllAction
- { -- | Dependencies of the @HakyllAction@.
- actionDependencies :: [FilePath]
- , -- | URL pointing to the result of this @HakyllAction@.
- actionUrl :: Either (Hakyll FilePath)
- (Hakyll FilePath -> Hakyll FilePath)
- , -- | The actual render function.
- actionFunction :: a -> Hakyll b
- }
-
--- | Create a @HakyllAction@ from a function.
-createHakyllAction :: (a -> Hakyll b) -- ^ Function to execute.
- -> HakyllAction a b
-createHakyllAction f = id { actionFunction = f }
-
--- | Create a @HakyllAction@ from a simple @Hakyll@ value.
-createSimpleHakyllAction :: Hakyll b -- ^ Hakyll value to pass on.
- -> HakyllAction () b
-createSimpleHakyllAction = createHakyllAction . const
-
--- | Create a @HakyllAction@ that operates on one file.
-createFileHakyllAction :: FilePath -- ^ File to operate on.
- -> Hakyll b -- ^ Value to pass on.
- -> HakyllAction () b -- ^ The resulting action.
-createFileHakyllAction path action = HakyllAction
- { actionDependencies = [path]
- , actionUrl = Left $ return path
- , actionFunction = const action
- }
-
--- | Run a @HakyllAction@ now.
-runHakyllAction :: HakyllAction () a -- ^ Render action to run.
- -> Hakyll a -- ^ Result of the action.
-runHakyllAction action = actionFunction action ()
-
--- | Run a @HakyllAction@, but only when it is out-of-date. At this point, the
--- @actionUrl@ field must be set.
-runHakyllActionIfNeeded :: HakyllAction () () -- ^ Action to run.
- -> Hakyll () -- ^ Empty result.
-runHakyllActionIfNeeded action = do
- url <- case actionUrl action of
- Left u -> u
- Right _ -> error "No url when checking dependencies."
- destination <- toDestination url
- valid <- isFileMoreRecent destination $ actionDependencies action
- unless valid $ do logHakyll $ "Rendering " ++ destination
- runHakyllAction action
-
--- | Chain a number of @HakyllAction@ computations.
-chain :: [HakyllAction a a] -- ^ Actions to chain.
- -> HakyllAction a a -- ^ Resulting action.
-chain [] = id
-chain list = foldl1 (>>>) list
-
-instance Category HakyllAction where
- id = HakyllAction
- { actionDependencies = []
- , actionUrl = Right id
- , actionFunction = return
- }
-
- x . y = HakyllAction
- { actionDependencies = actionDependencies x ++ actionDependencies y
- , actionUrl = case actionUrl x of
- Left ux -> Left ux
- Right fx -> case actionUrl y of
- Left uy -> Left (fx uy)
- Right fy -> Right (fx . fy)
- , actionFunction = actionFunction x <=< actionFunction y
- }
-
-instance Arrow HakyllAction where
- arr f = id { actionFunction = return . f }
-
- first x = x
- { actionFunction = \(y, z) -> do y' <- actionFunction x y
- return (y', z)
- }
diff --git a/src/Text/Hakyll/HakyllMonad.hs b/src/Text/Hakyll/HakyllMonad.hs
deleted file mode 100644
index f51cf2c..0000000
--- a/src/Text/Hakyll/HakyllMonad.hs
+++ /dev/null
@@ -1,99 +0,0 @@
--- | Module describing the Hakyll monad stack.
-module Text.Hakyll.HakyllMonad
- ( HakyllConfiguration (..)
- , PreviewMode (..)
- , Hakyll
- , askHakyll
- , getAdditionalContext
- , logHakyll
- , forkHakyllWait
- , concurrentHakyll
- ) where
-
-import Control.Monad.Trans (liftIO)
-import Control.Concurrent.MVar (MVar, putMVar, newEmptyMVar, readMVar)
-import Control.Monad.Reader (ReaderT, ask, runReaderT)
-import Control.Monad (liftM, forM, forM_)
-import qualified Data.Map as M
-import System.IO (hPutStrLn, stderr)
-
-import Text.Pandoc (ParserState, WriterOptions)
-import Text.Hamlet (HamletSettings)
-
-import Text.Hakyll.Context (Context (..))
-
--- | Our custom monad stack.
---
-type Hakyll = ReaderT HakyllConfiguration IO
-
--- | Preview mode.
---
-data PreviewMode = BuildOnRequest
- | BuildOnInterval
- deriving (Show, Eq, Ord)
-
--- | Hakyll global configuration type.
---
-data HakyllConfiguration = HakyllConfiguration
- { -- | Absolute URL of the site.
- absoluteUrl :: String
- , -- | An additional context to use when rendering. This additional context
- -- is used globally.
- additionalContext :: Context
- , -- | Directory where the site is placed.
- siteDirectory :: FilePath
- , -- | Directory for cache files.
- cacheDirectory :: FilePath
- , -- | Enable index links.
- enableIndexUrl :: Bool
- , -- | The preview mode used
- previewMode :: PreviewMode
- , -- | Pandoc parsing options
- pandocParserState :: ParserState
- , -- | Pandoc writer options
- pandocWriterOptions :: WriterOptions
- , -- | Hamlet settings (if you use hamlet for templates)
- hamletSettings :: HamletSettings
- }
-
--- | Simplified @ask@ function for the Hakyll monad stack.
---
--- Usage would typically be something like:
---
--- > doSomething :: a -> b -> Hakyll c
--- > doSomething arg1 arg2 = do
--- > siteDirectory' <- askHakyll siteDirectory
--- > ...
---
-askHakyll :: (HakyllConfiguration -> a) -> Hakyll a
-askHakyll = flip liftM ask
-
--- | Obtain the globally available, additional context.
---
-getAdditionalContext :: HakyllConfiguration -> Context
-getAdditionalContext configuration =
- let (Context c) = additionalContext configuration
- in Context $ M.insert "absolute" (absoluteUrl configuration) c
-
--- | Write some log information.
---
-logHakyll :: String -> Hakyll ()
-logHakyll = liftIO . hPutStrLn stderr
-
--- | Perform a concurrent hakyll action. Returns an MVar you can wait on
---
-forkHakyllWait :: Hakyll () -> Hakyll (MVar ())
-forkHakyllWait action = do
- mvar <- liftIO newEmptyMVar
- config <- ask
- liftIO $ do
- runReaderT action config
- putMVar mvar ()
- return mvar
-
--- | Perform a number of concurrent hakyll actions, and waits for them to finish
---
-concurrentHakyll :: [Hakyll ()] -> Hakyll ()
-concurrentHakyll actions = do
- mvars <- forM actions forkHakyllWait
- forM_ mvars (liftIO . readMVar)
diff --git a/src/Text/Hakyll/Internal/Cache.hs b/src/Text/Hakyll/Internal/Cache.hs
deleted file mode 100644
index b83d9af..0000000
--- a/src/Text/Hakyll/Internal/Cache.hs
+++ /dev/null
@@ -1,53 +0,0 @@
-module Text.Hakyll.Internal.Cache
- ( storeInCache
- , getFromCache
- , isCacheMoreRecent
- , cacheAction
- ) where
-
-import Control.Monad ((<=<))
-import Control.Monad.Reader (liftIO)
-import Data.Binary
-import System.FilePath ((</>))
-
-import Text.Hakyll.File
-import Text.Hakyll.HakyllMonad (Hakyll)
-import Text.Hakyll.HakyllAction
-
--- | We can store all datatypes instantiating @Binary@ to the cache. The cache
--- directory is specified by the @HakyllConfiguration@, usually @_cache@.
-storeInCache :: (Binary a) => a -> FilePath -> Hakyll ()
-storeInCache value path = do
- cachePath <- toCache path
- makeDirectories cachePath
- liftIO $ encodeFile cachePath value
-
--- | Get a value from the cache. The filepath given should not be located in the
--- cache. This function performs a timestamp check on the filepath and the
--- filepath in the cache, and only returns the cached value when it is still
--- up-to-date.
-getFromCache :: (Binary a) => FilePath -> Hakyll a
-getFromCache = liftIO . decodeFile <=< toCache
-
--- | Check if a file in the cache is more recent than a number of other files.
-isCacheMoreRecent :: FilePath -> [FilePath] -> Hakyll Bool
-isCacheMoreRecent file depends = toCache file >>= flip isFileMoreRecent depends
-
--- | Cache an entire arrow
---
-cacheAction :: Binary a
- => String
- -> HakyllAction () a
- -> HakyllAction () a
-cacheAction key action = action { actionFunction = const cacheFunction }
- where
- cacheFunction = do
- -- Construct a filename
- fileName <- fmap (key </>) $ either id (const $ return "unknown")
- $ actionUrl action
- -- Check the cache
- cacheOk <- isCacheMoreRecent fileName $ actionDependencies action
- if cacheOk then getFromCache fileName
- else do result <- actionFunction action ()
- storeInCache result fileName
- return result
diff --git a/src/Text/Hakyll/Internal/FileType.hs b/src/Text/Hakyll/Internal/FileType.hs
deleted file mode 100644
index 689c77f..0000000
--- a/src/Text/Hakyll/Internal/FileType.hs
+++ /dev/null
@@ -1,49 +0,0 @@
--- | A module dealing with file extensions and associated file types.
-module Text.Hakyll.Internal.FileType
- ( FileType (..)
- , getFileType
- , isRenderable
- , isRenderableFile
- ) where
-
-import System.FilePath (takeExtension)
-
--- | Datatype to represent the different file types Hakyll can deal with.
-data FileType = Html
- | LaTeX
- | LiterateHaskellMarkdown
- | Markdown
- | ReStructuredText
- | Text
- | UnknownFileType
- deriving (Eq, Ord, Show, Read)
-
--- | Get the file type for a certain file. The type is determined by extension.
-getFileType :: FilePath -> FileType
-getFileType = getFileType' . takeExtension
- where
- getFileType' ".htm" = Html
- getFileType' ".html" = Html
- getFileType' ".lhs" = LiterateHaskellMarkdown
- getFileType' ".markdown" = Markdown
- getFileType' ".md" = Markdown
- getFileType' ".mdn" = Markdown
- getFileType' ".mdown" = Markdown
- getFileType' ".mdwn" = Markdown
- getFileType' ".mkd" = Markdown
- getFileType' ".mkdwn" = Markdown
- getFileType' ".page" = Markdown
- getFileType' ".rst" = ReStructuredText
- getFileType' ".tex" = LaTeX
- getFileType' ".text" = Text
- getFileType' ".txt" = Text
- getFileType' _ = UnknownFileType
-
--- | Check if a certain @FileType@ is renderable.
-isRenderable :: FileType -> Bool
-isRenderable UnknownFileType = False
-isRenderable _ = True
-
--- | Check if a certain file is renderable.
-isRenderableFile :: FilePath -> Bool
-isRenderableFile = isRenderable . getFileType
diff --git a/src/Text/Hakyll/Internal/Template.hs b/src/Text/Hakyll/Internal/Template.hs
deleted file mode 100644
index cd6a3bd..0000000
--- a/src/Text/Hakyll/Internal/Template.hs
+++ /dev/null
@@ -1,86 +0,0 @@
-module Text.Hakyll.Internal.Template
- ( Template (..)
- , fromString
- , readTemplate
- , substitute
- , regularSubstitute
- , finalSubstitute
- ) where
-
-import Control.Arrow ((>>>))
-import Control.Applicative ((<$>))
-import Data.List (isPrefixOf)
-import Data.Char (isAlphaNum)
-import Data.Maybe (fromMaybe)
-import System.FilePath ((</>))
-import qualified Data.Map as M
-
-import Text.Hakyll.Context (Context (..))
-import Text.Hakyll.HakyllMonad (Hakyll)
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Pandoc
-import Text.Hakyll.Internal.Cache
-import Text.Hakyll.Page
-import Text.Hakyll.ContextManipulations
-import Text.Hakyll.Internal.Template.Template
-import Text.Hakyll.Internal.Template.Hamlet
-
--- | Construct a @Template@ from a string.
---
-fromString :: String -> Template
-fromString = Template . fromString'
- where
- fromString' [] = []
- fromString' string
- | "$$" `isPrefixOf` string =
- EscapeCharacter : (fromString' $ drop 2 string)
- | "$" `isPrefixOf` string =
- let (key, rest) = span isAlphaNum $ drop 1 string
- in Identifier key : fromString' rest
- | otherwise =
- let (chunk, rest) = break (== '$') string
- in Chunk chunk : fromString' rest
-
--- | Read a @Template@ from a file. This function might fetch the @Template@
--- from the cache, if available.
-readTemplate :: FilePath -> Hakyll Template
-readTemplate path = do
- isCacheMoreRecent' <- isCacheMoreRecent fileName [path]
- if isCacheMoreRecent'
- then getFromCache fileName
- else do
- template <- if isHamletRTFile path
- then readHamletTemplate
- else readDefaultTemplate
- storeInCache template fileName
- return template
- where
- fileName = "templates" </> path
- readDefaultTemplate = do
- body <- runHakyllAction $ readPageAction path
- >>> renderAction
- >>> takeBody
- return $ fromString body
-
- readHamletTemplate = fromHamletRT <$> readHamletRT path
-
--- | Substitutes @$identifiers@ in the given @Template@ by values from the given
--- "Context". When a key is not found, it is left as it is. You can specify
--- the characters used to replace escaped dollars (@$$@) here.
-substitute :: String -> Template -> Context -> String
-substitute escaper template context = substitute' =<< unTemplate template
- where
- substitute' (Chunk chunk) = chunk
- substitute' (Identifier key) =
- fromMaybe ('$' : key) $ M.lookup key $ unContext context
- substitute' (EscapeCharacter) = escaper
-
--- | @substitute@ for use during a chain. This will leave escaped characters as
--- they are.
-regularSubstitute :: Template -> Context -> String
-regularSubstitute = substitute "$$"
-
--- | @substitute@ for the end of a chain (just before writing). This renders
--- escaped characters.
-finalSubstitute :: Template -> Context -> String
-finalSubstitute = substitute "$"
diff --git a/src/Text/Hakyll/Internal/Template/Hamlet.hs b/src/Text/Hakyll/Internal/Template/Hamlet.hs
deleted file mode 100644
index 458ab35..0000000
--- a/src/Text/Hakyll/Internal/Template/Hamlet.hs
+++ /dev/null
@@ -1,56 +0,0 @@
--- | Support for Hamlet templates in Hakyll.
---
-module Text.Hakyll.Internal.Template.Hamlet
- ( isHamletRTFile
- , readHamletRT
- , fromHamletRT
- ) where
-
-import Control.Exception (try)
-import Control.Monad.Trans (liftIO)
-import System.FilePath (takeExtension)
-
-import Text.Hamlet.RT
-
-import Text.Hakyll.Internal.Template.Template
-import Text.Hakyll.HakyllMonad (Hakyll, askHakyll, hamletSettings, logHakyll)
-
--- | Determine if a file is a hamlet template by extension.
---
-isHamletRTFile :: FilePath -> Bool
-isHamletRTFile fileName = takeExtension fileName `elem` [".hamlet", ".hml"]
-
--- | Read a 'HamletRT' by file name.
---
-readHamletRT :: FilePath -- ^ Filename of the template
- -> Hakyll HamletRT -- ^ Resulting hamlet template
-readHamletRT fileName = do
- settings <- askHakyll hamletSettings
- string <- liftIO $ readFile fileName
- result <- liftIO $ try $ parseHamletRT settings string
- case result of
- Left (HamletParseException s) -> error' s
- Left (HamletUnsupportedDocException d) -> error' $ show d
- Left (HamletRenderException s) -> error' s
- Right x -> return x
- where
- error' s = do
- logHakyll $ "Parse of hamlet file " ++ fileName ++ " failed."
- logHakyll s
- error "Parse failed."
-
--- | Convert a 'HamletRT' to a 'Template'
---
-fromHamletRT :: HamletRT -- ^ Hamlet runtime template
- -> Template -- ^ Hakyll template
-fromHamletRT (HamletRT sd) = Template $ map fromSimpleDoc sd
- where
- fromSimpleDoc :: SimpleDoc -> TemplateElement
- fromSimpleDoc (SDRaw chunk) = Chunk chunk
- fromSimpleDoc (SDVar [var]) = Identifier var
- fromSimpleDoc (SDVar _) =
- error "Hakyll does not support '.' in identifier names when using \
- \hamlet templates."
- fromSimpleDoc _ =
- error "Only simple $key$ identifiers are allowed when using hamlet \
- \templates."
diff --git a/src/Text/Hakyll/Internal/Template/Template.hs b/src/Text/Hakyll/Internal/Template/Template.hs
deleted file mode 100644
index 49373fd..0000000
--- a/src/Text/Hakyll/Internal/Template/Template.hs
+++ /dev/null
@@ -1,34 +0,0 @@
--- | Module containing the template data structure.
---
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-module Text.Hakyll.Internal.Template.Template
- ( Template (..)
- , TemplateElement (..)
- ) where
-
-import Control.Applicative ((<$>))
-
-import Data.Binary (Binary, get, getWord8, put, putWord8)
-
--- | Datatype used for template substitutions.
---
-newtype Template = Template { unTemplate :: [TemplateElement] }
- deriving (Show, Eq, Binary)
-
--- | Elements of a template.
---
-data TemplateElement = Chunk String
- | Identifier String
- | EscapeCharacter
- deriving (Show, Eq)
-
-instance Binary TemplateElement where
- put (Chunk string) = putWord8 0 >> put string
- put (Identifier key) = putWord8 1 >> put key
- put (EscapeCharacter) = putWord8 2
-
- get = getWord8 >>= \tag ->
- case tag of 0 -> Chunk <$> get
- 1 -> Identifier <$> get
- 2 -> return EscapeCharacter
- _ -> error "Error reading cached template"
diff --git a/src/Text/Hakyll/Page.hs b/src/Text/Hakyll/Page.hs
deleted file mode 100644
index f2b5fde..0000000
--- a/src/Text/Hakyll/Page.hs
+++ /dev/null
@@ -1,108 +0,0 @@
--- | A module for dealing with @Page@s. This module is mostly internally used.
-module Text.Hakyll.Page
- ( PageSection (..)
- , readPage
- , readPageAction
- ) where
-
-import Data.List (isPrefixOf)
-import Data.Char (isSpace)
-import Control.Monad.Reader (liftIO)
-import System.FilePath
-import Control.Monad.State (State, evalState, get, put)
-
-import Text.Hakyll.File
-import Text.Hakyll.HakyllMonad
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Regex (substituteRegex, matchesRegex)
-import Text.Hakyll.Util (trim)
-
--- | A page is first parsed into a number of page sections. A page section
--- consists of:
---
--- * A key
---
--- * A value
---
--- * A 'Bool' flag, indicating if the value is applicable for rendering
---
-data PageSection = PageSection {unPageSection :: (String, String, Bool)}
- deriving (Show)
-
--- | Split a page into sections.
---
-splitAtDelimiters :: [String] -> State (Maybe String) [[String]]
-splitAtDelimiters [] = return []
-splitAtDelimiters ls@(x:xs) = do
- delimiter <- get
- if not (isDelimiter delimiter x)
- then return [ls]
- else do let proper = takeWhile (== '-') x
- (content, rest) = break (isDelimiter $ Just proper) xs
- put $ Just proper
- rest' <- splitAtDelimiters rest
- return $ (x : content) : rest'
- where
- isDelimiter old = case old of
- Nothing -> isPossibleDelimiter
- (Just d) -> (== d) . takeWhile (== '-')
-
--- | Check if the given string is a metadata delimiter.
-isPossibleDelimiter :: String -> Bool
-isPossibleDelimiter = isPrefixOf "---"
-
--- | Read one section of a page.
---
-readSection :: Bool -- ^ If this section is the first section in the page.
- -> [String] -- ^ Lines in the section.
- -> [PageSection] -- ^ Key-values extracted.
-readSection _ [] = []
-readSection isFirst ls
- | not isDelimiter' = [body ls]
- | isNamedDelimiter = readSectionMetaData ls
- | isFirst = readSimpleMetaData (drop 1 ls)
- | otherwise = [body (drop 1 ls)]
- where
- isDelimiter' = isPossibleDelimiter (head ls)
- isNamedDelimiter = head ls `matchesRegex` "^----* *[a-zA-Z0-9][a-zA-Z0-9]*"
- body ls' = PageSection ("body", unlines ls', True)
-
- readSimpleMetaData = map readPair . filter (not . all isSpace)
- readPair = trimPair . break (== ':')
- trimPair (key, value) = PageSection (trim key, trim (drop 1 value), False)
-
- readSectionMetaData [] = []
- readSectionMetaData (header:value) =
- let key = substituteRegex "[^a-zA-Z0-9]" "" header
- in [PageSection (key, unlines value, True)]
-
--- | Read a page from a file. Metadata is supported.
---
-readPage :: FilePath -> Hakyll [PageSection]
-readPage path = do
- let sectionFunctions = map readSection $ True : repeat False
-
- -- Read file.
- contents <- liftIO $ readFile path
- url <- toUrl path
- let sections = evalState (splitAtDelimiters $ lines contents) Nothing
- sectionsData = concat $ zipWith ($) sectionFunctions sections
-
- -- Note that url, path etc. are listed first, which means can be overwritten
- -- by section data
- return $ PageSection ("url", url, False)
- : PageSection ("path", path, False)
- : PageSection ("title", takeBaseName path, False)
- : (category ++ sectionsData)
- where
- category = let dirs = splitDirectories $ takeDirectory path
- in [PageSection ("category", last dirs, False) | not (null dirs)]
-
--- | Read a page from a file. Metadata is supported.
---
-readPageAction :: FilePath -> HakyllAction () [PageSection]
-readPageAction path = HakyllAction
- { actionDependencies = [path]
- , actionUrl = Left $ toUrl path
- , actionFunction = const $ readPage path
- }
diff --git a/src/Text/Hakyll/Paginate.hs b/src/Text/Hakyll/Paginate.hs
deleted file mode 100644
index 04194ca..0000000
--- a/src/Text/Hakyll/Paginate.hs
+++ /dev/null
@@ -1,94 +0,0 @@
--- | Module aimed to paginate web pages.
---
-module Text.Hakyll.Paginate
- ( PaginateConfiguration (..)
- , defaultPaginateConfiguration
- , paginate
- ) where
-
-import Control.Applicative ((<$>))
-
-import Text.Hakyll.Context (Context)
-import Text.Hakyll.CreateContext
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Util (link)
-
--- | A configuration for a pagination.
---
-data PaginateConfiguration = PaginateConfiguration
- { -- | Label for the link to the previous page.
- previousLabel :: String
- , -- | Label for the link to the next page.
- nextLabel :: String
- , -- | Label for the link to the first page.
- firstLabel :: String
- , -- | Label for the link to the last page.
- lastLabel :: String
- }
-
--- | A simple default configuration for pagination.
---
-defaultPaginateConfiguration :: PaginateConfiguration
-defaultPaginateConfiguration = PaginateConfiguration
- { previousLabel = "Previous"
- , nextLabel = "Next"
- , firstLabel = "First"
- , lastLabel = "Last"
- }
-
--- | The most important function for pagination. This function operates on a
--- list of @Context@s (the pages), and basically just adds fields to them
--- by combining them with a custom page.
---
--- The following metadata fields will be added:
---
--- - @$previous@: A link to the previous page.
---
--- - @$next@: A link to the next page.
---
--- - @$first@: A link to the first page.
---
--- - @$last@: A link to the last page.
---
--- - @$index@: 1-based index of the current page.
---
--- - @$length@: Total number of pages.
---
--- When @$previous@ or @$next@ are not available, they will be just a label
--- without a link. The same goes for when we are on the first or last page for
--- @$first@ and @$last@.
---
-paginate :: PaginateConfiguration
- -> [HakyllAction () Context]
- -> [HakyllAction () Context]
-paginate configuration renderables = paginate' Nothing renderables (1 :: Int)
- where
- -- Create a link with a given label, taken from the configuration.
- linkWithLabel f r = Right $ case actionUrl r of
- Left l -> createSimpleHakyllAction $
- link (f configuration) . ("$root/" ++) <$> l
- Right _ -> error "No link found for pagination."
-
- -- The main function that creates combined renderables by recursing over
- -- the list of items.
- paginate' _ [] _ = []
- paginate' maybePrev (x:xs) index =
- let (previous, first) = case maybePrev of
- (Just r) -> ( linkWithLabel previousLabel r
- , linkWithLabel firstLabel (head renderables) )
- Nothing -> ( Left $ previousLabel configuration
- , Left $ firstLabel configuration )
- (next, last') = case xs of
- (n:_) -> ( linkWithLabel nextLabel n
- , linkWithLabel lastLabel (last renderables) )
- [] -> ( Left $ nextLabel configuration
- , Left $ lastLabel configuration )
- customPage = createCustomPage ""
- [ ("previous", previous)
- , ("next", next)
- , ("first", first)
- , ("last", last')
- , ("index", Left $ show index)
- , ("length", Left $ show $ length renderables)
- ]
- in (x `combine` customPage) : paginate' (Just x) xs (index + 1)
diff --git a/src/Text/Hakyll/Pandoc.hs b/src/Text/Hakyll/Pandoc.hs
deleted file mode 100644
index c0dec77..0000000
--- a/src/Text/Hakyll/Pandoc.hs
+++ /dev/null
@@ -1,57 +0,0 @@
--- | Module exporting a pandoc arrow
---
-module Text.Hakyll.Pandoc
- ( renderAction
- , renderActionWith
- ) where
-
-import Data.Maybe (fromMaybe)
-import qualified Data.Map as M
-import Control.Arrow (second, (>>>), arr, (&&&))
-
-import Text.Pandoc
-
-import Text.Hakyll.Internal.FileType
-import Text.Hakyll.Page
-import Text.Hakyll.HakyllMonad
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Context
-
--- | Get a render function for a given extension.
---
-getRenderFunction :: HakyllAction FileType (String -> String)
-getRenderFunction = createHakyllAction $ \fileType -> case fileType of
- Html -> return id
- Text -> return id
- UnknownFileType -> return id
- _ -> do parserState <- askHakyll pandocParserState
- writerOptions <- askHakyll pandocWriterOptions
- return $ writeHtmlString writerOptions
- . readFunction fileType (readOptions parserState fileType)
- where
- readFunction ReStructuredText = readRST
- readFunction LaTeX = readLaTeX
- readFunction Markdown = readMarkdown
- readFunction LiterateHaskellMarkdown = readMarkdown
- readFunction t = error $ "Cannot render " ++ show t
-
- readOptions options LiterateHaskellMarkdown = options
- { stateLiterateHaskell = True }
- readOptions options _ = options
-
--- | An action that renders the list of page sections to a context using pandoc
---
-renderAction :: HakyllAction [PageSection] Context
-renderAction = (arr id &&& (getFileType' >>> getRenderFunction))
- >>> renderActionWith
- where
- getFileType' = arr $ getFileType . fromMaybe "unknown" . lookup "path"
- . map (\(x, y, _) -> (x, y)) . map unPageSection
-
--- | An action to render pages, offering just a little more flexibility
---
-renderActionWith :: HakyllAction ([PageSection], String -> String) Context
-renderActionWith = createHakyllAction $ \(sections, render') -> return $
- Context $ M.fromList $ map (renderTriple render' . unPageSection) sections
- where
- renderTriple render' (k, v, r) = second (if r then render' else id) (k, v)
diff --git a/src/Text/Hakyll/Regex.hs b/src/Text/Hakyll/Regex.hs
deleted file mode 100644
index ba7ee46..0000000
--- a/src/Text/Hakyll/Regex.hs
+++ /dev/null
@@ -1,77 +0,0 @@
--- | A module that exports a simple regex interface. This code is mostly copied
--- from the regex-compat package at hackage. I decided to write this module
--- because I want to abstract the regex package used.
-module Text.Hakyll.Regex
- ( splitRegex
- , substituteRegex
- , matchesRegex
- ) where
-
-import Text.Regex.TDFA
-
--- | Match a regular expression against a string, returning more information
--- about the match.
-matchRegexAll :: Regex -> String -> Maybe (String, String, String, [String])
-matchRegexAll = matchM
-
--- | Replaces every occurance of the given regexp with the replacement string.
-subRegex :: Regex -- ^ Search pattern
- -> String -- ^ Input string
- -> String -- ^ Replacement text
- -> String -- ^ Output string
-subRegex _ "" _ = ""
-subRegex regexp inp replacement =
- let -- bre matches a backslash then capture either a backslash or some digits
- bre = makeRegex "\\\\(\\\\|[0-9]+)"
- lookup' _ [] _ = []
- lookup' [] _ _ = []
- lookup' match' repl groups =
- case matchRegexAll bre repl of
- Nothing -> repl
- Just (lead, _, trail, bgroups) ->
- let newval =
- if head bgroups == "\\"
- then "\\"
- else let index :: Int
- index = read (head bgroups) - 1
- in if index == -1
- then match'
- else groups !! index
- in lead ++ newval ++ lookup' match' trail groups
- in case matchRegexAll regexp inp of
- Nothing -> inp
- Just (lead, match', trail, groups) ->
- lead ++ lookup' match' replacement groups
- ++ subRegex regexp trail replacement
-
--- | Splits a string based on a regular expression. The regular expression
--- should identify one delimiter.
-splitRegex' :: Regex -> String -> [String]
-splitRegex' _ [] = []
-splitRegex' delim strIn = loop strIn where
- loop str = case matchOnceText delim str of
- Nothing -> [str]
- Just (firstline, _, remainder) ->
- if null remainder
- then [firstline,""]
- else firstline : loop remainder
-
--- | Split a list at a certain element.
-splitRegex :: String -> String -> [String]
-splitRegex pattern = filter (not . null)
- . splitRegex' (makeRegex pattern)
-
--- | Substitute a regex. Simplified interface. This function performs a global
--- substitution.
-substituteRegex :: String -- ^ Pattern to replace (regex).
- -> String -- ^ Replacement string.
- -> String -- ^ Input string.
- -> String -- ^ Result.
-substituteRegex pattern replacement string =
- subRegex (makeRegex pattern) string replacement
-
--- | Simple regex matching.
-matchesRegex :: String -- ^ Input string.
- -> String -- ^ Pattern to match.
- -> Bool
-matchesRegex = (=~)
diff --git a/src/Text/Hakyll/Render.hs b/src/Text/Hakyll/Render.hs
deleted file mode 100644
index aa3ef8c..0000000
--- a/src/Text/Hakyll/Render.hs
+++ /dev/null
@@ -1,126 +0,0 @@
--- | Module containing rendering functions. All these functions are used to
--- render files to the @_site@ directory.
-module Text.Hakyll.Render
- ( render
- , renderAndConcat
- , renderChain
- , static
- , css
- , writePage
- ) where
-
-import Control.Arrow ((>>>))
-import Control.Applicative ((<$>))
-import Control.Monad.Reader (liftIO)
-import System.Directory (copyFile)
-import Data.Maybe (fromMaybe)
-import qualified Data.Map as M
-
-import Text.Hakyll.Context (Context (..))
-import Text.Hakyll.HakyllMonad (Hakyll, askHakyll, getAdditionalContext)
-import Text.Hakyll.File
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.ContextManipulations
-import Text.Hakyll.Internal.CompressCss
-import Text.Hakyll.Internal.Template
-
--- | A pure render function - used internally.
-pureRender :: Template -- ^ Template to use for rendering.
- -> Context -- ^ Renderable object to render with given template.
- -> Context -- ^ The body of the result will contain the render.
-pureRender template (Context c) =
- -- Ignore $root when substituting here. We will only replace that in the
- -- final render (just before writing).
- let contextIgnoringRoot = Context $ M.insert "root" "$root" c
- body = regularSubstitute template $ contextIgnoringRoot
- in Context $ M.insert "body" body c
-
--- | This is the most simple render action. You render a @Context@ with a
--- template, and get back the result.
-render :: FilePath -- ^ Template to use for rendering.
- -> HakyllAction Context Context -- ^ The render computation.
-render templatePath = HakyllAction
- { actionDependencies = [templatePath]
- , actionUrl = Right id
- , actionFunction = \context ->
- flip pureRender context <$> readTemplate templatePath
- }
-
--- | Render each @Context@ with the given templates, then concatenate the
--- result. So, basically this function:
---
--- - Takes every @Context@.
---
--- - Renders every @Context@ with all given templates. This is comparable
--- with a renderChain action.
---
--- - Concatenates the result and returns that as a @String@.
-renderAndConcat :: [FilePath]
- -> [HakyllAction () Context]
- -> HakyllAction () String
-renderAndConcat templatePaths renderables = HakyllAction
- { actionDependencies = renders >>= actionDependencies
- , actionUrl = Right id
- , actionFunction = actionFunction'
- }
- where
- render' = chain (map render templatePaths)
- renders = map (>>> render') renderables
-
- actionFunction' _ = do
- contexts <- mapM (runHakyllAction . (>>> takeBody)) renders
- return $ concat contexts
-
--- | Chain a render action for a page with a number of templates. This will
--- also write the result to the site destination. This is the preferred way
--- to do general rendering.
---
--- > renderChain [ "templates/notice.html"
--- > , "templates/default.html"
--- > ] $ createPagePath "warning.html"
---
--- This code will first render @warning.html@ using @templates/notice.html@,
--- and will then render the result with @templates/default.html@.
-renderChain :: [FilePath]
- -> HakyllAction () Context
- -> Hakyll ()
-renderChain templatePaths initial =
- runHakyllActionIfNeeded renderChainWith'
- where
- renderChainWith' = initial >>> chain' >>> writePage
- chain' = chain $ map render templatePaths
-
--- | Mark a certain file as static, so it will just be copied when the site is
--- generated.
-static :: FilePath -> Hakyll ()
-static source = runHakyllActionIfNeeded static'
- where
- static' = createFileHakyllAction source $ do
- destination <- toDestination source
- makeDirectories destination
- liftIO $ copyFile source destination
-
--- | Render a css file, compressing it.
-css :: FilePath -> Hakyll ()
-css source = runHakyllActionIfNeeded css'
- where
- css' = createFileHakyllAction source $ do
- contents <- liftIO $ readFile source
- destination <- toDestination source
- makeDirectories destination
- liftIO $ writeFile destination (compressCss contents)
-
--- | Write a page to the site destination. Final action after render
--- chains and such.
-writePage :: HakyllAction Context ()
-writePage = createHakyllAction $ \(Context initialContext) -> do
- additionalContext' <- unContext <$> askHakyll getAdditionalContext
- let url = fromMaybe (error "No url defined at write time.")
- (M.lookup "url" initialContext)
- body = fromMaybe "" (M.lookup "body" initialContext)
- let context = additionalContext' `M.union` M.singleton "root" (toRoot url)
- destination <- toDestination url
- makeDirectories destination
- -- Substitute $root here, just before writing.
- liftIO $ writeFile destination $ finalSubstitute (fromString body)
- (Context context)
diff --git a/src/Text/Hakyll/Tags.hs b/src/Text/Hakyll/Tags.hs
deleted file mode 100644
index d427aa5..0000000
--- a/src/Text/Hakyll/Tags.hs
+++ /dev/null
@@ -1,172 +0,0 @@
--- | Module containing some specialized functions to deal with tags.
--- This Module follows certain conventions. Stick with them.
---
--- More concrete: all functions in this module assume that the tags are
--- located in the @tags@ field, and separated by commas. An example file
--- @foo.markdown@ could look like:
---
--- > ---
--- > author: Philip K. Dick
--- > title: Do androids dream of electric sheep?
--- > tags: future, science fiction, humanoid
--- > ---
--- > The novel is set in a post-apocalyptic near future, where the Earth and
--- > its populations have been damaged greatly by Nuclear...
---
--- All the following functions would work with such a format. In addition to
--- tags, Hakyll also supports categories. The convention when using categories
--- is to place pages in subdirectories.
---
--- An example, the page @posts\/coding\/2010-01-28-hakyll-categories.markdown@
--- Tags or categories are read using the @readTagMap@ and @readCategoryMap@
--- functions. Because categories are implemented using tags - categories can
--- be seen as tags, with the restriction that a page can only have one
--- category - all functions for tags also work with categories.
---
--- When reading a @TagMap@ (which is also used for category maps) using the
--- @readTagMap@ or @readCategoryMap@ function, you also have to give a unique
--- identifier to it. This identifier is simply for caching reasons, so Hakyll
--- can tell different maps apart; it has no other use.
---
-module Text.Hakyll.Tags
- ( TagMap
- , readTagMap
- , readCategoryMap
- , withTagMap
- , renderTagCloud
- , renderTagLinks
- ) where
-
-import qualified Data.Map as M
-import Data.List (intercalate)
-import Data.Maybe (fromMaybe, maybeToList)
-import Control.Arrow (second, (>>>))
-import Control.Applicative ((<$>))
-import System.FilePath
-
-import Text.Blaze.Renderer.String (renderHtml)
-import Text.Blaze.Html5 ((!), string, stringValue)
-import qualified Text.Blaze.Html5 as H
-import qualified Text.Blaze.Html5.Attributes as A
-
-import Text.Hakyll.Context (Context (..))
-import Text.Hakyll.ContextManipulations (changeValue)
-import Text.Hakyll.CreateContext (createPage)
-import Text.Hakyll.HakyllMonad (Hakyll)
-import Text.Hakyll.Regex
-import Text.Hakyll.HakyllAction
-import Text.Hakyll.Util
-import Text.Hakyll.Internal.Cache
-
--- | Type for a tag map.
---
--- This is a map associating tags or categories to the appropriate pages
--- using that tag or category. In the case of categories, each path will only
--- appear under one category - this is not the case with tags.
-type TagMap = M.Map String [HakyllAction () Context]
-
--- | Read a tag map. This is a internally used function that can be used for
--- tags as well as for categories.
-readMap :: (Context -> [String]) -- ^ Function to get tags from a context.
- -> String -- ^ Unique identifier for the tagmap.
- -> [FilePath]
- -> HakyllAction () TagMap
-readMap getTagsFunction identifier paths = HakyllAction
- { actionDependencies = paths
- , actionUrl = Right id
- , actionFunction = actionFunction'
- }
- where
- fileName = "tagmaps" </> identifier
-
- actionFunction' _ = do
- isCacheMoreRecent' <- isCacheMoreRecent fileName paths
- assocMap <- if isCacheMoreRecent'
- then M.fromAscList <$> getFromCache fileName
- else do assocMap' <- readTagMap'
- storeInCache (M.toAscList assocMap') fileName
- return assocMap'
- return $ M.map (map createPage) assocMap
-
- -- TODO: preserve order
- readTagMap' :: Hakyll (M.Map String [FilePath])
- readTagMap' = do
- pairs' <- concat <$> mapM pairs paths
- return $ M.fromListWith (flip (++)) pairs'
-
- -- | Read a page, and return an association list where every tag is
- -- associated with some paths. Of course, this will always be just one
- -- @FilePath@ here.
- pairs :: FilePath -> Hakyll [(String, [FilePath])]
- pairs path = do
- context <- runHakyllAction $ createPage path
- let tags = getTagsFunction context
- return $ map (\tag -> (tag, [path])) tags
-
--- | Read a @TagMap@, using the @tags@ metadata field.
-readTagMap :: String -- ^ Unique identifier for the map.
- -> [FilePath] -- ^ Paths to get tags from.
- -> HakyllAction () TagMap
-readTagMap = readMap getTagsFunction
- where
- getTagsFunction = map trim . splitRegex ","
- . fromMaybe [] . M.lookup "tags" . unContext
-
--- | Read a @TagMap@, using the subdirectories the pages are placed in.
-readCategoryMap :: String -- ^ Unique identifier for the map.
- -> [FilePath] -- ^ Paths to get tags from.
- -> HakyllAction () TagMap
-readCategoryMap = readMap $ maybeToList . M.lookup "category" . unContext
-
--- | Perform a @Hakyll@ action on every item in the tag
---
-withTagMap :: HakyllAction () TagMap
- -> (String -> [HakyllAction () Context] -> Hakyll ())
- -> Hakyll ()
-withTagMap tagMap function = runHakyllAction (tagMap >>> action)
- where
- action = createHakyllAction (mapM_ (uncurry function) . M.toList)
-
--- | Render a tag cloud.
-renderTagCloud :: (String -> String) -- ^ Function to produce an url for a tag.
- -> Float -- ^ Smallest font size, in percent.
- -> Float -- ^ Biggest font size, in percent.
- -> HakyllAction TagMap String
-renderTagCloud urlFunction minSize maxSize = createHakyllAction renderTagCloud'
- where
- renderTagCloud' tagMap =
- return $ intercalate " " $ map (renderTag tagMap) (tagCount tagMap)
-
- renderTag tagMap (tag, count) = renderHtml $
- H.a ! A.style (stringValue $ "font-size: " ++ sizeTag tagMap count)
- ! A.href (stringValue $ urlFunction tag)
- $ string tag
-
- sizeTag tagMap count = show (size' :: Int) ++ "%"
- where
- size' = floor $ minSize + relative tagMap count * (maxSize - minSize)
-
- minCount = minimum . map snd . tagCount
- maxCount = maximum . map snd . tagCount
- relative tagMap count = (count - minCount tagMap) /
- (1 + maxCount tagMap - minCount tagMap)
-
- tagCount = map (second $ fromIntegral . length) . M.toList
-
--- | Render all tags to links.
---
--- On your site, it is nice if you can display the tags on a page, but
--- naturally, most people would expect these are clickable.
---
--- So, this function takes a function to produce an url for a given tag, and
--- applies it on all tags.
---
--- Note that it is your own responsibility to ensure a page with such an url
--- exists.
-renderTagLinks :: (String -> String) -- ^ Function to produce an url for a tag.
- -> HakyllAction Context Context
-renderTagLinks urlFunction = changeValue "tags" renderTagLinks'
- where
- renderTagLinks' = intercalate ", "
- . map ((\t -> link t $ urlFunction t) . trim)
- . splitRegex ","
diff --git a/src/Text/Hakyll/Util.hs b/src/Text/Hakyll/Util.hs
deleted file mode 100644
index e032c52..0000000
--- a/src/Text/Hakyll/Util.hs
+++ /dev/null
@@ -1,34 +0,0 @@
--- | Miscellaneous text manipulation functions.
-module Text.Hakyll.Util
- ( trim
- , stripHtml
- , link
- ) where
-
-import Data.Char (isSpace)
-
-import Text.Blaze.Html5 ((!), string, stringValue, a)
-import Text.Blaze.Html5.Attributes (href)
-import Text.Blaze.Renderer.String (renderHtml)
-
--- | Trim a string (drop spaces, tabs and newlines at both sides).
-trim :: String -> String
-trim = reverse . trim' . reverse . trim'
- where
- trim' = dropWhile isSpace
-
--- | Strip html tags from the given string.
-stripHtml :: String -> String
-stripHtml [] = []
-stripHtml str = let (beforeTag, rest) = break (== '<') str
- (_, afterTag) = break (== '>') rest
- in beforeTag ++ stripHtml (drop 1 afterTag)
-
--- | Make a HTML link.
---
--- > link "foo" "bar.html" == "<a href='bar.html'>foo</a>"
-link :: String -- ^ Link text.
- -> String -- ^ Link destination.
- -> String
-link text destination = renderHtml $ a ! href (stringValue destination)
- $ string text
diff --git a/tests/CompressCss.hs b/tests/CompressCss.hs
deleted file mode 100644
index 164df59..0000000
--- a/tests/CompressCss.hs
+++ /dev/null
@@ -1,42 +0,0 @@
--- | Module testing @Text.Hakyll.Internal.CompressCss@.
-module CompressCss
- ( compressCssGroup
- ) where
-
-import qualified Data.Map as M
-
-import Data.Binary
-import Test.Framework (testGroup)
-import Test.Framework.Providers.HUnit
-import Test.Framework.Providers.QuickCheck2
-import Test.HUnit
-
-import Text.Hakyll.Internal.CompressCss
-
--- CompressCss test group.
-compressCssGroup = testGroup "CompressCss"
- [ testProperty "prop_compressCss_length" prop_compressCss_length
- , testCase "test_compressCss_1" test_compressCss_1
- , testCase "test_compressCss_2" test_compressCss_2
- , testCase "test_compressCss_3" test_compressCss_3
- , testCase "test_compressCss_4" test_compressCss_4
- ]
-
--- | Css compression should always decrease the text length.
-prop_compressCss_length str = length str >= length (compressCss str)
-
--- | compressCss test case 1.
-test_compressCss_1 = compressCss "a { \n color : red; }" @?= "a{color:red}"
-
--- | compressCss test case 2.
-test_compressCss_2 = compressCss "img {border :none;;;; }"
- @?= "img{border:none}"
-
--- | compressCss test case 3.
-test_compressCss_3 =
- compressCss "p {font-size : 90%;} h1 {color :white;;; }"
- @?= "p{font-size:90%}h1{color:white}"
-
--- | compressCss test case 4.
-test_compressCss_4 = compressCss "a { /* /* red is pretty cool */ color: red; }"
- @?= "a{color:red}"
diff --git a/tests/File.hs b/tests/File.hs
deleted file mode 100644
index 9c1ae67..0000000
--- a/tests/File.hs
+++ /dev/null
@@ -1,62 +0,0 @@
-module File
- ( fileGroup
- ) where
-
-import qualified Data.Map as M
-
-import Control.Applicative ((<$>))
-import Data.Binary
-import Test.Framework (testGroup)
-import Test.Framework.Providers.HUnit
-import Test.Framework.Providers.QuickCheck2
-import Test.HUnit
-import Test.QuickCheck
-
-import Text.Hakyll (runDefaultHakyll)
-import Text.Hakyll.File
-
--- File test group.
-fileGroup = testGroup "File"
- [ testCase "test_toRoot_1" test_toRoot_1
- , testCase "test_toRoot_2" test_toRoot_2
- , testCase "test_toRoot_3" test_toRoot_3
- , testCase "test_inHakyllDirectory_1" test_inHakyllDirectory_1
- , testCase "test_inHakyllDirectory_2" test_inHakyllDirectory_2
- , testCase "test_inHakyllDirectory_3" test_inHakyllDirectory_3
- , testCase "test_inHakyllDirectory_4" test_inHakyllDirectory_4
- , testCase "test_removeSpaces_1" test_removeSpaces_1
- , testCase "test_removeSpaces_2" test_removeSpaces_2
- , testCase "test_havingExtension_1" test_havingExtension_1
- , testCase "test_havingExtension_2" test_havingExtension_2
- ]
-
-
--- toRoot test cases
-test_toRoot_1 = toRoot "/posts/foo.html" @?= ".."
-test_toRoot_2 = toRoot "posts/foo.html" @?= ".."
-test_toRoot_3 = toRoot "foo.html" @?= "."
-
--- inHakyllDirectory test cases
-test_inHakyllDirectory_1 =
- (runDefaultHakyll $ inHakyllDirectory "_site/foo.html")
- @? "test_inHakyllDirectory_1"
-test_inHakyllDirectory_2 =
- (not <$> (runDefaultHakyll $ inHakyllDirectory "posts/foo.html"))
- @? "test_inHakyllDirectory_2"
-test_inHakyllDirectory_3 =
- (not <$> (runDefaultHakyll $ inHakyllDirectory "index.html"))
- @? "test_inHakyllDirectory_3"
-test_inHakyllDirectory_4 =
- (runDefaultHakyll $ inHakyllDirectory "_cache/index.html")
- @? "test_inHakyllDirectory_4"
-
--- removeSpaces test cases
-test_removeSpaces_1 = removeSpaces "$root/tags/random crap.html"
- @?= "$root/tags/random-crap.html"
-test_removeSpaces_2 = removeSpaces "another simple example.zip"
- @?= "another-simple-example.zip"
-
--- Having extension test cases
-test_havingExtension_1 = havingExtension ".foo" ["file.bar", "file.txt"] @?= []
-test_havingExtension_2 = havingExtension ".foo" ["file.foo", "file.txt"]
- @?= ["file.foo"]
diff --git a/tests/Hakyll/Core/DirectedGraph/Tests.hs b/tests/Hakyll/Core/DirectedGraph/Tests.hs
new file mode 100644
index 0000000..3e04b49
--- /dev/null
+++ b/tests/Hakyll/Core/DirectedGraph/Tests.hs
@@ -0,0 +1,36 @@
+module Hakyll.Core.DirectedGraph.Tests
+ ( tests
+ ) where
+
+import Data.Set (Set)
+import qualified Data.Set as S
+
+import Test.Framework
+import Test.Framework.Providers.HUnit
+import Test.HUnit hiding (Test)
+
+import Hakyll.Core.DirectedGraph
+import Hakyll.Core.DirectedGraph.DependencySolver
+
+tests :: [Test]
+tests =
+ [ testCase "solveDependencies [1]" solveDependencies1
+ ]
+
+node :: Ord a => a -> [a] -> (a, Set a)
+node t n = (t, S.fromList n)
+
+testGraph01 :: DirectedGraph Int
+testGraph01 = fromList
+ [ node 8 [2, 4, 6]
+ , node 2 [4, 3]
+ , node 4 [3]
+ , node 6 [4]
+ , node 3 []
+ ]
+
+solveDependencies1 :: Assertion
+solveDependencies1 = result == [3, 4, 2, 6, 8] || result == [3, 4, 2, 6, 8]
+ @? "solveDependencies1"
+ where
+ result = solveDependencies testGraph01
diff --git a/tests/Hakyll/Core/Identifier/Tests.hs b/tests/Hakyll/Core/Identifier/Tests.hs
new file mode 100644
index 0000000..43dd6c1
--- /dev/null
+++ b/tests/Hakyll/Core/Identifier/Tests.hs
@@ -0,0 +1,22 @@
+{-# LANGUAGE OverloadedStrings #-}
+module Hakyll.Core.Identifier.Tests
+ ( tests
+ ) where
+
+import Test.Framework
+import Test.HUnit hiding (Test)
+
+import Hakyll.Core.Identifier.Pattern
+import TestSuite.Util
+
+tests :: [Test]
+tests = fromAssertions "match"
+ [ Just ["bar"] @=? match "foo/**" "foo/bar"
+ , Just ["foo/bar"] @=? match "**" "foo/bar"
+ , Nothing @=? match "*" "foo/bar"
+ , Just [] @=? match "foo" "foo"
+ , Just ["foo"] @=? match "*/bar" "foo/bar"
+ , Just ["foo/bar"] @=? match "**/qux" "foo/bar/qux"
+ , Just ["foo/bar", "qux"] @=? match "**/*" "foo/bar/qux"
+ , Just ["foo", "bar/qux"] @=? match "*/**" "foo/bar/qux"
+ ]
diff --git a/tests/Hakyll/Core/Routes/Tests.hs b/tests/Hakyll/Core/Routes/Tests.hs
new file mode 100644
index 0000000..3361846
--- /dev/null
+++ b/tests/Hakyll/Core/Routes/Tests.hs
@@ -0,0 +1,24 @@
+{-# LANGUAGE OverloadedStrings #-}
+module Hakyll.Core.Routes.Tests
+ ( tests
+ ) where
+
+import Test.Framework
+import Test.HUnit hiding (Test)
+
+import Hakyll.Core.Routes
+import TestSuite.Util
+
+tests :: [Test]
+tests = fromAssertions "runRoutes"
+ [ Just "foo.html" @=? runRoutes (setExtension "html") "foo"
+ , Just "foo.html" @=? runRoutes (setExtension ".html") "foo"
+ , Just "foo.html" @=? runRoutes (setExtension "html") "foo.markdown"
+ , Just "foo.html" @=? runRoutes (setExtension ".html") "foo.markdown"
+
+ , Just "tags/bar.xml" @=?
+ runRoutes (gsubRoute "rss/" (const "")) "tags/rss/bar.xml"
+ , Just "tags/bar.xml" @=?
+ runRoutes (gsubRoute "rss/" (const "") `composeRoutes`
+ setExtension "xml") "tags/rss/bar"
+ ]
diff --git a/tests/Hakyll/Web/Page/Tests.hs b/tests/Hakyll/Web/Page/Tests.hs
new file mode 100644
index 0000000..b44daca
--- /dev/null
+++ b/tests/Hakyll/Web/Page/Tests.hs
@@ -0,0 +1,32 @@
+{-# LANGUAGE OverloadedStrings #-}
+module Hakyll.Web.Page.Tests
+ ( tests
+ ) where
+
+import Test.Framework
+import Test.HUnit hiding (Test)
+
+import qualified Data.Map as M
+
+import Hakyll.Web.Page
+import Hakyll.Web.Page.Read
+import TestSuite.Util
+
+tests :: [Test]
+tests = fromAssertions "readPage"
+ [ Page (M.singleton "foo" "bar") "body\n" @=? readPage
+ "--- \n\
+ \foo: bar \n\
+ \--- \n\
+ \body"
+
+ , Page M.empty "line one\nlijn twee\n" @=? readPage
+ "line one\n\
+ \lijn twee"
+
+ , Page (M.fromList [("field1", "unos"), ("veld02", "deux")]) "" @=? readPage
+ "---\n\
+ \veld02: deux\n\
+ \field1: unos\n\
+ \---"
+ ]
diff --git a/tests/Hakyll/Web/RelativizeUrls/Tests.hs b/tests/Hakyll/Web/RelativizeUrls/Tests.hs
new file mode 100644
index 0000000..05971ad
--- /dev/null
+++ b/tests/Hakyll/Web/RelativizeUrls/Tests.hs
@@ -0,0 +1,20 @@
+{-# LANGUAGE OverloadedStrings #-}
+module Hakyll.Web.RelativizeUrls.Tests
+ ( tests
+ ) where
+
+import Test.Framework
+import Test.HUnit hiding (Test)
+
+import Hakyll.Web.RelativizeUrls
+import TestSuite.Util
+
+tests :: [Test]
+tests = fromAssertions "relativizeUrls"
+ [ "<a href=\"../foo\">bar</a>" @=?
+ relativizeUrls ".." "<a href=\"/foo\">bar</a>"
+ , "<img src=\"../../images/lolcat.png\"></img>" @=?
+ relativizeUrls "../.." "<img src=\"/images/lolcat.png\" />"
+ , "<a href=\"http://haskell.org\">Haskell</a>" @=?
+ relativizeUrls "../.." "<a href=\"http://haskell.org\">Haskell</a>"
+ ]
diff --git a/tests/Hakyll/Web/Template/Tests.hs b/tests/Hakyll/Web/Template/Tests.hs
new file mode 100644
index 0000000..c772fa8
--- /dev/null
+++ b/tests/Hakyll/Web/Template/Tests.hs
@@ -0,0 +1,44 @@
+{-# LANGUAGE OverloadedStrings #-}
+module Hakyll.Web.Template.Tests
+ ( tests
+ ) where
+
+import Test.Framework
+import Test.HUnit hiding (Test)
+
+import qualified Data.Map as M
+
+import Hakyll.Web.Page
+import Hakyll.Web.Template
+import Hakyll.Web.Template.Read
+import TestSuite.Util
+
+tests :: [Test]
+tests = fromAssertions "applyTemplate"
+ -- Hakyll templates
+ [ applyTemplateAssertion readTemplate
+ "bar" "$foo$" [("foo", "bar")]
+
+ , applyTemplateAssertion readTemplate
+ "$ barqux" "$$ $foo$$bar$" [("foo", "bar"), ("bar", "qux")]
+
+ -- Hamlet templates
+ , applyTemplateAssertion readHamletTemplate
+ "<head><title>notice</title></head><body>A paragraph</body>"
+ "<head\n\
+ \ <title>#{title}\n\
+ \<body\n\
+ \ A paragraph\n"
+ [("title", "notice")]
+ ]
+
+-- | Utility function to create quick template tests
+--
+applyTemplateAssertion :: (String -> Template) -- ^ Template parser
+ -> String -- ^ Expected
+ -> String -- ^ Template
+ -> [(String, String)] -- ^ Page
+ -> Assertion -- ^ Resulting assertion
+applyTemplateAssertion parser expected template page =
+ expected @=? pageBody (applyTemplate (parser template)
+ (fromMap $ M.fromList page))
diff --git a/tests/Main.hs b/tests/Main.hs
deleted file mode 100644
index adcb613..0000000
--- a/tests/Main.hs
+++ /dev/null
@@ -1,22 +0,0 @@
-module Main where
-
-import Test.Framework (defaultMain, testGroup)
-import Test.Framework.Providers.HUnit
-import Test.Framework.Providers.QuickCheck2
-
-import CompressCss
-import File
-import Page
-import Regex
-import Template
-import Util
-
--- | Run all tests.
-main :: IO ()
-main = defaultMain [ compressCssGroup
- , fileGroup
- , pageGroup
- , regexGroup
- , templateGroup
- , utilGroup
- ]
diff --git a/tests/Page.hs b/tests/Page.hs
deleted file mode 100644
index be49ad0..0000000
--- a/tests/Page.hs
+++ /dev/null
@@ -1,92 +0,0 @@
-module Page
- ( pageGroup
- ) where
-
-import qualified Data.Map as M
-
-import Control.Monad.Reader (runReaderT)
-import Data.Binary
-import Test.Framework (testGroup)
-import Test.Framework.Providers.HUnit
-import Test.Framework.Providers.QuickCheck2
-import Test.HUnit
-import System.Directory (getTemporaryDirectory, removeFile)
-import System.FilePath ((</>))
-
-import Text.Hakyll.Page
-import Text.Hakyll.Pandoc
-import Text.Hakyll.Context
-import Text.Hakyll.HakyllAction
-import Text.Hakyll
-
--- Page test group.
-pageGroup = testGroup "Page"
- [ testCase "test_readPage_1" test_readPage_1
- , testCase "test_readPage_2" test_readPage_2
- , testCase "test_readPage_3" test_readPage_3
- , testCase "test_readPage_4" test_readPage_4
- ]
-
--- | An abstract function to test page reading.
-test_readPage :: FilePath -- ^ Filename to give to the temporary file.
- -> String -- ^ Content to put in the file.
- -> (Context -> Bool) -- ^ Assertion to run on the result Context.
- -> IO Bool -- ^ Result of the assertion.
-test_readPage fileName content assertion = do
- temporaryDir <- getTemporaryDirectory
- let temporaryFile = temporaryDir </> fileName
- writeFile temporaryFile content
- page <- runDefaultHakyll (runHakyllAction $ createPage temporaryFile)
- removeFile temporaryFile
- return $ assertion page
-
--- | readPage test case 1.
-test_readPage_1 = test_readPage fileName content assertion @? "test_readPage_1"
- where
- fileName = "test_readPage_1.markdown"
- content = unlines [ "---"
- , "author: Eric Cartman"
- , "---"
- , "This is a simple test."
- ]
- assertion page = M.lookup "author" (unContext page) == Just "Eric Cartman"
-
--- | readPage test case 2.
-test_readPage_2 = test_readPage fileName content assertion @? "test_readPage_2"
- where
- fileName = "test_readPage_2.txt"
- content = unlines [ "--- someSection"
- , "This is a section."
- , "---"
- , "This is the body."
- ]
- assertion page =
- let m = unContext page
- in M.lookup "someSection" m == Just "This is a section.\n"
- && M.lookup "body" m == Just "This is the body.\n"
-
--- | readPage test case 3.
-test_readPage_3 = test_readPage fileName content assertion @? "test_readPage_3"
- where
- fileName = "test_readPage_3.txt"
- content = unlines [ "No metadata here, sorry."
- ]
- assertion page =
- M.lookup "body" (unContext page) == Just "No metadata here, sorry.\n"
-
--- | readPage test case 4.
-test_readPage_4 = test_readPage fileName content assertion @? "test_readPage_4"
- where
- fileName = "test_readPage_4.txt"
- content = unlines [ "--- section"
- , "This is a section."
- , "---"
- , "Header"
- , "------"
- , "The header is not a separate section."
- ]
- assertion page = M.lookup "body" (unContext page) == Just body
- body = unlines [ "Header"
- , "------"
- , "The header is not a separate section."
- ]
diff --git a/tests/Regex.hs b/tests/Regex.hs
deleted file mode 100644
index 5aac932..0000000
--- a/tests/Regex.hs
+++ /dev/null
@@ -1,24 +0,0 @@
-module Regex
- ( regexGroup
- ) where
-
-import qualified Data.Map as M
-
-import Data.Binary
-import Test.Framework (testGroup)
-import Test.Framework.Providers.HUnit
-import Test.Framework.Providers.QuickCheck2
-import Test.HUnit
-import Test.QuickCheck
-
-import Text.Hakyll.Regex
-
--- Regex test group.
-regexGroup = testGroup "Regex"
- [ testCase "test_splitRegex_1" test_splitRegex_1
- , testCase "test_splitRegex_2" test_splitRegex_2
- ]
-
--- Split Regex test cases.
-test_splitRegex_1 = splitRegex "," "1,2,3" @?= ["1", "2", "3"]
-test_splitRegex_2 = splitRegex "," ",1,2," @?= ["1", "2"]
diff --git a/tests/Template.hs b/tests/Template.hs
deleted file mode 100644
index 648e3de..0000000
--- a/tests/Template.hs
+++ /dev/null
@@ -1,70 +0,0 @@
-module Template
- ( templateGroup
- ) where
-
-import qualified Data.Map as M
-import Control.Applicative ((<$>))
-import Control.Monad (replicateM)
-import Data.Monoid (mempty)
-
-import Data.Binary
-import Test.Framework (testGroup)
-import Test.Framework.Providers.HUnit
-import Test.Framework.Providers.QuickCheck2
-import Test.HUnit
-import Test.QuickCheck
-
-import Text.Hakyll.Context (Context (..))
-import Text.Hakyll.Internal.Template
-import Text.Hakyll.Internal.Template.Template
-
--- Template test group.
-templateGroup = testGroup "Template"
- [ testProperty "prop_template_encode_id" prop_template_encode_id
- , testProperty "prop_substitute_id" prop_substitute_id
- , testCase "test_substitute_1" test_substitute_1
- , testCase "test_substitute_2" test_substitute_2
- ]
-
--- | Generate arbitrary templates from a given length.
---
-instance Arbitrary TemplateElement where
- arbitrary = oneof
- -- Random chunk
- [ Chunk <$> do
- string <- arbitrary
- let sanitized = filter (/= '$') string
- return $ if null sanitized then "foo" else sanitized
- -- Random identifier
- , fmap Identifier $
- choose (5, 10) >>= flip replicateM (choose ('a', 'z'))
- -- Escape character
- , return EscapeCharacter
- ]
-
--- | Make @Template@ testable.
-instance Arbitrary Template where
- arbitrary = Template <$> arbitrary
- shrink = map Template . shrink . unTemplate
-
--- Test encoding/decoding of templates.
-prop_template_encode_id :: Template -> Bool
-prop_template_encode_id template = decode (encode template) == template
-
--- Check we get the same sting with empty substitutions.
-prop_substitute_id string =
- regularSubstitute (fromString string) mempty == string
-
--- substitute test case 1.
-test_substitute_1 =
- finalSubstitute template context @?= "Banana costs $4."
- where
- template = fromString "$product costs $$$price."
- context = Context $ M.fromList [("product", "Banana"), ("price", "4")]
-
--- substitute test case 2.
-test_substitute_2 =
- regularSubstitute template context @?= "$$root is a special key."
- where
- template = fromString "$$root is a special $thing."
- context = Context $ M.fromList [("root", "foo"), ("thing", "key")]
diff --git a/tests/TestSuite.hs b/tests/TestSuite.hs
new file mode 100644
index 0000000..8787bbd
--- /dev/null
+++ b/tests/TestSuite.hs
@@ -0,0 +1,26 @@
+module TestSuite where
+
+import Test.Framework (defaultMain, testGroup)
+
+import qualified Hakyll.Core.DirectedGraph.Tests
+import qualified Hakyll.Core.Identifier.Tests
+import qualified Hakyll.Core.Routes.Tests
+import qualified Hakyll.Web.Page.Tests
+import qualified Hakyll.Web.RelativizeUrls.Tests
+import qualified Hakyll.Web.Template.Tests
+
+main :: IO ()
+main = defaultMain
+ [ testGroup "Hakyll.Core.DirectedGraph.Tests"
+ Hakyll.Core.DirectedGraph.Tests.tests
+ , testGroup "Hakyll.Core.Identifier.Tests"
+ Hakyll.Core.Identifier.Tests.tests
+ , testGroup "Hakyll.Core.Routes.Tests"
+ Hakyll.Core.Routes.Tests.tests
+ , testGroup "Hakyll.Web.Page.Tests"
+ Hakyll.Web.Page.Tests.tests
+ , testGroup "Hakyll.Web.RelativizeUrls.Tests"
+ Hakyll.Web.RelativizeUrls.Tests.tests
+ , testGroup "Hakyll.Web.Template.Tests"
+ Hakyll.Web.Template.Tests.tests
+ ]
diff --git a/tests/TestSuite/Util.hs b/tests/TestSuite/Util.hs
new file mode 100644
index 0000000..f32bcad
--- /dev/null
+++ b/tests/TestSuite/Util.hs
@@ -0,0 +1,16 @@
+-- | Test utilities
+--
+module TestSuite.Util
+ ( fromAssertions
+ ) where
+
+import Test.Framework
+import Test.Framework.Providers.HUnit
+import Test.HUnit hiding (Test)
+
+fromAssertions :: String -- ^ Name
+ -> [Assertion] -- ^ Cases
+ -> [Test] -- ^ Result tests
+fromAssertions name = zipWith testCase names
+ where
+ names = map (\n -> name ++ " [" ++ show n ++ "]") [1 :: Int ..]
diff --git a/tests/Util.hs b/tests/Util.hs
deleted file mode 100644
index 4b29f5f..0000000
--- a/tests/Util.hs
+++ /dev/null
@@ -1,54 +0,0 @@
-module Util
- ( utilGroup
- ) where
-
-import Data.Char
-
-import Test.QuickCheck
-import Test.Framework (testGroup)
-import Test.Framework.Providers.HUnit
-import Test.Framework.Providers.QuickCheck2
-import Test.HUnit
-
-import Text.Hakyll.Util
-
--- Util test group.
-utilGroup = testGroup "Util"
- [ testProperty "prop_trim_length" prop_trim_length
- , testProperty "prop_trim_id" prop_trim_id
- , testProperty "prop_stripHtml_length" prop_stripHtml_length
- , testProperty "prop_stripHtml_id" prop_stripHtml_id
- , testCase "test_stripHtml_1" test_stripHtml_1
- , testCase "test_stripHtml_2" test_stripHtml_2
- , testCase "test_stripHtml_3" test_stripHtml_3
- , testCase "test_link_1" test_link_1
- , testCase "test_link_2" test_link_2
- ]
-
--- Test that a string always becomes shorter when trimmed.
-prop_trim_length str = length str >= length (trim str)
-
--- Check that a string which does not start or end with a space is not trimmed.
-prop_trim_id str = (not $ null str) && isAlreadyTrimmed ==> str == (trim str)
- where
- isAlreadyTrimmed :: Bool
- isAlreadyTrimmed = (not $ isSpace $ head str) && (not $ isSpace $ last str)
-
--- Check that a stripped string is shorter.
-prop_stripHtml_length str = length str >= length (stripHtml str)
-
--- Check that strings without tags remain untouched.
-prop_stripHtml_id str = (not $ any (`elem` ['>', '<']) str)
- ==> str == stripHtml str
-
--- Strip Html test cases.
-test_stripHtml_1 = stripHtml "<b>text</b>" @?= "text"
-test_stripHtml_2 = stripHtml "text" @?= "text"
-test_stripHtml_3 =
- stripHtml "<b>Hakyll</b>, a <i>website</i> generator<img src=\"foo.png\" />"
- @?= "Hakyll, a website generator"
-
--- Link test cases.
-test_link_1 = link "foo bar" "/foo/bar.html"
- @?= "<a href=\"/foo/bar.html\">foo bar</a>"
-test_link_2 = link "back home" "/" @?= "<a href=\"/\">back home</a>"