-rw-r--r--  src/Text/Pandoc/Readers/LaTeX.hs          |  2 --
-rw-r--r--  src/Text/Pandoc/Readers/LaTeX/Parsing.hs  |  9 +++++++++
-rw-r--r--  test/Tests/Readers/LaTeX.hs               | 17 +----------------
3 files changed, 10 insertions(+), 18 deletions(-)
diff --git a/src/Text/Pandoc/Readers/LaTeX.hs b/src/Text/Pandoc/Readers/LaTeX.hs
index 772263578..e63fbc185 100644
--- a/src/Text/Pandoc/Readers/LaTeX.hs
+++ b/src/Text/Pandoc/Readers/LaTeX.hs
@@ -22,8 +22,6 @@ module Text.Pandoc.Readers.LaTeX ( readLaTeX,
                                    rawLaTeXInline,
                                    rawLaTeXBlock,
                                    inlineCommand,
-                                   tokenize,
-                                   untokenize
                                  ) where
 
 import Control.Applicative (many, optional, (<|>))
diff --git a/src/Text/Pandoc/Readers/LaTeX/Parsing.hs b/src/Text/Pandoc/Readers/LaTeX/Parsing.hs
index a5a39d3c9..db58b333d 100644
--- a/src/Text/Pandoc/Readers/LaTeX/Parsing.hs
+++ b/src/Text/Pandoc/Readers/LaTeX/Parsing.hs
@@ -292,6 +292,15 @@ applyMacros s = (guardDisabled Ext_latex_macros >> return s) <|>
             Left e -> Prelude.fail (show e)
             Right s' -> return s'
 
+{-
+When tokenize or untokenize change, test with this
+QuickCheck property:
+
+> tokUntokRoundtrip :: String -> Bool
+> tokUntokRoundtrip s =
+>   let t = T.pack s in untokenize (tokenize "random" t) == t
+-}
+
 tokenize :: SourceName -> Text -> [Tok]
 tokenize sourcename = totoks (initialPos sourcename)
 
diff --git a/test/Tests/Readers/LaTeX.hs b/test/Tests/Readers/LaTeX.hs
index 8385b751e..9388fd040 100644
--- a/test/Tests/Readers/LaTeX.hs
+++ b/test/Tests/Readers/LaTeX.hs
@@ -15,10 +15,8 @@ module Tests.Readers.LaTeX (tests) where
 import Data.Text (Text)
 import qualified Data.Text as T
 import qualified Text.Pandoc.UTF8 as UTF8
-import Text.Pandoc.Readers.LaTeX (tokenize, untokenize)
 import Test.Tasty
 import Test.Tasty.HUnit
-import Test.Tasty.QuickCheck
 import Tests.Helpers
 import Text.Pandoc
 import Text.Pandoc.Arbitrary ()
@@ -47,21 +45,8 @@ simpleTable' aligns rows
   where toRow = Row nullAttr . map simpleCell
 
-tokUntokRt :: String -> Bool
-tokUntokRt s = untokenize (tokenize "random" t) == t
-  where t = T.pack s
-
 tests :: [TestTree]
-tests = [ testGroup "tokenization"
-          [ testCase "tokenizer round trip on test case" $ do
-              orig <- UTF8.readFile "../test/latex-reader.latex"
-              let new = untokenize $ tokenize "../test/latex-reader.latex"
-                          orig
-              assertEqual "untokenize . tokenize is identity" orig new
-          , testProperty "untokenize . tokenize is identity" tokUntokRt
-          ]
-
-        , testGroup "basic"
+tests = [ testGroup "basic"
           [ "simple" =:
             "word" =?> para "word"
           , "space" =:
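
Note: the round-trip check dropped from the test suite here can still be run by hand whenever tokenize or untokenize are touched again, as the new comment in Parsing.hs suggests. The following is a minimal sketch, not part of the commit, and it assumes you are working inside the pandoc source tree (Text.Pandoc.Readers.LaTeX.Parsing is an internal module) and that tokenize and untokenize remain exported from it; the file name TokRoundtrip.hs is made up for illustration.

-- TokRoundtrip.hs: hand-run version of the property that this commit
-- moves from the test suite into a comment. Sketch only; assumes
-- Text.Pandoc.Readers.LaTeX.Parsing still exports tokenize/untokenize.
import qualified Data.Text as T
import Test.QuickCheck (quickCheck)
import Text.Pandoc.Readers.LaTeX.Parsing (tokenize, untokenize)

-- untokenize should reproduce exactly the Text that tokenize consumed;
-- the "random" argument is only a source name used for source positions.
tokUntokRoundtrip :: String -> Bool
tokUntokRoundtrip s =
  let t = T.pack s in untokenize (tokenize "random" t) == t

main :: IO ()
main = quickCheck tokUntokRoundtrip

Running main checks 100 random strings under QuickCheck's default settings; the file-based test removed above exercised the same identity on the larger ../test/latex-reader.latex document.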