diff options
author | John MacFarlane <jgm@berkeley.edu> | 2018-10-27 11:03:03 -0700 |
---|---|---|
committer | John MacFarlane <jgm@berkeley.edu> | 2018-10-27 11:03:03 -0700 |
commit | f5e26e4512c1a797fa3a20cbc451f5d56b248870 (patch) | |
tree | 285f6244eb74ef6fe52a3bf6bc5d0e804356b156 /src/Text/Pandoc/Readers/Roff.hs | |
parent | a3b351c3a627b9eea080475d5e36fe9a50911831 (diff) | |
download | pandoc-f5e26e4512c1a797fa3a20cbc451f5d56b248870.tar.gz |
Roff tokenizer: check for first-column before parsing macro.
Also add SourcePos as argument to lexRoff, so we can pass in
current source pos when parsing a table cell.
Closes #5025.
Diffstat (limited to 'src/Text/Pandoc/Readers/Roff.hs')
-rw-r--r-- | src/Text/Pandoc/Readers/Roff.hs | 12 |
1 file changed, 8 insertions, 4 deletions
diff --git a/src/Text/Pandoc/Readers/Roff.hs b/src/Text/Pandoc/Readers/Roff.hs
index a98678a30..e83821ed4 100644
--- a/src/Text/Pandoc/Readers/Roff.hs
+++ b/src/Text/Pandoc/Readers/Roff.hs
@@ -311,6 +311,7 @@ lexComment = do
 lexMacro :: PandocMonad m => RoffLexer m RoffTokens
 lexMacro = do
   pos <- getPosition
+  guard $ sourceColumn pos == 1
   char '.' <|> char '\''
   skipMany spacetab
   macroName <- many (satisfy (not . isSpace))
@@ -369,7 +370,9 @@ lexTableRows = do
   return $ zip aligns rows
 
 tableCell :: PandocMonad m => RoffLexer m RoffTokens
-tableCell = (enclosedCell <|> simpleCell) >>= lexRoff . T.pack
+tableCell = do
+  pos <- getPosition
+  (enclosedCell <|> simpleCell) >>= lexRoff pos . T.pack
   where
     enclosedCell = do
       try (string "T{")
@@ -642,9 +645,10 @@ linePartsToString = mconcat . map go
   go _ = mempty
 
 -- | Tokenize a string as a sequence of groff tokens.
-lexRoff :: PandocMonad m => T.Text -> m RoffTokens
-lexRoff txt = do
-  eithertokens <- readWithM (mconcat <$> many manToken) def (T.unpack txt)
+lexRoff :: PandocMonad m => SourcePos -> T.Text -> m RoffTokens
+lexRoff pos txt = do
+  eithertokens <- readWithM (do setPosition pos
+                                mconcat <$> many manToken) def (T.unpack txt)
   case eithertokens of
     Left e -> throwError e
     Right tokenz -> return tokenz