T.P.Readers.LaTeX: Don't export tokenize, untokenize.
[API change] These were only exported for testing, which seems the wrong thing to do. They don't belong in the public API, and they are not really usable as exported anyway, since the Tok type they operate on is not exported. Removed the tokenize/untokenize roundtrip test from the test suite. A QuickCheck property is left in a comment so it can be used when this code is touched (if it ever is).
parent 2463fbf61d
commit 7e38b8e55a

3 changed files with 10 additions and 18 deletions
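With tokenize and untokenize no longer exported, outside code is expected to go through the reader's public entry points instead. Below is a minimal sketch of such a caller (hypothetical, not part of this commit), assuming only the exported API: readLaTeX from the reader module, plus def and runPure as re-exported by Text.Pandoc; the input string is an arbitrary example.

    import qualified Data.Text as T
    import Text.Pandoc (def, runPure)
    import Text.Pandoc.Readers.LaTeX (readLaTeX)

    -- Parse a small LaTeX snippet through the still-exported readLaTeX
    -- entry point; the tokenizer is now an implementation detail.
    main :: IO ()
    main = print (runPure (readLaTeX def (T.pack "\\emph{hello} world")))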
@@ -22,8 +22,6 @@ module Text.Pandoc.Readers.LaTeX ( readLaTeX,
                                     rawLaTeXInline,
                                     rawLaTeXBlock,
                                     inlineCommand,
-                                    tokenize,
-                                    untokenize
                                   ) where

 import Control.Applicative (many, optional, (<|>))

@@ -292,6 +292,15 @@ applyMacros s = (guardDisabled Ext_latex_macros >> return s) <|>
            Left e -> Prelude.fail (show e)
            Right s' -> return s'

+{-
+When tokenize or untokenize change, test with this
+QuickCheck property:
+
+> tokUntokRoundtrip :: String -> Bool
+> tokUntokRoundtrip s =
+>   let t = T.pack s in untokenize (tokenize "random" t) == t
+-}
+
 tokenize :: SourceName -> Text -> [Tok]
 tokenize sourcename = totoks (initialPos sourcename)

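The property added in the comment above is meant for ad-hoc checking whenever the tokenizer changes. A minimal sketch of how it could be run, assuming it is pasted into the module itself (or into a REPL session with the module's internals in scope), since tokenize, untokenize and Tok are no longer exported:

    import qualified Data.Text as T
    import Test.QuickCheck (quickCheck)

    -- Roundtrip property from the comment above; it only typechecks where
    -- the internal tokenize and untokenize are visible.
    tokUntokRoundtrip :: String -> Bool
    tokUntokRoundtrip s =
      let t = T.pack s in untokenize (tokenize "random" t) == t

    -- Then run: quickCheck tokUntokRoundtrip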
@@ -15,10 +15,8 @@ module Tests.Readers.LaTeX (tests) where
 import Data.Text (Text)
 import qualified Data.Text as T
 import qualified Text.Pandoc.UTF8 as UTF8
-import Text.Pandoc.Readers.LaTeX (tokenize, untokenize)
 import Test.Tasty
 import Test.Tasty.HUnit
-import Test.Tasty.QuickCheck
 import Tests.Helpers
 import Text.Pandoc
 import Text.Pandoc.Arbitrary ()

@@ -47,21 +45,8 @@ simpleTable' aligns rows
   where
     toRow = Row nullAttr . map simpleCell

-tokUntokRt :: String -> Bool
-tokUntokRt s = untokenize (tokenize "random" t) == t
-  where t = T.pack s
-
 tests :: [TestTree]
-tests = [ testGroup "tokenization"
-          [ testCase "tokenizer round trip on test case" $ do
-              orig <- UTF8.readFile "../test/latex-reader.latex"
-              let new = untokenize $ tokenize "../test/latex-reader.latex"
-                          orig
-              assertEqual "untokenize . tokenize is identity" orig new
-          , testProperty "untokenize . tokenize is identity" tokUntokRt
-          ]
-
-        , testGroup "basic"
+tests = [ testGroup "basic"
           [ "simple" =:
             "word" =?> para "word"
           , "space" =:
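For reference, if the internals were ever exposed to the test suite again, the deleted property test could be wired back into tasty essentially as it was. A sketch reconstructed from the removed lines above, assuming tokenize and untokenize are importable (which this commit deliberately prevents):

    import qualified Data.Text as T
    import Test.Tasty (TestTree, testGroup)
    import Test.Tasty.QuickCheck (testProperty)

    -- Reconstruction of the deleted test-suite code.
    tokUntokRt :: String -> Bool
    tokUntokRt s = untokenize (tokenize "random" t) == t
      where t = T.pack s

    tokenizationTests :: TestTree
    tokenizationTests = testGroup "tokenization"
      [ testProperty "untokenize . tokenize is identity" tokUntokRt ]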