T.P.Readers.LaTeX.Parsing: don't export totoks.
Make the first param of `tokenize` a SourcePos instead of SourceName, and use it instead of `totoks`.
parent 55cc9040cb
commit 4214218256

3 changed files with 12 additions and 13 deletions
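At every call site the change is mechanical: where a bare `SourceName` used to be passed, a full `SourcePos` (source name plus line and column) is now constructed, typically with parsec's `initialPos`, or passed through from a caller that already holds one. A standalone sketch of the position primitives involved (plain parsec API, nothing pandoc-specific):

```haskell
import Text.Parsec.Pos

main :: IO ()
main = do
  -- initialPos names a source and starts at line 1, column 1; this is
  -- what the old SourceName-based tokenize constructed internally.
  let p0 = initialPos "source"
  print (sourceName p0, sourceLine p0, sourceColumn p0)  -- ("source",1,1)
  -- A caller already holding a mid-file position can now hand it to
  -- tokenize directly instead of restarting at line 1.
  print (newPos "main.tex" 42 7)
```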
@@ -126,7 +126,8 @@ resolveRefs _ x = x
 -- res <- runIOorExplode (runParserT p defaultLaTeXState{
 --          sOptions = def{ readerExtensions =
 --            enableExtension Ext_raw_tex $
---              getDefaultExtensions "latex" }} "source" (tokenize "source" t))
+--              getDefaultExtensions "latex" }} "source"
+--              (tokenize (initialPos "source") t))
 -- case res of
 --      Left e -> error (show e)
 --      Right r -> return r
@@ -721,7 +722,7 @@ insertIncluded defaultExtension f' = do
        Nothing -> do
          report $ CouldNotLoadIncludeFile (T.pack f) pos
          return ""
-  getInput >>= setInput . (tokenize f contents ++)
+  getInput >>= setInput . (tokenize (initialPos f) contents ++)
   updateState dropLatestIncludeFile

 authors :: PandocMonad m => LP m ()
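For an included file it is correct to start at line 1, column 1 of that file, hence `initialPos f`. A minimal sketch of the splicing pattern, with a hypothetical mini `Tok` standing in for pandoc's token type:

```haskell
{-# LANGUAGE OverloadedStrings #-}
import Text.Parsec.Pos
import Data.Text (Text)
import qualified Data.Text as T

-- Hypothetical stand-in for pandoc's Tok, for illustration only.
data Tok = Tok SourcePos Text deriving Show

-- Toy word tokenizer with the new calling convention: the caller
-- supplies the starting position (column bookkeeping is crude here).
tokenize :: SourcePos -> Text -> [Tok]
tokenize pos = zipWith tok [0..] . T.words
  where tok i = Tok (incSourceColumn pos i)

main :: IO ()
main = do
  let inp      = tokenize (newPos "main.tex" 5 1) "after include"
      included = tokenize (initialPos "chapter.tex") "included text"
  -- insertIncluded splices the included file's tokens ahead of the
  -- remaining input, as in: tokenize (initialPos f) contents ++ inp
  mapM_ print (included ++ inp)
```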
@@ -96,7 +96,7 @@ verbTok stopchar = do
   let (t1, t2) = T.splitAt i txt
   inp <- getInput
   setInput $ Tok (incSourceColumn pos i) Symbol (T.singleton stopchar)
-           : totoks (incSourceColumn pos (i + 1)) (T.drop 1 t2) ++ inp
+           : tokenize (incSourceColumn pos (i + 1)) (T.drop 1 t2) ++ inp
   return $ Tok pos toktype t1

 listingsLanguage :: [(Text, Text)] -> Maybe Text
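`verbTok` finds the stop character `i` columns into the current token, emits it as a `Symbol`, and re-tokenizes the remainder; the `i + 1` skips past the stop character itself. The column arithmetic uses parsec's `incSourceColumn` directly, which a small demo makes concrete:

```haskell
import Text.Parsec.Pos

main :: IO ()
main = do
  let pos = newPos "verb.tex" 3 10  -- start of the verbatim token
      i   = 4                       -- stop character found 4 columns in
  print (incSourceColumn pos i)        -- the stop character's position
  print (incSourceColumn pos (i + 1))  -- where tokenizing resumes
```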
@@ -32,7 +32,6 @@ module Text.Pandoc.Readers.LaTeX.Parsing
   , getInputTokens
   , untokenize
   , untoken
-  , totoks
   , toksToString
   , satisfyTok
   , parseFromToks
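With `totoks` dropped from the export list, downstream code goes through `tokenize` instead. A sketch of the migration, assuming `tokenize` remains in the module's export list (the commit message implies it does; the fragment above doesn't show that part of the list):

```haskell
import Text.Pandoc.Readers.LaTeX.Parsing (tokenize)  -- totoks no longer exported
import Text.Parsec.Pos (initialPos)
import qualified Data.Text as T

-- Where external code once called `totoks pos txt`, it now calls
-- `tokenize pos txt`; both have the same SourcePos-first shape.
main :: IO ()
main = print (length (tokenize (initialPos "input.tex") (T.pack "\\emph{hi}")))
```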
@@ -307,7 +306,7 @@ applyMacros s = (guardDisabled Ext_latex_macros >> return s) <|>
   pstate <- getState
   let lstate = def{ sOptions = extractReaderOptions pstate
                   , sMacros = extractMacros pstate :| [] }
-  res <- runParserT retokenize lstate "math" (tokenize "math" s)
+  res <- runParserT retokenize lstate "math" (tokenize (initialPos "math") s)
   case res of
        Left e -> Prelude.fail (show e)
        Right s' -> return s'
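Note that `runParserT` still takes its own `SourceName` argument (`"math"`) for error reporting; the stream's positions now come from `tokenize (initialPos "math") s`. A standalone reminder of `runParserT`'s shape, using a plain `String` stream:

```haskell
import Text.Parsec

-- runParserT :: Stream s m t
--            => ParsecT s u m a -> u -> SourceName -> s -> m (Either ParseError a)
-- The SourceName only labels errors; token positions travel with the stream.
main :: IO ()
main = do
  res <- runParserT (many1 digit <* eof) () "math" "123"
  print (res :: Either ParseError String)
```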
@@ -324,7 +323,7 @@ QuickCheck property:
 tokenizeSources :: Sources -> [Tok]
 tokenizeSources = concatMap tokenizeSource . unSources
  where
-  tokenizeSource (pos, t) = totoks pos t
+  tokenizeSource (pos, t) = tokenize pos t

 -- Return tokens from input sources. Ensure that starting position is
 -- correct.
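The hunk sits just under a comment about a QuickCheck property, presumably a tokenize/untokenize roundtrip. A toy illustration of that style of law, with stand-in functions rather than pandoc's actual ones:

```haskell
import Test.QuickCheck
import Data.Text (Text)
import qualified Data.Text as T

-- Stand-ins: split into runs of spaces/non-spaces, then glue back.
tokenize' :: Text -> [Text]
tokenize' = T.groupBy (\a b -> (a == ' ') == (b == ' '))

untokenize' :: [Text] -> Text
untokenize' = T.concat

-- Roundtrip law: tokenizing loses no text.
prop_roundtrip :: String -> Bool
prop_roundtrip s = untokenize' (tokenize' (T.pack s)) == T.pack s

main :: IO ()
main = quickCheck prop_roundtrip
```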
@@ -337,12 +336,11 @@ getInputTokens = do
       Sources [] -> []
       Sources ((_,t):rest) -> tokenizeSources $ Sources ((pos,t):rest)

-tokenize :: SourceName -> Text -> [Tok]
-tokenize sourcename = totoks (initialPos sourcename)
-
-totoks :: SourcePos -> Text -> [Tok]
-totoks pos t =
-  case T.uncons t of
+tokenize :: SourcePos -> Text -> [Tok]
+tokenize = totoks
+ where
+  totoks pos t =
+    case T.uncons t of
        Nothing -> []
        Just (c, rest)
          | c == '\n' ->
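The new shape keeps the recursive worker local: `tokenize` is the public entry point and `totoks` moves into its `where` clause. A stripped-down but runnable version of that structure, with the token type simplified to a position/character pair:

```haskell
{-# LANGUAGE OverloadedStrings #-}
import Text.Parsec.Pos
import Data.Text (Text)
import qualified Data.Text as T

tokenize :: SourcePos -> Text -> [(SourcePos, Char)]
tokenize = totoks
 where
  -- The worker threads the position itself: a newline resets the column
  -- and bumps the line; anything else advances the column.
  totoks pos t =
    case T.uncons t of
      Nothing -> []
      Just (c, rest)
        | c == '\n' ->
            (pos, c) : totoks (setSourceColumn (incSourceLine pos 1) 1) rest
        | otherwise ->
            (pos, c) : totoks (incSourceColumn pos 1) rest

main :: IO ()
main = mapM_ print (tokenize (initialPos "demo.tex") "a\nb")
```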
@@ -806,7 +804,7 @@ retokenizeComment = (do
   let updPos (Tok pos' toktype' txt') =
         Tok (incSourceColumn (incSourceLine pos' (sourceLine pos - 1))
              (sourceColumn pos)) toktype' txt'
-  let newtoks = map updPos $ tokenize (sourceName pos) $ T.tail txt
+  let newtoks = map updPos $ tokenize pos $ T.tail txt
   getInput >>= setInput . ((Tok pos Symbol "%" : newtoks) ++))
   <|> return ()
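`updPos` maps positions that are relative to the start of the re-tokenized comment text onto the comment's true location in the document. The arithmetic from the hunk, worked through with parsec's real functions:

```haskell
import Text.Parsec.Pos

-- Shift a relative position pos' by the comment's actual position pos,
-- exactly as updPos does in the diff above.
updPos :: SourcePos -> SourcePos -> SourcePos
updPos pos pos' =
  incSourceColumn (incSourceLine pos' (sourceLine pos - 1)) (sourceColumn pos)

main :: IO ()
main = do
  let pos  = newPos "doc.tex" 10 8  -- where the % comment really sits
      pos' = initialPos "doc.tex"   -- a position relative to the text's start
  print (updPos pos pos')           -- line 1 -> line 10, column 1 -> column 9
```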