author    David Waern <david.waern@gmail.com>  2010-04-07 17:05:20 +0000
committer David Waern <david.waern@gmail.com>  2010-04-07 17:05:20 +0000
commit    ae37c71b42a81ad52b96eda5a765b6ade8dfd291 (patch)
tree      80c92b784a7b37b5e8942aeb618b10edeceb5149 /src/Haddock
parent    b5ec4e7d0f847215677752b191677379f045efb0 (diff)
Propagate source positions from Lex.x to Parse.y
Diffstat (limited to 'src/Haddock')
-rw-r--r--  src/Haddock/Interface/LexParseRn.hs         |  2
-rw-r--r--  src/Haddock/Interface/ParseModuleHeader.hs  |  6
-rw-r--r--  src/Haddock/Lex.x                           | 43
-rw-r--r--  src/Haddock/Parse.y                         | 42
4 files changed, 50 insertions(+), 43 deletions(-)
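
For orientation: after this patch, tokenise takes the comment's starting (line, column) and yields located tokens, which the Happy parser consumes directly. A minimal usage sketch, assuming the module layout of the Haddock/GHC of that era; the parseDocComment helper and the (1,1) start position are illustrative, not part of the patch:

    import Haddock.Lex   (tokenise)
    import Haddock.Parse (parseParas)
    import Haddock.Types (Doc)
    import RdrName       (RdrName)

    -- Lex a doc-comment body starting at line 1, column 1, then parse the
    -- located tokens into paragraphs; Nothing signals a parse failure.
    parseDocComment :: String -> Maybe (Doc RdrName)
    parseDocComment str = parseParas (tokenise str (1, 1))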
diff --git a/src/Haddock/Interface/LexParseRn.hs b/src/Haddock/Interface/LexParseRn.hs
index 89440139..02fd4bc7 100644
--- a/src/Haddock/Interface/LexParseRn.hs
+++ b/src/Haddock/Interface/LexParseRn.hs
@@ -44,7 +44,7 @@ lexParseRnHaddockComment :: HaddockCommentType ->
GlobalRdrEnv -> HsDocString -> ErrMsgM (Maybe (Doc Name))
lexParseRnHaddockComment hty gre (HsDocString fs) = do
let str = unpackFS fs
- let toks = tokenise str
+ let toks = tokenise str (0,0) -- TODO: real position
let parse = case hty of
NormalHaddockComment -> parseParas
DocSectionComment -> parseString
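
The (0,0) is a deliberate placeholder; the TODO marks where the comment's real start position should eventually flow in. A hypothetical helper showing the intended shape once callers can supply one (the helper name and import locations below are assumptions, not part of this patch):

    import Haddock.Lex (LToken, tokenise)
    import FastString  (unpackFS)        -- GHC API; module name assumed
    import HsDoc       (HsDocString(..)) -- GHC API; module name assumed

    -- Lex a doc string at its actual start position instead of (0,0).
    lexDocStringAt :: (Int, Int) -> HsDocString -> [LToken]
    lexDocStringAt pos (HsDocString fs) = tokenise (unpackFS fs) pos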
diff --git a/src/Haddock/Interface/ParseModuleHeader.hs b/src/Haddock/Interface/ParseModuleHeader.hs
index dc701f36..2bdd30a7 100644
--- a/src/Haddock/Interface/ParseModuleHeader.hs
+++ b/src/Haddock/Interface/ParseModuleHeader.hs
@@ -46,13 +46,15 @@ parseModuleHeader str0 =
description1 :: Either String (Maybe (Doc RdrName))
description1 = case descriptionOpt of
Nothing -> Right Nothing
- Just description -> case parseString . tokenise $ description of
+ -- TODO: pass real file position
+ Just description -> case parseString $ tokenise description (0,0) of
Nothing -> Left ("Cannot parse Description: " ++ description)
Just doc -> Right (Just doc)
in
case description1 of
Left mess -> Left mess
- Right docOpt -> case parseParas . tokenise $ str8 of
+ -- TODO: pass real file position
+ Right docOpt -> case parseParas $ tokenise str8 (0,0) of
Nothing -> Left "Cannot parse header documentation paragraphs"
Just doc -> Right (HaddockModInfo {
hmi_description = docOpt,
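
Because tokenise now takes the start position as a second argument, the earlier point-free pipelines (parseString . tokenise, parseParas . tokenise) no longer typecheck, hence the switch to $. An equivalent point-free spelling, purely for illustration:

    -- flip supplies the position argument up front so the pipeline can
    -- remain a straight composition:
    parseParas . flip tokenise (0,0) $ str8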
diff --git a/src/Haddock/Lex.x b/src/Haddock/Lex.x
index 40ffbe92..14843aa3 100644
--- a/src/Haddock/Lex.x
+++ b/src/Haddock/Lex.x
@@ -17,6 +17,7 @@
module Haddock.Lex (
Token(..),
+ LToken,
tokenise
) where
@@ -32,6 +33,8 @@ import Numeric
import System.IO.Unsafe
}
+%wrapper "posn"
+
$ws = $white # \n
$digit = [0-9]
$hexdigit = [0-9a-fA-F]
@@ -105,6 +108,9 @@ $ident = [$alphanum \'\_\.\!\#\$\%\&\*\+\/\<\=\>\?\@\\\\\^\|\-\~]
}
{
+-- | A located token
+type LToken = (Token, AlexPosn)
+
data Token
= TokPara
| TokNumber
@@ -124,55 +130,54 @@ data Token
| TokExampleResult String
-- deriving Show
+tokenPos :: LToken -> (Int, Int)
+tokenPos t = let AlexPn _ line col = snd t in (line, col)
+
-- -----------------------------------------------------------------------------
-- Alex support stuff
type StartCode = Int
-type Action = String -> StartCode -> (StartCode -> [Token]) -> [Token]
-
-type AlexInput = (Char,String)
-
-alexGetChar (_, []) = Nothing
-alexGetChar (_, c:cs) = Just (c, (c,cs))
+type Action = AlexPosn -> String -> StartCode -> (StartCode -> [LToken]) -> [LToken]
-alexInputPrevChar (c,_) = c
+tokenise :: String -> (Int, Int) -> [LToken]
+tokenise str (line, col) = let toks = go (posn, '\n', eofHack str) para in {-trace (show toks)-} toks
+ where
+ posn = AlexPn 0 line col
-tokenise :: String -> [Token]
-tokenise str = let toks = go ('\n', eofHack str) para in {-trace (show toks)-} toks
- where go inp@(_,str) sc =
+ go inp@(pos, _, str) sc =
case alexScan inp sc of
AlexEOF -> []
AlexError _ -> error "lexical error"
AlexSkip inp' _ -> go inp' sc
- AlexToken inp' len act -> act (take len str) sc (\sc -> go inp' sc)
+ AlexToken inp'@(pos',_,_) len act -> act pos (take len str) sc (\sc -> go inp' sc)
-- NB. we add a final \n to the string, (see comment in the beginning of line
-- production above).
eofHack str = str++"\n"
andBegin :: Action -> StartCode -> Action
-andBegin act new_sc = \str _ cont -> act str new_sc cont
+andBegin act new_sc = \pos str _ cont -> act pos str new_sc cont
token :: Token -> Action
-token t = \_ sc cont -> t : cont sc
+token t = \pos _ sc cont -> (t, pos) : cont sc
strtoken, strtokenNL :: (String -> Token) -> Action
-strtoken t = \str sc cont -> t str : cont sc
-strtokenNL t = \str sc cont -> t (filter (/= '\r') str) : cont sc
+strtoken t = \pos str sc cont -> (t str, pos) : cont sc
+strtokenNL t = \pos str sc cont -> (t (filter (/= '\r') str), pos) : cont sc
-- ^ We only want LF line endings in our internal doc string format, so we
-- filter out all CRs.
begin :: StartCode -> Action
-begin sc = \_ _ cont -> cont sc
+begin sc = \_ _ _ cont -> cont sc
-- -----------------------------------------------------------------------------
-- Lex a string as a Haskell identifier
ident :: Action
-ident str sc cont =
+ident pos str sc cont =
case strToHsQNames id of
- Just names -> TokIdent names : cont sc
- Nothing -> TokString str : cont sc
+ Just names -> (TokIdent names, pos) : cont sc
+ Nothing -> (TokString str, pos) : cont sc
where id = init (tail str)
strToHsQNames :: String -> Maybe [RdrName]
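
tokenPos is not used anywhere else in this patch; it is the hook for mapping located tokens back to line/column coordinates. A sketch of the kind of diagnostic it enables, assuming tokenPos were added to the export list (the describeStop function is hypothetical):

    -- Report where processing of a doc comment stopped.
    describeStop :: [LToken] -> String
    describeStop []      = "doc comment: stopped at end of input"
    describeStop (t : _) =
      let (line, col) = tokenPos t
      in  "doc comment: stopped at line " ++ show line ++ ", column " ++ show col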
diff --git a/src/Haddock/Parse.y b/src/Haddock/Parse.y
index d1146da3..1aad3a18 100644
--- a/src/Haddock/Parse.y
+++ b/src/Haddock/Parse.y
@@ -20,26 +20,26 @@ import Data.List (stripPrefix)
%expect 0
-%tokentype { Token }
-
-%token '/' { TokSpecial '/' }
- '@' { TokSpecial '@' }
- '[' { TokDefStart }
- ']' { TokDefEnd }
- DQUO { TokSpecial '\"' }
- URL { TokURL $$ }
- PIC { TokPic $$ }
- ANAME { TokAName $$ }
- '/../' { TokEmphasis $$ }
- '-' { TokBullet }
- '(n)' { TokNumber }
- '>..' { TokBirdTrack $$ }
- PROMPT { TokExamplePrompt $$ }
- RESULT { TokExampleResult $$ }
- EXP { TokExampleExpression $$ }
- IDENT { TokIdent $$ }
- PARA { TokPara }
- STRING { TokString $$ }
+%tokentype { LToken }
+
+%token '/' { (TokSpecial '/',_) }
+ '@' { (TokSpecial '@',_) }
+ '[' { (TokDefStart,_) }
+ ']' { (TokDefEnd,_) }
+ DQUO { (TokSpecial '\"',_) }
+ URL { (TokURL $$,_) }
+ PIC { (TokPic $$,_) }
+ ANAME { (TokAName $$,_) }
+ '/../' { (TokEmphasis $$,_) }
+ '-' { (TokBullet,_) }
+ '(n)' { (TokNumber,_) }
+ '>..' { (TokBirdTrack $$,_) }
+ PROMPT { (TokExamplePrompt $$,_) }
+ RESULT { (TokExampleResult $$,_) }
+ EXP { (TokExampleExpression $$,_) }
+ IDENT { (TokIdent $$,_) }
+ PARA { (TokPara,_) }
+ STRING { (TokString $$,_) }
%monad { Maybe }
@@ -117,7 +117,7 @@ strings :: { String }
| STRING strings { $1 ++ $2 }
{
-happyError :: [Token] -> Maybe a
+happyError :: [LToken] -> Maybe a
happyError toks = Nothing
-- | Create an 'Example', stripping superfluous characters as appropriate
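
With tokens now carrying an AlexPosn, a grammar rule that wants the position can bind the whole located token instead of projecting the payload with $$ (in Happy, $n denotes the entire token when its pattern contains no $$). A hypothetical fragment, which would also require Haddock.Lex to export AlexPosn:

    %token
          STRINGPOS  { (TokString _, _) }

    locatedString :: { (String, AlexPosn) }
          : STRINGPOS  { let (TokString s, pos) = $1 in (s, pos) }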