--
-- Haddock - A Haskell Documentation Tool
--
-- (c) Simon Marlow 2002
--

module HaddockLex ( 
	Token(..), 
	tokenise 
 ) where

import Data.Char ( isDigit, isSpace )

special :: [Char]
special = ['`', '\'', '"', '@']

data Token
  = TokPara             -- paragraph break (blank line)
  | TokNumber           -- enumerated item marker, e.g. "1." or "(1)"
  | TokBullet           -- bullet item marker, '*' or '-'
  | TokSpecial Char     -- one of the special markup characters (see 'special')
  | TokString String    -- plain text
  | TokEmph String      -- emphasised text, written /like this/
  | TokURL String       -- a URL, written <like this>
  | TokBirdTrack        -- a bird-track ('>') code line
  deriving Show

-- simple finite-state machine for tokenising the doc string
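-- For example (illustrative inputs, expected output):
--
--   tokenise "Hello /world/ and <http://example.com>"
--     == [TokString "Hello ", TokEmph "world", TokString " and ",
--         TokURL "http://example.com"]
--
--   tokenise "use @foo@ here"
--     == [TokString "use ", TokSpecial '@', TokString "foo",
--         TokSpecial '@', TokString " here"]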

-- ----------------------------------------------------------------------------
-- At the beginning of a paragraph, we throw away initial whitespace

tokenise :: String -> [Token]
tokenise "" = []
tokenise str = case str of
  '<':cs  -> tokenise_url cs
  '\n':cs -> tokenise_newline tokenise cs
  '/':cs  -> tokenise_emph tokenise cs
  c:cs | c `elem` special -> TokSpecial c : tokenise cs
  _other  -> tokenise_string "" str

tokenise_newline next cs =
 case dropWhile nonNewlineSpace cs of
   '\n':cs -> TokPara : tokenise_para cs -- paragraph break
   '>':cs  -> TokBirdTrack : next cs -- bird track
   _other  -> tokenise_string "" cs

tokenise_emph next cs =
 case break newlineSlash cs of
   (bef, aft@('\n':_)) -> TokString ('/':bef) : next aft -- not closed before end of line: keep the '/' literal
   (bef, '/':cs')      -> TokEmph bef : next cs'
   _other              -> tokenise_string "" cs
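-- e.g. an emphasis marker that is not closed before the end of the line
-- falls back to literal text (expected):
--
--   tokenise "/not emph\nmore"  ==  [TokString "/not emph", TokString "more"]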

tokenise_para cs =
  case dropWhile nonNewlineSpace cs of
	-- bullet: '*'
   '*':cs  -> TokBullet  : tokenise cs
	-- bullet: '-'
   '-':cs  -> TokBullet  : tokenise cs
	-- bird track
   '>':cs  -> TokBirdTrack : tokenise cs
	-- enumerated item: '1.'
   str | (ds,'.':cs) <- span isDigit str, not (null ds)
		-> TokNumber : tokenise cs
	-- enumerated item: '(1)'
   '(':cs | (ds,')':cs') <- span isDigit cs, not (null ds)
		-> TokNumber : tokenise cs'
   _other -> tokenise cs
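-- e.g. after a paragraph break, list markers become tokens (expected):
--
--   tokenise "intro\n\n* item"   ==  [TokString "intro", TokPara, TokBullet, TokString " item"]
--   tokenise "intro\n\n1. item"  ==  [TokString "intro", TokPara, TokNumber, TokString " item"]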

nonNewlineSpace c = isSpace c && c /= '\n'

newlineSlash c = c == '\n' || c == '/'

-- ----------------------------------------------------------------------------
-- Within a paragraph, we don't throw away any whitespace (except before a
-- birdtrack, and before a paragraph break).
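-- e.g. a single newline inside a paragraph is kept as part of the text
-- rather than starting a new paragraph (expected):
--
--   tokenise "@x@\nmore"  ==  [TokSpecial '@', TokString "x", TokSpecial '@', TokString "\nmore"]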

tokenise1 :: String -> [Token]
tokenise1 "" = []
tokenise1 str = case str of
  '<':cs  -> tokenise_url cs
  '\n':cs -> tokenise_newline1 cs
  '/':cs  -> tokenise_emph tokenise1 cs
  c:cs | c `elem` special -> TokSpecial c : tokenise1 cs
  _other  -> tokenise_string "" str

tokenise_newline1 cs =
 case dropWhile nonNewlineSpace cs of
   '\n':cs -> TokPara : tokenise_para cs -- paragraph break
   '>':cs  -> TokString "\n" : TokBirdTrack : tokenise1 cs -- bird track
   _other  -> tokenise_string "\n" cs

tokenise_url cs =
  let (url,rest) = break (=='>') cs in
  TokURL url : case rest of
		 '>':rest' -> tokenise1 rest'  -- skip the closing '>'
		 _         -> tokenise1 rest   -- unterminated URL: rest is ""
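-- e.g. (expected):
--
--   tokenise "see <http://example.com> here"
--     == [TokString "see ", TokURL "http://example.com", TokString " here"]
--
--   tokenise "<http://x"          -- unterminated: the rest of the input is the URL
--     == [TokURL "http://x"]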

-- ----------------------------------------------------------------------------
-- Within a string, we don't throw away any whitespace

tokenise_string str cs = 
  case cs of
    [] -> [TokString (reverse str)]
    '\\':c:cs -> tokenise_string (c:str) cs
    '\n':cs   -> tokenise_string_newline str cs
    '<':cs    -> TokString (reverse str) : tokenise_url cs
    '/':cs    -> TokString (reverse str) : tokenise_emph (tokenise_string "") cs
    c:cs | c `elem` special -> TokString (reverse str) : tokenise1 (c:cs)
         | otherwise 	    -> tokenise_string (c:str) cs

tokenise_string_newline str cs =
  case dropWhile nonNewlineSpace cs of
   '\n':cs -> TokString (reverse str) : TokPara : tokenise_para cs
   '>':cs  -> TokString (reverse ('\n':str)) : TokBirdTrack : tokenise1 cs -- bird track
   _other  -> tokenise_string ('\n':str) cs  -- don't throw away whitespace
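-- e.g. a bird track after a newline flushes the current string, keeping the
-- newline, and emits a TokBirdTrack (expected):
--
--   tokenise "code:\n> foo x = x"
--     == [TokString "code:\n", TokBirdTrack, TokString " foo x = x"]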