author    Simon Marlow <>  2017-10-12 07:46:00 (GMT)
committer hdiff <hdiff@hdiff.luite.com>  2017-10-12 07:46:00 (GMT)
commit    2c141b4ea928f2f6e38945ce40cd5562a6f0f69e (patch)
tree      61db40cb4e68717029d9ad40d49f57225c946253
parent    8af57e224b3be4fd1cbb6bbf8f5da9f2ae0e3ef7 (diff)
version 1.19.8 (HEAD, 1.19.8, master)
-rw-r--r--  CHANGES                         6
-rw-r--r--  happy.cabal                     7
-rw-r--r--  src/Grammar.lhs                 6
-rw-r--r--  src/LALR.lhs                   17
-rw-r--r--  src/ProduceCode.lhs           236
-rw-r--r--  templates/GenericTemplate.hs   17
-rw-r--r--  tests/Makefile                  3
-rw-r--r--  tests/issue93.y              1246
-rw-r--r--  tests/issue94.y                33
9 files changed, 1453 insertions, 118 deletions
diff --git a/CHANGES b/CHANGES
index 8bb525c..eef476a 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,4 +1,10 @@
-----------------------------------------------------------------------------
+1.19.8
+
+ * Fix issue #94 (some grammars don't compile due to new type
+ signatures introduced to allow overloading to be used)
+
+-----------------------------------------------------------------------------
1.19.7
 * Fix missing test suite files in the sdist
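
The #94 failure comes from the reduction-function type signatures that happy emits to support an overloaded parsing monad: for a nonterminal with no %type declaration, the old signatures named a fresh type variable (t4, t5, ...), which does not typecheck for some grammars. The fix in src/ProduceCode.lhs below writes a wildcard in that slot instead, and only emits such signatures on GHC >= 7.10 with {-# OPTIONS_GHC -XPartialTypeSignatures #-} switched on. A minimal, self-contained sketch of the shape of the generated code (all names here are hypothetical stand-ins, not the real generated ones):

{-# LANGUAGE PartialTypeSignatures #-}
{-# OPTIONS_GHC -Wno-partial-type-signatures #-}

-- Hypothetical stand-ins for generated names; only the shape matters.
data Token = Token
data HappyAbsSyn t4 = HappyTerminal Token | HappyAbsSyn4 t4

-- With the fix, the slot for the undeclared nonterminal is a wildcard
-- rather than a fresh type variable, so the signature typechecks under
-- an overloaded monad without further annotations.
happyReduce_4 :: Monad m => Token -> HappyAbsSyn _ -> m (HappyAbsSyn _)
happyReduce_4 _tk x = return x

main :: IO ()
main = putStrLn "ok"
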
diff --git a/happy.cabal b/happy.cabal
index 91127b8..413c257 100644
--- a/happy.cabal
+++ b/happy.cabal
@@ -1,5 +1,5 @@
name: happy
-version: 1.19.7
+version: 1.19.8
license: BSD2
license-file: LICENSE
copyright: (c) Andy Gill, Simon Marlow
@@ -120,6 +120,8 @@ extra-source-files:
tests/precedence002.y
tests/test_rules.y
tests/issue91.y
+ tests/issue93.y
+ tests/issue94.y
tests/issue95.y
tests/monaderror-explist.y
tests/typeclass_monad001.y
@@ -146,7 +148,8 @@ executable happy
build-depends: base < 5,
array,
containers >= 0.4.2,
- mtl >= 1.0
+ mtl >= 2.2.1
+ -- mtl-2.2.1 added Control.Monad.Except
extensions: CPP, MagicHash, FlexibleContexts
ghc-options: -Wall
diff --git a/src/Grammar.lhs b/src/Grammar.lhs
index f2658fa..4dd0ae0 100644
--- a/src/Grammar.lhs
+++ b/src/Grammar.lhs
@@ -32,12 +32,6 @@ Here is our mid-section datatype
> import Control.Monad.Writer
-#ifdef DEBUG
-
-> import System.IOExts
-
-#endif
-
> type Name = Int
> type Production = (Name,[Name],(String,[Int]),Priority)
diff --git a/src/LALR.lhs b/src/LALR.lhs
index c66d02d..cc9b46a 100644
--- a/src/LALR.lhs
+++ b/src/LALR.lhs
@@ -35,9 +35,24 @@ Generation of LALR parsing tables.
This means rule $a$, with dot at $b$ (all starting at 0)
> data Lr0Item = Lr0 {-#UNPACK#-}!Int {-#UNPACK#-}!Int -- (rule, dot)
-> deriving (Eq,Ord)
+> deriving (Eq,Ord
+
+#ifdef DEBUG
+
+> ,Show
+
+#endif
+
+> )
> data Lr1Item = Lr1 {-#UNPACK#-}!Int {-#UNPACK#-}!Int NameSet -- (rule, dot, lookahead)
+
+#ifdef DEBUG
+
+> deriving (Show)
+
+#endif
+
> type RuleList = [Lr0Item]
-----------------------------------------------------------------------------
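
The Show instances added above exist only in debugging builds. The idiom is plain CPP over the deriving clause; a standalone (non-literate) sketch, compiled with -cpp and optionally -DDEBUG:

{-# LANGUAGE CPP #-}

-- DEBUG-only deriving: the Show instance is only generated when the
-- module is compiled with -DDEBUG, as in src/LALR.lhs above.
data Lr0Item = Lr0 !Int !Int
  deriving ( Eq, Ord
#ifdef DEBUG
           , Show
#endif
           )

main :: IO ()
main =
#ifdef DEBUG
  print (Lr0 1 2)
#else
  putStrLn "compiled without -DDEBUG"
#endif
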
diff --git a/src/ProduceCode.lhs b/src/ProduceCode.lhs
index 812ac66..9a66797 100644
--- a/src/ProduceCode.lhs
+++ b/src/ProduceCode.lhs
@@ -14,15 +14,15 @@ The code generator.
> interleave, interleave', maybestr,
> brack, brack' )
-> import Data.Maybe ( isJust, isNothing )
+> import Data.Maybe ( isJust, isNothing, fromMaybe )
> import Data.Char
> import Data.List
-> import Control.Monad ( forM_ )
+> import Control.Monad ( forM_ )
> import Control.Monad.ST
-> import Data.Bits ( setBit )
-> import Data.Array.ST ( STUArray )
-> import Data.Array.Unboxed ( UArray )
+> import Data.Bits ( setBit )
+> import Data.Array.ST ( STUArray )
+> import Data.Array.Unboxed ( UArray )
> import Data.Array.MArray
> import Data.Array.IArray
@@ -90,13 +90,42 @@ Produce the complete output file.
> -- fix, others not so easy, and others would require GHC version
> -- #ifdefs. For now I'm just disabling all of them.
>
-> top_opts = nowarn_opts .
-> case top_options of
-> "" -> str ""
-> _ -> str (unwords [ "{-# OPTIONS"
-> , top_options
-> , "#-}"
-> ]) . nl
+> partTySigs_opts = ifGeGhc710 (str "{-# OPTIONS_GHC -XPartialTypeSignatures #-}" . nl)
+>
+> intMaybeHash | ghc = str "Happy_GHC_Exts.Int#"
+> | otherwise = str "Int"
+>
+> -- Parsing monad and its constraints
+> pty = str monad_tycon
+> pcont = str monad_context
+>
+> -- If GHC is enabled, wrap the content in a CPP ifdef that includes the
+> -- content and tests whether the GHC version is >= 7.10.3
+> ifGeGhc710 :: (String -> String) -> String -> String
+> ifGeGhc710 content | ghc = str "#if __GLASGOW_HASKELL__ >= 710" . nl
+> . content
+> . str "#endif" . nl
+> | otherwise = id
+>
+> n_missing_types = length (filter isNothing (elems nt_types))
+> happyAbsSyn = str "(HappyAbsSyn " . str wild_tyvars . str ")"
+> where wild_tyvars = unwords (replicate n_missing_types "_")
+>
+> -- This decides how to include (if at all) a type signature
+> -- See <https://github.com/simonmar/happy/issues/94>
+> filterTypeSig :: (String -> String) -> String -> String
+> filterTypeSig content | n_missing_types == 0 = content
+> | otherwise = ifGeGhc710 content
+>
+> top_opts =
+> nowarn_opts
+> . (case top_options of
+> "" -> str ""
+> _ -> str (unwords [ "{-# OPTIONS"
+> , top_options
+> , "#-}"
+> ]) . nl)
+> . partTySigs_opts
%-----------------------------------------------------------------------------
Make the abstract syntax type declaration, of the form:
@@ -130,14 +159,14 @@ If we're using coercions, we need to generate the injections etc.
> bhappy_item = brack' happy_item
>
> inject n ty
-> = mkHappyIn n . str " :: " . type_param n ty
+> = mkHappyIn n . str " :: " . typeParam n ty
> . str " -> " . bhappy_item . char '\n'
> . mkHappyIn n . str " x = Happy_GHC_Exts.unsafeCoerce# x\n"
> . str "{-# INLINE " . mkHappyIn n . str " #-}"
>
> extract n ty
> = mkHappyOut n . str " :: " . bhappy_item
-> . str " -> " . type_param n ty . char '\n'
+> . str " -> " . typeParam n ty . char '\n'
> . mkHappyOut n . str " x = Happy_GHC_Exts.unsafeCoerce# x\n"
> . str "{-# INLINE " . mkHappyOut n . str " #-}"
> in
@@ -183,7 +212,7 @@ example where this matters.
> . str "\n\t= HappyTerminal " . token
> . str "\n\t| HappyErrorToken Int\n"
> . interleave "\n"
-> [ str "\t| " . makeAbsSynCon n . strspace . type_param n ty
+> [ str "\t| " . makeAbsSynCon n . strspace . typeParam n ty
> | (n, ty) <- assocs nt_types,
> (nt_types_index ! n) == n]
@@ -219,9 +248,7 @@ based parsers -- types aren't as important there).
> | otherwise = id
-> where intMaybeHash | ghc = str "Happy_GHC_Exts.Int#"
-> | otherwise = str "Int"
-> tokens =
+> where tokens =
> case lexer' of
> Nothing -> char '[' . token . str "] -> "
> Just _ -> id
@@ -314,7 +341,7 @@ happyMonadReduce to get polymorphic recursion. Sigh.
> id
> else
> nl . reductionFun . strspace
-> . interleave " " (map str (take (length toks) (repeat "_")))
+> . interleave " " (replicate (length toks) (str "_"))
> . str " = notHappyAtAll ")
> | otherwise
@@ -340,16 +367,7 @@ happyMonadReduce to get polymorphic recursion. Sigh.
> | otherwise = nt
>
> mkReductionHdr lt' s =
-> let pcont = str monad_context
-> pty = str monad_tycon
-> all_tyvars = [ 't':show n | (n, Nothing) <-
-> assocs nt_types ]
-> str_tyvars = str (unwords all_tyvars)
-> happyAbsSyn = str "(HappyAbsSyn "
-> . str_tyvars . str ")"
-> intMaybeHash | ghc = str "Happy_GHC_Exts.Int#"
-> | otherwise = str "Int"
-> tysig = case lexer' of
+> let tysig = case lexer' of
> Nothing -> id
> _ | target == TargetArrayBased ->
> mkReduceFun i . str " :: " . pcont
@@ -360,7 +378,7 @@ happyMonadReduce to get polymorphic recursion. Sigh.
> . happyAbsSyn . str " -> "
> . pty . str " " . happyAbsSyn . str "\n"
> | otherwise -> id in
-> tysig . mkReduceFun i . str " = "
+> filterTypeSig tysig . mkReduceFun i . str " = "
> . str s . strspace . lt' . strspace . showInt adjusted_nt
> . strspace . reductionFun . nl
> . reductionFun . strspace
@@ -388,7 +406,7 @@ happyMonadReduce to get polymorphic recursion. Sigh.
> tokLets code''
> | coerce && not (null cases)
> = interleave "\n\t" cases
-> . code'' . str (take (length cases) (repeat '}'))
+> . code'' . str (replicate (length cases) '}')
> | otherwise = code''
>
> cases = [ str "case " . extract t . strspace . mkDummyVar n
@@ -428,8 +446,6 @@ The token conversion function.
> Just (lexer'',eof') ->
> case (target, ghc) of
> (TargetHaskell, True) ->
-> let pcont = str monad_context
-> pty = str monad_tycon in
> str "happyNewToken :: " . pcont . str " => "
> . str "(Happy_GHC_Exts.Int#\n"
> . str " -> Happy_GHC_Exts.Int#\n"
@@ -638,14 +654,23 @@ action array indexed by (terminal * last_state) + state
> | ghc
> = str "happyActOffsets :: HappyAddr\n"
> . str "happyActOffsets = HappyA# \"" --"
-> . str (hexChars act_offs)
+> . str (checkedHexChars min_off act_offs)
> . str "\"#\n\n" --"
>
> . str "happyGotoOffsets :: HappyAddr\n"
> . str "happyGotoOffsets = HappyA# \"" --"
-> . str (hexChars goto_offs)
+> . str (checkedHexChars min_off goto_offs)
> . str "\"#\n\n" --"
>
+> . str "happyAdjustOffset :: Happy_GHC_Exts.Int# -> Happy_GHC_Exts.Int#\n"
+> . str "happyAdjustOffset off = "
+> . (if length table < 32768
+> then str "off"
+> else str "if happyLt off (" . shows min_off . str "# :: Happy_GHC_Exts.Int#)"
+> . str " then off Happy_GHC_Exts.+# 65536#"
+> . str " else off")
+> . str "\n\n" --"
+>
> . str "happyDefActions :: HappyAddr\n"
> . str "happyDefActions = HappyA# \"" --"
> . str (hexChars defaults)
@@ -664,19 +689,22 @@ action array indexed by (terminal * last_state) + state
> | otherwise
> = str "happyActOffsets :: Happy_Data_Array.Array Int Int\n"
> . str "happyActOffsets = Happy_Data_Array.listArray (0,"
-> . shows (n_states) . str ") (["
+> . shows n_states . str ") (["
> . interleave' "," (map shows act_offs)
> . str "\n\t])\n\n"
>
> . str "happyGotoOffsets :: Happy_Data_Array.Array Int Int\n"
> . str "happyGotoOffsets = Happy_Data_Array.listArray (0,"
-> . shows (n_states) . str ") (["
+> . shows n_states . str ") (["
> . interleave' "," (map shows goto_offs)
> . str "\n\t])\n\n"
+>
+> . str "happyAdjustOffset :: Int -> Int\n"
+> . str "happyAdjustOffset = id\n\n"
>
> . str "happyDefActions :: Happy_Data_Array.Array Int Int\n"
> . str "happyDefActions = Happy_Data_Array.listArray (0,"
-> . shows (n_states) . str ") (["
+> . shows n_states . str ") (["
> . interleave' "," (map shows defaults)
> . str "\n\t])\n\n"
>
@@ -710,7 +738,7 @@ action array indexed by (terminal * last_state) + state
> n_terminals = length terms
> n_nonterminals = length nonterms - n_starts -- lose %starts
>
-> (act_offs,goto_offs,table,defaults,check,explist)
+> (act_offs,goto_offs,table,defaults,check,explist,min_off)
> = mkTables action goto first_nonterm' fst_term
> n_terminals n_nonterminals n_starts (bounds token_names')
>
@@ -757,9 +785,7 @@ outlaw them inside { }
> [ (a, fn a b) | (a, b) <- assocs nt_types ]
> where
> fn n Nothing = n
-> fn _ (Just a) = case lookup a assoc_list of
-> Just v -> v
-> Nothing -> error ("cant find an item in list")
+> fn _ (Just a) = fromMaybe (error "can't find an item in list") (lookup a assoc_list)
> assoc_list = [ (b,a) | (a, Just b) <- assocs nt_types ]
> makeAbsSynCon = mkAbsSynCon nt_types_index
@@ -808,8 +834,6 @@ MonadStuff:
> produceMonadStuff =
-> let pcont = str monad_context
-> pty = str monad_tycon in
> str "happyThen :: " . pcont . str " => " . pty
> . str " a -> (a -> " . pty
> . str " b) -> " . pty . str " b\n"
@@ -833,11 +857,6 @@ MonadStuff:
> . errorHandler . str "\n"
> _ ->
> let
-> all_tyvars = [ 't':show n | (n, Nothing) <- assocs nt_types ]
-> str_tyvars = str (unwords all_tyvars)
-> happyAbsSyn = str "(HappyAbsSyn " . str_tyvars . str ")"
-> intMaybeHash | ghc = str "Happy_GHC_Exts.Int#"
-> | otherwise = str "Int"
> happyParseSig
> | target == TargetArrayBased =
> str "happyParse :: " . pcont . str " => " . intMaybeHash
@@ -868,7 +887,7 @@ MonadStuff:
> . str " -> " . pty . str " " . happyAbsSyn . str ")\n"
> . str "\n"
> | otherwise = id in
-> happyParseSig . newTokenSig . doActionSig . reduceArrSig
+> filterTypeSig (happyParseSig . newTokenSig . doActionSig . reduceArrSig)
> . str "happyThen1 :: " . pcont . str " => " . pty
> . str " a -> (a -> " . pty
> . str " b) -> " . pty . str " b\n"
@@ -1040,8 +1059,8 @@ See notes under "Action Tables" above for some subtleties in this function.
> (act:_) -> act -- pick the first one we see for now
>
> where reduces
-> = [ act | (_,act@(LR'Reduce _ _)) <- actions ]
-> ++ [ act | (_,(LR'Multiple _ act@(LR'Reduce _ _))) <- actions ]
+> = [ act | (_, act@(LR'Reduce _ _)) <- actions ]
+> ++ [ act | (_, LR'Multiple _ act@(LR'Reduce _ _)) <- actions ]
-----------------------------------------------------------------------------
-- Generate packed parsing tables.
@@ -1091,28 +1110,30 @@ See notes under "Action Tables" above for some subtleties in this function.
> mkTables
> :: ActionTable -> GotoTable -> Name -> Int -> Int -> Int -> Int -> (Int, Int) ->
-> ([Int] -- happyActOffsets
-> ,[Int] -- happyGotoOffsets
-> ,[Int] -- happyTable
-> ,[Int] -- happyDefAction
-> ,[Int] -- happyCheck
-> ,[Int] -- happyExpList
+> ( [Int] -- happyActOffsets
+> , [Int] -- happyGotoOffsets
+> , [Int] -- happyTable
+> , [Int] -- happyDefAction
+> , [Int] -- happyCheck
+> , [Int] -- happyExpList
+> , Int -- happyMinOffset
> )
>
> mkTables action goto first_nonterm' fst_term
> n_terminals n_nonterminals n_starts
> token_names_bound
>
-> = ( elems act_offs,
-> elems goto_offs,
-> take max_off (elems table),
-> def_actions,
-> take max_off (elems check),
-> elems explist
-> )
+> = ( elems act_offs
+> , elems goto_offs
+> , take max_off (elems table)
+> , def_actions
+> , take max_off (elems check)
+> , elems explist
+> , min_off
+> )
> where
>
-> (table,check,act_offs,goto_offs,explist,max_off)
+> (table,check,act_offs,goto_offs,explist,min_off,max_off)
> = runST (genTables (length actions)
> max_token token_names_bound
> sorted_actions explist_actions)
@@ -1134,13 +1155,13 @@ See notes under "Action Tables" above for some subtleties in this function.
> | (state, acts) <- assocs action,
> let (err:_dummy:vec) = assocs acts
> vec' = drop (n_starts+n_nonterminals) vec
-> acts' = filter (notFail) (err:vec')
+> acts' = filter notFail (err:vec')
> default_act = getDefault acts'
> acts'' = mkActVals acts' default_act
> ]
>
> explist_actions :: [(Int, [Int])]
-> explist_actions = [ (state, concat $ map f $ assocs acts)
+> explist_actions = [ (state, concatMap f $ assocs acts)
> | (state, acts) <- assocs action ]
> where
> f (t, LR'Shift _ _ ) = [t - fst token_names_bound]
@@ -1173,19 +1194,20 @@ See notes under "Action Tables" above for some subtleties in this function.
> mkGotoVals assocs' =
> [ (token - first_nonterm', i) | (token, Goto i) <- assocs' ]
>
-> sorted_actions = reverse (sortBy cmp_state (actions++gotos))
+> sorted_actions = sortBy (flip cmp_state) (actions ++ gotos)
> cmp_state (_,_,_,width1,tally1,_) (_,_,_,width2,tally2,_)
> | width1 < width2 = LT
> | width1 == width2 = compare tally1 tally2
> | otherwise = GT
> data ActionOrGoto = ActionEntry | GotoEntry
-> type TableEntry = (ActionOrGoto,
-> Int{-stateno-},
-> Int{-default-},
-> Int{-width-},
-> Int{-tally-},
-> [(Int,Int)])
+> type TableEntry = ( ActionOrGoto
+> , Int {-stateno-}
+> , Int {-default-}
+> , Int {-width-}
+> , Int {-tally-}
+> , [(Int,Int)]
+> )
> genTables
> :: Int -- number of actions
@@ -1193,13 +1215,14 @@ See notes under "Action Tables" above for some subtleties in this function.
> -> (Int, Int) -- token names bounds
> -> [TableEntry] -- entries for the table
> -> [(Int, [Int])] -- expected tokens lists
-> -> ST s (UArray Int Int, -- table
-> UArray Int Int, -- check
-> UArray Int Int, -- action offsets
-> UArray Int Int, -- goto offsets
-> UArray Int Int, -- expected tokens list
-> Int -- highest offset in table
-> )
+> -> ST s ( UArray Int Int -- table
+> , UArray Int Int -- check
+> , UArray Int Int -- action offsets
+> , UArray Int Int -- goto offsets
+> , UArray Int Int -- expected tokens list
+> , Int -- lowest offset in table
+> , Int -- highest offset in table
+> )
>
> genTables n_actions max_token token_names_bound entries explist = do
>
@@ -1210,15 +1233,15 @@ See notes under "Action Tables" above for some subtleties in this function.
> off_arr <- newArray (-max_token, mAX_TABLE_SIZE) 0
> exp_array <- newArray (0, (n_actions * n_token_names + 15) `div` 16) 0
>
-> max_off <- genTables' table check act_offs goto_offs off_arr exp_array entries
-> explist max_token n_token_names
+> (min_off,max_off) <- genTables' table check act_offs goto_offs off_arr exp_array entries
+> explist max_token n_token_names
>
> table' <- freeze table
> check' <- freeze check
> act_offs' <- freeze act_offs
> goto_offs' <- freeze goto_offs
> exp_array' <- freeze exp_array
-> return (table',check',act_offs',goto_offs',exp_array',max_off+1)
+> return (table',check',act_offs',goto_offs',exp_array',min_off,max_off+1)
> where
> n_states = n_actions - 1
@@ -1238,19 +1261,19 @@ See notes under "Action Tables" above for some subtleties in this function.
> -> [(Int, [Int])] -- expected tokens lists
> -> Int -- maximum token no.
> -> Int -- number of token names
-> -> ST s Int -- highest offset in table
+> -> ST s (Int,Int) -- lowest and highest offsets in table
>
> genTables' table check act_offs goto_offs off_arr exp_array entries
> explist max_token n_token_names
-> = fill_exp_array >> fit_all entries 0 1
+> = fill_exp_array >> fit_all entries 0 0 1
> where
>
-> fit_all [] max_off _ = return max_off
-> fit_all (s:ss) max_off fst_zero = do
-> (off, new_max_off, new_fst_zero) <- fit s max_off fst_zero
+> fit_all [] min_off max_off _ = return (min_off, max_off)
+> fit_all (s:ss) min_off max_off fst_zero = do
+> (off, new_min_off, new_max_off, new_fst_zero) <- fit s min_off max_off fst_zero
> ss' <- same_states s ss off
> writeArray off_arr off 1
-> fit_all ss' new_max_off new_fst_zero
+> fit_all ss' new_min_off new_max_off new_fst_zero
>
> fill_exp_array =
> forM_ explist $ \(state, tokens) ->
@@ -1276,16 +1299,19 @@ See notes under "Action Tables" above for some subtleties in this function.
> -- fit a vector into the table. Return the offset of the vector,
> -- the maximum offset used in the table, and the offset of the first
> -- entry in the table (used to speed up the lookups a bit).
-> fit (_,_,_,_,_,[]) max_off fst_zero = return (0,max_off,fst_zero)
+> fit (_,_,_,_,_,[]) min_off max_off fst_zero = return (0,min_off,max_off,fst_zero)
>
> fit (act_or_goto, state_no, _deflt, _, _, state@((t,_):_))
-> max_off fst_zero = do
+> min_off max_off fst_zero = do
> -- start at offset 1 in the table: all the empty states
> -- (states with just a default reduction) are mapped to
> -- offset zero.
> off <- findFreeOffset (-t+fst_zero) check off_arr state
-> let new_max_off | furthest_right > max_off = furthest_right
+> let new_min_off | furthest_left < min_off = furthest_left
+> | otherwise = min_off
+> new_max_off | furthest_right > max_off = furthest_right
> | otherwise = max_off
+> furthest_left = off
> furthest_right = off + max_token
>
> -- trace ("fit: state " ++ show state_no ++ ", off " ++ show off ++ ", elems " ++ show state) $ do
@@ -1293,7 +1319,7 @@ See notes under "Action Tables" above for some subtleties in this function.
> writeArray (which_off act_or_goto) state_no off
> addState off table check state
> new_fst_zero <- findFstFreeSlot check fst_zero
-> return (off, new_max_off, new_fst_zero)
+> return (off, new_min_off, new_max_off, new_fst_zero)
When looking for a free offset in the table, we use the 'check' table
rather than the main table. The check table starts off with (-1) in
@@ -1365,9 +1391,9 @@ slot is free or not.
> mkHappyIn n = str "happyIn" . shows n
> mkHappyOut n = str "happyOut" . shows n
-> type_param :: Int -> Maybe String -> ShowS
-> type_param n Nothing = char 't' . shows n
-> type_param _ (Just ty) = brack ty
+> typeParam :: Int -> Maybe String -> ShowS
+> typeParam n Nothing = char 't' . shows n
+> typeParam _ (Just ty) = brack ty
> specReduceFun :: Int -> Bool
> specReduceFun = (<= 3)
@@ -1377,7 +1403,7 @@ slot is free or not.
-- for placing in a string.
> hexChars :: [Int] -> String
-> hexChars acts = concat (map hexChar acts)
+> hexChars = concatMap hexChar
> hexChar :: Int -> String
> hexChar i | i < 0 = hexChar (i + 65536)
@@ -1389,3 +1415,15 @@ slot is free or not.
> hexDig :: Int -> Char
> hexDig i | i <= 9 = chr (i + ord '0')
> | otherwise = chr (i - 10 + ord 'a')
+
+This guards against integers that are so large that, after being encoded by
+'hexChar' and read back as a signed 16-bit value, they would wrap around and
+land at or above the expected minimum offset, making them indistinguishable
+from legitimate (negative) offsets.
+
+> checkedHexChars :: Int -> [Int] -> String
+> checkedHexChars minValue = concatMap hexChar'
+> where hexChar' i | checkHexChar minValue i = hexChar i
+> | otherwise = error "grammar does not fit in 16-bit representation that is used with '--ghc'"
+
+> checkHexChar :: Int -> Int -> Bool
+> checkHexChar minValue i = i <= 32767 || i - 65536 < minValue
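
The new min_off plumbing exists because, with --ghc, offsets are stored as 16-bit characters in a string literal and read back as signed 16-bit values; once happyTable has more than 32767 entries (as tests/issue93.y arranges), a large positive offset reads back negative. happyAdjustOffset repairs any value below the smallest legitimate offset by adding 65536, and checkedHexChars rejects values that could not be repaired unambiguously. A small standalone sketch of that round trip (minOff is a made-up stand-in for the real minimum offset):

import Data.Int (Int16)

-- What 'hexChars' stores: two bytes, negatives shifted into 0..65535.
encode16 :: Int -> Int
encode16 i | i < 0     = i + 65536
           | otherwise = i

-- What 'indexShortOffAddr' reads back: a signed 16-bit value.
decode16 :: Int -> Int
decode16 w = fromIntegral (fromIntegral w :: Int16)

-- The generated 'happyAdjustOffset' for a table of 32768 or more entries.
adjust :: Int -> Int -> Int
adjust minOff off | off < minOff = off + 65536
                  | otherwise    = off

main :: IO ()
main = do
  let minOff = -2000                       -- assumed smallest real offset
      roundTrip o = adjust minOff (decode16 (encode16 o))
  -- A genuinely negative offset, a small one, and one above 32767 all survive.
  print (map roundTrip [-1500, 300, 40000])   -- [-1500,300,40000]
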
diff --git a/templates/GenericTemplate.hs b/templates/GenericTemplate.hs
index 5eb2f36..8a1ada2 100644
--- a/templates/GenericTemplate.hs
+++ b/templates/GenericTemplate.hs
@@ -2,9 +2,6 @@
#ifdef HAPPY_GHC
#undef __GLASGOW_HASKELL__
-#define HAPPY_IF_GHC_GT_500 #if __GLASGOW_HASKELL__ > 500
-#define HAPPY_IF_GHC_GE_503 #if __GLASGOW_HASKELL__ >= 503
-#define HAPPY_ELIF_GHC_500 #elif __GLASGOW_HASKELL__ == 500
#define HAPPY_IF_GHC_GT_706 #if __GLASGOW_HASKELL__ > 706
#define HAPPY_ELSE #else
#define HAPPY_ENDIF #endif
@@ -127,8 +124,8 @@ happyDoAction i tk st
++ "\n")
happyShift new_state i tk st
where new_state = MINUS(n,(ILIT(1) :: FAST_INT))
- where off = indexShortOffAddr happyActOffsets st
- off_i = PLUS(off,i)
+ where off = happyAdjustOffset (indexShortOffAddr happyActOffsets st)
+ off_i = PLUS(off, i)
check = if GTE(off_i,(ILIT(0) :: FAST_INT))
then EQ(indexShortOffAddr happyCheck off_i, i)
else False
@@ -150,6 +147,8 @@ indexShortOffAddr (HappyA# arr) off =
indexShortOffAddr arr off = arr Happy_Data_Array.! off
#endif
+{-# INLINE happyLt #-}
+happyLt x y = LT(x,y)
#ifdef HAPPY_GHC
readArrayBit arr bit =
@@ -238,8 +237,8 @@ happyMonad2Reduce k nt fn j tk st sts stk =
sts1@(CONS(st1@HAPPYSTATE(action),_)) ->
let drop_stk = happyDropStk k stk
#if defined(HAPPY_ARRAY)
- off = indexShortOffAddr happyGotoOffsets st1
- off_i = PLUS(off,nt)
+ off = happyAdjustOffset (indexShortOffAddr happyGotoOffsets st1)
+ off_i = PLUS(off, nt)
new_state = indexShortOffAddr happyTable off_i
#else
_ = nt :: FAST_INT
@@ -261,8 +260,8 @@ happyDropStk n (x `HappyStk` xs) = happyDropStk MINUS(n,(ILIT(1)::FAST_INT)) xs
happyGoto nt j tk st =
DEBUG_TRACE(", goto state " ++ show IBOX(new_state) ++ "\n")
happyDoAction j tk new_state
- where off = indexShortOffAddr happyGotoOffsets st
- off_i = PLUS(off,nt)
+ where off = happyAdjustOffset (indexShortOffAddr happyGotoOffsets st)
+ off_i = PLUS(off, nt)
new_state = indexShortOffAddr happyTable off_i
#else
happyGoto action j tk st = action j j tk (HappyState action)
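
In the template, the only change is that every offset read from happyActOffsets or happyGotoOffsets now goes through happyAdjustOffset before being combined with the token or nonterminal number. Stripped of the CPP macros, the action lookup amounts to something like the following sketch (the table accessors are passed in as plain functions here, which is not how the generated code does it):

-- Plain-Haskell sketch of the adjusted lookup in happyDoAction/happyGoto.
lookupAction :: (Int -> Int)   -- happyAdjustOffset
             -> (Int -> Int)   -- indexShortOffAddr happyActOffsets
             -> (Int -> Int)   -- indexShortOffAddr happyCheck
             -> (Int -> Int)   -- indexShortOffAddr happyTable
             -> (Int -> Int)   -- happyDefActions lookup
             -> Int            -- current state
             -> Int            -- current token number
             -> Int            -- action number
lookupAction adjustOff actOff check table defAct st i =
  let off   = adjustOff (actOff st)
      off_i = off + i
  in if off_i >= 0 && check off_i == i
       then table off_i       -- the packed table holds an entry for this state
       else defAct st         -- otherwise fall back to the default action

main :: IO ()
main = print (lookupAction id (const 0) (const 0) (const 7) (const (-1)) 0 0)
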
diff --git a/tests/Makefile b/tests/Makefile
index 2539ce0..21a78db 100644
--- a/tests/Makefile
+++ b/tests/Makefile
@@ -19,7 +19,8 @@ endif
TESTS = Test.ly TestMulti.ly TestPrecedence.ly bug001.ly \
monad001.y monad002.ly precedence001.ly precedence002.y \
- bogus-token.y bug002.y Partial.ly issue91.y issue95.y \
+ bogus-token.y bug002.y Partial.ly \
+ issue91.y issue93.y issue94.y issue95.y \
AttrGrammar001.y AttrGrammar002.y \
test_rules.y monaderror.y monaderror-explist.y \
typeclass_monad001.y typeclass_monad002.ly typeclass_monad_lexer.y
diff --git a/tests/issue93.y b/tests/issue93.y
new file mode 100644
index 0000000..2e16680
--- /dev/null
+++ b/tests/issue93.y
@@ -0,0 +1,1246 @@
+-- See <https://github.com/simonmar/happy/issues/93> for more information
+-- This is an example of a grammar that has more than 2^15 entries in `happyTable` (39817).
+{
+import System.Exit
+import Data.Char
+}
+
+%name parseLit lit
+%name parseAttr export_attribute
+%name parseTy export_ty
+%name parsePat pat
+%name parseStmt stmt
+%name parseExpr expr
+%name parseItem mod_item
+%name parseSourceFileContents source_file
+%name parseBlock export_block
+%name parseImplItem impl_item
+%name parseTraitItem trait_item
+%name parseTt token_tree
+%name parseTokenStream token_stream
+%name parseTyParam ty_param
+%name parseLifetimeDef lifetime_def
+%name parseWhereClause where_clause
+%name parseGenerics generics
+
+%tokentype { Token }
+%lexer { lexNonSpace `bindP` } { Eof }
+%monad { P } { bindP } { returnP }
+
+%error { parseError }
+
+%expect 0
+
+%token
+
+
+ '=' { Equal }
+ '<' { Less }
+ '>' { Greater }
+ '!' { Exclamation }
+ '~' { Tilde }
+
+ '+' { Plus }
+ '-' { Minus }
+ '*' { Star }
+ '/' { Slash }
+ '%' { Percent }
+ '^' { Caret }
+ '&' { Ampersand }
+ '|' { Pipe }
+
+
+ '@' { At }
+ '...' { DotDotDot }
+ '..' { DotDot }
+ '.' { Dot }
+ ',' { Comma }
+ ';' { Semicolon }
+ '::' { ModSep }
+ ':' { Colon }
+ '->' { RArrow }
+ '<-' { LArrow }
+ '=>' { FatArrow }
+ '#' { Pound }
+ '$' { Dollar }
+ '?' { Question }
+ '#!' { Shebang }
+
+ '||' { PipePipe }
+ '&&' { AmpersandAmpersand }
+ '>=' { GreaterEqual }
+ '>>=' { GreaterGreaterEqual }
+ '<<' { LessLess }
+ '>>' { GreaterGreater }
+
+ '==' { EqualEqual }
+ '!=' { NotEqual }
+ '<=' { LessEqual }
+ '<<=' { LessLessEqual }
+ '-=' { MinusEqual }
+ '&=' { AmpersandEqual }
+ '|=' { PipeEqual }
+ '+=' { PlusEqual }
+ '*=' { StarEqual }
+ '/=' { SlashEqual }
+ '^=' { CaretEqual }
+ '%=' { PercentEqual }
+
+ '(' { OpenParen }
+ '[' { OpenBracket }
+ '{' { OpenBrace }
+ ')' { CloseParen }
+ ']' { CloseBracket }
+ '}' { CloseBrace }
+
+
+ byte { ByteTok{} }
+ char { CharTok{} }
+ int { IntegerTok{} }
+ float { FloatTok{} }
+ str { StrTok{} }
+ byteStr { ByteStrTok{} }
+ rawStr { StrRawTok{} }
+ rawByteStr { ByteStrRawTok{} }
+
+
+ as { IdentTok "as" }
+ box { IdentTok "box" }
+ break { IdentTok "break" }
+ const { IdentTok "const" }
+ continue { IdentTok "continue" }
+ crate { IdentTok "crate" }
+ else { IdentTok "else" }
+ enum { IdentTok "enum" }
+ extern { IdentTok "extern" }
+ false { IdentTok "false" }
+ fn { IdentTok "fn" }
+ for { IdentTok "for" }
+ if { IdentTok "if" }
+ impl { IdentTok "impl" }
+ in { IdentTok "in" }
+ let { IdentTok "let" }
+ loop { IdentTok "loop" }
+ match { IdentTok "match" }
+ mod { IdentTok "mod" }
+ move { IdentTok "move" }
+ mut { IdentTok "mut" }
+ pub { IdentTok "pub" }
+ ref { IdentTok "ref" }
+ return { IdentTok "return" }
+ Self { IdentTok "Self" }
+ self { IdentTok "self" }
+ static { IdentTok "static" }
+ struct { IdentTok "struct" }
+ super { IdentTok "super" }
+ trait { IdentTok "trait" }
+ true { IdentTok "true" }
+ type { IdentTok "type" }
+ unsafe { IdentTok "unsafe" }
+ use { IdentTok "use" }
+ where { IdentTok "where" }
+ while { IdentTok "while" }
+ do { IdentTok "do" }
+
+ abstract { IdentTok "abstract" }
+ alignof { IdentTok "alignof" }
+ become { IdentTok "become" }
+ final { IdentTok "final" }
+ macro { IdentTok "macro" }
+ offsetof { IdentTok "offsetof" }
+ override { IdentTok "override" }
+ priv { IdentTok "priv" }
+ proc { IdentTok "proc" }
+ pure { IdentTok "pure" }
+ sizeof { IdentTok "sizeof" }
+ typeof { IdentTok "typeof" }
+ unsized { IdentTok "unsized" }
+ virtual { IdentTok "virtual" }
+ yield { IdentTok "yield" }
+
+
+ default { IdentTok "default" }
+ union { IdentTok "union" }
+ catch { IdentTok "catch" }
+
+
+ outerDoc { OuterDoc }
+ innerDoc { InnerDoc }
+
+
+ IDENT { IdentTok{} }
+ '_' { Underscore }
+
+
+ LIFETIME { LifetimeTok _ }
+
+
+ ntItem { Interpolated 0 }
+ ntBlock { Interpolated 1 }
+ ntStmt { Interpolated 2 }
+ ntPat { Interpolated 3 }
+ ntExpr { Interpolated 4 }
+ ntTy { Interpolated 5 }
+ ntIdent { Interpolated 6 }
+ ntPath { Interpolated 7 }
+ ntTT { Interpolated 8 }
+ ntArm { Interpolated 9 }
+ ntImplItem { Interpolated 10 }
+ ntTraitItem { Interpolated 11 }
+ ntGenerics { Interpolated 12 }
+ ntWhereClause { Interpolated 13 }
+ ntArg { Interpolated 14 }
+ ntLit { Interpolated 15 }
+
+%nonassoc SEG
+%nonassoc mut DEF EQ '::'
+%nonassoc IDENT ntIdent default union catch self
+%nonassoc box return break continue IMPLTRAIT LAMBDA
+%right '=' '>>=' '<<=' '-=' '+=' '*=' '/=' '^=' '|=' '&=' '%='
+%right '<-'
+%nonassoc SINGLERNG
+%nonassoc INFIXRNG
+%nonassoc POSTFIXRNG
+%nonassoc PREFIXRNG
+%nonassoc '..' '...'
+%left '||'
+%left '&&'
+%left '==' '!=' '<' '>' '<=' '>='
+%left '|'
+%left '^'
+%left '&'
+%left '<<' '>>'
+%left '+' '-'
+%left '*' '/' '%'
+%nonassoc ':' as
+%nonassoc UNARY
+%nonassoc FIELD VIS PATH WHERE NOSEMI
+%nonassoc '?' '.'
+%nonassoc '{' ntBlock '[' '(' '!' ';'
+
+%%
+
+ident :: { Int }
+ : ntIdent { 0 }
+ | union { 1 }
+ | default { 2 }
+ | catch { 3 }
+ | IDENT { 4 }
+
+gt :: { Int }
+ : {- empty -} { 5 }
+
+some(p) :: { Int }
+ : some(p) p { 6 }
+ | p { 7 }
+
+many(p) :: { Int }
+ : some(p) { 8 }
+ | {- empty -} { 9 }
+
+sep_by1(p,sep) :: { Int }
+ : sep_by1(p,sep) sep p { 10 }
+ | p { 11 }
+
+sep_by(p,sep) :: { Int }
+ : sep_by1(p,sep) { 12 }
+ | {- empty -} { 13 }
+
+sep_by1T(p,sep) :: { Int }
+ : sep_by1(p,sep) sep { 14 }
+ | sep_by1(p,sep) { 15 }
+
+sep_byT(p,sep) :: { Int }
+ : sep_by1T(p,sep) { 16 }
+ | {- empty -} { 17 }
+
+source_file :: { Int }
+ : inner_attrs many(mod_item) { 18 }
+ | many(mod_item) { 19 }
+
+outer_attribute :: { Int }
+ : '#' '[' mod_path token_stream ']' { 20 }
+ | outerDoc { 21 }
+
+inner_attribute :: { Int }
+ : '#' '!' '[' mod_path token_stream ']' { 22 }
+ | '#!' '[' mod_path token_stream ']' { 23 }
+ | innerDoc { 24 }
+
+inner_attrs :: { Int }
+ : inner_attrs inner_attribute { 25 }
+ | inner_attribute { 26 }
+
+lit :: { Int }
+ : ntLit { 27 }
+ | byte { 28 }
+ | char { 29 }
+ | int { 30 }
+ | float { 31 }
+ | true { 32 }
+ | false { 33 }
+ | string { 34 }
+
+string :: { Int }
+ : str { 35 }
+ | rawStr { 36 }
+ | byteStr { 37 }
+ | rawByteStr { 38 }
+
+qual_path(segs) :: { Int }
+ : '<' qual_path_suf(segs) { 39 }
+ | lt_ty_qual_path as ty_path '>' '::' segs { 40 }
+
+qual_path_suf(segs) :: { Int }
+ : ty '>' '::' segs { 41 }
+ | ty as ty_path '>' '::' segs { 42 }
+
+lt_ty_qual_path :: { Int }
+ : '<<' qual_path_suf(path_segments_without_colons) { 43 }
+
+generic_values :: { Int }
+ : '<' sep_by1(lifetime,',') ',' sep_by1T(ty,',') gt '>' { 45 }
+ | '<' sep_by1(lifetime,',') ',' sep_by1T(binding,',') gt '>' { 46 }
+ | '<' sep_by1T(lifetime,',') gt '>' { 47 }
+ | '<' sep_by1(ty,',') ',' sep_by1T(binding,',') gt '>' { 48 }
+ | '<' sep_by1T(ty,',') gt '>' { 49 }
+ | '<' sep_by1T(binding,',') gt '>' { 50 }
+ | '<' gt '>' { 51 }
+ | lt_ty_qual_path ',' sep_by1T(ty,',') gt '>' { 53 }
+ | lt_ty_qual_path ',' sep_by1T(binding,',') gt '>' { 54 }
+ | lt_ty_qual_path gt '>' { 55 }
+
+binding :: { Int }
+ : ident '=' ty { 56 }
+
+ty_path :: { Int }
+ : ntPath { 57 }
+ | path_segments_without_colons { 58 }
+ | '::' path_segments_without_colons { 59 }
+
+ty_qual_path :: { Int }
+ : qual_path(path_segments_without_colons) { 60 }
+
+path_segments_without_colons :: { Int }
+ : sep_by1(path_segment_without_colons, '::') %prec SEG { 61 }
+
+path_segment_without_colons :: { Int }
+ : self_or_ident path_parameter1 { 62 }
+
+path_parameter1 :: { Int }
+ : generic_values { 63 }
+ | '(' sep_byT(ty,',') ')' { 64 }
+ | '(' sep_byT(ty,',') ')' '->' ty_no_plus { 65 }
+ | {- empty -} %prec IDENT { 66 }
+
+expr_path :: { Int }
+ : ntPath { 67 }
+ | path_segments_with_colons { 68 }
+ | '::' path_segments_with_colons { 69 }
+
+expr_qual_path :: { Int }
+ : qual_path(path_segments_with_colons) { 70 }
+
+path_segments_with_colons :: { Int }
+ : self_or_ident { 71 }
+ | path_segments_with_colons '::' self_or_ident { 72 }
+ | path_segments_with_colons '::' generic_values { 73 }
+
+mod_path :: { Int }
+ : ntPath { 74 }
+ | self_or_ident { 75 }
+ | '::' self_or_ident { 76 }
+ | mod_path '::' ident { 77 }
+
+lifetime :: { Int }
+ : LIFETIME { 78 }
+
+trait_ref :: { Int }
+ : ty_path { 79 }
+
+ty :: { Int }
+ : ty_no_plus { 80 }
+ | poly_trait_ref_mod_bound '+' sep_by1T(ty_param_bound_mod,'+') { 81 }
+
+ty_no_plus :: { Int }
+ : ntTy { 82 }
+ | no_for_ty { 83 }
+ | for_ty_no_plus { 84 }
+
+ty_prim :: { Int }
+ : no_for_ty_prim { 85 }
+ | for_ty_no_plus { 86 }
+ | poly_trait_ref_mod_bound '+' sep_by1T(ty_param_bound_mod,'+') { 87 }
+
+no_for_ty :: { Int }
+ : no_for_ty_prim { 88 }
+ | '(' ')' { 89 }
+ | '(' ty ')' { 90 }
+ | '(' ty ',' ')' { 91 }
+ | '(' ty ',' sep_by1T(ty,',') ')' { 92 }
+ | ty_qual_path { 93 }
+
+no_for_ty_prim :: { Int }
+ : '_' { 94 }
+ | '!' { 95 }
+ | '[' ty ']' { 96 }
+ | '*' ty_no_plus { 97 }
+ | '*' const ty_no_plus { 98 }
+ | '*' mut ty_no_plus { 99 }
+ | '&' ty_no_plus { 100 }
+ | '&' lifetime ty_no_plus { 101 }
+ | '&' mut ty_no_plus { 102 }
+ | '&' lifetime mut ty_no_plus { 103 }
+ | '&&' ty_no_plus { 104 }
+ | '&&' lifetime ty_no_plus { 105 }
+ | '&&' mut ty_no_plus { 106 }
+ | '&&' lifetime mut ty_no_plus { 107 }
+ | ty_path %prec PATH { 108 }
+ | ty_mac { 109 }
+ | unsafe extern abi fn fn_decl(arg_general) { 110 }
+ | unsafe fn fn_decl(arg_general) { 111 }
+ | extern abi fn fn_decl(arg_general) { 112 }
+ | fn fn_decl(arg_general) { 113 }
+ | typeof '(' expr ')' { 114 }
+ | '[' ty ';' expr ']' { 115 }
+ | '?' trait_ref { 116 }
+ | '?' for_lts trait_ref { 117 }
+
+for_ty_no_plus :: { Int }
+ : for_lts unsafe extern abi fn fn_decl(arg_general) { 118 }
+ | for_lts unsafe fn fn_decl(arg_general) { 119 }
+ | for_lts extern abi fn fn_decl(arg_general) { 120 }
+ | for_lts fn fn_decl(arg_general) { 121 }
+ | for_lts trait_ref { 122 }
+
+impl_ty :: { Int }
+ : impl sep_by1(ty_param_bound_mod,'+') %prec IMPLTRAIT { 123 }
+
+lifetime_mut :: { Int }
+ : lifetime mut { 124 }
+ | lifetime { 125 }
+ | mut { 126 }
+ | {- empty -} { 127 }
+
+fn_decl(arg) :: { Int }
+ : '(' sep_by1(arg,',') ',' '...' ')' ret_ty { 128 }
+ | '(' sep_byT(arg,',') ')' ret_ty { 129 }
+
+fn_decl_with_self_general :: { Int }
+ : '(' arg_self_general ',' sep_byT(arg_general,',') ')' ret_ty { 130 }
+ | '(' arg_self_general ')' ret_ty { 131 }
+ | '(' ')' ret_ty { 132 }
+
+fn_decl_with_self_named :: { Int }
+ : '(' arg_self_named ',' sep_by1(arg_named,',') ',' ')' ret_ty { 133 }
+ | '(' arg_self_named ',' sep_by1(arg_named,',') ')' ret_ty { 134 }
+ | '(' arg_self_named ',' ')' ret_ty { 135 }
+ | '(' arg_self_named ')' ret_ty { 136 }
+ | fn_decl(arg_named) { 137 }
+
+ty_param_bound :: { Int }
+ : lifetime { 138 }
+ | poly_trait_ref { 139 }
+
+poly_trait_ref_mod_bound :: { Int }
+ : poly_trait_ref { 140 }
+ | '?' poly_trait_ref { 141 }
+
+ty_param_bound_mod :: { Int }
+ : ty_param_bound { 142 }
+ | '?' poly_trait_ref { 143 }
+
+abi :: { Int }
+ : str { 144 }
+ | {- empty -} { 145 }
+
+ret_ty :: { Int }
+ : '->' ty_no_plus { 146 }
+ | '->' impl_ty { 147 }
+ | {- empty -} { 148 }
+
+poly_trait_ref :: { Int }
+ : trait_ref { 149 }
+ | for_lts trait_ref { 150 }
+
+for_lts :: { Int }
+ : for '<' sep_byT(lifetime_def,',') '>' { 151 }
+
+lifetime_def :: { Int }
+ : many(outer_attribute) lifetime ':' sep_by1T(lifetime,'+') { 152 }
+ | many(outer_attribute) lifetime { 153 }
+
+arg_named :: { Int }
+ : ntArg { 154 }
+ | pat ':' ty { 155 }
+
+arg_general :: { Int }
+ : ntArg { 156 }
+ | ty { 157 }
+ | '_' ':' ty { 158 }
+ | ident ':' ty { 159 }
+ | mut ident ':' ty { 160 }
+ | '&' '_' ':' ty { 161 }
+ | '&' ident ':' ty { 162 }
+ | '&&' '_' ':' ty { 163 }
+ | '&&' ident ':' ty { 164 }
+
+arg_self_general :: { Int }
+ : mut self { 165 }
+ | self ':' ty { 166 }
+ | mut self ':' ty { 167 }
+ | arg_general { 168 }
+
+arg_self_named :: { Int }
+ : self { 169 }
+ | mut self { 170 }
+ | '&' self { 171 }
+ | '&' lifetime self { 172 }
+ | '&' mut self { 173 }
+ | '&' lifetime mut self { 174 }
+ | self ':' ty { 175 }
+ | mut self ':' ty { 176 }
+
+lambda_arg :: { Int }
+ : ntArg { 177 }
+ | pat ':' ty { 178 }
+ | pat { 179 }
+
+pat :: { Int }
+ : ntPat { 180 }
+ | '_' { 181 }
+ | '&' mut pat { 182 }
+ | '&' pat { 183 }
+ | '&&' mut pat { 184 }
+ | '&&' pat { 185 }
+ | lit_expr { 186 }
+ | '-' lit_expr { 187 }
+ | box pat { 188 }
+ | binding_mode1 ident '@' pat { 189 }
+ | binding_mode1 ident { 190 }
+ | ident '@' pat { 191 }
+ | expr_path { 192 }
+ | expr_qual_path { 193 }
+ | lit_or_path '...' lit_or_path { 194 }
+ | expr_path '{' '..' '}' { 195 }
+ | expr_path '{' pat_fields '}' { 196 }
+ | expr_path '(' pat_tup ')' { 197 }
+ | expr_mac { 198 }
+ | '[' pat_slice ']' { 199 }
+ | '(' pat_tup ')' { 200 }
+
+pat_tup :: { Int }
+ : sep_by1(pat,',') ',' '..' ',' sep_by1(pat,',') { 201 }
+ | sep_by1(pat,',') ',' '..' ',' sep_by1(pat,',') ',' { 202 }
+ | sep_by1(pat,',') ',' '..' { 203 }
+ | sep_by1(pat,',') { 204 }
+ | sep_by1(pat,',') ',' { 205 }
+ | '..' ',' sep_by1(pat,',') { 206 }
+ | '..' ',' sep_by1(pat,',') ',' { 207 }
+ | '..' { 208 }
+ | {- empty -} { 209 }
+
+pat_slice :: { Int }
+ : sep_by1(pat,',') ',' '..' ',' sep_by1T(pat,',') { 210 }
+ | sep_by1(pat,',') ',' '..' { 211 }
+ | sep_by1(pat,',') '..' ',' sep_by1T(pat,',') { 212 }
+ | sep_by1(pat,',') '..' { 213 }
+ | sep_by1T(pat,',') { 214 }
+ | '..' ',' sep_by1T(pat,',') { 215 }
+ | '..' { 216 }
+ | {- empty -} { 217 }
+
+lit_or_path :: { Int }
+ : expr_path { 218 }
+ | expr_qual_path { 219 }
+ | '-' lit_expr { 220 }
+ | lit_expr { 221 }
+
+pat_fields :: { Int }
+ : sep_byT(pat_field,',') { 222 }
+ | sep_by1(pat_field,',') ',' '..' { 223 }
+
+pat_field :: { Int }
+ : binding_mode ident { 224 }
+ | box binding_mode ident { 225 }
+ | binding_mode ident ':' pat { 226 }
+
+binding_mode1 :: { Int }
+ : ref mut { 227 }
+ | ref { 228 }
+ | mut { 229 }
+
+binding_mode :: { Int }
+ : binding_mode1 { 230 }
+ | {- empty -} { 231 }
+
+gen_expression(lhs,rhs,rhs2) :: { Int }
+ : ntExpr { 232 }
+ | lit_expr { 233 }
+ | '[' sep_byT(expr,',') ']' { 234 }
+ | '[' inner_attrs sep_byT(expr,',') ']' { 235 }
+ | '[' expr ';' expr ']' { 236 }
+ | expr_mac { 237 }
+ | expr_path %prec PATH { 238 }
+ | expr_qual_path { 239 }
+ | '*' rhs %prec UNARY { 240 }
+ | '!' rhs %prec UNARY { 241 }
+ | '-' rhs %prec UNARY { 242 }
+ | '&' rhs %prec UNARY { 243 }
+ | '&' mut rhs %prec UNARY { 244 }
+ | '&&' rhs %prec UNARY { 245 }
+ | '&&' mut rhs %prec UNARY { 246 }
+ | box rhs %prec UNARY { 247 }
+ | left_gen_expression(lhs,rhs,rhs2) { 248 }
+ | '..' rhs2 %prec PREFIXRNG { 249 }
+ | '...' rhs2 %prec PREFIXRNG { 250 }
+ | '..' %prec SINGLERNG { 251 }
+ | '...' %prec SINGLERNG { 252 }
+ | return { 253 }
+ | return rhs { 254 }
+ | continue { 255 }
+ | continue lifetime { 256 }
+ | break { 257 }
+ | break rhs { 258 }
+ | break lifetime { 259 }
+ | break lifetime rhs %prec break { 260 }
+ | move lambda_args rhs %prec LAMBDA { 261 }
+ | lambda_args rhs %prec LAMBDA { 262 }
+
+left_gen_expression(lhs,rhs,rhs2) :: { Int }
+ : postfix_blockexpr(lhs) { 263 }
+ | lhs '[' expr ']' { 264 }
+ | lhs '(' sep_byT(expr,',') ')' { 265 }
+ | lhs ':' ty_no_plus { 266 }
+ | lhs as ty_no_plus { 267 }
+ | lhs '*' rhs { 268 }
+ | lhs '/' rhs { 269 }
+ | lhs '%' rhs { 270 }
+ | lhs '+' rhs { 271 }
+ | lhs '-' rhs { 272 }
+ | lhs '<<' rhs { 273 }
+ | lhs '>>' rhs { 274 }
+ | lhs '&' rhs { 275 }
+ | lhs '^' rhs { 276 }
+ | lhs '|' rhs { 277 }
+ | lhs '==' rhs { 278 }
+ | lhs '!=' rhs { 279 }
+ | lhs '<' rhs { 280 }
+ | lhs '>' rhs { 281 }
+ | lhs '<=' rhs { 282 }
+ | lhs '>=' rhs { 283 }
+ | lhs '&&' rhs { 284 }
+ | lhs '||' rhs { 285 }
+ | lhs '..' %prec POSTFIXRNG { 286 }
+ | lhs '...' %prec POSTFIXRNG { 287 }
+ | lhs '..' rhs2 %prec INFIXRNG { 288 }
+ | lhs '...' rhs2 %prec INFIXRNG { 289 }
+ | lhs '<-' rhs { 290 }
+ | lhs '=' rhs { 291 }
+ | lhs '>>=' rhs { 292 }
+ | lhs '<<=' rhs { 293 }
+ | lhs '-=' rhs { 294 }
+ | lhs '+=' rhs { 295 }
+ | lhs '*=' rhs { 296 }
+ | lhs '/=' rhs { 297 }
+ | lhs '^=' rhs { 298 }
+ | lhs '|=' rhs { 299 }
+ | lhs '&=' rhs { 300 }
+ | lhs '%=' rhs { 301 }
+
+postfix_blockexpr(lhs) :: { Int }
+ : lhs '?' { 302 }
+ | lhs '.' ident %prec FIELD { 303 }
+ | lhs '.' ident '(' sep_byT(expr,',') ')' { 304 }
+ | lhs '.' ident '::' '<' sep_byT(ty,',') '>' '(' sep_byT(expr,',') ')' { 305 }
+ | lhs '.' int { 306 }
+
+expr :: { Int }
+ : gen_expression(expr,expr,expr) { 307 }
+ | paren_expr { 308 }
+ | struct_expr { 309 }
+ | block_expr { 310 }
+ | lambda_expr_block { 311 }
+
+nostruct_expr :: { Int }
+ : gen_expression(nostruct_expr,nostruct_expr,nonstructblock_expr) { 312 }
+ | paren_expr { 313 }
+ | block_expr { 314 }
+
+nonstructblock_expr :: { Int }
+ : gen_expression(nonstructblock_expr,nostruct_expr,nonstructblock_expr) { 315 }
+ | paren_expr { 316 }
+ | block_like_expr { 317 }
+ | unsafe inner_attrs_block { 318 }
+
+nonblock_expr :: { Int }
+ : gen_expression(nonblock_expr,expr,expr) { 319 }
+ | paren_expr { 320 }
+ | struct_expr { 321 }
+ | lambda_expr_block { 322 }
+
+blockpostfix_expr :: { Int }
+ : postfix_blockexpr(block_like_expr) { 323 }
+ | postfix_blockexpr(vis_safety_block) { 324 }
+ | left_gen_expression(blockpostfix_expr,expr,expr) { 325 }
+
+lit_expr :: { Int }
+ : lit { 326 }
+
+block_expr :: { Int }
+ : block_like_expr { 327 }
+ | inner_attrs_block { 328 }
+ | unsafe inner_attrs_block { 329 }
+
+
+block_like_expr :: { Int }
+ : if_expr { 330 }
+ | loop inner_attrs_block { 331 }
+ | lifetime ':' loop inner_attrs_block { 332 }
+ | for pat in nostruct_expr inner_attrs_block { 333 }
+ | lifetime ':' for pat in nostruct_expr inner_attrs_block { 334 }
+ | while nostruct_expr inner_attrs_block { 335 }
+ | lifetime ':' while nostruct_expr inner_attrs_block { 336 }
+ | while let pat '=' nostruct_expr inner_attrs_block { 337 }
+ | lifetime ':' while let pat '=' nostruct_expr inner_attrs_block { 338 }
+ | match nostruct_expr '{' '}' { 339 }
+ | match nostruct_expr '{' inner_attrs '}' { 340 }
+ | match nostruct_expr '{' arms '}' { 341 }
+ | match nostruct_expr '{' inner_attrs arms '}' { 342 }
+ | expr_path '!' '{' token_stream '}' { 343 }
+ | do catch inner_attrs_block { 344 }
+
+if_expr :: { Int }
+ : if nostruct_expr block else_expr { 345 }
+ | if let pat '=' nostruct_expr block else_expr { 346 }
+
+else_expr :: { Int }
+ : else block { 347 }
+ | else if_expr { 348 }
+ | {- empty -} { 349 }
+
+arms :: { Int }
+ : ntArm { 350 }
+ | ntArm arms { 351 }
+ | many(outer_attribute) sep_by1(pat,'|') arm_guard '=>' expr_arms { 352 }
+
+arm_guard :: { Int }
+ : {- empty -} { 353 }
+ | if expr { 354 }
+
+comma_arms :: { Int }
+ : {- empty -} { 355 }
+ | ',' { 356 }
+ | ',' arms { 357 }
+
+expr_arms :: { Int }
+ : nonblock_expr comma_arms { 358 }
+ | blockpostfix_expr comma_arms { 359 }
+ | vis_safety_block comma_arms { 360 }
+ | vis_safety_block arms { 361 }
+ | block_like_expr comma_arms { 362 }
+ | block_like_expr arms { 363 }
+
+paren_expr :: { Int }
+ : '(' ')' { 364 }
+ | '(' inner_attrs ')' { 365 }
+ | '(' expr ')' { 366 }
+ | '(' inner_attrs expr ')' { 367 }
+ | '(' expr ',' ')' { 368 }
+ | '(' inner_attrs expr ',' ')' { 369 }
+ | '(' expr ',' sep_by1T(expr,',') ')' { 370 }
+ | '(' inner_attrs expr ',' sep_by1T(expr,',') ')' { 371 }
+
+lambda_expr_block :: { Int }
+ : move lambda_args '->' ty_no_plus block { 372 }
+ | lambda_args '->' ty_no_plus block { 373 }
+
+lambda_args :: { Int }
+ : '||' { 374 }
+ | '|' sep_byT(lambda_arg,',') '|' { 375 }
+
+struct_expr :: { Int }
+ : expr_path '{' '..' expr '}' { 376 }
+ | expr_path '{' inner_attrs '..' expr '}' { 377 }
+ | expr_path '{' sep_by1(field,',') ',' '..' expr '}' { 378 }
+ | expr_path '{' inner_attrs sep_by1(field,',') ',' '..' expr '}' { 379 }
+ | expr_path '{' sep_byT(field,',') '}' { 380 }
+ | expr_path '{' inner_attrs sep_byT(field,',') '}' { 381 }
+
+field :: { Int }
+ : ident ':' expr { 382 }
+ | ident { 383 }
+
+vis_safety_block :: { Int }
+ : pub_or_inherited safety inner_attrs_block { 384 }
+
+vis_union_nonblock_expr :: { Int }
+ : union_expr { 385 }
+ | left_gen_expression(vis_union_nonblock_expr, expr, expr) { 386 }
+
+union_expr :: { Int }
+ : pub_or_inherited union { 387 }
+
+stmt :: { Int }
+ : ntStmt { 388 }
+ | many(outer_attribute) let pat ':' ty initializer ';' { 389 }
+ | many(outer_attribute) let pat initializer ';' { 390 }
+ | many(outer_attribute) nonblock_expr ';' { 391 }
+ | many(outer_attribute) block_like_expr ';' { 392 }
+ | many(outer_attribute) blockpostfix_expr ';' { 393 }
+ | many(outer_attribute) vis_union_nonblock_expr ';' { 394 }
+ | many(outer_attribute) block_like_expr %prec NOSEMI { 395 }
+ | many(outer_attribute) vis_safety_block ';' { 396 }
+ | many(outer_attribute) vis_safety_block %prec NOSEMI { 397 }
+ | gen_item(pub_or_inherited) { 398 }
+ | many(outer_attribute) expr_path '!' ident '[' token_stream ']' ';' { 399 }
+ | many(outer_attribute) expr_path '!' ident '(' token_stream ')' ';' { 400 }
+ | many(outer_attribute) expr_path '!' ident '{' token_stream '}' { 401 }
+
+pub_or_inherited :: { Int }
+ : pub %prec VIS { 402 }
+ | {- empty -} %prec VIS { 403 }
+
+stmtOrSemi :: { Int }
+ : ';' { 404 }
+ | stmt { 405 }
+
+stmts_possibly_no_semi :: { Int }
+ : stmtOrSemi stmts_possibly_no_semi { 406 }
+ | stmtOrSemi { 407 }
+ | many(outer_attribute) nonblock_expr { 408 }
+ | many(outer_attribute) blockpostfix_expr { 409 }
+
+initializer :: { Int }
+ : '=' expr { 410 }
+ | {- empty -} { 411 }
+
+block :: { Int }
+ : ntBlock { 412 }
+ | '{' '}' { 413 }
+ | '{' stmts_possibly_no_semi '}' { 414 }
+
+inner_attrs_block :: { Int }
+ : block { 415 }
+ | '{' inner_attrs '}' { 416 }
+ | '{' inner_attrs stmts_possibly_no_semi '}' { 417 }
+
+gen_item(vis) :: { Int }
+ : many(outer_attribute) vis static ident ':' ty '=' expr ';' { 418 }
+ | many(outer_attribute) vis static mut ident ':' ty '=' expr ';' { 419 }
+ | many(outer_attribute) vis const ident ':' ty '=' expr ';' { 420 }
+ | many(outer_attribute) vis type ident generics where_clause '=' ty ';' { 421 }
+ | many(outer_attribute) vis use view_path ';' { 422 }
+ | many(outer_attribute) vis safety extern crate ident ';' { 423 }
+ | many(outer_attribute) vis safety extern crate ident as ident ';' { 424 }
+ | many(outer_attribute) vis const safety fn ident generics fn_decl(arg_named) where_clause inner_attrs_block { 425 }
+ | many(outer_attribute) vis safety extern abi fn ident generics fn_decl(arg_named) where_clause inner_attrs_block { 426 }
+ | many(outer_attribute) vis safety fn ident generics fn_decl(arg_named) where_clause inner_attrs_block { 427 }
+ | many(outer_attribute) vis mod ident ';' { 428 }
+ | many(outer_attribute) vis mod ident '{' many(mod_item) '}' { 429 }
+ | many(outer_attribute) vis mod ident '{' inner_attrs many(mod_item) '}' { 430 }
+ | many(outer_attribute) vis safety extern abi '{' many(foreign_item) '}' { 431 }
+ | many(outer_attribute) vis safety extern abi '{' inner_attrs many(foreign_item) '}' { 432 }
+ | many(outer_attribute) vis struct ident generics struct_decl_args { 433 }
+ | many(outer_attribute) vis union ident generics struct_decl_args { 434 }
+ | many(outer_attribute) vis enum ident generics where_clause '{' sep_byT(enum_def,',') '}' { 435 }
+ | many(outer_attribute) vis safety trait ident generics where_clause '{' many(trait_item) '}' { 437 }
+ | many(outer_attribute) vis safety impl generics ty_prim where_clause '{' impl_items '}' { 438 }
+ | many(outer_attribute) vis default safety impl generics ty_prim where_clause '{' impl_items '}' { 439 }
+ | many(outer_attribute) vis safety impl generics '(' ty_no_plus ')' where_clause '{' impl_items '}' { 440 }
+ | many(outer_attribute) vis default safety impl generics '(' ty_no_plus ')' where_clause '{' impl_items '}' { 441 }
+ | many(outer_attribute) vis safety impl generics '!' trait_ref for ty where_clause '{' impl_items '}' { 442 }
+ | many(outer_attribute) vis default safety impl generics '!' trait_ref for ty where_clause '{' impl_items '}' { 443 }
+ | many(outer_attribute) vis safety impl generics trait_ref for ty where_clause '{' impl_items '}' { 444 }
+ | many(outer_attribute) vis default safety impl generics trait_ref for ty where_clause '{' impl_items '}' { 445 }
+ | many(outer_attribute) vis safety impl generics trait_ref for '..' '{' '}' { 446 }
+
+mod_item :: { Int }
+ : ntItem { 447 }
+ | gen_item(vis) { 448 }
+ | many(outer_attribute) expr_path '!' ident '[' token_stream ']' ';' { 449 }
+ | many(outer_attribute) expr_path '!' '[' token_stream ']' ';' { 450 }
+ | many(outer_attribute) expr_path '!' ident '(' token_stream ')' ';' { 451 }
+ | many(outer_attribute) expr_path '!' '(' token_stream ')' ';' { 452 }
+ | many(outer_attribute) expr_path '!' ident '{' token_stream '}' { 453 }
+ | many(outer_attribute) expr_path '!' '{' token_stream '}' { 454 }
+
+foreign_item :: { Int }
+ : many(outer_attribute) vis static ident ':' ty ';' { 455 }
+ | many(outer_attribute) vis static mut ident ':' ty ';' { 456 }
+ | many(outer_attribute) vis fn ident generics fn_decl(arg_named) where_clause ';' { 457 }
+
+
+
+generics :: { Int }
+ : ntGenerics { 458 }
+ | '<' sep_by1(lifetime_def,',') ',' sep_by1T(ty_param,',') gt '>' { 459 }
+ | '<' sep_by1T(lifetime_def,',') gt '>' { 460 }
+ | '<' sep_by1T(ty_param,',') gt '>' { 461 }
+ | '<' gt '>' { 462 }
+ | {- empty -} { 463 }
+
+ty_param :: { Int }
+ : many(outer_attribute) ident { 464 }
+ | many(outer_attribute) ident ':' sep_by1T(ty_param_bound_mod,'+') { 465 }
+ | many(outer_attribute) ident '=' ty { 466 }
+ | many(outer_attribute) ident ':' sep_by1T(ty_param_bound_mod,'+') '=' ty { 467 }
+
+struct_decl_args :: { Int }
+ : where_clause ';' { 468 }
+ | where_clause '{' sep_byT(struct_decl_field,',') '}' { 469 }
+ | '(' sep_byT(tuple_decl_field,',') ')' where_clause ';' { 470 }
+
+struct_decl_field :: { Int }
+ : many(outer_attribute) vis ident ':' ty { 471 }
+
+tuple_decl_field :: { Int }
+ : many(outer_attribute) vis ty { 472 }
+
+enum_def :: { Int }
+ : many(outer_attribute) ident '{' sep_byT(struct_decl_field,',') '}' { 473 }
+ | many(outer_attribute) ident '(' sep_byT(tuple_decl_field,',') ')' { 474 }
+ | many(outer_attribute) ident initializer { 475 }
+
+where_clause :: { Int }
+ : {- empty -} { 476 }
+ | ntWhereClause { 477 }
+ | where sep_by(where_predicate,',') %prec WHERE { 478 }
+ | where sep_by1(where_predicate,',') ',' %prec WHERE { 479 }
+
+where_predicate :: { Int }
+ : lifetime { 480 }
+ | lifetime ':' sep_by1T(lifetime,'+') { 481 }
+ | no_for_ty %prec EQ { 482 }
+ | no_for_ty '=' ty { 483 }
+ | no_for_ty ':' sep_by1T(ty_param_bound_mod,'+') { 484 }
+ | for_lts no_for_ty { 485 }
+ | for_lts no_for_ty ':' sep_by1T(ty_param_bound_mod,'+') { 486 }
+
+impl_items :: { Int }
+ : many(impl_item) { 487 }
+ | inner_attrs many(impl_item) { 488 }
+
+impl_item :: { Int }
+ : many(outer_attribute) vis def type ident '=' ty ';' { 489 }
+ | many(outer_attribute) vis def const ident ':' ty '=' expr ';' { 490 }
+ | many(outer_attribute) def mod_mac { 491 }
+
+trait_item :: { Int }
+ : ntTraitItem { 494 }
+ | many(outer_attribute) const ident ':' ty initializer ';' { 495 }
+ | many(outer_attribute) mod_mac { 496 }
+ | many(outer_attribute) type ident ';' { 497 }
+ | many(outer_attribute) type ident '=' ty ';' { 498 }
+ | many(outer_attribute) type ident ':' sep_by1T(ty_param_bound_mod,'+') ';' { 499 }
+
+safety :: { Int }
+ : {- empty -} { 503 }
+ | unsafe { 504 }
+
+ext_abi :: { Int }
+ : {- empty -} { 505 }
+ | extern abi { 506 }
+
+vis :: { Int }
+ : {- empty -} %prec VIS { 507 }
+ | pub %prec VIS { 508 }
+ | pub '(' crate ')' { 509 }
+ | pub '(' in mod_path ')' { 510 }
+ | pub '(' super ')' { 511 }
+ | pub '(' self ')' { 512 }
+
+def :: { Int }
+ : {- empty -} %prec DEF { 513 }
+ | default { 514 }
+
+view_path :: { Int }
+ : '::' sep_by1(self_or_ident,'::') { 515 }
+ | '::' sep_by1(self_or_ident,'::') as ident { 516 }
+ | '::' '*' { 517 }
+ | '::' sep_by1(self_or_ident,'::') '::' '*' { 518 }
+ | '::' sep_by1(self_or_ident,'::') '::' '{' sep_byT(plist,',') '}' { 519 }
+ | '::' '{' sep_byT(plist,',') '}' { 520 }
+ | sep_by1(self_or_ident,'::') { 521 }
+ | sep_by1(self_or_ident,'::') as ident { 522 }
+ | '*' { 523 }
+ | sep_by1(self_or_ident,'::') '::' '*' { 524 }
+ | sep_by1(self_or_ident,'::') '::' '{' sep_byT(plist,',') '}' { 525 }
+ | '{' sep_byT(plist,',') '}' { 526 }
+
+self_or_ident :: { Int }
+ : ident { 527 }
+ | self { 528 }
+ | Self { 529 }
+ | super { 530 }
+
+plist :: { Int }
+ : self_or_ident { 531 }
+ | self_or_ident as ident { 532 }
+
+expr_mac :: { Int }
+ : expr_path '!' '[' token_stream ']' { 533 }
+ | expr_path '!' '(' token_stream ')' { 534 }
+
+ty_mac :: { Int }
+ : ty_path '!' '[' token_stream ']' { 535 }
+ | ty_path '!' '{' token_stream '}' { 536 }
+ | ty_path '!' '(' token_stream ')' { 537 }
+
+mod_mac :: { Int }
+ : mod_path '!' '[' token_stream ']' ';' { 538 }
+ | mod_path '!' '{' token_stream '}' { 539 }
+ | mod_path '!' '(' token_stream ')' ';' { 540 }
+
+token_stream :: { Int }
+ : {- empty -} { 541 }
+ | some(token_tree) { 542 }
+
+token_tree :: { Int }
+ : ntTT { 543 }
+ | '(' token_stream ')' { 544 }
+ | '{' token_stream '}' { 545 }
+ | '[' token_stream ']' { 546 }
+ | token { 547 }
+
+token :: { Int }
+ : '=' { 548 }
+ | '<' { 549 }
+ | '>' { 550 }
+ | '!' { 551 }
+ | '~' { 552 }
+ | '-' { 553 }
+ | '/' { 554 }
+ | '+' { 555 }
+ | '*' { 556 }
+ | '%' { 557 }
+ | '^' { 558 }
+ | '&' { 559 }
+ | '|' { 560 }
+ | '<<=' { 561 }
+ | '>>=' { 562 }
+ | '-=' { 563 }
+ | '&=' { 564 }
+ | '|=' { 565 }
+ | '+=' { 566 }
+ | '*=' { 567 }
+ | '/=' { 568 }
+ | '^=' { 569 }
+ | '%=' { 571 }
+ | '||' { 572 }
+ | '&&' { 573 }
+ | '==' { 574 }
+ | '!=' { 575 }
+ | '<=' { 576 }
+ | '>=' { 577 }
+ | '<<' { 578 }
+ | '>>' { 579 }
+ | '@' { 580 }
+ | '...' { 581 }
+ | '..' { 582 }
+ | '.' { 583 }
+ | ',' { 584 }
+ | ';' { 585 }
+ | '::' { 586 }
+ | ':' { 587 }
+ | '->' { 588 }
+ | '<-' { 589 }
+ | '=>' { 590 }
+ | '#' { 591 }
+ | '$' { 592 }
+ | '?' { 593 }
+ | '#!' { 594 }
+ | byte { 595 }
+ | char { 596 }
+ | int { 597 }
+ | float { 598 }
+ | str { 599 }
+ | byteStr { 600 }
+ | rawStr { 601 }
+ | rawByteStr { 602 }
+ | as { 603 }
+ | box { 604 }
+ | break { 605 }
+ | const { 606 }
+ | continue { 607 }
+ | crate { 608 }
+ | else { 609 }
+ | enum { 610 }
+ | extern { 611 }
+ | false { 612 }
+ | fn { 613 }
+ | for { 614 }
+ | if { 615 }
+ | impl { 616 }
+ | in { 617 }
+ | let { 618 }
+ | loop { 619 }
+ | match { 620 }
+ | mod { 621 }
+ | move { 622 }
+ | mut { 623 }
+ | pub { 624 }
+ | ref { 625 }
+ | return { 626 }
+ | Self { 627 }
+ | self { 628 }
+ | static { 629 }
+ | struct { 630 }
+ | super { 631 }
+ | trait { 632 }
+ | true { 633 }
+ | type { 634 }
+ | unsafe { 635 }
+ | use { 636 }
+ | where { 637 }
+ | while { 638 }
+ | abstract { 639 }
+ | alignof { 640 }
+ | become { 641 }
+ | do { 642 }
+ | final { 643 }
+ | macro { 644 }
+ | offsetof { 645 }
+ | override { 646 }
+ | priv { 647 }
+ | proc { 648 }
+ | pure { 649 }
+ | sizeof { 650 }
+ | typeof { 651 }
+ | unsized { 652 }
+ | virtual { 653 }
+ | yield { 654 }
+ | default { 655 }
+ | union { 656 }
+ | catch { 657 }
+ | outerDoc { 658 }
+ | innerDoc { 659 }
+ | IDENT { 660 }
+ | '_' { 661 }
+ | LIFETIME { 662 }
+
+export_attribute :: { Int }
+ : inner_attribute { 663 }
+ | outer_attribute { 664 }
+
+export_block :: { Int }
+ : ntBlock { 665 }
+ | safety '{' '}' { 666 }
+ | safety '{' stmts_possibly_no_semi '}' { 667 }
+
+export_ty :: { Int }
+ : ty { 668 }
+ | impl_ty { 669 }
+
+
+{
+
+type P a = String -> Either String (a, String)
+
+bindP :: P a -> (a -> P b) -> P b
+bindP p f s = case p s of
+ Left m -> Left m
+ Right (x,s') -> f x s'
+
+returnP :: a -> P a
+returnP x s = Right (x,s)
+
+parseError :: Show b => b -> P a
+parseError b _ = Left ("Syntax error: the symbol `" ++ show b ++ "' does not fit here")
+
+
+data Token
+ = Equal
+ | Less
+ | Greater
+ | Ampersand
+ | Pipe
+ | Exclamation
+ | Tilde
+ | Plus
+ | Minus
+ | Star
+ | Slash
+ | Percent
+ | Caret
+ | GreaterEqual
+ | GreaterGreaterEqual
+ | AmpersandAmpersand
+ | PipePipe
+ | LessLess
+ | GreaterGreater
+ | EqualEqual
+ | NotEqual
+ | LessEqual
+ | LessLessEqual
+ | MinusEqual
+ | AmpersandEqual
+ | PipeEqual
+ | PlusEqual
+ | StarEqual
+ | SlashEqual
+ | CaretEqual
+ | PercentEqual
+ | At
+ | Dot
+ | DotDot
+ | DotDotDot
+ | Comma
+ | Semicolon
+ | Colon
+ | ModSep
+ | RArrow
+ | LArrow
+ | FatArrow
+ | Pound
+ | Dollar
+ | Question
+ | OpenParen
+ | OpenBracket
+ | OpenBrace
+ | CloseParen
+ | CloseBracket
+ | CloseBrace
+ | IdentTok String
+ | Underscore
+ | LifetimeTok String
+ | Space
+ | InnerDoc
+ | OuterDoc
+ | Shebang
+ | Eof
+ | ByteTok String
+ | CharTok String
+ | IntegerTok String
+ | FloatTok String
+ | StrTok String
+ | StrRawTok String
+ | ByteStrTok String
+ | ByteStrRawTok String
+ | Interpolated Int
+ deriving Show
+
+
+-- This is an intentionally simplified tokenizer
+lexNonSpace :: P Token
+lexNonSpace "" = Right (Eof, "")
+lexNonSpace ('.':cs) = Right (Dot, cs)
+lexNonSpace ('+':cs) = Right (Plus, cs)
+lexNonSpace (';':cs) = Right (Semicolon, cs)
+lexNonSpace (',':cs) = Right (Comma, cs)
+lexNonSpace ('=':cs) = Right (Equal, cs)
+lexNonSpace ('{':cs) = Right (OpenBrace, cs)
+lexNonSpace ('}':cs) = Right (CloseBrace, cs)
+lexNonSpace ('(':cs) = Right (OpenParen, cs)
+lexNonSpace (')':cs) = Right (CloseParen, cs)
+lexNonSpace (c:cs)
+ | isSpace c = lexNonSpace cs
+ | isNumber c = let (tok,cs') = span isNumber (c:cs) in Right (IntegerTok tok, cs')
+ | isAlpha c = let (tok,cs') = span isAlphaNum (c:cs) in Right (IdentTok tok, cs')
+ | otherwise = Left ("Unexpected character: `" ++ [c] ++ "'")
+
+
+main = case parseStmt "union.1 + 2;" of
+ Right (394, "") -> pure ()
+ _ -> exitWith (ExitFailure 1)
+}
diff --git a/tests/issue94.y b/tests/issue94.y
new file mode 100644
index 0000000..2bdd883
--- /dev/null
+++ b/tests/issue94.y
@@ -0,0 +1,33 @@
+-- See <https://github.com/simonmar/happy/issues/94> for more information
+%name parse prod
+
+%tokentype { Token }
+
+%monad { P } { bindP } { returnP }
+%error { error "parse error" }
+%lexer { lexer } { EOF }
+
+%token
+ IDENT { Identifier $$ }
+
+%%
+
+prod
+ : IDENT { () }
+
+{
+data Token = EOF | Identifier String
+
+type P a = String -> (a, String)
+
+bindP :: P a -> (a -> P b) -> P b
+bindP p f s = let (x,s') = p s in f x s'
+
+returnP :: a -> P a
+returnP = (,)
+
+lexer :: (Token -> P a) -> P a
+lexer cont s = cont (case s of { "" -> EOF; _ -> Identifier s }) ""
+
+main = return ()
+}
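
Both new tests drive the parser through a threaded (monadic) lexer: %lexer supplies an action that, given a continuation expecting the next token, runs the lexer in the P monad and passes the token on, which is what lexNonSpace `bindP` does in issue93.y. A self-contained sketch of that idiom outside of happy (all names and the toy token type are hypothetical):

import Data.Char (isDigit, isSpace)

-- The same parsing-monad shape as issue93.y: remaining input threaded
-- through every action, errors reported as Left.
type P a = String -> Either String (a, String)

returnP :: a -> P a
returnP x s = Right (x, s)

bindP :: P a -> (a -> P b) -> P b
bindP p f s = either Left (\(x, s') -> f x s') (p s)

data Token = TNum Int | TEof deriving Show

-- One token per call, consuming input held in the monad (cf. lexNonSpace).
lexToken :: P Token
lexToken s = case dropWhile isSpace s of
  ""                   -> Right (TEof, "")
  cs@(c:_) | isDigit c -> let (ds, rest) = span isDigit cs
                          in Right (TNum (read ds), rest)
           | otherwise -> Left ("unexpected character: " ++ [c])

-- How the generated parser uses the lexer: ask for a token, continue.
sumTokens :: P Int
sumTokens = lexToken `bindP` go
  where go TEof     = returnP 0
        go (TNum n) = sumTokens `bindP` (returnP . (n +))

main :: IO ()
main = print (sumTokens "1 2 3")   -- Right (6,"")
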