{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Language.Symantic.Compiling.Term.Grammar where

import Control.Arrow (left)
import Control.Monad (foldM, void, (=<<))
import qualified Data.Char as Char
import qualified Data.Function as Fun
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Proxy (Proxy(..))
import Data.Semigroup (Semigroup(..))
import Data.String (IsString(..))
import Data.Text (Text)
import qualified Data.Text as Text
import Prelude hiding (mod, not, any)

import Language.Symantic.Parsing
import Language.Symantic.Typing

-- * Type 'TeName'
newtype TeName = TeName Text
  deriving (Eq, Ord, Show)
instance IsString TeName where
  fromString = TeName . fromString

-- * Type 'ProTok'
-- | Proto 'EToken'. It is almost like a free monad,
-- but with more constructors, to avoid the need
-- for sub-constructors when requiring a type rather than a term.
--
-- NOTE: this type may one day be removed,
-- if proper polymorphic types are implemented.
-- In the meantime it is used to gather the
-- term or type arguments needed to build
-- the 'EToken's of polymorphic terms.
data ProTok meta ts
  = ProTokLam (EToken meta ts -> ProTok meta ts)
    -- ^ Require a term argument.
  | ProTokPi (EToken meta '[Proxy Token_Type] -> ProTok meta ts)
    -- ^ Require a type argument.
  | ProTokTe (EToken meta ts)
    -- ^ Immediate term: no argument required.
  | ProTokTy (EToken meta '[Proxy Token_Type])
    -- ^ Immediate type: no argument required.
  | ProTokApp
    -- ^ Require a term argument to perform a 'Token_Term_App'.
    --
    -- Useful to make the term application symbolized by a space
    -- be an operator, and thus integrate it with the other operators.
    -- This enables prefix and postfix operators to be applied
    -- before the term application.
instance (Show_Token meta ts, Show meta) => Show (ProTok meta ts) where
  showsPrec n ProTokLam{}    = showParen (n > 10) $ showString "ProTokLam"
  showsPrec n ProTokPi{}     = showParen (n > 10) $ showString "ProTokPi"
  showsPrec n (ProTokTe tok) = showParen (n > 10) $ showString "ProTokTe " . showsPrec 11 tok
  showsPrec n (ProTokTy typ) = showParen (n > 10) $ showString "ProTokTy " . showsPrec 11 typ
  showsPrec n ProTokApp      = showParen (n > 10) $ showString "ProTokApp"

-- | Declared here and not in @Compiling.Lambda@
-- to be able to use 'Token_Term_Var'.
data instance TokenT meta (ts::[*]) (Proxy (->))
  = Token_Term_Abst TeName (EToken meta '[Proxy Token_Type]) (EToken meta ts)
  | Token_Term_App (EToken meta ts) (EToken meta ts)
  | Token_Term_Let TeName (EToken meta ts) (EToken meta ts)
  | Token_Term_Var TeName
  | Token_Term_Compose (EToken meta ts) (EToken meta ts)

-- * Class 'Tokenize'
type Tokenize meta ts
  = TokenizeR meta ts ts

-- ** Type 'Tokenizers'
data Tokenizers meta ts
  = Tokenizers
  { tokenizers_prefix  :: Map Mod_Path (Map TeName (ProTok_Term Unifix meta ts))
  , tokenizers_infix   :: Map Mod_Path (Map TeName (ProTok_Term Infix meta ts))
  , tokenizers_postfix :: Map Mod_Path (Map TeName (ProTok_Term Unifix meta ts))
  }
deriving instance
  ( Show (ProTok_Term Unifix meta ts)
  , Show (ProTok_Term Infix meta ts)
  ) => Show (Tokenizers meta ts)
instance Semigroup (Tokenizers meta ts) where
  x <> y =
    Tokenizers
      (Map.unionWith Map.union
        (tokenizers_prefix x)
        (tokenizers_prefix y))
      (Map.unionWith Map.union
        (tokenizers_infix x)
        (tokenizers_infix y))
      (Map.unionWith Map.union
        (tokenizers_postfix x)
        (tokenizers_postfix y))
instance Monoid (Tokenizers meta ts) where
  mempty  = Tokenizers Map.empty Map.empty Map.empty
  mappend = (<>)

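-- | Illustrative sketch (hypothetical helper, not used elsewhere in this
-- module): '(<>)' merges tokenizer tables per 'Mod_Path'; within a given
-- module path 'Map.union' is left-biased, so on a name collision the
-- left operand's entry wins.
example_merge_tokenizers
  :: Tokenizers meta ts
  -> Tokenizers meta ts
  -> Tokenizers meta ts
example_merge_tokenizers custom builtin = custom <> builtin
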
-- ** Type 'ProTok_Term'
data ProTok_Term fixy meta ts
  = ProTok_Term
  { protok_term   :: meta -> ProTok meta ts
  , protok_fixity :: fixy
  }

tokenizers :: forall meta ts. Tokenize meta ts => Tokenizers meta ts
tokenizers = tokenizeR (Proxy @ts)

unProTok
  :: ProTok meta ts
  -> Either Error_Term_Gram (EToken meta ts)
unProTok (ProTokTe t) = Right t
unProTok (ProTokTy _) = Left Error_Term_Gram_Type_applied_to_nothing
unProTok _tok         = Left Error_Term_Gram_Term_incomplete

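-- | Illustrative sketch (hypothetical helper, not part of the API):
-- saturate a 'ProTok' with term arguments until an 'EToken' results,
-- collapsing the finer-grained errors of 'protok_app' into one.
protok_saturate
  :: ProTok meta ts
  -> [EToken meta ts]
  -> Either Error_Term_Gram (EToken meta ts)
protok_saturate (ProTokLam f) (a:as) = protok_saturate (f a) as
protok_saturate tok           []     = unProTok tok
protok_saturate _tok          _as    = Left Error_Term_Gram_application_mismatch
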
-- | Look up the given 'Mod' 'TeName' in the given 'Tokenizers',
-- returning an error or a 'ProTok' constructor
-- for the prefix, infix and postfix positions.
protok_lookup
  :: Inj_Token meta ts (->)
  => Mod TeName
  -> Tokenizers meta ts
  -> Either Error_Term_Gram
    ( Maybe (ProTok_Term Unifix meta ts)
    , Maybe (ProTok_Term Infix meta ts)
    , Maybe (ProTok_Term Unifix meta ts)
    )
protok_lookup mn@(mod `Mod` n) (Tokenizers pres ins posts) = do
  let pre  = Map.lookup mod pres  >>= Map.lookup n
  let post = Map.lookup mod posts >>= Map.lookup n
  let in_ =
        case mn of
          [] `Mod` " " -> Just
            ProTok_Term
              { protok_term   = \_meta -> ProTokApp
              , protok_fixity = Infix (Just AssocL) 9
              }
          _ -> Map.lookup mod ins >>= Map.lookup n
  return (pre, in_, post)

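-- | Illustrative sketch (hypothetical value): the bare space name resolves
-- to the term-application infix operator even in an empty 'Tokenizers',
-- thanks to the special case in 'protok_lookup' above.
example_lookup_app
  :: Inj_Token meta ts (->)
  => Either Error_Term_Gram (Maybe (ProTok_Term Infix meta ts))
example_lookup_app =
  (\(_pre, in_, _post) -> in_)
    <$> protok_lookup (Mod [] (TeName (Text.pack " "))) mempty
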
-- | Apply a proto-token to another one.
--
-- This is required because polymorphic types are not implemented (yet):
-- tokens of polymorphic terms must be given enough 'EToken's
-- to make them monomorphic.
protok_app
  :: Inj_Token meta ts (->)
  => ProTok meta ts
  -> ProTok meta ts
  -> Either Error_Term_Gram (ProTok meta ts)
protok_app (ProTokLam f) (ProTokTe a) = Right $ f a
protok_app (ProTokPi f)  (ProTokTy a) = Right $ f a
protok_app (ProTokTe f)  (ProTokTe a) =
  Right $ ProTokTe $ inj_EToken (meta_of f) $ -- TODO: merge (meta_of a)
    f `Token_Term_App` a
protok_app ProTokApp f@ProTokLam{} = Right f
protok_app ProTokApp f@ProTokPi{}  = Right f
protok_app ProTokApp (ProTokTe f)  =
  Right $ ProTokLam $ \a ->
    ProTokTe $ inj_EToken (meta_of f) $ -- TODO: merge (meta_of a)
      f `Token_Term_App` a
protok_app ProTokLam{} _ = Left Error_Term_Gram_application_mismatch
protok_app ProTokPi{}  _ = Left Error_Term_Gram_application_mismatch
protok_app ProTokTe{}  _ = Left Error_Term_Gram_not_applicable
protok_app ProTokTy{}  _ = Left Error_Term_Gram_not_applicable
protok_app ProTokApp{} _ = Left Error_Term_Gram_application

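-- | Illustrative sketch (hypothetical helper): apply a 'ProTok' to several
-- arguments in sequence, as the infix machinery below does with 'foldM'.
protok_apps
  :: Inj_Token meta ts (->)
  => ProTok meta ts
  -> [ProTok meta ts]
  -> Either Error_Term_Gram (ProTok meta ts)
protok_apps = foldM protok_app
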
-- ** Class 'TokenizeR'
class TokenizeR meta (ts::[*]) (rs::[*]) where
  tokenizeR :: Proxy rs -> Tokenizers meta ts
instance TokenizeR meta ts '[] where
  tokenizeR _rs = mempty
instance
  ( TokenizeT meta ts t
  , TokenizeR meta ts rs
  ) => TokenizeR meta ts (t ': rs) where
  tokenizeR _ =
    tokenizeR (Proxy @rs) <>
    tokenizeT (Proxy @t)

-- ** Class 'TokenizeT'
class TokenizeT meta ts t where
  tokenizeT :: Proxy t -> Tokenizers meta ts
  tokenizeT _t = mempty

tokenizeTMod
  :: Mod_Path
  -> [(TeName, ProTok_Term fix meta ts)]
  -> Map Mod_Path (Map TeName (ProTok_Term fix meta ts))
tokenizeTMod mod tbl = Map.singleton mod $ Map.fromList tbl

tokenize0
  :: Inj_Token meta ts t
  => Text -> fixity -> TokenT meta ts (Proxy t)
  -> (TeName, ProTok_Term fixity meta ts)
tokenize0 n protok_fixity tok =
  (TeName n,) ProTok_Term
    { protok_term = \meta -> ProTokTe $ inj_EToken meta tok
    , protok_fixity }

tokenize1
  :: Inj_Token meta ts t
  => Text -> fixity
  -> (EToken meta ts -> TokenT meta ts (Proxy t))
  -> (TeName, ProTok_Term fixity meta ts)
tokenize1 n protok_fixity tok =
  (TeName n,) ProTok_Term
    { protok_term = \meta ->
        ProTokLam $ \a ->
          ProTokTe $ inj_EToken meta $ tok a
    , protok_fixity }

tokenize2
  :: Inj_Token meta ts t
  => Text -> fixity
  -> (EToken meta ts -> EToken meta ts -> TokenT meta ts (Proxy t))
  -> (TeName, ProTok_Term fixity meta ts)
tokenize2 n protok_fixity tok =
  (TeName n,) ProTok_Term
    { protok_term = \meta ->
        ProTokLam $ \a -> ProTokLam $ \b ->
          ProTokTe $ inj_EToken meta $ tok a b
    , protok_fixity
    }

tokenize3
  :: Inj_Token meta ts t
  => Text -> fixity
  -> (EToken meta ts -> EToken meta ts -> EToken meta ts -> TokenT meta ts (Proxy t))
  -> (TeName, ProTok_Term fixity meta ts)
tokenize3 n protok_fixity tok =
  (TeName n,) ProTok_Term
    { protok_term = \meta ->
        ProTokLam $ \a -> ProTokLam $ \b -> ProTokLam $ \c ->
          ProTokTe $ inj_EToken meta $ tok a b c
    , protok_fixity
    }

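-- | Illustrative sketch (hypothetical entry, fixity chosen arbitrarily):
-- registering 'Token_Term_Compose' as an infix operator named @.@
-- by means of 'tokenize2'.
example_tokenize_compose
  :: Inj_Token meta ts (->)
  => (TeName, ProTok_Term Infix meta ts)
example_tokenize_compose =
  tokenize2 (Text.pack ".") (Infix (Just AssocL) 9) Token_Term_Compose
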
-- * Type 'Mod'
type Mod_Path = [Mod_Name]
newtype Mod_Name = Mod_Name Text
  deriving (Eq, Ord, Show)
data Mod a = Mod Mod_Path a
  deriving (Eq, Functor, Ord, Show)

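-- | Illustrative sketch (hypothetical value): the qualified name
-- @Foo.Bar.baz@ represented as a 'Mod' 'TeName'.
example_mod_name :: Mod TeName
example_mod_name =
  Mod [Mod_Name (Text.pack "Foo"), Mod_Name (Text.pack "Bar")]
      (TeName (Text.pack "baz"))
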
-- * Class 'Gram_Name'
class
  ( Alt g
  , Alter g
  , App g
  , Try g
  , Gram_CF g
  , Gram_Op g
  , Gram_Lexer g
  , Gram_RegL g
  , Gram_Rule g
  , Gram_Terminal g
  ) => Gram_Name g where
  g_mod_path :: CF g Mod_Path
  g_mod_path = rule "mod_path" $
    infixrG
      (pure <$> g_mod_name)
      (op <$ char '.')
    where op = (<>)
  g_mod_name :: CF g Mod_Name
  g_mod_name = rule "mod_name" $
    (Mod_Name . Text.pack <$>) $
    (identG `minus`) $
    Fun.const
      <$> g_term_keywords
      <*. (any `but` g_term_idname_tail)
    where
      identG = (:) <$> headG <*> many (cf_of_Terminal g_term_idname_tail)
      headG  = unicat $ Unicat Char.UppercaseLetter

  g_term_mod_name :: CF g (Mod TeName)
  g_term_mod_name = rule "term_mod_name" $
    lexeme $
      g_term_mod_idname <+>
      parens g_term_mod_opname
  g_term_name :: CF g TeName
  g_term_name = rule "term_name" $
    lexeme $
      g_term_idname <+>
      parens g_term_opname

  g_term_mod_idname :: CF g (Mod TeName)
  g_term_mod_idname = rule "term_mod_idname" $
    Mod
      <$> option [] (try $ g_mod_path <* char '.')
      <*> g_term_idname
  g_term_idname :: CF g TeName
  g_term_idname = rule "term_idname" $
    (TeName . Text.pack <$>) $
    (identG `minus`) $
    Fun.const
      <$> g_term_keywords
      <*. (any `but` g_term_idname_tail)
    where
      identG = (:) <$> headG <*> many (cf_of_Terminal g_term_idname_tail)
      headG  = unicat Unicat_Letter
  g_term_idname_tail :: Terminal g Char
  g_term_idname_tail = rule "term_idname_tail" $
    unicat Unicat_Letter <+>
    unicat Unicat_Number
  g_term_keywords :: Reg rl g String
  g_term_keywords = rule "term_keywords" $
    choice $ string <$> ["in", "let"]

  g_term_mod_opname :: CF g (Mod TeName)
  g_term_mod_opname = rule "term_mod_opname" $
    Mod
      <$> option [] (try $ g_mod_path <* char '.')
      <*> g_term_opname
  g_term_opname :: CF g TeName
  g_term_opname = rule "term_opname" $
    (TeName . Text.pack <$>) $
    (symG `minus`) $
    Fun.const
      <$> g_term_keysyms
      <*. (any `but` g_term_opname_ok)
    where
      symG = some $ cf_of_Terminal g_term_opname_ok
  g_term_opname_ok :: Terminal g Char
  g_term_opname_ok = rule "term_opname_ok" $
    choice (unicat <$>
      [ Unicat_Symbol
      , Unicat_Punctuation
      , Unicat_Mark
      ]) `but` koG
    where
      koG = choice (char <$> ['(', ')', '`', '\'', ',', '[', ']'])
  g_term_keysyms :: Reg rl g String
  g_term_keysyms = rule "term_keysyms" $
    choice $ string <$> ["\\", "->", "=", "@"]

deriving instance Gram_Name g => Gram_Name (CF g)
instance Gram_Name EBNF
instance Gram_Name RuleDef

-- * Class 'Gram_Term_Type'
class
  ( Alt g
  , Alter g
  , App g
  , Gram_CF g
  , Gram_Lexer g
  , Gram_Meta meta g
  , Gram_Rule g
  , Gram_Terminal g
  , Gram_Name g
  , Gram_Type meta g
  ) => Gram_Term_Type meta g where
  g_term_abst_decl
    :: CF g (TeName, TokType meta)
  g_term_abst_decl = rule "term_abst_decl" $
    parens $ (,)
      <$> g_term_name
      <*  symbol ":"
      <*> g_type

deriving instance Gram_Term_Type meta g => Gram_Term_Type meta (CF g)
instance Gram_Term_Type meta EBNF
instance Gram_Term_Type meta RuleDef

-- * Class 'Gram_Error'
class Gram_Error g where
  term_unError :: CF g (Either Error_Term_Gram a) -> CF g a
deriving instance Gram_Error g => Gram_Error (CF g)
instance Gram_Error EBNF where
  term_unError (CF (EBNF g)) = CF $ EBNF g
instance Gram_Error RuleDef where
  term_unError (CF (RuleDef (EBNF g))) =
    CF $ RuleDef $ EBNF g

-- ** Type 'Error_Term_Gram'
data Error_Term_Gram
  = Error_Term_Gram_Fixity Error_Fixity
  | Error_Term_Gram_Term_incomplete
  | Error_Term_Gram_Type_applied_to_nothing
  | Error_Term_Gram_not_applicable
  | Error_Term_Gram_application
  | Error_Term_Gram_application_mismatch
  deriving (Eq, Show)

-- * Class 'Gram_Term'
class
  ( Alt g
  , Alter g
  , App g
  , Gram_CF g
  , Gram_Lexer g
  , Gram_Meta meta g
  , Gram_Rule g
  , Gram_Terminal g
  , Gram_Error g
  , Gram_Term_AtomsR meta ts ts g
  , Gram_Name g
  , Gram_Term_Type meta g
  , Gram_Type meta g
  , Inj_Token meta ts (->)
  ) => Gram_Term ts meta g where
  tokenizers_get :: CF g (Tokenizers meta ts -> a) -> CF g a
  tokenizers_put :: CF g (Tokenizers meta ts, a) -> CF g a

  g_term
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (EToken meta ts)
  g_term = rule "term" $
    choice
      [ try g_term_abst
      , g_term_operators
      , g_term_let
      ]
  g_term_operators
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (EToken meta ts)
  g_term_operators = rule "term_operators" $
    term_unError $
    ((unProTok =<<) <$>) $
    term_unError $
    left Error_Term_Gram_Fixity <$>
    ops
    where
      ops :: CF g (Either Error_Fixity
                          (Either Error_Term_Gram (ProTok meta ts)))
      ops =
        operators
          (Right <$> g_term_atom)
          (term_unError $ metaG $ tokenizers_get $ op_prefix <$> g_term_op_prefix)
          (term_unError $ metaG $ tokenizers_get $ op_infix <$> g_term_op_infix)
          (term_unError $ metaG $ tokenizers_get $ op_postfix <$> g_term_op_postfix)
      op_infix
        :: Mod TeName
        -> Tokenizers meta ts
        -> meta
        -> Either Error_Term_Gram
          ( Infix
          , Either Error_Term_Gram (ProTok meta ts)
            -> Either Error_Term_Gram (ProTok meta ts)
            -> Either Error_Term_Gram (ProTok meta ts) )
      op_infix name toks meta = do
        (_pre, in_, _post) <- protok_lookup name toks
        case in_ of
          Nothing -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedInfix
          Just p ->
            Right $ (protok_fixity p,) $ \ma mb -> do
              a <- ma
              b <- mb
              foldM protok_app (protok_term p meta) [a, b]
      op_prefix, op_postfix
        :: Mod TeName
        -> Tokenizers meta ts
        -> meta
        -> Either Error_Term_Gram
          ( Unifix
          , Either Error_Term_Gram (ProTok meta ts)
            -> Either Error_Term_Gram (ProTok meta ts) )
      op_prefix name toks meta = do
        (pre, _in_, _post) <- protok_lookup name toks
        case pre of
          Nothing -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedPrefix
          Just p ->
            Right $ (protok_fixity p,) $ \ma -> do
              a <- ma
              protok_term p meta `protok_app` a
      op_postfix name toks meta = do
        (_pre, _in_, post) <- protok_lookup name toks
        case post of
          Nothing -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedPostfix
          Just p ->
            Right $ (protok_fixity p,) $ \ma -> do
              a <- ma
              protok_term p meta `protok_app` a
  g_term_op_postfix :: CF g (Mod TeName)
  g_term_op_postfix = rule "term_op_postfix" $
    lexeme $
      g_backquote *> g_term_mod_idname <+> -- <* (cf_of_Terminal $ Gram.Term (pure ' ') `but` g_backquote)
      g_term_mod_opname
  g_term_op_infix :: CF g (Mod TeName)
  g_term_op_infix = rule "term_op_infix" $
    lexeme $
      between g_backquote g_backquote g_term_mod_idname <+>
      try (Fun.const <$> g_term_mod_opname <*> (string " " <+> string "\n")) <+>
      pure (Mod [] " ")
  g_term_op_prefix :: CF g (Mod TeName)
  g_term_op_prefix = rule "term_op_prefix" $
    lexeme $
      g_term_mod_idname <* g_backquote <+>
      g_term_mod_opname
  g_backquote :: Gram_Terminal g' => g' Char
  g_backquote = char '`'

  g_term_atom
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (ProTok meta ts)
  g_term_atom = rule "term_atom" $
    choice $
    (try (ProTokTy <$ char '@' <*> g_type) :) $
    try <$> gs_term_atomsR (Proxy @ts) <>
    [ try $
        metaG $ term_unError $ tokenizers_get $
        (\mn toks -> do
          (_, in_, _) <- protok_lookup mn toks
          case in_ of
            Nothing ->
              case mn of
                [] `Mod` n -> Right $ \meta ->
                  ProTokTe $ inj_EToken meta $ Token_Term_Var n
                _ -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedInfix
            Just p -> Right $ protok_term p
        ) <$> g_term_mod_name
    , ProTokTe <$> g_term_group
    ]
  g_term_group
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (EToken meta ts)
  g_term_group = rule "term_group" $ parens g_term
  g_term_abst
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (EToken meta ts)
  g_term_abst = rule "term_abst" $
    metaG $
    ((\(xs, te) meta ->
      foldr (\(x, ty_x) ->
        inj_EToken meta .
        Token_Term_Abst x ty_x) te xs) <$>) $
    g_term_abst_args_body
      (symbol "\\" *> some g_term_abst_decl <* symbol "->")
      g_term
  g_term_abst_args_body
    :: CF g [(TeName, TokType meta)]
    -> CF g (EToken meta ts)
    -> CF g ([(TeName, TokType meta)], EToken meta ts)
  -- g_term_abst_args_body args body = (,) <$> args <*> body
  g_term_abst_args_body cf_args cf_body =
    tokenizers_put $ tokenizers_get $
      (\a b (toks::Tokenizers meta ts) -> (toks, (a, b)))
      <$> (tokenizers_put $ tokenizers_get $
        (\args (toks::Tokenizers meta ts) -> (,args)
          Tokenizers
            { tokenizers_prefix  = del (tokenizers_prefix toks) args
            , tokenizers_infix   = ins (tokenizers_infix toks) args
            , tokenizers_postfix = del (tokenizers_postfix toks) args
            }) <$> cf_args)
      <*> cf_body
    where
      del = foldr $ \(n, _) -> Map.adjust (Map.delete n) []
      ins = foldr $ \(n, _) ->
        Map.insertWith (<>) [] $
        Map.singleton n
          ProTok_Term
            { protok_term   = \meta -> ProTokTe $ inj_EToken meta $ Token_Term_Var n
            , protok_fixity = infixN5
            }
  g_term_let
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (EToken meta ts)
  g_term_let = rule "term_let" $
    metaG $
    (\name args bound body meta ->
      inj_EToken meta $
      Token_Term_Let name
        (foldr (\(x, ty_x) ->
          inj_EToken meta . Token_Term_Abst x ty_x) bound args) body)
    <$  symbol "let"
    <*> g_term_name
    <*> many g_term_abst_decl
    <*  symbol "="
    <*> g_term
    <*  symbol "in"
    <*> g_term

deriving instance
  ( Gram_Term ts meta g
  , Gram_Term_AtomsR meta ts ts (CF g)
  ) => Gram_Term ts meta (CF g)
instance
  ( Gram_Term_AtomsR meta ts ts EBNF
  , Inj_Token meta ts (->)
  ) => Gram_Term ts meta EBNF where
  tokenizers_get (CF (EBNF g)) = CF $ EBNF g
  tokenizers_put (CF (EBNF g)) = CF $ EBNF g
instance
  ( Gram_Term_AtomsR meta ts ts RuleDef
  , Inj_Token meta ts (->)
  ) => Gram_Term ts meta RuleDef where
  tokenizers_get (CF (RuleDef (EBNF g))) = CF $ RuleDef $ EBNF g
  tokenizers_put (CF (RuleDef (EBNF g))) = CF $ RuleDef $ EBNF g

-- ** Class 'Gram_Term_AtomsR'
class Gram_Term_AtomsR meta (ts::[*]) (rs::[*]) g where
  gs_term_atomsR :: Proxy rs -> [CF g (ProTok meta ts)]
instance Gram_Term_AtomsR meta ts '[] g where
  gs_term_atomsR _rs = []
instance
  ( Gram_Term_AtomsT meta ts t g
  , Gram_Term_AtomsR meta ts rs g
  ) => Gram_Term_AtomsR meta ts (t ': rs) g where
  gs_term_atomsR _ =
    gs_term_atomsT (Proxy @t) <>
    gs_term_atomsR (Proxy @rs)

-- ** Class 'Gram_Term_AtomsT'
class Gram_Term_AtomsT meta ts t g where
  gs_term_atomsT :: Proxy t -> [CF g (ProTok meta ts)]
  gs_term_atomsT _t = []
-- instance Gram_Term_AtomsT meta ts t RuleDef

gram_term
  :: forall g.
  ( Gram_Term '[Proxy (->), Proxy Integer] () g
  ) => [CF g ()]
gram_term =
  [ ue g_term
  , ue g_term_operators
  , up g_term_atom
  , ue g_term_group
  , ue g_term_abst
  , void (g_term_abst_decl :: CF g (TeName, TokType ()))
  , ue g_term_let
  , void g_term_mod_name
  , void g_term_name
  , void g_term_idname
  , void $ cf_of_Terminal g_term_idname_tail
  , void $ cf_of_Reg g_term_keywords
  , void g_term_mod_opname
  , void g_term_opname
  , void $ cf_of_Terminal g_term_opname_ok
  , void $ cf_of_Reg g_term_keysyms
  ]
  where
    up :: CF g (ProTok () '[Proxy (->), Proxy Integer]) -> CF g ()
    up = (() <$)
    ue :: CF g (EToken () '[Proxy (->), Proxy Integer]) -> CF g ()
    ue = (() <$)