{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Language.Symantic.Compiling.Term.Grammar where

import Control.Arrow (left)
import Control.Monad (void)
import Data.Map.Strict (Map)
import Data.Proxy (Proxy(..))
import Data.Semigroup (Semigroup(..))
import Data.String (IsString(..))
import Data.Text (Text)
import Prelude hiding (mod, not, any)
import qualified Data.Char as Char
import qualified Data.Function as Fun
import qualified Data.Map.Strict as Map
import qualified Data.Text as Text

import Language.Symantic.Parsing
import Language.Symantic.Typing

-- * Type 'TeName'
newtype TeName = TeName Text
  deriving (Eq, Ord, Show)
instance IsString TeName where
  fromString = TeName . fromString
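-- Example: thanks to this 'IsString' instance (and @OverloadedStrings@),
-- a literal like @"x" :: TeName@ denotes @TeName (Text.pack "x")@.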

-- * Type 'Mod'
type Mod_Path = [Mod_Name]
newtype Mod_Name = Mod_Name Text
  deriving (Eq, Ord, Show)
data Mod a = Mod Mod_Path a
  deriving (Eq, Functor, Ord, Show)
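-- Example: the qualified name @Foo.Bar.x@ is represented as
-- @Mod [Mod_Name "Foo", Mod_Name "Bar"] (TeName "x")@;
-- an unqualified @x@ as @Mod [] (TeName "x")@.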

-- * Type 'AST_Term'
-- type AST_Term src ts = BinTree (EToken src ts)

-- NOTE: Declared here rather than in @Lib.Lambda@ to be able to use them here.
data instance TokenT src (ts::[*]) (Proxy (->))
  = Token_Term_Abst TeName (AST_Type src) (AST_Term src ts)
  | Token_Term_App
  | Token_Term_Let TeName (AST_Term src ts) (AST_Term src ts)
  | Token_Term_Var TeName
  | Token_Term_Type (AST_Type src)
  | Token_Term_Compose
{-
(~>) :: Inj_TyConst cs (->) => Type cs a -> Type cs b -> Type cs (a -> b)
(~>) a b = ty @(->) :$ a :$ b
infixr 5 ~>

-- (.) :: (b -> c) -> (a -> b) -> a -> c
ty_compose :: Inj_TyConsts cs [Proxy (->), Proxy Monad] => Type cs (Any::Void)
ty_compose =
  TyPi SKiType $ \a ->
  TyPi SKiType $ \b ->
  TyPi SKiType $ \c ->
  tyVoid $
    (b ~> c) ~> (a ~> b) ~> a ~> c

-- ($) :: (a -> b) -> a -> b
ty_app :: Inj_TyConsts cs [Proxy (->), Proxy Monad] => Type cs (Any::Void)
ty_app =
  TyPi SKiType $ \a ->
  TyPi SKiType $ \b ->
  tyVoid $
    (a ~> b) ~> a ~> b
-}

-- * Class 'Tokenize'
type Tokenize src ts
  = TokenizeR src ts ts

-- ** Type 'Tokenizer'
data Tokenizer fixy src ts
  = Tokenizer
    { tokenizer :: src -> EToken src ts
    , tokenizer_fixity :: fixy
    }

-- ** Type 'Tokenizers'
data Tokenizers src ts
  = Tokenizers
    { tokenizers_prefix :: Map Mod_Path (Map TeName (Tokenizer Unifix src ts))
    , tokenizers_infix :: Map Mod_Path (Map TeName (Tokenizer Infix src ts))
    , tokenizers_postfix :: Map Mod_Path (Map TeName (Tokenizer Unifix src ts))
    }

deriving instance
  ( Show (Tokenizer Unifix src ts)
  , Show (Tokenizer Infix src ts)
  ) => Show (Tokenizers src ts)
instance Semigroup (Tokenizers src ts) where
  x <> y =
    Tokenizers
      (Map.unionWith Map.union
        (tokenizers_prefix x)
        (tokenizers_prefix y))
      (Map.unionWith Map.union
        (tokenizers_infix x)
        (tokenizers_infix y))
      (Map.unionWith Map.union
        (tokenizers_postfix x)
        (tokenizers_postfix y))
instance Monoid (Tokenizers src ts) where
  mempty = Tokenizers Map.empty Map.empty Map.empty
  mappend = (<>)
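
-- NOTE: '(<>)' is left-biased per name: when the same 'TeName' is bound in
-- the same 'Mod_Path' on both sides, 'Map.union' keeps the left entry.
{-
-- Illustrative sketch (hypothetical tokenizers @tokA@ and @tokB@):
--   tokenizers_prefix
--     ( mempty { tokenizers_prefix = tokenizeTMod [] [("f", tokA)] }
--     <> mempty { tokenizers_prefix = tokenizeTMod [] [("f", tokB)] } )
--   == tokenizeTMod [] [("f", tokA)]
-}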

tokenizers :: forall src ts. Tokenize src ts => Tokenizers src ts
tokenizers = tokenizeR (Proxy @ts)

{-
unProTok
  :: ProTok src ts
  -> Either Error_Term_Gram (EToken src ts)
unProTok (ProTokTe t) = Right t
unProTok (ProTokTy _) = Left $ Error_Term_Gram_Type_applied_to_nothing
unProTok _tok = Left $ Error_Term_Gram_Term_incomplete
-}

-- | Look up the given 'Mod' 'TeName' in the given 'Tokenizers',
-- returning the 'Tokenizer's registered at prefix, infix
-- and postfix positions, if any.
tokenizer_lookup
  :: Inj_Token src ts (->)
  => Mod TeName
  -> Tokenizers src ts
  -> ( Maybe (Tokenizer Unifix src ts)
     , Maybe (Tokenizer Infix src ts)
     , Maybe (Tokenizer Unifix src ts)
     )
tokenizer_lookup mn@(mod `Mod` n) (Tokenizers pres ins posts) =
  (pre, in_, post)
  where
  pre = Map.lookup mod pres >>= Map.lookup n
  post = Map.lookup mod posts >>= Map.lookup n
  in_ =
    case mn of
      -- NOTE: at infix position, bare whitespace is hardwired
      -- to mean left-associative term application.
      [] `Mod` " " -> Just
        Tokenizer
          { tokenizer = \src -> inj_EToken src Token_Term_App
          , tokenizer_fixity = Infix (Just AssocL) 9
          }
      _ -> Map.lookup mod ins >>= Map.lookup n
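
-- Example (sketch): assuming nothing is registered under the name @" "@,
--   tokenizer_lookup ([] `Mod` " ") toks == (Nothing, Just app, Nothing)
-- where @app@ injects 'Token_Term_App' with fixity @Infix (Just AssocL) 9@;
-- this is how @f x@ parses as an application without any declaration.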

{-
-- | Apply a proto-token to another.
--
-- This is required because polymorphic types are not implemented (yet),
-- therefore tokens for polymorphic types must have enough 'EToken's
-- to make them monomorphic types.
protok_app
  :: Inj_Token src ts (->)
  => ProTok src ts
  -> ProTok src ts
  -> Either Error_Term_Gram (ProTok src ts)
protok_app (ProTokLam f) (ProTokTe a) = Right $ f a
protok_app (ProTokPi f) (ProTokTy a) = Right $ f a
protok_app (ProTokTe f) (ProTokTe a) =
  Right $ ProTokTe $ inj_EToken (meta_of f) $ -- TODO: merge (meta_of a)
    f `Token_Term_App` a
protok_app ProTokApp f@ProTokLam{} = Right f
protok_app ProTokApp f@ProTokPi{} = Right f
protok_app ProTokApp (ProTokTe f) =
  Right $ ProTokLam $ \a ->
    ProTokTe $ inj_EToken (meta_of f) $ -- TODO: merge (meta_of a)
      f `Token_Term_App` a
protok_app ProTokLam{} _ = Left $ Error_Term_Gram_application_mismatch
protok_app ProTokPi{} _ = Left $ Error_Term_Gram_application_mismatch
protok_app ProTokTe{} _ = Left $ Error_Term_Gram_not_applicable
protok_app ProTokTy{} _ = Left $ Error_Term_Gram_not_applicable
protok_app ProTokApp{} _ = Left $ Error_Term_Gram_application
-}

-- ** Class 'TokenizeR'
class TokenizeR src (ts::[*]) (rs::[*]) where
  tokenizeR :: Proxy rs -> Tokenizers src ts
instance TokenizeR src ts '[] where
  tokenizeR _rs = mempty
instance
  ( TokenizeT src ts t
  , TokenizeR src ts rs
  ) => TokenizeR src ts (t ': rs) where
  tokenizeR _ =
    tokenizeR (Proxy @rs) <>
    tokenizeT (Proxy @t)

-- ** Class 'TokenizeT'
class TokenizeT src ts t where
  tokenizeT :: Proxy t -> Tokenizers src ts
  tokenizeT _t = mempty

tokenizeTMod
  :: Mod_Path
  -> [(TeName, Tokenizer fix src ts)]
  -> Map Mod_Path (Map TeName (Tokenizer fix src ts))
tokenizeTMod mod tbl = Map.singleton mod $ Map.fromList tbl

tokenize0
  :: Inj_Token src ts t
  => Text -> fixity -> TokenT src ts (Proxy t)
  -> (TeName, Tokenizer fixity src ts)
tokenize0 n tokenizer_fixity tok =
  (TeName n,) Tokenizer
    { tokenizer = \src -> inj_EToken src tok
    , tokenizer_fixity }
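
-- Illustrative sketch of a 'TokenizeT' instance built with 'tokenizeTMod'
-- and 'tokenize0', registering 'Token_Term_Compose' as a right-associative
-- infix operator named @.@ (a sketch, not the library's actual instance):
{-
instance Inj_Token src ts (->) => TokenizeT src ts (Proxy (->)) where
  tokenizeT _t = mempty
    { tokenizers_infix = tokenizeTMod []
        [ tokenize0 "." (Infix (Just AssocR) 9) Token_Term_Compose ]
    }
-}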

{-
tokenize1
  :: Inj_Token src ts t
  => Text -> fixity
  -> (EToken src ts -> TokenT src ts (Proxy t))
  -> (TeName, Tokenizer fixity src ts)
tokenize1 n tokenizer_fixity tok =
  (TeName n,) Tokenizer
    { tokenizer = \src ->
        ProTokLam $ \a ->
          ProTokTe $ inj_EToken src $ tok a
    , tokenizer_fixity }

tokenize2
  :: Inj_Token src ts t
  => Text -> fixity
  -> (EToken src ts -> EToken src ts -> TokenT src ts (Proxy t))
  -> (TeName, Tokenizer fixity src ts)
tokenize2 n tokenizer_fixity tok =
  (TeName n,) Tokenizer
    { tokenizer = \src ->
        ProTokLam $ \a -> ProTokLam $ \b ->
          ProTokTe $ inj_EToken src $ tok a b
    , tokenizer_fixity
    }

tokenize3
  :: Inj_Token src ts t
  => Text -> fixity
  -> (EToken src ts -> EToken src ts -> EToken src ts -> TokenT src ts (Proxy t))
  -> (TeName, Tokenizer fixity src ts)
tokenize3 n tokenizer_fixity tok =
  (TeName n,) Tokenizer
    { tokenizer = \src ->
        ProTokLam $ \a -> ProTokLam $ \b -> ProTokLam $ \c ->
          ProTokTe $ inj_EToken src $ tok a b c
    , tokenizer_fixity
    }
-}

-- * Class 'Gram_Name'
class
  ( Alt g
  , Alter g
  , App g
  , Try g
  , Gram_CF g
  , Gram_Op g
  , Gram_Lexer g
  , Gram_RegL g
  , Gram_Rule g
  , Gram_Terminal g
  ) => Gram_Name g where
  g_mod_path :: CF g Mod_Path
  g_mod_path = rule "mod_path" $
    infixrG
      (pure <$> g_mod_name)
      (op <$ char '.')
    where op = (<>)
  g_mod_name :: CF g Mod_Name
  g_mod_name = rule "mod_name" $
    (Mod_Name . Text.pack <$>) $
    (identG `minus`) $
    Fun.const
      <$> g_term_keywords
      <*. (any `but` g_term_idname_tail)
    where
    identG = (:) <$> headG <*> many (cf_of_Terminal g_term_idname_tail)
    headG = unicat $ Unicat Char.UppercaseLetter

  g_term_mod_name :: CF g (Mod TeName)
  g_term_mod_name = rule "term_mod_name" $
    lexeme $
      g_term_mod_idname <+>
      parens g_term_mod_opname
  g_term_name :: CF g TeName
  g_term_name = rule "term_name" $
    lexeme $
      g_term_idname <+>
      parens g_term_opname

  g_term_mod_idname :: CF g (Mod TeName)
  g_term_mod_idname = rule "term_mod_idname" $
    Mod
      <$> option [] (try $ g_mod_path <* char '.')
      <*> g_term_idname
  g_term_idname :: CF g TeName
  g_term_idname = rule "term_idname" $
    (TeName . Text.pack <$>) $
    (identG `minus`) $
    Fun.const
      <$> g_term_keywords
      <*. (any `but` g_term_idname_tail)
    where
    identG = (:) <$> headG <*> many (cf_of_Terminal g_term_idname_tail)
    headG = unicat Unicat_Letter
  g_term_idname_tail :: Terminal g Char
  g_term_idname_tail = rule "term_idname_tail" $
    unicat Unicat_Letter <+>
    unicat Unicat_Number
  g_term_keywords :: Reg rl g String
  g_term_keywords = rule "term_keywords" $
    choice $ string <$> ["in", "let"]

  g_term_mod_opname :: CF g (Mod TeName)
  g_term_mod_opname = rule "term_mod_opname" $
    Mod
      <$> option [] (try $ g_mod_path <* char '.')
      <*> g_term_opname
  g_term_opname :: CF g TeName
  g_term_opname = rule "term_opname" $
    (TeName . Text.pack <$>) $
    (symG `minus`) $
    Fun.const
      <$> g_term_keysyms
      <*. (any `but` g_term_opname_ok)
    where
    symG = some $ cf_of_Terminal g_term_opname_ok
  g_term_opname_ok :: Terminal g Char
  g_term_opname_ok = rule "term_opname_ok" $
    choice (unicat <$>
      [ Unicat_Symbol
      , Unicat_Punctuation
      , Unicat_Mark
      ]) `but` koG
    where
    koG = choice (char <$> ['(', ')', '`', '\'', ',', '[', ']'])
  g_term_keysyms :: Reg rl g String
  g_term_keysyms = rule "term_keysyms" $
    choice $ string <$> ["\\", "->", "=", "@"]

deriving instance Gram_Name g => Gram_Name (CF g)
instance Gram_Name EBNF
instance Gram_Name RuleDef
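
-- Examples of names recognized by 'Gram_Name':
--   @Foo.Bar.baz@ -> @Mod [Mod_Name "Foo", Mod_Name "Bar"] (TeName "baz")@
--   @(&&)@        -> @Mod [] (TeName "&&")@ (operator name, in parens)
--   @let@, @in@   -> rejected: keywords ('g_term_keywords')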

-- * Class 'Gram_Term_Type'
class
  ( Alt g
  , Alter g
  , App g
  , Gram_CF g
  , Gram_Lexer g
  , Gram_Meta src g
  , Gram_Rule g
  , Gram_Terminal g
  , Gram_Name g
  , Gram_Type src g
  ) => Gram_Term_Type src g where
  g_term_abst_decl
    :: CF g (TeName, AST_Type src)
  g_term_abst_decl = rule "term_abst_decl" $
    parens $ (,)
      <$> g_term_name
      <* symbol ":"
      <*> g_type

deriving instance Gram_Term_Type src g => Gram_Term_Type src (CF g)
instance Inj_Source (Text_of_Source src) src => Gram_Term_Type src EBNF
instance Inj_Source (Text_of_Source src) src => Gram_Term_Type src RuleDef
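
-- Example: the abstraction parameter @(x:A)@ parses with 'g_term_abst_decl'
-- to @(TeName "x", ast_A)@, where @ast_A@ is the 'AST_Type' parsed for @A@.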

-- * Class 'Gram_Error'
class Gram_Error g where
  errorG :: CF g (Either Error_Term_Gram a) -> CF g a
deriving instance Gram_Error g => Gram_Error (CF g)
instance Gram_Error EBNF where
  errorG (CF (EBNF g)) = CF $ EBNF g
instance Gram_Error RuleDef where
  errorG (CF (RuleDef (EBNF g))) =
    CF $ RuleDef $ EBNF g

-- ** Type 'Error_Term_Gram'
data Error_Term_Gram
  = Error_Term_Gram_Fixity Error_Fixity
  | Error_Term_Gram_Term_incomplete
  | Error_Term_Gram_Type_applied_to_nothing
  | Error_Term_Gram_not_applicable
  | Error_Term_Gram_application
  | Error_Term_Gram_application_mismatch
  deriving (Eq, Show)

-- * Class 'Gram_Term'
class
  ( Alt g
  , Alter g
  , App g
  , Gram_CF g
  , Gram_Lexer g
  , Gram_Meta src g
  , Gram_Rule g
  , Gram_Terminal g
  , Gram_Error g
  , Gram_Term_AtomsR src ts ts g
  , Gram_Name g
  , Gram_Term_Type src g
  , Gram_Type src g
  , Inj_Token src ts (->)
  ) => Gram_Term ts src g where
  tokenizers_get :: CF g (Tokenizers src ts -> a) -> CF g a
  tokenizers_put :: CF g (Tokenizers src ts, a) -> CF g a
  g_term
    :: Inj_Token src ts (->)
    => CF g (AST_Term src ts)
  g_term = rule "term" $
    choice
      [ try g_term_abst
      , g_term_operators
      , g_term_let
      ]
  g_term_operators
    :: Inj_Token src ts (->)
    => CF g (AST_Term src ts)
  g_term_operators = rule "term_operators" $
    errorG $
      left Error_Term_Gram_Fixity <$>
      g_ops
    where
    g_ops :: CF g (Either Error_Fixity (AST_Term src ts))
    g_ops = operators g_term_atom g_prefix g_infix g_postfix
    g_prefix :: CF g (Unifix, AST_Term src ts -> AST_Term src ts)
    g_infix :: CF g (Infix, AST_Term src ts -> AST_Term src ts -> AST_Term src ts)
    g_postfix :: CF g (Unifix, AST_Term src ts -> AST_Term src ts)
    g_prefix = errorG $ metaG $ tokenizers_get $ op_prefix <$> g_prefix_op
    g_infix = errorG $ metaG $ tokenizers_get $ op_infix <$> g_infix_op
    g_postfix = errorG $ metaG $ tokenizers_get $ op_postfix <$> g_postfix_op
    op_infix
      :: Mod TeName
      -> Tokenizers src ts
      -> src
      -> Either Error_Term_Gram
                (Infix, AST_Term src ts -> AST_Term src ts -> AST_Term src ts)
    op_infix name toks src = do
      let (_pre, in_, _post) = tokenizer_lookup name toks
      case in_ of
        Nothing -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedInfix
        Just p ->
          Right $ (tokenizer_fixity p,) $ \a b ->
            (BinTree0 (tokenizer p src) `BinTree1` a) `BinTree1` b
    op_prefix, op_postfix
      :: Mod TeName
      -> Tokenizers src ts
      -> src
      -> Either Error_Term_Gram
                ( Unifix
                , AST_Term src ts -> AST_Term src ts )
    op_prefix name toks src = do
      let (pre, _in_, _post) = tokenizer_lookup name toks
      case pre of
        Nothing -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedPrefix
        Just p ->
          Right $ (tokenizer_fixity p,) $ \a ->
            BinTree0 (tokenizer p src) `BinTree1` a
    op_postfix name toks src = do
      let (_pre, _in_, post) = tokenizer_lookup name toks
      case post of
        Nothing -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedPostfix
        Just p ->
          Right $ (tokenizer_fixity p,) $ \a ->
            BinTree0 (tokenizer p src) `BinTree1` a
  g_postfix_op :: CF g (Mod TeName)
  g_postfix_op = rule "term_op_postfix" $
    lexeme $
      g_backquote *> g_term_mod_idname <+> -- <* (cf_of_Terminal $ Gram.Term (pure ' ') `but` g_backquote)
      g_term_mod_opname
  g_infix_op :: CF g (Mod TeName)
  g_infix_op = rule "term_op_infix" $
    lexeme $
      between g_backquote g_backquote g_term_mod_idname <+>
      try (Fun.const <$> g_term_mod_opname <*> (string " " <+> string "\n")) <+>
      pure (Mod [] " ")
  g_prefix_op :: CF g (Mod TeName)
  g_prefix_op = rule "term_op_prefix" $
    lexeme $
      g_term_mod_idname <* g_backquote <+>
      g_term_mod_opname
  g_backquote :: Gram_Terminal g' => g' Char
  g_backquote = char '`'

  g_term_atom
    :: Inj_Token src ts (->)
    => CF g (AST_Term src ts)
  g_term_atom = rule "term_atom" $
    choice $
      (try (
        metaG $
          (\typ src -> BinTree0 $ inj_EToken src $ Token_Term_Type typ)
          <$ char '@' <*> g_type) :) $
      (try . (BinTree0 <$>) <$> gs_term_atomsR (Proxy @ts)) <>
      [ try $
          errorG $ metaG $ tokenizers_get $
            (\mn toks src -> do
              let (_, in_, _) = tokenizer_lookup mn toks
              case in_ of
                Just p -> Right $ BinTree0 $ tokenizer p src
                Nothing ->
                  case mn of
                    [] `Mod` n -> Right $ BinTree0 $ inj_EToken src $ Token_Term_Var n
                    _ -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedInfix
            ) <$> g_term_mod_name
      , g_term_group
      ]
  g_term_group
    :: Inj_Token src ts (->)
    => CF g (AST_Term src ts)
  g_term_group = rule "term_group" $ parens g_term
  g_term_abst
    :: Inj_Token src ts (->)
    => CF g (AST_Term src ts)
  g_term_abst = rule "term_abst" $
    metaG $
      ((\(xs, te) src ->
        foldr (\(x, ty_x) ->
          BinTree0 .
          inj_EToken src .
          Token_Term_Abst x ty_x) te xs) <$>) $
      g_term_abst_args_body
        (symbol "\\" *> some g_term_abst_decl <* symbol "->")
        g_term
  g_term_abst_args_body
    :: CF g [(TeName, AST_Type src)]
    -> CF g (AST_Term src ts)
    -> CF g ([(TeName, AST_Type src)], AST_Term src ts)
  -- g_term_abst_args_body args body = (,) <$> args <*> body
  g_term_abst_args_body cf_args cf_body =
    tokenizers_put $ tokenizers_get $
      (\a b (toks::Tokenizers src ts) -> (toks, (a, b)))
      <$> (tokenizers_put $ tokenizers_get $
            (\args (toks::Tokenizers src ts) -> (,args)
              Tokenizers
                { tokenizers_prefix = del (tokenizers_prefix toks) args
                , tokenizers_infix = ins (tokenizers_infix toks) args
                , tokenizers_postfix = del (tokenizers_postfix toks) args
                }) <$> cf_args)
      <*> cf_body
    where
    del = foldr $ \(n, _) -> Map.adjust (Map.delete n) []
    ins = foldr $ \(n, _) ->
      Map.insertWith (<>) [] $
        Map.singleton n
          Tokenizer
            { tokenizer = \src -> inj_EToken src $ Token_Term_Var n
            , tokenizer_fixity = infixN5
            }
  g_term_let
    :: Inj_Token src ts (->)
    => CF g (AST_Term src ts)
  g_term_let = rule "term_let" $
    metaG $
      (\name args bound body src ->
        BinTree0 $
          inj_EToken src $
            Token_Term_Let name
              (foldr (\(x, ty_x) ->
                BinTree0 .
                inj_EToken src .
                Token_Term_Abst x ty_x) bound args) body)
      <$ symbol "let"
      <*> g_term_name
      <*> many g_term_abst_decl
      <* symbol "="
      <*> g_term
      <* symbol "in"
      <*> g_term

deriving instance
  ( Gram_Term ts src g
  , Gram_Term_AtomsR src ts ts (CF g)
  ) => Gram_Term ts src (CF g)
instance
  ( Gram_Term_AtomsR src ts ts EBNF
  , Inj_Token src ts (->)
  , Inj_Source (Text_of_Source src) src
  ) => Gram_Term ts src EBNF where
  tokenizers_get (CF (EBNF g)) = CF $ EBNF g
  tokenizers_put (CF (EBNF g)) = CF $ EBNF g
instance
  ( Gram_Term_AtomsR src ts ts RuleDef
  , Inj_Token src ts (->)
  , Inj_Source (Text_of_Source src) src
  ) => Gram_Term ts src RuleDef where
  tokenizers_get (CF (RuleDef (EBNF g))) = CF $ RuleDef $ EBNF g
  tokenizers_put (CF (RuleDef (EBNF g))) = CF $ RuleDef $ EBNF g
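
-- Sketch of the concrete syntax accepted by 'g_term'
-- (the @.@ operator only parses if a tokenizer is registered for it):
--   \(x:A) -> x              abstraction ('Token_Term_Abst')
--   let y (x:A) = f x in y   let-binding ('Token_Term_Let')
--   f x                      application (whitespace is 'Token_Term_App')
--   g . f                    infix operator (via 'tokenizer_lookup')
--   @A                       type token ('Token_Term_Type')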

-- ** Class 'Gram_Term_AtomsR'
class Gram_Term_AtomsR src (ts::[*]) (rs::[*]) g where
  gs_term_atomsR :: Proxy rs -> [CF g (EToken src ts)]
instance Gram_Term_AtomsR src ts '[] g where
  gs_term_atomsR _rs = []
instance
  ( Gram_Term_AtomsT src ts t g
  , Gram_Term_AtomsR src ts rs g
  ) => Gram_Term_AtomsR src ts (t ': rs) g where
  gs_term_atomsR _ =
    gs_term_atomsT (Proxy @t) <>
    gs_term_atomsR (Proxy @rs)

-- ** Class 'Gram_Term_AtomsT'
class Gram_Term_AtomsT src ts t g where
  gs_term_atomsT :: Proxy t -> [CF g (EToken src ts)]
  gs_term_atomsT _t = []
-- instance Gram_Term_AtomsT src ts t RuleDef

gram_term
  :: forall g.
  ( Gram_Term '[Proxy (->), Proxy Integer] () g
  ) => [CF g ()]
gram_term =
  [ voiD g_term
  , voiD g_term_operators
  , voiD g_term_atom
  , voiD g_term_group
  , voiD g_term_abst
  , void (g_term_abst_decl::CF g (TeName, AST_Type ()))
  , voiD g_term_let
  , void g_term_mod_name
  , void g_term_name
  , void g_term_idname
  , void $ cf_of_Terminal g_term_idname_tail
  , void $ cf_of_Reg g_term_keywords
  , void g_term_mod_opname
  , void g_term_opname
  , void $ cf_of_Terminal g_term_opname_ok
  , void $ cf_of_Reg g_term_keysyms
  ] where
  voiD :: CF g (AST_Term () '[Proxy (->), Proxy Integer]) -> CF g ()
  voiD = (() <$)
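
-- Sketch (assumption: some pretty-printer for 'EBNF' rules exists, here a
-- hypothetical @renderEBNF :: CF EBNF a -> Text@): instantiating @g@ to
-- 'EBNF' lets 'gram_term' list the whole term grammar as EBNF rules:
{-
printGrammar :: IO ()
printGrammar = mapM_ (putStrLn . Text.unpack . renderEBNF) (gram_term @EBNF)
-}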