{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Language.Symantic.Compiling.Term.Grammar where

import Control.Arrow (left)
import Control.Monad (foldM, void, (=<<))
import qualified Data.Char as Char
import qualified Data.Function as Fun
import qualified Data.List as List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Monoid ((<>))
import Data.Proxy (Proxy(..))
import Data.Text (Text)
import qualified Data.Text as Text
import Prelude hiding (mod, not, any)

import Language.Symantic.Parsing
import Language.Symantic.Typing

-- * Type 'Term_Name'
newtype Term_Name = Term_Name Text
  deriving (Eq, Ord, Show)

-- * Type 'ProTok'
-- | Proto 'EToken'. It is almost like a free monad,
-- but has a third constructor ('ProTokPi')
-- to require a type argument.
--
-- NOTE: this type may one day be removed
-- if proper type inference is done.
-- In the meantime it is used to require
-- the term or type arguments needed to build
-- the 'EToken's of polymorphic terms.
data ProTok meta ts
  = ProTokLam (EToken meta ts -> ProTok meta ts)
    -- ^ Require a term argument.
  | ProTokPi (EToken meta '[Proxy Token_Type] -> ProTok meta ts)
    -- ^ Require a type argument.
  | ProTok (EToken meta ts)
    -- ^ No need for any argument.
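
-- A non-normative sketch of how a 'ProTok' is consumed: a binary tokenizer
-- built with 'tokenize2' has the shape
-- @ProTokLam (\a -> ProTokLam (\b -> ProTok …))@,
-- and 'protok_app' peels off one constructor per parsed argument:
--
-- @
-- protok_app tok2 [Right a, Right b]  -- Right (ProTok …): saturated
-- protok_app tok2 [Right a]           -- Right (ProTokLam …): 'unProTok' on it
--                                     --   fails with 'Error_Term_Gram_Term_incomplete'
-- protok_app tok2 [Left ty]           -- Left 'Error_Term_Gram_Cannot_apply_type'
-- @
--
-- where @tok2@, @a@, @b@ and @ty@ are hypothetical, already-parsed values.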

-- | Declared here and not in @Compiling.Lambda@
-- to be able to use 'Token_Term_Var' in 'protok'.
data instance TokenT meta (ts::[*]) (Proxy (->))
  = Token_Term_Abst Term_Name (EToken meta '[Proxy Token_Type]) (EToken meta ts)
  | Token_Term_App (EToken meta ts) (EToken meta ts)
  | Token_Term_Let Term_Name (EToken meta ts) (EToken meta ts)
  | Token_Term_Var Term_Name
  | Token_Term_Compose (EToken meta ts) (EToken meta ts)
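
-- For intuition (an illustrative sketch, with the meta annotations and the
-- concrete type token elided): 'term_abst' tokenizes the source @\\(x:a) -> x@
-- as @Token_Term_Abst (Term_Name "x") \<token for a\>@ applied to a body that is
-- @Token_Term_Var (Term_Name "x")@.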

-- * Class 'Tokenize'
type Tokenize meta ts
  = TokenizeR meta ts ts

-- ** Type 'Tokenizers'
data Tokenizers meta ts
  = Tokenizers
  { tokenizers_prefix  :: Map Mod_Path (Map Term_Name (Term_ProTok Unifix meta ts))
  , tokenizers_infix   :: Map Mod_Path (Map Term_Name (Term_ProTok Infix  meta ts))
  , tokenizers_postfix :: Map Mod_Path (Map Term_Name (Term_ProTok Unifix meta ts))
  }
instance Monoid (Tokenizers meta ts) where
  mempty = Tokenizers Map.empty Map.empty Map.empty
  mappend x y =
    Tokenizers
      (Map.unionWith Map.union
        (tokenizers_prefix x)
        (tokenizers_prefix y))
      (Map.unionWith Map.union
        (tokenizers_infix x)
        (tokenizers_infix y))
      (Map.unionWith Map.union
        (tokenizers_postfix x)
        (tokenizers_postfix y))

data Term_ProTok fixy meta ts
  = Term_ProTok
  { term_protok :: meta -> ProTok meta ts
  , term_fixity :: fixy
  }

tokenizers :: forall meta ts. Tokenize meta ts => Tokenizers meta ts
tokenizers = tokenizeR (Proxy @ts)

unProTok
  :: ProTok meta ts
  -> Either Error_Term_Gram (EToken meta ts)
unProTok (ProTok t) = Right t
unProTok _ = Left Error_Term_Gram_Term_incomplete

protok
  :: Inj_Token meta ts (->)
  => Mod Term_Name
  -> Tokenizers meta ts
  -> Either Error_Term_Gram
      ( Maybe (Term_ProTok Unifix meta ts)
      , Term_ProTok Infix meta ts
      , Maybe (Term_ProTok Unifix meta ts)
      )
protok (mod `Mod` tn) (Tokenizers pres ins posts) = do
  let pre  = Map.lookup mod pres  >>= Map.lookup tn
  let post = Map.lookup mod posts >>= Map.lookup tn
  in_ <- var_or_err $ Map.lookup mod ins >>= Map.lookup tn
  return (pre, in_, post)
  where
    var_or_err (Just x) = Right x
    var_or_err Nothing =
      case mod of
        [] -> Right (var infixN5)
        _  -> Left $ Error_Term_Gram_Undefined_term
    var term_fixity =
      Term_ProTok
        { term_protok = \meta -> ProTok $ inj_EToken meta $ Token_Term_Var tn
        , term_fixity
        }

protok_app
  :: Inj_Token meta ts (->)
  => ProTok meta ts
  -> [Either (EToken meta '[Proxy Token_Type]) (EToken meta ts)]
  -> Either Error_Term_Gram (ProTok meta ts)
protok_app =
  foldM app
  where
    app acc (Left typ) =
      case acc of
        ProTokPi g -> Right $ g typ
        _ -> Left Error_Term_Gram_Cannot_apply_type
    app acc (Right te) =
      case acc of
        ProTokLam f -> Right $ f te
        ProTok tok@(EToken e) -> Right $
          ProTok $ inj_EToken (meta_of e) $
            Token_Term_App tok te
        _ -> Left Error_Term_Gram_Cannot_apply_term

-- ** Class 'TokenizeR'
class TokenizeR meta (ts::[*]) (rs::[*]) where
  tokenizeR :: Proxy rs -> Tokenizers meta ts
instance TokenizeR meta ts '[] where
  tokenizeR _rs = mempty
instance
  ( TokenizeT meta ts t
  , TokenizeR meta ts rs
  ) => TokenizeR meta ts (t ': rs) where
  tokenizeR _ =
    tokenizeR (Proxy @rs) `mappend`
    tokenizeT (Proxy @t)

-- ** Class 'TokenizeT'
class TokenizeT meta ts t where
  tokenizeT :: Proxy t -> Tokenizers meta ts
  -- tokenizeT _t = [] `Mod` []
  tokenizeT _t = mempty

tokenizeTMod
  :: Mod_Path
  -> [(Term_Name, Term_ProTok fix meta ts)]
  -> Map Mod_Path (Map Term_Name (Term_ProTok fix meta ts))
tokenizeTMod mod tbl = Map.singleton mod $ Map.fromList tbl

tokenize0
  :: Inj_Token meta ts t
  => Text -> fixity -> TokenT meta ts (Proxy t)
  -> (Term_Name, Term_ProTok fixity meta ts)
tokenize0 n term_fixity tok =
  (Term_Name n,) Term_ProTok
    { term_protok = \meta -> ProTok $ inj_EToken meta $ tok
    , term_fixity }

tokenize1
  :: Inj_Token meta ts t
  => Text -> fixity
  -> (EToken meta ts -> TokenT meta ts (Proxy t))
  -> (Term_Name, Term_ProTok fixity meta ts)
tokenize1 n term_fixity tok =
  (Term_Name n,) Term_ProTok
    { term_protok = \meta ->
        ProTokLam $ \a ->
          ProTok $ inj_EToken meta $ tok a
    , term_fixity }

tokenize2
  :: Inj_Token meta ts t
  => Text -> fixity
  -> (EToken meta ts -> EToken meta ts -> TokenT meta ts (Proxy t))
  -> (Term_Name, Term_ProTok fixity meta ts)
tokenize2 n term_fixity tok =
  (Term_Name n,) Term_ProTok
    { term_protok = \meta ->
        ProTokLam $ \a -> ProTokLam $ \b ->
          ProTok $ inj_EToken meta $ tok a b
    , term_fixity
    }

tokenize3
  :: Inj_Token meta ts t
  => Text -> fixity
  -> (EToken meta ts -> EToken meta ts -> EToken meta ts -> TokenT meta ts (Proxy t))
  -> (Term_Name, Term_ProTok fixity meta ts)
tokenize3 n term_fixity tok =
  (Term_Name n,) Term_ProTok
    { term_protok = \meta ->
        ProTokLam $ \a -> ProTokLam $ \b -> ProTokLam $ \c ->
          ProTok $ inj_EToken meta $ tok a b c
    , term_fixity
    }
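
-- A minimal sketch of how a token set is expected to register its terms,
-- assuming a hypothetical @Proxy Num@ token type with an addition token
-- @Token_Term_Num_Add@ (both names are illustrative, not defined here):
--
-- @
-- instance Inj_Token meta ts Num => TokenizeT meta ts (Proxy Num) where
--   tokenizeT _t = mempty
--     { tokenizers_infix = tokenizeTMod []
--         [ tokenize2 "+" infixN5 Token_Term_Num_Add ]
--     }
-- @
--
-- 'tokenizers' then merges such instances over a whole token list @ts@
-- through 'TokenizeR', thanks to the 'Monoid' instance of 'Tokenizers'.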

-- * Type 'Mod'
type Mod_Path = [Mod_Name]
newtype Mod_Name = Mod_Name Text
  deriving (Eq, Ord, Show)
data Mod a = Mod Mod_Path a
  deriving (Eq, Functor, Ord, Show)

-- * Class 'Gram_Term_Name'
class
  ( Alt g
  , Alter g
  , App g
  , Try g
  , Gram_CF g
  , Gram_Op g
  , Gram_Lexer g
  , Gram_RegL g
  , Gram_Rule g
  , Gram_Terminal g
  ) => Gram_Term_Name g where
  mod_path :: CF g Mod_Path
  mod_path = rule "mod_path" $
    infixrG
      (pure <$> mod_name)
      (op <$ char '.')
    where op = mappend
  mod_name :: CF g Mod_Name
  mod_name = rule "mod_name" $
    (Mod_Name . Text.pack <$>) $
    identG `minus`
    (Fun.const
      <$> term_keywords
      <*. (any `but` term_idname_tail))
    where
      identG = (:) <$> headG <*> many (cf_of_Terminal term_idname_tail)
      headG = unicat $ Unicat Char.UppercaseLetter

  term_mod_name :: CF g (Mod Term_Name)
  term_mod_name = rule "term_mod_name" $
    lexeme $
      term_mod_idname <+>
      parens term_mod_opname
  term_name :: CF g Term_Name
  term_name = rule "term_name" $
    lexeme $
      term_idname <+>
      parens term_opname

  term_mod_idname :: CF g (Mod Term_Name)
  term_mod_idname = rule "term_mod_idname" $
    Mod
      <$> option [] (try $ mod_path <* char '.')
      <*> term_idname
  term_idname :: CF g Term_Name
  term_idname = rule "term_idname" $
    (Term_Name . Text.pack <$>) $
    (identG `minus`) $
    Fun.const
      <$> term_keywords
      <*. (any `but` term_idname_tail)
    where
      identG = (:) <$> headG <*> many (cf_of_Terminal term_idname_tail)
      headG = unicat $ Unicat_Letter
  term_idname_tail :: Terminal g Char
  term_idname_tail = rule "term_idname_tail" $
    unicat Unicat_Letter <+>
    unicat Unicat_Number
  term_keywords :: Reg rl g String
  term_keywords = rule "term_keywords" $
    choice $ string <$> ["in", "let"]

  term_mod_opname :: CF g (Mod Term_Name)
  term_mod_opname = rule "term_mod_opname" $
    Mod
      <$> option [] (try $ mod_path <* char '.')
      <*> term_opname
  term_opname :: CF g Term_Name
  term_opname = rule "term_opname" $
    (Term_Name . Text.pack <$>) $
    (symG `minus`) $
    Fun.const
      <$> term_keysyms
      <*. (any `but` term_opname_ok)
    where
      symG = some $ cf_of_Terminal $ term_opname_ok
  term_opname_ok :: Terminal g Char
  term_opname_ok = rule "term_opname_ok" $
    choice (unicat <$>
      [ Unicat_Symbol
      , Unicat_Punctuation
      , Unicat_Mark
      ]) `but` koG
    where
      koG = choice (char <$> ['(', ')', '`', '\'', ',', '[', ']'])
  term_keysyms :: Reg rl g String
  term_keysyms = rule "term_keysyms" $
    choice $ string <$> ["\\", "->", "="]

deriving instance Gram_Term_Name g => Gram_Term_Name (CF g)
instance Gram_Term_Name EBNF
instance Gram_Term_Name RuleDef

-- * Class 'Gram_Term_Type'
class
  ( Alt g
  , Alter g
  , App g
  , Gram_CF g
  , Gram_Lexer g
  , Gram_Meta meta g
  , Gram_Rule g
  , Gram_Terminal g
  , Gram_Term_Name g
  , Gram_Type meta g
  ) => Gram_Term_Type meta g where
  term_abst_decl
    :: CF g (Term_Name, TokType meta)
  term_abst_decl = rule "term_abst_decl" $
    parens $ (,)
      <$> term_name
      <*  symbol ":"
      <*> typeG

deriving instance Gram_Term_Type meta g => Gram_Term_Type meta (CF g)
instance Gram_Term_Type meta EBNF
instance Gram_Term_Type meta RuleDef

-- * Class 'Gram_Error'
class Gram_Error g where
  term_unError :: CF g (Either Error_Term_Gram a) -> CF g a
deriving instance Gram_Error g => Gram_Error (CF g)
instance Gram_Error EBNF where
  term_unError (CF (EBNF g)) = CF $ EBNF g
instance Gram_Error RuleDef where
  term_unError (CF (RuleDef (EBNF g))) =
    CF $ RuleDef $ EBNF $ g

-- ** Type 'Error_Term_Gram'
data Error_Term_Gram
  = Error_Term_Gram_Fixity Error_Fixity
  | Error_Term_Gram_Cannot_apply_term
  | Error_Term_Gram_Cannot_apply_type
  | Error_Term_Gram_Undefined_term
  | Error_Term_Gram_Term_incomplete
  deriving (Eq, Show)

-- * Class 'Gram_Term'
class
  ( Alt g
  , Alter g
  , App g
  , Gram_CF g
  , Gram_Lexer g
  , Gram_Meta meta g
  , Gram_Rule g
  , Gram_Terminal g
  , Gram_Error g
  , Gram_Term_AtomsR meta ts ts g
  , Gram_Term_Name g
  , Gram_Term_Type meta g
  , Gram_Type meta g
  ) => Gram_Term ts meta g where
  -- | Wrap 'term_abst'. Useful to modify the body's scope.
  term_abst_args_body
    :: CF g [(Term_Name, TokType meta)]
    -> CF g (EToken meta ts)
    -> CF g ([(Term_Name, TokType meta)], EToken meta ts)
  term_abst_args_body args body = (,) <$> args <*> body
  term_tokenizers :: CF g (Tokenizers meta ts -> a) -> CF g a

  termG
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (EToken meta ts)
  termG = rule "term" $
    choice
      [ try term_abst
      , term_operators
      , term_let
      ]
  term_operators
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (EToken meta ts)
  term_operators = rule "term_operators" $
    term_unError $
    term_unError $
    left Error_Term_Gram_Fixity <$>
    operators
      (Right <$> term_app)
      (term_unError $ metaG $ term_tokenizers $ op_prefix <$> term_op_prefix)
      (term_unError $ metaG $ term_tokenizers $ op_infix <$> term_op_infix)
      (term_unError $ metaG $ term_tokenizers $ op_postfix <$> term_op_postfix)
    where
      bqG :: Gram_Terminal g' => g' Char
      bqG = char '`'
      op_infix name toks meta = do
        (_pre, in_, _post) <- protok name toks
        return $
          (term_fixity in_,) $ \ma mb -> do
            a <- ma
            b <- mb
            unProTok =<< term_protok in_ meta `protok_app` [Right a, Right b]
      op_prefix name toks meta = do
        (pre, _in_, _post) <- protok name toks
        case pre of
          Just p ->
            Right $ (term_fixity p,) $ (=<<) $ \a ->
              unProTok =<< term_protok p meta `protok_app` [Right a]
          Nothing -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedPrefix
      op_postfix name toks meta = do
        (_pre, _in_, post) <- protok name toks
        case post of
          Just p ->
            Right $ (term_fixity p,) $ (=<<) $ \a ->
              unProTok =<< term_protok p meta `protok_app` [Right a]
          Nothing -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedPostfix
      term_op_postfix :: CF g (Mod Term_Name)
      term_op_postfix = rule "term_op_postfix" $
        lexeme $
          bqG *> term_mod_idname <+> -- <* (cf_of_Terminal $ Gram.Term (pure ' ') `but` bqG)
          term_mod_opname
      term_op_infix :: CF g (Mod Term_Name)
      term_op_infix = rule "term_op_infix" $
        lexeme $
          between bqG bqG term_mod_idname <+>
          term_mod_opname
      term_op_prefix :: CF g (Mod Term_Name)
      term_op_prefix = rule "term_op_prefix" $
        lexeme $
          term_mod_idname <* bqG <+>
          term_mod_opname
  term_app
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (EToken meta ts)
  term_app = rule "term_app" $
    term_unError $
    (\a as -> unProTok =<< protok_app a as)
      <$> term_atom_proto
      <*> many (try term_atom)
  term_atom
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (Either (EToken meta '[Proxy Token_Type])
                    (EToken meta ts))
  term_atom = rule "term_atom" $
    (Left <$ char '@' <*> typeG) <+>
    (Right <$> term_unError (unProTok <$> term_atom_proto))
  term_atom_proto
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (ProTok meta ts)
  term_atom_proto =
    choice $
      try <$> term_atomsR (Proxy @ts) <>
      [ try $
          metaG $ ((\(_, in_, _) -> term_protok in_) <$>) $
          term_unError $
          term_tokenizers $
          protok <$> term_mod_name
      , ProTok <$> term_group
      ]
  term_group
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (EToken meta ts)
  term_group = rule "term_group" $ parens termG
  term_abst
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (EToken meta ts)
  term_abst = rule "term_abst" $
    metaG $
    ((\(xs, te) meta ->
      List.foldr (\(x, ty_x) ->
        inj_EToken meta .
        Token_Term_Abst x ty_x) te xs) <$>) $
    term_abst_args_body
      (symbol "\\" *> some term_abst_decl <* symbol "->")
      termG
  term_let
    :: Inj_Tokens meta ts '[Proxy (->)]
    => CF g (EToken meta ts)
  term_let = rule "term_let" $
    metaG $
    (\name args bound body meta ->
      inj_EToken meta $
      Token_Term_Let name
        (List.foldr
          (\(x, ty_x) -> inj_EToken meta . Token_Term_Abst x ty_x) bound args
        ) body)
      <$  symbol "let"
      <*> term_name
      <*> many term_abst_decl
      <*  symbol "="
      <*> termG
      <*  symbol "in"
      <*> termG

deriving instance
  ( Gram_Term ts meta g
  , Gram_Term_AtomsR meta ts ts (CF g)
  ) => Gram_Term ts meta (CF g)
instance
  Gram_Term_AtomsR meta ts ts EBNF =>
  Gram_Term ts meta EBNF where
  term_tokenizers (CF (EBNF g)) = CF $ EBNF g
instance
  Gram_Term_AtomsR meta ts ts RuleDef =>
  Gram_Term ts meta RuleDef where
  term_tokenizers (CF (RuleDef (EBNF g))) =
    CF $ RuleDef $ EBNF $ g

-- ** Class 'Gram_Term_AtomsR'
class Gram_Term_AtomsR meta (ts::[*]) (rs::[*]) g where
  term_atomsR :: Proxy rs -> [CF g (ProTok meta ts)]
instance Gram_Term_AtomsR meta ts '[] g where
  term_atomsR _rs = []
instance
  ( Gram_Term_AtomsT meta ts t g
  , Gram_Term_AtomsR meta ts rs g
  ) => Gram_Term_AtomsR meta ts (t ': rs) g where
  term_atomsR _ =
    term_atomsT (Proxy @t) <>
    term_atomsR (Proxy @rs)

-- ** Class 'Gram_Term_AtomsT'
class Gram_Term_AtomsT meta ts t g where
  term_atomsT :: Proxy t -> [CF g (ProTok meta ts)]
  term_atomsT _t = []
instance Gram_Term_AtomsT meta ts t RuleDef

gram_term
  :: forall g.
  ( Gram_Term '[Proxy (->), Proxy Integer] () g
  ) => [CF g ()]
gram_term =
  [ ue termG
  , ue term_operators
  , ue term_app
  , ug term_atom
  , ue term_group
  , ue term_abst
  , void (term_abst_decl::CF g (Term_Name, TokType ()))
  , ue term_let
  , void term_mod_name
  , void term_name
  , void term_idname
  , void $ cf_of_Terminal term_idname_tail
  , void $ cf_of_Reg term_keywords
  , void term_mod_opname
  , void term_opname
  , void $ cf_of_Terminal term_opname_ok
  , void $ cf_of_Reg term_keysyms
  ] where
    ue :: CF g (EToken () '[Proxy (->), Proxy Integer]) -> CF g ()
    ue = (() <$)
    -- uf :: CF g (ProTok () '[Proxy (->)]) -> CF g ()
    -- uf = (() <$)
    ug :: CF g (Either (EToken () '[Proxy Token_Type])
                       (EToken () '[Proxy (->), Proxy Integer])) -> CF g ()
    ug = (() <$)