1 {-# LANGUAGE ConstraintKinds #-}
2 {-# LANGUAGE DeriveFunctor #-}
4 {-# LANGUAGE GeneralizedNewtypeDeriving #-}
5 {-# LANGUAGE PolyKinds #-}
6 {-# LANGUAGE UndecidableInstances #-}
7 {-# OPTIONS_GHC -fno-warn-orphans #-}
8 module Language.Symantic.Compiling.Term.Grammar where
10 import Control.Arrow (left)
11 import Control.Monad (foldM, void, (=<<))
12 import qualified Data.Char as Char
13 import qualified Data.Function as Fun
14 import qualified Data.List as List
15 import Data.Map.Strict (Map)
16 import qualified Data.Map.Strict as Map
17 import Data.Monoid ((<>))
18 import Data.Proxy (Proxy(..))
19 import Data.Text (Text)
20 import qualified Data.Text as Text
21 import Prelude hiding (mod, not, any)
23 import Language.Symantic.Parsing
24 import Language.Symantic.Parsing.Grammar
25 import Language.Symantic.Parsing.EBNF
26 import Language.Symantic.Typing
-- | Name of a term: either an identifier (see 'term_idname')
-- or an operator name (see 'term_opname'), wrapped for type safety.
newtype Term_Name = Term_Name Text
  deriving (Eq, Ord, Show)
33 -- | Proto 'EToken'. It's almost like a free monad,
34 -- but has a third constructor ('ProTokPi')
35 -- to require a type argument.
37 -- NOTE: this type may one day be removed
38 -- if proper type inferencing is done.
39 -- In the meantime it is used to require
40 -- term or type arguments needed to build
41 -- the 'EToken's of polymorphic terms.
43 = ProTokLam (EToken meta ts -> ProTok meta ts)
44 -- ^ Require a term argument.
45 | ProTokPi (EToken meta '[Proxy Token_Type] -> ProTok meta ts)
46 -- ^ Require a type argument.
47 | ProTok (EToken meta ts)
48 -- ^ No need for any argument.
50 -- | Declared here and not in @Compiling.Lambda@
51 -- to be able to use 'Token_Term_Var' in 'protok'.
data instance TokenT meta (ts::[*]) (Proxy (->))
  = Token_Term_Abst Term_Name (EToken meta '[Proxy Token_Type]) (EToken meta ts)
    -- ^ Abstraction: bound name, token of its type, token of the body
    -- (built by 'term_abst' from a @\\@ … @->@ form).
  | Token_Term_App (EToken meta ts) (EToken meta ts)
    -- ^ Application of a term token to a term token
    -- (built by 'protok_app' when a 'ProTok' receives a term argument).
  | Token_Term_Let Term_Name (EToken meta ts) (EToken meta ts)
    -- ^ Let-binding: bound name, bound term, body term.
  | Token_Term_Var Term_Name
    -- ^ Variable occurrence, resolved by name (used by 'protok').
  | Token_Term_Compose (EToken meta ts) (EToken meta ts)
    -- ^ Composition of two term tokens.
    -- NOTE(review): semantics presumed from the name; its producer
    -- is not visible in this chunk — confirm against the grammar.
61 = TokenizeR meta ts ts
63 -- ** Type 'Tokenizers'
64 data Tokenizers meta ts
66 { tokenizers_prefix :: Map Mod_Path (Map Term_Name (Term_ProTok Unifix meta ts))
67 , tokenizers_infix :: Map Mod_Path (Map Term_Name (Term_ProTok Infix meta ts))
68 , tokenizers_postfix :: Map Mod_Path (Map Term_Name (Term_ProTok Unifix meta ts))
70 instance Monoid (Tokenizers meta ts) where
71 mempty = Tokenizers Map.empty Map.empty Map.empty
74 (Map.unionWith Map.union
76 (tokenizers_prefix y))
77 (Map.unionWith Map.union
80 (Map.unionWith Map.union
81 (tokenizers_postfix x)
82 (tokenizers_postfix y))
84 data Term_ProTok fixy meta ts
86 { term_protok :: meta -> ProTok meta ts
90 tokenizers :: forall meta ts. Tokenize meta ts => Tokenizers meta ts
91 tokenizers = tokenizeR (Proxy @ts)
95 -> Either Error_Term_Gram (EToken meta ts)
-- | Extract the finished token from a 'ProTok'.
--
-- Fails with 'Error_Term_Gram_Term_incomplete' when the 'ProTok'
-- still awaits a term ('ProTokLam') or type ('ProTokPi') argument.
unProTok tok =
  case tok of
    ProTok t -> Right t
    _        -> Left Error_Term_Gram_Term_incomplete
100 :: Inj_Token meta ts (->)
102 -> Tokenizers meta ts
103 -> Either Error_Term_Gram
104 ( Maybe (Term_ProTok Unifix meta ts)
105 , Term_ProTok Infix meta ts
106 , Maybe (Term_ProTok Unifix meta ts)
108 protok (mod `Mod` tn) (Tokenizers pres ins posts) = do
109 let pre = Map.lookup mod pres >>= Map.lookup tn
110 let post = Map.lookup mod posts >>= Map.lookup tn
111 in_ <- var_or_err $ Map.lookup mod ins >>= Map.lookup tn
112 return (pre, in_, post)
114 var_or_err (Just x) = Right x
117 [] -> Right (var infixN5)
118 _ -> Left $ Error_Term_Gram_Undefined_term
121 { term_protok = \meta -> ProTok $ inj_etoken meta $ Token_Term_Var tn
126 :: Inj_Token meta ts (->)
128 -> [Either (EToken meta '[Proxy Token_Type]) (EToken meta ts)]
129 -> Either Error_Term_Gram (ProTok meta ts)
135 ProTokPi g -> Right $ g typ
136 _ -> Left Error_Term_Gram_Cannot_apply_type
139 ProTokLam f -> Right $ f te
140 ProTok tok@(EToken e) -> Right $
141 ProTok $ inj_etoken (meta_of e) $
142 Token_Term_App tok te
143 _ -> Left Error_Term_Gram_Cannot_apply_term
145 -- ** Class 'TokenizeR'
-- ** Class 'TokenizeR'
-- | Fold over the type-level list @rs@ of token types,
-- collecting the 'Tokenizers' contributed by each (via 'TokenizeT')
-- into a single 'Tokenizers'.
class TokenizeR meta (ts::[*]) (rs::[*]) where
  tokenizeR :: Proxy rs -> Tokenizers meta ts
-- | Base case: an empty list of token types contributes no tokenizers.
instance TokenizeR meta ts '[] where
  tokenizeR _rs = mempty
151 ( TokenizeT meta ts t
152 , TokenizeR meta ts rs
153 ) => TokenizeR meta ts (t ': rs) where
155 tokenizeR (Proxy @rs) `mappend`
-- ** Class 'TokenizeT'
-- | Tokenizers contributed by a single token type @t@.
class TokenizeT meta ts t where
  tokenizeT :: Proxy t -> Tokenizers meta ts
  -- tokenizeT _t = [] `Mod` []
  -- Default: a token type contributes no tokenizers.
  tokenizeT _t = mempty
166 -> [(Term_Name, Term_ProTok fix meta ts)]
167 -> Map Mod_Path (Map Term_Name (Term_ProTok fix meta ts))
168 tokenizeTMod mod tbl = Map.singleton mod $ Map.fromList tbl
171 :: Inj_Token meta ts t
172 => Text -> fixity -> TokenT meta ts (Proxy t)
173 -> (Term_Name, Term_ProTok fixity meta ts)
174 tokenize0 n term_fixity tok =
175 (Term_Name n,) Term_ProTok
176 { term_protok = \meta -> ProTok $ inj_etoken meta $ tok
180 :: Inj_Token meta ts t
182 -> (EToken meta ts -> TokenT meta ts (Proxy t))
183 -> (Term_Name, Term_ProTok fixity meta ts)
184 tokenize1 n term_fixity tok =
185 (Term_Name n,) Term_ProTok
186 { term_protok = \meta ->
188 ProTok $ inj_etoken meta $ tok a
192 :: Inj_Token meta ts t
194 -> (EToken meta ts -> EToken meta ts -> TokenT meta ts (Proxy t))
195 -> (Term_Name, Term_ProTok fixity meta ts)
196 tokenize2 n term_fixity tok =
197 (Term_Name n,) Term_ProTok
198 { term_protok = \meta ->
199 ProTokLam $ \a -> ProTokLam $ \b ->
200 ProTok $ inj_etoken meta $ tok a b
205 :: Inj_Token meta ts t
207 -> (EToken meta ts -> EToken meta ts -> EToken meta ts -> TokenT meta ts (Proxy t))
208 -> (Term_Name, Term_ProTok fixity meta ts)
209 tokenize3 n term_fixity tok =
210 (Term_Name n,) Term_ProTok
211 { term_protok = \meta ->
212 ProTokLam $ \a -> ProTokLam $ \b -> ProTokLam $ \c ->
213 ProTok $ inj_etoken meta $ tok a b c
-- | Path of module names; components are separated by @.@ when parsed
-- (see 'mod_path').
type Mod_Path = [Mod_Name]
-- | A single module-name component.
newtype Mod_Name = Mod_Name Text
  deriving (Eq, Ord, Show)
-- | A value of type @a@ qualified by the module path it belongs to.
data Mod a = Mod Mod_Path a
  deriving (Eq, Functor, Ord, Show)
224 -- * Class 'Gram_Term_Name'
235 ) => Gram_Term_Name g where
236 mod_path :: CF g Mod_Path
237 mod_path = rule "mod_path" $
242 mod_name :: CF g Mod_Name
243 mod_name = rule "mod_name" $
244 (Mod_Name . Text.pack <$>) $
248 <*. (any `but` term_idname_tail))
250 identG = (:) <$> headG <*> many (cf_of_term term_idname_tail)
251 headG = unicat $ Unicat Char.UppercaseLetter
253 term_mod_name :: CF g (Mod Term_Name)
254 term_mod_name = rule "term_mod_name" $
257 parens term_mod_opname
258 term_name :: CF g Term_Name
259 term_name = rule "term_name" $
264 term_mod_idname :: CF g (Mod Term_Name)
265 term_mod_idname = rule "term_mod_idname" $
267 <$> option [] (mod_path <* char '.')
269 term_idname :: CF g Term_Name
270 term_idname = rule "term_idname" $
271 (Term_Name . Text.pack <$>) $
275 <*. (any `but` term_idname_tail)
277 identG = (:) <$> headG <*> many (cf_of_term term_idname_tail)
278 headG = unicat $ Unicat_Letter
279 term_idname_tail :: Terminal g Char
280 term_idname_tail = rule "term_idname_tail" $
281 unicat Unicat_Letter <+>
-- | Reserved words that cannot be parsed as term identifiers.
term_keywords :: Reg rl g String
term_keywords = rule "term_keywords" $
  choice (map string ["in", "let"])
287 term_mod_opname :: CF g (Mod Term_Name)
288 term_mod_opname = rule "term_mod_opname" $
290 <$> option [] (mod_path <* char '.')
292 term_opname :: CF g Term_Name
293 term_opname = rule "term_opname" $
294 (Term_Name . Text.pack <$>) $
298 <*. (any `but` term_opname_ok)
300 symG = some $ cf_of_term $ term_opname_ok
301 term_opname_ok :: Terminal g Char
302 term_opname_ok = rule "term_opname_ok" $
309 koG = choice (char <$> ['(', ')', '`', '\'', ',', '[', ']'])
-- | Reserved symbols that cannot be parsed as term operator names.
term_keysyms :: Reg rl g String
term_keysyms = rule "term_keysyms" $
  choice (map string ["\\", "->", "="])
314 deriving instance Gram_Term_Name g => Gram_Term_Name (CF g)
315 instance Gram_Term_Name EBNF
316 instance Gram_Term_Name RuleDef
318 -- * Class 'Gram_Term_Type'
330 ) => Gram_Term_Type meta g where
332 :: CF g (Term_Name, TokType meta)
333 term_abst_decl = rule "term_abst_decl" $
339 deriving instance Gram_Term_Type meta g => Gram_Term_Type meta (CF g)
340 instance Gram_Term_Type meta EBNF
341 instance Gram_Term_Type meta RuleDef
343 -- * Class 'Gram_Error'
344 class Gram_Error g where
345 term_unError :: CF g (Either Error_Term_Gram a) -> CF g a
346 deriving instance Gram_Error g => Gram_Error (CF g)
-- | For 'EBNF', un-erroring changes nothing: the same underlying
-- grammar description is rebuilt as-is.
instance Gram_Error EBNF where
  term_unError (CF (EBNF g)) = CF (EBNF g)
-- | For 'RuleDef', un-erroring changes nothing: the same underlying
-- grammar description is rebuilt as-is.
instance Gram_Error RuleDef where
  term_unError (CF (RuleDef (EBNF g))) = CF (RuleDef (EBNF g))
353 -- ** Type 'Error_Term_Gram'
355 = Error_Term_Gram_Fixity Error_Fixity
356 | Error_Term_Gram_Cannot_apply_term
357 | Error_Term_Gram_Cannot_apply_type
358 | Error_Term_Gram_Undefined_term
359 | Error_Term_Gram_Term_incomplete
362 -- * Class 'Gram_Term'
373 , Gram_Term_AtomsR meta ts ts g
375 , Gram_Term_Type meta g
377 ) => Gram_Term ts meta g where
378 -- | Wrap 'term_abst'. Useful to modify body's scope.
380 :: CF g [(Term_Name, TokType meta)]
381 -> CF g (EToken meta ts)
382 -> CF g ([(Term_Name, TokType meta)], EToken meta ts)
383 term_abst_args_body args body = (,) <$> args <*> body
384 term_tokenizers :: CF g (Tokenizers meta ts -> a) -> CF g a
387 :: Inj_Tokens meta ts '[Proxy (->)]
388 => CF g (EToken meta ts)
389 termG = rule "term" $
396 :: Inj_Tokens meta ts '[Proxy (->)]
397 => CF g (EToken meta ts)
398 term_operators = rule "term_operators" $
401 left Error_Term_Gram_Fixity <$>
404 (term_unError $ metaG $ term_tokenizers $ op_prefix <$> term_op_prefix)
405 (term_unError $ metaG $ term_tokenizers $ op_infix <$> term_op_infix)
406 (term_unError $ metaG $ term_tokenizers $ op_postfix <$> term_op_postfix)
408 bqG :: Gram_Terminal g' => g' Char
410 op_infix name toks meta = do
411 (_pre, in_, _post) <- protok name toks
413 (term_fixity in_,) $ \ma mb -> do
416 unProTok =<< term_protok in_ meta `protok_app` [Right a, Right b]
417 op_prefix name toks meta = do
418 (pre, _in_, _post) <- protok name toks
421 Right $ (term_fixity p,) $ (=<<) $ \a ->
422 unProTok =<< term_protok p meta `protok_app` [Right a]
423 Nothing -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedPrefix
424 op_postfix name toks meta = do
425 (_pre, _in_, post) <- protok name toks
428 Right $ (term_fixity p,) $ (=<<) $ \a ->
429 unProTok =<< term_protok p meta `protok_app` [Right a]
430 Nothing -> Left $ Error_Term_Gram_Fixity Error_Fixity_NeedPostfix
431 term_op_postfix :: CF g (Mod Term_Name)
432 term_op_postfix = rule "term_op_postfix" $
434 bqG *> term_mod_idname <+> -- <* (cf_of_term $ Gram.Term (pure ' ') `but` bqG)
436 term_op_infix :: CF g (Mod Term_Name)
437 term_op_infix = rule "term_op_infix" $
439 between bqG bqG term_mod_idname <+>
441 term_op_prefix :: CF g (Mod Term_Name)
442 term_op_prefix = rule "term_op_prefix" $
444 term_mod_idname <* bqG <+>
447 :: Inj_Tokens meta ts '[Proxy (->)]
448 => CF g (EToken meta ts)
449 term_app = rule "term_app" $
451 (\a as -> unProTok =<< protok_app a as)
455 :: Inj_Tokens meta ts '[Proxy (->)]
456 => CF g (Either (EToken meta '[Proxy Token_Type])
458 term_atom = rule "term_atom" $
459 (Left <$ char '@' <*> typeG) <+>
460 (Right <$> term_unError (unProTok <$> term_atom_proto))
462 :: Inj_Tokens meta ts '[Proxy (->)]
463 => CF g (ProTok meta ts)
466 term_atomsR (Proxy @ts) <>
467 [ metaG $ ((\(_, in_, _) -> term_protok in_) <$>) $ term_unError $ term_tokenizers $
468 protok <$> term_mod_name
469 , ProTok <$> term_group
472 :: Inj_Tokens meta ts '[Proxy (->)]
473 => CF g (EToken meta ts)
474 term_group = rule "term_group" $ parens termG
476 :: Inj_Tokens meta ts '[Proxy (->)]
477 => CF g (EToken meta ts)
478 term_abst = rule "term_abst" $
481 List.foldr (\(x, ty_x) ->
483 Token_Term_Abst x ty_x) te xs) <$>) $
485 (symbol "\\" *> some term_abst_decl <* symbol "->")
488 :: Inj_Tokens meta ts '[Proxy (->)]
489 => CF g (EToken meta ts)
490 term_let = rule "term_let" $
492 (\name args bound body meta ->
496 (\(x, ty_x) -> inj_etoken meta . Token_Term_Abst x ty_x) bound args
500 <*> many term_abst_decl
507 ( Gram_Term ts meta g
508 , Gram_Term_AtomsR meta ts ts (CF g)
509 ) => Gram_Term ts meta (CF g)
511 Gram_Term_AtomsR meta ts ts EBNF =>
512 Gram_Term ts meta EBNF where
513 term_tokenizers (CF (EBNF g)) = CF $ EBNF g
515 Gram_Term_AtomsR meta ts ts RuleDef =>
516 Gram_Term ts meta RuleDef where
517 term_tokenizers (CF (RuleDef (EBNF g))) =
518 CF $ RuleDef $ EBNF $ g
520 -- ** Class 'Gram_Term_AtomsR'
521 class Gram_Term_AtomsR meta (ts::[*]) (rs::[*]) g where
522 term_atomsR :: Proxy rs -> [CF g (ProTok meta ts)]
523 instance Gram_Term_AtomsR meta ts '[] g where
526 ( Gram_Term_AtomsT meta ts t g
527 , Gram_Term_AtomsR meta ts rs g
528 ) => Gram_Term_AtomsR meta ts (t ': rs) g where
530 term_atomsT (Proxy @t) <>
531 term_atomsR (Proxy @rs)
533 -- ** Class 'Gram_Term_AtomsT'
534 class Gram_Term_AtomsT meta ts t g where
535 term_atomsT :: Proxy t -> [CF g (ProTok meta ts)]
537 instance Gram_Term_AtomsT meta ts t RuleDef
541 ( Gram_Term '[Proxy (->), Proxy Integer] () g
550 , void (term_abst_decl::CF g (Term_Name, TokType ()))
555 , void $ cf_of_term term_idname_tail
556 , void $ cf_of_reg term_keywords
557 , void term_mod_opname
559 , void $ cf_of_term term_opname_ok
560 , void $ cf_of_reg term_keysyms
562 ue :: CF g (EToken () '[Proxy (->), Proxy Integer]) -> CF g ()
564 -- uf :: CF g (ProTok () '[Proxy (->)]) -> CF g ()
566 ug :: CF g (Either (EToken () '[Proxy Token_Type])
567 (EToken () '[Proxy (->), Proxy Integer])) -> CF g ()