1 {-# LANGUAGE UndecidableInstances #-}
2 {-# OPTIONS_GHC -fno-warn-orphans #-}
3 module Compiling.Term.Test where
6 import Test.Tasty.HUnit
9 import qualified Control.Monad.Classes as MC
10 import qualified Control.Monad.Classes.Run as MC
11 import qualified Control.Monad.Trans.State.Strict as SS
12 import Data.Functor.Identity (Identity(..))
13 import qualified Data.Map.Strict as Map
14 import qualified Data.List as List
15 import Data.Proxy (Proxy(..))
16 import Data.Text (Text)
17 import qualified Data.Text as Text
18 import Data.Type.Equality ((:~:)(Refl))
19 import qualified Text.Megaparsec as P
21 import Language.Symantic.Compiling
22 import Language.Symantic.Interpreting
23 import Language.Symantic.Parsing
24 import qualified Language.Symantic.Grammar as Gram
25 import Language.Symantic.Typing
26 import qualified Language.Symantic.Lib as Sym
34 -- P.ParsecT instances
-- ParsecT declares no monad-classes effects of its own ('False), so
-- effect lookups such as 'MC.get' resolve against the inner monad @m@
-- (standard monad-classes dispatch — see Control.Monad.Classes docs).
type instance MC.CanDo (P.ParsecT e s m) eff = 'False
-- Empty instance: 'Gram_Name' for ParsecT uses only the class's
-- default method implementations.
instance ParsecC e s => Gram_Name (P.ParsecT e s m)
38 instance ParsecC e s => Gram.Gram_Meta Meta (P.ParsecT e s m) where
42 , Gram.Gram_Meta meta (P.ParsecT e s m)
43 ) => Gram_Term_Type meta (P.ParsecT e s m)
46 ) => Gram_Error (P.ParsecT e s m) where
47 term_unError (Gram.CF me) = Gram.CF $ do
50 Left err -> fail $ show err
54 , Gram.Gram_Meta meta (P.ParsecT e s m)
55 , Gram_Term_AtomsR meta ts ts (P.ParsecT e s m)
56 , MC.MonadState (Tokenizers meta ts) m
57 ) => Gram_Term ts meta (P.ParsecT e s m) where
58 term_tokenizers (Gram.CF ma) = Gram.CF $ do
60 toks :: Tokenizers meta ts <- MC.get
62 g_term_abst_args_body (Gram.CF args) (Gram.CF body) = Gram.CF $ do
65 toks :: Tokenizers meta ts <- MC.get
68 { tokenizers_prefix = del (tokenizers_prefix toks) as
69 , tokenizers_infix = del (tokenizers_infix toks) as
70 , tokenizers_postfix = del (tokenizers_postfix toks) as
74 where del = foldr $ \(n, _) -> Map.adjust (Map.delete n) []
76 test_tokenizer :: forall is.
77 ( Inj_Tokens Meta is [Proxy (->), Proxy Integer]
78 , Gram_Term is Meta (P.ParsecT P.Dec Text (SS.StateT (Tokenizers Meta is) Identity))
80 ) => Text -> Either (P.ParseError Char P.Dec) (EToken Meta is)
83 MC.evalStateStrict (tokenizers::Tokenizers Meta is) $
85 where g = Gram.unCF $ g_term <* Gram.eoi
92 , Gram_Term is Meta (P.ParsecT P.Dec Text (SS.StateT (Tokenizers Meta is) Identity))
93 , Inj_Tokens Meta is [Proxy (->), Proxy Integer]
97 , Sym_of_Ifaces is HostI
98 , Sym_of_Ifaces is TextI
100 , cs ~ TyConsts_of_Ifaces is
102 -> Either ( Type cs h
103 , Either (P.ParseError Char P.Dec)
104 (Error_Term Meta cs is) )
107 test_compile inp expected =
108 testCase (elide inp) $
109 case test_tokenizer inp of
110 Left err -> Left (Left err) @?= snd `left` expected
112 case compileWithoutCtx tok of
113 Left err -> Left (Right err) @?= snd `left` expected
114 Right (ETermClosed typ (TermClosed te)) ->
116 Left (_, err) -> Right ("…"::Text) @?= Left err
117 Right (ty_expected::Type cs h, _::h, _::Text) ->
118 (>>= (@?= (\(_::Type cs h, err) -> err) `left` expected)) $
119 case typ `eq_Type` ty_expected of
120 Nothing -> return $ Left $ Right $
121 Error_Term_Con_Type $ Right $
123 (Right $ At Nothing $ EType typ)
124 (At Nothing $ EType ty_expected)
126 let h = host_from_term te
132 -- , (text_from_term :: Repr_Text h -> Text) r
-- | Keep the payload of a 'Right', discarding any 'Left'.
maybeRight :: Either l r -> Maybe r
maybeRight = either (const Nothing) Just
-- | Render a 'Text' as a 'String' (e.g. for test-case names),
-- truncating to 42 characters plus a trailing ellipsis when longer.
--
-- Truncate with 'Text.take' /before/ unpacking, so a long input is
-- never fully converted to a linked-list 'String'.
elide :: Text -> String
elide s
  | Text.length s > 42 = Text.unpack (Text.take 42 s) List.++ ['…']
  | otherwise          = Text.unpack s