{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Symantic for 'Char'.
module Language.Symantic.Lib.Char where

import Control.Monad (liftM)
import qualified Data.Char as Char
import Data.Proxy
import qualified Data.Text as Text
import Data.Type.Equality ((:~:)(Refl))

import Language.Symantic.Parsing hiding (char)
import qualified Language.Symantic.Grammar as Gram
import Language.Symantic.Typing
import Language.Symantic.Compiling
import Language.Symantic.Interpreting
import Language.Symantic.Transforming
import Language.Symantic.Lib.Lambda

-- * Class 'Sym_Char'
class Sym_Char term where
  char         :: Char -> term Char
  char_toUpper :: term Char -> term Char
  char_toLower :: term Char -> term Char

  default char         :: Trans t term => Char -> t term Char
  default char_toUpper :: Trans t term => t term Char -> t term Char
  default char_toLower :: Trans t term => t term Char -> t term Char

  char         = trans_lift . char
  char_toUpper = trans_map1 char_toUpper
  char_toLower = trans_map1 char_toLower

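-- NOTE: the 'default' signatures route every method through a term
-- 'Trans'former, so a wrapping interpreter needs no method bodies of its
-- own. A minimal sketch, assuming a hypothetical wrapper @IdT@ for which a
-- @Trans IdT term@ instance supplies 'trans_lift' and 'trans_map1':
--
-- > newtype IdT term a = IdT (term a)
-- > instance Sym_Char term => Sym_Char (IdT term) -- all methods defaulted
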
type instance Sym_of_Iface (Proxy Char) = Sym_Char
type instance TyConsts_of_Iface (Proxy Char) = Proxy Char ': TyConsts_imported_by Char
type instance TyConsts_imported_by Char =
  [ Proxy Bounded
  , Proxy Enum
  , Proxy Eq
  , Proxy Ord
  , Proxy Show
  ]

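-- NOTE: 'TyConsts_imported_by' 'Char' lists the class constants that come
-- along with 'Char' ('Bounded', 'Enum', 'Eq', 'Ord', 'Show'); the
-- 'Proj_TyConC' instance below discharges exactly these constraints.
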
instance Sym_Char HostI where
  char         = HostI
  char_toUpper = liftM Char.toUpper
  char_toLower = liftM Char.toLower
instance Sym_Char TextI where
  char a = TextI $ \_p _v ->
    Text.pack (show a)
  char_toUpper = textI1 "Char.toUpper"
  char_toLower = textI1 "Char.toLower"
instance (Sym_Char r1, Sym_Char r2) => Sym_Char (DupI r1 r2) where
  char x = char x `DupI` char x
  char_toUpper = dupI1 (Proxy @Sym_Char) char_toUpper
  char_toLower = dupI1 (Proxy @Sym_Char) char_toLower

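-- NOTE: three interpreters are given: 'HostI' evaluates within Haskell
-- (so @char_toUpper (char 'a')@ computes 'A' through 'Char.toUpper'),
-- 'TextI' pretty-prints the term (roughly @Char.toUpper 'a'@), and
-- 'DupI' runs two interpreters side by side on the same term.
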
instance
  ( Read_TyNameR TyName cs rs
  , Inj_TyConst cs Char
  ) => Read_TyNameR TyName cs (Proxy Char ': rs) where
  read_TyNameR _cs (TyName "Char") k = k (ty @Char)
  read_TyNameR _rs raw k = read_TyNameR (Proxy @rs) raw k
instance Show_TyConst cs => Show_TyConst (Proxy Char ': cs) where
  show_TyConst TyConstZ{} = "Char"
  show_TyConst (TyConstS c) = show_TyConst c

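-- NOTE: 'read_TyNameR' maps the source-level name "Char" to the 'Char'
-- type constant and otherwise recurses on the remaining @rs@, while
-- 'show_TyConst' renders that constant back as "Char".
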
instance -- Proj_TyConC
  ( Proj_TyConst cs Char
  , Proj_TyConsts cs (TyConsts_imported_by Char)
  ) => Proj_TyConC cs (Proxy Char) where
  proj_TyConC _ (TyConst q :$ TyConst c)
    | Just Refl <- eq_skind (kind_of_TyConst c) SKiType
    , Just Refl <- proj_TyConst c (Proxy @Char)
    = case () of
        _ | Just Refl <- proj_TyConst q (Proxy @Bounded) -> Just TyCon
          | Just Refl <- proj_TyConst q (Proxy @Enum)    -> Just TyCon
          | Just Refl <- proj_TyConst q (Proxy @Eq)      -> Just TyCon
          | Just Refl <- proj_TyConst q (Proxy @Ord)     -> Just TyCon
          | Just Refl <- proj_TyConst q (Proxy @Show)    -> Just TyCon
        _ -> Nothing
  proj_TyConC _c _q = Nothing
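-- NOTE: 'proj_TyConC' proves, during type checking, which class constraints
-- hold for 'Char': given a constraint @q@ applied to the 'Char' constant of
-- kind 'SKiType', it returns 'TyCon' evidence for 'Bounded', 'Enum', 'Eq',
-- 'Ord' or 'Show', and 'Nothing' for anything else.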
data instance TokenT meta (ts::[*]) (Proxy Char)
  = Token_Term_Char Char
  | Token_Term_Char_toUpper
  | Token_Term_Char_toLower
deriving instance Eq_Token meta ts => Eq (TokenT meta ts (Proxy Char))
deriving instance Show_Token meta ts => Show (TokenT meta ts (Proxy Char))
instance -- CompileI
  ( Inj_TyConst (TyConsts_of_Ifaces is) Char
  , Inj_TyConst (TyConsts_of_Ifaces is) (->)
  ) => CompileI is (Proxy Char) where
  compileI tok _ctx k =
    case tok of
      Token_Term_Char c       -> k (ty @Char) $ TermO $ \_c -> char c
      Token_Term_Char_toUpper -> from_op char_toUpper
      Token_Term_Char_toLower -> from_op char_toLower
    where
      from_op (op::forall term. Sym_Char term => term Char -> term Char) =
        k (ty @Char ~> ty @Char) $ TermO $ \_c -> lam op
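-- NOTE: compilation is type-directed: 'Token_Term_Char' produces a term at
-- type 'Char', while the toUpper/toLower tokens go through 'from_op', which
-- wraps the polymorphic operator in 'lam' at type @Char ~> Char@.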
instance -- TokenizeT
  Inj_Token meta ts Char =>
  TokenizeT meta ts (Proxy Char) where
  tokenizeT _t = mempty
    { tokenizers_infix = tokenizeTMod [Mod_Name "Char"]
        [ tokenize0 "toLower" infixN5 Token_Term_Char_toLower
        , tokenize0 "toUpper" infixN5 Token_Term_Char_toUpper
        ]
    }
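-- NOTE: this registers the two operators as plain names @toUpper@ and
-- @toLower@ under the @Char@ module name, both with the neutral fixity
-- 'infixN5'.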
instance -- Gram_Term_AtomsT
  ( Alt g
  , Gram_Rule g
  , Gram_Lexer g
  , Gram_Meta meta g
  , Inj_Token meta ts Char
  ) => Gram_Term_AtomsT meta ts (Proxy Char) g where
  term_atomsT _t =
    [ rule "term_char" $
        lexeme $ metaG $
        (\c meta -> ProTok $ inj_EToken meta $ Token_Term_Char c)
        <$> Gram.between tickG tickG (
          Gram.cf_of_Terminal (Gram.any `Gram.but` tickG) Gram.<+>
          '\'' <$ Gram.string "\\'"
        )
    ]
    where
      tickG :: Gram_Terminal g' => g' Char
      tickG = Gram.char '\''
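-- NOTE: the "term_char" rule parses a character literal: any single
-- character between two single quotes except a quote itself, or the escaped
-- quote \'. For instance (sketch of the intended concrete syntax):
--
-- > 'a'   -- tokenized as Token_Term_Char 'a'
-- > '\''  -- tokenized as Token_Term_Char '\''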