{-|
Module      : Gargantext.Text.Eleve
Description : Unsupervised Word Segmentation
Copyright   : (c) CNRS, 2019-Present
License     : AGPL + CECILL v3
Maintainer  : team@gargantext.org
Stability   : experimental
Portability : POSIX

# Implementation of Unsupervised Word Segmentation

References:

- Python implementation (Korantin August, Emmanuel Navarro):
  [EleVe](https://github.com/kodexlab/eleve.git)

- Unsupervized Word Segmentation: the case for Mandarin Chinese. Pierre
  Magistry, Benoît Sagot, Alpage, INRIA & Univ. Paris 7, Proceedings of
  the 50th Annual Meeting of the Association for Computational Linguistics,
  pages 383–387. [PDF](https://www.aclweb.org/anthology/P12-2075)

Notes on the current implementation:
- TODO fix normalization
- TODO extract longer ngrams (see the paper above; the Viterbi algorithm can be used)
- TODO AD TEST: prop (Node c _e f) = c == Map.size f

- AD: real ngrams extraction test:
    import Gargantext.Text.Terms (extractTermsUnsupervised)
    docs <- runCmdRepl $ selectDocs 1004
    extractTermsUnsupervised 3 $ DT.intercalate " "
                               $ catMaybes
                               $ Gargantext.map _hyperdataDocument_abstract docs

-}
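-- The pipeline implemented below, in short: tokenize each sentence
-- ('toToken'), build a forward and a backward 'Trie' over ngram windows
-- ('buildTrie'), compute the branching entropy of every node ('entropyTrie'),
-- normalize it per level into an autonomy score ('normalizeEntropy' with
-- 'set_autonomy'), and finally 'split' each sentence where the autonomy
-- criterion says to cut.
--
-- A minimal end-to-end sketch (illustrative only, not part of the API;
-- the resulting segmentation depends on the corpus):
--
--   let corpus = [["new", "york", "is", "new", "york"]]
--       trie   = normalizeEntropy identity set_autonomy
--                  (buildTrie toToken' 2 corpus :: Tries Token Double)
--   in  map (map printToken) . split info_autonomy trie . toToken <$> corpus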
{-# LANGUAGE ConstraintKinds   #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes        #-}
{-# LANGUAGE TemplateHaskell   #-}
{-# LANGUAGE TypeFamilies      #-}

module Gargantext.Text.Eleve where

import Debug.Trace (trace)
-- import Debug.SimpleReflect

import Data.Functor.Reverse
import Control.Lens (Lens', Getting, (^.), (^?), view, makeLenses, _Just, under, reversed, at, (.~), to, set)
import Control.Monad (forM_)
import Data.Ord (Ord)
import qualified Data.List as L
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Data.Map (Map)
import Data.Maybe (fromMaybe)
import qualified Data.Map as Map
import Gargantext.Prelude hiding (cs)
import qualified Gargantext.Prelude as GP
import qualified Data.Tree as Tree
import Data.Tree (Tree)
import qualified Prelude as P (putStrLn, logBase, isNaN, RealFloat)
type Entropy e =
  ( Fractional e
  , Floating e
  , P.RealFloat e
  , Show e
  -- ^ TODO: only used for debugging
  )
------------------------------------------------------------------------
-- | Example and tests for development
data I e = I
  { _info_entropy     :: e
  , _info_entropy_var :: e
  , _info_autonomy    :: e
  }

instance Show e => Show (I e) where
  show (I e v n) = show (e, v, n)

makeLenses ''I

type ModEntropy i o e = (e -> e) -> i -> o

set_autonomy :: ModEntropy e (I e) e
set_autonomy f e = I e e (f e)

set_entropy_var :: ModEntropy e (I e) e
set_entropy_var f e = I e (f e) e
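-- Doctest-style sanity check for the two modifiers above (the (*2) modifier
-- and the numeric values are purely illustrative):
--
-- >>> set_autonomy (*2) 1.5 :: I Double
-- (1.5,1.5,3.0)
-- >>> set_entropy_var (*2) 1.5 :: I Double
-- (1.5,3.0,1.5)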

data StartStop = Start | Stop
  deriving (Ord, Eq, Show)

data Token = NonTerminal Text
           | Terminal StartStop
  deriving (Ord, Eq, Show)

rootTrie :: Token
rootTrie = NonTerminal ""

isTerminal :: Token -> Bool
isTerminal (Terminal    _) = True
isTerminal (NonTerminal _) = False
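-- | Map the special @<start>@ / @<stop>@ markers back to 'Terminal' tokens;
-- anything else is a 'NonTerminal' (doctest-style illustration):
--
-- >>> parseToken "<start>"
-- Terminal Start
-- >>> parseToken "york"
-- NonTerminal "york"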
parseToken :: Text -> Token
parseToken "<start>" = Terminal Start
parseToken "<stop>"  = Terminal Stop
parseToken t         = NonTerminal t

-- |
-- >>> reverseTokens [Terminal Start, NonTerminal "new", NonTerminal "york", Terminal Stop]
-- [Terminal Start,NonTerminal "york",NonTerminal "new",Terminal Stop]
reverseTokens :: [Token] -> [Token]
reverseTokens xs = case lastMay xs of
  Nothing              -> []
  Just (Terminal Stop) -> reverseTokens' xs <> [Terminal Stop]
  _                    -> reverseTokens' xs

reverseTokens' :: [Token] -> [Token]
reverseTokens' []              = []
reverseTokens' [Terminal Stop] = []
reverseTokens' [x]             = [x]
reverseTokens' (x:xs) = case x of
  Terminal Start -> [Terminal Start] <> reverseTokens' xs
  _              -> reverseTokens' xs <> [x]

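-- | Wrap a tokenized sentence between the Start and Stop markers
-- (doctest-style illustration):
--
-- >>> toToken ["new", "york"]
-- [Terminal Start,NonTerminal "new",NonTerminal "york",Terminal Stop]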
toToken :: [Text] -> [Token]
toToken xs = Terminal Start : (NonTerminal <$> xs) <> [Terminal Stop]

printToken :: Token -> Text
printToken = f
  where
    f (NonTerminal x)  = x
    f (Terminal Start) = "<start>"
    f (Terminal Stop)  = "<stop>"
------------------------------------------------------------------------

data Trie k e
  = Node { _node_count    :: Int
         , _node_entropy  :: e
         , _node_children :: Map k (Trie k e)
         }
  | Leaf { _node_count :: Int }
  deriving (Show)

makeLenses ''Trie

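-- | The root count of the resulting trie equals the number of inserted
-- sequences; a small doctest-style check (the trie shape itself is elided,
-- the type annotation is only there to pick k ~ Text):
--
-- >>> _node_count (insertTries [["a","b"],["a","c"]] :: Trie Text ())
-- 2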
insertTries :: Ord k => [[k]] -> Trie k ()
insertTries = L.foldr insertTrie emptyTrie

insertTrie :: Ord k => [k] -> Trie k () -> Trie k ()
insertTrie [] n = n { _node_count = _node_count n + 1 }
insertTrie (x:xs) (Leaf c) = mkTrie (c+1) $ Map.singleton x $ insertTrie xs emptyTrie
insertTrie (x:xs) (Node c _e children) = mkTrie (c+1) $ Map.alter f x children
  where
    f = Just . insertTrie xs . fromMaybe emptyTrie

-- emptyTrie :: (Ord k, Monoid e) => Trie k e
-- emptyTrie = Node 0 mempty mempty
emptyTrie :: Trie k e
emptyTrie = Leaf 0

mkTrie :: Monoid e => Int -> Map k (Trie k e) -> Trie k e
mkTrie c children
  | Map.null children = Leaf c
  | otherwise         = Node c mempty children

-----------------------------
-- | Trie to Tree, since Tree has a nice print function
toTree :: k -> Trie k e -> Tree (k,Int,Maybe e)
toTree k (Leaf c)      = Tree.Node (k, c, Nothing) []
toTree k (Node c e cs) = Tree.Node (k, c, Just e) (map (uncurry toTree) $ Map.toList cs)

------------------------------------------------------------------------
------------------------------------------------------------------------
nan :: Floating e => e
nan = 0 / 0

noNaNs :: P.RealFloat e => [e] -> [e]
noNaNs = filter (not . P.isNaN)

updateIfDefined :: P.RealFloat e => e -> e -> e
updateIfDefined e0 e | P.isNaN e = e0
                     | otherwise = e

sim :: Entropy e => e -> e -> Bool
sim x y = x == y || (P.isNaN x && P.isNaN y)

subst :: Entropy e => (e, e) -> e -> e
subst (src, dst) x | sim src x = dst
                   | otherwise = x

entropyTrie :: Entropy e => (k -> Bool) -> Trie k () -> Trie k e
entropyTrie _    (Leaf c)             = Leaf c
entropyTrie pred (Node c () children) = Node c e (map (entropyTrie pred) children)
  where
    e = sum $ map f $ Map.toList children
    f (k, child) = if pred k then   chc * P.logBase 2 (fromIntegral c)
                             else - chc * P.logBase 2 chc
      where
        chc = fromIntegral (_node_count child) / fromIntegral c
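-- In formula form, the value computed above for a node with count c is
--
--   e = sum over non-terminal children k of  - p(k) * log2 p(k)
--     + sum over     terminal children k of    p(k) * log2 c
--
-- with p(k) = count(k) / c, i.e. the branching entropy of the node, with a
-- special contribution for the Terminal (Start/Stop) children.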
------------------------------------------------------------------------
normalizeLevel :: Entropy e => [e] -> e -> e
normalizeLevel = checkDiff (go . noNaNs)
  where
    -- checkDiff f es e = let e' = f es e in if e == e' then e' else trace ("normalizeLevel: diff " <> show e <> " " <> show e') e'
    checkDiff = identity
    -- go [] = panic "normalizeLevel: impossible"
    -- trace "normalizeLevel"
    -- go [_] = identity
    go es = \e -> (e - m) / v
    {-
      in if P.isNaN e'
         then trace ("normalizeLevel " <> show (e,m,v,es)) e
         else e'
    -}
      where
        m = mean es
        v = deviation es
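-- In other words, each entropy is standardized against its trie level
-- (a z-score): e' = (e - mean es) / deviation es, where es are the non-NaN
-- entropies of the nodes at the same depth (see entropyLevels below).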
{- Unused

nodeChildren :: Trie k e -> Map k (Trie k e)
nodeChildren (Node _ _ cs) = cs
nodeChildren (Leaf _)      = Map.empty

-}

class IsTrie trie where
  buildTrie :: Entropy e => (Int -> [[Text]] -> [[Token]]) -> Int -> [[Text]] -> trie Token e
  nodeEntropy :: Entropy e => Getting e i e -> trie k i -> e
  nodeChild :: Ord k => k -> trie k e -> trie k e
  findTrie :: Ord k => [k] -> trie k e -> trie k e
  normalizeEntropy :: Entropy e
                   => Getting e i e -> ModEntropy i o e
                   -> trie k i -> trie k o

-- UNUSED
--nodeAutonomy :: (Ord k, Entropy e) => Getting e i e -> trie k i -> [k] -> e
--nodeAutonomy inE t ks = nodeEntropy inE $ findTrie ks t

instance IsTrie Trie where
  buildTrie to n ts = entropyTrie isTerminal $ insertTries $ to n ts

  nodeEntropy inE (Node _ e _) = e ^. inE
  nodeEntropy _   (Leaf _)     = nan

  nodeChild k (Node _ _ cs) = fromMaybe emptyTrie (Map.lookup k cs)
  nodeChild _ (Leaf _)      = emptyTrie

  findTrie ks t = L.foldl (flip nodeChild) t ks

  normalizeEntropy inE modE t = trace (show level) $ go (modE identity) level t
    where
      level = (entropyLevels inE t)
      go _ []         _        = panic "normalizeEntropy' empty levels"
      go _ _          (Leaf c) = Leaf c
      -- go _ ([] : _) _ = panic "normalizeEntropy': empty level"
      go f (es : ess) (Node c i children)
        -- | any (sim (i ^. inE)) es
        = Node c (f i) $ go (modE $ normalizeLevel es) ess <$> children
        -- | otherwise
        -- = panic "NOT an elem"

{-
This normalizes a node only with respect to its siblings (rather than all
the nodes of the same level).

normalizeEntropy inE modE = go $ modE identity
  where
    go _ (Leaf c) = Leaf c
    go f (Node c i children)
      | Map.null children =
          panic "normalizeEntropy: impossible"
      | otherwise =
          Node c (f i) $ go (modE $ normalizeLevel es) <$> children
      where
        es = [ i' ^. inE | Node _ i' _ <- Map.elems children ]
-}
------------------------------------------------------------------------

levels :: Trie k e -> [[Trie k e]]
levels = L.takeWhile (not . L.null) . L.iterate (L.concatMap subForest) . pure
  where
    subForest :: Trie k e -> [Trie k e]
    subForest (Leaf _)            = []
    subForest (Node _ _ children) = Map.elems children

entropyLevels :: Entropy e => Getting e i e -> Trie k i -> [[e]]
entropyLevels inE = fmap (noNaNs . map (nodeEntropy inE)) . levels

------------------------------------------------------------------------

data Tries k e = Tries
  { _fwd :: Trie k e
  , _bwd :: Trie k e
  }

makeLenses ''Tries


instance IsTrie Tries where
  buildTrie to n tts = Tries { _fwd = buildTrie to n tts
                             , _bwd = buildTrie to n (map reverse $ tts)
                             }

  nodeEntropy inE (Tries fwd bwd) =
    -- VETODO reverse the query for bwd here
    -- mean $ noNaNs [nodeEntropy inE fwd, nodeEntropy inE bwd . under reversed]
    mean $ noNaNs [nodeEntropy inE fwd, nodeEntropy inE bwd]

  findTrie ks (Tries fwd bwd) = Tries (findTrie ks fwd) (findTrie ks bwd)
  -- ^^
  -- TODO: it is tempting to reverse the query here, but this is not always
  -- what we want. See also nodeAutonomy.
  -- AD: I also tried reversing here and I confirm it gives unexpected results
  -- (whereas the VETODO FIX below is OK), presumably because the recursion
  -- applies the reverse several times.

  nodeChild k (Tries fwd bwd) = Tries (nodeChild k fwd) (nodeChild k bwd)

  normalizeEntropy inE modE = onTries (normalizeEntropy inE modE)

onTries :: (Trie k i -> Trie k o) -> Tries k i -> Tries k o
onTries f (Tries fwd bwd) = Tries (f fwd) (f bwd)

------------------------------------------------------------------------
split :: (IsTrie trie, Entropy e) => Lens' i e -> trie Token i -> [Token] -> [[Token]]
split _   _  []                   = []
split inE t0 (Terminal Start:xs0) = split inE (nodeChild (Terminal Start) t0) xs0
split inE t0 (x0:xs0)             = go (nodeChild x0 t0) [x0] xs0
  where
    consRev [] xss = xss
    consRev xs xss = reverse xs : xss

    go _ pref []                  = [reverse pref]
    go _ pref (Terminal Stop:_)   = [reverse pref]
    go t pref (Terminal Start:xs) = go t pref xs
    go t pref (x:xs) =
        -- trace (show (if acc then "ACC" else "CUT", (reverse (x : pref), ext), if acc then ">" else "<=", ((reverse pref, et), "+", ([x], ext0)))) $
        if acc
          then go xt (x:pref) xs
          else consRev pref $ go xt0 [x] xs
      where
        xt   = nodeChild x t
        xt0  = nodeChild x t0
        et   = ne 0 t
        -- ^ entropy of the current prefix
        ext0 = ne 0 xt0
        -- ^ entropy of [x]
        ext  = ne 0 xt
        -- ^ entropy of the current prefix plus x
        acc  = ext > et + ext0
        -- aut(["in","this","paper"]) > aut(["in","this"]) + aut(["paper"])

        ne d t = if P.isNaN e then d else e
          where e = nodeEntropy inE t
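-- In words: keep extending the current prefix with x as long as the score of
-- (prefix ++ [x]) read through inE beats the score of the prefix plus the
-- score of [x] on its own; otherwise cut and start a new segment at x.
-- A typical call, as in testEleve below (sketch):
--
--   split info_autonomy nt (toToken ["new", "york", "is", "new", "york"])
--
-- where nt :: Tries Token (I Double) is the normalized trie.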
{-
split :: Entropy e => Lens' i e -> Tries Token i -> [Token] -> [[Token]]
split inE t0 ts =
  maximumWith (sum . map $ nodeAutonomy inE t0) (all the splits of ts)
-}

------------------------------------------------------------------------
------------------------------------------------------------------------

mainEleve :: Int -> [[Text]] -> [[[Text]]]
mainEleve _ _ = []
{-
mainEleve n input = map (map printToken) . split identity (t :: Trie Token Double) <$> inp
  where
    inp = toToken <$> input
    t = buildTrie $ L.concat $ chunkAlong n 1 <$> inp
-}

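-- | Every suffix of the list, truncated to at most n elements
-- (doctest-style illustration):
--
-- >>> chunkAlongEleve 3 ([1..5] :: [Int])
-- [[1,2,3],[2,3,4],[3,4,5],[4,5],[5],[]]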
chunkAlongEleve :: Int -> [a] -> [[a]]
chunkAlongEleve n xs = L.take n <$> L.tails xs

toToken' :: Int -> [[Text]] -> [[Token]]
toToken' n input = L.concat $ (filter (/= [Terminal Stop]) . chunkAlongEleve (n + 2)) <$> toToken <$> input
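-- That is, toToken' n cuts every tokenized sentence into overlapping windows
-- of length n + 2 (n words plus the Start/Stop markers) and drops the window
-- consisting of the Stop marker alone.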
---------------------------------------------
set_entropy_vars :: Entropy e => Getting e i e -> (e -> i -> o) -> Tries Token i -> Trie Token o
set_entropy_vars inE modE tries@(Tries fwd _bwd) =
  mapTree (\k -> modE $ entropy_var'' inE tries k) [rootTrie] fwd

mapTree :: ([Token] -> t -> e) -> [Token] -> Trie Token t -> Trie Token e
mapTree f k t = go f k t
  where
    go _ _ (Leaf c)            = Leaf c
    go f k (Node c i children) = Node c (f k i) (Map.mapWithKey (\k' -> go f (filter (/= rootTrie) $ k <> [k'])) children)

entropy_var'' :: Entropy e => Getting e i e -> Tries Token i -> [Token] -> e
entropy_var'' inE tries ng = mean $ noNaNs [fwd, bwd]
  where
    fwd = nodeEntropy inE (_fwd $ findTrie ng tries)
    bwd = nodeEntropy inE (_bwd $ findTrie (reverseTokens ng) tries)

---------------------------------------------
-- | TODO: remove the function below once the following bug is fixed
-- | TODO: entropy_var' /= entropy_var on "<start> token.."
entropy_var' :: Entropy e => Tries Token (I e) -> [Token] -> e
entropy_var' tries ng = mean $ noNaNs [ nodeEntropy info_entropy (_fwd $ findTrie ng tries)
                                      , nodeEntropy info_entropy (_bwd $ findTrie (reverseTokens ng) tries)
                                      ]

entropy_var :: Entropy e => [Text] -> Tries Token (I e) -> e
entropy_var ng trie = mean [ nodeEntropy info_entropy (_fwd $ findTrie ntf trie)
                           , nodeEntropy info_entropy (_bwd $ findTrie ntb trie)
                           ]
  where
    ntf = parseToken <$> ng
    ntb = parseToken <$> reverse ng

---------------------------------------------

testEleve :: e ~ Double => Bool -> Int -> [Text] -> [(Text, Int, e, e, e, e, e)] -> IO Bool
testEleve debug n output checks = do
    let
      {-
      pss = [ (ps, findTrie ps fwd ^? _Just . node_entropy) -- . info_entropy)
            | ps <- L.nub $ [ c
                            | m <- [1..n]
                            , cs <- chunkAlong m 1 <$> inp
                            , c <- cs
                            ]
            ]
      -}
      --res = map (map printToken) . split identity fwd <$> inp
      --res = map (map printToken) . split info_norm_entropy' nt' <$> inp
      res = map (map printToken) . split info_autonomy nt <$> inp
    when debug $ do
      P.putStrLn (show input)
      -- forM_ pss (P.putStrLn . show)
      P.putStrLn ""
      P.putStrLn "Levels:"
      forM_ (entropyLevels identity (_fwd t)) $ \level ->
        P.putStrLn $ " " <> show level
      P.putStrLn ""
      P.putStrLn "Forward:"
      printTrie (_fwd nt)
      P.putStrLn ""
      P.putStrLn "Backward:"
      printTrie (_bwd nt)
      P.putStrLn ""
      P.putStrLn "Splitting:"
      P.putStrLn $ show res
    forM_ checks checker
    pure $ expected == res

  where
    out      = T.words <$> output
    expected = fmap (T.splitOn "-") <$> out
    input    = (T.splitOn "-" =<<) <$> out
    inp      = toToken <$> input

    t :: Tries Token Double
    t = buildTrie toToken' n input
          & bwd . node_children . at (Terminal Start) . _Just . node_entropy .~ nan
          -- NP: this is a hack to set the bwd entropy of Start at NaN.

    t'' :: Trie Token Double
    t'' = set_entropy_vars identity (\e _i -> e) t
    -- Keeping nt for the fwd and bwd checks:
    -- it makes no sense to compute entropy_var on fwd and bwd separately.
    nt :: Tries Token (I Double)
    nt = normalizeEntropy identity set_autonomy t

    nt' :: Trie Token (I Double)
    nt' = normalizeEntropy identity set_autonomy t''

    -- nt = normalizeEntropy identity set_autonomy (fwd :: Trie Token Double)
    -- nt = normalizeEntropy' info_entropy (\f -> info_norm_entropy' %~ f) nt

    check f msg ref my =
      if f ref my
        then P.putStrLn $ " PASS " <> msg <> " " <> show ref
        else P.putStrLn $ " FAIL " <> msg <> " ref=" <> show ref <> " my=" <> show my

    checker (ngram, count, entropy, _ev, autonomy, bwd_entropy, fwd_entropy) = do
      let ns   = parseToken <$> T.words ngram
          nsb  = parseToken <$> (reverse $ T.words ngram)
          t'   = findTrie ns nt
          tvar = findTrie ns t''

      P.putStrLn $ " " <> T.unpack ngram <> ":"
      check (==) "count" count (_node_count tvar)
      check sim "entropy_var" entropy (nodeEntropy identity tvar)
      --check sim ("entropy_varOK") entropy (entropy_var (T.words ngram) nt)
      --check sim "entropy" entropy (entropy_var' nt (parseToken <$> T.words ngram))
      {- ^ FIXME: the two functions above should give the same results
           (error in reverseTokens):
             <start> New York:
               PASS count 1
               FAIL entropy ref=NaN my=0.0
      -}

      check sim "autonomy" autonomy (nodeEntropy info_autonomy nt')
      check sim "fwd_entropy" fwd_entropy (nodeEntropy info_entropy (_fwd t'))
      check sim "bwd_entropy" bwd_entropy (nodeEntropy info_entropy (_bwd t'))

    printTrie =
      P.putStrLn . Tree.drawTree
                 . fmap show
                 . toTree rootTrie
-- | TODO: real data is a list of tokenized sentences
example0, example1, example2, example3, example4, example5, example6 :: [Text]
example0 = ["New-York is New-York and New-York"]
example1 = ["to-be or not to-be"]
example2 = ["to-be-or not to-be-or NOT to-be and"]
example3 = example0 <> example0
-- > TEST: should not have "York New" in the trie
example4 = ["a-b-c-d e a-b-c-d f"]
example5 = ["a-b-c-d-e f a-b-c-d-e g a-b-c-d-e"]
example6 = ["le-petit chat"
           ,"le-petit chien"
           ,"le-petit rat"
           ,"le gros rat"
           ]

checks0, checks2 :: [(Text, Int, Double, Double, Double, Double, Double)]
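-- Tuple layout, as destructured by `checker` in testEleve above:
--   (ngram, count, entropy, _ev, autonomy, bwd_entropy, fwd_entropy)
-- where the third component is checked against the entropy variation
-- (nodeEntropy identity on the trie built by set_entropy_vars).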
checks0 =
  [("<start>", 1, nan, nan, nan, nan, 0.0)
  ,("New", 3, 0.792481250360578, -1.3208020839342969, 0.7499999999999999, 1.584962500721156, 0.0)
  ,("York", 3, 0.792481250360578, -1.3208020839342969, 0.7499999999999999, 0.0, 1.584962500721156)
  ,("is", 1, 0.0, -2.113283334294875, -0.5000000000000002, 0.0, 0.0)
  ,("and", 1, 0.0, -2.113283334294875, -0.5000000000000002, 0.0, 0.0)
  -- ,("<stop>", 0, nan, nan, nan, 0.0, nan)
  -- Since <stop> is not in the trie, there is no need to count it.

  {-
  ,("<start> New", 1, nan, nan, nan, nan, 0.0)
  ,("New York", 3, 1.584962500721156, 1.584962500721156, 1.4142135623730951, nan, 1.584962500721156)
  ,("York is", 1, 0.0, nan, nan, nan, 0.0)
  ,("is New", 1, 0.0, nan, nan, nan, 0.0)
  ,("York and", 1, 0.0, nan, nan, nan, 0.0)
  ,("and New", 1, 0.0, nan, nan, nan, 0.0)
  ,("York <stop>", 1, nan, nan, nan, nan, nan)

  ,("<start> New York", 1, nan, nan, nan, nan, 0.0)
  ,("New York is", 1, 0.0, nan, nan, nan, 0.0)
  ,("York is New", 1, 0.0, nan, nan, nan, 0.0)
  ,("is New York", 1, 0.0, nan, nan, nan, 0.0)
  ,("New York and", 1, 0.0, nan, nan, nan, 0.0)
  ,("York and New", 1, 0.0, nan, nan, nan, 0.0)
  ,("and New York", 1, 0.0, nan, nan, nan, 0.0)
  ,("New York <stop>", 1, nan, nan, nan, nan, nan)
  --}
  ]
checks2 =
  [("to be", 3, 1.2516291673878228, 1.2516291673878228, 1.5535694744293167, nan, 0.9182958340544896)
  ,("be or", 2, 0.5, nan, nan, nan, 1.0)
  ,("or not", 1, 0.0, nan, nan, nan, 0.0)
  ,("not to", 1, 0.0, nan, nan, nan, 0.0)
  ,("or NOT", 1, 0.0, nan, nan, nan, 0.0)
  ,("NOT to", 1, 0.0, nan, nan, nan, 0.0)
  ,("be and", 1, 0.0, nan, nan, nan, 0.0)
  ]

runTests :: IO ()
runTests =
  forM_
    [("example0", 2, example0, checks0)
    ,("example1", 2, example1, [])
    ,("example2", 3, example2, checks2)
    ,("example3", 2, example3, [])
    ,("example4", 4, example4, [])
    ,("example5", 5, example5, [])
    ]
    (\(name, n, ex, checks) -> do
      P.putStrLn $ name <> " " <> show n
      b <- testEleve False n ex checks
      P.putStrLn $ " splitting: " <> if b then "PASS" else "FAIL"
    )