-- TODO-EVENTS: InsertedNodes
-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ConstrainedClassMethods #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE InstanceSigs #-}
+{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
module Gargantext.Database.Action.Flow -- (flowDatabase, ngrams2list)
- ( FlowCmdM
+ ( DataText(..)
, getDataText
, flowDataText
, flow
, flowCorpusFile
, flowCorpus
, flowAnnuaire
+ , insertMasterDocs
+ , saveDocNgramsWith
, getOrMkRoot
, getOrMk_RootWithCorpus
, allDataOrigins
, do_api
+ , indexAllDocumentsWithPosTag
)
where
import Control.Lens ((^.), view, _Just, makeLenses)
import Data.Aeson.TH (deriveJSON)
import Data.Either
+import Data.HashMap.Strict (HashMap)
+import Data.Hashable (Hashable)
import Data.List (concat)
-import qualified Data.Map as Map
import Data.Map (Map, lookup)
-import Data.Maybe (Maybe(..), catMaybes)
+import Data.Maybe (catMaybes)
import Data.Monoid
import Data.Swagger
-import Data.Text (splitOn, intercalate)
+import qualified Data.Text as T
import Data.Traversable (traverse)
import Data.Tuple.Extra (first, second)
import GHC.Generics (Generic)
import System.FilePath (FilePath)
+import qualified Data.HashMap.Strict as HashMap
+import qualified Gargantext.Data.HashMap.Strict.Utils as HashMap
+import qualified Data.Map as Map
-import Gargantext.Core (Lang(..))
+import Gargantext.API.Admin.Orchestrator.Types (JobLog(..))
+import Gargantext.Core (Lang(..), PosTagAlgo(..))
+import Gargantext.Core.Ext.IMT (toSchoolName)
+import Gargantext.Core.Ext.IMTUser (readFile_Annuaire)
import Gargantext.Core.Flow.Types
-import Gargantext.Core.Types (Terms(..))
+import Gargantext.Core.Text
+import Gargantext.Core.Text.List.Group.WithStem ({-StopSize(..),-} GroupParams(..))
+import Gargantext.Core.Text.Corpus.Parsers (parseFile, FileFormat)
+import Gargantext.Core.Text.List (buildNgramsLists)
+import Gargantext.Core.Text.List.Social (FlowSocialListWith)
+import Gargantext.Core.Text.Terms
+import Gargantext.Core.Text.Terms.Mono.Stem.En (stemIt)
+import Gargantext.Core.Types (POS(NP))
import Gargantext.Core.Types.Individu (User(..))
import Gargantext.Core.Types.Main
+import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
import Gargantext.Database.Action.Flow.List
import Gargantext.Database.Action.Flow.Types
-import Gargantext.Database.Action.Flow.Utils (insertDocNgrams)
-import Gargantext.Database.Query.Table.Node
-import Gargantext.Database.Query.Table.Node.Document.Insert -- (insertDocuments, ReturnId(..), addUniqIdsDoc, addUniqIdsContact, ToDbData(..))
-import Gargantext.Database.Query.Tree.Root (getOrMkRoot, getOrMk_RootWithCorpus)
+import Gargantext.Database.Action.Flow.Utils (insertDocNgrams, DocumentIdWithNgrams(..))
import Gargantext.Database.Action.Search (searchDocInDatabase)
import Gargantext.Database.Admin.Config (userMaster, corpusMasterName)
-import Gargantext.Database.Query.Table.Node.Error (HasNodeError(..))
import Gargantext.Database.Admin.Types.Hyperdata
import Gargantext.Database.Admin.Types.Node -- (HyperdataDocument(..), NodeType(..), NodeId, UserId, ListId, CorpusId, RootId, MasterCorpusId, MasterUserId)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Ngrams
+import Gargantext.Database.Query.Table.Node
+import Gargantext.Database.Query.Table.Node.Document.Insert -- (insertDocuments, ReturnId(..), addUniqIdsDoc, addUniqIdsContact, ToDbData(..))
+import Gargantext.Database.Query.Table.Node.Error (HasNodeError(..))
import Gargantext.Database.Query.Table.NodeNgrams (listInsertDb , getCgramsId)
import Gargantext.Database.Query.Table.NodeNodeNgrams2
-import Gargantext.Database.Schema.Node (NodePoly(..))
-import Gargantext.Core.Ext.IMT (toSchoolName)
-import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
-import Gargantext.Core.Ext.IMTUser (deserialiseImtUsersFromFile)
-import Gargantext.Core.Text
+import Gargantext.Database.Query.Tree.Root (getOrMkRoot, getOrMk_RootWithCorpus)
+import Gargantext.Database.Schema.Node (NodePoly(..), node_id)
+import Gargantext.Database.Types
import Gargantext.Prelude
import Gargantext.Prelude.Crypto.Hash (Hash)
-import Gargantext.Core.Text.Corpus.Parsers (parseFile, FileFormat)
-import Gargantext.Core.Text.List (buildNgramsLists,StopSize(..))
-import Gargantext.Core.Text.Terms.Mono.Stem.En (stemIt)
-import Gargantext.Core.Text.Terms
-import qualified Gargantext.Database.Query.Table.Node.Document.Add as Doc (add)
import qualified Gargantext.Core.Text.Corpus.API as API
+import qualified Gargantext.Database.Query.Table.Node.Document.Add as Doc (add)
+------------------------------------------------------------------------
+-- Imports for upgrade function
+import Gargantext.Database.Query.Tree.Root (getRootId)
+import Gargantext.Database.Query.Tree (findNodesId)
+import qualified Data.List as List
------------------------------------------------------------------------
-- TODO use internal with API name (could be old data)
data DataOrigin = InternalOrigin { _do_api :: API.ExternalAPIs }
                | ExternalOrigin { _do_api :: API.ExternalAPIs }
getDataText (ExternalOrigin api) la q li = liftBase $ DataNew
<$> splitEvery 500
<$> API.get api (_tt_lang la) q li
+
getDataText (InternalOrigin _) _la q _li = do
  (_masterUserId, _masterRootId, cId) <- getOrMk_RootWithCorpus
                                           (UserName userMaster)
                                           (Left "")
                                           (Nothing :: Maybe HyperdataCorpus)
  ids <- map fst <$> searchDocInDatabase cId (stemIt q)
  pure $ DataOld ids
-------------------------------------------------------------------------------
-flowDataText :: FlowCmdM env err m
- => User
- -> DataText
- -> TermType Lang
- -> CorpusId
- -> m CorpusId
-flowDataText u (DataOld ids) tt cid = flowCorpusUser (_tt_lang tt) u (Right [cid]) corpusType ids
+flowDataText :: ( FlowCmdM env err m
+ )
+ => User
+ -> DataText
+ -> TermType Lang
+ -> CorpusId
+ -> Maybe FlowSocialListWith
+ -> (JobLog -> m ())
+ -> m CorpusId
+flowDataText u (DataOld ids) tt cid mfslw _ = flowCorpusUser (_tt_lang tt) u (Right [cid]) corpusType ids mfslw
where
corpusType = (Nothing :: Maybe HyperdataCorpus)
-flowDataText u (DataNew txt) tt cid = flowCorpus u (Right [cid]) tt txt
+flowDataText u (DataNew txt) tt cid mfslw logStatus = flowCorpus u (Right [cid]) tt mfslw txt logStatus
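+
+-- A sketch of a typical call chain (user name, query and corpus id assumed):
+--
+-- >>> txt <- getDataText (ExternalOrigin API.PubMed) (Multi EN) "query" (Just 100)
+-- >>> flowDataText (UserName "alice") txt (Multi EN) corpusId Nothing logStatus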
------------------------------------------------------------------------
-- TODO use proxy
-flowAnnuaire :: FlowCmdM env err m
+flowAnnuaire :: (FlowCmdM env err m)
=> User
-> Either CorpusName [CorpusId]
-> (TermType Lang)
-> FilePath
+ -> (JobLog -> m ())
-> m AnnuaireId
-flowAnnuaire u n l filePath = do
- docs <- liftBase $ (( splitEvery 500 <$> deserialiseImtUsersFromFile filePath) :: IO [[HyperdataContact]])
- flow (Nothing :: Maybe HyperdataAnnuaire) u n l docs
+flowAnnuaire u n l filePath logStatus = do
+ docs <- liftBase $ (( splitEvery 500 <$> readFile_Annuaire filePath) :: IO [[HyperdataContact]])
+ flow (Nothing :: Maybe HyperdataAnnuaire) u n l Nothing docs logStatus
------------------------------------------------------------------------
-flowCorpusFile :: FlowCmdM env err m
+flowCorpusFile :: (FlowCmdM env err m)
=> User
-> Either CorpusName [CorpusId]
-> Limit -- Limit the number of docs (for dev purpose)
-> TermType Lang -> FileFormat -> FilePath
+ -> Maybe FlowSocialListWith
+ -> (JobLog -> m ())
-> m CorpusId
-flowCorpusFile u n l la ff fp = do
- docs <- liftBase ( splitEvery 500
- <$> take l
- <$> parseFile ff fp
- )
- flowCorpus u n la (map (map toHyperdataDocument) docs)
+flowCorpusFile u n l la ff fp mfslw logStatus = do
+ eParsed <- liftBase $ parseFile ff fp
+ case eParsed of
+ Right parsed -> do
+ let docs = splitEvery 500 $ take l parsed
+ flowCorpus u n la mfslw (map (map toHyperdataDocument) docs) logStatus
+ Left e -> panic $ "Error: " <> (T.pack e)
------------------------------------------------------------------------
-- | TODO improve the needed type to create/update a corpus
flowCorpus :: (FlowCmdM env err m, FlowCorpus a)
   => User
-> Either CorpusName [CorpusId]
-> TermType Lang
+ -> Maybe FlowSocialListWith
-> [[a]]
+ -> (JobLog -> m ())
-> m CorpusId
flowCorpus = flow (Nothing :: Maybe HyperdataCorpus)
-flow :: (FlowCmdM env err m, FlowCorpus a, MkCorpus c)
- => Maybe c
- -> User
- -> Either CorpusName [CorpusId]
- -> TermType Lang
- -> [[a]]
- -> m CorpusId
-flow c u cn la docs = do
+flow :: ( FlowCmdM env err m
+ , FlowCorpus a
+ , MkCorpus c
+ )
+ => Maybe c
+ -> User
+ -> Either CorpusName [CorpusId]
+ -> TermType Lang
+ -> Maybe FlowSocialListWith
+ -> [[a]]
+ -> (JobLog -> m ())
+ -> m CorpusId
+flow c u cn la mfslw docs logStatus = do
-- TODO if public insertMasterDocs else insertUserDocs
- ids <- traverse (insertMasterDocs c la) docs
- flowCorpusUser (la ^. tt_lang) u cn c (concat ids)
+ ids <- traverse (\(idx, doc) -> do
+ id <- insertMasterDocs c la doc
+          logStatus JobLog { _scst_succeeded = Just idx
+ , _scst_failed = Just 0
+ , _scst_remaining = Just $ length docs - idx
+ , _scst_events = Just []
+ }
+ pure id
+ ) (zip [1..] docs)
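+  -- Each element of docs is one pre-chunked batch (callers use splitEvery
+  -- 500), so the JobLog above counts batches, not individual documents.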
+ flowCorpusUser (la ^. tt_lang) u cn c (concat ids) mfslw
------------------------------------------------------------------------
-flowCorpusUser :: (FlowCmdM env err m, MkCorpus c)
+flowCorpusUser :: ( FlowCmdM env err m
+ , MkCorpus c
+ )
=> Lang
-> User
-> Either CorpusName [CorpusId]
-> Maybe c
-> [NodeId]
+ -> Maybe FlowSocialListWith
-> m CorpusId
-flowCorpusUser l user corpusName ctype ids = do
+flowCorpusUser l user corpusName ctype ids mfslw = do
-- User Flow
(userId, _rootId, userCorpusId) <- getOrMk_RootWithCorpus user corpusName ctype
+ -- NodeTexts is first
+ _tId <- insertDefaultNode NodeTexts userCorpusId userId
+ -- printDebug "NodeTexts: " tId
+
+ -- NodeList is second
listId <- getOrMkList userCorpusId userId
- _cooc <- insertDefaultNode NodeListCooc listId userId
+ -- _cooc <- insertDefaultNode NodeListCooc listId userId
-- TODO: check if present already, ignore
_ <- Doc.add userCorpusId ids
- _tId <- insertDefaultNode NodeTexts userCorpusId userId
-- printDebug "Node Text Ids:" tId
-- User List Flow
- (masterUserId, _masterRootId, masterCorpusId) <- getOrMk_RootWithCorpus (UserName userMaster) (Left "") ctype
- ngs <- buildNgramsLists l 2 3 (StopSize 3) userCorpusId masterCorpusId
+ (masterUserId, _masterRootId, masterCorpusId)
+ <- getOrMk_RootWithCorpus (UserName userMaster) (Left "") ctype
+
+ --let gp = (GroupParams l 2 3 (StopSize 3))
+ let gp = GroupWithPosTag l CoreNLP HashMap.empty
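+  -- Grouping now goes through POS tags (CoreNLP) instead of stemming; the
+  -- empty HashMap presumably supplies no pre-existing groupings.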
+ ngs <- buildNgramsLists user userCorpusId masterCorpusId mfslw gp
+
_userListId <- flowList_DbRepo listId ngs
_mastListId <- getOrMkList masterCorpusId masterUserId
-- _ <- insertOccsUpdates userCorpusId mastListId
-- _ <- mkAnnuaire rootUserId userId
pure userCorpusId
--- TODO Type NodeDocumentUnicised
-insertDocs :: ( FlowCmdM env err m
- -- , FlowCorpus a
- , FlowInsertDB a
- )
- => UserId
- -> CorpusId
- -> [a]
- -> m ([DocId], [DocumentWithId a])
-insertDocs uId cId hs = do
- let docs = map addUniqId hs
- newIds <- insertDb uId cId docs
- -- printDebug "newIds" newIds
- let
- newIds' = map reId newIds
- documentsWithId = mergeData (toInserted newIds) (Map.fromList $ map viewUniqId' docs)
- _ <- Doc.add cId newIds'
- pure (newIds', documentsWithId)
-
insertMasterDocs :: ( FlowCmdM env err m
, FlowCorpus a
-- this will enable global database monitoring
-- maps :: IO Map Ngrams (Map NgramsType (Map NodeId Int))
- mapNgramsDocs <- mapNodeIdNgrams
- <$> documentIdWithNgrams (extractNgramsT $ withLang lang documentsWithId) documentsWithId
+ mapNgramsDocs' :: HashMap ExtractedNgrams (Map NgramsType (Map NodeId Int))
+ <- mapNodeIdNgrams
+ <$> documentIdWithNgrams
+ (extractNgramsT $ withLang lang documentsWithId)
+ documentsWithId
+
+ lId <- getOrMkList masterCorpusId masterUserId
+ _ <- saveDocNgramsWith lId mapNgramsDocs'
+
+ -- _cooc <- insertDefaultNode NodeListCooc lId masterUserId
+ pure ids'
+
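+-- | Split out of insertMasterDocs (and exported) so other flows can reuse
+-- the ngrams persistence step independently of document insertion.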
+saveDocNgramsWith :: ( FlowCmdM env err m)
+ => ListId
+ -> HashMap ExtractedNgrams (Map NgramsType (Map NodeId Int))
+ -> m ()
+saveDocNgramsWith lId mapNgramsDocs' = do
-  terms2id <- insertNgrams $ Map.keys mapNgramsDocs
+  terms2id <- insertExtractedNgrams $ HashMap.keys mapNgramsDocs'
+  let mapNgramsDocs = HashMap.mapKeys extracted2ngrams mapNgramsDocs'
-- to be removed
- let indexedNgrams = Map.mapKeys (indexNgrams terms2id) mapNgramsDocs
+ let indexedNgrams = HashMap.mapKeys (indexNgrams terms2id) mapNgramsDocs
-- new
- lId <- getOrMkList masterCorpusId masterUserId
mapCgramsId <- listInsertDb lId toNodeNgramsW'
- $ map (first _ngramsTerms . second Map.keys)
- $ Map.toList mapNgramsDocs
+ $ map (first _ngramsTerms . second Map.keys)
+ $ HashMap.toList mapNgramsDocs
+
-- insertDocNgrams
_return <- insertNodeNodeNgrams2
$ catMaybes [ NodeNodeNgrams2 <$> Just nId
<*> getCgramsId mapCgramsId ngrams_type (_ngramsTerms terms'')
<*> Just (fromIntegral w :: Double)
- | (terms'', mapNgramsTypes) <- Map.toList mapNgramsDocs
+ | (terms'', mapNgramsTypes) <- HashMap.toList mapNgramsDocs
, (ngrams_type, mapNodeIdWeight) <- Map.toList mapNgramsTypes
- , (nId, w) <- Map.toList mapNodeIdWeight
+ , (nId, w) <- Map.toList mapNodeIdWeight
]
-
- _cooc <- insertDefaultNode NodeListCooc lId masterUserId
-- to be removed
_ <- insertDocNgrams lId indexedNgrams
- pure ids'
+
+ pure ()
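+  -- NOTE: ngrams are currently written twice: to node_node_ngrams2 above
+  -- and, via insertDocNgrams, to the legacy table marked "to be removed".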
+
------------------------------------------------------------------------
+-- TODO Type NodeDocumentUnicised
+insertDocs :: ( FlowCmdM env err m
+ -- , FlowCorpus a
+ , FlowInsertDB a
+ )
+ => UserId
+ -> CorpusId
+ -> [a]
+ -> m ([DocId], [Indexed NodeId a])
+insertDocs uId cId hs = do
+ let docs = map addUniqId hs
+ newIds <- insertDb uId cId docs
+ -- printDebug "newIds" newIds
+ let
+ newIds' = map reId newIds
+ documentsWithId = mergeData (toInserted newIds) (Map.fromList $ map viewUniqId' docs)
+ _ <- Doc.add cId newIds'
+ pure (newIds', documentsWithId)
+
+
------------------------------------------------------------------------
viewUniqId' :: UniqId a
  => a
  -> (Hash, a)
viewUniqId' d = maybe err (\h -> (h, d)) (view uniqId d)
  where
    err = panic "[ERROR] Database.Flow.toInsert"
mergeData :: Map Hash ReturnId
-> Map Hash a
- -> [DocumentWithId a]
+ -> [Indexed NodeId a]
mergeData rs = catMaybes . map toDocumentWithId . Map.toList
where
toDocumentWithId (sha,hpd) =
- DocumentWithId <$> fmap reId (lookup sha rs)
- <*> Just hpd
+ Indexed <$> fmap reId (lookup sha rs)
+ <*> Just hpd
------------------------------------------------------------------------
-instance HasText HyperdataContact
- where
- hasText = undefined
------------------------------------------------------------------------
------------------------------------------------------------------------
documentIdWithNgrams :: HasNodeError err
=> (a
- -> Cmd err (Map Ngrams (Map NgramsType Int)))
- -> [DocumentWithId a]
- -> Cmd err [DocumentIdWithNgrams a]
+ -> Cmd err (HashMap b (Map NgramsType Int)))
+ -> [Indexed NodeId a]
+ -> Cmd err [DocumentIdWithNgrams a b]
documentIdWithNgrams f = traverse toDocumentIdWithNgrams
where
toDocumentIdWithNgrams d = do
- e <- f $ documentData d
+ e <- f $ _unIndex d
pure $ DocumentIdWithNgrams d e
+
+-- | TODO check optimization
+mapNodeIdNgrams :: (Ord b, Hashable b)
+ => [DocumentIdWithNgrams a b]
+ -> HashMap b
+ (Map NgramsType
+ (Map NodeId Int)
+ )
+mapNodeIdNgrams = HashMap.unionsWith (Map.unionWith (Map.unionWith (+))) . fmap f
+ where
+ f :: DocumentIdWithNgrams a b
+ -> HashMap b (Map NgramsType (Map NodeId Int))
+ f d = fmap (fmap (Map.singleton nId)) $ documentNgrams d
+ where
+ nId = _index $ documentWithId d
+
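+-- A worked sketch: if documents 12 and 34 each extract the ngram "graph"
+-- once under NgramsTerms, the per-document maps union to
+--   fromList [("graph", fromList [(NgramsTerms, fromList [(12,1), (34,1)])])]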
+
------------------------------------------------------------------------
instance ExtractNgramsT HyperdataContact
where
- extractNgramsT l hc = filterNgramsT 255 <$> extract l hc
+ extractNgramsT l hc = HashMap.mapKeys (cleanExtractedNgrams 255) <$> extract l hc
where
extract :: TermType Lang -> HyperdataContact
- -> Cmd err (Map Ngrams (Map NgramsType Int))
+ -> Cmd err (HashMap ExtractedNgrams (Map NgramsType Int))
extract _l hc' = do
let authors = map text2ngrams
-                 $ maybe ["Nothing"] (\a -> [a])
-                 $ view (hc_who . _Just . cw_lastName) hc'
-
-          pure $ Map.fromList $ [(a', Map.singleton Authors 1) | a' <- authors ]
-instance HasText HyperdataDocument
-  where
-    hasText h = catMaybes [ _hd_title h
-                          , _hd_abstract h
-                          ]
+                 $ maybe ["Nothing"] (\a -> [a])
+                 $ view (hc_who . _Just . cw_lastName) hc'
+          pure $ HashMap.fromList $ [(SimpleNgrams a', Map.singleton Authors 1) | a' <- authors ]
instance ExtractNgramsT HyperdataDocument
where
extractNgramsT :: TermType Lang
-> HyperdataDocument
- -> Cmd err (Map Ngrams (Map NgramsType Int))
- extractNgramsT lang hd = filterNgramsT 255 <$> extractNgramsT' lang hd
+ -> Cmd err (HashMap ExtractedNgrams (Map NgramsType Int))
+ extractNgramsT lang hd = HashMap.mapKeys (cleanExtractedNgrams 255) <$> extractNgramsT' lang hd
where
extractNgramsT' :: TermType Lang
-> HyperdataDocument
- -> Cmd err (Map Ngrams (Map NgramsType Int))
+ -> Cmd err (HashMap ExtractedNgrams (Map NgramsType Int))
extractNgramsT' lang' doc = do
let source = text2ngrams
$ maybe "Nothing" identity
$ _hd_source doc
institutes = map text2ngrams
- $ maybe ["Nothing"] (map toSchoolName . (splitOn ", "))
+ $ maybe ["Nothing"] (map toSchoolName . (T.splitOn ", "))
$ _hd_institutes doc
authors = map text2ngrams
- $ maybe ["Nothing"] (splitOn ", ")
+ $ maybe ["Nothing"] (T.splitOn ", ")
$ _hd_authors doc
- terms' <- map text2ngrams
- <$> map (intercalate " " . _terms_label)
+ terms' <- map (enrichedTerms (lang' ^. tt_lang) CoreNLP NP)
<$> concat
<$> liftBase (extractTerms lang' $ hasText doc)
- pure $ Map.fromList $ [(source, Map.singleton Sources 1)]
- <> [(i', Map.singleton Institutes 1) | i' <- institutes ]
- <> [(a', Map.singleton Authors 1) | a' <- authors ]
- <> [(t', Map.singleton NgramsTerms 1) | t' <- terms' ]
+ pure $ HashMap.fromList
+ $ [(SimpleNgrams source, Map.singleton Sources 1) ]
+ <> [(SimpleNgrams i', Map.singleton Institutes 1) | i' <- institutes ]
+ <> [(SimpleNgrams a', Map.singleton Authors 1) | a' <- authors ]
+ <> [(EnrichedNgrams t', Map.singleton NgramsTerms 1) | t' <- terms' ]
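+
+-- Only NgramsTerms pass through POS enrichment (EnrichedNgrams, NP-tagged
+-- via CoreNLP); Sources, Institutes and Authors stay SimpleNgrams.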
instance (ExtractNgramsT a, HasText a) => ExtractNgramsT (Node a)
  where
    extractNgramsT l (Node _ _ _ _ _ _ _ h) = extractNgramsT l h

instance HasText a => HasText (Node a)
  where
    hasText (Node _ _ _ _ _ _ _ h) = hasText h
+
+-- | TODO put elsewhere
+-- | Upgrade function
+-- Assumes all documents are in English (currently the case).
+indexAllDocumentsWithPosTag :: FlowCmdM env err m => m ()
+indexAllDocumentsWithPosTag = do
+ rootId <- getRootId (UserName userMaster)
+ corpusIds <- findNodesId rootId [NodeCorpus]
+ docs <- List.concat <$> mapM getDocumentsWithParentId corpusIds
+
+ _ <- mapM extractInsert (splitEvery 1000 docs)
+
+ pure ()
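+  -- NOTE: intended as a one-off migration, run manually (e.g. from a REPL
+  -- with a FlowCmdM-capable runner; the runner name depends on the Dev helpers).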
+
+extractInsert :: FlowCmdM env err m => [Node HyperdataDocument] -> m ()
+extractInsert docs = do
+ let documentsWithId = map (\doc -> Indexed (doc ^. node_id) doc) docs
+
+ mapNgramsDocs' <- mapNodeIdNgrams
+ <$> documentIdWithNgrams
+ (extractNgramsT $ withLang (Multi EN) documentsWithId)
+ documentsWithId
+
+ _ <- insertExtractedNgrams $ HashMap.keys mapNgramsDocs'
+
+ pure ()
+