{-# LANGUAGE TemplateHaskell #-}
module Gargantext.Database.Action.Flow -- (flowDatabase, ngrams2list)
- ( FlowCmdM
- , getDataText
+ ( getDataText
, flowDataText
, flow
, flowCorpus
, flowAnnuaire
, insertMasterDocs
+ , saveDocNgramsWith
, getOrMkRoot
, getOrMk_RootWithCorpus
, allDataOrigins
, do_api
+ , indexAllDocumentsWithPosTag
)
where
import Data.Maybe (catMaybes)
import Data.Monoid
import Data.Swagger
-import Data.Text (splitOn)
+import qualified Data.Text as T
import Data.Traversable (traverse)
import Data.Tuple.Extra (first, second)
import GHC.Generics (Generic)
import Gargantext.Core (Lang(..), PosTagAlgo(..))
import Gargantext.Core.Ext.IMT (toSchoolName)
-import Gargantext.Core.Ext.IMTUser (deserialiseImtUsersFromFile)
+import Gargantext.Core.Ext.IMTUser (readFile_Annuaire)
import Gargantext.Core.Flow.Types
import Gargantext.Core.Text
import Gargantext.Core.Text.List.Group.WithStem ({-StopSize(..),-} GroupParams(..))
import Gargantext.Core.Text.Corpus.Parsers (parseFile, FileFormat)
import Gargantext.Core.Text.List (buildNgramsLists)
+import Gargantext.Core.Text.List.Social (FlowSocialListWith)
import Gargantext.Core.Text.Terms
import Gargantext.Core.Text.Terms.Mono.Stem.En (stemIt)
import Gargantext.Core.Types (POS(NP))
import Gargantext.Database.Query.Table.NodeNgrams (listInsertDb , getCgramsId)
import Gargantext.Database.Query.Table.NodeNodeNgrams2
import Gargantext.Database.Query.Tree.Root (getOrMkRoot, getOrMk_RootWithCorpus)
-import Gargantext.Database.Schema.Node (NodePoly(..))
+import Gargantext.Database.Schema.Node (NodePoly(..), node_id)
import Gargantext.Database.Types
import Gargantext.Prelude
import Gargantext.Prelude.Crypto.Hash (Hash)
import qualified Gargantext.Core.Text.Corpus.API as API
import qualified Gargantext.Database.Query.Table.Node.Document.Add as Doc (add)
+------------------------------------------------------------------------
+-- Imports for upgrade function
+import Gargantext.Database.Query.Tree.Root (getRootId)
+import Gargantext.Database.Query.Tree (findNodesId)
+import qualified Data.List as List
------------------------------------------------------------------------
-- TODO use internal with API name (could be old data)
data DataOrigin = InternalOrigin { _do_api :: API.ExternalAPIs }
-> DataText
-> TermType Lang
-> CorpusId
+ -> Maybe FlowSocialListWith
-> m CorpusId
-flowDataText u (DataOld ids) tt cid = flowCorpusUser (_tt_lang tt) u (Right [cid]) corpusType ids
+flowDataText u (DataOld ids) tt cid mfslw = flowCorpusUser (_tt_lang tt) u (Right [cid]) corpusType ids mfslw
where
corpusType = (Nothing :: Maybe HyperdataCorpus)
-flowDataText u (DataNew txt) tt cid = flowCorpus u (Right [cid]) tt txt
+flowDataText u (DataNew txt) tt cid mfslw = flowCorpus u (Right [cid]) tt mfslw txt
------------------------------------------------------------------------
-- TODO use proxy
-> FilePath
-> m AnnuaireId
flowAnnuaire u n l filePath = do
- docs <- liftBase $ (( splitEvery 500 <$> deserialiseImtUsersFromFile filePath) :: IO [[HyperdataContact]])
- flow (Nothing :: Maybe HyperdataAnnuaire) u n l docs
+ docs <- liftBase $ (( splitEvery 500 <$> readFile_Annuaire filePath) :: IO [[HyperdataContact]])
+ flow (Nothing :: Maybe HyperdataAnnuaire) u n l Nothing docs
------------------------------------------------------------------------
flowCorpusFile :: (FlowCmdM env err m)
-> Either CorpusName [CorpusId]
-> Limit -- Limit the number of docs (for dev purpose)
-> TermType Lang -> FileFormat -> FilePath
+ -> Maybe FlowSocialListWith
-> m CorpusId
-flowCorpusFile u n l la ff fp = do
- docs <- liftBase ( splitEvery 500
- <$> take l
- <$> parseFile ff fp
- )
- flowCorpus u n la (map (map toHyperdataDocument) docs)
+flowCorpusFile u n l la ff fp mfslw = do
+ eParsed <- liftBase $ parseFile ff fp
+ case eParsed of
+ Right parsed -> do
+ let docs = splitEvery 500 $ take l parsed
+ flowCorpus u n la mfslw (map (map toHyperdataDocument) docs)
+ Left e -> panic $ "Error: " <> (T.pack e)
------------------------------------------------------------------------
-- | TODO improve the needed type to create/update a corpus
=> User
-> Either CorpusName [CorpusId]
-> TermType Lang
+ -> Maybe FlowSocialListWith
-> [[a]]
-> m CorpusId
flowCorpus = flow (Nothing :: Maybe HyperdataCorpus)
-> User
-> Either CorpusName [CorpusId]
-> TermType Lang
+ -> Maybe FlowSocialListWith
-> [[a]]
-> m CorpusId
-flow c u cn la docs = do
+flow c u cn la mfslw docs = do
-- TODO if public insertMasterDocs else insertUserDocs
ids <- traverse (insertMasterDocs c la) docs
- flowCorpusUser (la ^. tt_lang) u cn c (concat ids)
+ flowCorpusUser (la ^. tt_lang) u cn c (concat ids) mfslw
------------------------------------------------------------------------
flowCorpusUser :: ( FlowCmdM env err m
-> Either CorpusName [CorpusId]
-> Maybe c
-> [NodeId]
+ -> Maybe FlowSocialListWith
-> m CorpusId
-flowCorpusUser l user corpusName ctype ids = do
+flowCorpusUser l user corpusName ctype ids mfslw = do
-- User Flow
(userId, _rootId, userCorpusId) <- getOrMk_RootWithCorpus user corpusName ctype
-- NodeTexts is first
-- printDebug "Node Text Ids:" tId
-- User List Flow
- (masterUserId, _masterRootId, masterCorpusId) <- getOrMk_RootWithCorpus (UserName userMaster) (Left "") ctype
+ (masterUserId, _masterRootId, masterCorpusId)
+ <- getOrMk_RootWithCorpus (UserName userMaster) (Left "") ctype
- -- let gp = (GroupParams l 2 3 (StopSize 3))
+ --let gp = (GroupParams l 2 3 (StopSize 3))
let gp = GroupWithPosTag l CoreNLP HashMap.empty
- ngs <- buildNgramsLists gp user userCorpusId masterCorpusId
+ ngs <- buildNgramsLists user userCorpusId masterCorpusId mfslw gp
_userListId <- flowList_DbRepo listId ngs
_mastListId <- getOrMkList masterCorpusId masterUserId
(extractNgramsT $ withLang lang documentsWithId)
documentsWithId
+ lId <- getOrMkList masterCorpusId masterUserId
+ _ <- saveDocNgramsWith lId mapNgramsDocs'
+
+ -- _cooc <- insertDefaultNode NodeListCooc lId masterUserId
+ pure ids'
+
+saveDocNgramsWith :: ( FlowCmdM env err m)
+ => ListId
+ -> HashMap ExtractedNgrams (Map NgramsType (Map NodeId Int))
+ -> m ()
+saveDocNgramsWith lId mapNgramsDocs' = do
terms2id <- insertExtractedNgrams $ HashMap.keys mapNgramsDocs'
let mapNgramsDocs = HashMap.mapKeys extracted2ngrams mapNgramsDocs'
let indexedNgrams = HashMap.mapKeys (indexNgrams terms2id) mapNgramsDocs
-- new
- lId <- getOrMkList masterCorpusId masterUserId
mapCgramsId <- listInsertDb lId toNodeNgramsW'
$ map (first _ngramsTerms . second Map.keys)
$ HashMap.toList mapNgramsDocs
+
-- insertDocNgrams
_return <- insertNodeNodeNgrams2
$ catMaybes [ NodeNodeNgrams2 <$> Just nId
, (ngrams_type, mapNodeIdWeight) <- Map.toList mapNgramsTypes
, (nId, w) <- Map.toList mapNodeIdWeight
]
-
- -- _cooc <- insertDefaultNode NodeListCooc lId masterUserId
-- to be removed
_ <- insertDocNgrams lId indexedNgrams
- pure ids'
+
+ pure ()
+
------------------------------------------------------------------------
-- TODO Type NodeDocumentUnicised
$ _hd_source doc
institutes = map text2ngrams
- $ maybe ["Nothing"] (map toSchoolName . (splitOn ", "))
+ $ maybe ["Nothing"] (map toSchoolName . (T.splitOn ", "))
$ _hd_institutes doc
authors = map text2ngrams
- $ maybe ["Nothing"] (splitOn ", ")
+ $ maybe ["Nothing"] (T.splitOn ", ")
$ _hd_authors doc
terms' <- map (enrichedTerms (lang' ^. tt_lang) CoreNLP NP)
hasText (Node _ _ _ _ _ _ _ h) = hasText h
+
+-- | TODO: put elsewhere
+-- | Upgrade function
+-- Suppose all documents are English (this is actually the case)
+indexAllDocumentsWithPosTag :: FlowCmdM env err m => m ()
+indexAllDocumentsWithPosTag = do
+ rootId <- getRootId (UserName userMaster)
+ corpusIds <- findNodesId rootId [NodeCorpus]
+ docs <- List.concat <$> mapM getDocumentsWithParentId corpusIds
+
+ _ <- mapM extractInsert (splitEvery 1000 docs)
+
+ pure ()
+
+extractInsert :: FlowCmdM env err m => [Node HyperdataDocument] -> m ()
+extractInsert docs = do
+ let documentsWithId = map (\doc -> Indexed (doc ^. node_id) doc) docs
+
+ mapNgramsDocs' <- mapNodeIdNgrams
+ <$> documentIdWithNgrams
+ (extractNgramsT $ withLang (Multi EN) documentsWithId)
+ documentsWithId
+
+ _ <- insertExtractedNgrams $ HashMap.keys mapNgramsDocs'
+
+ pure ()
+
+