{-|
Module      : Gargantext.API.Node.Corpus.New
Description : New corpus API
Copyright   : (c) CNRS, 2017-Present
License     : AGPL + CECILL v3
Maintainer  : team@gargantext.org
Stability   : experimental
Portability : POSIX

New corpus means either:
- a new corpus
- new data in an existing corpus
-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators   #-}

module Gargantext.API.Node.Corpus.New
      where
import Conduit
import Control.Lens hiding (elements, Empty)
import Data.Aeson
import Data.Aeson.TH (deriveJSON)
import qualified Data.ByteString.Base64 as BSB64
import Data.Either
import Data.Maybe (fromMaybe)
import Data.Swagger
import Data.Text (Text)
import qualified Data.Text as T
import GHC.Generics (Generic)
import Servant
import Servant.Job.Utils (jsonOptions)
-- import Servant.Multipart
import qualified Data.Text.Encoding as TE
-- import Test.QuickCheck (elements)
import Test.QuickCheck.Arbitrary

import Gargantext.Prelude

import Gargantext.API.Admin.Orchestrator.Types (JobLog(..), AsyncJobs, ScraperEvent(..), scst_events)
import Gargantext.API.Admin.Types (HasSettings)
import Gargantext.API.Job (addEvent, jobLogSuccess, jobLogFailTotal, jobLogFailTotalWithMessage)
import Gargantext.API.Node.Corpus.New.File
import Gargantext.API.Node.Corpus.Searx
import Gargantext.API.Node.Corpus.Types
import Gargantext.API.Node.Types
import Gargantext.Core (Lang(..){-, allLangs-})
import Gargantext.Core.Text.List.Social (FlowSocialListWith(..))
import qualified Gargantext.Core.Text.Corpus.API as API
import qualified Gargantext.Core.Text.Corpus.Parsers as Parser (FileFormat(..), parseFormat)
import Gargantext.Core.Types.Individu (User(..))
import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
import Gargantext.Database.Action.Flow (flowCorpus, getDataText, flowDataText, TermType(..){-, allDataOrigins-})
import Gargantext.Database.Action.Flow.Types (FlowCmdM)
import Gargantext.Database.Action.Mail (sendMail)
import Gargantext.Database.Action.Node (mkNodeWithParent)
import Gargantext.Database.Action.User (getUserId)
import Gargantext.Database.Admin.Types.Hyperdata
import Gargantext.Database.Admin.Types.Node (CorpusId, NodeType(..), UserId)
import Gargantext.Database.Prelude (hasConfig)
import Gargantext.Database.Query.Table.Node (getNodeWith)
import Gargantext.Database.Query.Table.Node.UpdateOpaleye (updateHyperdata)
import Gargantext.Database.Schema.Node (node_hyperdata)
import qualified Gargantext.Database.GargDB as GargDB
import Gargantext.Prelude.Config (gc_max_docs_parsers)
------------------------------------------------------------------------
{-
data Query = Query { query_query      :: Text
                   , query_node_id    :: Int
                   , query_lang       :: Lang
                   , query_databases  :: [DataOrigin]
                   }
                   deriving (Eq, Generic)

deriveJSON (unPrefix "query_") 'Query

instance Arbitrary Query where
    arbitrary = elements [ Query q n la fs
                         | q <- ["honeybee* AND collapse"
                                ]
                         , n <- [0..10]
                         , la <- allLangs
                         , fs <- take 3 $ repeat allDataOrigins
                         ]

instance ToSchema Query where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "query_")
-}
------------------------------------------------------------------------
{-
type PostApi = Summary "New Corpus endpoint"
             :> ReqBody '[JSON] Query
             :> Post '[JSON] CorpusId
type GetApi = Get '[JSON] ApiInfo
-}

-- | TODO manage several apis
-- TODO this is only the POST
{-
api :: (FlowCmdM env err m) => UserId -> Query -> m CorpusId
api uid (Query q _ as) = do
  cId <- case head as of
    Nothing      -> flowCorpusSearchInDatabase (UserDBId uid) EN q
    Just API.All -> flowCorpusSearchInDatabase (UserDBId uid) EN q
    Just a       -> do
      docs <- liftBase $ API.get a q (Just 1000)
      cId' <- flowCorpus (UserDBId uid) (Left q) (Multi EN) [docs]
      pure cId'

  pure cId
-}
------------------------------------------------
-- TODO use this route for Client implementation
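-- | External document sources exposed to clients; 'info' returns the list
--   provided by 'API.externalAPIs'.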
data ApiInfo = ApiInfo { api_info :: [API.ExternalAPIs]}
  deriving (Generic)
instance Arbitrary ApiInfo where
  arbitrary = ApiInfo <$> arbitrary

deriveJSON (unPrefix "") 'ApiInfo

instance ToSchema ApiInfo

info :: FlowCmdM env err m => UserId -> m ApiInfo
info _u = pure $ ApiInfo API.externalAPIs
------------------------------------------------------------------------
------------------------------------------------------------------------
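-- | Payload of the "add to corpus with query" job: the query string, the
--   database to harvest, an optional datafield (e.g. 'Web'), the language,
--   the target node id and the social-list strategy.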
data WithQuery = WithQuery
  { _wq_query        :: !Text
  , _wq_databases    :: !Database
  , _wq_datafield    :: !(Maybe Datafield)
  , _wq_lang         :: !Lang
  , _wq_node_id      :: !Int
  , _wq_flowListWith :: !FlowSocialListWith
  }
  deriving Generic

makeLenses ''WithQuery
instance FromJSON WithQuery where
  parseJSON = genericParseJSON $ jsonOptions "_wq_"
instance ToJSON WithQuery where
  toJSON = genericToJSON $ jsonOptions "_wq_"
instance ToSchema WithQuery where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_wq_")
------------------------------------------------------------------------
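-- | Servant route declaring the asynchronous "add documents from a query" job
--   attached to a given corpus.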
type AddWithQuery = Summary "Add with Query to corpus endpoint"
   :> "corpus"
     :> Capture "corpus_id" CorpusId
   :> "query"
     :> AsyncJobs JobLog '[JSON] WithQuery JobLog

{-
type AddWithFile = Summary "Add with MultipartData to corpus endpoint"
   :> "corpus"
     :> Capture "corpus_id" CorpusId
   :> "add"
   :> "file"
     :> MultipartForm Mem (MultipartData Mem)
     :> QueryParam "fileType" FileType
   :> "async"
     :> AsyncJobs JobLog '[JSON] () JobLog
-}
------------------------------------------------------------------------
-- TODO WithQuery also has a corpus id
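-- | Run the asynchronous "add to corpus with query" job: if the datafield is
--   'Web', delegate to a Searx search ('triggerSearxSearch'); otherwise fetch
--   documents from the selected database with 'getDataText' and flow them into
--   the corpus with 'flowDataText', reporting progress through 'JobLog'.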
addToCorpusWithQuery :: FlowCmdM env err m
                       => User
                       -> CorpusId
                       -> WithQuery
                       -> Maybe Integer
                       -> (JobLog -> m ())
                       -> m JobLog
addToCorpusWithQuery user cid (WithQuery { _wq_query = q
                                         , _wq_databases = dbs
                                         , _wq_datafield = datafield
                                         , _wq_lang = l
                                         , _wq_flowListWith = flw }) maybeLimit logStatus = do
  logStatus JobLog { _scst_succeeded = Just 0
                   , _scst_failed    = Just 0
                   , _scst_remaining = Just 3
                   , _scst_events    = Just []
                   }
  printDebug "[addToCorpusWithQuery] (cid, dbs)" (cid, dbs)
  printDebug "[addToCorpusWithQuery] datafield" datafield
  printDebug "[addToCorpusWithQuery] flowListWith" flw

  case datafield of
    Just Web -> do
      printDebug "[addToCorpusWithQuery] processing web request" datafield

      _ <- triggerSearxSearch user cid q l logStatus

      pure JobLog { _scst_succeeded = Just 3
                  , _scst_failed    = Just 0
                  , _scst_remaining = Just 0
                  , _scst_events    = Just []
                  }

    _ -> do
      -- TODO if cid is folder -> create Corpus
      --      if cid is corpus -> add to corpus
      --      if cid is root   -> create corpus in Private
      eTxts <- mapM (\db -> getDataText db (Multi l) q maybeLimit) [database2origin dbs]
      let lTxts = lefts eTxts
      case lTxts of
        [] -> do
          let txts = rights eTxts
          -- TODO Sum lengths of each txt element
          logStatus JobLog { _scst_succeeded = Just 2
                           , _scst_failed    = Just 0
                           , _scst_remaining = Just $ 1 + length txts
                           , _scst_events    = Just []
                           }

          cids <- mapM (\txt -> flowDataText user txt (Multi l) cid Nothing logStatus) txts
          printDebug "corpus id" cids
          printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
          sendMail user

          pure JobLog { _scst_succeeded = Just 3
                      , _scst_failed    = Just 0
                      , _scst_remaining = Just 0
                      , _scst_events    = Just []
                      }

        (err:_) -> do
          pure $ addEvent "ERROR" (T.pack $ show err) $
            JobLog { _scst_succeeded = Just 2
                   , _scst_failed    = Just 1
                   , _scst_remaining = Just 0
                   , _scst_events    = Just []
                   }
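
-- | Servant route for the asynchronous "add to corpus with form" job, whose
--   payload is the form-encoded 'NewWithForm' upload.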
type AddWithForm = Summary "Add with FormUrlEncoded to corpus endpoint"
   :> "corpus"
     :> Capture "corpus_id" CorpusId
   :> "add"
   :> "form"
   :> "async"
     :> AsyncJobs JobLog '[FormUrlEncoded] NewWithForm JobLog
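
-- | Parse an uploaded file (CSV, CSV_HAL, WOS, RIS Presse, or a base64-encoded
--   ZIP), enforce the MAX_DOCS_PARSERS limit from the config, and flow the
--   parsed documents into the corpus.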
addToCorpusWithForm :: (FlowCmdM env err m)
                    => User
                    -> CorpusId
                    -> NewWithForm
                    -> (JobLog -> m ())
                    -> JobLog
                    -> m JobLog
addToCorpusWithForm user cid (NewWithForm ft d l _n) logStatus jobLog = do
  printDebug "[addToCorpusWithForm] Parsing corpus: " cid
  printDebug "[addToCorpusWithForm] fileType" ft
  logStatus jobLog
  let
    parse = case ft of
      CSV_HAL   -> Parser.parseFormat Parser.CsvHal
      CSV       -> Parser.parseFormat Parser.CsvGargV3
      WOS       -> Parser.parseFormat Parser.WOS
      PresseRIS -> Parser.parseFormat Parser.RisPresse
      ZIP       -> Parser.parseFormat Parser.ZIP

  -- TODO granularity of the logStatus
  let data' = case ft of
        ZIP -> case BSB64.decode $ TE.encodeUtf8 d of
          Left err -> panic $ T.pack "[addToCorpusWithForm] error decoding base64: " <> T.pack err
          Right decoded -> decoded
        _ -> cs d
  eDocs <- liftBase $ parse data'
  case eDocs of
    Right docs -> do
      -- TODO Add progress (jobStatus) update for docs - this is a
      -- long action

      limit' <- view $ hasConfig . gc_max_docs_parsers
      let limit = fromIntegral limit'
      if length docs > limit then do
        printDebug "[addToCorpusWithForm] number of docs exceeds the limit" (show $ length docs)
        let panicMsg' = [ "[addToCorpusWithForm] number of docs ("
                        , show $ length docs
                        , ") exceeds the MAX_DOCS_PARSERS limit ("
                        , show limit
                        , ")" ]
        let panicMsg = T.concat $ T.pack <$> panicMsg'
        logStatus $ jobLogFailTotalWithMessage panicMsg jobLog
        panic panicMsg
      else
        pure ()

      printDebug "Parsing corpus finished : " cid
      logStatus jobLog2

      printDebug "Starting extraction     : " cid
      -- TODO granularity of the logStatus
      _cid' <- flowCorpus user
                          (Right [cid])
                          (Multi $ fromMaybe EN l)
                          Nothing
                          (Just $ fromIntegral $ length docs, yieldMany docs .| mapC toHyperdataDocument)
                          --(map (map toHyperdataDocument) docs)
                          logStatus

      printDebug "Extraction finished   : " cid
      printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
      sendMail user

      logStatus jobLog3
      pure jobLog3
    Left e -> do
      printDebug "[addToCorpusWithForm] parse error" e

      let evt = ScraperEvent { _scev_message = Just $ T.pack e
                             , _scev_level   = Just "ERROR"
                             , _scev_date    = Nothing }

      logStatus $ over (scst_events . _Just) (\evt' -> evt' <> [evt]) jobLogE
      pure jobLogE
  where
    jobLog2 = jobLogSuccess jobLog
    jobLog3 = jobLogSuccess jobLog2
    jobLogE = jobLogFailTotal jobLog
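
-- The multipart-based uploader below clashes with the 'addToCorpusWithFile'
-- defined further down and depends on the disabled Servant.Multipart import,
-- so it is kept commented out.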
{-
addToCorpusWithFile :: FlowCmdM env err m
                    => CorpusId
                    -> MultipartData Mem
                    -> Maybe FileType
                    -> (JobLog -> m ())
                    -> m JobLog
addToCorpusWithFile cid input filetype logStatus = do
  logStatus JobLog { _scst_succeeded = Just 10
                   , _scst_failed    = Just 2
                   , _scst_remaining = Just 138
                   , _scst_events    = Just []
                   }
  printDebug "addToCorpusWithFile" cid
  _h <- postUpload cid filetype input

  pure JobLog { _scst_succeeded = Just 137
              , _scst_failed    = Just 13
              , _scst_remaining = Just 0
              , _scst_events    = Just []
              }
-}
type AddWithFile = Summary "Add with FileUrlEncoded to corpus endpoint"
   :> "corpus"
     :> Capture "corpus_id" CorpusId
   :> "add"
   :> "file"
   :> "async"
     :> AsyncJobs JobLog '[FormUrlEncoded] NewWithFile JobLog
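
-- | Save the uploaded file with 'GargDB.writeFile', create a 'NodeFile' node
--   under the corpus and record the file name and path in its hyperdata.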
addToCorpusWithFile :: (HasSettings env, FlowCmdM env err m)
                    => User
                    -> CorpusId
                    -> NewWithFile
                    -> (JobLog -> m ())
                    -> m JobLog
addToCorpusWithFile user cid nwf@(NewWithFile _d _l fName) logStatus = do

  printDebug "[addToCorpusWithFile] Uploading file to corpus: " cid
  logStatus JobLog { _scst_succeeded = Just 0
                   , _scst_failed    = Just 0
                   , _scst_remaining = Just 1
                   , _scst_events    = Just []
                   }

  fPath <- GargDB.writeFile nwf
  printDebug "[addToCorpusWithFile] File saved as: " fPath

  uId <- getUserId user
  nIds <- mkNodeWithParent NodeFile (Just cid) uId fName

  _ <- case nIds of
    [nId] -> do
      node <- getNodeWith nId (Proxy :: Proxy HyperdataFile)
      let hl = node ^. node_hyperdata
      _ <- updateHyperdata nId $ hl { _hff_name = fName
                                    , _hff_path = T.pack fPath }
      printDebug "[addToCorpusWithFile] Created node with id: " nId
    _ -> pure ()

  printDebug "[addToCorpusWithFile] File upload to corpus finished: " cid

  printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
  sendMail user

  pure $ JobLog { _scst_succeeded = Just 1
                , _scst_failed    = Just 0
                , _scst_remaining = Just 0
                , _scst_events    = Just []
                }