{-|
Module      : Gargantext.API.Node.Corpus.New
Description : New corpus API
Copyright   : (c) CNRS, 2017-Present
License     : AGPL + CECILL v3
Maintainer  : team@gargantext.org
Stability   : experimental

New corpus means either:
- a new corpus
- new data in an existing corpus
-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators   #-}
module Gargantext.API.Node.Corpus.New
  where
import Conduit
import Control.Lens hiding (elements, Empty)
import Data.Aeson
import Data.Aeson.TH (deriveJSON)
import qualified Data.ByteString.Base64 as BSB64
import Data.Conduit.Internal (zipSources)
import Data.Maybe (fromMaybe)
import Data.Swagger
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import GHC.Generics (Generic)
import Servant
import Servant.Job.Utils (jsonOptions)
-- import Servant.Multipart
import Test.QuickCheck (elements)
import Test.QuickCheck.Arbitrary
import Gargantext.API.Admin.Orchestrator.Types (JobLog(..), AsyncJobs)
import Gargantext.API.Admin.Types (HasSettings)
import Gargantext.API.Node.Corpus.New.Types
import Gargantext.API.Node.Corpus.Searx
import Gargantext.API.Node.Corpus.Types
import Gargantext.API.Node.Types
import Gargantext.Core (Lang(..))
import Gargantext.Core.Text.List.Social (FlowSocialListWith(..))
import Gargantext.Core.Types.Individu (User(..))
import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
import Gargantext.Database.Action.Flow (flowCorpus, getDataText, flowDataText, TermType(..), allDataOrigins)
import Gargantext.Database.Action.Flow.Types (FlowCmdM)
import Gargantext.Database.Action.Mail (sendMail)
import Gargantext.Database.Action.Node (mkNodeWithParent)
import Gargantext.Database.Action.User (getUserId)
import Gargantext.Database.Admin.Types.Hyperdata
import Gargantext.Database.Admin.Types.Node (CorpusId, NodeType(..), UserId)
import Gargantext.Database.Prelude (hasConfig)
import Gargantext.Database.Query.Table.Node (getNodeWith, updateCorpusPubmedAPIKey)
import Gargantext.Database.Query.Table.Node.UpdateOpaleye (updateHyperdata)
import Gargantext.Database.Schema.Node (node_hyperdata)
import Gargantext.Prelude
import Gargantext.Prelude.Config (gc_max_docs_parsers, gc_pubmed_api_key)
import Gargantext.Utils.Jobs (JobHandle, MonadJobStatus(..))
import qualified Gargantext.Core.Text.Corpus.API as API
import qualified Gargantext.Core.Text.Corpus.Parsers as Parser (FileType(..), parseFormatC)
import qualified Gargantext.Database.GargDB as GargDB
------------------------------------------------------------------------
data Query = Query { query_query     :: Text
                   , query_node_id   :: Int
                   , query_lang      :: Lang
                   , query_databases :: [DataOrigin]
                   }
  deriving (Eq, Generic)

deriveJSON (unPrefix "query_") 'Query
instance Arbitrary Query where
  arbitrary = elements [ Query q n la fs
                       | q  <- [ "honeybee* AND collapse" ]
                       -- the generators below fill an elided gap (assumption): representative values only
                       , n  <- [0..10]
                       , la <- [ EN ]
                       , fs <- take 3 $ repeat allDataOrigins
                       ]
instance ToSchema Query where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "query_")
------------------------------------------------------------------------
type PostApi = Summary "New Corpus endpoint"
            :> ReqBody '[JSON] Query
            :> Post '[JSON] CorpusId

type GetApi = Get '[JSON] ApiInfo
-- | TODO manage several apis
-- TODO this is only the POST
api :: (FlowCmdM env err m) => UserId -> Query -> m CorpusId
api uid (Query q _ _ as) = do
  cId <- case head as of
    Nothing      -> flowCorpusSearchInDatabase (UserDBId uid) EN q
    Just API.All -> flowCorpusSearchInDatabase (UserDBId uid) EN q
    Just a       -> do
      docs <- liftBase $ API.get a q (Just 1000)
      cId' <- flowCorpus (UserDBId uid) (Left q) (Multi EN) [docs]
      pure cId'
  pure cId
------------------------------------------------
-- TODO use this route for Client implementation
data ApiInfo = ApiInfo { api_info :: [API.ExternalAPIs] }
  deriving (Generic)

instance Arbitrary ApiInfo where
  arbitrary = ApiInfo <$> arbitrary

deriveJSON (unPrefix "") 'ApiInfo

instance ToSchema ApiInfo

info :: ApiInfo
info = ApiInfo API.externalAPIs
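
-- A sketch of the expected GET response for this route, assuming the derived
-- instance keeps the field name as-is and 'API.ExternalAPIs' values render as
-- plain strings (e.g. "PubMed"); the exact list depends on 'API.externalAPIs':
--
--   { "api_info": [ "PubMed", ... ] }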
------------------------------------------------------------------------
------------------------------------------------------------------------
data WithQuery = WithQuery
  { _wq_query        :: !API.RawQuery
  , _wq_databases    :: !Database
  , _wq_datafield    :: !(Maybe Datafield)
  , _wq_lang         :: !Lang
  , _wq_node_id      :: !Int
  , _wq_flowListWith :: !FlowSocialListWith
  }
  deriving (Generic)
makeLenses ''WithQuery

instance FromJSON WithQuery where
  parseJSON = genericParseJSON $ jsonOptions "_wq_"
instance ToJSON WithQuery where
  toJSON = genericToJSON $ jsonOptions "_wq_"
instance ToSchema WithQuery where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_wq_")
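
-- A sketch of the JSON payload shape (illustrative values), assuming
-- 'jsonOptions "_wq_"' simply drops the "_wq_" prefix from the field names;
-- the encodings of 'Database', 'Datafield' and 'FlowSocialListWith' come from
-- their own instances and are not spelled out here:
--
--   { "query": "honeybee* AND collapse"
--   , "databases": ...
--   , "datafield": ...
--   , "lang": "EN"
--   , "node_id": 42
--   , "flowListWith": ...
--   }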
------------------------------------------------------------------------

type AddWithQuery = Summary "Add with Query to corpus endpoint"
     :> Capture "corpus_id" CorpusId
     :> AsyncJobs JobLog '[JSON] WithQuery JobLog
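
-- Usage sketch (assumption about the surrounding server wiring): a client
-- POSTs a 'WithQuery' body to this route, receives a job identifier, and then
-- polls the job until the returned 'JobLog' reports completion or failure;
-- the actual work is performed by 'addToCorpusWithQuery' below.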
{- Legacy multipart variant, kept for reference only: it relies on
   Servant.Multipart (import commented out above) and clashes with the
   'AddWithFile' route defined further below.

type AddWithFile = Summary "Add with MultipartData to corpus endpoint"
     :> Capture "corpus_id" CorpusId
     :> MultipartForm Mem (MultipartData Mem)
     :> QueryParam "fileType" FileType
     :> AsyncJobs JobLog '[JSON] () JobLog
-}
------------------------------------------------------------------------
-- TODO WithQuery also has a corpus id
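
-- Overview of the steps below:
--   * if the datafield is 'Web', delegate the search to Searx via
--     'triggerSearxSearch' and finish the job;
--   * otherwise, store the PubMed API key on the corpus when the datafield is
--     'External (Just PubMed)', fetch documents with 'getDataText' from the
--     selected database, and feed them to 'flowDataText';
--   * job progress is reported through the 'JobHandle'
--     ('markStarted' / 'markProgress' / 'markComplete' / 'markFailed').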
addToCorpusWithQuery :: (FlowCmdM env err m, MonadJobStatus m)
                     => User -> CorpusId -> WithQuery -> Maybe API.Limit
                     -> JobHandle m -> m ()
addToCorpusWithQuery user cid (WithQuery { _wq_query = q
                                         , _wq_databases = dbs
                                         , _wq_datafield = datafield
                                         , _wq_lang = l
                                         , _wq_flowListWith = flw }) maybeLimit jobHandle = do
  -- printDebug "[addToCorpusWithQuery] (cid, dbs)" (cid, dbs)
  -- printDebug "[addToCorpusWithQuery] datafield" datafield
  -- printDebug "[addToCorpusWithQuery] flowListWith" flw

  case datafield of
    Just Web -> do
      -- printDebug "[addToCorpusWithQuery] processing web request" datafield

      markStarted 1 jobHandle

      _ <- triggerSearxSearch user cid q l jobHandle

      markComplete jobHandle
    _ -> do
      case datafield of
        Just (External (Just PubMed)) -> do
          _api_key <- view $ hasConfig . gc_pubmed_api_key
          printDebug "[addToCorpusWithQuery] pubmed api key" _api_key
          _ <- updateCorpusPubmedAPIKey cid (Just _api_key)
          pure ()
        _ -> pure ()

      markStarted 3 jobHandle

      -- TODO if cid is folder -> create Corpus
      --      if cid is corpus -> add to corpus
      --      if cid is root   -> create corpus in Private
      -- printDebug "[G.A.N.C.New] getDataText with query" q
      let db = database2origin dbs
      eTxt <- getDataText db (Multi l) q maybeLimit

      -- printDebug "[G.A.N.C.New] lTxts" lTxts
      case eTxt of
        Right txt -> do
          -- TODO Sum the lengths of the txt elements
          markProgress 1 jobHandle

          void $ flowDataText user txt (Multi l) cid (Just flw) jobHandle
          -- printDebug "corpus id" cids
          -- printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
          sendMail user

          markComplete jobHandle
        Left err -> do
          -- printDebug "Error: " err
          markFailed (Just $ T.pack (show err)) jobHandle
type AddWithForm = Summary "Add with FormUrlEncoded to corpus endpoint"
     :> Capture "corpus_id" CorpusId
     :> AsyncJobs JobLog '[FormUrlEncoded] NewWithForm JobLog
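
-- Overview of the steps below: pick a parser according to the declared
-- 'FileType', base64-decode the payload when the 'FileFormat' is ZIP,
-- stream-parse the documents while enforcing the 'gc_max_docs_parsers' limit,
-- and hand the resulting conduit to 'flowCorpus'; progress and failures are
-- reported through the 'JobHandle'.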
addToCorpusWithForm :: (FlowCmdM env err m, MonadJobStatus m)
                    => User -> CorpusId -> NewWithForm -> JobHandle m -> m ()
addToCorpusWithForm user cid (NewWithForm ft ff d l _n sel) jobHandle = do
  -- printDebug "[addToCorpusWithForm] Parsing corpus: " cid
  -- printDebug "[addToCorpusWithForm] fileType" ft
  -- printDebug "[addToCorpusWithForm] fileFormat" ff
  limit' <- view $ hasConfig . gc_max_docs_parsers
  let limit = fromIntegral limit' :: Integer
  let
    parseC = case ft of
      CSV_HAL   -> Parser.parseFormatC Parser.CsvHal
      CSV       -> Parser.parseFormatC Parser.CsvGargV3
      WOS       -> Parser.parseFormatC Parser.WOS
      PresseRIS -> Parser.parseFormatC Parser.RisPresse
      Iramuteq  -> Parser.parseFormatC Parser.Iramuteq
      JSON      -> Parser.parseFormatC Parser.JSON
  -- TODO granularity of the logStatus
  let data' = case ff of
        Plain -> TE.encodeUtf8 d -- gap filled here (assumption): plain payloads are passed on as UTF-8 bytes
        ZIP   -> case BSB64.decode $ TE.encodeUtf8 d of
          Left err      -> panic $ T.pack "[addToCorpusWithForm] error decoding base64: " <> T.pack err
          Right decoded -> decoded
  eDocsC <- liftBase $ parseC ff data'
  case eDocsC of
    Right (mCount, docsC) -> do
      -- TODO Add progress (jobStatus) update for docs - this is a
      -- long-running action

      let docsC' = zipSources (yieldMany [1..]) docsC
                 .| mapMC (\(idx, doc) ->
                       if idx > limit then do
                         --printDebug "[addToCorpusWithForm] number of docs exceeds the limit" (show limit)
                         let panicMsg' = [ "[addToCorpusWithForm] number of docs "
                                         , "exceeds the MAX_DOCS_PARSERS limit ("
                                         , show limit
                                         , ")" ]
                         let panicMsg = T.concat $ T.pack <$> panicMsg'
                         --logStatus $ jobLogFailTotalWithMessage panicMsg jobLog
                         panic panicMsg
                       else
                         pure doc)
                 .| mapC toHyperdataDocument
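
      -- Note on the pipeline above: 'zipSources (yieldMany [1..])' pairs each
      -- parsed document with its 1-based index, so the 'gc_max_docs_parsers'
      -- limit can be enforced while streaming, before every document is
      -- converted with 'toHyperdataDocument'.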
      --printDebug "Parsing corpus finished : " cid
      --printDebug "Starting extraction     : " cid
      -- TODO granularity of the logStatus
      -- printDebug "flowCorpus with (corpus_id, lang)" (cid, l)
      _cid' <- flowCorpus user
                          (Right [cid])
                          (Multi $ fromMaybe EN l)
                          (Just sel)
                          --(Just $ fromIntegral $ length docs, docsC')
                          (mCount, transPipe liftBase docsC') -- TODO fix number of docs
                          --(map (map toHyperdataDocument) docs)
                          jobHandle

      -- printDebug "Extraction finished   : " cid
      -- printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
      -- TODO uncomment this
      --sendMail user

      markComplete jobHandle
    Left e -> do
      printDebug "[addToCorpusWithForm] parse error" e
      markFailed (Just $ T.pack e) jobHandle
{- Legacy multipart upload handler, kept for reference only: it depends on
   'MultipartData' and 'postUpload' (not imported here) and is superseded by
   the 'addToCorpusWithFile' defined below.

addToCorpusWithFile :: FlowCmdM env err m
                    => CorpusId -> MultipartData Mem -> Maybe FileType
                    -> (JobLog -> m ()) -> m JobLog
addToCorpusWithFile cid input filetype logStatus = do
  logStatus JobLog { _scst_succeeded = Just 10
                   , _scst_failed    = Just 2
                   , _scst_remaining = Just 138
                   , _scst_events    = Just []
                   }
  printDebug "addToCorpusWithFile" cid
  _h <- postUpload cid filetype input

  pure JobLog { _scst_succeeded = Just 137
              , _scst_failed    = Just 13
              , _scst_remaining = Just 0
              , _scst_events    = Just []
              }
-}
type AddWithFile = Summary "Add with FileUrlEncoded to corpus endpoint"
     :> Capture "corpus_id" CorpusId
     :> AsyncJobs JobLog '[FormUrlEncoded] NewWithFile JobLog
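
-- Overview of the steps below: write the uploaded payload to disk via
-- 'GargDB.writeFile', create a 'NodeFile' child of the corpus, point its
-- 'HyperdataFile' at the saved path, and report progress through the
-- 'JobHandle'; an email notification is sent at the end.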
addToCorpusWithFile :: (HasSettings env, FlowCmdM env err m, MonadJobStatus m)
                    => User -> CorpusId -> NewWithFile -> JobHandle m -> m ()
addToCorpusWithFile user cid nwf@(NewWithFile _d _l fName) jobHandle = do
  printDebug "[addToCorpusWithFile] Uploading file to corpus: " cid
  markStarted 1 jobHandle

  fPath <- GargDB.writeFile nwf
  printDebug "[addToCorpusWithFile] File saved as: " fPath

  uId  <- getUserId user
  nIds <- mkNodeWithParent NodeFile (Just cid) uId fName
  _ <- case nIds of
    [nId] -> do
      node <- getNodeWith nId (Proxy :: Proxy HyperdataFile)
      let hl = node ^. node_hyperdata
      _ <- updateHyperdata nId $ hl { _hff_name = fName
                                    , _hff_path = T.pack fPath }

      printDebug "[addToCorpusWithFile] Created node with id: " nId
    _ -> pure ()
  printDebug "[addToCorpusWithFile] File upload to corpus finished: " cid

  printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
  sendMail user

  markComplete jobHandle