{-|
Module      : Gargantext.Database.Node
Description : Main requests of Node to the database
Copyright   : (c) CNRS, 2017-Present
License     : AGPL + CECILL v3
Maintainer  : team@gargantext.org
Stability   : experimental
Portability : POSIX
-}

{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

{-# LANGUAGE Arrows #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}

module Gargantext.Database.Node where


import GHC.Int (Int64)
import Data.Maybe
import Data.Time (UTCTime)
import Database.PostgreSQL.Simple.FromField ( Conversion
                                            , ResultError(ConversionFailed)
                                            , FromField
                                            , fromField
                                            , returnError
                                            )
import Prelude hiding (null, id, map, sum)

import Gargantext.Core.Types
import Gargantext.Database.Types.Node (NodeType)
import Gargantext.Database.Queries
import Gargantext.Database.Config (nodeTypeId)
import Gargantext.Prelude hiding (sum)


import Database.PostgreSQL.Simple.Internal (Field)
import Control.Applicative (Applicative)
import Control.Arrow (returnA)
import Control.Lens.TH (makeLensesWith, abbreviatedFields)
import Control.Monad.IO.Class
import Control.Monad.Reader
import Data.Aeson
import Data.Text (Text)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.Typeable (Typeable)

import qualified Data.ByteString as DB
import qualified Data.ByteString.Lazy as DBL
import Data.ByteString (ByteString)

import Database.PostgreSQL.Simple (Connection)
import Opaleye hiding (FromField)
import Opaleye.Internal.QueryArr (Query)
import qualified Data.Profunctor.Product as PP

-- | Types for Node Database Management
data PGTSVector

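-- | 'Cmd' wraps a database action that needs a PostgreSQL 'Connection':
-- a thin newtype over @ReaderT Connection IO@.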
newtype Cmd a = Cmd (ReaderT Connection IO a)
  deriving (Functor, Applicative, Monad, MonadReader Connection, MonadIO)

runCmd :: Connection -> Cmd a -> IO a
runCmd c (Cmd f) = runReaderT f c

mkCmd :: (Connection -> IO a) -> Cmd a
mkCmd = Cmd . ReaderT
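
-- Usage sketch (not part of the API; assumes an already opened connection
-- @conn :: Connection@, and the node id @42@ is only an example value):
--
-- > n <- runCmd conn (deleteNode 42)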

{-
newtype Cmd a = Cmd { unCmd :: Connection -> IO a }

instance Monad Cmd where
  return a = Cmd $ \_ -> return a

  m >>= f = Cmd $ \c -> do
    a <- unCmd m c
    unCmd (f a) c
-}

------------------------------------------------------------------------
type CorpusId = Int
------------------------------------------------------------------------

instance FromField HyperdataCorpus where
  fromField = fromField'

instance FromField HyperdataDocument where
  fromField = fromField'

instance FromField HyperdataDocumentV3 where
  fromField = fromField'

instance FromField HyperdataProject where
  fromField = fromField'

instance FromField HyperdataUser where
  fromField = fromField'


instance QueryRunnerColumnDefault PGJsonb HyperdataDocument where
  queryRunnerColumnDefault = fieldQueryRunnerColumn

instance QueryRunnerColumnDefault PGJsonb HyperdataDocumentV3 where
  queryRunnerColumnDefault = fieldQueryRunnerColumn

instance QueryRunnerColumnDefault PGJsonb HyperdataCorpus where
  queryRunnerColumnDefault = fieldQueryRunnerColumn

instance QueryRunnerColumnDefault PGJsonb HyperdataProject where
  queryRunnerColumnDefault = fieldQueryRunnerColumn

instance QueryRunnerColumnDefault PGJsonb HyperdataUser where
  queryRunnerColumnDefault = fieldQueryRunnerColumn


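-- | Decode a JSONB column: first parse the raw field into an Aeson 'Value'
-- with the underlying 'FromField' instance, then run 'fromJSON' towards the
-- target hyperdata type, reporting a 'ConversionFailed' on parse errors.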
fromField' :: (Typeable b, FromJSON b) => Field -> Maybe DB.ByteString -> Conversion b
fromField' field mb = do
    v <- fromField field mb
    valueToHyperdata v
  where
    valueToHyperdata v = case fromJSON v of
      Success a  -> pure a
      Error _err -> returnError ConversionFailed field "cannot parse hyperdata"

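-- Derive the Opaleye product-profunctor adaptor 'pNode' and lenses
-- (with abbreviated field names) for the 'NodePoly' record.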
$(makeAdaptorAndInstance "pNode" ''NodePoly)
$(makeLensesWith abbreviatedFields ''NodePoly)


nodeTable :: Table NodeWrite NodeRead
nodeTable = Table "nodes" (pNode Node { node_id        = optional "id"
                                      , node_typename  = required "typename"
                                      , node_userId    = required "user_id"
                                      , node_parentId  = required "parent_id"
                                      , node_name      = required "name"
                                      , node_date      = optional "date"
                                      , node_hyperdata = required "hyperdata"
                                      -- , node_titleAbstract = optional "title_abstract"
                                      }
                          )

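-- | Tuple-based view of the same "nodes" table, used by the insert helpers
-- below ('mkNode', 'mkNodeR', 'mkNode'', 'mkNodeR'') via 'PP.p7'.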
nodeTable' :: Table ( Maybe (Column PGInt4)
                    , Column PGInt4
                    , Column PGInt4
                    , Column PGInt4
                    , Column PGText
                    , Maybe (Column PGTimestamptz)
                    , Column PGJsonb
                    )
                    ( Column PGInt4
                    , Column PGInt4
                    , Column PGInt4
                    , Column PGInt4
                    , Column PGText
                    , Column PGTimestamptz
                    , Column PGJsonb
                    )

nodeTable' = Table "nodes" (PP.p7 ( optional "id"
                                  , required "typename"
                                  , required "user_id"
                                  , required "parent_id"
                                  , required "name"
                                  , optional "date"
                                  , required "hyperdata"
                                  )
                           )


queryNodeTable :: Query NodeRead
queryNodeTable = queryTable nodeTable


selectNode :: Column PGInt4 -> Query NodeRead
selectNode id = proc () -> do
    row <- queryNodeTable -< ()
    restrict -< node_id row .== id
    returnA -< row

runGetNodes :: Query NodeRead -> Cmd [Node Value]
runGetNodes q = mkCmd $ \conn -> runQuery conn q
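
-- Usage sketch (assumes an open connection @conn :: Connection@; the node id
-- @1@ is only an example value):
--
-- > ns <- runCmd conn (runGetNodes (selectNode (pgInt4 1)))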

-- | order by publication date
-- Favorites (Bool), node_ngrams
selectNodesWith :: ParentId     -> Maybe NodeType
                -> Maybe Offset -> Maybe Limit -> Query NodeRead
selectNodesWith parentId maybeNodeType maybeOffset maybeLimit =
  --offset' maybeOffset $ limit' maybeLimit $ orderBy (asc (hyperdataDocument_Publication_date . node_hyperdata)) $ selectNodesWith' parentId typeId
  limit' maybeLimit $ offset' maybeOffset $ orderBy (asc node_id) $ selectNodesWith' parentId maybeNodeType

selectNodesWith' :: ParentId -> Maybe NodeType -> Query NodeRead
selectNodesWith' parentId maybeNodeType = proc () -> do
    node <- (proc () -> do
      row@(Node _ typeId _ parentId' _ _ _) <- queryNodeTable -< ()
      restrict -< parentId' .== (toNullable $ pgInt4 parentId)

      let typeId' = maybe 0 nodeTypeId maybeNodeType

      restrict -< if typeId' > 0
                     then typeId .== (pgInt4 (typeId' :: Int))
                     else (pgBool True)
      returnA -< row ) -< ()
    returnA -< node


--type Cmd' a = forall m. (MonadReader env m, HasConnection env, MonadIO m) => m a


-- deleteNode :: (MonadReader Connection m, MonadIO m) => Int -> m Int
-- deleteNode :: Int -> Cmd' Int

deleteNode :: Int -> Cmd Int
deleteNode n = mkCmd $ \conn ->
  fromIntegral <$> runDelete conn nodeTable
                 (\(Node n_id _ _ _ _ _ _) -> n_id .== pgInt4 n)

deleteNodes :: [Int] -> Cmd Int
deleteNodes ns = mkCmd $ \conn ->
  fromIntegral <$> runDelete conn nodeTable
                 (\(Node n_id _ _ _ _ _ _) -> in_ (map pgInt4 ns) n_id)


getNodesWith :: Connection    -> Int -> Maybe NodeType
             -> Maybe Offset  -> Maybe Limit -> IO [Node Value]
getNodesWith conn parentId nodeType maybeOffset maybeLimit =
    runQuery conn $ selectNodesWith
                    parentId nodeType maybeOffset maybeLimit
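
-- Usage sketch (assumes an open connection @conn@ and a parent node id
-- @pId :: Int@; fetches every 'Document' child, without paging):
--
-- > docs <- getNodesWith conn pId (Just Document) Nothing Nothing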

-- NP check type
getNodesWithParentId :: Int
                     -> Maybe Text -> Connection -> IO [Node Value]
getNodesWithParentId n _ conn = runQuery conn $ selectNodesWithParentID n

getNodesWithParentId' :: Int
                      -> Maybe Text -> Connection -> IO [Node Value]
getNodesWithParentId' n _ conn = runQuery conn $ selectNodesWithParentID n


------------------------------------------------------------------------
getDocumentsV3WithParentId :: Connection -> Int -> IO [Node HyperdataDocumentV3]
getDocumentsV3WithParentId conn n = runQuery conn $ selectNodesWith' n (Just Document)

getDocumentsWithParentId :: Connection -> Int -> IO [Node HyperdataDocument]
getDocumentsWithParentId conn n = runQuery conn $ selectNodesWith' n (Just Document)

------------------------------------------------------------------------


selectNodesWithParentID :: Int -> Query NodeRead
selectNodesWithParentID n = proc () -> do
    row@(Node _ _ _ parent_id _ _ _) <- queryNodeTable -< ()
    restrict -< if n > 0
                   then parent_id .== (toNullable $ pgInt4 n)
                   else isNull parent_id
    returnA -< row


selectNodesWithType :: Column PGInt4 -> Query NodeRead
selectNodesWithType type_id = proc () -> do
    row@(Node _ tn _ _ _ _ _) <- queryNodeTable -< ()
    restrict -< tn .== type_id
    returnA -< row


getNode :: Connection -> Int -> IO (Node Value)
getNode conn id = do
    fromMaybe (error $ "Node does not exist: " <> show id) . headMay
      <$> runQuery conn (limit 1 $ selectNode (pgInt4 id))


getNodesWithType :: Connection -> Column PGInt4 -> IO [Node HyperdataDocument]
getNodesWithType conn type_id = do
    runQuery conn $ selectNodesWithType type_id

type UserId = NodeId
type TypeId = Int


------------------------------------------------------------------------
-- Quick and dirty
------------------------------------------------------------------------
type NodeWrite' = NodePoly (Maybe Int) Int Int ParentId Text (Maybe UTCTime) ByteString

--node :: UserId -> ParentId -> NodeType -> Text -> Value -> NodeWrite'
node :: UserId -> ParentId -> NodeType -> Text -> Value -> NodeWrite'
node userId parentId nodeType name nodeData = Node Nothing typeId userId parentId name Nothing byteData
  where
    typeId   = nodeTypeId nodeType
    byteData = DB.pack $ DBL.unpack $ encode nodeData
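
-- Usage sketch for the smart constructor above (user id @1@, parent id @pId@
-- and the empty JSON object are only example values):
--
-- > let n = node 1 pId Document "a title" (object [])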

node2write :: (Functor f2, Functor f1)
           => Int
           -> NodePoly (f1 Int) Int Int parentId Text (f2 UTCTime) ByteString
           -> ( f1 (Column PGInt4), Column PGInt4, Column PGInt4
              , Column PGInt4, Column PGText, f2 (Column PGTimestamptz)
              , Column PGJsonb)
node2write pid (Node id tn ud _ nm dt hp) = ( pgInt4 <$> id
                                            , pgInt4 tn
                                            , pgInt4 ud
                                            , pgInt4 pid
                                            , pgStrictText nm
                                            , pgUTCTime <$> dt
                                            , pgStrictJSONB hp
                                            )


mkNode :: ParentId -> [NodeWrite'] -> Connection -> IO Int64
mkNode pid ns conn = runInsertMany conn nodeTable' $ map (node2write pid) ns

mkNodeR :: ParentId -> [NodeWrite'] -> Connection -> IO [Int]
mkNodeR pid ns conn = runInsertManyReturning conn nodeTable' (map (node2write pid) ns) (\(i,_,_,_,_,_,_) -> i)
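
-- Usage sketch combining 'node' and 'mkNodeR' (again assuming @conn@ and
-- @pId@ are an open connection and an existing parent id):
--
-- > ids <- mkNodeR pId [node 1 pId Document "a title" (object [])] conn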

------------------------------------------------------------------------
-- TODO Hierarchy of Nodes
-- post and get same types Node' and update if changes

{- TODO semantic to achieve
post c uid pid [ Node' Corpus "name" "{}" []
               , Node' Folder "name" "{}" [ Node' Corpus "test 2" "" [ Node' Document "title" "metaData" []
                                                                     , Node' Document "title" "jsonData" []
                                                                     ]
                                          ]
               ]
-}
------------------------------------------------------------------------

-- TODO
-- Currently this function removes the child relation;
-- it needs a temporary type between Node' and NodeWriteT.
node2table :: UserId -> ParentId -> Node' -> NodeWriteT
node2table uid pid (Node' nt txt v []) = ( Nothing, pgInt4 $ nodeTypeId nt, pgInt4 uid, pgInt4 pid
                                         , pgStrictText txt, Nothing, pgStrictJSONB $ DB.pack $ DBL.unpack $ encode v)
node2table _ _ (Node' _ _ _ _) = panic "node2table: should not happen, Tree insert not implemented yet"


data Node' = Node' { _n_type     :: NodeType
                   , _n_name     :: Text
                   , _n_data     :: Value
                   , _n_children :: [Node']
                   } deriving (Show)


type NodeWriteT = ( Maybe (Column PGInt4)
                  , Column PGInt4, Column PGInt4
                  , Column PGInt4, Column PGText
                  , Maybe (Column PGTimestamptz)
                  , Column PGJsonb
                  )


mkNode' :: [NodeWriteT] -> Cmd Int64
mkNode' ns = mkCmd $ \conn -> runInsertMany conn nodeTable' ns

mkNodeR' :: [NodeWriteT] -> Cmd [Int]
mkNodeR' ns = mkCmd $ \conn -> runInsertManyReturning conn nodeTable' ns (\(i,_,_,_,_,_,_) -> i)

data NewNode = NewNode { _newNodeId       :: Int
                       , _newNodeChildren :: [Int] }

-- | postNode
postNode :: UserId -> ParentId -> Node' -> Cmd NewNode
postNode uid pid (Node' nt txt v []) = do
  pids <- mkNodeR' [node2table uid pid (Node' nt txt v [])]
  case pids of
    [pid] -> pure $ NewNode pid []
    _     -> panic "postNode: only one pid expected"

postNode uid pid (Node' NodeCorpus txt v ns) = do
  NewNode pid' _ <- postNode uid pid (Node' NodeCorpus txt v [])
  pids           <- mkNodeR' (map (childWith uid pid') ns)
  pure $ NewNode pid' pids

postNode uid pid (Node' Annuaire txt v ns) = do
  NewNode pid' _ <- postNode uid pid (Node' Annuaire txt v [])
  pids           <- mkNodeR' (map (childWith uid pid') ns)
  pure $ NewNode pid' pids
postNode _ _ (Node' _ _ _ _) = panic "TODO: postNode for this type not implemented yet"
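
-- Usage sketch: post a corpus with one document child (assumes @conn@, a user
-- id @1@ and a parent id @pId@; the JSON payloads are only example values):
--
-- > NewNode cId docIds <- runCmd conn $
-- >   postNode 1 pId (Node' NodeCorpus "corpus" (object []) [ Node' Document "doc" (object []) [] ])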


childWith :: UserId -> ParentId -> Node' -> NodeWriteT
childWith uId pId (Node' Document txt v []) = node2table uId pId (Node' Document txt v [])
childWith uId pId (Node' UserPage txt v []) = node2table uId pId (Node' UserPage txt v [])
childWith _   _   (Node' _        _   _ _ ) = panic "This NodeType cannot be a child"