{-|
Module      : Gargantext.Database.Node
Description : Main requests of Node to the database
Copyright   : (c) CNRS, 2017-Present
License     : AGPL + CECILL v3
Maintainer  : team@gargantext.org
Stability   : experimental
Portability : POSIX
-}

{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

{-# LANGUAGE Arrows #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}

module Gargantext.Database.Node where


import GHC.Int (Int64)
import Data.Maybe
import Data.Time (UTCTime)
import Database.PostgreSQL.Simple.FromField ( Conversion
                                            , ResultError(ConversionFailed)
                                            , FromField
                                            , fromField
                                            , returnError
                                            )
import Prelude hiding (null, id, map, sum)

import Gargantext.Core.Types
import Gargantext.Core.Types.Node (NodeType)
import Gargantext.Database.Queries
import Gargantext.Prelude hiding (sum)


import Database.PostgreSQL.Simple.Internal (Field)
import Control.Arrow (returnA)
import Control.Lens.TH (makeLensesWith, abbreviatedFields)
import Data.Aeson
import Data.Maybe (Maybe, fromMaybe)
import Data.Text (Text, pack)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.Typeable (Typeable)

import qualified Data.ByteString as DB
import qualified Data.ByteString.Lazy as DBL
import Data.ByteString (ByteString)

import Database.PostgreSQL.Simple (Connection)
import Opaleye hiding (FromField)
import Opaleye.Internal.QueryArr (Query)
import qualified Data.Profunctor.Product as PP

-- | Types for Node Database Management
data PGTSVector


instance FromField HyperdataCorpus where
    fromField = fromField'

instance FromField HyperdataDocument where
    fromField = fromField'

instance FromField HyperdataProject where
    fromField = fromField'

instance FromField HyperdataUser where
    fromField = fromField'


instance QueryRunnerColumnDefault PGJsonb HyperdataDocument where
    queryRunnerColumnDefault = fieldQueryRunnerColumn

instance QueryRunnerColumnDefault PGJsonb HyperdataCorpus where
    queryRunnerColumnDefault = fieldQueryRunnerColumn

instance QueryRunnerColumnDefault PGJsonb HyperdataProject where
    queryRunnerColumnDefault = fieldQueryRunnerColumn

instance QueryRunnerColumnDefault PGJsonb HyperdataUser where
    queryRunnerColumnDefault = fieldQueryRunnerColumn

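-- | Shared parser behind the 'FromField' instances above: read the column
-- as JSON with Aeson, then decode it into the expected Hyperdata type,
-- failing the conversion if the payload does not parse.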
fromField' :: (Typeable b, FromJSON b) => Field -> Maybe DB.ByteString -> Conversion b
fromField' field mb = do
    v <- fromField field mb
    valueToHyperdata v
  where
    valueToHyperdata v = case fromJSON v of
      Success a  -> pure a
      Error _err -> returnError ConversionFailed field "cannot parse hyperdata"


$(makeAdaptorAndInstance "pNode" ''NodePoly)
$(makeLensesWith abbreviatedFields ''NodePoly)

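-- | Opaleye description of the "nodes" table, built from the polymorphic
-- 'NodePoly' record; "id" and "date" are optional so the database can fill
-- them in on insert.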
nodeTable :: Table NodeWrite NodeRead
nodeTable = Table "nodes" (pNode Node { node_id        = optional "id"
                                      , node_typename  = required "typename"
                                      , node_userId    = required "user_id"
                                      , node_parentId  = required "parent_id"
                                      , node_name      = required "name"
                                      , node_date      = optional "date"
                                      , node_hyperdata = required "hyperdata"
                                   -- , node_titleAbstract = optional "title_abstract"
                                      }
                          )

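-- | Tuple-based description of the same "nodes" table, used by the raw
-- insert helpers 'mkNode', 'mkNodeR', 'mkNode'' and 'mkNodeR''.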
nodeTable' :: Table (Maybe (Column PGInt4)
                    , Column PGInt4
                    , Column PGInt4
                    , Column PGInt4
                    , Column PGText
                    , Maybe (Column PGTimestamptz)
                    , Column PGJsonb
                    )
                    ( Column PGInt4
                    , Column PGInt4
                    , Column PGInt4
                    , Column PGInt4
                    , Column PGText
                    , Column PGTimestamptz
                    , Column PGJsonb
                    )

nodeTable' = Table "nodes" (PP.p7 ( optional "id"
                                  , required "typename"
                                  , required "user_id"
                                  , required "parent_id"
                                  , required "name"
                                  , optional "date"
                                  , required "hyperdata"
                                  )
                           )

queryNodeTable :: Query NodeRead
queryNodeTable = queryTable nodeTable

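-- | Select a single node by its id.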
selectNode :: Column PGInt4 -> Query NodeRead
selectNode id = proc () -> do
    row <- queryNodeTable -< ()
    restrict -< node_id row .== id
    returnA -< row

runGetNodes :: Connection -> Query NodeRead -> IO [Node Value]
runGetNodes = runQuery

-- | Select the children of a parent node, optionally filtered by 'NodeType',
-- with offset/limit paging. Results are currently ordered by node id;
-- TODO: order by publication date, handle Favorites (Bool) and node_ngrams.
selectNodesWith :: ParentId -> Maybe NodeType
                -> Maybe Offset -> Maybe Limit -> Query NodeRead
selectNodesWith parentId maybeNodeType maybeOffset maybeLimit =
  --offset' maybeOffset $ limit' maybeLimit $ orderBy (asc (hyperdataDocument_Publication_date . node_hyperdata)) $ selectNodesWith' parentId typeId
  limit' maybeLimit $ offset' maybeOffset $ orderBy (asc node_id) $ selectNodesWith' parentId maybeNodeType

selectNodesWith' :: ParentId -> Maybe NodeType -> Query NodeRead
selectNodesWith' parentId maybeNodeType = proc () -> do
    node <- (proc () -> do
        row@(Node _ typeId _ parentId' _ _ _) <- queryNodeTable -< ()
        restrict -< parentId' .== (toNullable $ pgInt4 parentId)

        let typeId' = maybe 0 nodeTypeId maybeNodeType

        restrict -< if typeId' > 0
                       then typeId .== (pgInt4 (typeId' :: Int))
                       else (pgBool True)
        returnA -< row) -< ()
    returnA -< node

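-- | Delete a node by id; returns the number of deleted rows.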
deleteNode :: Connection -> Int -> IO Int
deleteNode conn n = fromIntegral <$> runDelete conn nodeTable
                      (\(Node n_id _ _ _ _ _ _) -> n_id .== pgInt4 n)

deleteNodes :: Connection -> [Int] -> IO Int
deleteNodes conn ns = fromIntegral <$> runDelete conn nodeTable
                        (\(Node n_id _ _ _ _ _ _) -> in_ (map pgInt4 ns) n_id)

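-- | Fetch the children of a parent node, optionally filtered by type,
-- with offset/limit paging (see 'selectNodesWith').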
getNodesWith :: Connection -> Int -> Maybe NodeType
             -> Maybe Offset -> Maybe Limit -> IO [Node Value]
getNodesWith conn parentId nodeType maybeOffset maybeLimit =
    runQuery conn $ selectNodesWith
                      parentId nodeType maybeOffset maybeLimit

-- NP check type
getNodesWithParentId :: Connection -> Int
                     -> Maybe Text -> IO [Node Value]
getNodesWithParentId conn n _ = runQuery conn $ selectNodesWithParentID n

getNodesWithParentId' :: Connection -> Int
                      -> Maybe Text -> IO [Node Value]
getNodesWithParentId' conn n _ = runQuery conn $ selectNodesWithParentID n

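-- | Select nodes by parent id; a non-positive id selects the root nodes
-- (those whose parent_id is NULL).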
selectNodesWithParentID :: Int -> Query NodeRead
selectNodesWithParentID n = proc () -> do
    row@(Node _ _ _ parent_id _ _ _) <- queryNodeTable -< ()
    restrict -< if n > 0
                  then
                    parent_id .== (toNullable $ pgInt4 n)
                  else
                    isNull parent_id
    returnA -< row


selectNodesWithType :: Column PGInt4 -> Query NodeRead
selectNodesWithType type_id = proc () -> do
    row@(Node _ tn _ _ _ _ _) <- queryNodeTable -< ()
    restrict -< tn .== type_id
    returnA -< row

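-- | Fetch a single node by id; currently calls 'error' ("TODO: 404") when
-- the node does not exist.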
getNode' :: Connection -> Int -> IO (Node Value)
getNode' c id = do
    fromMaybe (error "TODO: 404") . headMay <$> runQuery c (limit 1 $ selectNode (pgInt4 id))


getNode :: Connection -> Int -> IO (Node Value)
getNode conn id = do
    fromMaybe (error "TODO: 404") . headMay <$> runQuery conn (limit 1 $ selectNode (pgInt4 id))

getNodesWithType :: Connection -> Column PGInt4 -> IO [Node HyperdataDocument]
getNodesWithType conn type_id = do
    runQuery conn $ selectNodesWithType type_id

type UserId = NodeId
type TypeId = Int

------------------------------------------------------------------------
-- Quick and dirty
------------------------------------------------------------------------
type NodeWrite' = NodePoly (Maybe Int) Int Int ParentId Text (Maybe UTCTime) ByteString

--node :: UserId -> ParentId -> NodeType -> Text -> Value -> NodeWrite'
node :: UserId -> ParentId -> NodeType -> Text -> Value -> NodeWrite'
node userId parentId nodeType name nodeData = Node Nothing typeId userId parentId name Nothing byteData
  where
    typeId   = nodeTypeId nodeType
    byteData = DB.pack $ DBL.unpack $ encode nodeData


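-- | Convert a 'NodeWrite'' into the column tuple expected by 'nodeTable'',
-- overriding its parent id with the one given as first argument.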
node2write :: (Functor f2, Functor f1)
           => Int
           -> NodePoly (f1 Int) Int Int parentId Text (f2 UTCTime) ByteString
           -> ( f1 (Column PGInt4), Column PGInt4, Column PGInt4
              , Column PGInt4, Column PGText, f2 (Column PGTimestamptz)
              , Column PGJsonb
              )
node2write pid (Node id tn ud _ nm dt hp) = ( pgInt4 <$> id
                                            , pgInt4 tn
                                            , pgInt4 ud
                                            , pgInt4 pid
                                            , pgStrictText nm
                                            , pgUTCTime <$> dt
                                            , pgStrictJSONB hp
                                            )

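-- | Insert a list of 'NodeWrite'' under the given parent. 'mkNode' returns
-- the number of inserted rows, 'mkNodeR' the ids of the new nodes.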
mkNode :: Connection -> ParentId -> [NodeWrite'] -> IO Int64
mkNode conn pid ns = runInsertMany conn nodeTable' $ map (node2write pid) ns

mkNodeR :: Connection -> ParentId -> [NodeWrite'] -> IO [Int]
mkNodeR conn pid ns = runInsertManyReturning conn nodeTable' (map (node2write pid) ns) (\(i,_,_,_,_,_,_) -> i)

------------------------------------------------------------------------
-- TODO Hierarchy of Nodes
-- post and get same types Node' and update if changes

{- TODO semantic to achieve
post c uid pid [ Node' Corpus "name" "{}" []
               , Node' Folder "name" "{}" [ Node' Corpus "test 2" "" [ Node' Document "title" "metaData" []
                                                                     , Node' Document "title" "jsonData" []
                                                                     ]
                                          ]
               ]
-}
------------------------------------------------------------------------

-- TODO
-- currently this function drops the child relation:
-- it needs a temporary type between Node' and NodeWriteT
node2table :: UserId -> ParentId -> Node' -> [NodeWriteT]
node2table uid pid (Node' nt txt v []) = [( Nothing, (pgInt4 $ nodeTypeId nt), (pgInt4 uid), (pgInt4 pid)
                                          , pgStrictText txt, Nothing, pgStrictJSONB $ DB.pack $ DBL.unpack $ encode v)]
node2table _ _ (Node' _ _ _ _) = panic $ pack "node2table: should not happen, Tree insert not implemented yet"

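-- | Client-side representation of a node to be posted: its type, name,
-- JSON payload and children.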
data Node' = Node' { _n_type     :: NodeType
                   , _n_name     :: Text
                   , _n_data     :: Value
                   , _n_children :: [Node']
                   } deriving (Show)

type NodeWriteT = ( Maybe (Column PGInt4)
                  , Column PGInt4, Column PGInt4
                  , Column PGInt4, Column PGText
                  , Maybe (Column PGTimestamptz)
                  , Column PGJsonb
                  )

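-- | Raw inserts of already-converted rows: 'mkNode'' returns the number of
-- inserted rows, 'mkNodeR'' the ids of the new nodes.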
mkNode' :: Connection -> [NodeWriteT] -> IO Int64
mkNode' conn ns = runInsertMany conn nodeTable' ns

mkNodeR' :: Connection -> [NodeWriteT] -> IO [Int]
mkNodeR' conn ns = runInsertManyReturning conn nodeTable' ns (\(i,_,_,_,_,_,_) -> i)

-- | Insert a 'Node'' under the given parent, for the given user. For Corpus
-- and Annuaire nodes the direct children are inserted as well (the returned
-- ids are then those of the children). Other node types with children are
-- not supported yet.
postNode :: Connection -> UserId -> ParentId -> Node' -> IO [Int]
postNode c uid pid (Node' nt txt v []) = mkNodeR' c (node2table uid pid (Node' nt txt v []))

postNode c uid pid (Node' Corpus txt v ns) = do
    [pid'] <- postNode c uid pid (Node' Corpus txt v [])
    pids   <- mkNodeR' c $ concat $ map (\n -> childWith uid pid' n) ns
    pure pids

postNode c uid pid (Node' Annuaire txt v ns) = do
    [pid'] <- postNode c uid pid (Node' Annuaire txt v [])
    pids   <- mkNodeR' c $ concat $ map (\n -> childWith uid pid' n) ns
    pure pids

postNode _ _ _ (Node' _ _ _ _) = panic $ pack "postNode for this type not implemented yet"
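
-- A minimal usage sketch (not part of the original module): assuming an open
-- connection 'conn' and plain-Int user and parent ids, this creates a Corpus
-- node named "my corpus" with an empty JSON payload and no children:
--
-- > newIds <- postNode conn 1 1 (Node' Corpus (pack "my corpus") (object []) [])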

childWith :: UserId -> ParentId -> Node' -> [NodeWriteT]
childWith uId pId (Node' Document txt v []) = node2table uId pId (Node' Document txt v [])
childWith uId pId (Node' UserPage txt v []) = node2table uId pId (Node' UserPage txt v [])
childWith _   _   (Node' _ _ _ _)           = panic $ pack "This NodeType cannot be a child"