{-|
Module      : Gargantext.Database.Node
Description : Main requests of Node to the database
Copyright   : (c) CNRS, 2017-Present
License     : AGPL + CECILL v3
Maintainer  : team@gargantext.org
Stability   : experimental
Portability : POSIX
-}

{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

{-# LANGUAGE Arrows                 #-}
{-# LANGUAGE DeriveGeneric          #-}
{-# LANGUAGE FlexibleInstances      #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses  #-}
{-# LANGUAGE NoImplicitPrelude      #-}
{-# LANGUAGE TemplateHaskell        #-}

module Gargantext.Database.Node where

import GHC.Int (Int64)
import Data.Maybe
import Data.Time (UTCTime)
import Database.PostgreSQL.Simple.FromField ( Conversion
                                            , ResultError(ConversionFailed)
                                            , FromField
                                            , fromField
                                            , returnError
                                            )
import Prelude hiding (null, id, map, sum)
import Data.Time.Segment (jour, timesAfter, Granularity(D))

import Gargantext.Core.Types
import Gargantext.Core.Types.Node (NodeType)
import Gargantext.Database.Queries
import Gargantext.Prelude hiding (sum)

import Database.PostgreSQL.Simple.Internal (Field)
import Control.Arrow (returnA)
import Control.Lens.TH (makeLensesWith, abbreviatedFields)
import Data.Aeson
import Data.Text (Text, pack)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.Typeable (Typeable)

import qualified Data.ByteString as DB
import qualified Data.ByteString.Lazy as DBL
import Data.ByteString (ByteString)

import Database.PostgreSQL.Simple (Connection)
import Opaleye hiding (FromField)
import Opaleye.Internal.QueryArr (Query(..))
import qualified Data.Profunctor.Product as PP

-- | Types for Node Database Management
data PGTSVector

instance FromField HyperdataCorpus where
    fromField = fromField'

instance FromField HyperdataDocument where
    fromField = fromField'

instance FromField HyperdataProject where
    fromField = fromField'

instance FromField HyperdataUser where
    fromField = fromField'


instance QueryRunnerColumnDefault PGJsonb HyperdataDocument where
    queryRunnerColumnDefault = fieldQueryRunnerColumn

instance QueryRunnerColumnDefault PGJsonb HyperdataCorpus where
    queryRunnerColumnDefault = fieldQueryRunnerColumn

instance QueryRunnerColumnDefault PGJsonb HyperdataProject where
    queryRunnerColumnDefault = fieldQueryRunnerColumn

instance QueryRunnerColumnDefault PGJsonb HyperdataUser where
    queryRunnerColumnDefault = fieldQueryRunnerColumn


-- | Decode a JSONB field into any type with a 'FromJSON' instance,
-- reporting a conversion failure when the payload does not parse.
fromField' :: (Typeable b, FromJSON b) => Field -> Maybe DB.ByteString -> Conversion b
fromField' field mb = do
    v <- fromField field mb
    valueToHyperdata v
      where
        valueToHyperdata v = case fromJSON v of
           Success a  -> pure a
           Error _err -> returnError ConversionFailed field "cannot parse hyperdata"

$(makeAdaptorAndInstance "pNode" ''NodePoly)
$(makeLensesWith abbreviatedFields ''NodePoly)


nodeTable :: Table NodeWrite NodeRead
nodeTable = Table "nodes" (pNode Node { node_id        = optional "id"
                                      , node_typename  = required "typename"
                                      , node_userId    = required "user_id"
                                      , node_parentId  = required "parent_id"
                                      , node_name      = required "name"
                                      , node_date      = optional "date"
                                      , node_hyperdata = required "hyperdata"
                                      -- , node_titleAbstract = optional "title_abstract"
                                      }
                          )

nodeTable' :: Table (Maybe (Column PGInt4)
                    ,       Column PGInt4
                    ,       Column PGInt4
                    ,       Column PGInt4
                    ,       Column PGText
                    , Maybe (Column PGTimestamptz)
                    ,       Column PGJsonb
                    )
                    (       Column PGInt4
                    ,       Column PGInt4
                    ,       Column PGInt4
                    ,       Column PGInt4
                    ,       Column PGText
                    ,       Column PGTimestamptz
                    ,       Column PGJsonb
                    )

nodeTable' = Table "nodes" (PP.p7 ( optional "id"
                                  , required "typename"
                                  , required "user_id"
                                  , required "parent_id"
                                  , required "name"
                                  , optional "date"
                                  , required "hyperdata"
                                  )
                           )


queryNodeTable :: Query NodeRead
queryNodeTable = queryTable nodeTable


selectNode :: Column PGInt4 -> Query NodeRead
selectNode id = proc () -> do
    row <- queryNodeTable -< ()
    restrict -< node_id row .== id
    returnA -< row

runGetNodes :: Connection -> Query NodeRead -> IO [Node Value]
runGetNodes = runQuery

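-- Illustrative sketch (not part of the original module): given an already
-- open 'Connection' @conn@, a single node can be fetched by id like this.
-- The id 1 is a placeholder.
--
-- > runGetNodes conn (selectNode (pgInt4 1))
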
-- | Order by publication date
-- Favorites (Bool), node_ngrams
selectNodesWith :: ParentId     -> Maybe NodeType
                -> Maybe Offset -> Maybe Limit -> Query NodeRead
selectNodesWith parentId maybeNodeType maybeOffset maybeLimit =
  --offset' maybeOffset $ limit' maybeLimit $ orderBy (asc (hyperdataDocument_Publication_date . node_hyperdata)) $ selectNodesWith' parentId typeId
  limit' maybeLimit $ offset' maybeOffset $ orderBy (asc node_id) $ selectNodesWith' parentId maybeNodeType

selectNodesWith' :: ParentId -> Maybe NodeType -> Query NodeRead
selectNodesWith' parentId maybeNodeType = proc () -> do
    node <- (proc () -> do
      row@(Node _ typeId _ parentId' _ _ _) <- queryNodeTable -< ()
      restrict -< parentId' .== (toNullable $ pgInt4 parentId)

      let typeId' = maybe 0 nodeTypeId maybeNodeType

      restrict -< if typeId' > 0
                     then typeId .== (pgInt4 (typeId' :: Int))
                     else (pgBool True)
      returnA -< row ) -< ()
    returnA -< node


deleteNode :: Connection -> Int -> IO Int
deleteNode conn n = fromIntegral <$> runDelete conn nodeTable
                      (\(Node n_id _ _ _ _ _ _) -> n_id .== pgInt4 n)

deleteNodes :: Connection -> [Int] -> IO Int
deleteNodes conn ns = fromIntegral <$> runDelete conn nodeTable
                        (\(Node n_id _ _ _ _ _ _) -> in_ (map pgInt4 ns) n_id)

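-- Illustrative sketch (not part of the original module): deleting nodes by
-- id over an open 'Connection' @conn@; the ids are placeholders.
--
-- > deleteNode  conn 42
-- > deleteNodes conn [42, 43]
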

getNodesWith :: Connection   -> Int         -> Maybe NodeType
             -> Maybe Offset -> Maybe Limit -> IO [Node HyperdataDocument]
getNodesWith conn parentId nodeType maybeOffset maybeLimit =
    runQuery conn $ selectNodesWith
                    parentId nodeType maybeOffset maybeLimit

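-- Illustrative sketch (not part of the original module): listing the first
-- ten Document children of a parent node over an open 'Connection' @conn@.
-- The parent id is a placeholder, and Offset/Limit are assumed to be plain
-- Int synonyms from Gargantext.Database.Queries.
--
-- > getNodesWith conn 1 (Just Document) (Just 0) (Just 10)
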

-- NP check type
getNodesWithParentId :: Connection -> Int
                     -> Maybe Text -> IO [Node HyperdataDocument]
getNodesWithParentId conn n _ = runQuery conn $ selectNodesWithParentID n

getNodesWithParentId' :: Connection -> Int
                      -> Maybe Text -> IO [Node Value]
getNodesWithParentId' conn n _ = runQuery conn $ selectNodesWithParentID n


selectNodesWithParentID :: Int -> Query NodeRead
selectNodesWithParentID n = proc () -> do
    row@(Node _ _ _ parent_id _ _ _) <- queryNodeTable -< ()
    restrict -< if n > 0
                   then parent_id .== (toNullable $ pgInt4 n)
                   else isNull parent_id
    returnA -< row


selectNodesWithType :: Column PGInt4 -> Query NodeRead
selectNodesWithType type_id = proc () -> do
    row@(Node _ tn _ _ _ _ _) <- queryNodeTable -< ()
    restrict -< tn .== type_id
    returnA -< row

getNode' :: Connection -> Int -> IO (Node Value)
getNode' c id = do
    fromMaybe (error "TODO: 404") . headMay <$> runQuery c (limit 1 $ selectNode (pgInt4 id))


getNode :: Connection -> Int -> IO (Node HyperdataDocument)
getNode conn id = do
    fromMaybe (error "TODO: 404") . headMay <$> runQuery conn (limit 1 $ selectNode (pgInt4 id))

getNodesWithType :: Connection -> Column PGInt4 -> IO [Node HyperdataDocument]
getNodesWithType conn type_id = do
    runQuery conn $ selectNodesWithType type_id

type UserId = NodeId
type TypeId = Int


------------------------------------------------------------------------
-- Quick and dirty
------------------------------------------------------------------------
type NodeWrite' = NodePoly (Maybe Int) Int Int ParentId Text (Maybe UTCTime) ByteString

node :: UserId -> ParentId -> NodeType -> Text -> Value -> NodeWrite'
node userId parentId nodeType name nodeData = Node Nothing typeId userId parentId name Nothing byteData
    where
        typeId   = nodeTypeId nodeType
        byteData = DB.pack $ DBL.unpack $ encode nodeData

-- Converts a 'NodeWrite'' into the tuple of Postgres columns expected by
-- 'nodeTable'', substituting the given parent id for the node's own.
node2write pid (Node id tn ud _ nm dt hp) = ( pgInt4 <$> id
                                            , pgInt4 tn
                                            , pgInt4 ud
                                            , pgInt4 pid
                                            , pgStrictText nm
                                            , pgUTCTime <$> dt
                                            , pgStrictJSONB hp
                                            )


mkNode :: Connection -> ParentId -> [NodeWrite'] -> IO Int64
mkNode conn pid ns = runInsertMany conn nodeTable' $ map (node2write pid) ns

mkNodeR :: Connection -> ParentId -> [NodeWrite'] -> IO [Int]
mkNodeR conn pid ns = runInsertManyReturning conn nodeTable' (map (node2write pid) ns) (\(i,_,_,_,_,_,_) -> i)

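-- Illustrative sketch (not part of the original module): building a corpus
-- row with 'node' and inserting it under a parent, returning the new ids.
-- @conn@, @userId@ and @parentId@ are placeholders, and the JSON payload is
-- left empty.
--
-- > mkNodeR conn parentId [node userId parentId Corpus (pack "My corpus") (object [])]
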

------------------------------------------------------------------------
-- TODO Hierarchy of Nodes
-- post and get same types Node' and update if changes

{- TODO semantic to achieve
post c uid pid [ Node' Corpus "name" "{}" []
               , Node' Folder "name" "{}" [ Node' Corpus "test 2" "" [ Node' Document "title" "metaData" []
                                                                     , Node' Document "title" "jsonData" []
                                                                     ]
                                          ]
               ]
-}
------------------------------------------------------------------------

-- TODO
-- currently this function drops the child relation;
-- it needs a temporary type between Node' and NodeWriteT
node2table :: UserId -> ParentId -> Node' -> [NodeWriteT]
node2table uid pid (Node' nt txt v []) = [( Nothing, (pgInt4 $ nodeTypeId nt), (pgInt4 uid), (pgInt4 pid)
                                          , pgStrictText txt, Nothing, pgStrictJSONB $ DB.pack $ DBL.unpack $ encode v)]
node2table _ _ (Node' _ _ _ _) = panic $ pack "node2table: should not happen, Tree insert not implemented yet"


data Node' = Node' { _n_type     :: NodeType
                   , _n_name     :: Text
                   , _n_data     :: Value
                   , _n_children :: [Node']
                   } deriving (Show)


type NodeWriteT = ( Maybe (Column PGInt4)
                  ,        Column PGInt4
                  ,        Column PGInt4
                  ,        Column PGInt4
                  ,        Column PGText
                  , Maybe (Column PGTimestamptz)
                  ,        Column PGJsonb
                  )


mkNode' :: Connection -> [NodeWriteT] -> IO Int64
mkNode' conn ns = runInsertMany conn nodeTable' ns

mkNodeR' :: Connection -> [NodeWriteT] -> IO [Int]
mkNodeR' conn ns = runInsertManyReturning conn nodeTable' ns (\(i,_,_,_,_,_,_) -> i)

postNode :: Connection -> UserId -> ParentId -> Node' -> IO [Int]
postNode c uid pid (Node' nt txt v []) = mkNodeR' c (node2table uid pid (Node' nt txt v []))
postNode c uid pid (Node' Corpus txt v ns) = do
    [pid'] <- postNode c uid pid (Node' Corpus txt v [])
    pids   <- mkNodeR' c $ concat $ map (\(Node' Document txt v _) -> node2table uid pid' $ Node' Document txt v []) ns
    pure pids
postNode c uid pid (Node' Annuaire txt v ns) = do
    [pid'] <- postNode c uid pid (Node' Annuaire txt v [])
    pids   <- mkNodeR' c $ concat $ map (\(Node' UserPage txt v _) -> node2table uid pid' $ Node' UserPage txt v []) ns
    pure pids
postNode c uid pid (Node' _ _ _ _) = panic $ pack "postNode for this type not implemented yet"

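-- Illustrative sketch (not part of the original module): posting a corpus
-- node with two document children, matching the Corpus clause above.
-- @conn@, @userId@, @parentId@ and the empty JSON payloads are placeholders.
--
-- > postNode conn userId parentId
-- >   (Node' Corpus (pack "my corpus") (object [])
-- >     [ Node' Document (pack "doc 1") (object []) []
-- >     , Node' Document (pack "doc 2") (object []) []
-- >     ])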