-- Source: gargantext.git — src/Gargantext/API/Node.hs
-- (web-viewer export; commit message: "[FEAT] adding clustering louvain.")
1 {-|
2 Module : Gargantext.API.Node
3 Description : Server API
4 Copyright : (c) CNRS, 2017-Present
5 License : AGPL + CECILL v3
6 Maintainer : team@gargantext.org
7 Stability : experimental
8 Portability : POSIX
9
10 Node API
11 -}
12
13 {-# OPTIONS_GHC -fno-warn-name-shadowing #-}
14
15 {-# LANGUAGE NoImplicitPrelude #-}
16 {-# LANGUAGE DataKinds #-}
17 {-# LANGUAGE TemplateHaskell #-}
18 {-# LANGUAGE TypeOperators #-}
19 {-# LANGUAGE OverloadedStrings #-}
20
21 -------------------------------------------------------------------
22 module Gargantext.API.Node
23 where
24 -------------------------------------------------------------------
25
26 import Control.Monad.IO.Class (liftIO)
27 import Control.Monad ((>>))
28 --import System.IO (putStrLn, readFile)
29
30 -- import Data.Aeson (Value())
31 --import Data.Text (Text(), pack)
32 import Data.Text (Text())
33 import Data.Time (UTCTime)
34
35 import Database.PostgreSQL.Simple (Connection)
36
37 import Servant
38 -- import Servant.Multipart
39
40 import Gargantext.Prelude
41 import Gargantext.Core.Types.Node
42 import Gargantext.Database.Node ( getNodesWithParentId
43 , getNode, getNodesWith
44 , deleteNode, deleteNodes)
45 import Gargantext.Database.Facet (FacetDoc, getDocFacet
46 ,FacetChart)
47
48 -------------------------------------------------------------------
49 -------------------------------------------------------------------
50 -- | Node API Types management
-- | API served at the root of the node tree: list the root nodes
--   ('Get'), plus create / update / delete verbs, each of which
--   replies with an 'Int'.
type Roots = Get '[JSON] [Node HyperdataDocument]
        :<|> Post '[JSON] Int
        :<|> Put '[JSON] Int
        :<|> Delete '[JSON] Int
55
-- | API over a set of nodes at once: currently bulk deletion only,
--   replying with an 'Int'.
type NodesAPI = Delete '[JSON] Int
57
-- | API for one node: fetch it, delete it, list its children
--   (filterable by 'NodeType' and paginated with @offset@/@limit@),
--   and expose the document facet sub-API under @facet/documents@.
type NodeAPI = Get '[JSON] (Node HyperdataDocument)
          :<|> Delete '[JSON] Int
          :<|> "children" :> Summary " Summary children"
                          :> QueryParam "type" NodeType
                          :> QueryParam "offset" Int
                          :> QueryParam "limit" Int
                          :> Get '[JSON] [Node HyperdataDocument]
          :<|> "facet" :> Summary " Facet documents"
                       :> "documents" :> FacetDocAPI
-- Other facets sketched but not wired up yet:
-- :<|> "facet" :<|> "sources" :<|> FacetSourcesAPI
-- :<|> "facet" :<|> "authors" :<|> FacetAuthorsAPI
-- :<|> "facet" :<|> "terms" :<|> FacetTermsAPI
70
71 --data FacetFormat = Table | Chart
72 --data FacetType = Doc | Term | Source | Author
73 --data Facet = Facet Doc Format
74
75
-- | Document-facet sub-API: a paginated table of 'FacetDoc' rows and
--   a chart of 'FacetChart' points bounded by a @from@/@to@ time window.
type FacetDocAPI = "table"
                   :> Summary " Table data"
                   :> QueryParam "offset" Int
                   :> QueryParam "limit" Int
                   :> Get '[JSON] [FacetDoc]

              :<|> "chart"
                   :> Summary " Chart data"
                   :> QueryParam "from" UTCTime
                   :> QueryParam "to" UTCTime
                   :> Get '[JSON] [FacetChart]
87 --
88 -- Depending on the Type of the Node, we could post
89 -- New documents for a corpus
90 -- New map list terms
91 -- :<|> "process" :> MultipartForm MultipartData :> Post '[JSON] Text
92
93 -- To launch a query and update the corpus
94 -- :<|> "query" :> Capture "string" Text :> Get '[JSON] Text
95
96
97 -- | Node API functions
-- | Handlers for the 'Roots' API: listing works; the three write
--   verbs are still placeholders.
roots :: Connection -> Server Roots
roots conn = listRoots
        :<|> notImplemented
        :<|> notImplemented
        :<|> notImplemented
  where
    -- List every node hanging under the root (parent id 0).
    listRoots = liftIO $ putStrLn ("Log Needed" :: Text)
                      >> getNodesWithParentId conn 0 Nothing
    -- Shared placeholder for POST / PUT / DELETE.
    notImplemented :: Handler Int
    notImplemented = pure (panic "not implemented yet")
103
-- | Handlers for the 'NodeAPI' of the node identified by @nid@.
--   (Local renamed from @id@ to avoid shadowing 'Prelude.id'.)
nodeAPI :: Connection -> NodeId -> Server NodeAPI
nodeAPI conn nid =
       fetchNode
  :<|> deleteNode'   conn nid
  :<|> getNodesWith' conn nid
  :<|> getFacet      conn nid
  :<|> getChart      conn nid
  -- :<|> upload
  -- :<|> query
  where
    -- Log, then load the node itself from the database.
    fetchNode = liftIO $ putStrLn ("getNode" :: Text) >> getNode conn nid
112
-- | Handlers for the 'NodesAPI': bulk deletion of the given nodes.
nodesAPI :: Connection -> [NodeId] -> Server NodesAPI
nodesAPI = deleteNodes'
115
-- | Delete several nodes; replies with the affected row count.
deleteNodes' :: Connection -> [NodeId] -> Handler Int
deleteNodes' conn = liftIO . deleteNodes conn
118
-- | Delete one node; replies with the affected row count.
deleteNode' :: Connection -> NodeId -> Handler Int
deleteNode' conn = liftIO . deleteNode conn
121
-- | List the children of node @nid@, optionally filtered by type and
--   paginated by @offset@ / @limit@.
getNodesWith' :: Connection -> NodeId -> Maybe NodeType -> Maybe Int -> Maybe Int
              -> Handler [Node HyperdataDocument]
getNodesWith' conn nid nodeType offset limit =
  liftIO $ getNodesWith conn nid nodeType offset limit
125
126
-- | Paginated facet table for the documents of node @nid@;
--   the facet type is fixed to 'Document' here.
getFacet :: Connection -> NodeId -> Maybe Int -> Maybe Int
         -> Handler [FacetDoc]
getFacet conn nid offset limit =
  liftIO $ getDocFacet conn nid (Just Document) offset limit
130
-- | Chart data for a node over a @from@/@to@ time window.
--
-- TODO not implemented yet. Replaced the bare 'undefined' with the
-- module's own placeholder convention (cf. 'roots'), so a caller
-- hitting this endpoint gets an explicit, attributable message
-- instead of an anonymous bottom.
getChart :: Connection -> NodeId -> Maybe UTCTime -> Maybe UTCTime
         -> Handler [FacetChart]
getChart _ _ _ _ = pure (panic "getChart: not implemented yet")
134
135
-- | Echo the query string back (stub for launching a corpus-updating
--   query; see the commented @"query"@ route above).
query :: Text -> Handler Text
query = pure
138
139
140 -- | Upload files
141 -- TODO Is it possible to adapt the function according to iValue input ?
142 --upload :: MultipartData -> Handler Text
143 --upload multipartData = do
144 -- liftIO $ do
145 -- putStrLn "Inputs:"
146 -- forM_ (inputs multipartData) $ \input ->
147 -- putStrLn $ " " <> show (iName input)
148 -- <> " -> " <> show (iValue input)
149 --
150 -- forM_ (files multipartData) $ \file -> do
151 -- content <- readFile (fdFilePath file)
152 -- putStrLn $ "Content of " <> show (fdFileName file)
153 -- <> " at " <> fdFilePath file
154 -- putStrLn content
155 -- pure (pack "Data loaded")
156