{-|
Module      : Gargantext.API.Admin.Settings
Description : Settings of the API (Server and Client)
Copyright   : (c) CNRS, 2017-Present
License     : AGPL + CECILL v3
Maintainer  : team@gargantext.org
Stability   : experimental
Portability : POSIX

TODO-SECURITY: Critical
-}

{-# LANGUAGE BangPatterns        #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell     #-}

module Gargantext.API.Admin.Settings
  where

-- import Control.Debounce (mkDebounce, defaultDebounceSettings, debounceFreq, debounceAction)
import Codec.Serialise (Serialise(), serialise)
import Control.Lens
import Control.Monad.Logger (LogLevel(..))
import Control.Monad.Reader
import Data.Maybe (fromMaybe)
import Data.Pool (Pool, createPool)
import Database.PostgreSQL.Simple (Connection, connect, close, ConnectInfo)
import Gargantext.Core.NodeStory
import Gargantext.Prelude.Config ({-GargConfig(..),-} {-gc_repofilepath,-} readConfig)
import Network.HTTP.Client.TLS (newTlsManager)
import Servant.Auth.Server (defaultJWTSettings, CookieSettings(..), XsrfCookieSettings(..), defaultCookieSettings, defaultXsrfCookieSettings, readKey, writeKey)
import Servant.Client (parseBaseUrl)
import Servant.Job.Async (newJobEnv, defaultSettings)
import System.Directory
-- import System.FileLock (tryLockFile, unlockFile, SharedExclusive(Exclusive))
import System.IO (FilePath, hClose)
import System.IO.Temp (withTempFile)
import System.Log.FastLogger
import qualified Data.ByteString.Lazy as L

import Gargantext.API.Admin.EnvTypes
import Gargantext.API.Admin.Types
-- import Gargantext.API.Ngrams.Types (NgramsRepo, HasRepo(..), RepoEnv(..), r_version, initRepo, renv_var, renv_lock)
import Gargantext.Core.NLP (nlpServerMap)
import Gargantext.Database.Prelude (databaseParameters, hasConfig)
import Gargantext.Prelude
import Gargantext.Prelude.Config (gc_js_job_timeout, gc_js_id_timeout)
import qualified Gargantext.Prelude.Mail as Mail
import qualified Gargantext.Prelude.NLP as NLP
import qualified Gargantext.Utils.Jobs as Jobs
import qualified Gargantext.Utils.Jobs.Monad as Jobs
import qualified Gargantext.Utils.Jobs.Queue as Jobs
import qualified Gargantext.Utils.Jobs.Settings as Jobs

devSettings :: FilePath -> IO Settings
devSettings jwkFile = do
  jwkExists <- doesFileExist jwkFile
  when (not jwkExists) $ writeKey jwkFile
  jwk <- readKey jwkFile
  pure $ Settings
    { _allowedOrigin = "http://localhost:8008"
    , _allowedHost = "localhost:3000"
    , _appPort = 3000
    , _logLevelLimit = LevelDebug
    -- , _dbServer = "localhost"
    , _sendLoginEmails = LogEmailToConsole
    , _scrapydUrl = fromMaybe (panic "Invalid scrapy URL") $ parseBaseUrl "http://localhost:6800"
    , _cookieSettings = defaultCookieSettings { cookieXsrfSetting = Just xsrfCookieSetting } -- TODO-SECURITY tune
    , _jwtSettings = defaultJWTSettings jwk -- TODO-SECURITY tune
    }
  where
    xsrfCookieSetting = defaultXsrfCookieSettings { xsrfExcludeGet = True }

{- NOT USED YET
import System.Environment (lookupEnv)

reqSetting :: FromHttpApiData a => Text -> IO a
reqSetting name = do
  e <- fromMaybe (panic $ "Missing " <> name) <$> lookupEnv (unpack name)
  pure $ either (panic $ "Unable to parse " <> name) identity $ parseUrlPiece $ pack e

optSetting :: FromHttpApiData a => Text -> a -> IO a
optSetting name d = do
  me <- lookupEnv (unpack name)
  case me of
    Nothing -> pure d
    Just e  -> pure $ either (panic $ "Unable to parse " <> name) identity $ parseUrlPiece $ pack e

settingsFromEnvironment :: IO Settings
settingsFromEnvironment =
  Settings <$> (encodeUtf8 <$> reqSetting "ALLOWED_ORIGIN")
           <*> (encodeUtf8 <$> reqSetting "ALLOWED_HOST")
           <*> optSetting "PORT" 3000
           <*> (parseLogLevel <$> optSetting "LOG_LEVEL" "warn")
           <*> reqSetting "DB_SERVER"
           <*> (parseJwk <$> reqSetting "JWT_SECRET")
           <*> optSetting "SEND_EMAIL" SendEmailViaAws
-}

-----------------------------------------------------------------------
-- | RepoDir FilePath configuration
type RepoDirFilePath = FilePath

repoSnapshot :: RepoDirFilePath -> FilePath
repoSnapshot repoDir = repoDir <> "/repo.cbor"

-- This assumes we own the lock on repoSnapshot.
repoSaverAction :: Serialise a => RepoDirFilePath -> a -> IO ()
repoSaverAction repoDir a = do
  withTempFile repoDir "tmp-repo.cbor" $ \fp h -> do
    -- printDebug "repoSaverAction" fp
    L.hPut h $ serialise a
    hClose h
    renameFile fp (repoSnapshot repoDir)

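{- Minimal usage sketch (illustration only, not part of this module): callers
   are expected to hold the repository exclusively while saving, e.g. behind an
   MVar, as the commented-out mkRepoSaver below does. `saveRepo` here is a
   hypothetical helper:

     saveRepo :: Serialise a => RepoDirFilePath -> MVar a -> IO ()
     saveRepo repoDir var = withMVar var (repoSaverAction repoDir)

   withMVar keeps the MVar taken while repoSaverAction writes the temporary
   file and renames it over the snapshot, so no concurrent writer can change
   the repo mid-save.
-}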

{-
-- The use of mkDebounce makes sure that repoSaverAction is not called too often.
-- If repoSaverAction starts taking more time than the debounceFreq then it should
-- be increased.
mkRepoSaver :: RepoDirFilePath -> MVar NgramsRepo -> IO (IO ())
mkRepoSaver repoDir repo_var = mkDebounce settings'
  where
    settings' = defaultDebounceSettings
                  { debounceFreq   = let n = 6 :: Int in 10^n -- 1 second
                  , debounceAction = withMVar repo_var (repoSaverAction repoDir)
                    -- Here this is not only `readMVar` but `takeMVar`.
                    -- Namely while repoSaverAction is saving no other change
                    -- can be made to the MVar.
                    -- This might not be efficient and thus be reconsidered later.
                    -- However this enables us to safely perform a *final* save.
                    -- See `cleanEnv`.
                    -- Future work:
                    -- Add a new MVar just for saving.
                  }

-}
{-
readRepoEnv :: FilePath -> IO RepoEnv
readRepoEnv repoDir = do
  -- Does file exist ? :: Bool
  _repoDir <- createDirectoryIfMissing True repoDir

  repoFile <- doesFileExist (repoSnapshot repoDir)

  -- Is file not empty ? :: Bool
  repoExists <- if repoFile
                  then (>0) <$> getFileSize (repoSnapshot repoDir)
                  else pure False

  mlock <- tryLockFile (repoSnapshot repoDir) Exclusive
  lock  <- maybe (panic "Repo file already locked") pure mlock

  mvar <- newMVar =<<
    if repoExists
      then do
        -- e_repo <- eitherDecodeStrict <$> deserialise <$> L.readFile repoSnapshot
        repo <- deserialise <$> L.readFile (repoSnapshot repoDir)
        -- repo <- either fail pure e_repo
        let archive = (repoSnapshot repoDir) <> ".v" <> show (repo ^. r_version)
        copyFile (repoSnapshot repoDir) archive
        pure repo
      else
        pure initRepo
  -- TODO save in DB here
  saver <- mkRepoSaver repoDir mvar
  pure $ RepoEnv { _renv_var = mvar, _renv_saver = saver, _renv_lock = lock }
--}

devJwkFile :: FilePath
devJwkFile = "dev.jwk"

newEnv :: PortNumber -> FilePath -> IO Env
newEnv port file = do
  !manager_env <- newTlsManager
  !settings'   <- devSettings devJwkFile <&> appPort .~ port -- TODO read from 'file'
  when (port /= settings' ^. appPort) $
    panic "TODO: conflicting settings of port"

  !config_env <- readConfig file
  prios       <- Jobs.readPrios (file <> ".jobs")
  let prios' = Jobs.applyPrios prios Jobs.defaultPrios
  putStrLn $ "Overrides: " <> show prios
  putStrLn $ "New priorities: " <> show prios'
  !self_url_env <- parseBaseUrl $ "http://0.0.0.0:" <> show port
  dbParam       <- databaseParameters file
  !pool         <- newPool dbParam
  --nodeStory_env <- readNodeStoryEnv (_gc_repofilepath config_env)
  !nodeStory_env <- readNodeStoryEnv pool
  !scrapers_env  <- newJobEnv defaultSettings manager_env

  secret <- Jobs.genSecret
  let jobs_settings = (Jobs.defaultJobSettings 1 secret)
                        & Jobs.l_jsJobTimeout .~ (fromIntegral $ config_env ^. hasConfig ^. gc_js_job_timeout)
                        & Jobs.l_jsIDTimeout  .~ (fromIntegral $ config_env ^. hasConfig ^. gc_js_id_timeout)
  !jobs_env    <- Jobs.newJobEnv jobs_settings prios' manager_env
  !logger      <- newStderrLoggerSet defaultBufSize
  !config_mail <- Mail.readConfig file
  !nlp_env     <- nlpServerMap <$> NLP.readConfig file

  {- An 'Env' by default doesn't have strict fields, but when constructing one in production
     we want to force them to WHNF to avoid accumulating unnecessary thunks.
  -}
  pure $ Env
    { _env_settings  = settings'
    , _env_logger    = logger
    , _env_pool      = pool
    , _env_nodeStory = nodeStory_env
    , _env_manager   = manager_env
    , _env_scrapers  = scrapers_env
    , _env_jobs      = jobs_env
    , _env_self_url  = self_url_env
    , _env_config    = config_env
    , _env_mail      = config_mail
    , _env_nlp       = nlp_env
    }

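{- Illustration only (not part of the module's logic): with BangPatterns, a
   strict bind such as

     !config_env <- readConfig file

   in the do-block above evaluates the bound value to weak head normal form
   (its outermost constructor) as soon as it is produced, instead of storing an
   unevaluated thunk in the Env record.
-}
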
newPool :: ConnectInfo -> IO (Pool Connection)
newPool param = createPool (connect param) close 1 (60*60) 8
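
{- The positional arguments to createPool above are: how to open a connection,
   how to close it, the number of stripes (1), how long an unused connection is
   kept open (60*60 seconds, i.e. one hour), and the maximum number of open
   connections per stripe (8). A minimal usage sketch (illustration only;
   `withDbConnection` is a hypothetical helper, not part of this module):

     withDbConnection :: Pool Connection -> (Connection -> IO a) -> IO a
     withDbConnection pool action = Data.Pool.withResource pool action

   withResource borrows a connection from the pool for the duration of the
   action and returns it afterwards (destroying it if the action throws).
-}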

{-
cleanEnv :: (HasConfig env, HasRepo env) => env -> IO ()
cleanEnv env = do
  r <- takeMVar (env ^. repoEnv . renv_var)
  repoSaverAction (env ^. hasConfig . gc_repofilepath) r
  unlockFile (env ^. repoEnv . renv_lock)
--}