From 55e8f39b0a258f6e098ec0e0fbcb45b55b67bc72 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Fri, 6 Dec 2019 18:40:18 +0530 Subject: [PATCH 001/195] init --- server/graphql-engine.cabal | 5 + server/src-lib/Hasura/App.hs | 3 + server/src-lib/Hasura/Events/Timed.hs | 137 ++++++++++++++++++ server/src-lib/Hasura/RQL/DDL/TimedTrigger.hs | 38 +++++ server/src-lib/Hasura/SQL/Types.hs | 7 +- server/src-lib/Hasura/Server/Query.hs | 6 + server/src-rsr/initialise.sql | 23 +++ 7 files changed, 217 insertions(+), 2 deletions(-) create mode 100644 server/src-lib/Hasura/Events/Timed.hs create mode 100644 server/src-lib/Hasura/RQL/DDL/TimedTrigger.hs diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index 65a348bc5c452..550d53f95e318 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -181,6 +181,9 @@ library -- caching , psqueues >= 0.2 + -- scheduled triggers + , cron + exposed-modules: Control.Monad.Stateless , Hasura.Prelude @@ -255,6 +258,7 @@ library , Hasura.RQL.DDL.Schema.Table , Hasura.RQL.DDL.Utils , Hasura.RQL.DDL.EventTrigger + , Hasura.RQL.DDL.TimedTrigger , Hasura.RQL.DDL.Headers , Hasura.RQL.DDL.RemoteSchema , Hasura.RQL.DDL.QueryCollection @@ -317,6 +321,7 @@ library , Hasura.Events.Lib , Hasura.Events.HTTP + , Hasura.Events.Timed , Control.Concurrent.Extended , Control.Lens.Extended diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index dc7515e49c36c..e2768f98bc442 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -30,6 +30,7 @@ import qualified Text.Mustache.Compile as M import Hasura.Db import Hasura.EncJSON import Hasura.Events.Lib +import Hasura.Events.Timed import Hasura.Logging import Hasura.Prelude import Hasura.RQL.DDL.Schema.Cache @@ -252,6 +253,8 @@ runHGEServer ServeOptions{..} InitCtx{..} initTime = do void $ liftIO $ C.forkIO $ processEventQueue logger logEnvHeaders _icHttpManager _icPgPool scRef eventEngineCtx + void $ liftIO $ C.forkIO $ 
runScheduledEventsGenerator _icPgPool -- logger logEnvHeaders + void $ liftIO $ C.forkIO $ processScheduledQueue _icPgPool -- logger logEnvHeaders -- start a background thread to check for updates void $ liftIO $ C.forkIO $ checkForUpdates loggerCtx _icHttpManager diff --git a/server/src-lib/Hasura/Events/Timed.hs b/server/src-lib/Hasura/Events/Timed.hs new file mode 100644 index 0000000000000..729301882f3e9 --- /dev/null +++ b/server/src-lib/Hasura/Events/Timed.hs @@ -0,0 +1,137 @@ +module Hasura.Events.Timed + ( processScheduledQueue + , runScheduledEventsGenerator + ) where + +import Control.Concurrent (threadDelay) +import Control.Concurrent.Async +import Data.Time.Clock +import Data.Time.Format +import Hasura.Prelude +import Hasura.RQL.DDL.TimedTrigger +import Hasura.RQL.Types +import Hasura.SQL.DML +import Hasura.SQL.Types +import System.Cron + +import qualified Data.Text as T +import qualified Database.PG.Query as Q +import qualified Text.Builder as TB (run) + +import Debug.Trace + +scheduledEventsTable :: QualifiedTable +scheduledEventsTable = + QualifiedObject + hdbCatalogSchema + (TableName $ T.pack "hdb_scheduled_trigger_events") + +data ScheduledEvent + = ScheduledEvent + { seName :: !T.Text + , seWebhook :: !T.Text + , seScheduledTime :: !UTCTime + } deriving (Show, Eq) + +runScheduledEventsGenerator :: Q.PGPool -> IO () +runScheduledEventsGenerator pgpool = do + forever $ do + traceM "entering scheduled events generator" + runExceptT + (Q.runTx + pgpool + (Q.RepeatableRead, Just Q.ReadWrite) + generateScheduledEvents) >>= \case + Right _ -> pure () + Left err -> traceShowM err + threadDelay oneHour + where + oneHour = 60 * 60 * 1000000 + +generateScheduledEvents :: Q.TxE QErr () +generateScheduledEvents = do + allSchedules <- map uncurrySchedule <$> Q.listQE defaultTxErrorHandler + [Q.sql| + SELECT st.name, st.webhook, st.schedule + FROM hdb_catalog.hdb_scheduled_trigger st + |] () False + currentTime <- liftIO getCurrentTime + let scheduledEvents = 
concatMap (mkScheduledEvents currentTime) allSchedules + case scheduledEvents of + [] -> pure () + events -> do + let insertScheduledEventsSql = TB.run $ toSQL + SQLInsert + { siTable = scheduledEventsTable + , siCols = map (PGCol . T.pack) ["name", "webhook", "scheduled_time"] + , siValues = ValuesExp $ map (toTupleExp . toArr) events + , siConflict = Just $ DoNothing Nothing + , siRet = Nothing + } + Q.unitQE defaultTxErrorHandler (Q.fromText insertScheduledEventsSql) () False + where + toArr (ScheduledEvent n w t) = n : w : (pure $ formatTime' t) + toTupleExp = TupleExp . map SELit + uncurrySchedule (n, w, st) = + TimedTriggerQuery {ttqName = n, ttqWebhook = w, ttqSchedule = st} + +mkScheduledEvents :: UTCTime -> TimedTriggerQuery -> [ScheduledEvent] +mkScheduledEvents time (TimedTriggerQuery name webhook schedule) = + let events = + case parseCronSchedule $ unNonEmptyText schedule of + Right cron -> + generateScheduledEventsBetween + time + (addUTCTime nominalDay time) + cron + Left _err -> [] + in map (ScheduledEvent (unNonEmptyText name) (unNonEmptyText webhook)) events + +-- generates events (from, till] according to CronSchedule +generateScheduledEventsBetween :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] +generateScheduledEventsBetween from till cron = takeWhile ((>=) till) $ go from + where + go init = + case nextMatch cron init of + Nothing -> [] + Just next -> next : (go next) + +processScheduledQueue :: Q.PGPool -> IO () +processScheduledQueue pgpool = + forever $ do + scheduledEventsE <- + runExceptT $ + Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) getScheduledEvents + case scheduledEventsE of + Right events -> sequence_ $ map processScheduledEvent events + Left err -> traceShowM err + threadDelay oneMinute + where + oneMinute = 60 * 1000000 + +processScheduledEvent :: ScheduledEvent -> IO (Async ()) +processScheduledEvent = async . 
traceShowM + +getScheduledEvents :: Q.TxE QErr [ScheduledEvent] +getScheduledEvents = do + allSchedules <- map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| + UPDATE hdb_catalog.hdb_scheduled_trigger_events + SET locked = 't' + WHERE name IN ( SELECT t.name + FROM hdb_catalog.hdb_scheduled_trigger_events t + WHERE t.locked = 'f' + FOR UPDATE SKIP LOCKED + ) + RETURNING name, webhook, scheduled_time + |] () True + pure $ allSchedules + where uncurryEvent (n, w, st) = + ScheduledEvent + { seName = n + , seWebhook = w + , seScheduledTime = st + } + +-- RFC822 +formatTime' :: UTCTime -> T.Text +formatTime' = T.pack . formatTime defaultTimeLocale "%a, %d %b %Y %H:%M:%S %z" diff --git a/server/src-lib/Hasura/RQL/DDL/TimedTrigger.hs b/server/src-lib/Hasura/RQL/DDL/TimedTrigger.hs new file mode 100644 index 0000000000000..d052bb7aa939f --- /dev/null +++ b/server/src-lib/Hasura/RQL/DDL/TimedTrigger.hs @@ -0,0 +1,38 @@ +{-# LANGUAGE RecordWildCards #-} + +module Hasura.RQL.DDL.TimedTrigger + ( TimedTriggerQuery(..) 
+ , runCreateTimedTrigger + ) where + +import Data.Aeson +import Data.Aeson.Casing +import Data.Aeson.TH +import Hasura.EncJSON +import Hasura.Prelude +import Hasura.RQL.DDL.Schema.Cache (CacheBuildM) +import Hasura.RQL.Types +import Hasura.RQL.Types.Common (NonEmptyText) +import Language.Haskell.TH.Syntax (Lift) + +import qualified Database.PG.Query as Q + +data TimedTriggerQuery + = TimedTriggerQuery + { ttqName :: !NonEmptyText + , ttqWebhook :: !NonEmptyText + , ttqSchedule :: !NonEmptyText + } + deriving (Show, Eq, Lift) + +$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''TimedTriggerQuery) + +runCreateTimedTrigger :: CacheBuildM m => TimedTriggerQuery -> m EncJSON +runCreateTimedTrigger TimedTriggerQuery{..} = do + liftTx $ Q.unitQE defaultTxErrorHandler + [Q.sql| + INSERT into hdb_catalog.hdb_scheduled_trigger + (name, webhook, schedule) + VALUES ($1, $2, $3) + |] (ttqName, Q.AltJ $ toJSON ttqWebhook, ttqSchedule) False + return successMsg diff --git a/server/src-lib/Hasura/SQL/Types.hs b/server/src-lib/Hasura/SQL/Types.hs index c2f0a63e73e81..38f8f2fb54504 100644 --- a/server/src-lib/Hasura/SQL/Types.hs +++ b/server/src-lib/Hasura/SQL/Types.hs @@ -16,8 +16,7 @@ module Hasura.SQL.Types , PGDescription(..) - , PGCol - , getPGColTxt + , PGCol(..) , showPGCols , isIntegerType @@ -41,6 +40,7 @@ module Hasura.SQL.Types , SchemaName(..) , publicSchema , hdbViewsSchema + , hdbCatalogSchema , TableName(..) , FunctionName(..) 
@@ -235,6 +235,9 @@ publicSchema = SchemaName "public" hdbViewsSchema :: SchemaName hdbViewsSchema = SchemaName "hdb_views" +hdbCatalogSchema :: SchemaName +hdbCatalogSchema = SchemaName "hdb_catalog" + instance IsIden SchemaName where toIden (SchemaName t) = Iden t diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index d96823d53b6c1..d09b422c72d90 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -25,6 +25,7 @@ import Hasura.RQL.DDL.Relationship import Hasura.RQL.DDL.Relationship.Rename import Hasura.RQL.DDL.RemoteSchema import Hasura.RQL.DDL.Schema +import Hasura.RQL.DDL.TimedTrigger import Hasura.RQL.DML.Count import Hasura.RQL.DML.Delete import Hasura.RQL.DML.Insert @@ -86,6 +87,8 @@ data RQLQueryV1 | RQRedeliverEvent !RedeliverEventQuery | RQInvokeEventTrigger !InvokeEventTriggerQuery + | RQCreateTimedTrigger !TimedTriggerQuery + -- query collections, allow list related | RQCreateQueryCollection !CreateCollection | RQDropQueryCollection !DropCollection @@ -275,6 +278,7 @@ queryNeedsReload (RQV1 qi) = case qi of RQDeleteEventTrigger _ -> True RQRedeliverEvent _ -> False RQInvokeEventTrigger _ -> False + RQCreateTimedTrigger _ -> False RQCreateQueryCollection _ -> True RQDropQueryCollection _ -> True @@ -398,6 +402,7 @@ runQueryM rq = RQDeleteEventTrigger q -> runDeleteEventTriggerQuery q RQRedeliverEvent q -> runRedeliverEvent q RQInvokeEventTrigger q -> runInvokeEventTrigger q + RQCreateTimedTrigger q -> runCreateTimedTrigger q RQCreateQueryCollection q -> runCreateCollection q RQDropQueryCollection q -> runDropCollection q @@ -471,6 +476,7 @@ requiresAdmin = \case RQDeleteEventTrigger _ -> True RQRedeliverEvent _ -> True RQInvokeEventTrigger _ -> True + RQCreateTimedTrigger _ -> True RQCreateQueryCollection _ -> True RQDropQueryCollection _ -> True diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index 6ba0458e4c764..bb0058899645c 100644 --- 
a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -662,3 +662,26 @@ CREATE VIEW hdb_catalog.hdb_computed_field_function AS END AS function_schema FROM hdb_catalog.hdb_computed_field ); + +CREATE TABLE hdb_catalog.hdb_scheduled_trigger +( + name TEXT PRIMARY KEY, + webhook TEXT NOT NULL, + schedule TEXT +); + +CREATE TABLE hdb_catalog.hdb_scheduled_trigger_events +( + name TEXT, + webhook TEXT NOT NULL, + scheduled_time TIMESTAMP NOT NULL, + delivered BOOLEAN NOT NULL DEFAULT FALSE, + error BOOLEAN NOT NULL DEFAULT FALSE, + tries INTEGER NOT NULL DEFAULT 0, + created_at TIMESTAMP DEFAULT NOW(), + locked BOOLEAN NOT NULL DEFAULT FALSE, + next_retry_at TIMESTAMP, + + PRIMARY KEY (name, scheduled_time), + FOREIGN KEY (name) REFERENCES hdb_catalog.hdb_scheduled_trigger(name) ON UPDATE CASCADE +); From 332f1ca649b77fa48b44516535870e4ec9a1b1cd Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Fri, 13 Dec 2019 14:57:42 +0530 Subject: [PATCH 002/195] rename files --- server/graphql-engine.cabal | 6 +- server/src-lib/Hasura/App.hs | 69 ++++++++++--------- .../Lib.hs => Eventing/EventTrigger.hs} | 4 +- .../Hasura/{Events => Eventing}/HTTP.hs | 2 +- .../Timed.hs => Eventing/ScheduledTrigger.hs} | 3 +- 5 files changed, 43 insertions(+), 41 deletions(-) rename server/src-lib/Hasura/{Events/Lib.hs => Eventing/EventTrigger.hs} (99%) rename server/src-lib/Hasura/{Events => Eventing}/HTTP.hs (99%) rename server/src-lib/Hasura/{Events/Timed.hs => Eventing/ScheduledTrigger.hs} (98%) diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index 550d53f95e318..efe803fa393e3 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -319,9 +319,9 @@ library , Hasura.GraphQL.Context , Hasura.GraphQL.Logging - , Hasura.Events.Lib - , Hasura.Events.HTTP - , Hasura.Events.Timed + , Hasura.Eventing.HTTP + , Hasura.Eventing.EventTrigger + , Hasura.Eventing.ScheduledTrigger , Control.Concurrent.Extended , Control.Lens.Extended diff 
--git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index e2768f98bc442..4c6a84f11cdab 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -5,51 +5,52 @@ module Hasura.App where import Control.Monad.Base import Control.Monad.Stateless -import Control.Monad.STM (atomically) -import Control.Monad.Trans.Control (MonadBaseControl (..)) -import Data.Aeson ((.=)) -import Data.Time.Clock (UTCTime, getCurrentTime) +import Control.Monad.STM (atomically) +import Control.Monad.Trans.Control (MonadBaseControl (..)) +import Data.Aeson ((.=)) +import Data.Time.Clock (UTCTime, getCurrentTime) import Options.Applicative -import System.Environment (getEnvironment, lookupEnv) -import System.Exit (exitFailure) - -import qualified Control.Concurrent as C -import qualified Data.Aeson as A -import qualified Data.ByteString.Char8 as BC -import qualified Data.ByteString.Lazy.Char8 as BLC -import qualified Data.Text as T -import qualified Data.Time.Clock as Clock -import qualified Data.Yaml as Y -import qualified Database.PG.Query as Q -import qualified Network.HTTP.Client as HTTP -import qualified Network.HTTP.Client.TLS as HTTP -import qualified Network.Wai.Handler.Warp as Warp -import qualified System.Posix.Signals as Signals -import qualified Text.Mustache.Compile as M +import System.Environment (getEnvironment, lookupEnv) +import System.Exit (exitFailure) + +import qualified Control.Concurrent as C +import qualified Data.Aeson as A +import qualified Data.ByteString.Char8 as BC +import qualified Data.ByteString.Lazy.Char8 as BLC +import qualified Data.Text as T +import qualified Data.Time.Clock as Clock +import qualified Data.Yaml as Y +import qualified Database.PG.Query as Q +import qualified Network.HTTP.Client as HTTP +import qualified Network.HTTP.Client.TLS as HTTP +import qualified Network.Wai.Handler.Warp as Warp +import qualified System.Posix.Signals as Signals +import qualified Text.Mustache.Compile as M import Hasura.Db import 
Hasura.EncJSON -import Hasura.Events.Lib -import Hasura.Events.Timed +import Hasura.Eventing.EventTrigger +import Hasura.Eventing.ScheduledTrigger import Hasura.Logging import Hasura.Prelude import Hasura.RQL.DDL.Schema.Cache -import Hasura.RQL.Types (CacheRWM, Code (..), - HasHttpManager, HasSQLGenCtx, - HasSystemDefined, QErr (..), - SQLGenCtx (..), SchemaCache (..), - UserInfoM, adminRole, - adminUserInfo, decodeValue, - emptySchemaCache, throw400, - userRole, withPathK) +import Hasura.RQL.Types (CacheRWM, Code (..), + HasHttpManager, HasSQLGenCtx, + HasSystemDefined, QErr (..), + SQLGenCtx (..), + SchemaCache (..), UserInfoM, + adminRole, adminUserInfo, + decodeValue, + emptySchemaCache, throw400, + userRole, withPathK) import Hasura.Server.App import Hasura.Server.Auth -import Hasura.Server.CheckUpdates (checkForUpdates) +import Hasura.Server.CheckUpdates (checkForUpdates) import Hasura.Server.Init import Hasura.Server.Logging -import Hasura.Server.Migrate (migrateCatalog) -import Hasura.Server.Query (Run, RunCtx (..), peelRun, - requiresAdmin, runQueryM) +import Hasura.Server.Migrate (migrateCatalog) +import Hasura.Server.Query (Run, RunCtx (..), peelRun, + requiresAdmin, runQueryM) import Hasura.Server.SchemaUpdate import Hasura.Server.Telemetry import Hasura.Server.Version diff --git a/server/src-lib/Hasura/Events/Lib.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs similarity index 99% rename from server/src-lib/Hasura/Events/Lib.hs rename to server/src-lib/Hasura/Eventing/EventTrigger.hs index 632cbcc0be667..bd18c6c098747 100644 --- a/server/src-lib/Hasura/Events/Lib.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -1,4 +1,4 @@ -module Hasura.Events.Lib +module Hasura.Eventing.EventTrigger ( initEventEngineCtx , processEventQueue , unlockAllEvents @@ -19,7 +19,7 @@ import Data.Has import Data.Int (Int64) import Data.IORef (IORef, readIORef) import Data.Time.Clock -import Hasura.Events.HTTP +import Hasura.Eventing.HTTP import Hasura.HTTP 
import Hasura.Prelude import Hasura.RQL.DDL.Headers diff --git a/server/src-lib/Hasura/Events/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs similarity index 99% rename from server/src-lib/Hasura/Events/HTTP.hs rename to server/src-lib/Hasura/Eventing/HTTP.hs index f33f23c652480..45261d48baa37 100644 --- a/server/src-lib/Hasura/Events/HTTP.hs +++ b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -1,4 +1,4 @@ -module Hasura.Events.HTTP +module Hasura.Eventing.HTTP ( HTTPErr(..) , HTTPResp(..) , runHTTP diff --git a/server/src-lib/Hasura/Events/Timed.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs similarity index 98% rename from server/src-lib/Hasura/Events/Timed.hs rename to server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 729301882f3e9..6782823b36cf0 100644 --- a/server/src-lib/Hasura/Events/Timed.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -1,4 +1,4 @@ -module Hasura.Events.Timed +module Hasura.Eventing.ScheduledTrigger ( processScheduledQueue , runScheduledEventsGenerator ) where @@ -99,6 +99,7 @@ generateScheduledEventsBetween from till cron = takeWhile ((>=) till) $ go from processScheduledQueue :: Q.PGPool -> IO () processScheduledQueue pgpool = forever $ do + traceM "entering processor queue" scheduledEventsE <- runExceptT $ Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) getScheduledEvents From 4f2d7f6d6e7d649905d42663061c39e115505ae5 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Fri, 13 Dec 2019 16:52:08 +0530 Subject: [PATCH 003/195] WIP --- server/graphql-engine.cabal | 2 +- server/src-lib/Hasura/App.hs | 2 +- .../src-lib/Hasura/Eventing/EventTrigger.hs | 109 +---------- server/src-lib/Hasura/Eventing/HTTP.hs | 120 ++++++++++++- .../Hasura/Eventing/ScheduledTrigger.hs | 169 ++++++++++++++---- .../{TimedTrigger.hs => ScheduledTrigger.hs} | 24 +-- server/src-lib/Hasura/Server/Query.hs | 10 +- server/src-rsr/initialise.sql | 2 +- 8 files changed, 280 insertions(+), 158 deletions(-) rename 
server/src-lib/Hasura/RQL/DDL/{TimedTrigger.hs => ScheduledTrigger.hs} (57%) diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index efe803fa393e3..166afec5beeb7 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -258,7 +258,7 @@ library , Hasura.RQL.DDL.Schema.Table , Hasura.RQL.DDL.Utils , Hasura.RQL.DDL.EventTrigger - , Hasura.RQL.DDL.TimedTrigger + , Hasura.RQL.DDL.ScheduledTrigger , Hasura.RQL.DDL.Headers , Hasura.RQL.DDL.RemoteSchema , Hasura.RQL.DDL.QueryCollection diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index 4c6a84f11cdab..0bc1050fbcb32 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -255,7 +255,7 @@ runHGEServer ServeOptions{..} InitCtx{..} initTime = do _icHttpManager _icPgPool scRef eventEngineCtx void $ liftIO $ C.forkIO $ runScheduledEventsGenerator _icPgPool -- logger logEnvHeaders - void $ liftIO $ C.forkIO $ processScheduledQueue _icPgPool -- logger logEnvHeaders + void $ liftIO $ C.forkIO $ processScheduledQueue logger _icPgPool _icHttpManager -- logger logEnvHeaders -- start a background thread to check for updates void $ liftIO $ C.forkIO $ checkForUpdates loggerCtx _icHttpManager diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index bd18c6c098747..1136877c19d66 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -41,7 +41,6 @@ import qualified Hasura.Logging as L import qualified Network.HTTP.Client as HTTP import qualified Network.HTTP.Types as HTTP -type Version = T.Text invocationVersion :: Version invocationVersion = "2" @@ -51,32 +50,11 @@ type LogEnvHeaders = Bool newtype CacheRef = CacheRef { unCacheRef :: IORef (SchemaCache, SchemaCacheVer) } -newtype EventInternalErr - = EventInternalErr QErr - deriving (Show, Eq) - -instance L.ToEngineLog EventInternalErr L.Hasura where - toEngineLog (EventInternalErr 
qerr) = (L.LevelError, L.eventTriggerLogType, toJSON qerr) - -data TriggerMeta - = TriggerMeta { tmName :: TriggerName } - deriving (Show, Eq) - -$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''TriggerMeta) - -data DeliveryInfo - = DeliveryInfo - { diCurrentRetry :: Int - , diMaxRetries :: Int - } deriving (Show, Eq) - -$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''DeliveryInfo) - data Event = Event { eId :: EventId , eTable :: QualifiedTable - , eTrigger :: TriggerMeta + , eTrigger :: TriggerMetadata , eEvent :: Value , eTries :: Int , eCreatedAt :: Time.UTCTime @@ -98,7 +76,7 @@ data EventPayload = EventPayload { epId :: EventId , epTable :: QualifiedTableStrict - , epTrigger :: TriggerMeta + , epTrigger :: TriggerMetadata , epEvent :: Value , epDeliveryInfo :: DeliveryInfo , epCreatedAt :: Time.UTCTime @@ -106,47 +84,6 @@ data EventPayload $(deriveToJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''EventPayload) -data WebhookRequest - = WebhookRequest - { _rqPayload :: Value - , _rqHeaders :: Maybe [HeaderConf] - , _rqVersion :: T.Text - } -$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''WebhookRequest) - -data WebhookResponse - = WebhookResponse - { _wrsBody :: TBS.TByteString - , _wrsHeaders :: Maybe [HeaderConf] - , _wrsStatus :: Int - } -$(deriveToJSON (aesonDrop 4 snakeCase){omitNothingFields=True} ''WebhookResponse) - -data ClientError = ClientError { _ceMessage :: TBS.TByteString} -$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ClientError) - -data Response = ResponseType1 WebhookResponse | ResponseType2 ClientError - -instance ToJSON Response where - toJSON (ResponseType1 resp) = object - [ "type" .= String "webhook_response" - , "data" .= toJSON resp - , "version" .= invocationVersion - ] - toJSON (ResponseType2 err) = object - [ "type" .= String "client_error" - , "data" .= toJSON err - , "version" .= invocationVersion - ] - -data Invocation - = Invocation - { iEventId :: EventId - 
, iStatus :: Int - , iRequest :: WebhookRequest - , iResponse :: Response - } - data EventEngineCtx = EventEngineCtx { _eeCtxEventQueue :: TQ.TQueue Event @@ -354,44 +291,9 @@ mkInvo ep status reqHeaders respBody respHeaders Invocation (epId ep) status - (mkWebhookReq (toJSON ep) reqHeaders) + (mkWebhookReq (toJSON ep) reqHeaders invocationVersion) resp -mkResp :: Int -> TBS.TByteString -> [HeaderConf] -> Response -mkResp status payload headers = - let wr = WebhookResponse payload (mkMaybe headers) status - in ResponseType1 wr - -mkClientErr :: TBS.TByteString -> Response -mkClientErr message = - let cerr = ClientError message - in ResponseType2 cerr - -mkWebhookReq :: Value -> [HeaderConf] -> WebhookRequest -mkWebhookReq payload headers = WebhookRequest payload (mkMaybe headers) invocationVersion - -isClientError :: Int -> Bool -isClientError status = status >= 1000 - -mkMaybe :: [a] -> Maybe [a] -mkMaybe [] = Nothing -mkMaybe x = Just x - -logQErr :: ( MonadReader r m, Has (L.Logger L.Hasura) r, MonadIO m) => QErr -> m () -logQErr err = do - logger :: L.Logger L.Hasura <- asks getter - L.unLogger logger $ EventInternalErr err - -logHTTPErr - :: ( MonadReader r m - , Has (L.Logger L.Hasura) r - , MonadIO m - ) - => HTTPErr -> m () -logHTTPErr err = do - logger :: L.Logger L.Hasura <- asks getter - L.unLogger logger $ err - tryWebhook :: ( Has (L.Logger L.Hasura) r , Has HTTP.Manager r @@ -406,6 +308,7 @@ tryWebhook headers responseTimeout ep webhook = do let createdAt = epCreatedAt ep eventId = epId ep initReqE <- liftIO $ try $ HTTP.parseRequest webhook + manager <- asks getter case initReqE of Left excp -> throwError $ HClient excp Right initReq -> do @@ -424,7 +327,7 @@ tryWebhook headers responseTimeout ep webhook = do then retry else modifyTVar' c (+1) - eitherResp <- runHTTP req (Just (ExtraContext createdAt eventId)) + eitherResp <- runHTTP manager req (Just (ExtraContext createdAt eventId)) -- decrement counter once http is done liftIO $ atomically $ do @@ 
-456,7 +359,7 @@ fetchEvents = Event { eId = id' , eTable = QualifiedObject sn tn - , eTrigger = TriggerMeta trn + , eTrigger = TriggerMetadata trn , eEvent = payload , eTries = tries , eCreatedAt = created diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index 45261d48baa37..f62ffd3ac409d 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -5,6 +5,21 @@ module Hasura.Eventing.HTTP , isNetworkError , isNetworkErrorHC , ExtraContext(..) + , Invocation(..) + , Version + , Response(..) + , WebhookRequest(..) + , WebhookResponse(..) + , ClientError(..) + , isClientError + , mkClientErr + , TriggerMetadata(..) + , DeliveryInfo(..) + , logQErr + , logHTTPErr + , EventInternalErr(..) + , mkWebhookReq + , mkResp ) where import Data.Either @@ -19,6 +34,7 @@ import qualified Data.Text as T import qualified Data.Text.Encoding as TE import qualified Data.Text.Encoding.Error as TE import qualified Data.Time.Clock as Time +import qualified Hasura.Logging as L import qualified Network.HTTP.Client as HTTP import qualified Network.HTTP.Types as HTTP @@ -29,8 +45,50 @@ import Data.Has import Hasura.Logging import Hasura.Prelude import Hasura.RQL.DDL.Headers +import Hasura.RQL.Types.Error (QErr) import Hasura.RQL.Types.EventTrigger +data WebhookRequest + = WebhookRequest + { _rqPayload :: J.Value + , _rqHeaders :: Maybe [HeaderConf] + , _rqVersion :: T.Text + } +$(J.deriveToJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''WebhookRequest) + +data WebhookResponse + = WebhookResponse + { _wrsBody :: TBS.TByteString + , _wrsHeaders :: Maybe [HeaderConf] + , _wrsStatus :: Int + } +$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''WebhookResponse) + +newtype ClientError = ClientError { _ceMessage :: TBS.TByteString} +$(J.deriveToJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''ClientError) + +type Version = T.Text + +data Response = ResponseType1 
WebhookResponse | ResponseType2 ClientError + +instance J.ToJSON Response where + toJSON (ResponseType1 resp) = J.object + [ "type" J..= J.String "webhook_response" + , "data" J..= J.toJSON resp + ] + toJSON (ResponseType2 err ) = J.object + [ "type" J..= J.String "client_error" + , "data" J..= J.toJSON err + ] + +data Invocation + = Invocation + { iEventId :: EventId + , iStatus :: Int + , iRequest :: WebhookRequest + , iResponse :: Response + } + data ExtraContext = ExtraContext { elEventCreatedAt :: Time.UTCTime @@ -137,15 +195,69 @@ instance ToEngineLog HTTPReq Hasura where runHTTP :: ( MonadReader r m , Has (Logger Hasura) r - , Has HTTP.Manager r , MonadIO m ) - => HTTP.Request -> Maybe ExtraContext -> m (Either HTTPErr HTTPResp) -runHTTP req exLog = do + => HTTP.Manager -> HTTP.Request -> Maybe ExtraContext -> m (Either HTTPErr HTTPResp) +runHTTP manager req exLog = do logger :: Logger Hasura <- asks getter - manager <- asks getter res <- liftIO $ try $ HTTP.httpLbs req manager case res of Left e -> unLogger logger $ HClient e Right resp -> unLogger logger $ HTTPRespExtra (mkHTTPResp resp) exLog return $ either (Left . 
HClient) anyBodyParser res + +newtype EventInternalErr + = EventInternalErr QErr + deriving (Show, Eq) + +instance L.ToEngineLog EventInternalErr L.Hasura where + toEngineLog (EventInternalErr qerr) = (L.LevelError, L.eventTriggerLogType, J.toJSON qerr) + +data TriggerMetadata + = TriggerMetadata { tmName :: TriggerName } + deriving (Show, Eq) + +$(J.deriveJSON (J.aesonDrop 2 J.snakeCase){J.omitNothingFields=True} ''TriggerMetadata) + +data DeliveryInfo + = DeliveryInfo + { diCurrentRetry :: Int + , diMaxRetries :: Int + } deriving (Show, Eq) + +$(J.deriveJSON (J.aesonDrop 2 J.snakeCase){J.omitNothingFields=True} ''DeliveryInfo) + +mkResp :: Int -> TBS.TByteString -> [HeaderConf] -> Response +mkResp status payload headers = + let wr = WebhookResponse payload (mkMaybe headers) status + in ResponseType1 wr + +mkClientErr :: TBS.TByteString -> Response +mkClientErr message = + let cerr = ClientError message + in ResponseType2 cerr + +mkWebhookReq :: J.Value -> [HeaderConf] -> Version -> WebhookRequest +mkWebhookReq payload headers version = WebhookRequest payload (mkMaybe headers) version + +isClientError :: Int -> Bool +isClientError status = status >= 1000 + +mkMaybe :: [a] -> Maybe [a] +mkMaybe [] = Nothing +mkMaybe x = Just x + +logQErr :: ( MonadReader r m, Has (L.Logger L.Hasura) r, MonadIO m) => QErr -> m () +logQErr err = do + logger :: L.Logger L.Hasura <- asks getter + L.unLogger logger $ EventInternalErr err + +logHTTPErr + :: ( MonadReader r m + , Has (L.Logger L.Hasura) r + , MonadIO m + ) + => HTTPErr -> m () +logHTTPErr err = do + logger :: L.Logger L.Hasura <- asks getter + L.unLogger logger err diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 6782823b36cf0..596164902f40b 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -1,25 +1,45 @@ +{-# LANGUAGE RecordWildCards #-} + module 
Hasura.Eventing.ScheduledTrigger ( processScheduledQueue , runScheduledEventsGenerator ) where -import Control.Concurrent (threadDelay) -import Control.Concurrent.Async +import Control.Concurrent (threadDelay) +import Control.Exception (try) +import Data.Has import Data.Time.Clock import Data.Time.Format +import Hasura.Eventing.HTTP import Hasura.Prelude -import Hasura.RQL.DDL.TimedTrigger +import Hasura.RQL.DDL.ScheduledTrigger import Hasura.RQL.Types import Hasura.SQL.DML import Hasura.SQL.Types import System.Cron -import qualified Data.Text as T -import qualified Database.PG.Query as Q -import qualified Text.Builder as TB (run) +import qualified Data.Aeson as J +import qualified Data.Text as T +import qualified Database.PG.Query as Q +import qualified Hasura.Logging as L +import qualified Network.HTTP.Client as HTTP +import qualified Text.Builder as TB (run) import Debug.Trace +oneSecond :: Int +oneSecond = 1000000 + +oneMinute :: Int +oneMinute = 60 * oneSecond + +oneHour :: Int +oneHour = 60 * oneMinute + +-- type LogEnvHeaders = Bool + +type ScheduledEventPayload = J.Value + scheduledEventsTable :: QualifiedTable scheduledEventsTable = QualifiedObject @@ -28,7 +48,8 @@ scheduledEventsTable = data ScheduledEvent = ScheduledEvent - { seName :: !T.Text + { seId :: !(Maybe Text) + , seName :: !T.Text , seWebhook :: !T.Text , seScheduledTime :: !UTCTime } deriving (Show, Eq) @@ -40,13 +61,11 @@ runScheduledEventsGenerator pgpool = do runExceptT (Q.runTx pgpool - (Q.RepeatableRead, Just Q.ReadWrite) + (Q.ReadCommitted, Just Q.ReadWrite) generateScheduledEvents) >>= \case Right _ -> pure () Left err -> traceShowM err - threadDelay oneHour - where - oneHour = 60 * 60 * 1000000 + threadDelay oneMinute generateScheduledEvents :: Q.TxE QErr () generateScheduledEvents = do @@ -70,22 +89,22 @@ generateScheduledEvents = do } Q.unitQE defaultTxErrorHandler (Q.fromText insertScheduledEventsSql) () False where - toArr (ScheduledEvent n w t) = n : w : (pure $ formatTime' t) + 
toArr (ScheduledEvent _ n w t) = n : w : (pure $ formatTime' t) toTupleExp = TupleExp . map SELit uncurrySchedule (n, w, st) = - TimedTriggerQuery {ttqName = n, ttqWebhook = w, ttqSchedule = st} + ScheduledTriggerQuery {stqName = n, stqWebhook = w, stqSchedule = st} -mkScheduledEvents :: UTCTime -> TimedTriggerQuery -> [ScheduledEvent] -mkScheduledEvents time (TimedTriggerQuery name webhook schedule) = +mkScheduledEvents :: UTCTime -> ScheduledTriggerQuery -> [ScheduledEvent] +mkScheduledEvents time ScheduledTriggerQuery{..} = let events = - case parseCronSchedule $ unNonEmptyText schedule of + case parseCronSchedule $ unNonEmptyText stqSchedule of Right cron -> generateScheduledEventsBetween time (addUTCTime nominalDay time) cron Left _err -> [] - in map (ScheduledEvent (unNonEmptyText name) (unNonEmptyText webhook)) events + in map (ScheduledEvent Nothing (unNonEmptyText stqName) (unNonEmptyText stqWebhook)) events -- generates events (from, till] according to CronSchedule generateScheduledEventsBetween :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] @@ -96,39 +115,127 @@ generateScheduledEventsBetween from till cron = takeWhile ((>=) till) $ go from Nothing -> [] Just next -> next : (go next) -processScheduledQueue :: Q.PGPool -> IO () -processScheduledQueue pgpool = +processScheduledQueue :: L.Logger L.Hasura -> Q.PGPool -> HTTP.Manager -> IO () +processScheduledQueue logger pgpool httpMgr = forever $ do traceM "entering processor queue" scheduledEventsE <- runExceptT $ - Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) getScheduledEvents + Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadWrite) getScheduledEvents case scheduledEventsE of - Right events -> sequence_ $ map processScheduledEvent events - Left err -> traceShowM err - threadDelay oneMinute + Right events -> + sequence_ $ + map + (\ev -> runReaderT (processScheduledEvent pgpool httpMgr ev) (logger)) + events + Left err -> traceShowM err + threadDelay (10 * oneSecond) + +processScheduledEvent :: + 
(MonadReader r m, Has (L.Logger L.Hasura) r, MonadIO m) + => Q.PGPool + -> HTTP.Manager + -> ScheduledEvent + -> m () +processScheduledEvent pgpool httpMgr se@ScheduledEvent{..} = do + -- let webhook = T.unpack $ wciCachedValue $ etiWebhookInfo eti + -- retryConf = etiRetryConf eti + let timeoutSeconds = 60 + responseTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) + -- headerInfos = etiHeaders eti + -- etHeaders = map encodeHeader headerInfos + -- headers = addDefaultHeaders etHeaders + -- ep = createEventPayload retryConf e + eventPayload = J.Null + res <- runExceptT $ tryWebhook httpMgr responseTimeout eventPayload seWebhook + -- let decodedHeaders = map (decodeHeader logenv headerInfos) headers + finally <- either + (processError pgpool se) + (processSuccess pgpool se) (trace ("error is " ++ show res) res) + either logQErr return finally + +tryWebhook :: + ( MonadReader r m + , Has (L.Logger L.Hasura) r + , MonadIO m + , MonadError HTTPErr m + ) + => HTTP.Manager + -> HTTP.ResponseTimeout + -> ScheduledEventPayload + -> T.Text + -> m HTTPResp +tryWebhook httpMgr timeout payload webhook = do + initReqE <- liftIO $ try $ HTTP.parseRequest (T.unpack webhook) + case initReqE of + Left excp -> throwError $ HClient excp + Right initReq -> do + let req = + initReq + { HTTP.method = "POST" + -- , HTTP.requestHeaders = [] + , HTTP.requestBody = HTTP.RequestBodyLBS (J.encode payload) + , HTTP.responseTimeout = timeout + } + eitherResp <- runHTTP httpMgr req Nothing + onLeft eitherResp throwError + +processError :: (MonadIO m) => Q.PGPool -> ScheduledEvent -> HTTPErr -> m (Either QErr ()) +processError pgpool se err = do + liftIO $ + runExceptT $ + Q.runTx + pgpool + (Q.RepeatableRead, Just Q.ReadWrite) + markError where - oneMinute = 60 * 1000000 + markError = + Q.unitQE defaultTxErrorHandler + [Q.sql| + UPDATE hdb_catalog.hdb_scheduled_trigger_events + SET error = 't', locked = 'f' + WHERE id = $1 + |] (Identity $ seId se) True -processScheduledEvent :: 
ScheduledEvent -> IO (Async ()) -processScheduledEvent = async . traceShowM +processSuccess :: (MonadIO m) => Q.PGPool -> ScheduledEvent -> HTTPResp -> m (Either QErr ()) +processSuccess pgpool se resp = do + liftIO $ + runExceptT $ + Q.runTx + pgpool + (Q.RepeatableRead, Just Q.ReadWrite) + markSuccess + where + markSuccess = + Q.unitQE defaultTxErrorHandler + [Q.sql| + UPDATE hdb_catalog.hdb_scheduled_trigger_events + SET delivered = 't', locked = 'f' + WHERE id = $1 + |] (Identity $ seId se) True getScheduledEvents :: Q.TxE QErr [ScheduledEvent] getScheduledEvents = do + currentTime <- liftIO getCurrentTime allSchedules <- map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.hdb_scheduled_trigger_events SET locked = 't' WHERE name IN ( SELECT t.name FROM hdb_catalog.hdb_scheduled_trigger_events t - WHERE t.locked = 'f' + WHERE ( t.locked = 'f' + and t.delivered = 'f' + and t.error = 'f' + and t.scheduled_time <= $1 + ) FOR UPDATE SKIP LOCKED ) - RETURNING name, webhook, scheduled_time - |] () True + RETURNING id, name, webhook, scheduled_time + |] (Identity currentTime) True pure $ allSchedules - where uncurryEvent (n, w, st) = + where uncurryEvent (i, n, w, st) = ScheduledEvent - { seName = n + { seId = i + , seName = n , seWebhook = w , seScheduledTime = st } diff --git a/server/src-lib/Hasura/RQL/DDL/TimedTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs similarity index 57% rename from server/src-lib/Hasura/RQL/DDL/TimedTrigger.hs rename to server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index d052bb7aa939f..bb36594e82f60 100644 --- a/server/src-lib/Hasura/RQL/DDL/TimedTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -1,8 +1,8 @@ {-# LANGUAGE RecordWildCards #-} -module Hasura.RQL.DDL.TimedTrigger - ( TimedTriggerQuery(..) - , runCreateTimedTrigger +module Hasura.RQL.DDL.ScheduledTrigger + ( ScheduledTriggerQuery(..) 
+ , runCreateScheduledTrigger ) where import Data.Aeson @@ -17,22 +17,22 @@ import Language.Haskell.TH.Syntax (Lift) import qualified Database.PG.Query as Q -data TimedTriggerQuery - = TimedTriggerQuery - { ttqName :: !NonEmptyText - , ttqWebhook :: !NonEmptyText - , ttqSchedule :: !NonEmptyText +data ScheduledTriggerQuery + = ScheduledTriggerQuery + { stqName :: !NonEmptyText + , stqWebhook :: !NonEmptyText + , stqSchedule :: !NonEmptyText } deriving (Show, Eq, Lift) -$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''TimedTriggerQuery) +$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ScheduledTriggerQuery) -runCreateTimedTrigger :: CacheBuildM m => TimedTriggerQuery -> m EncJSON -runCreateTimedTrigger TimedTriggerQuery{..} = do +runCreateScheduledTrigger :: CacheBuildM m => ScheduledTriggerQuery -> m EncJSON +runCreateScheduledTrigger ScheduledTriggerQuery{..} = do liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql| INSERT into hdb_catalog.hdb_scheduled_trigger (name, webhook, schedule) VALUES ($1, $2, $3) - |] (ttqName, Q.AltJ $ toJSON ttqWebhook, ttqSchedule) False + |] (stqName, stqWebhook, stqSchedule) False return successMsg diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index d09b422c72d90..6e50f3fb0c413 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -24,8 +24,8 @@ import Hasura.RQL.DDL.QueryCollection import Hasura.RQL.DDL.Relationship import Hasura.RQL.DDL.Relationship.Rename import Hasura.RQL.DDL.RemoteSchema +import Hasura.RQL.DDL.ScheduledTrigger import Hasura.RQL.DDL.Schema -import Hasura.RQL.DDL.TimedTrigger import Hasura.RQL.DML.Count import Hasura.RQL.DML.Delete import Hasura.RQL.DML.Insert @@ -87,7 +87,7 @@ data RQLQueryV1 | RQRedeliverEvent !RedeliverEventQuery | RQInvokeEventTrigger !InvokeEventTriggerQuery - | RQCreateTimedTrigger !TimedTriggerQuery + | RQCreateScheduledTrigger !ScheduledTriggerQuery -- query collections, allow 
list related | RQCreateQueryCollection !CreateCollection @@ -278,7 +278,7 @@ queryNeedsReload (RQV1 qi) = case qi of RQDeleteEventTrigger _ -> True RQRedeliverEvent _ -> False RQInvokeEventTrigger _ -> False - RQCreateTimedTrigger _ -> False + RQCreateScheduledTrigger _ -> False RQCreateQueryCollection _ -> True RQDropQueryCollection _ -> True @@ -402,7 +402,7 @@ runQueryM rq = RQDeleteEventTrigger q -> runDeleteEventTriggerQuery q RQRedeliverEvent q -> runRedeliverEvent q RQInvokeEventTrigger q -> runInvokeEventTrigger q - RQCreateTimedTrigger q -> runCreateTimedTrigger q + RQCreateScheduledTrigger q -> runCreateScheduledTrigger q RQCreateQueryCollection q -> runCreateCollection q RQDropQueryCollection q -> runDropCollection q @@ -476,7 +476,7 @@ requiresAdmin = \case RQDeleteEventTrigger _ -> True RQRedeliverEvent _ -> True RQInvokeEventTrigger _ -> True - RQCreateTimedTrigger _ -> True + RQCreateScheduledTrigger _ -> True RQCreateQueryCollection _ -> True RQDropQueryCollection _ -> True diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index bb0058899645c..d2643aef4e666 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -672,6 +672,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger CREATE TABLE hdb_catalog.hdb_scheduled_trigger_events ( + id TEXT DEFAULT gen_random_uuid() UNIQUE, name TEXT, webhook TEXT NOT NULL, scheduled_time TIMESTAMP NOT NULL, @@ -680,7 +681,6 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger_events tries INTEGER NOT NULL DEFAULT 0, created_at TIMESTAMP DEFAULT NOW(), locked BOOLEAN NOT NULL DEFAULT FALSE, - next_retry_at TIMESTAMP, PRIMARY KEY (name, scheduled_time), FOREIGN KEY (name) REFERENCES hdb_catalog.hdb_scheduled_trigger(name) ON UPDATE CASCADE From 85ac116b0ad1ea2292a3ec6e8e4c0e354c9d78db Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 17 Dec 2019 13:53:39 +0530 Subject: [PATCH 004/195] [temp] allow warnings --- server/Makefile | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/server/Makefile b/server/Makefile index 55d54f4f7dd88..0abd9a88939e4 100644 --- a/server/Makefile +++ b/server/Makefile @@ -55,7 +55,8 @@ release-image: $(project).cabal ci-binary: mkdir -p packaging/build/rootfs # --no-terminal for a cleaner output in circleci - stack $(STACK_FLAGS) build --no-terminal --test --no-run-tests --ghc-options=-Werror $(BUILD_FLAGS) + # stack $(STACK_FLAGS) build --no-terminal --test --no-run-tests --ghc-options=-Werror $(BUILD_FLAGS) + stack $(STACK_FLAGS) build --no-terminal --test --no-run-tests $(BUILD_FLAGS) mkdir -p $(build_output) cp $(build_dir)/$(project)/$(project) $(build_dir)/graphql-engine-tests/graphql-engine-tests $(build_output) echo "$(VERSION)" > $(build_output)/version.txt From 522513bdba84c977e8c7b0e2eaeeaa466b472ee5 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 17 Dec 2019 16:53:26 +0530 Subject: [PATCH 005/195] support one-off type --- .../Hasura/Eventing/ScheduledTrigger.hs | 12 ++++-- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 39 +++++++++++++++++-- 2 files changed, 44 insertions(+), 7 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 596164902f40b..e61d8abb6605a 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -36,6 +36,9 @@ oneMinute = 60 * oneSecond oneHour :: Int oneHour = 60 * oneMinute +endOfTime :: UTCTime +endOfTime = read "2999-12-31 00:00:00 Z" + -- type LogEnvHeaders = Bool type ScheduledEventPayload = J.Value @@ -92,18 +95,19 @@ generateScheduledEvents = do toArr (ScheduledEvent _ n w t) = n : w : (pure $ formatTime' t) toTupleExp = TupleExp . 
map SELit uncurrySchedule (n, w, st) = - ScheduledTriggerQuery {stqName = n, stqWebhook = w, stqSchedule = st} + ScheduledTriggerQuery {stqName = n, stqWebhook = w, stqSchedule = fromBS st} + fromBS st = fromMaybe (OneOff endOfTime) $ J.decodeStrict' st mkScheduledEvents :: UTCTime -> ScheduledTriggerQuery -> [ScheduledEvent] mkScheduledEvents time ScheduledTriggerQuery{..} = let events = - case parseCronSchedule $ unNonEmptyText stqSchedule of - Right cron -> + case stqSchedule of + OneOff _ -> [] -- one-off scheduled events need not be generated + Cron cron -> generateScheduledEventsBetween time (addUTCTime nominalDay time) cron - Left _err -> [] in map (ScheduledEvent Nothing (unNonEmptyText stqName) (unNonEmptyText stqWebhook)) events -- generates events (from, till] according to CronSchedule diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index bb36594e82f60..eb85f2995ca69 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -1,30 +1,59 @@ +{-# LANGUAGE DeriveLift #-} {-# LANGUAGE RecordWildCards #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} module Hasura.RQL.DDL.ScheduledTrigger ( ScheduledTriggerQuery(..) , runCreateScheduledTrigger + , ScheduleType(..) 
) where import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH +import Data.Time.Clock import Hasura.EncJSON import Hasura.Prelude import Hasura.RQL.DDL.Schema.Cache (CacheBuildM) import Hasura.RQL.Types import Hasura.RQL.Types.Common (NonEmptyText) -import Language.Haskell.TH.Syntax (Lift) +import Instances.TH.Lift () +import Language.Haskell.TH.Syntax as TH +import System.Cron.Parser +import System.Cron.Types +import qualified Data.Aeson as J +import qualified Data.Text as T import qualified Database.PG.Query as Q +instance Lift UTCTime + +instance Lift CronSchedule where + lift = cronScheduleExp + +cronScheduleExp :: CronSchedule -> Q Exp +cronScheduleExp c = [| c |] + +data ScheduleType = OneOff UTCTime | Cron CronSchedule + deriving (Show, Eq, Lift) + +$(deriveJSON (defaultOptions){sumEncoding=TaggedObject "type" "value"} ''ScheduleType) + data ScheduledTriggerQuery = ScheduledTriggerQuery { stqName :: !NonEmptyText , stqWebhook :: !NonEmptyText - , stqSchedule :: !NonEmptyText + , stqSchedule :: !ScheduleType } deriving (Show, Eq, Lift) +instance FromJSON CronSchedule where + parseJSON = withText "CronSchedule" $ \t -> + either fail pure $ parseCronSchedule t + +instance ToJSON CronSchedule where + toJSON = J.String . 
serializeCronSchedule + $(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ScheduledTriggerQuery) runCreateScheduledTrigger :: CacheBuildM m => ScheduledTriggerQuery -> m EncJSON @@ -34,5 +63,9 @@ runCreateScheduledTrigger ScheduledTriggerQuery{..} = do INSERT into hdb_catalog.hdb_scheduled_trigger (name, webhook, schedule) VALUES ($1, $2, $3) - |] (stqName, stqWebhook, stqSchedule) False + |] (stqName, stqWebhook, toTxt stqSchedule) False return successMsg + where + toTxt = \case + OneOff utcTime -> T.pack $ show utcTime + Cron cron -> serializeCronSchedule cron From c938522b59c6081631d9d1a696a87593dc97b15c Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 18 Dec 2019 12:43:37 +0530 Subject: [PATCH 006/195] [wip] take optional payload --- .../Hasura/Eventing/ScheduledTrigger.hs | 29 +++++----- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 54 +++++++++++++------ server/src-rsr/initialise.sql | 4 +- 3 files changed, 58 insertions(+), 29 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index e61d8abb6605a..b6aa369789ac7 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -19,7 +19,9 @@ import Hasura.SQL.Types import System.Cron import qualified Data.Aeson as J +import qualified Data.ByteString.Lazy as LBS import qualified Data.Text as T +import qualified Data.Text.Encoding as TE import qualified Database.PG.Query as Q import qualified Hasura.Logging as L import qualified Network.HTTP.Client as HTTP @@ -54,6 +56,7 @@ data ScheduledEvent { seId :: !(Maybe Text) , seName :: !T.Text , seWebhook :: !T.Text + , sePayload :: !J.Value , seScheduledTime :: !UTCTime } deriving (Show, Eq) @@ -74,7 +77,7 @@ generateScheduledEvents :: Q.TxE QErr () generateScheduledEvents = do allSchedules <- map uncurrySchedule <$> Q.listQE defaultTxErrorHandler [Q.sql| - SELECT st.name, st.webhook, st.schedule + SELECT 
st.name, st.webhook, st.schedule, st.payload FROM hdb_catalog.hdb_scheduled_trigger st |] () False currentTime <- liftIO getCurrentTime @@ -85,30 +88,30 @@ generateScheduledEvents = do let insertScheduledEventsSql = TB.run $ toSQL SQLInsert { siTable = scheduledEventsTable - , siCols = map (PGCol . T.pack) ["name", "webhook", "scheduled_time"] + , siCols = map (PGCol . T.pack) ["name", "webhook", "payload", "scheduled_time"] , siValues = ValuesExp $ map (toTupleExp . toArr) events , siConflict = Just $ DoNothing Nothing , siRet = Nothing } Q.unitQE defaultTxErrorHandler (Q.fromText insertScheduledEventsSql) () False where - toArr (ScheduledEvent _ n w t) = n : w : (pure $ formatTime' t) + toArr (ScheduledEvent _ n w p t) = n : w : (TE.decodeUtf8 . LBS.toStrict $ J.encode p) : (pure $ formatTime' t) toTupleExp = TupleExp . map SELit - uncurrySchedule (n, w, st) = - ScheduledTriggerQuery {stqName = n, stqWebhook = w, stqSchedule = fromBS st} + uncurrySchedule (n, w, st, p) = + ScheduledTrigger {stName = n, stWebhook = w, stSchedule = fromBS st, stPayload = Q.getAltJ <$> p} fromBS st = fromMaybe (OneOff endOfTime) $ J.decodeStrict' st -mkScheduledEvents :: UTCTime -> ScheduledTriggerQuery -> [ScheduledEvent] -mkScheduledEvents time ScheduledTriggerQuery{..} = +mkScheduledEvents :: UTCTime -> ScheduledTrigger-> [ScheduledEvent] +mkScheduledEvents time ScheduledTrigger{..} = let events = - case stqSchedule of + case stSchedule of OneOff _ -> [] -- one-off scheduled events need not be generated Cron cron -> generateScheduledEventsBetween time (addUTCTime nominalDay time) cron - in map (ScheduledEvent Nothing (unNonEmptyText stqName) (unNonEmptyText stqWebhook)) events + in map (ScheduledEvent Nothing stName stWebhook (fromMaybe J.Null stPayload)) events -- generates events (from, till] according to CronSchedule generateScheduledEventsBetween :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] @@ -150,8 +153,7 @@ processScheduledEvent pgpool httpMgr se@ScheduledEvent{..} = 
do -- etHeaders = map encodeHeader headerInfos -- headers = addDefaultHeaders etHeaders -- ep = createEventPayload retryConf e - eventPayload = J.Null - res <- runExceptT $ tryWebhook httpMgr responseTimeout eventPayload seWebhook + res <- runExceptT $ tryWebhook httpMgr responseTimeout sePayload seWebhook -- let decodedHeaders = map (decodeHeader logenv headerInfos) headers finally <- either (processError pgpool se) @@ -233,14 +235,15 @@ getScheduledEvents = do ) FOR UPDATE SKIP LOCKED ) - RETURNING id, name, webhook, scheduled_time + RETURNING id, name, webhook, payload, scheduled_time |] (Identity currentTime) True pure $ allSchedules - where uncurryEvent (i, n, w, st) = + where uncurryEvent (i, n, w, Q.AltJ p, st) = ScheduledEvent { seId = i , seName = n , seWebhook = w + , sePayload = p , seScheduledTime = st } diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index eb85f2995ca69..523392602e366 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -1,4 +1,3 @@ -{-# LANGUAGE DeriveLift #-} {-# LANGUAGE RecordWildCards #-} {-# OPTIONS_GHC -fno-warn-orphans #-} @@ -6,6 +5,7 @@ module Hasura.RQL.DDL.ScheduledTrigger ( ScheduledTriggerQuery(..) , runCreateScheduledTrigger , ScheduleType(..) + , ScheduledTrigger(..) 
) where import Data.Aeson @@ -26,24 +26,33 @@ import qualified Data.Aeson as J import qualified Data.Text as T import qualified Database.PG.Query as Q -instance Lift UTCTime +data ScheduleType = OneOff UTCTime | Cron CronSchedule + deriving (Show, Eq) -instance Lift CronSchedule where - lift = cronScheduleExp +$(deriveJSON (defaultOptions){sumEncoding=TaggedObject "type" "value"} ''ScheduleType) -cronScheduleExp :: CronSchedule -> Q Exp -cronScheduleExp c = [| c |] +data ScheduledTrigger + = ScheduledTrigger + { stName :: !T.Text + , stWebhook :: !T.Text + , stSchedule :: !ScheduleType + , stPayload :: !(Maybe J.Value) + } + deriving (Show, Eq) -data ScheduleType = OneOff UTCTime | Cron CronSchedule +-- TODO :: Change stqSchedule to ScheduleType after writing TH.Lift instances + +data ScheduleTypeUnstrict = UnstrictOneOff T.Text | UnstrictCron T.Text deriving (Show, Eq, Lift) -$(deriveJSON (defaultOptions){sumEncoding=TaggedObject "type" "value"} ''ScheduleType) +$(deriveJSON (defaultOptions){constructorTagModifier = drop 8, sumEncoding=TaggedObject "type" "value"} ''ScheduleTypeUnstrict) data ScheduledTriggerQuery = ScheduledTriggerQuery { stqName :: !NonEmptyText , stqWebhook :: !NonEmptyText - , stqSchedule :: !ScheduleType + , stqSchedule :: !ScheduleTypeUnstrict + , stqPayload :: !(Maybe J.Value) } deriving (Show, Eq, Lift) @@ -54,18 +63,33 @@ instance FromJSON CronSchedule where instance ToJSON CronSchedule where toJSON = J.String . serializeCronSchedule -$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ScheduledTriggerQuery) +instance FromJSON ScheduledTriggerQuery where + parseJSON = + withObject "ScheduledTriggerQuery" $ \o -> do + stqName <- o .: "name" + stqWebhook <- o .: "webhook" + stqPayload <- o .:? 
"payload" + stqScheduleUnstrict :: ScheduleTypeUnstrict <- o .: "schedule" + scheduleType :: ScheduleType <- + either fail pure $ eitherDecode' (J.encode stqScheduleUnstrict) + stqSchedule <- + case scheduleType of + OneOff utcTime -> pure $ UnstrictOneOff (T.pack $ show utcTime) + Cron cron -> pure $ UnstrictCron(serializeCronSchedule cron) + pure ScheduledTriggerQuery {..} + +$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ScheduledTriggerQuery) runCreateScheduledTrigger :: CacheBuildM m => ScheduledTriggerQuery -> m EncJSON runCreateScheduledTrigger ScheduledTriggerQuery{..} = do liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql| INSERT into hdb_catalog.hdb_scheduled_trigger - (name, webhook, schedule) - VALUES ($1, $2, $3) - |] (stqName, stqWebhook, toTxt stqSchedule) False + (name, webhook, schedule, payload) + VALUES ($1, $2, $3, $4) + |] (stqName, stqWebhook, Q.AltJ $ toJSON stqSchedule, Q.AltJ <$> stqPayload) False return successMsg where toTxt = \case - OneOff utcTime -> T.pack $ show utcTime - Cron cron -> serializeCronSchedule cron + UnstrictOneOff utcTime -> utcTime + UnstrictCron cron -> cron diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index d2643aef4e666..c779fe89937c1 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -667,7 +667,8 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger ( name TEXT PRIMARY KEY, webhook TEXT NOT NULL, - schedule TEXT + schedule JSON NOT NULL, + payload JSON ); CREATE TABLE hdb_catalog.hdb_scheduled_trigger_events @@ -676,6 +677,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger_events name TEXT, webhook TEXT NOT NULL, scheduled_time TIMESTAMP NOT NULL, + payload JSON, delivered BOOLEAN NOT NULL DEFAULT FALSE, error BOOLEAN NOT NULL DEFAULT FALSE, tries INTEGER NOT NULL DEFAULT 0, From 879c88ac8c58ac977a65fd3808552a17666911a1 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 18 Dec 2019 14:31:00 +0530 Subject: [PATCH 007/195] log 
invocations for each request --- .../src-lib/Hasura/Eventing/EventTrigger.hs | 4 - server/src-lib/Hasura/Eventing/HTTP.hs | 9 +- .../Hasura/Eventing/ScheduledTrigger.hs | 103 ++++++++++++++---- server/src-rsr/initialise.sql | 14 ++- 4 files changed, 104 insertions(+), 26 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 1136877c19d66..d4d3c04785c4b 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -16,7 +16,6 @@ import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH import Data.Has -import Data.Int (Int64) import Data.IORef (IORef, readIORef) import Data.Time.Clock import Hasura.Eventing.HTTP @@ -409,6 +408,3 @@ unlockAllEvents = SET locked = 'f' WHERE locked = 't' |] () False - -toInt64 :: (Integral a) => a -> Int64 -toInt64 = fromIntegral diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index f62ffd3ac409d..610deec91e4df 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -5,6 +5,7 @@ module Hasura.Eventing.HTTP , isNetworkError , isNetworkErrorHC , ExtraContext(..) + , EventId , Invocation(..) , Version , Response(..) @@ -20,10 +21,9 @@ module Hasura.Eventing.HTTP , EventInternalErr(..) 
, mkWebhookReq , mkResp + , toInt64 ) where -import Data.Either - import qualified Data.Aeson as J import qualified Data.Aeson.Casing as J import qualified Data.Aeson.TH as J @@ -41,7 +41,9 @@ import qualified Network.HTTP.Types as HTTP import Control.Exception (try) import Control.Monad.IO.Class (MonadIO, liftIO) import Control.Monad.Reader (MonadReader) +import Data.Either import Data.Has +import Data.Int (Int64) import Hasura.Logging import Hasura.Prelude import Hasura.RQL.DDL.Headers @@ -261,3 +263,6 @@ logHTTPErr logHTTPErr err = do logger :: L.Logger L.Hasura <- asks getter L.unLogger logger err + +toInt64 :: (Integral a) => a -> Int64 +toInt64 = fromIntegral diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index b6aa369789ac7..2b97036fc8f29 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -12,6 +12,7 @@ import Data.Time.Clock import Data.Time.Format import Hasura.Eventing.HTTP import Hasura.Prelude +import Hasura.RQL.DDL.Headers import Hasura.RQL.DDL.ScheduledTrigger import Hasura.RQL.Types import Hasura.SQL.DML @@ -19,7 +20,10 @@ import Hasura.SQL.Types import System.Cron import qualified Data.Aeson as J +import qualified Data.Aeson.Casing as J +import qualified Data.Aeson.TH as J import qualified Data.ByteString.Lazy as LBS +import qualified Data.TByteString as TBS import qualified Data.Text as T import qualified Data.Text.Encoding as TE import qualified Database.PG.Query as Q @@ -29,6 +33,9 @@ import qualified Text.Builder as TB (run) import Debug.Trace +invocationVersion :: Version +invocationVersion = "1" + oneSecond :: Int oneSecond = 1000000 @@ -49,7 +56,7 @@ scheduledEventsTable :: QualifiedTable scheduledEventsTable = QualifiedObject hdbCatalogSchema - (TableName $ T.pack "hdb_scheduled_trigger_events") + (TableName $ T.pack "hdb_scheduled_events") data ScheduledEvent = ScheduledEvent @@ -60,6 +67,8 @@ data 
ScheduledEvent , seScheduledTime :: !UTCTime } deriving (Show, Eq) +$(J.deriveToJSON (J.aesonDrop 2 J.snakeCase){J.omitNothingFields=True} ''ScheduledEvent) + runScheduledEventsGenerator :: Q.PGPool -> IO () runScheduledEventsGenerator pgpool = do forever $ do @@ -95,17 +104,23 @@ generateScheduledEvents = do } Q.unitQE defaultTxErrorHandler (Q.fromText insertScheduledEventsSql) () False where - toArr (ScheduledEvent _ n w p t) = n : w : (TE.decodeUtf8 . LBS.toStrict $ J.encode p) : (pure $ formatTime' t) + toArr (ScheduledEvent _ n w p t) = + n : w : (TE.decodeUtf8 . LBS.toStrict $ J.encode p) : (pure $ formatTime' t) toTupleExp = TupleExp . map SELit uncurrySchedule (n, w, st, p) = - ScheduledTrigger {stName = n, stWebhook = w, stSchedule = fromBS st, stPayload = Q.getAltJ <$> p} + ScheduledTrigger + { stName = n, + stWebhook = w, + stSchedule = fromBS st, + stPayload = Q.getAltJ <$> p + } fromBS st = fromMaybe (OneOff endOfTime) $ J.decodeStrict' st mkScheduledEvents :: UTCTime -> ScheduledTrigger-> [ScheduledEvent] mkScheduledEvents time ScheduledTrigger{..} = let events = case stSchedule of - OneOff _ -> [] -- one-off scheduled events need not be generated + OneOff schedTime -> [schedTime] -- one-off scheduled events need not be generated Cron cron -> generateScheduledEventsBetween time @@ -157,7 +172,7 @@ processScheduledEvent pgpool httpMgr se@ScheduledEvent{..} = do -- let decodedHeaders = map (decodeHeader logenv headerInfos) headers finally <- either (processError pgpool se) - (processSuccess pgpool se) (trace ("error is " ++ show res) res) + (processSuccess pgpool se) res either logQErr return finally tryWebhook :: @@ -188,46 +203,96 @@ tryWebhook httpMgr timeout payload webhook = do processError :: (MonadIO m) => Q.PGPool -> ScheduledEvent -> HTTPErr -> m (Either QErr ()) processError pgpool se err = do + let decodedHeaders = [] + invocation = case err of + HClient excp -> do + let errMsg = TBS.fromLBS $ J.encode $ show excp + mkInvo se 1000 
decodedHeaders errMsg [] + HParse _ detail -> do + let errMsg = TBS.fromLBS $ J.encode detail + mkInvo se 1001 decodedHeaders errMsg [] + HStatus errResp -> do + let respPayload = hrsBody errResp + respHeaders = hrsHeaders errResp + respStatus = hrsStatus errResp + mkInvo se respStatus decodedHeaders respPayload respHeaders + HOther detail -> do + let errMsg = (TBS.fromLBS $ J.encode detail) + mkInvo se 500 decodedHeaders errMsg [] liftIO $ runExceptT $ - Q.runTx - pgpool - (Q.RepeatableRead, Just Q.ReadWrite) + Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) $ do + insertInvocation invocation markError where markError = - Q.unitQE defaultTxErrorHandler - [Q.sql| - UPDATE hdb_catalog.hdb_scheduled_trigger_events + Q.unitQE + defaultTxErrorHandler + [Q.sql| + UPDATE hdb_catalog.hdb_scheduled_events SET error = 't', locked = 'f' WHERE id = $1 |] (Identity $ seId se) True processSuccess :: (MonadIO m) => Q.PGPool -> ScheduledEvent -> HTTPResp -> m (Either QErr ()) processSuccess pgpool se resp = do + let respBody = hrsBody resp + respHeaders = hrsHeaders resp + respStatus = hrsStatus resp + decodedHeaders = [] + invocation = mkInvo se respStatus decodedHeaders respBody respHeaders liftIO $ runExceptT $ - Q.runTx - pgpool - (Q.RepeatableRead, Just Q.ReadWrite) + Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) $ do + insertInvocation invocation markSuccess where markSuccess = - Q.unitQE defaultTxErrorHandler - [Q.sql| - UPDATE hdb_catalog.hdb_scheduled_trigger_events + Q.unitQE + defaultTxErrorHandler + [Q.sql| + UPDATE hdb_catalog.hdb_scheduled_events SET delivered = 't', locked = 'f' WHERE id = $1 |] (Identity $ seId se) True +mkInvo + :: ScheduledEvent -> Int -> [HeaderConf] -> TBS.TByteString -> [HeaderConf] + -> Invocation +mkInvo se status reqHeaders respBody respHeaders + = let resp = if isClientError status + then mkClientErr respBody + else mkResp status respBody respHeaders + in + Invocation + (fromMaybe "unknown" $ seId se) -- WARN: should never 
happen? + status + (mkWebhookReq (J.toJSON se) reqHeaders invocationVersion) + resp + +insertInvocation :: Invocation -> Q.TxE QErr () +insertInvocation invo = do + Q.unitQE defaultTxErrorHandler [Q.sql| + INSERT INTO hdb_catalog.hdb_scheduled_event_invocation_logs (event_id, status, request, response) + VALUES ($1, $2, $3, $4) + |] ( iEventId invo + , toInt64 $ iStatus invo + , Q.AltJ $ J.toJSON $ iRequest invo + , Q.AltJ $ J.toJSON $ iResponse invo) True + Q.unitQE defaultTxErrorHandler [Q.sql| + UPDATE hdb_catalog.hdb_scheduled_events + SET tries = tries + 1 + WHERE id = $1 + |] (Identity $ iEventId invo) True + getScheduledEvents :: Q.TxE QErr [ScheduledEvent] getScheduledEvents = do currentTime <- liftIO getCurrentTime allSchedules <- map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| - UPDATE hdb_catalog.hdb_scheduled_trigger_events + UPDATE hdb_catalog.hdb_scheduled_events SET locked = 't' WHERE name IN ( SELECT t.name - FROM hdb_catalog.hdb_scheduled_trigger_events t + FROM hdb_catalog.hdb_scheduled_events t WHERE ( t.locked = 'f' and t.delivered = 'f' and t.error = 'f' diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index c779fe89937c1..cf8896dd48fc3 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -671,7 +671,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger payload JSON ); -CREATE TABLE hdb_catalog.hdb_scheduled_trigger_events +CREATE TABLE hdb_catalog.hdb_scheduled_events ( id TEXT DEFAULT gen_random_uuid() UNIQUE, name TEXT, @@ -687,3 +687,15 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger_events PRIMARY KEY (name, scheduled_time), FOREIGN KEY (name) REFERENCES hdb_catalog.hdb_scheduled_trigger(name) ON UPDATE CASCADE ); + +CREATE TABLE hdb_catalog.hdb_scheduled_event_invocation_logs +( + id TEXT DEFAULT gen_random_uuid() PRIMARY KEY, + event_id TEXT, + status INTEGER, + request JSON, + response JSON, + created_at TIMESTAMP DEFAULT NOW(), + + FOREIGN KEY (event_id) REFERENCES 
hdb_catalog.hdb_scheduled_events (id) +); From a004ddc09dcaa2b1fabdc1cceafab9c5bfb45095 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 18 Dec 2019 14:51:36 +0530 Subject: [PATCH 008/195] change application currentTime to db now() --- server/src-lib/Hasura/Eventing/ScheduledTrigger.hs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 2b97036fc8f29..952a09821d4c3 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -287,7 +287,6 @@ insertInvocation invo = do getScheduledEvents :: Q.TxE QErr [ScheduledEvent] getScheduledEvents = do - currentTime <- liftIO getCurrentTime allSchedules <- map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.hdb_scheduled_events SET locked = 't' @@ -296,12 +295,12 @@ getScheduledEvents = do WHERE ( t.locked = 'f' and t.delivered = 'f' and t.error = 'f' - and t.scheduled_time <= $1 + and t.scheduled_time <= now() ) FOR UPDATE SKIP LOCKED ) RETURNING id, name, webhook, payload, scheduled_time - |] (Identity currentTime) True + |] () True pure $ allSchedules where uncurryEvent (i, n, w, Q.AltJ p, st) = ScheduledEvent From 0b2f12e654fc78bcb878dde6c3b9dff0563b2ce2 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 18 Dec 2019 15:39:01 +0530 Subject: [PATCH 009/195] on delete cascade --- server/src-rsr/initialise.sql | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index cf8896dd48fc3..f72786cd4a309 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -685,7 +685,8 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events locked BOOLEAN NOT NULL DEFAULT FALSE, PRIMARY KEY (name, scheduled_time), - FOREIGN KEY (name) REFERENCES hdb_catalog.hdb_scheduled_trigger(name) ON UPDATE CASCADE + FOREIGN 
KEY (name) REFERENCES hdb_catalog.hdb_scheduled_trigger(name) + ON UPDATE CASCADE ON DELETE CASCADE ); CREATE TABLE hdb_catalog.hdb_scheduled_event_invocation_logs @@ -697,5 +698,5 @@ CREATE TABLE hdb_catalog.hdb_scheduled_event_invocation_logs response JSON, created_at TIMESTAMP DEFAULT NOW(), - FOREIGN KEY (event_id) REFERENCES hdb_catalog.hdb_scheduled_events (id) + FOREIGN KEY (event_id) REFERENCES hdb_catalog.hdb_scheduled_events (id) ON DELETE CASCADE ); From 7386d8de5e54f449318159d89584df012482620a Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 18 Dec 2019 16:24:55 +0530 Subject: [PATCH 010/195] change time formatting --- .../src-lib/Hasura/Eventing/ScheduledTrigger.hs | 5 ----- server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs | 15 +++++++++------ 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 952a09821d4c3..9b23f0b99adee 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -9,7 +9,6 @@ import Control.Concurrent (threadDelay) import Control.Exception (try) import Data.Has import Data.Time.Clock -import Data.Time.Format import Hasura.Eventing.HTTP import Hasura.Prelude import Hasura.RQL.DDL.Headers @@ -310,7 +309,3 @@ getScheduledEvents = do , sePayload = p , seScheduledTime = st } - --- RFC822 -formatTime' :: UTCTime -> T.Text -formatTime' = T.pack . formatTime defaultTimeLocale "%a, %d %b %Y %H:%M:%S %z" diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 523392602e366..4a8d055cc8e4b 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -6,12 +6,14 @@ module Hasura.RQL.DDL.ScheduledTrigger , runCreateScheduledTrigger , ScheduleType(..) , ScheduledTrigger(..) 
+ , formatTime' ) where import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH import Data.Time.Clock +import Data.Time.Format import Hasura.EncJSON import Hasura.Prelude import Hasura.RQL.DDL.Schema.Cache (CacheBuildM) @@ -26,6 +28,11 @@ import qualified Data.Aeson as J import qualified Data.Text as T import qualified Database.PG.Query as Q +-- aeson doesn't decode 'UTC' identifier so explicitly provide 'Z' +-- TODO: take proper timezone +formatTime' :: UTCTime -> T.Text +formatTime'= T.pack . formatTime defaultTimeLocale "%Y-%m-%d %H:%M:%S Z" + data ScheduleType = OneOff UTCTime | Cron CronSchedule deriving (Show, Eq) @@ -74,8 +81,8 @@ instance FromJSON ScheduledTriggerQuery where either fail pure $ eitherDecode' (J.encode stqScheduleUnstrict) stqSchedule <- case scheduleType of - OneOff utcTime -> pure $ UnstrictOneOff (T.pack $ show utcTime) - Cron cron -> pure $ UnstrictCron(serializeCronSchedule cron) + OneOff utcTime -> pure $ UnstrictOneOff (formatTime' utcTime) + Cron cron -> pure $ UnstrictCron (serializeCronSchedule cron) pure ScheduledTriggerQuery {..} $(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ScheduledTriggerQuery) @@ -89,7 +96,3 @@ runCreateScheduledTrigger ScheduledTriggerQuery{..} = do VALUES ($1, $2, $3, $4) |] (stqName, stqWebhook, Q.AltJ $ toJSON stqSchedule, Q.AltJ <$> stqPayload) False return successMsg - where - toTxt = \case - UnstrictOneOff utcTime -> utcTime - UnstrictCron cron -> cron From dc0fd1e862cbf6f70cba05b03e549183321412bd Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 18 Dec 2019 16:57:47 +0530 Subject: [PATCH 011/195] bugfix: lock on id and not name --- server/src-lib/Hasura/Eventing/ScheduledTrigger.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 9b23f0b99adee..42e6d2f78ef87 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ 
b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -289,7 +289,7 @@ getScheduledEvents = do allSchedules <- map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.hdb_scheduled_events SET locked = 't' - WHERE name IN ( SELECT t.name + WHERE id IN ( SELECT t.id FROM hdb_catalog.hdb_scheduled_events t WHERE ( t.locked = 'f' and t.delivered = 'f' From 7f4acc119f0e5ef6058160228f1eef135a5f370f Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 18 Dec 2019 19:55:53 +0530 Subject: [PATCH 012/195] run scheduler every 10 s --- server/src-lib/Hasura/Eventing/ScheduledTrigger.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 42e6d2f78ef87..6e29cb28fb1e7 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -79,7 +79,7 @@ runScheduledEventsGenerator pgpool = do generateScheduledEvents) >>= \case Right _ -> pure () Left err -> traceShowM err - threadDelay oneMinute + threadDelay (10 * oneSecond) generateScheduledEvents :: Q.TxE QErr () generateScheduledEvents = do From b8e8c07a8571d99a720fd4dca0e241df19eb42d4 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Mon, 23 Dec 2019 12:13:40 +0530 Subject: [PATCH 013/195] add retry conf --- server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 4a8d055cc8e4b..2164903cfdcf9 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -14,16 +14,18 @@ import Data.Aeson.Casing import Data.Aeson.TH import Data.Time.Clock import Data.Time.Format +import Hasura.Db import Hasura.EncJSON import Hasura.Prelude import Hasura.RQL.DDL.Schema.Cache (CacheBuildM) 
-import Hasura.RQL.Types +import Hasura.RQL.DDL.Utils import Hasura.RQL.Types.Common (NonEmptyText) import Instances.TH.Lift () import Language.Haskell.TH.Syntax as TH import System.Cron.Parser import System.Cron.Types + import qualified Data.Aeson as J import qualified Data.Text as T import qualified Database.PG.Query as Q @@ -33,6 +35,16 @@ import qualified Database.PG.Query as Q formatTime' :: UTCTime -> T.Text formatTime'= T.pack . formatTime defaultTimeLocale "%Y-%m-%d %H:%M:%S Z" +data RetryConf + = RetryConf + { rcNumRetries :: !Int + , rcIntervalSec :: !Int + , rcTimeoutSec :: !(Maybe Int) + , rcTolerance :: !DiffTime + } deriving (Show, Eq) + +$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''RetryConf) + data ScheduleType = OneOff UTCTime | Cron CronSchedule deriving (Show, Eq) From 36ba5c55131a53f1f984b93cac0daaa9bf755929 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 24 Dec 2019 14:51:03 +0530 Subject: [PATCH 014/195] derive Lift using custom cron package, also add retry_conf to metadata --- .../Hasura/Eventing/ScheduledTrigger.hs | 13 +-- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 92 +++++++++---------- server/src-lib/Hasura/Server/Query.hs | 2 +- server/src-rsr/initialise.sql | 3 +- server/stack.yaml | 2 + server/stack.yaml.lock | 14 +++ 6 files changed, 68 insertions(+), 58 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 6e29cb28fb1e7..1ce3c8be793a1 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -85,7 +85,7 @@ generateScheduledEvents :: Q.TxE QErr () generateScheduledEvents = do allSchedules <- map uncurrySchedule <$> Q.listQE defaultTxErrorHandler [Q.sql| - SELECT st.name, st.webhook, st.schedule, st.payload + SELECT st.name, st.webhook, st.schedule, st.payload, st.retry_conf FROM hdb_catalog.hdb_scheduled_trigger st |] () False currentTime <- liftIO 
getCurrentTime @@ -106,12 +106,13 @@ generateScheduledEvents = do toArr (ScheduledEvent _ n w p t) = n : w : (TE.decodeUtf8 . LBS.toStrict $ J.encode p) : (pure $ formatTime' t) toTupleExp = TupleExp . map SELit - uncurrySchedule (n, w, st, p) = + uncurrySchedule (n, w, st, p, Q.AltJ rc) = ScheduledTrigger - { stName = n, - stWebhook = w, - stSchedule = fromBS st, - stPayload = Q.getAltJ <$> p + { stName = n + , stWebhook = w + , stSchedule = fromBS st + , stPayload = Q.getAltJ <$> p + , stRetryConf = rc } fromBS st = fromMaybe (OneOff endOfTime) $ J.decodeStrict' st diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 2164903cfdcf9..de8dc129d5860 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -2,8 +2,7 @@ {-# OPTIONS_GHC -fno-warn-orphans #-} module Hasura.RQL.DDL.ScheduledTrigger - ( ScheduledTriggerQuery(..) - , runCreateScheduledTrigger + ( runCreateScheduledTrigger , ScheduleType(..) , ScheduledTrigger(..) , formatTime' @@ -18,14 +17,11 @@ import Hasura.Db import Hasura.EncJSON import Hasura.Prelude import Hasura.RQL.DDL.Schema.Cache (CacheBuildM) -import Hasura.RQL.DDL.Utils -import Hasura.RQL.Types.Common (NonEmptyText) -import Instances.TH.Lift () +import Hasura.RQL.Types (successMsg) import Language.Haskell.TH.Syntax as TH import System.Cron.Parser import System.Cron.Types - import qualified Data.Aeson as J import qualified Data.Text as T import qualified Database.PG.Query as Q @@ -35,43 +31,44 @@ import qualified Database.PG.Query as Q formatTime' :: UTCTime -> T.Text formatTime'= T.pack . 
formatTime defaultTimeLocale "%Y-%m-%d %H:%M:%S Z" +instance TH.Lift DiffTime where + lift x = [|picosecondsToDiffTime x'|] + where + x' = diffTimeToPicoseconds x + data RetryConf = RetryConf { rcNumRetries :: !Int , rcIntervalSec :: !Int - , rcTimeoutSec :: !(Maybe Int) + , rcTimeoutSec :: !Int , rcTolerance :: !DiffTime - } deriving (Show, Eq) + } deriving (Show, Eq, Lift) $(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''RetryConf) -data ScheduleType = OneOff UTCTime | Cron CronSchedule - deriving (Show, Eq) - -$(deriveJSON (defaultOptions){sumEncoding=TaggedObject "type" "value"} ''ScheduleType) - -data ScheduledTrigger - = ScheduledTrigger - { stName :: !T.Text - , stWebhook :: !T.Text - , stSchedule :: !ScheduleType - , stPayload :: !(Maybe J.Value) +defaultRetryConf :: RetryConf +defaultRetryConf = + RetryConf + { rcNumRetries = 1 + , rcIntervalSec = 10 + , rcTimeoutSec = 60 + , rcTolerance = fromInteger 21600 -- 6 hours } - deriving (Show, Eq) --- TODO :: Change stqSchedule to ScheduleType after writing TH.Lift instances +instance TH.Lift UTCTime -data ScheduleTypeUnstrict = UnstrictOneOff T.Text | UnstrictCron T.Text +data ScheduleType = OneOff UTCTime | Cron CronSchedule deriving (Show, Eq, Lift) -$(deriveJSON (defaultOptions){constructorTagModifier = drop 8, sumEncoding=TaggedObject "type" "value"} ''ScheduleTypeUnstrict) +$(deriveJSON (defaultOptions){sumEncoding=TaggedObject "type" "value"} ''ScheduleType) -data ScheduledTriggerQuery - = ScheduledTriggerQuery - { stqName :: !NonEmptyText - , stqWebhook :: !NonEmptyText - , stqSchedule :: !ScheduleTypeUnstrict - , stqPayload :: !(Maybe J.Value) +data ScheduledTrigger + = ScheduledTrigger + { stName :: !T.Text + , stWebhook :: !T.Text + , stSchedule :: !ScheduleType + , stPayload :: !(Maybe J.Value) + , stRetryConf :: !RetryConf } deriving (Show, Eq, Lift) @@ -82,29 +79,24 @@ instance FromJSON CronSchedule where instance ToJSON CronSchedule where toJSON = J.String . 
serializeCronSchedule -instance FromJSON ScheduledTriggerQuery where +instance FromJSON ScheduledTrigger where parseJSON = - withObject "ScheduledTriggerQuery" $ \o -> do - stqName <- o .: "name" - stqWebhook <- o .: "webhook" - stqPayload <- o .:? "payload" - stqScheduleUnstrict :: ScheduleTypeUnstrict <- o .: "schedule" - scheduleType :: ScheduleType <- - either fail pure $ eitherDecode' (J.encode stqScheduleUnstrict) - stqSchedule <- - case scheduleType of - OneOff utcTime -> pure $ UnstrictOneOff (formatTime' utcTime) - Cron cron -> pure $ UnstrictCron (serializeCronSchedule cron) - pure ScheduledTriggerQuery {..} - -$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ScheduledTriggerQuery) - -runCreateScheduledTrigger :: CacheBuildM m => ScheduledTriggerQuery -> m EncJSON -runCreateScheduledTrigger ScheduledTriggerQuery{..} = do + withObject "ScheduledTriggerQuery " $ \o -> do + stName <- o .: "name" + stWebhook <- o .: "webhook" + stPayload <- o .:? "payload" + stSchedule <- o .: "schedule" + stRetryConf <- o .:? 
"retry_conf" .!= defaultRetryConf + pure ScheduledTrigger {..} + +$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ScheduledTrigger) + +runCreateScheduledTrigger :: CacheBuildM m => ScheduledTrigger -> m EncJSON +runCreateScheduledTrigger ScheduledTrigger {..} = do liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql| INSERT into hdb_catalog.hdb_scheduled_trigger - (name, webhook, schedule, payload) - VALUES ($1, $2, $3, $4) - |] (stqName, stqWebhook, Q.AltJ $ toJSON stqSchedule, Q.AltJ <$> stqPayload) False + (name, webhook, schedule, payload, retry_conf) + VALUES ($1, $2, $3, $4, $5) + |] (stName, stWebhook, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False return successMsg diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 6e50f3fb0c413..8078901ad87f7 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -87,7 +87,7 @@ data RQLQueryV1 | RQRedeliverEvent !RedeliverEventQuery | RQInvokeEventTrigger !InvokeEventTriggerQuery - | RQCreateScheduledTrigger !ScheduledTriggerQuery + | RQCreateScheduledTrigger !ScheduledTrigger -- query collections, allow list related | RQCreateQueryCollection !CreateCollection diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index f72786cd4a309..e2ef041bd9cf6 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -668,7 +668,8 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger name TEXT PRIMARY KEY, webhook TEXT NOT NULL, schedule JSON NOT NULL, - payload JSON + payload JSON, + retry_conf JSON ); CREATE TABLE hdb_catalog.hdb_scheduled_events diff --git a/server/stack.yaml b/server/stack.yaml index 9b149ca658dbc..3e8457242aab5 100644 --- a/server/stack.yaml +++ b/server/stack.yaml @@ -22,6 +22,8 @@ extra-deps: commit: f3d9b645efd9adb143e2ad4c6b73bded1578a4e9 - git: https://github.com/hasura/ci-info-hs.git commit: ad6df731584dc89b72a6e131687d37ef01714fe8 +- git: 
https://github.com/hasura/cron.git + commit: cc35408f4a05575d8305f5c45bdfeeb31dd3ea5b # extra dep for pg-client-hs - select-0.4.0.1 diff --git a/server/stack.yaml.lock b/server/stack.yaml.lock index 5659b204a4761..d6c24dc194abd 100644 --- a/server/stack.yaml.lock +++ b/server/stack.yaml.lock @@ -46,6 +46,20 @@ packages: original: git: https://github.com/hasura/ci-info-hs.git commit: ad6df731584dc89b72a6e131687d37ef01714fe8 +- completed: + cabal-file: + size: 3947 + sha256: 8c06d160d745978f2167aed7be788ff3d95b57c515e0de52cf2638c1d530004d + name: cron + version: 0.6.1 + git: https://github.com/hasura/cron.git + pantry-tree: + size: 1882 + sha256: 72d4503f9d73c71f50d273b30598853cf5b0a359a3ca62e991e09e70a76a2274 + commit: cc35408f4a05575d8305f5c45bdfeeb31dd3ea5b + original: + git: https://github.com/hasura/cron.git + commit: cc35408f4a05575d8305f5c45bdfeeb31dd3ea5b - completed: hackage: select-0.4.0.1@sha256:d409315752a069693bdd4169fa9a8ea7777d814da77cd8604f367cf0741de295,2492 pantry-tree: From 46f29625ccd8f22d1e92bc4c3980ff2a2cc6252b Mon Sep 17 00:00:00 2001 From: Alexis King Date: Tue, 24 Dec 2019 12:16:06 -0600 Subject: [PATCH 015/195] Re-enable -Werror in CI --- server/Makefile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/server/Makefile b/server/Makefile index 0abd9a88939e4..55d54f4f7dd88 100644 --- a/server/Makefile +++ b/server/Makefile @@ -55,8 +55,7 @@ release-image: $(project).cabal ci-binary: mkdir -p packaging/build/rootfs # --no-terminal for a cleaner output in circleci - # stack $(STACK_FLAGS) build --no-terminal --test --no-run-tests --ghc-options=-Werror $(BUILD_FLAGS) - stack $(STACK_FLAGS) build --no-terminal --test --no-run-tests $(BUILD_FLAGS) + stack $(STACK_FLAGS) build --no-terminal --test --no-run-tests --ghc-options=-Werror $(BUILD_FLAGS) mkdir -p $(build_output) cp $(build_dir)/$(project)/$(project) $(build_dir)/graphql-engine-tests/graphql-engine-tests $(build_output) echo "$(VERSION)" > $(build_output)/version.txt From 
a79083be9cd64dbcf23e6ac08db6589d919381a1 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 25 Dec 2019 16:44:26 +0530 Subject: [PATCH 016/195] bump lts, upgrade time, refactors, stub functions --- server/graphql-engine.cabal | 8 +- server/src-lib/Hasura/App.hs | 5 +- .../src-lib/Hasura/Eventing/EventTrigger.hs | 29 ----- server/src-lib/Hasura/Eventing/HTTP.hs | 37 +++++- .../Hasura/Eventing/ScheduledTrigger.hs | 123 ++++++++++-------- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 88 +------------ server/src-lib/Hasura/RQL/Types.hs | 43 +++--- server/src-lib/Hasura/RQL/Types/Helpers.hs | 8 ++ .../Hasura/RQL/Types/ScheduledTrigger.hs | 93 +++++++++++++ .../src-lib/Hasura/RQL/Types/SchemaCache.hs | 20 ++- server/src-rsr/initialise.sql | 1 + server/stack.yaml | 10 +- server/stack.yaml.lock | 53 ++++++-- 13 files changed, 304 insertions(+), 214 deletions(-) create mode 100644 server/src-lib/Hasura/RQL/Types/Helpers.hs create mode 100644 server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index 166afec5beeb7..50b4db2fc0002 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -42,8 +42,8 @@ common common-all DeriveGeneric DeriveLift DeriveTraversable EmptyCase FlexibleContexts FlexibleInstances FunctionalDependencies GeneralizedNewtypeDeriving InstanceSigs LambdaCase MultiParamTypeClasses MultiWayIf NoImplicitPrelude OverloadedStrings QuantifiedConstraints QuasiQuotes RankNTypes - ScopedTypeVariables StandaloneDeriving TemplateHaskell TupleSections TypeApplications - TypeFamilies TypeOperators + RecordWildCards ScopedTypeVariables StandaloneDeriving TemplateHaskell TupleSections + TypeApplications TypeFamilies TypeOperators common common-exe ghc-options: @@ -74,7 +74,7 @@ library , http-types , attoparsec , attoparsec-iso8601 >= 1.0 - , time + , time >= 1.9.1 , scientific , Spock-core , split @@ -240,6 +240,8 @@ library , Hasura.RQL.Types.Permission , 
Hasura.RQL.Types.QueryCollection , Hasura.RQL.Types.RemoteSchema + , Hasura.RQL.Types.Helpers + , Hasura.RQL.Types.ScheduledTrigger , Hasura.RQL.DDL.ComputedField , Hasura.RQL.DDL.Relationship , Hasura.RQL.DDL.Deps diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index 0bc1050fbcb32..3cbff62b2b9e3 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -254,8 +254,9 @@ runHGEServer ServeOptions{..} InitCtx{..} initTime = do void $ liftIO $ C.forkIO $ processEventQueue logger logEnvHeaders _icHttpManager _icPgPool scRef eventEngineCtx - void $ liftIO $ C.forkIO $ runScheduledEventsGenerator _icPgPool -- logger logEnvHeaders - void $ liftIO $ C.forkIO $ processScheduledQueue logger _icPgPool _icHttpManager -- logger logEnvHeaders + void $ liftIO $ C.forkIO $ runScheduledEventsGenerator logger _icPgPool scRef + void $ liftIO $ C.forkIO $ processScheduledQueue logger _icPgPool _icHttpManager scRef -- logEnvHeaders + -- start a background thread to check for updates void $ liftIO $ C.forkIO $ checkForUpdates loggerCtx _icHttpManager diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index d4d3c04785c4b..6ccb596dde59b 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -26,14 +26,10 @@ import Hasura.RQL.Types import Hasura.SQL.Types import qualified Control.Concurrent.STM.TQueue as TQ -import qualified Data.ByteString as BS import qualified Data.CaseInsensitive as CI import qualified Data.HashMap.Strict as M import qualified Data.TByteString as TBS import qualified Data.Text as T -import qualified Data.Text.Encoding as T -import qualified Data.Text.Encoding as TE -import qualified Data.Text.Encoding.Error as TE import qualified Data.Time.Clock as Time import qualified Database.PG.Query as Q import qualified Hasura.Logging as L @@ -44,8 +40,6 @@ import qualified Network.HTTP.Types as HTTP 
invocationVersion :: Version invocationVersion = "2" -type LogEnvHeaders = Bool - newtype CacheRef = CacheRef { unCacheRef :: IORef (SchemaCache, SchemaCacheVer) } @@ -256,29 +250,6 @@ retryOrSetError e retryConf err = do Nothing -> Nothing Just sec -> if sec > 0 then Just sec else Nothing -encodeHeader :: EventHeaderInfo -> HTTP.Header -encodeHeader (EventHeaderInfo hconf cache) = - let (HeaderConf name _) = hconf - ciname = CI.mk $ T.encodeUtf8 name - value = T.encodeUtf8 cache - in (ciname, value) - -decodeHeader - :: LogEnvHeaders -> [EventHeaderInfo] -> (HTTP.HeaderName, BS.ByteString) - -> HeaderConf -decodeHeader logenv headerInfos (hdrName, hdrVal) - = let name = decodeBS $ CI.original hdrName - getName ehi = let (HeaderConf name' _) = ehiHeaderConf ehi - in name' - mehi = find (\hi -> getName hi == name) headerInfos - in case mehi of - Nothing -> HeaderConf name (HVValue (decodeBS hdrVal)) - Just ehi -> if logenv - then HeaderConf name (HVValue (ehiCachedValue ehi)) - else ehiHeaderConf ehi - where - decodeBS = TE.decodeUtf8With TE.lenientDecode - mkInvo :: EventPayload -> Int -> [HeaderConf] -> TBS.TByteString -> [HeaderConf] -> Invocation diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index 610deec91e4df..d0815da6d2933 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -22,12 +22,16 @@ module Hasura.Eventing.HTTP , mkWebhookReq , mkResp , toInt64 + , LogEnvHeaders + , encodeHeader + , decodeHeader ) where import qualified Data.Aeson as J import qualified Data.Aeson.Casing as J import qualified Data.Aeson.TH as J -import qualified Data.ByteString.Lazy as B +import qualified Data.ByteString as BS +import qualified Data.ByteString.Lazy as LBS import qualified Data.CaseInsensitive as CI import qualified Data.TByteString as TBS import qualified Data.Text as T @@ -50,6 +54,8 @@ import Hasura.RQL.DDL.Headers import Hasura.RQL.Types.Error (QErr) import 
Hasura.RQL.Types.EventTrigger +type LogEnvHeaders = Bool + data WebhookRequest = WebhookRequest { _rqPayload :: J.Value @@ -111,7 +117,7 @@ $(J.deriveToJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''HTTPRes instance ToEngineLog HTTPResp Hasura where toEngineLog resp = (LevelInfo, eventTriggerLogType, J.toJSON resp) -mkHTTPResp :: HTTP.Response B.ByteString -> HTTPResp +mkHTTPResp :: HTTP.Response LBS.ByteString -> HTTPResp mkHTTPResp resp = HTTPResp { hrsStatus = HTTP.statusCode $ HTTP.responseStatus resp @@ -171,7 +177,7 @@ isNetworkErrorHC = \case HTTP.HttpExceptionRequest _ HTTP.ResponseTimeout -> True _ -> False -anyBodyParser :: HTTP.Response B.ByteString -> Either HTTPErr HTTPResp +anyBodyParser :: HTTP.Response LBS.ByteString -> Either HTTPErr HTTPResp anyBodyParser resp = do let httpResp = mkHTTPResp resp if respCode >= HTTP.status200 && respCode < HTTP.status300 @@ -240,7 +246,7 @@ mkClientErr message = in ResponseType2 cerr mkWebhookReq :: J.Value -> [HeaderConf] -> Version -> WebhookRequest -mkWebhookReq payload headers version = WebhookRequest payload (mkMaybe headers) version +mkWebhookReq payload headers = WebhookRequest payload (mkMaybe headers) isClientError :: Int -> Bool isClientError status = status >= 1000 @@ -266,3 +272,26 @@ logHTTPErr err = do toInt64 :: (Integral a) => a -> Int64 toInt64 = fromIntegral + +encodeHeader :: EventHeaderInfo -> HTTP.Header +encodeHeader (EventHeaderInfo hconf cache) = + let (HeaderConf name _) = hconf + ciname = CI.mk $ TE.encodeUtf8 name + value = TE.encodeUtf8 cache + in (ciname, value) + +decodeHeader + :: LogEnvHeaders -> [EventHeaderInfo] -> (HTTP.HeaderName, BS.ByteString) + -> HeaderConf +decodeHeader logenv headerInfos (hdrName, hdrVal) + = let name = decodeBS $ CI.original hdrName + getName ehi = let (HeaderConf name' _) = ehiHeaderConf ehi + in name' + mehi = find (\hi -> getName hi == name) headerInfos + in case mehi of + Nothing -> HeaderConf name (HVValue (decodeBS hdrVal)) + Just ehi 
-> if logenv + then HeaderConf name (HVValue (ehiCachedValue ehi)) + else ehiHeaderConf ehi + where + decodeBS = TE.decodeUtf8With TE.lenientDecode diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 1ce3c8be793a1..77e2aebb3a637 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -1,5 +1,3 @@ -{-# LANGUAGE RecordWildCards #-} - module Hasura.Eventing.ScheduledTrigger ( processScheduledQueue , runScheduledEventsGenerator @@ -8,15 +6,17 @@ module Hasura.Eventing.ScheduledTrigger import Control.Concurrent (threadDelay) import Control.Exception (try) import Data.Has +import Data.IORef (IORef, readIORef) import Data.Time.Clock import Hasura.Eventing.HTTP import Hasura.Prelude import Hasura.RQL.DDL.Headers -import Hasura.RQL.DDL.ScheduledTrigger +import Hasura.RQL.Types.ScheduledTrigger import Hasura.RQL.Types import Hasura.SQL.DML import Hasura.SQL.Types import System.Cron +import Hasura.HTTP import qualified Data.Aeson as J import qualified Data.Aeson.Casing as J @@ -28,7 +28,9 @@ import qualified Data.Text.Encoding as TE import qualified Database.PG.Query as Q import qualified Hasura.Logging as L import qualified Network.HTTP.Client as HTTP +import qualified Network.HTTP.Types as HTTP import qualified Text.Builder as TB (run) +import qualified Data.HashMap.Strict as Map import Debug.Trace @@ -47,8 +49,6 @@ oneHour = 60 * oneMinute endOfTime :: UTCTime endOfTime = read "2999-12-31 00:00:00 Z" --- type LogEnvHeaders = Bool - type ScheduledEventPayload = J.Value scheduledEventsTable :: QualifiedTable @@ -60,7 +60,7 @@ scheduledEventsTable = data ScheduledEvent = ScheduledEvent { seId :: !(Maybe Text) - , seName :: !T.Text + , seName :: !TriggerName , seWebhook :: !T.Text , sePayload :: !J.Value , seScheduledTime :: !UTCTime @@ -68,28 +68,30 @@ data ScheduledEvent $(J.deriveToJSON (J.aesonDrop 2 
J.snakeCase){J.omitNothingFields=True} ''ScheduledEvent) -runScheduledEventsGenerator :: Q.PGPool -> IO () -runScheduledEventsGenerator pgpool = do +runScheduledEventsGenerator :: + L.Logger L.Hasura + -> Q.PGPool + -> IORef (SchemaCache, SchemaCacheVer) + -> IO () +runScheduledEventsGenerator logger pgpool scRef = do forever $ do traceM "entering scheduled events generator" + (sc, _) <- liftIO $ readIORef scRef + let scheduledTriggers = Map.elems $ scScheduledTriggers sc runExceptT (Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadWrite) - generateScheduledEvents) >>= \case + (insertScheduledEventsFor scheduledTriggers) ) >>= \case Right _ -> pure () - Left err -> traceShowM err + Left err -> + L.unLogger logger $ EventInternalErr $ err500 Unexpected (T.pack $ show err) threadDelay (10 * oneSecond) -generateScheduledEvents :: Q.TxE QErr () -generateScheduledEvents = do - allSchedules <- map uncurrySchedule <$> Q.listQE defaultTxErrorHandler - [Q.sql| - SELECT st.name, st.webhook, st.schedule, st.payload, st.retry_conf - FROM hdb_catalog.hdb_scheduled_trigger st - |] () False +insertScheduledEventsFor :: [ScheduledTriggerInfo] -> Q.TxE QErr () +insertScheduledEventsFor scheduledTriggers = do currentTime <- liftIO getCurrentTime - let scheduledEvents = concatMap (mkScheduledEvents currentTime) allSchedules + let scheduledEvents = concatMap (generateScheduledEventsFrom currentTime) scheduledTriggers case scheduledEvents of [] -> pure () events -> do @@ -104,29 +106,23 @@ generateScheduledEvents = do Q.unitQE defaultTxErrorHandler (Q.fromText insertScheduledEventsSql) () False where toArr (ScheduledEvent _ n w p t) = - n : w : (TE.decodeUtf8 . LBS.toStrict $ J.encode p) : (pure $ formatTime' t) + (triggerNameToTxt n) : w : (TE.decodeUtf8 . LBS.toStrict $ J.encode p) : (pure $ formatTime' t) toTupleExp = TupleExp . 
map SELit - uncurrySchedule (n, w, st, p, Q.AltJ rc) = - ScheduledTrigger - { stName = n - , stWebhook = w - , stSchedule = fromBS st - , stPayload = Q.getAltJ <$> p - , stRetryConf = rc - } - fromBS st = fromMaybe (OneOff endOfTime) $ J.decodeStrict' st -mkScheduledEvents :: UTCTime -> ScheduledTrigger-> [ScheduledEvent] -mkScheduledEvents time ScheduledTrigger{..} = +generateScheduledEventsFrom :: UTCTime -> ScheduledTriggerInfo-> [ScheduledEvent] +generateScheduledEventsFrom time ScheduledTriggerInfo{..} = let events = - case stSchedule of - OneOff schedTime -> [schedTime] -- one-off scheduled events need not be generated + case stiSchedule of + OneOff _ -> empty -- one-off scheduled events are generated during creation Cron cron -> generateScheduledEventsBetween time (addUTCTime nominalDay time) cron - in map (ScheduledEvent Nothing stName stWebhook (fromMaybe J.Null stPayload)) events + webhook = wciCachedValue stiWebhookInfo + in map + (ScheduledEvent Nothing stiName webhook (fromMaybe J.Null stiPayload)) + events -- generates events (from, till] according to CronSchedule generateScheduledEventsBetween :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] @@ -137,19 +133,28 @@ generateScheduledEventsBetween from till cron = takeWhile ((>=) till) $ go from Nothing -> [] Just next -> next : (go next) -processScheduledQueue :: L.Logger L.Hasura -> Q.PGPool -> HTTP.Manager -> IO () -processScheduledQueue logger pgpool httpMgr = +processScheduledQueue :: + L.Logger L.Hasura + -> Q.PGPool + -> HTTP.Manager + -> IORef (SchemaCache, SchemaCacheVer) + -> IO () +processScheduledQueue logger pgpool httpMgr scRef = forever $ do traceM "entering processor queue" + (sc, _) <- liftIO $ readIORef scRef + let scheduledTriggersInfo = scScheduledTriggers sc scheduledEventsE <- runExceptT $ Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadWrite) getScheduledEvents case scheduledEventsE of Right events -> sequence_ $ - map - (\ev -> runReaderT (processScheduledEvent pgpool httpMgr ev) 
(logger)) - events + flip map events $ \ev -> do + let st' = Map.lookup (seName ev) scheduledTriggersInfo + case st' of + Nothing -> traceM "ERROR: couldn't find scheduled trigger in cache" + Just st -> runReaderT (processScheduledEvent pgpool httpMgr st ev) logger Left err -> traceShowM err threadDelay (10 * oneSecond) @@ -157,23 +162,26 @@ processScheduledEvent :: (MonadReader r m, Has (L.Logger L.Hasura) r, MonadIO m) => Q.PGPool -> HTTP.Manager + -> ScheduledTriggerInfo -> ScheduledEvent -> m () -processScheduledEvent pgpool httpMgr se@ScheduledEvent{..} = do - -- let webhook = T.unpack $ wciCachedValue $ etiWebhookInfo eti - -- retryConf = etiRetryConf eti - let timeoutSeconds = 60 - responseTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) - -- headerInfos = etiHeaders eti - -- etHeaders = map encodeHeader headerInfos - -- headers = addDefaultHeaders etHeaders - -- ep = createEventPayload retryConf e - res <- runExceptT $ tryWebhook httpMgr responseTimeout sePayload seWebhook - -- let decodedHeaders = map (decodeHeader logenv headerInfos) headers - finally <- either - (processError pgpool se) - (processSuccess pgpool se) res - either logQErr return finally +processScheduledEvent pgpool httpMgr ScheduledTriggerInfo{..} se@ScheduledEvent{..} = do + currentTime <- liftIO getCurrentTime + if diffUTCTime currentTime seScheduledTime > rcstTolerance stiRetryConf + then undefined -- do nothing + else do + let webhook = wciCachedValue stiWebhookInfo + timeoutSeconds = 60 + responseTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) + headers = map encodeHeader stiHeaders + headers' = addDefaultHeaders headers + -- ep = createEventPayload retryConf e + res <- runExceptT $ tryWebhook httpMgr responseTimeout headers' sePayload webhook + -- let decodedHeaders = map (decodeHeader logenv headerInfos) headers + finally <- either + (processError pgpool se) + (processSuccess pgpool se) res + either logQErr return finally tryWebhook :: ( MonadReader r m 
@@ -183,10 +191,11 @@ tryWebhook :: ) => HTTP.Manager -> HTTP.ResponseTimeout + -> [HTTP.Header] -> ScheduledEventPayload -> T.Text -> m HTTPResp -tryWebhook httpMgr timeout payload webhook = do +tryWebhook httpMgr timeout headers payload webhook = do initReqE <- liftIO $ try $ HTTP.parseRequest (T.unpack webhook) case initReqE of Left excp -> throwError $ HClient excp @@ -194,7 +203,7 @@ tryWebhook httpMgr timeout payload webhook = do let req = initReq { HTTP.method = "POST" - -- , HTTP.requestHeaders = [] + , HTTP.requestHeaders = headers , HTTP.requestBody = HTTP.RequestBodyLBS (J.encode payload) , HTTP.responseTimeout = timeout } @@ -273,7 +282,8 @@ mkInvo se status reqHeaders respBody respHeaders insertInvocation :: Invocation -> Q.TxE QErr () insertInvocation invo = do Q.unitQE defaultTxErrorHandler [Q.sql| - INSERT INTO hdb_catalog.hdb_scheduled_event_invocation_logs (event_id, status, request, response) + INSERT INTO hdb_catalog.hdb_scheduled_event_invocation_logs + (event_id, status, request, response) VALUES ($1, $2, $3, $4) |] ( iEventId invo , toInt64 $ iStatus invo @@ -296,6 +306,7 @@ getScheduledEvents = do and t.delivered = 'f' and t.error = 'f' and t.scheduled_time <= now() + and t.dead = 'f' ) FOR UPDATE SKIP LOCKED ) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index de8dc129d5860..5e1674b10bfaf 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -1,95 +1,15 @@ -{-# LANGUAGE RecordWildCards #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} - module Hasura.RQL.DDL.ScheduledTrigger ( runCreateScheduledTrigger - , ScheduleType(..) - , ScheduledTrigger(..) 
- , formatTime' ) where -import Data.Aeson -import Data.Aeson.Casing -import Data.Aeson.TH -import Data.Time.Clock -import Data.Time.Format import Hasura.Db import Hasura.EncJSON import Hasura.Prelude -import Hasura.RQL.DDL.Schema.Cache (CacheBuildM) -import Hasura.RQL.Types (successMsg) -import Language.Haskell.TH.Syntax as TH -import System.Cron.Parser -import System.Cron.Types - -import qualified Data.Aeson as J -import qualified Data.Text as T -import qualified Database.PG.Query as Q - --- aeson doesn't decode 'UTC' identifier so explicitly provide 'Z' --- TODO: take proper timezone -formatTime' :: UTCTime -> T.Text -formatTime'= T.pack . formatTime defaultTimeLocale "%Y-%m-%d %H:%M:%S Z" - -instance TH.Lift DiffTime where - lift x = [|picosecondsToDiffTime x'|] - where - x' = diffTimeToPicoseconds x - -data RetryConf - = RetryConf - { rcNumRetries :: !Int - , rcIntervalSec :: !Int - , rcTimeoutSec :: !Int - , rcTolerance :: !DiffTime - } deriving (Show, Eq, Lift) - -$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''RetryConf) - -defaultRetryConf :: RetryConf -defaultRetryConf = - RetryConf - { rcNumRetries = 1 - , rcIntervalSec = 10 - , rcTimeoutSec = 60 - , rcTolerance = fromInteger 21600 -- 6 hours - } - -instance TH.Lift UTCTime - -data ScheduleType = OneOff UTCTime | Cron CronSchedule - deriving (Show, Eq, Lift) - -$(deriveJSON (defaultOptions){sumEncoding=TaggedObject "type" "value"} ''ScheduleType) - -data ScheduledTrigger - = ScheduledTrigger - { stName :: !T.Text - , stWebhook :: !T.Text - , stSchedule :: !ScheduleType - , stPayload :: !(Maybe J.Value) - , stRetryConf :: !RetryConf - } - deriving (Show, Eq, Lift) - -instance FromJSON CronSchedule where - parseJSON = withText "CronSchedule" $ \t -> - either fail pure $ parseCronSchedule t - -instance ToJSON CronSchedule where - toJSON = J.String . 
serializeCronSchedule - -instance FromJSON ScheduledTrigger where - parseJSON = - withObject "ScheduledTriggerQuery " $ \o -> do - stName <- o .: "name" - stWebhook <- o .: "webhook" - stPayload <- o .:? "payload" - stSchedule <- o .: "schedule" - stRetryConf <- o .:? "retry_conf" .!= defaultRetryConf - pure ScheduledTrigger {..} +import Hasura.RQL.DDL.Schema.Cache (CacheBuildM) +import Hasura.RQL.Types.Helpers +import Hasura.RQL.Types.ScheduledTrigger -$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ScheduledTrigger) +import qualified Database.PG.Query as Q runCreateScheduledTrigger :: CacheBuildM m => ScheduledTrigger -> m EncJSON runCreateScheduledTrigger ScheduledTrigger {..} = do diff --git a/server/src-lib/Hasura/RQL/Types.hs b/server/src-lib/Hasura/RQL/Types.hs index ef8927e3a39b4..92865fff6b426 100644 --- a/server/src-lib/Hasura/RQL/Types.hs +++ b/server/src-lib/Hasura/RQL/Types.hs @@ -5,7 +5,6 @@ module Hasura.RQL.Types , MonadTx(..) , UserInfoM(..) - , successMsg , HasHttpManager (..) , HasGCtxMap (..) 
@@ -43,29 +42,28 @@ module Hasura.RQL.Types , module R ) where -import Hasura.EncJSON +import Hasura.Db as R import Hasura.Prelude +import Hasura.RQL.Types.BoolExp as R +import Hasura.RQL.Types.Column as R +import Hasura.RQL.Types.Common as R +import Hasura.RQL.Types.ComputedField as R +import Hasura.RQL.Types.DML as R +import Hasura.RQL.Types.Error as R +import Hasura.RQL.Types.EventTrigger as R +import Hasura.RQL.Types.Function as R +import Hasura.RQL.Types.Helpers as R +import Hasura.RQL.Types.Metadata as R +import Hasura.RQL.Types.Permission as R +import Hasura.RQL.Types.RemoteSchema as R +import Hasura.RQL.Types.ScheduledTrigger as R +import Hasura.RQL.Types.SchemaCache as R import Hasura.SQL.Types -import Hasura.Db as R -import Hasura.RQL.Types.BoolExp as R -import Hasura.RQL.Types.Column as R -import Hasura.RQL.Types.Common as R -import Hasura.RQL.Types.ComputedField as R -import Hasura.RQL.Types.DML as R -import Hasura.RQL.Types.Error as R -import Hasura.RQL.Types.EventTrigger as R -import Hasura.RQL.Types.Function as R -import Hasura.RQL.Types.Metadata as R -import Hasura.RQL.Types.Permission as R -import Hasura.RQL.Types.RemoteSchema as R -import Hasura.RQL.Types.SchemaCache as R - -import qualified Hasura.GraphQL.Context as GC - -import qualified Data.HashMap.Strict as M -import qualified Data.Text as T -import qualified Network.HTTP.Client as HTTP +import qualified Data.HashMap.Strict as M +import qualified Data.Text as T +import qualified Hasura.GraphQL.Context as GC +import qualified Network.HTTP.Client as HTTP getFieldInfoMap :: QualifiedTable @@ -323,7 +321,4 @@ adminOnly = do where errMsg = "restricted access : admin only" -successMsg :: EncJSON -successMsg = "{\"message\":\"success\"}" - type HeaderObj = M.HashMap T.Text T.Text diff --git a/server/src-lib/Hasura/RQL/Types/Helpers.hs b/server/src-lib/Hasura/RQL/Types/Helpers.hs new file mode 100644 index 0000000000000..e3dd28010a460 --- /dev/null +++ b/server/src-lib/Hasura/RQL/Types/Helpers.hs @@ 
-0,0 +1,8 @@ +module Hasura.RQL.Types.Helpers where + +import Hasura.EncJSON + +successMsg :: EncJSON +successMsg = "{\"message\":\"success\"}" + + diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs new file mode 100644 index 0000000000000..60ff95cd5c98f --- /dev/null +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -0,0 +1,93 @@ +{-# OPTIONS_GHC -fno-warn-orphans #-} + +module Hasura.RQL.Types.ScheduledTrigger + ( ScheduleType(..) + , ScheduledTrigger(..) + , RetryConfST(..) + , formatTime' + ) where + +import Data.Time.Clock +import Data.Time.Format +import Data.Aeson +import Data.Aeson.Casing +import Data.Aeson.TH +import Data.Fixed +import Hasura.Prelude +import Language.Haskell.TH.Syntax as TH +import System.Cron.Types +import Hasura.RQL.Types.EventTrigger (TriggerName) +import System.Cron.Parser + +import qualified Data.Text as T +import qualified Data.Aeson as J + +instance TH.Lift (Fixed E12) where + lift x = [| MkFixed x' |] + where + x' = resolution x + +instance TH.Lift NominalDiffTime where + lift x = [| secondsToNominalDiffTime x'|] + where + x' = nominalDiffTimeToSeconds x + +instance TH.Lift UTCTime + +data RetryConfST + = RetryConfST + { rcstNumRetries :: !Int + , rcstIntervalSec :: !Int + , rcstTimeoutSec :: !Int + , rcstTolerance :: !NominalDiffTime + } deriving (Show, Eq, Lift) + +$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''RetryConfST) + +defaultRetryConf :: RetryConfST +defaultRetryConf = + RetryConfST + { rcstNumRetries = 1 + , rcstIntervalSec = 10 + , rcstTimeoutSec = 60 + , rcstTolerance = 21600 -- 6 hours + } + +data ScheduleType = OneOff UTCTime | Cron CronSchedule + deriving (Show, Eq, Lift) + +$(deriveJSON defaultOptions{sumEncoding=TaggedObject "type" "value"} ''ScheduleType) + +data ScheduledTrigger + = ScheduledTrigger + { stName :: !TriggerName + , stWebhook :: !T.Text + , stSchedule :: !ScheduleType + , stPayload :: !(Maybe J.Value) + 
, stRetryConf :: !RetryConfST + } + deriving (Show, Eq, Lift) + +instance FromJSON CronSchedule where + parseJSON = withText "CronSchedule" $ \t -> + either fail pure $ parseCronSchedule t + +instance ToJSON CronSchedule where + toJSON = J.String . serializeCronSchedule + +instance FromJSON ScheduledTrigger where + parseJSON = + withObject "ScheduledTriggerQuery " $ \o -> do + stName <- o .: "name" + stWebhook <- o .: "webhook" + stPayload <- o .:? "payload" + stSchedule <- o .: "schedule" + stRetryConf <- o .:? "retry_conf" .!= defaultRetryConf + pure ScheduledTrigger {..} + +$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ScheduledTrigger) + +-- aeson doesn't decode 'UTC' identifier so explicitly provide 'Z' +-- TODO: take proper timezone +formatTime' :: UTCTime -> T.Text +formatTime'= T.pack . formatTime defaultTimeLocale "%Y-%m-%d %H:%M:%S Z" diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index 28af86224ef6c..c6ff5a2c03693 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -117,6 +117,8 @@ module Hasura.RQL.Types.SchemaCache , updateFunctionDescription , replaceAllowlist + + , ScheduledTriggerInfo(..) 
) where import qualified Hasura.GraphQL.Context as GC @@ -133,6 +135,7 @@ import Hasura.RQL.Types.Metadata import Hasura.RQL.Types.Permission import Hasura.RQL.Types.QueryCollection import Hasura.RQL.Types.RemoteSchema +import Hasura.RQL.Types.ScheduledTrigger import Hasura.RQL.Types.SchemaCacheTypes import Hasura.SQL.Types @@ -277,6 +280,20 @@ $(deriveToJSON (aesonDrop 3 snakeCase) ''EventTriggerInfo) type EventTriggerInfoMap = M.HashMap TriggerName EventTriggerInfo +data ScheduledTriggerInfo + = ScheduledTriggerInfo + { stiName :: !TriggerName + , stiSchedule :: !ScheduleType + , stiPayload :: !(Maybe Value) + , stiRetryConf :: !RetryConfST + , stiWebhookInfo :: !WebhookConfInfo + , stiHeaders :: ![EventHeaderInfo] + } deriving (Show, Eq) + +$(deriveToJSON (aesonDrop 3 snakeCase) ''ScheduledTriggerInfo) + +type ScheduledTriggerInfoMap = M.HashMap TriggerName ScheduledTriggerInfo + data ConstraintType = CTCHECK | CTFOREIGNKEY @@ -446,6 +463,7 @@ data SchemaCache , scDefaultRemoteGCtx :: !GC.GCtx , scDepMap :: !DepMap , scInconsistentObjs :: ![InconsistentMetadataObj] + , scScheduledTriggers :: !ScheduledTriggerInfoMap } deriving (Show, Eq) $(deriveToJSON (aesonDrop 2 snakeCase) ''SchemaCache) @@ -481,7 +499,7 @@ instance (Monad m) => CacheRWM (StateT SchemaCache m) where emptySchemaCache :: SchemaCache emptySchemaCache = SchemaCache M.empty M.empty M.empty - HS.empty M.empty GC.emptyGCtx mempty [] + HS.empty M.empty GC.emptyGCtx mempty [] M.empty modTableCache :: (CacheRWM m) => TableCache PGColumnInfo -> m () modTableCache tc = do diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index e2ef041bd9cf6..3fa4c6ce05a5d 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -684,6 +684,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events tries INTEGER NOT NULL DEFAULT 0, created_at TIMESTAMP DEFAULT NOW(), locked BOOLEAN NOT NULL DEFAULT FALSE, + dead BOOLEAN NOT NULL DEFAULT FALSE PRIMARY KEY (name, scheduled_time), 
FOREIGN KEY (name) REFERENCES hdb_catalog.hdb_scheduled_trigger(name) diff --git a/server/stack.yaml b/server/stack.yaml index 3e8457242aab5..c86d64692650b 100644 --- a/server/stack.yaml +++ b/server/stack.yaml @@ -2,7 +2,7 @@ # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) # resolver: lts-10.8 -resolver: lts-13.20 +resolver: lts-14.18 # Local packages, usually specified by relative directory name packages: - '.' @@ -23,7 +23,7 @@ extra-deps: - git: https://github.com/hasura/ci-info-hs.git commit: ad6df731584dc89b72a6e131687d37ef01714fe8 - git: https://github.com/hasura/cron.git - commit: cc35408f4a05575d8305f5c45bdfeeb31dd3ea5b + commit: e73cab157f2d47f80fe25b77c32212248884d483 # extra dep for pg-client-hs - select-0.4.0.1 @@ -38,6 +38,12 @@ extra-deps: - shakespeare-2.0.22 - brotli-0.0.0.0 +- aeson-1.4.6.0 +- time-1.9.3 + +- directory-1.3.5.0@sha256:a7d0f5e79a6bb31a75dfceb9656d2de119de9f91076ec18ad30e4496d0385cfa,2782 +- process-1.6.7.0@sha256:305bcf44c42a96425e77af1748183f505a701648f68cc299d5ad8ac1b866a6a1,2468 +- unix-2.7.2.2@sha256:e69269a17b9fa26cb77f3f55e86c39d0a4940ccfa0c4bc20826919d2572076ad,3496 # Override default flag values for local packages and extra-deps flags: {} diff --git a/server/stack.yaml.lock b/server/stack.yaml.lock index d6c24dc194abd..aa8776527d376 100644 --- a/server/stack.yaml.lock +++ b/server/stack.yaml.lock @@ -48,18 +48,18 @@ packages: commit: ad6df731584dc89b72a6e131687d37ef01714fe8 - completed: cabal-file: - size: 3947 - sha256: 8c06d160d745978f2167aed7be788ff3d95b57c515e0de52cf2638c1d530004d + size: 3949 + sha256: 17c43e706339cd5ef8631d83a7f2211ed84e0df9b91b01b6b0d73367d5350aec name: cron version: 0.6.1 git: https://github.com/hasura/cron.git pantry-tree: size: 1882 - sha256: 72d4503f9d73c71f50d273b30598853cf5b0a359a3ca62e991e09e70a76a2274 - commit: cc35408f4a05575d8305f5c45bdfeeb31dd3ea5b + sha256: 6e3dec01d3608c85a4ba7325c706bea845dbcb68e4320f2e649f7d187c17e6f3 + commit: 
e73cab157f2d47f80fe25b77c32212248884d483 original: git: https://github.com/hasura/cron.git - commit: cc35408f4a05575d8305f5c45bdfeeb31dd3ea5b + commit: e73cab157f2d47f80fe25b77c32212248884d483 - completed: hackage: select-0.4.0.1@sha256:d409315752a069693bdd4169fa9a8ea7777d814da77cd8604f367cf0741de295,2492 pantry-tree: @@ -123,9 +123,44 @@ packages: sha256: 5ad2a6b25c8145f58c43b10f1b60a1e1b5bf57f0e33e4a203f9671b361fb03c0 original: hackage: brotli-0.0.0.0 +- completed: + hackage: aeson-1.4.6.0@sha256:560575b008a23960403a128331f0e59594786b5cd19a35be0cd74b9a7257958e,6980 + pantry-tree: + size: 40193 + sha256: 5769473440ae594ae8679dde9fe12b6d00a49264a9dd8962a53ff3ae5740d7a5 + original: + hackage: aeson-1.4.6.0 +- completed: + hackage: time-1.9.3@sha256:8f1b5448722a12a952248b356c9eb366e351226543d9086a2da71270522d5f45,5679 + pantry-tree: + size: 6558 + sha256: a1043c1719491764f0fa37a1fd70d9451080548a41632fee88d8e1b8db4942d6 + original: + hackage: time-1.9.3 +- completed: + hackage: directory-1.3.5.0@sha256:a7d0f5e79a6bb31a75dfceb9656d2de119de9f91076ec18ad30e4496d0385cfa,2782 + pantry-tree: + size: 3365 + sha256: 13abe2f1a1f82059563af1ee076f23a6067d402f61834c738bb38eb606b847e8 + original: + hackage: directory-1.3.5.0@sha256:a7d0f5e79a6bb31a75dfceb9656d2de119de9f91076ec18ad30e4496d0385cfa,2782 +- completed: + hackage: process-1.6.7.0@sha256:305bcf44c42a96425e77af1748183f505a701648f68cc299d5ad8ac1b866a6a1,2468 + pantry-tree: + size: 1211 + sha256: 49c3e531d2473fe455c1cde655f074a320fa4ec8569d650262bf382f9c5796fb + original: + hackage: process-1.6.7.0@sha256:305bcf44c42a96425e77af1748183f505a701648f68cc299d5ad8ac1b866a6a1,2468 +- completed: + hackage: unix-2.7.2.2@sha256:e69269a17b9fa26cb77f3f55e86c39d0a4940ccfa0c4bc20826919d2572076ad,3496 + pantry-tree: + size: 3536 + sha256: c355f7924ce67e5bf8f20767462af18f09b8c0d1f7161117221cbb94c15deee3 + original: + hackage: unix-2.7.2.2@sha256:e69269a17b9fa26cb77f3f55e86c39d0a4940ccfa0c4bc20826919d2572076ad,3496 snapshots: - completed: - 
size: 498167 - url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/13/20.yaml - sha256: cda928d57b257a5f17bcad796843c9daa674fef47d600dbea3aa7b0e49d64a11 - original: lts-13.20 + size: 524789 + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/14/18.yaml + sha256: 646be71223e08234131c6989912e6011e01b9767bc447b6d466a35e14360bdf2 + original: lts-14.18 From 322ee8b256e429789dd8209f4094e74d4161b4eb Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 25 Dec 2019 18:03:07 +0530 Subject: [PATCH 017/195] process dead event --- .../Hasura/Eventing/ScheduledTrigger.hs | 49 ++++++++++++------- 1 file changed, 32 insertions(+), 17 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 77e2aebb3a637..11752cfd17c1d 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -46,9 +46,6 @@ oneMinute = 60 * oneSecond oneHour :: Int oneHour = 60 * oneMinute -endOfTime :: UTCTime -endOfTime = read "2999-12-31 00:00:00 Z" - type ScheduledEventPayload = J.Value scheduledEventsTable :: QualifiedTable @@ -165,23 +162,27 @@ processScheduledEvent :: -> ScheduledTriggerInfo -> ScheduledEvent -> m () -processScheduledEvent pgpool httpMgr ScheduledTriggerInfo{..} se@ScheduledEvent{..} = do +processScheduledEvent pgpool httpMgr ScheduledTriggerInfo {..} se@ScheduledEvent {..} = do currentTime <- liftIO getCurrentTime if diffUTCTime currentTime seScheduledTime > rcstTolerance stiRetryConf - then undefined -- do nothing - else do - let webhook = wciCachedValue stiWebhookInfo - timeoutSeconds = 60 - responseTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) - headers = map encodeHeader stiHeaders - headers' = addDefaultHeaders headers - -- ep = createEventPayload retryConf e - res <- runExceptT $ tryWebhook httpMgr responseTimeout headers' sePayload webhook + then 
processDead' + else do + let webhook = wciCachedValue stiWebhookInfo + timeoutSeconds = rcstTimeoutSec stiRetryConf + responseTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) + headers = map encodeHeader stiHeaders + headers' = addDefaultHeaders headers + res <- + runExceptT $ + tryWebhook httpMgr responseTimeout headers' sePayload webhook -- let decodedHeaders = map (decodeHeader logenv headerInfos) headers - finally <- either - (processError pgpool se) - (processSuccess pgpool se) res - either logQErr return finally + finally <- either (processError pgpool se) (processSuccess pgpool se) res + either logQErr return finally + where + processDead' = + processDead pgpool se >>= \case + Left err -> logQErr err + Right _ -> pure () tryWebhook :: ( MonadReader r m @@ -265,6 +266,20 @@ processSuccess pgpool se resp = do WHERE id = $1 |] (Identity $ seId se) True +processDead :: (MonadIO m) => Q.PGPool -> ScheduledEvent -> m (Either QErr ()) +processDead pgpool se = + liftIO $ + runExceptT $ Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) markDead + where + markDead = + Q.unitQE + defaultTxErrorHandler + [Q.sql| + UPDATE hdb_catalog.hdb_scheduled_events + SET dead = 't', locked = 'f' + WHERE id = $1 + |] (Identity $ seId se) False + mkInvo :: ScheduledEvent -> Int -> [HeaderConf] -> TBS.TByteString -> [HeaderConf] -> Invocation From 981578b597b97aedb4d5040643b9b7ca21410485 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 25 Dec 2019 18:03:33 +0530 Subject: [PATCH 018/195] add scheduled trigger to cache --- server/src-lib/Hasura/RQL/DDL/EventTrigger.hs | 2 + .../Hasura/RQL/DDL/ScheduledTrigger.hs | 49 ++++++++++++++++--- .../Hasura/RQL/Types/ScheduledTrigger.hs | 14 +++--- .../src-lib/Hasura/RQL/Types/SchemaCache.hs | 7 +++ server/src-lib/Hasura/Server/Query.hs | 2 +- 5 files changed, 58 insertions(+), 16 deletions(-) diff --git a/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs b/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs index 
3d1d2377f4699..d0393a8451a9f 100644 --- a/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs @@ -14,6 +14,8 @@ module Hasura.RQL.DDL.EventTrigger , mkAllTriggersQ , getEventTriggerDef , updateEventTriggerDef + , getWebhookInfoFromConf + , getHeaderInfosFromConf ) where import Data.Aeson diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 5e1674b10bfaf..e8801a01fc288 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -6,17 +6,50 @@ import Hasura.Db import Hasura.EncJSON import Hasura.Prelude import Hasura.RQL.DDL.Schema.Cache (CacheBuildM) +import Hasura.RQL.DDL.EventTrigger ( getWebhookInfoFromConf + , getHeaderInfosFromConf) import Hasura.RQL.Types.Helpers +import Hasura.RQL.Types.EventTrigger import Hasura.RQL.Types.ScheduledTrigger +import Hasura.RQL.Types.SchemaCache ( addScheduledTriggerToCache + , ScheduledTriggerInfo(..)) import qualified Database.PG.Query as Q -runCreateScheduledTrigger :: CacheBuildM m => ScheduledTrigger -> m EncJSON -runCreateScheduledTrigger ScheduledTrigger {..} = do - liftTx $ Q.unitQE defaultTxErrorHandler - [Q.sql| - INSERT into hdb_catalog.hdb_scheduled_trigger - (name, webhook, schedule, payload, retry_conf) - VALUES ($1, $2, $3, $4, $5) - |] (stName, stWebhook, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False +runCreateScheduledTrigger :: CacheBuildM m => CreateScheduledTrigger -> m EncJSON +runCreateScheduledTrigger q = do + sti <- addScheduledTriggerSetup q + addScheduledTriggerToCatalog q + addScheduledTriggerToCache sti return successMsg + +addScheduledTriggerToCatalog :: CacheBuildM m => CreateScheduledTrigger -> m () +addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ + Q.unitQE defaultTxErrorHandler + [Q.sql| + INSERT into hdb_catalog.hdb_scheduled_trigger + (name, webhook, schedule, payload, retry_conf) 
+ VALUES ($1, $2, $3, $4, $5) + |] (stName, stWebhook, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False + +addScheduledTriggerSetup :: + (CacheBuildM m) => CreateScheduledTrigger -> m ScheduledTriggerInfo +addScheduledTriggerSetup CreateScheduledTrigger {..} = do + webhookConf <- return $ WCValue stWebhook + -- case (webhook, webhookFromEnv) of + -- (Just w, Nothing) -> return $ WCValue w + -- (Nothing, Just wEnv) -> return $ WCEnv wEnv + -- _ -> throw500 "expected webhook or webhook_from_env" + -- let headerConfs = fromMaybe [] mheaders + let headerConfs = [] + webhookInfo <- getWebhookInfoFromConf webhookConf + headerInfo <- getHeaderInfosFromConf headerConfs + let stInfo = + ScheduledTriggerInfo + stName + stSchedule + stPayload + stRetryConf + webhookInfo + headerInfo + pure stInfo diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 60ff95cd5c98f..b0f8501094b3f 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -2,7 +2,7 @@ module Hasura.RQL.Types.ScheduledTrigger ( ScheduleType(..) - , ScheduledTrigger(..) + , CreateScheduledTrigger(..) , RetryConfST(..) , formatTime' ) where @@ -58,8 +58,8 @@ data ScheduleType = OneOff UTCTime | Cron CronSchedule $(deriveJSON defaultOptions{sumEncoding=TaggedObject "type" "value"} ''ScheduleType) -data ScheduledTrigger - = ScheduledTrigger +data CreateScheduledTrigger + = CreateScheduledTrigger { stName :: !TriggerName , stWebhook :: !T.Text , stSchedule :: !ScheduleType @@ -75,17 +75,17 @@ instance FromJSON CronSchedule where instance ToJSON CronSchedule where toJSON = J.String . 
serializeCronSchedule -instance FromJSON ScheduledTrigger where +instance FromJSON CreateScheduledTrigger where parseJSON = - withObject "ScheduledTriggerQuery " $ \o -> do + withObject "CreateScheduledTrigger" $ \o -> do stName <- o .: "name" stWebhook <- o .: "webhook" stPayload <- o .:? "payload" stSchedule <- o .: "schedule" stRetryConf <- o .:? "retry_conf" .!= defaultRetryConf - pure ScheduledTrigger {..} + pure CreateScheduledTrigger {..} -$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ScheduledTrigger) +$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''CreateScheduledTrigger) -- aeson doesn't decode 'UTC' identifier so explicitly provide 'Z' -- TODO: take proper timezone diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index c6ff5a2c03693..10ddca3ab6ee2 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -119,6 +119,7 @@ module Hasura.RQL.Types.SchemaCache , replaceAllowlist , ScheduledTriggerInfo(..) 
+ , addScheduledTriggerToCache ) where import qualified Hasura.GraphQL.Context as GC @@ -686,6 +687,12 @@ delEventTriggerFromCache qt trn = do return $ ti { _tiEventTriggerInfoMap = M.delete trn etim } schObjId = SOTableObj qt $ TOTrigger trn +addScheduledTriggerToCache :: (QErrM m, CacheRWM m) => ScheduledTriggerInfo -> m () +addScheduledTriggerToCache stInfo = do + sc <- askSchemaCache + let scScheduledTriggers' = M.insert (stiName stInfo) stInfo $ scScheduledTriggers sc + writeSchemaCache $ sc {scScheduledTriggers = scScheduledTriggers'} + addFunctionToCache :: (QErrM m, CacheRWM m) => FunctionInfo -> [SchemaDependency] -> m () diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 8078901ad87f7..202433f9c5ee4 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -87,7 +87,7 @@ data RQLQueryV1 | RQRedeliverEvent !RedeliverEventQuery | RQInvokeEventTrigger !InvokeEventTriggerQuery - | RQCreateScheduledTrigger !ScheduledTrigger + | RQCreateScheduledTrigger !CreateScheduledTrigger -- query collections, allow list related | RQCreateQueryCollection !CreateCollection From 96acd3acaa97dd9416d1f04685048b470dc9ab86 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Mon, 6 Jan 2020 19:31:12 +0530 Subject: [PATCH 019/195] take webhook from env and use schema-cache more effectively --- .../Hasura/Eventing/ScheduledTrigger.hs | 108 +++++++++++------- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 18 +-- .../src-lib/Hasura/RQL/Types/EventTrigger.hs | 5 +- .../Hasura/RQL/Types/ScheduledTrigger.hs | 27 +++-- server/src-rsr/initialise.sql | 4 +- 5 files changed, 91 insertions(+), 71 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 11752cfd17c1d..10eceb5addb5e 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -1,3 +1,6 @@ +{-# LANGUAGE 
DataKinds #-} +{-# LANGUAGE PolyKinds #-} + module Hasura.Eventing.ScheduledTrigger ( processScheduledQueue , runScheduledEventsGenerator @@ -21,10 +24,8 @@ import Hasura.HTTP import qualified Data.Aeson as J import qualified Data.Aeson.Casing as J import qualified Data.Aeson.TH as J -import qualified Data.ByteString.Lazy as LBS import qualified Data.TByteString as TBS import qualified Data.Text as T -import qualified Data.Text.Encoding as TE import qualified Database.PG.Query as Q import qualified Hasura.Logging as L import qualified Network.HTTP.Client as HTTP @@ -54,14 +55,41 @@ scheduledEventsTable = hdbCatalogSchema (TableName $ T.pack "hdb_scheduled_events") -data ScheduledEvent +data ScheduledEventSeed + = ScheduledEventSeed + { sesName :: !TriggerName + , sesScheduledTime :: !UTCTime + } deriving (Show, Eq) + + +-- ScheduledEvents can be "partial" or "full" +-- Partial represents the event as present in db +-- Full represents the partial event combined with schema cache configuration elements + +data SE_P = SE_PARTIAL | SE_FULL + +type family Param (p :: k) x + +data ScheduledEvent (p :: SE_P) = ScheduledEvent - { seId :: !(Maybe Text) + { seId :: !Text , seName :: !TriggerName - , seWebhook :: !T.Text - , sePayload :: !J.Value , seScheduledTime :: !UTCTime - } deriving (Show, Eq) + , seWebhook :: !(Param p T.Text) + , sePayload :: !(Param p J.Value) + } + +deriving instance Show (ScheduledEvent 'SE_PARTIAL) +deriving instance Show (ScheduledEvent 'SE_FULL) + +type instance Param 'SE_PARTIAL a = () +type instance Param 'SE_FULL a = a + +instance J.ToJSON (Param 'SE_PARTIAL a) where + toJSON _ = J.Null + +instance (J.ToJSON a) => J.ToJSON (Param 'SE_FULL a) where + toJSON _ = toJSON a $(J.deriveToJSON (J.aesonDrop 2 J.snakeCase){J.omitNothingFields=True} ''ScheduledEvent) @@ -95,35 +123,31 @@ insertScheduledEventsFor scheduledTriggers = do let insertScheduledEventsSql = TB.run $ toSQL SQLInsert { siTable = scheduledEventsTable - , siCols = map (PGCol . 
T.pack) ["name", "webhook", "payload", "scheduled_time"] + , siCols = map (PGCol . T.pack) ["name", "scheduled_time"] , siValues = ValuesExp $ map (toTupleExp . toArr) events , siConflict = Just $ DoNothing Nothing , siRet = Nothing } Q.unitQE defaultTxErrorHandler (Q.fromText insertScheduledEventsSql) () False where - toArr (ScheduledEvent _ n w p t) = - (triggerNameToTxt n) : w : (TE.decodeUtf8 . LBS.toStrict $ J.encode p) : (pure $ formatTime' t) + toArr (ScheduledEventSeed n t) = [(triggerNameToTxt n), (formatTime' t)] toTupleExp = TupleExp . map SELit -generateScheduledEventsFrom :: UTCTime -> ScheduledTriggerInfo-> [ScheduledEvent] +generateScheduledEventsFrom :: UTCTime -> ScheduledTriggerInfo-> [ScheduledEventSeed] generateScheduledEventsFrom time ScheduledTriggerInfo{..} = let events = case stiSchedule of OneOff _ -> empty -- one-off scheduled events are generated during creation Cron cron -> - generateScheduledEventsBetween + generateSchedulesBetween time (addUTCTime nominalDay time) cron - webhook = wciCachedValue stiWebhookInfo - in map - (ScheduledEvent Nothing stiName webhook (fromMaybe J.Null stiPayload)) - events + in map (ScheduledEventSeed stiName) events -- generates events (from, till] according to CronSchedule -generateScheduledEventsBetween :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] -generateScheduledEventsBetween from till cron = takeWhile ((>=) till) $ go from +generateSchedulesBetween :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] +generateSchedulesBetween from till cron = takeWhile ((>=) till) $ go from where go init = case nextMatch cron init of @@ -145,13 +169,17 @@ processScheduledQueue logger pgpool httpMgr scRef = runExceptT $ Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadWrite) getScheduledEvents case scheduledEventsE of - Right events -> + Right partialEvents -> sequence_ $ - flip map events $ \ev -> do - let st' = Map.lookup (seName ev) scheduledTriggersInfo - case st' of + flip map partialEvents $ \(ScheduledEvent id' 
name st _ _) -> do + let sti' = Map.lookup name scheduledTriggersInfo + case sti' of Nothing -> traceM "ERROR: couldn't find scheduled trigger in cache" - Just st -> runReaderT (processScheduledEvent pgpool httpMgr st ev) logger + Just sti -> do + let webhook = wciCachedValue $ stiWebhookInfo sti + payload = fromMaybe J.Null $ stiPayload sti + se = ScheduledEvent id' name st webhook payload + runReaderT (processScheduledEvent pgpool httpMgr sti se) logger Left err -> traceShowM err threadDelay (10 * oneSecond) @@ -160,21 +188,20 @@ processScheduledEvent :: => Q.PGPool -> HTTP.Manager -> ScheduledTriggerInfo - -> ScheduledEvent + -> ScheduledEvent 'SE_FULL -> m () processScheduledEvent pgpool httpMgr ScheduledTriggerInfo {..} se@ScheduledEvent {..} = do currentTime <- liftIO getCurrentTime if diffUTCTime currentTime seScheduledTime > rcstTolerance stiRetryConf then processDead' else do - let webhook = wciCachedValue stiWebhookInfo - timeoutSeconds = rcstTimeoutSec stiRetryConf + let timeoutSeconds = rcstTimeoutSec stiRetryConf responseTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) headers = map encodeHeader stiHeaders headers' = addDefaultHeaders headers res <- runExceptT $ - tryWebhook httpMgr responseTimeout headers' sePayload webhook + tryWebhook httpMgr responseTimeout headers' sePayload seWebhook -- let decodedHeaders = map (decodeHeader logenv headerInfos) headers finally <- either (processError pgpool se) (processSuccess pgpool se) res either logQErr return finally @@ -211,7 +238,7 @@ tryWebhook httpMgr timeout headers payload webhook = do eitherResp <- runHTTP httpMgr req Nothing onLeft eitherResp throwError -processError :: (MonadIO m) => Q.PGPool -> ScheduledEvent -> HTTPErr -> m (Either QErr ()) +processError :: (MonadIO m) => Q.PGPool -> ScheduledEvent 'SE_FULL -> HTTPErr -> m (Either QErr ()) processError pgpool se err = do let decodedHeaders = [] invocation = case err of @@ -244,7 +271,7 @@ processError pgpool se err = do WHERE id = $1 
|] (Identity $ seId se) True -processSuccess :: (MonadIO m) => Q.PGPool -> ScheduledEvent -> HTTPResp -> m (Either QErr ()) +processSuccess :: (MonadIO m) => Q.PGPool -> ScheduledEvent 'SE_FULL -> HTTPResp -> m (Either QErr ()) processSuccess pgpool se resp = do let respBody = hrsBody resp respHeaders = hrsHeaders resp @@ -266,7 +293,7 @@ processSuccess pgpool se resp = do WHERE id = $1 |] (Identity $ seId se) True -processDead :: (MonadIO m) => Q.PGPool -> ScheduledEvent -> m (Either QErr ()) +processDead :: (MonadIO m) => Q.PGPool -> ScheduledEvent 'SE_FULL -> m (Either QErr ()) processDead pgpool se = liftIO $ runExceptT $ Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) markDead @@ -281,7 +308,7 @@ processDead pgpool se = |] (Identity $ seId se) False mkInvo - :: ScheduledEvent -> Int -> [HeaderConf] -> TBS.TByteString -> [HeaderConf] + :: ScheduledEvent 'SE_FULL -> Int -> [HeaderConf] -> TBS.TByteString -> [HeaderConf] -> Invocation mkInvo se status reqHeaders respBody respHeaders = let resp = if isClientError status @@ -289,7 +316,7 @@ mkInvo se status reqHeaders respBody respHeaders else mkResp status respBody respHeaders in Invocation - (fromMaybe "unknown" $ seId se) -- WARN: should never happen? 
+ (seId se) status (mkWebhookReq (J.toJSON se) reqHeaders invocationVersion) resp @@ -310,9 +337,9 @@ insertInvocation invo = do WHERE id = $1 |] (Identity $ iEventId invo) True -getScheduledEvents :: Q.TxE QErr [ScheduledEvent] +getScheduledEvents :: Q.TxE QErr [ScheduledEvent 'SE_PARTIAL] getScheduledEvents = do - allSchedules <- map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| + partialSchedules <- map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.hdb_scheduled_events SET locked = 't' WHERE id IN ( SELECT t.id @@ -325,14 +352,7 @@ getScheduledEvents = do ) FOR UPDATE SKIP LOCKED ) - RETURNING id, name, webhook, payload, scheduled_time + RETURNING id, name, scheduled_time |] () True - pure $ allSchedules - where uncurryEvent (i, n, w, Q.AltJ p, st) = - ScheduledEvent - { seId = i - , seName = n - , seWebhook = w - , sePayload = p - , seScheduledTime = st - } + pure $ partialSchedules + where uncurryEvent (i, n, st) = ScheduledEvent i n st () () diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index e8801a01fc288..9e79957d13c74 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -9,12 +9,12 @@ import Hasura.RQL.DDL.Schema.Cache (CacheBuildM) import Hasura.RQL.DDL.EventTrigger ( getWebhookInfoFromConf , getHeaderInfosFromConf) import Hasura.RQL.Types.Helpers -import Hasura.RQL.Types.EventTrigger import Hasura.RQL.Types.ScheduledTrigger import Hasura.RQL.Types.SchemaCache ( addScheduledTriggerToCache , ScheduledTriggerInfo(..)) -import qualified Database.PG.Query as Q +import qualified Data.Aeson as J +import qualified Database.PG.Query as Q runCreateScheduledTrigger :: CacheBuildM m => CreateScheduledTrigger -> m EncJSON runCreateScheduledTrigger q = do @@ -28,21 +28,15 @@ addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql| INSERT into 
hdb_catalog.hdb_scheduled_trigger - (name, webhook, schedule, payload, retry_conf) + (name, webhook_conf, schedule, payload, retry_conf) VALUES ($1, $2, $3, $4, $5) - |] (stName, stWebhook, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False + |] (stName, Q.AltJ $ J.toJSON stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False addScheduledTriggerSetup :: (CacheBuildM m) => CreateScheduledTrigger -> m ScheduledTriggerInfo addScheduledTriggerSetup CreateScheduledTrigger {..} = do - webhookConf <- return $ WCValue stWebhook - -- case (webhook, webhookFromEnv) of - -- (Just w, Nothing) -> return $ WCValue w - -- (Nothing, Just wEnv) -> return $ WCEnv wEnv - -- _ -> throw500 "expected webhook or webhook_from_env" - -- let headerConfs = fromMaybe [] mheaders - let headerConfs = [] - webhookInfo <- getWebhookInfoFromConf webhookConf + let headerConfs = fromMaybe [] stHeaders + webhookInfo <- getWebhookInfoFromConf stWebhookConf headerInfo <- getHeaderInfosFromConf headerConfs let stInfo = ScheduledTriggerInfo diff --git a/server/src-lib/Hasura/RQL/Types/EventTrigger.hs b/server/src-lib/Hasura/RQL/Types/EventTrigger.hs index 8dfc4115a62a9..0bf6e4a8cb4ab 100644 --- a/server/src-lib/Hasura/RQL/Types/EventTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/EventTrigger.hs @@ -18,6 +18,7 @@ module Hasura.RQL.Types.EventTrigger , EventHeaderInfo(..) , WebhookConf(..) , WebhookConfInfo(..) + , HeaderConf(..) 
, defaultRetryConf , defaultTimeoutSeconds @@ -101,8 +102,8 @@ data WebhookConf = WCValue T.Text | WCEnv T.Text deriving (Show, Eq, Lift) instance ToJSON WebhookConf where - toJSON (WCValue w) = String w - toJSON (WCEnv wEnv) = String wEnv + toJSON (WCValue w) = object ["type" .= String "static", "value" .= w ] + toJSON (WCEnv wEnv) = object ["type" .= String "env", "value" .= wEnv ] data WebhookConfInfo = WebhookConfInfo diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index b0f8501094b3f..1dae7657ddf71 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -16,11 +16,11 @@ import Data.Fixed import Hasura.Prelude import Language.Haskell.TH.Syntax as TH import System.Cron.Types -import Hasura.RQL.Types.EventTrigger (TriggerName) import System.Cron.Parser -import qualified Data.Text as T -import qualified Data.Aeson as J +import qualified Data.Text as T +import qualified Data.Aeson as J +import qualified Hasura.RQL.Types.EventTrigger as ET instance TH.Lift (Fixed E12) where lift x = [| MkFixed x' |] @@ -60,13 +60,13 @@ $(deriveJSON defaultOptions{sumEncoding=TaggedObject "type" "value"} ''ScheduleT data CreateScheduledTrigger = CreateScheduledTrigger - { stName :: !TriggerName - , stWebhook :: !T.Text - , stSchedule :: !ScheduleType - , stPayload :: !(Maybe J.Value) - , stRetryConf :: !RetryConfST - } - deriving (Show, Eq, Lift) + { stName :: !ET.TriggerName + , stWebhookConf :: !ET.WebhookConf + , stSchedule :: !ScheduleType + , stPayload :: !(Maybe J.Value) + , stRetryConf :: !RetryConfST + , stHeaders :: !(Maybe [ET.HeaderConf]) + } deriving (Show, Eq, Lift) instance FromJSON CronSchedule where parseJSON = withText "CronSchedule" $ \t -> @@ -80,9 +80,16 @@ instance FromJSON CreateScheduledTrigger where withObject "CreateScheduledTrigger" $ \o -> do stName <- o .: "name" stWebhook <- o .: "webhook" + stWebhookFromEnv <- o .: 
"webhook_from_env" stPayload <- o .:? "payload" stSchedule <- o .: "schedule" stRetryConf <- o .:? "retry_conf" .!= defaultRetryConf + stHeaders <- o .:? "headers" + stWebhookConf <- case (stWebhook, stWebhookFromEnv) of + (Just value, Nothing) -> pure $ ET.WCValue value + (Nothing, Just env) -> pure $ ET.WCEnv env + (Just _, Just _) -> fail "only one of webhook or webhook_from_env should be given" + (Nothing, Nothing) -> fail "must provide webhook or webhook_from_env" pure CreateScheduledTrigger {..} $(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''CreateScheduledTrigger) diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index 3fa4c6ce05a5d..9f20499cd0324 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -666,7 +666,7 @@ CREATE VIEW hdb_catalog.hdb_computed_field_function AS CREATE TABLE hdb_catalog.hdb_scheduled_trigger ( name TEXT PRIMARY KEY, - webhook TEXT NOT NULL, + webhook_conf JSON NOT NULL, schedule JSON NOT NULL, payload JSON, retry_conf JSON @@ -676,9 +676,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events ( id TEXT DEFAULT gen_random_uuid() UNIQUE, name TEXT, - webhook TEXT NOT NULL, scheduled_time TIMESTAMP NOT NULL, - payload JSON, delivered BOOLEAN NOT NULL DEFAULT FALSE, error BOOLEAN NOT NULL DEFAULT FALSE, tries INTEGER NOT NULL DEFAULT 0, From 41782152370f9d951ed5cd6faa78914c3c2c56f7 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 7 Jan 2020 17:40:59 +0530 Subject: [PATCH 020/195] describe time formats in the API --- server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 1dae7657ddf71..b187e86f783bf 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -94,7 +94,13 @@ instance FromJSON CreateScheduledTrigger 
where $(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''CreateScheduledTrigger) --- aeson doesn't decode 'UTC' identifier so explicitly provide 'Z' --- TODO: take proper timezone +-- Supported time string formats for the API: +-- (see FromJSON for ZonedTime: https://hackage.haskell.org/package/aeson-1.4.6.0/docs/src/Data.Aeson.Types.FromJSON.html#line-2050) + +-- YYYY-MM-DD HH:MM Z YYYY-MM-DD HH:MM:SS Z YYYY-MM-DD HH:MM:SS.SSS Z + +-- The first space may instead be a T, and the second space is optional. The Z represents UTC. +-- The Z may be replaced with a time zone offset of the form +0000 or -08:00, +-- where the first two digits are hours, the : is optional and the second two digits (also optional) are minutes. formatTime' :: UTCTime -> T.Text formatTime'= T.pack . formatTime defaultTimeLocale "%Y-%m-%d %H:%M:%S Z" From 32626c54efed2725fdc44e2bb065e9a0832bfdb8 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 8 Jan 2020 12:39:26 +0530 Subject: [PATCH 021/195] fix WIP compilation errors --- .../Hasura/Eventing/ScheduledTrigger.hs | 18 ++++++++---------- .../Hasura/RQL/Types/ScheduledTrigger.hs | 4 ++-- server/src-lib/Hasura/Server/Query.hs | 2 +- server/src-rsr/initialise.sql | 2 +- 4 files changed, 12 insertions(+), 14 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 10eceb5addb5e..09ae8eeb1d0a9 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -1,5 +1,6 @@ {-# LANGUAGE DataKinds #-} {-# LANGUAGE PolyKinds #-} +{-# LANGUAGE UndecidableInstances #-} module Hasura.Eventing.ScheduledTrigger ( processScheduledQueue @@ -61,11 +62,9 @@ data ScheduledEventSeed , sesScheduledTime :: !UTCTime } deriving (Show, Eq) - --- ScheduledEvents can be "partial" or "full" +-- | ScheduledEvents can be "partial" or "full" -- Partial represents the event as present in db -- Full represents the 
partial event combined with schema cache configuration elements - data SE_P = SE_PARTIAL | SE_FULL type family Param (p :: k) x @@ -85,13 +84,12 @@ deriving instance Show (ScheduledEvent 'SE_FULL) type instance Param 'SE_PARTIAL a = () type instance Param 'SE_FULL a = a -instance J.ToJSON (Param 'SE_PARTIAL a) where - toJSON _ = J.Null - -instance (J.ToJSON a) => J.ToJSON (Param 'SE_FULL a) where - toJSON _ = toJSON a +-- empty splice to bring all the above definitions in scope +$(pure []) -$(J.deriveToJSON (J.aesonDrop 2 J.snakeCase){J.omitNothingFields=True} ''ScheduledEvent) +instance (J.ToJSON (Param p T.Text), J.ToJSON (Param p J.Value)) => + J.ToJSON (ScheduledEvent p) where + toJSON = $(J.mkToJSON (J.aesonDrop 2 J.snakeCase) {J.omitNothingFields = True} ''ScheduledEvent) runScheduledEventsGenerator :: L.Logger L.Hasura @@ -102,7 +100,7 @@ runScheduledEventsGenerator logger pgpool scRef = do forever $ do traceM "entering scheduled events generator" (sc, _) <- liftIO $ readIORef scRef - let scheduledTriggers = Map.elems $ scScheduledTriggers sc + let scheduledTriggers = traceShowId $ Map.elems $ scScheduledTriggers sc runExceptT (Q.runTx pgpool diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index b187e86f783bf..acf32727a567b 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -79,8 +79,8 @@ instance FromJSON CreateScheduledTrigger where parseJSON = withObject "CreateScheduledTrigger" $ \o -> do stName <- o .: "name" - stWebhook <- o .: "webhook" - stWebhookFromEnv <- o .: "webhook_from_env" + stWebhook <- o .:? "webhook" + stWebhookFromEnv <- o .:? "webhook_from_env" stPayload <- o .:? "payload" stSchedule <- o .: "schedule" stRetryConf <- o .:? 
"retry_conf" .!= defaultRetryConf diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 202433f9c5ee4..59980eb35091b 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -278,7 +278,7 @@ queryNeedsReload (RQV1 qi) = case qi of RQDeleteEventTrigger _ -> True RQRedeliverEvent _ -> False RQInvokeEventTrigger _ -> False - RQCreateScheduledTrigger _ -> False + RQCreateScheduledTrigger _ -> True RQCreateQueryCollection _ -> True RQDropQueryCollection _ -> True diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index 9f20499cd0324..bec2710d87bd0 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -682,7 +682,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events tries INTEGER NOT NULL DEFAULT 0, created_at TIMESTAMP DEFAULT NOW(), locked BOOLEAN NOT NULL DEFAULT FALSE, - dead BOOLEAN NOT NULL DEFAULT FALSE + dead BOOLEAN NOT NULL DEFAULT FALSE, PRIMARY KEY (name, scheduled_time), FOREIGN KEY (name) REFERENCES hdb_catalog.hdb_scheduled_trigger(name) From c011638bd3883a765029aea429968372657370a0 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 8 Jan 2020 15:02:49 +0530 Subject: [PATCH 022/195] add retry functionality --- .../src-lib/Hasura/Eventing/EventTrigger.hs | 24 +-------- server/src-lib/Hasura/Eventing/HTTP.hs | 30 +++++++++++ .../Hasura/Eventing/ScheduledTrigger.hs | 51 ++++++++++++++++--- server/src-rsr/initialise.sql | 1 + 4 files changed, 76 insertions(+), 30 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 6ccb596dde59b..008d1a12883d4 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -26,7 +26,6 @@ import Hasura.RQL.Types import Hasura.SQL.Types import qualified Control.Concurrent.STM.TQueue as TQ -import qualified Data.CaseInsensitive as CI import qualified 
Data.HashMap.Strict as M import qualified Data.TByteString as TBS import qualified Data.Text as T @@ -91,9 +90,6 @@ defaultMaxEventThreads = 100 defaultFetchIntervalMilliSec :: Int defaultFetchIntervalMilliSec = 1000 -retryAfterHeader :: CI.CI T.Text -retryAfterHeader = "Retry-After" - initEventEngineCtx :: Int -> Int -> STM EventEngineCtx initEventEngineCtx maxT fetchI = do q <- TQ.newTQueue @@ -218,9 +214,9 @@ processError pool e retryConf decodedHeaders ep err = do retryOrSetError :: Event -> RetryConf -> HTTPErr -> Q.TxE QErr () retryOrSetError e retryConf err = do - let mretryHeader = getRetryAfterHeaderFromError err + let mretryHeader = getRetryAfterHeaderFromHTTPErr err tries = eTries e - mretryHeaderSeconds = parseRetryHeader mretryHeader + mretryHeaderSeconds = join $ parseRetryHeaderValue <$> mretryHeader triesExhausted = tries >= rcNumRetries retryConf noRetryHeader = isNothing mretryHeaderSeconds -- current_try = tries + 1 , allowed_total_tries = rcNumRetries retryConf + 1 @@ -233,22 +229,6 @@ retryOrSetError e retryConf err = do diff = fromIntegral delay retryTime = addUTCTime diff currentTime setRetry e retryTime - where - getRetryAfterHeaderFromError (HStatus resp) = getRetryAfterHeaderFromResp resp - getRetryAfterHeaderFromError _ = Nothing - - getRetryAfterHeaderFromResp resp - = let mHeader = find (\(HeaderConf name _) - -> CI.mk name == retryAfterHeader) (hrsHeaders resp) - in case mHeader of - Just (HeaderConf _ (HVValue value)) -> Just value - _ -> Nothing - parseRetryHeader Nothing = Nothing - parseRetryHeader (Just hValue) - = let seconds = readMaybe $ T.unpack hValue - in case seconds of - Nothing -> Nothing - Just sec -> if sec > 0 then Just sec else Nothing mkInvo :: EventPayload -> Int -> [HeaderConf] -> TBS.TByteString -> [HeaderConf] diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index d0815da6d2933..c9938279529b0 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ 
b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -25,6 +25,9 @@ module Hasura.Eventing.HTTP , LogEnvHeaders , encodeHeader , decodeHeader + , getRetryAfterHeaderFromHTTPErr + , getRetryAfterHeaderFromResp + , parseRetryHeaderValue ) where import qualified Data.Aeson as J @@ -56,6 +59,9 @@ import Hasura.RQL.Types.EventTrigger type LogEnvHeaders = Bool +retryAfterHeader :: CI.CI T.Text +retryAfterHeader = "Retry-After" + data WebhookRequest = WebhookRequest { _rqPayload :: J.Value @@ -295,3 +301,27 @@ decodeHeader logenv headerInfos (hdrName, hdrVal) else ehiHeaderConf ehi where decodeBS = TE.decodeUtf8With TE.lenientDecode + +getRetryAfterHeaderFromHTTPErr :: HTTPErr -> Maybe Text +getRetryAfterHeaderFromHTTPErr (HStatus resp) = getRetryAfterHeaderFromResp resp +getRetryAfterHeaderFromHTTPErr _ = Nothing + +getRetryAfterHeaderFromResp :: HTTPResp -> Maybe Text +getRetryAfterHeaderFromResp resp = + let mHeader = + find + (\(HeaderConf name _) -> CI.mk name == retryAfterHeader) + (hrsHeaders resp) + in case mHeader of + Just (HeaderConf _ (HVValue value)) -> Just value + _ -> Nothing + +parseRetryHeaderValue :: T.Text -> Maybe Int +parseRetryHeaderValue hValue = + let seconds = readMaybe $ T.unpack hValue + in case seconds of + Nothing -> Nothing + Just sec -> + if sec > 0 + then Just sec + else Nothing diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 09ae8eeb1d0a9..5ed9b66422d37 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -74,8 +74,10 @@ data ScheduledEvent (p :: SE_P) { seId :: !Text , seName :: !TriggerName , seScheduledTime :: !UTCTime + , seTries :: !Int , seWebhook :: !(Param p T.Text) , sePayload :: !(Param p J.Value) + , seRetryConf :: !(Param p RetryConfST) } deriving instance Show (ScheduledEvent 'SE_PARTIAL) @@ -87,7 +89,11 @@ type instance Param 'SE_FULL a = a -- empty splice to bring all the above definitions 
in scope $(pure []) -instance (J.ToJSON (Param p T.Text), J.ToJSON (Param p J.Value)) => +instance ( J.ToJSON (Param p T.Text) + , J.ToJSON (Param p J.Value) + , J.ToJSON (Param p Int) + , J.ToJSON (Param p RetryConfST) + ) => J.ToJSON (ScheduledEvent p) where toJSON = $(J.mkToJSON (J.aesonDrop 2 J.snakeCase) {J.omitNothingFields = True} ''ScheduledEvent) @@ -100,7 +106,7 @@ runScheduledEventsGenerator logger pgpool scRef = do forever $ do traceM "entering scheduled events generator" (sc, _) <- liftIO $ readIORef scRef - let scheduledTriggers = traceShowId $ Map.elems $ scScheduledTriggers sc + let scheduledTriggers = Map.elems $ scScheduledTriggers sc runExceptT (Q.runTx pgpool @@ -169,14 +175,15 @@ processScheduledQueue logger pgpool httpMgr scRef = case scheduledEventsE of Right partialEvents -> sequence_ $ - flip map partialEvents $ \(ScheduledEvent id' name st _ _) -> do + flip map partialEvents $ \(ScheduledEvent id' name st tries _ _ _) -> do let sti' = Map.lookup name scheduledTriggersInfo case sti' of Nothing -> traceM "ERROR: couldn't find scheduled trigger in cache" Just sti -> do let webhook = wciCachedValue $ stiWebhookInfo sti payload = fromMaybe J.Null $ stiPayload sti - se = ScheduledEvent id' name st webhook payload + retryConf = stiRetryConf sti + se = ScheduledEvent id' name st tries webhook payload retryConf runReaderT (processScheduledEvent pgpool httpMgr sti se) logger Left err -> traceShowM err threadDelay (10 * oneSecond) @@ -258,7 +265,24 @@ processError pgpool se err = do runExceptT $ Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) $ do insertInvocation invocation + retryOrMarkError se err + +retryOrMarkError :: ScheduledEvent 'SE_FULL -> HTTPErr -> Q.TxE QErr () +retryOrMarkError se@ScheduledEvent{..} err = do + let mretryHeader = getRetryAfterHeaderFromHTTPErr err + mretryHeaderSeconds = join $ parseRetryHeaderValue <$> mretryHeader + triesExhausted = seTries >= rcstNumRetries seRetryConf + noRetryHeader = isNothing 
mretryHeaderSeconds + -- current_try = tries + 1 , allowed_total_tries = rcNumRetries retryConf + 1 + if triesExhausted && noRetryHeader + then do markError + else do + currentTime <- liftIO getCurrentTime + let delay = fromMaybe (rcstIntervalSec seRetryConf) mretryHeaderSeconds + diff = fromIntegral delay + retryTime = addUTCTime diff currentTime + setRetry se retryTime where markError = Q.unitQE @@ -267,7 +291,7 @@ processError pgpool se err = do UPDATE hdb_catalog.hdb_scheduled_events SET error = 't', locked = 'f' WHERE id = $1 - |] (Identity $ seId se) True + |] (Identity seId) True processSuccess :: (MonadIO m) => Q.PGPool -> ScheduledEvent 'SE_FULL -> HTTPResp -> m (Either QErr ()) processSuccess pgpool se resp = do @@ -305,6 +329,14 @@ processDead pgpool se = WHERE id = $1 |] (Identity $ seId se) False +setRetry :: ScheduledEvent 'SE_FULL -> UTCTime -> Q.TxE QErr () +setRetry se time = + Q.unitQE defaultTxErrorHandler [Q.sql| + UPDATE hdb_catalog.hdb_scheduled_events + SET next_retry_at = $1, locked = 'f' + WHERE id = $2 + |] (time, seId se) True + mkInvo :: ScheduledEvent 'SE_FULL -> Int -> [HeaderConf] -> TBS.TByteString -> [HeaderConf] -> Invocation @@ -345,12 +377,15 @@ getScheduledEvents = do WHERE ( t.locked = 'f' and t.delivered = 'f' and t.error = 'f' - and t.scheduled_time <= now() + and ( + (t.next_retry_at is NULL and t.scheduled_time <= now()) or + (t.next_retry_at is not NULL and t.next_retry_at <= now()) + ) and t.dead = 'f' ) FOR UPDATE SKIP LOCKED ) - RETURNING id, name, scheduled_time + RETURNING id, name, scheduled_time, tries |] () True pure $ partialSchedules - where uncurryEvent (i, n, st) = ScheduledEvent i n st () () + where uncurryEvent (i, n, st, tries) = ScheduledEvent i n st tries () () () diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index bec2710d87bd0..c6fb15aadb233 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -683,6 +683,7 @@ CREATE TABLE 
hdb_catalog.hdb_scheduled_events created_at TIMESTAMP DEFAULT NOW(), locked BOOLEAN NOT NULL DEFAULT FALSE, dead BOOLEAN NOT NULL DEFAULT FALSE, + next_retry_at TIMESTAMP, PRIMARY KEY (name, scheduled_time), FOREIGN KEY (name) REFERENCES hdb_catalog.hdb_scheduled_trigger(name) From 5fb35ad457432eae2d57546a6b8f4a5e5fa4727e Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 8 Jan 2020 18:39:21 +0530 Subject: [PATCH 023/195] [wip] implement cancel_scheduled_event api --- .../src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs | 17 +++++++++++++++++ .../Hasura/RQL/Types/ScheduledTrigger.hs | 7 +++++++ server/src-lib/Hasura/Server/Query.hs | 7 +++++++ 3 files changed, 31 insertions(+) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 9e79957d13c74..1f004722ba9df 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -1,5 +1,6 @@ module Hasura.RQL.DDL.ScheduledTrigger ( runCreateScheduledTrigger + , runCancelScheduledEvent ) where import Hasura.Db @@ -47,3 +48,19 @@ addScheduledTriggerSetup CreateScheduledTrigger {..} = do webhookInfo headerInfo pure stInfo + +runCancelScheduledEvent :: CacheBuildM m => CancelScheduledEvent -> m EncJSON +runCancelScheduledEvent se = do + affectedRows <- deleteScheduledEventFromCatalog se + if affectedRows == 1 + then pure successMsg + else undefined + +deleteScheduledEventFromCatalog :: CacheBuildM m => CancelScheduledEvent -> m Int +deleteScheduledEventFromCatalog se = liftTx $ + Q.listQE defaultTxErrorHandler + [Q.sql| + DELETE FROM hdb_catalog.hdb_scheduled_events + WHERE id = $1 + RETURNING count(*) + |] (Identity (cseId se)) False diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index acf32727a567b..957b1a1698584 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ 
b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -4,6 +4,7 @@ module Hasura.RQL.Types.ScheduledTrigger ( ScheduleType(..) , CreateScheduledTrigger(..) , RetryConfST(..) + , CancelScheduledEvent(..) , formatTime' ) where @@ -94,6 +95,12 @@ instance FromJSON CreateScheduledTrigger where $(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''CreateScheduledTrigger) +newtype CancelScheduledEvent + = CancelScheduledEvent { cseId :: T.Text } + deriving (Show, Eq, Lift) + +$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''CancelScheduledEvent) + -- Supported time string formats for the API: -- (see FromJSON for ZonedTime: https://hackage.haskell.org/package/aeson-1.4.6.0/docs/src/Data.Aeson.Types.FromJSON.html#line-2050) diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 59980eb35091b..0355ecb1ead03 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -88,6 +88,7 @@ data RQLQueryV1 | RQInvokeEventTrigger !InvokeEventTriggerQuery | RQCreateScheduledTrigger !CreateScheduledTrigger + | RQCancelScheduledEvent !CancelScheduledEvent -- query collections, allow list related | RQCreateQueryCollection !CreateCollection @@ -278,7 +279,9 @@ queryNeedsReload (RQV1 qi) = case qi of RQDeleteEventTrigger _ -> True RQRedeliverEvent _ -> False RQInvokeEventTrigger _ -> False + RQCreateScheduledTrigger _ -> True + RQCancelScheduledEvent _ -> False RQCreateQueryCollection _ -> True RQDropQueryCollection _ -> True @@ -402,7 +405,9 @@ runQueryM rq = RQDeleteEventTrigger q -> runDeleteEventTriggerQuery q RQRedeliverEvent q -> runRedeliverEvent q RQInvokeEventTrigger q -> runInvokeEventTrigger q + RQCreateScheduledTrigger q -> runCreateScheduledTrigger q + RQCancelScheduledEvent q -> runCancelScheduledEvent q RQCreateQueryCollection q -> runCreateCollection q RQDropQueryCollection q -> runDropCollection q @@ -476,7 +481,9 @@ requiresAdmin = \case RQDeleteEventTrigger _ -> 
True RQRedeliverEvent _ -> True RQInvokeEventTrigger _ -> True + RQCreateScheduledTrigger _ -> True + RQCancelScheduledEvent _ -> True RQCreateQueryCollection _ -> True RQDropQueryCollection _ -> True From 3d3605dc90adbae404a0009e2ff2d7f089a8a404 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Fri, 10 Jan 2020 14:10:50 +0530 Subject: [PATCH 024/195] implement cancel_scheduled_event api --- server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 1f004722ba9df..29f2dd6ddfbd7 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -10,11 +10,11 @@ import Hasura.RQL.DDL.Schema.Cache (CacheBuildM) import Hasura.RQL.DDL.EventTrigger ( getWebhookInfoFromConf , getHeaderInfosFromConf) import Hasura.RQL.Types.Helpers +import Hasura.RQL.Types.Error import Hasura.RQL.Types.ScheduledTrigger import Hasura.RQL.Types.SchemaCache ( addScheduledTriggerToCache , ScheduledTriggerInfo(..)) -import qualified Data.Aeson as J import qualified Database.PG.Query as Q runCreateScheduledTrigger :: CacheBuildM m => CreateScheduledTrigger -> m EncJSON @@ -31,7 +31,7 @@ addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ INSERT into hdb_catalog.hdb_scheduled_trigger (name, webhook_conf, schedule, payload, retry_conf) VALUES ($1, $2, $3, $4, $5) - |] (stName, Q.AltJ $ J.toJSON stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False + |] (stName, Q.AltJ stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False addScheduledTriggerSetup :: (CacheBuildM m) => CreateScheduledTrigger -> m ScheduledTriggerInfo @@ -52,13 +52,13 @@ addScheduledTriggerSetup CreateScheduledTrigger {..} = do runCancelScheduledEvent :: CacheBuildM m => CancelScheduledEvent -> m EncJSON 
runCancelScheduledEvent se = do affectedRows <- deleteScheduledEventFromCatalog se - if affectedRows == 1 - then pure successMsg - else undefined + if | affectedRows == 1 -> pure successMsg + | affectedRows == 0 -> throw400 NotFound "scheduled event not found" + | otherwise -> throw500 "more than one scheduled events cancelled" deleteScheduledEventFromCatalog :: CacheBuildM m => CancelScheduledEvent -> m Int -deleteScheduledEventFromCatalog se = liftTx $ - Q.listQE defaultTxErrorHandler +deleteScheduledEventFromCatalog se = liftTx $ do + (runIdentity . Q.getRow) <$> Q.withQE defaultTxErrorHandler [Q.sql| DELETE FROM hdb_catalog.hdb_scheduled_events WHERE id = $1 From dec2857c4066d98df7ceede5451e15e902aba668 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Mon, 13 Jan 2020 17:58:32 +0530 Subject: [PATCH 025/195] implement delete_scheduled_trigger api --- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 21 +++++++++++++++++-- .../Hasura/RQL/Types/ScheduledTrigger.hs | 8 +++++++ .../src-lib/Hasura/RQL/Types/SchemaCache.hs | 13 ++++++++++++ server/src-lib/Hasura/Server/Query.hs | 4 ++++ 4 files changed, 44 insertions(+), 2 deletions(-) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 29f2dd6ddfbd7..80c4b478c00f0 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -1,5 +1,6 @@ module Hasura.RQL.DDL.ScheduledTrigger ( runCreateScheduledTrigger + , runDeleteScheduledTrigger , runCancelScheduledEvent ) where @@ -11,8 +12,10 @@ import Hasura.RQL.DDL.EventTrigger ( getWebhookInfoFromConf , getHeaderInfosFromConf) import Hasura.RQL.Types.Helpers import Hasura.RQL.Types.Error +import Hasura.RQL.Types.EventTrigger (TriggerName) import Hasura.RQL.Types.ScheduledTrigger import Hasura.RQL.Types.SchemaCache ( addScheduledTriggerToCache + , removeScheduledTriggerFromCache , ScheduledTriggerInfo(..)) import qualified Database.PG.Query as Q @@ 
-20,8 +23,8 @@ import qualified Database.PG.Query as Q runCreateScheduledTrigger :: CacheBuildM m => CreateScheduledTrigger -> m EncJSON runCreateScheduledTrigger q = do sti <- addScheduledTriggerSetup q - addScheduledTriggerToCatalog q addScheduledTriggerToCache sti + addScheduledTriggerToCatalog q return successMsg addScheduledTriggerToCatalog :: CacheBuildM m => CreateScheduledTrigger -> m () @@ -49,12 +52,26 @@ addScheduledTriggerSetup CreateScheduledTrigger {..} = do headerInfo pure stInfo +runDeleteScheduledTrigger :: CacheBuildM m => DeleteScheduledTrigger -> m EncJSON +runDeleteScheduledTrigger (DeleteScheduledTrigger stName) = do + removeScheduledTriggerFromCache stName + deleteScheduledTriggerFromCatalog stName + return successMsg + +deleteScheduledTriggerFromCatalog :: CacheBuildM m => TriggerName -> m () +deleteScheduledTriggerFromCatalog stName = liftTx $ do + Q.unitQE defaultTxErrorHandler + [Q.sql| + DELETE FROM hdb_catalog.hdb_scheduled_trigger + WHERE name = $1 + |] (Identity stName) False + runCancelScheduledEvent :: CacheBuildM m => CancelScheduledEvent -> m EncJSON runCancelScheduledEvent se = do affectedRows <- deleteScheduledEventFromCatalog se if | affectedRows == 1 -> pure successMsg | affectedRows == 0 -> throw400 NotFound "scheduled event not found" - | otherwise -> throw500 "more than one scheduled events cancelled" + | otherwise -> throw500 "unexpected: more than one scheduled events cancelled" deleteScheduledEventFromCatalog :: CacheBuildM m => CancelScheduledEvent -> m Int deleteScheduledEventFromCatalog se = liftTx $ do diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 957b1a1698584..8eedabbc82899 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -4,6 +4,7 @@ module Hasura.RQL.Types.ScheduledTrigger ( ScheduleType(..) , CreateScheduledTrigger(..) , RetryConfST(..) 
+ , DeleteScheduledTrigger(..) , CancelScheduledEvent(..) , formatTime' ) where @@ -95,6 +96,13 @@ instance FromJSON CreateScheduledTrigger where $(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''CreateScheduledTrigger) + +newtype DeleteScheduledTrigger + = DeleteScheduledTrigger { dst :: ET.TriggerName } + deriving (Show, Eq, Lift) + +$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''DeleteScheduledTrigger) + newtype CancelScheduledEvent = CancelScheduledEvent { cseId :: T.Text } deriving (Show, Eq, Lift) diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index 97eca4760a4bb..ed4aef16e0231 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -120,6 +120,7 @@ module Hasura.RQL.Types.SchemaCache , ScheduledTriggerInfo(..) , addScheduledTriggerToCache + , removeScheduledTriggerFromCache ) where import qualified Hasura.GraphQL.Context as GC @@ -690,8 +691,20 @@ delEventTriggerFromCache qt trn = do addScheduledTriggerToCache :: (QErrM m, CacheRWM m) => ScheduledTriggerInfo -> m () addScheduledTriggerToCache stInfo = do sc <- askSchemaCache + onJust (M.lookup stName (scScheduledTriggers sc)) $ \_ -> + throw400 AlreadyExists $ "scheduled trigger " <> triggerNameToTxt stName <> " already exists" let scScheduledTriggers' = M.insert (stiName stInfo) stInfo $ scScheduledTriggers sc writeSchemaCache $ sc {scScheduledTriggers = scScheduledTriggers'} + where + stName = stiName stInfo + +removeScheduledTriggerFromCache :: (QErrM m, CacheRWM m) => TriggerName -> m () +removeScheduledTriggerFromCache stName = do + sc <- askSchemaCache + void $ onNothing (M.lookup stName (scScheduledTriggers sc)) $ + throw400 NotFound "scheduled trigger not found" + let scScheduledTriggers' = M.delete stName (scScheduledTriggers sc) + writeSchemaCache $ sc {scScheduledTriggers = scScheduledTriggers'} addFunctionToCache :: (QErrM m, CacheRWM m) diff 
--git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 0355ecb1ead03..678e4e9624467 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -88,6 +88,7 @@ data RQLQueryV1 | RQInvokeEventTrigger !InvokeEventTriggerQuery | RQCreateScheduledTrigger !CreateScheduledTrigger + | RQDeleteScheduledTrigger !DeleteScheduledTrigger | RQCancelScheduledEvent !CancelScheduledEvent -- query collections, allow list related @@ -281,6 +282,7 @@ queryNeedsReload (RQV1 qi) = case qi of RQInvokeEventTrigger _ -> False RQCreateScheduledTrigger _ -> True + RQDeleteScheduledTrigger _ -> True RQCancelScheduledEvent _ -> False RQCreateQueryCollection _ -> True @@ -407,6 +409,7 @@ runQueryM rq = RQInvokeEventTrigger q -> runInvokeEventTrigger q RQCreateScheduledTrigger q -> runCreateScheduledTrigger q + RQDeleteScheduledTrigger q -> runDeleteScheduledTrigger q RQCancelScheduledEvent q -> runCancelScheduledEvent q RQCreateQueryCollection q -> runCreateCollection q @@ -483,6 +486,7 @@ requiresAdmin = \case RQInvokeEventTrigger _ -> True RQCreateScheduledTrigger _ -> True + RQDeleteScheduledTrigger _ -> True RQCancelScheduledEvent _ -> True RQCreateQueryCollection _ -> True From bc22952914aa7bb4deb09e4904aa1679115fbfb1 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 15 Jan 2020 15:55:52 +0530 Subject: [PATCH 026/195] implement update_scheduled_trigger --- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 33 +++++++++++++++++-- .../src-lib/Hasura/RQL/Types/SchemaCache.hs | 11 +++++++ server/src-lib/Hasura/Server/Query.hs | 4 +++ 3 files changed, 45 insertions(+), 3 deletions(-) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 80c4b478c00f0..b884ae3605894 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -1,5 +1,6 @@ module Hasura.RQL.DDL.ScheduledTrigger ( 
runCreateScheduledTrigger + , runUpdateScheduledTrigger , runDeleteScheduledTrigger , runCancelScheduledEvent ) where @@ -15,6 +16,7 @@ import Hasura.RQL.Types.Error import Hasura.RQL.Types.EventTrigger (TriggerName) import Hasura.RQL.Types.ScheduledTrigger import Hasura.RQL.Types.SchemaCache ( addScheduledTriggerToCache + , updateScheduledTriggerInCache , removeScheduledTriggerFromCache , ScheduledTriggerInfo(..)) @@ -22,7 +24,7 @@ import qualified Database.PG.Query as Q runCreateScheduledTrigger :: CacheBuildM m => CreateScheduledTrigger -> m EncJSON runCreateScheduledTrigger q = do - sti <- addScheduledTriggerSetup q + sti <- scheduledTriggerSetup q addScheduledTriggerToCache sti addScheduledTriggerToCatalog q return successMsg @@ -36,9 +38,9 @@ addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ VALUES ($1, $2, $3, $4, $5) |] (stName, Q.AltJ stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False -addScheduledTriggerSetup :: +scheduledTriggerSetup :: (CacheBuildM m) => CreateScheduledTrigger -> m ScheduledTriggerInfo -addScheduledTriggerSetup CreateScheduledTrigger {..} = do +scheduledTriggerSetup CreateScheduledTrigger {..} = do let headerConfs = fromMaybe [] stHeaders webhookInfo <- getWebhookInfoFromConf stWebhookConf headerInfo <- getHeaderInfosFromConf headerConfs @@ -52,6 +54,31 @@ addScheduledTriggerSetup CreateScheduledTrigger {..} = do headerInfo pure stInfo +runUpdateScheduledTrigger :: CacheBuildM m => CreateScheduledTrigger -> m EncJSON +runUpdateScheduledTrigger q = do + sti <- scheduledTriggerSetup q + updateScheduledTriggerInCache sti + updateScheduledTriggerInCatalog q + return successMsg + +updateScheduledTriggerInCatalog :: CacheBuildM m => CreateScheduledTrigger -> m () +updateScheduledTriggerInCatalog CreateScheduledTrigger {..} = liftTx $ do + Q.unitQE defaultTxErrorHandler + [Q.sql| + UPDATE hdb_catalog.hdb_scheduled_trigger + SET webhook_conf = $2, + schedule = $3, + payload = $4, + retry_conf = 
$5 + WHERE name = $1 + |] (stName, Q.AltJ stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False + -- since the scheduled trigger is updated, clear all its future events which are not retries + Q.unitQE defaultTxErrorHandler + [Q.sql| + DELETE FROM hdb_catalog.hdb_scheduled_events + WHERE name = $1 AND scheduled_time > now() AND tries = 0 + |] (Identity stName) False + runDeleteScheduledTrigger :: CacheBuildM m => DeleteScheduledTrigger -> m EncJSON runDeleteScheduledTrigger (DeleteScheduledTrigger stName) = do removeScheduledTriggerFromCache stName diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index ed4aef16e0231..2871c3b37e100 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -120,6 +120,7 @@ module Hasura.RQL.Types.SchemaCache , ScheduledTriggerInfo(..) , addScheduledTriggerToCache + , updateScheduledTriggerInCache , removeScheduledTriggerFromCache ) where @@ -698,6 +699,16 @@ addScheduledTriggerToCache stInfo = do where stName = stiName stInfo +updateScheduledTriggerInCache :: (QErrM m, CacheRWM m) => ScheduledTriggerInfo -> m () +updateScheduledTriggerInCache stInfo = do + sc <- askSchemaCache + void $ onNothing (M.lookup stName (scScheduledTriggers sc)) $ + throw400 NotFound "scheduled trigger not found" + let scScheduledTriggers' = M.insert (stiName stInfo) stInfo $ scScheduledTriggers sc + writeSchemaCache $ sc {scScheduledTriggers = scScheduledTriggers'} + where + stName = stiName stInfo + removeScheduledTriggerFromCache :: (QErrM m, CacheRWM m) => TriggerName -> m () removeScheduledTriggerFromCache stName = do sc <- askSchemaCache diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 678e4e9624467..c90b878be3e01 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -88,6 +88,7 @@ data RQLQueryV1 | 
RQInvokeEventTrigger !InvokeEventTriggerQuery | RQCreateScheduledTrigger !CreateScheduledTrigger + | RQUpdateScheduledTrigger !CreateScheduledTrigger | RQDeleteScheduledTrigger !DeleteScheduledTrigger | RQCancelScheduledEvent !CancelScheduledEvent @@ -282,6 +283,7 @@ queryNeedsReload (RQV1 qi) = case qi of RQInvokeEventTrigger _ -> False RQCreateScheduledTrigger _ -> True + RQUpdateScheduledTrigger _ -> True RQDeleteScheduledTrigger _ -> True RQCancelScheduledEvent _ -> False @@ -409,6 +411,7 @@ runQueryM rq = RQInvokeEventTrigger q -> runInvokeEventTrigger q RQCreateScheduledTrigger q -> runCreateScheduledTrigger q + RQUpdateScheduledTrigger q -> runUpdateScheduledTrigger q RQDeleteScheduledTrigger q -> runDeleteScheduledTrigger q RQCancelScheduledEvent q -> runCancelScheduledEvent q @@ -486,6 +489,7 @@ requiresAdmin = \case RQInvokeEventTrigger _ -> True RQCreateScheduledTrigger _ -> True + RQUpdateScheduledTrigger _ -> True RQDeleteScheduledTrigger _ -> True RQCancelScheduledEvent _ -> True From e59ca440e13bd44f397ddea0349bcf91a3996cb7 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Mon, 27 Jan 2020 16:20:49 +0530 Subject: [PATCH 027/195] add upgrade migration --- .../src-lib/Hasura/Server/Migrate/Version.hs | 2 +- server/src-rsr/migrations/30_to_31.sql | 38 +++++++++++++++++++ 2 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 server/src-rsr/migrations/30_to_31.sql diff --git a/server/src-lib/Hasura/Server/Migrate/Version.hs b/server/src-lib/Hasura/Server/Migrate/Version.hs index 6968137765f62..7a008c2f432b0 100644 --- a/server/src-lib/Hasura/Server/Migrate/Version.hs +++ b/server/src-lib/Hasura/Server/Migrate/Version.hs @@ -12,7 +12,7 @@ import Hasura.Prelude import qualified Data.Text as T latestCatalogVersion :: Integer -latestCatalogVersion = 30 +latestCatalogVersion = 31 latestCatalogVersionString :: T.Text latestCatalogVersionString = T.pack $ show latestCatalogVersion diff --git a/server/src-rsr/migrations/30_to_31.sql 
b/server/src-rsr/migrations/30_to_31.sql new file mode 100644 index 0000000000000..4ead50aaa351b --- /dev/null +++ b/server/src-rsr/migrations/30_to_31.sql @@ -0,0 +1,38 @@ +CREATE TABLE hdb_catalog.hdb_scheduled_trigger +( + name TEXT PRIMARY KEY, + webhook_conf JSON NOT NULL, + schedule JSON NOT NULL, + payload JSON, + retry_conf JSON +); + +CREATE TABLE hdb_catalog.hdb_scheduled_events +( + id TEXT DEFAULT gen_random_uuid() UNIQUE, + name TEXT, + scheduled_time TIMESTAMP NOT NULL, + delivered BOOLEAN NOT NULL DEFAULT FALSE, + error BOOLEAN NOT NULL DEFAULT FALSE, + tries INTEGER NOT NULL DEFAULT 0, + created_at TIMESTAMP DEFAULT NOW(), + locked BOOLEAN NOT NULL DEFAULT FALSE, + dead BOOLEAN NOT NULL DEFAULT FALSE, + next_retry_at TIMESTAMP, + + PRIMARY KEY (name, scheduled_time), + FOREIGN KEY (name) REFERENCES hdb_catalog.hdb_scheduled_trigger(name) + ON UPDATE CASCADE ON DELETE CASCADE +); + +CREATE TABLE hdb_catalog.hdb_scheduled_event_invocation_logs +( + id TEXT DEFAULT gen_random_uuid() PRIMARY KEY, + event_id TEXT, + status INTEGER, + request JSON, + response JSON, + created_at TIMESTAMP DEFAULT NOW(), + + FOREIGN KEY (event_id) REFERENCES hdb_catalog.hdb_scheduled_events (id) ON DELETE CASCADE +); From a5b76382f652ecf0a0719f28f521cc1c6db2bc5c Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 28 Jan 2020 17:25:07 +0530 Subject: [PATCH 028/195] [wip] review comments --- server/src-lib/Data/Time/Clock/Units.hs | 4 + server/src-lib/Hasura/App.hs | 2 +- .../src-lib/Hasura/Eventing/EventTrigger.hs | 97 ++++++------- server/src-lib/Hasura/Eventing/HTTP.hs | 90 ++++++++---- .../Hasura/Eventing/ScheduledTrigger.hs | 133 ++++++++---------- .../Hasura/Incremental/Internal/Dependency.hs | 15 ++ .../Hasura/RQL/DDL/ScheduledTrigger.hs | 15 +- server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs | 7 +- server/src-lib/Hasura/RQL/Instances.hs | 14 ++ .../Hasura/RQL/Types/ScheduledTrigger.hs | 65 ++------- server/src-lib/Hasura/Server/Query.hs | 4 +- 11 files changed, 
208 insertions(+), 238 deletions(-) diff --git a/server/src-lib/Data/Time/Clock/Units.hs b/server/src-lib/Data/Time/Clock/Units.hs index 5e81ff656d25c..3959505ffe9f6 100644 --- a/server/src-lib/Data/Time/Clock/Units.hs +++ b/server/src-lib/Data/Time/Clock/Units.hs @@ -37,6 +37,7 @@ module Data.Time.Clock.Units , Milliseconds(..) , Microseconds(..) , Nanoseconds(..) + , diffTimeToSeconds ) where import Prelude @@ -50,6 +51,9 @@ type Seconds = DiffTime seconds :: DiffTime -> DiffTime seconds = id +diffTimeToSeconds :: DiffTime -> Integer +diffTimeToSeconds = (1000000000000 *) . diffTimeToPicoseconds + newtype Days = Days { days :: DiffTime } deriving (Show, Eq, Ord) deriving (Num, Fractional, Real, RealFrac) via (TimeUnit (SecondsP 86400)) diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index 411a21535d71b..1240b591d957a 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -253,7 +253,7 @@ runHGEServer ServeOptions{..} InitCtx{..} initTime = do _icHttpManager _icPgPool (getSCFromRef cacheRef) eventEngineCtx void $ liftIO $ C.forkIO $ runScheduledEventsGenerator logger _icPgPool (getSCFromRef cacheRef) - void $ liftIO $ C.forkIO $ processScheduledQueue logger _icPgPool _icHttpManager (getSCFromRef cacheRef) -- logEnvHeaders + void $ liftIO $ C.forkIO $ processScheduledQueue logger logEnvHeaders _icHttpManager _icPgPool (getSCFromRef cacheRef) -- start a background thread to check for updates void $ liftIO $ C.forkIO $ checkForUpdates loggerCtx _icHttpManager diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 2a09050a60c73..4bb302bbfb2e6 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -10,7 +10,7 @@ module Hasura.Eventing.EventTrigger import Control.Concurrent (threadDelay) import Control.Concurrent.Async (async, waitAny) import Control.Concurrent.STM.TVar -import Control.Exception (try) 
+import Control.Exception (bracket_, try) import Control.Monad.STM (STM, atomically, retry) import Data.Aeson import Data.Aeson.Casing @@ -115,11 +115,11 @@ pushEvents logger pool eectx = forever $ do consumeEvents :: (HasVersion) => L.Logger L.Hasura -> LogEnvHeaders -> HTTP.Manager -> Q.PGPool -> IO SchemaCache -> EventEngineCtx -> IO () -consumeEvents logger logenv httpMgr pool getSchemaCache eectx = forever $ do +consumeEvents logger logenv httpMgr pool getSC eectx = forever $ do event <- atomically $ do let EventEngineCtx q _ _ _ = eectx TQ.readTQueue q - async $ runReaderT (processEvent logenv pool getSchemaCache event) (logger, httpMgr, eectx) + async $ runReaderT (processEvent logenv pool getSC event) (logger, httpMgr, eectx) processEvent :: ( HasVersion @@ -132,6 +132,7 @@ processEvent => LogEnvHeaders -> Q.PGPool -> IO SchemaCache -> Event -> m () processEvent logenv pool getSchemaCache e = do cache <- liftIO getSchemaCache + eventEngineCtx <- asks getter let meti = getEventTriggerInfoFromEvent cache e case meti of Nothing -> do @@ -140,18 +141,42 @@ processEvent logenv pool getSchemaCache e = do let webhook = T.unpack $ wciCachedValue $ etiWebhookInfo eti retryConf = etiRetryConf eti timeoutSeconds = fromMaybe defaultTimeoutSeconds (rcTimeoutSec retryConf) - responseTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) + respTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) headerInfos = etiHeaders eti etHeaders = map encodeHeader headerInfos headers = addDefaultHeaders etHeaders ep = createEventPayload retryConf e - res <- runExceptT $ tryWebhook headers responseTimeout ep webhook + extraLogCtx = ExtraLogContext (epId ep) + res <- runExceptT $ withEventEngineCtx eventEngineCtx $ + tryWebhook headers respTimeout (toJSON ep) webhook (Just extraLogCtx) let decodedHeaders = map (decodeHeader logenv headerInfos) headers finally <- either (processError pool e retryConf decodedHeaders ep) (processSuccess pool e decodedHeaders ep) res 
either logQErr return finally +withEventEngineCtx :: + ( MonadReader r m + , Has HTTP.Manager r + , Has (L.Logger L.Hasura) r + , MonadIO m + , MonadError HTTPErr m + ) + => EventEngineCtx + -> m HTTPResp + -> m HTTPResp +withEventEngineCtx eeCtx = bracket_ (incrementThreadCount eeCtx) (decrementThreadCount eeCtx) + +incrementThreadCount :: EventEngineCtx -> IO () +incrementThreadCount (EventEngineCtx _ c maxT _ ) = atomically $ do + countThreads <- readTVar c + if countThreads >= maxT + then retry + else modifyTVar' c (+1) + +decrementThreadCount :: EventEngineCtx -> IO () +decrementThreadCount (EventEngineCtx _ c _ _) = atomically $ modifyTVar' c (\v -> v - 1) + createEventPayload :: RetryConf -> Event -> EventPayload createEventPayload retryConf e = EventPayload { epId = eId e @@ -159,9 +184,9 @@ createEventPayload retryConf e = EventPayload , epTrigger = eTrigger e , epEvent = eEvent e , epDeliveryInfo = DeliveryInfo - { diCurrentRetry = eTries e - , diMaxRetries = rcNumRetries retryConf - } + { diCurrentRetry = eTries e + , diMaxRetries = rcNumRetries retryConf + } , epCreatedAt = eCreatedAt e } @@ -173,7 +198,7 @@ processSuccess pool e decodedHeaders ep resp = do let respBody = hrsBody resp respHeaders = hrsHeaders resp respStatus = hrsStatus resp - invocation = mkInvo ep respStatus decodedHeaders respBody respHeaders + invocation = mkInvocation ep respStatus decodedHeaders respBody respHeaders liftIO $ runExceptT $ Q.runTx pool (Q.RepeatableRead, Just Q.ReadWrite) $ do insertInvocation invocation setSuccess e @@ -190,18 +215,18 @@ processError pool e retryConf decodedHeaders ep err = do let invocation = case err of HClient excp -> do let errMsg = TBS.fromLBS $ encode $ show excp - mkInvo ep 1000 decodedHeaders errMsg [] + mkInvocation ep 1000 decodedHeaders errMsg [] HParse _ detail -> do let errMsg = TBS.fromLBS $ encode detail - mkInvo ep 1001 decodedHeaders errMsg [] + mkInvocation ep 1001 decodedHeaders errMsg [] HStatus errResp -> do let respPayload = 
hrsBody errResp respHeaders = hrsHeaders errResp respStatus = hrsStatus errResp - mkInvo ep respStatus decodedHeaders respPayload respHeaders + mkInvocation ep respStatus decodedHeaders respPayload respHeaders HOther detail -> do let errMsg = (TBS.fromLBS $ encode detail) - mkInvo ep 500 decodedHeaders errMsg [] + mkInvocation ep 500 decodedHeaders errMsg [] liftIO $ runExceptT $ Q.runTx pool (Q.RepeatableRead, Just Q.ReadWrite) $ do insertInvocation invocation retryOrSetError e retryConf err @@ -224,10 +249,10 @@ retryOrSetError e retryConf err = do retryTime = addUTCTime diff currentTime setRetry e retryTime -mkInvo +mkInvocation :: EventPayload -> Int -> [HeaderConf] -> TBS.TByteString -> [HeaderConf] -> Invocation -mkInvo ep status reqHeaders respBody respHeaders +mkInvocation ep status reqHeaders respBody respHeaders = let resp = if isClientError status then mkClientErr respBody else mkResp status respBody respHeaders @@ -238,48 +263,6 @@ mkInvo ep status reqHeaders respBody respHeaders (mkWebhookReq (toJSON ep) reqHeaders invocationVersion) resp -tryWebhook - :: ( Has (L.Logger L.Hasura) r - , Has HTTP.Manager r - , Has EventEngineCtx r - , MonadReader r m - , MonadIO m - , MonadError HTTPErr m - ) - => [HTTP.Header] -> HTTP.ResponseTimeout -> EventPayload -> String - -> m HTTPResp -tryWebhook headers responseTimeout ep webhook = do - let createdAt = epCreatedAt ep - eventId = epId ep - initReqE <- liftIO $ try $ HTTP.parseRequest webhook - manager <- asks getter - case initReqE of - Left excp -> throwError $ HClient excp - Right initReq -> do - let req = initReq - { HTTP.method = "POST" - , HTTP.requestHeaders = headers - , HTTP.requestBody = HTTP.RequestBodyLBS (encode ep) - , HTTP.responseTimeout = responseTimeout - } - eeCtx <- asks getter - -- wait for counter and then increment beforing making http - liftIO $ atomically $ do - let EventEngineCtx _ c maxT _ = eeCtx - countThreads <- readTVar c - if countThreads >= maxT - then retry - else modifyTVar' c 
(+1) - - eitherResp <- runHTTP manager req (Just (ExtraContext createdAt eventId)) - - -- decrement counter once http is done - liftIO $ atomically $ do - let EventEngineCtx _ c _ _ = eeCtx - modifyTVar' c (\v -> v - 1) - - onLeft eitherResp throwError - getEventTriggerInfoFromEvent :: SchemaCache -> Event -> Maybe EventTriggerInfo getEventTriggerInfoFromEvent sc e = let table = eTable e tableInfo = M.lookup table $ scTables sc diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index c9938279529b0..48aab5dba2578 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -1,10 +1,11 @@ module Hasura.Eventing.HTTP ( HTTPErr(..) , HTTPResp(..) + , tryWebhook , runHTTP , isNetworkError , isNetworkErrorHC - , ExtraContext(..) + , ExtraLogContext(..) , EventId , Invocation(..) , Version @@ -40,7 +41,6 @@ import qualified Data.TByteString as TBS import qualified Data.Text as T import qualified Data.Text.Encoding as TE import qualified Data.Text.Encoding.Error as TE -import qualified Data.Time.Clock as Time import qualified Hasura.Logging as L import qualified Network.HTTP.Client as HTTP import qualified Network.HTTP.Types as HTTP @@ -103,13 +103,12 @@ data Invocation , iResponse :: Response } -data ExtraContext - = ExtraContext - { elEventCreatedAt :: Time.UTCTime - , elEventId :: EventId +data ExtraLogContext + = ExtraLogContext + { elcEventId :: EventId } deriving (Show, Eq) -$(J.deriveJSON (J.aesonDrop 2 J.snakeCase){J.omitNothingFields=True} ''ExtraContext) +$(J.deriveJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''ExtraLogContext) data HTTPResp = HTTPResp @@ -123,29 +122,6 @@ $(J.deriveToJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''HTTPRes instance ToEngineLog HTTPResp Hasura where toEngineLog resp = (LevelInfo, eventTriggerLogType, J.toJSON resp) -mkHTTPResp :: HTTP.Response LBS.ByteString -> HTTPResp -mkHTTPResp resp = - HTTPResp - { hrsStatus = 
HTTP.statusCode $ HTTP.responseStatus resp - , hrsHeaders = map decodeHeader $ HTTP.responseHeaders resp - , hrsBody = TBS.fromLBS $ HTTP.responseBody resp - } - where - decodeBS = TE.decodeUtf8With TE.lenientDecode - decodeHeader (hdrName, hdrVal) - = HeaderConf (decodeBS $ CI.original hdrName) (HVValue (decodeBS hdrVal)) - -data HTTPRespExtra - = HTTPRespExtra - { _hreResponse :: HTTPResp - , _hreContext :: Maybe ExtraContext - } - -$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''HTTPRespExtra) - -instance ToEngineLog HTTPRespExtra Hasura where - toEngineLog resp = (LevelInfo, eventTriggerLogType, J.toJSON resp) - data HTTPErr = HClient !HTTP.HttpException | HParse !HTTP.Status !String @@ -171,6 +147,29 @@ instance J.ToJSON HTTPErr where instance ToEngineLog HTTPErr Hasura where toEngineLog err = (LevelError, eventTriggerLogType, J.toJSON err) +mkHTTPResp :: HTTP.Response LBS.ByteString -> HTTPResp +mkHTTPResp resp = + HTTPResp + { hrsStatus = HTTP.statusCode $ HTTP.responseStatus resp + , hrsHeaders = map decodeHeader $ HTTP.responseHeaders resp + , hrsBody = TBS.fromLBS $ HTTP.responseBody resp + } + where + decodeBS = TE.decodeUtf8With TE.lenientDecode + decodeHeader (hdrName, hdrVal) + = HeaderConf (decodeBS $ CI.original hdrName) (HVValue (decodeBS hdrVal)) + +data HTTPRespExtra + = HTTPRespExtra + { _hreResponse :: HTTPResp + , _hreContext :: Maybe ExtraLogContext + } + +$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''HTTPRespExtra) + +instance ToEngineLog HTTPRespExtra Hasura where + toEngineLog resp = (LevelInfo, eventTriggerLogType, J.toJSON resp) + isNetworkError :: HTTPErr -> Bool isNetworkError = \case HClient he -> isNetworkErrorHC he @@ -211,7 +210,7 @@ runHTTP , Has (Logger Hasura) r , MonadIO m ) - => HTTP.Manager -> HTTP.Request -> Maybe ExtraContext -> m (Either HTTPErr HTTPResp) + => HTTP.Manager -> HTTP.Request -> Maybe ExtraLogContext -> m (Either HTTPErr HTTPResp) runHTTP manager req exLog = 
do logger :: Logger Hasura <- asks getter res <- liftIO $ try $ HTTP.httpLbs req manager @@ -220,6 +219,35 @@ runHTTP manager req exLog = do Right resp -> unLogger logger $ HTTPRespExtra (mkHTTPResp resp) exLog return $ either (Left . HClient) anyBodyParser res +tryWebhook :: + ( MonadReader r m + , Has HTTP.Manager r + , Has (L.Logger L.Hasura) r + , MonadIO m + , MonadError HTTPErr m + ) + => [HTTP.Header] + -> HTTP.ResponseTimeout + -> J.Value + -> String + -> Maybe ExtraLogContext + -> m HTTPResp +tryWebhook headers timeout payload webhook extraLogCtx = do + initReqE <- liftIO $ try $ HTTP.parseRequest webhook + manager <- asks getter + case initReqE of + Left excp -> throwError $ HClient excp + Right initReq -> do + let req = + initReq + { HTTP.method = "POST" + , HTTP.requestHeaders = headers + , HTTP.requestBody = HTTP.RequestBodyLBS (J.encode payload) + , HTTP.responseTimeout = timeout + } + eitherResp <- runHTTP manager req extraLogCtx + onLeft eitherResp throwError + newtype EventInternalErr = EventInternalErr QErr deriving (Show, Eq) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index e578c06e8729c..8ca89a4523ed2 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -8,9 +8,9 @@ module Hasura.Eventing.ScheduledTrigger ) where import Control.Concurrent (threadDelay) -import Control.Exception (try) import Data.Has import Data.Time.Clock +import Data.Time.Clock.Units import Hasura.Eventing.HTTP import Hasura.Prelude import Hasura.RQL.DDL.Headers @@ -30,7 +30,6 @@ import qualified Data.Text as T import qualified Database.PG.Query as Q import qualified Hasura.Logging as L import qualified Network.HTTP.Client as HTTP -import qualified Network.HTTP.Types as HTTP import qualified Text.Builder as TB (run) import qualified Data.HashMap.Strict as Map @@ -48,8 +47,6 @@ oneMinute = 60 * oneSecond oneHour :: Int oneHour = 60 * 
oneMinute -type ScheduledEventPayload = J.Value - scheduledEventsTable :: QualifiedTable scheduledEventsTable = QualifiedObject @@ -143,15 +140,15 @@ generateScheduledEventsFrom time ScheduledTriggerInfo{..} = case stiSchedule of OneOff _ -> empty -- one-off scheduled events are generated during creation Cron cron -> - generateSchedulesBetween + generateScheduleTimesBetween time (addUTCTime nominalDay time) cron in map (ScheduledEventSeed stiName) events --- generates events (from, till] according to CronSchedule -generateSchedulesBetween :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] -generateSchedulesBetween from till cron = takeWhile ((>=) till) $ go from +-- | Generates events @(from, till]@ according to 'CronSchedule' +generateScheduleTimesBetween :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] +generateScheduleTimesBetween from till cron = takeWhile (<= till) $ go from where go init = case nextMatch cron init of @@ -161,15 +158,15 @@ generateSchedulesBetween from till cron = takeWhile ((>=) till) $ go from processScheduledQueue :: HasVersion => L.Logger L.Hasura - -> Q.PGPool + -> LogEnvHeaders -> HTTP.Manager + -> Q.PGPool -> IO SchemaCache -> IO () -processScheduledQueue logger pgpool httpMgr getSC = +processScheduledQueue logger logEnv httpMgr pgpool getSC = forever $ do traceM "entering processor queue" - sc <- getSC - let scheduledTriggersInfo = scScheduledTriggers sc + scheduledTriggersInfo <- scScheduledTriggers <$> getSC scheduledEventsE <- runExceptT $ Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadWrite) getScheduledEvents @@ -185,31 +182,40 @@ processScheduledQueue logger pgpool httpMgr getSC = payload = fromMaybe J.Null $ stiPayload sti retryConf = stiRetryConf sti se = ScheduledEvent id' name st tries webhook payload retryConf - runReaderT (processScheduledEvent pgpool httpMgr sti se) logger + runReaderT (processScheduledEvent logEnv pgpool sti se) (logger, httpMgr) Left err -> traceShowM err threadDelay oneMinute processScheduledEvent :: - 
(MonadReader r m, Has (L.Logger L.Hasura) r, HasVersion, MonadIO m) - => Q.PGPool - -> HTTP.Manager + ( MonadReader r m + , Has HTTP.Manager r + , Has (L.Logger L.Hasura) r + , HasVersion + , MonadIO m + ) + => LogEnvHeaders + -> Q.PGPool -> ScheduledTriggerInfo -> ScheduledEvent 'SE_FULL -> m () -processScheduledEvent pgpool httpMgr ScheduledTriggerInfo {..} se@ScheduledEvent {..} = do +processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEvent {..} = do currentTime <- liftIO getCurrentTime - if diffUTCTime currentTime seScheduledTime > rcstTolerance stiRetryConf + if (toRational $ diffUTCTime currentTime seScheduledTime) > (toRational $ rcstTolerance stiRetryConf) then processDead' else do - let timeoutSeconds = rcstTimeoutSec stiRetryConf - responseTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) + let timeoutSeconds = fromInteger . diffTimeToSeconds $ rcstTimeoutSec stiRetryConf + httpTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) headers = map encodeHeader stiHeaders headers' = addDefaultHeaders headers + extraLogCtx = ExtraLogContext seId res <- runExceptT $ - tryWebhook httpMgr responseTimeout headers' sePayload seWebhook - -- let decodedHeaders = map (decodeHeader logenv headerInfos) headers - finally <- either (processError pgpool se) (processSuccess pgpool se) res + tryWebhook headers' httpTimeout sePayload (T.unpack seWebhook) (Just extraLogCtx) + let decodedHeaders = map (decodeHeader logEnv stiHeaders) headers' + finally <- either + (processError pgpool se decodedHeaders) + (processSuccess pgpool se decodedHeaders) + res either logQErr return finally where processDead' = @@ -217,51 +223,23 @@ processScheduledEvent pgpool httpMgr ScheduledTriggerInfo {..} se@ScheduledEvent Left err -> logQErr err Right _ -> pure () -tryWebhook :: - ( MonadReader r m - , Has (L.Logger L.Hasura) r - , MonadIO m - , MonadError HTTPErr m - ) - => HTTP.Manager - -> HTTP.ResponseTimeout - -> [HTTP.Header] - -> 
ScheduledEventPayload - -> T.Text - -> m HTTPResp -tryWebhook httpMgr timeout headers payload webhook = do - initReqE <- liftIO $ try $ HTTP.parseRequest (T.unpack webhook) - case initReqE of - Left excp -> throwError $ HClient excp - Right initReq -> do - let req = - initReq - { HTTP.method = "POST" - , HTTP.requestHeaders = headers - , HTTP.requestBody = HTTP.RequestBodyLBS (J.encode payload) - , HTTP.responseTimeout = timeout - } - eitherResp <- runHTTP httpMgr req Nothing - onLeft eitherResp throwError - -processError :: (MonadIO m) => Q.PGPool -> ScheduledEvent 'SE_FULL -> HTTPErr -> m (Either QErr ()) -processError pgpool se err = do - let decodedHeaders = [] - invocation = case err of - HClient excp -> do - let errMsg = TBS.fromLBS $ J.encode $ show excp - mkInvo se 1000 decodedHeaders errMsg [] - HParse _ detail -> do - let errMsg = TBS.fromLBS $ J.encode detail - mkInvo se 1001 decodedHeaders errMsg [] - HStatus errResp -> do - let respPayload = hrsBody errResp - respHeaders = hrsHeaders errResp - respStatus = hrsStatus errResp - mkInvo se respStatus decodedHeaders respPayload respHeaders - HOther detail -> do - let errMsg = (TBS.fromLBS $ J.encode detail) - mkInvo se 500 decodedHeaders errMsg [] +processError :: (MonadIO m) => Q.PGPool -> ScheduledEvent 'SE_FULL -> [HeaderConf] -> HTTPErr -> m (Either QErr ()) +processError pgpool se decodedHeaders err = do + let invocation = case err of + HClient excp -> do + let errMsg = TBS.fromLBS $ J.encode $ show excp + mkInvocation se 1000 decodedHeaders errMsg [] + HParse _ detail -> do + let errMsg = TBS.fromLBS $ J.encode detail + mkInvocation se 1001 decodedHeaders errMsg [] + HStatus errResp -> do + let respPayload = hrsBody errResp + respHeaders = hrsHeaders errResp + respStatus = hrsStatus errResp + mkInvocation se respStatus decodedHeaders respPayload respHeaders + HOther detail -> do + let errMsg = (TBS.fromLBS $ J.encode detail) + mkInvocation se 500 decodedHeaders errMsg [] liftIO $ runExceptT $ Q.runTx 
pgpool (Q.RepeatableRead, Just Q.ReadWrite) $ do @@ -270,17 +248,17 @@ processError pgpool se err = do retryOrMarkError :: ScheduledEvent 'SE_FULL -> HTTPErr -> Q.TxE QErr () retryOrMarkError se@ScheduledEvent{..} err = do - let mretryHeader = getRetryAfterHeaderFromHTTPErr err - mretryHeaderSeconds = join $ parseRetryHeaderValue <$> mretryHeader + let mRetryHeader = getRetryAfterHeaderFromHTTPErr err + mRetryHeaderSeconds = join $ parseRetryHeaderValue <$> mRetryHeader triesExhausted = seTries >= rcstNumRetries seRetryConf - noRetryHeader = isNothing mretryHeaderSeconds + noRetryHeader = isNothing mRetryHeaderSeconds -- current_try = tries + 1 , allowed_total_tries = rcNumRetries retryConf + 1 if triesExhausted && noRetryHeader then do markError else do currentTime <- liftIO getCurrentTime - let delay = fromMaybe (rcstIntervalSec seRetryConf) mretryHeaderSeconds + let delay = fromMaybe (fromInteger . diffTimeToSeconds $ rcstIntervalSec seRetryConf) mRetryHeaderSeconds diff = fromIntegral delay retryTime = addUTCTime diff currentTime setRetry se retryTime @@ -294,13 +272,12 @@ retryOrMarkError se@ScheduledEvent{..} err = do WHERE id = $1 |] (Identity seId) True -processSuccess :: (MonadIO m) => Q.PGPool -> ScheduledEvent 'SE_FULL -> HTTPResp -> m (Either QErr ()) -processSuccess pgpool se resp = do +processSuccess :: (MonadIO m) => Q.PGPool -> ScheduledEvent 'SE_FULL -> [HeaderConf] -> HTTPResp -> m (Either QErr ()) +processSuccess pgpool se decodedHeaders resp = do let respBody = hrsBody resp respHeaders = hrsHeaders resp respStatus = hrsStatus resp - decodedHeaders = [] - invocation = mkInvo se respStatus decodedHeaders respBody respHeaders + invocation = mkInvocation se respStatus decodedHeaders respBody respHeaders liftIO $ runExceptT $ Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) $ do @@ -338,10 +315,10 @@ setRetry se time = WHERE id = $2 |] (time, seId se) True -mkInvo +mkInvocation :: ScheduledEvent 'SE_FULL -> Int -> [HeaderConf] -> TBS.TByteString 
-> [HeaderConf] -> Invocation -mkInvo se status reqHeaders respBody respHeaders +mkInvocation se status reqHeaders respBody respHeaders = let resp = if isClientError status then mkClientErr respBody else mkResp status respBody respHeaders diff --git a/server/src-lib/Hasura/Incremental/Internal/Dependency.hs b/server/src-lib/Hasura/Incremental/Internal/Dependency.hs index 8b6fb29fb374c..aba0f360be3a5 100644 --- a/server/src-lib/Hasura/Incremental/Internal/Dependency.hs +++ b/server/src-lib/Hasura/Incremental/Internal/Dependency.hs @@ -22,6 +22,7 @@ import GHC.Generics ((:*:) (..), (:+:) (..), Generic M1 (..), U1 (..), V1) import Hasura.Incremental.Select +import System.Cron.Types -- | A 'Dependency' represents a value that a 'Rule' can /conditionally/ depend on. A 'Dependency' -- is created using 'newDependency', and it can be “opened” again using 'dependOn'. What makes a @@ -161,9 +162,23 @@ instance Cacheable Integer where unchanged _ = (==) instance Cacheable Scientific where unchanged _ = (==) instance Cacheable Text where unchanged _ = (==) instance Cacheable N.URIAuth where unchanged _ = (==) +instance Cacheable DiffTime where unchanged _ = (==) instance Cacheable NominalDiffTime where unchanged _ = (==) instance Cacheable UTCTime where unchanged _ = (==) +-- instances for CronSchedule from package `cron` +instance Cacheable StepField +instance Cacheable RangeField +instance Cacheable SpecificField +instance Cacheable BaseField +instance Cacheable CronField +instance Cacheable MonthSpec +instance Cacheable DayOfMonthSpec +instance Cacheable DayOfWeekSpec +instance Cacheable HourSpec +instance Cacheable MinuteSpec +instance Cacheable CronSchedule + instance (Cacheable a) => Cacheable (Seq a) where unchanged = liftEq . 
unchanged instance (Cacheable a) => Cacheable (Vector a) where diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 9d0adedfe4b9a..7157f81f1a6a4 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -35,9 +35,8 @@ resolveScheduledTrigger :: (QErrM m, MonadIO m) => CreateScheduledTrigger -> m ScheduledTriggerInfo resolveScheduledTrigger CreateScheduledTrigger {..} = do - let headerConfs = fromMaybe [] stHeaders webhookInfo <- getWebhookInfoFromConf stWebhookConf - headerInfo <- getHeaderInfosFromConf headerConfs + headerInfo <- getHeaderInfosFromConf stHeaders let stInfo = ScheduledTriggerInfo stName @@ -72,8 +71,8 @@ updateScheduledTriggerInCatalog CreateScheduledTrigger {..} = liftTx $ do WHERE name = $1 AND scheduled_time > now() AND tries = 0 |] (Identity stName) False -runDeleteScheduledTrigger :: (CacheRWM m, MonadTx m) => DeleteScheduledTrigger -> m EncJSON -runDeleteScheduledTrigger (DeleteScheduledTrigger stName) = do +runDeleteScheduledTrigger :: (CacheRWM m, MonadTx m) => TriggerName -> m EncJSON +runDeleteScheduledTrigger stName = do deleteScheduledTriggerFromCatalog stName return successMsg @@ -85,18 +84,18 @@ deleteScheduledTriggerFromCatalog stName = liftTx $ do WHERE name = $1 |] (Identity stName) False -runCancelScheduledEvent :: (MonadTx m) => CancelScheduledEvent -> m EncJSON +runCancelScheduledEvent :: (MonadTx m) => EventId -> m EncJSON runCancelScheduledEvent se = do affectedRows <- deleteScheduledEventFromCatalog se if | affectedRows == 1 -> pure successMsg | affectedRows == 0 -> throw400 NotFound "scheduled event not found" | otherwise -> throw500 "unexpected: more than one scheduled events cancelled" -deleteScheduledEventFromCatalog :: (MonadTx m) => CancelScheduledEvent -> m Int -deleteScheduledEventFromCatalog se = liftTx $ do +deleteScheduledEventFromCatalog :: (MonadTx m) => EventId -> m Int 
+deleteScheduledEventFromCatalog seId = liftTx $ do (runIdentity . Q.getRow) <$> Q.withQE defaultTxErrorHandler [Q.sql| DELETE FROM hdb_catalog.hdb_scheduled_events WHERE id = $1 RETURNING count(*) - |] (Identity (cseId se)) False + |] (Identity seId) False diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs index fcb1d61ad2f56..35a2042178ea8 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs @@ -186,16 +186,13 @@ buildSchemaCacheRule = proc inputs -> do -- scheduled triggers scheduledTriggersMap <- (mapFromL _cstName scheduledTriggers >- returnA) >-> (| Inc.keyed (\_ (CatalogScheduledTrigger n wc s p rc h ) -> do - let q = CreateScheduledTrigger n wc s p rc h + let q = CreateScheduledTrigger n wc s p rc (fromMaybe [] h) definition = toJSON q triggerName = triggerNameToTxt n metadataObject = MetadataObject (MOScheduledTrigger n) definition addScheduledTriggerContext e = "in scheduled trigger " <> triggerName <> ": " <> e (| withRecordInconsistency ( - (| modifyErrA ( do - scheduledTriggerInfo <- bindErrorA -< resolveScheduledTrigger q - returnA -< scheduledTriggerInfo - ) + (| modifyErrA (bindErrorA -< resolveScheduledTrigger q) |) addScheduledTriggerContext) |) metadataObject) |) diff --git a/server/src-lib/Hasura/RQL/Instances.hs b/server/src-lib/Hasura/RQL/Instances.hs index 0703cc3b457da..41296e8868e36 100644 --- a/server/src-lib/Hasura/RQL/Instances.hs +++ b/server/src-lib/Hasura/RQL/Instances.hs @@ -12,6 +12,7 @@ import qualified Language.Haskell.TH.Syntax as TH import Data.Functor.Product import Data.GADT.Compare import Instances.TH.Lift () +import System.Cron.Types instance NFData G.Argument instance NFData G.Directive @@ -42,6 +43,19 @@ deriving instance NFData G.Variable deriving instance (NFData a) => NFData (G.ListValueG a) deriving instance (NFData a) => NFData (G.ObjectValueG a) +-- instances for CronSchedule from package `cron` +instance 
NFData StepField +instance NFData RangeField +instance NFData SpecificField +instance NFData BaseField +instance NFData CronField +instance NFData MonthSpec +instance NFData DayOfMonthSpec +instance NFData DayOfWeekSpec +instance NFData HourSpec +instance NFData MinuteSpec +instance NFData CronSchedule + instance (TH.Lift k, TH.Lift v) => TH.Lift (M.HashMap k v) where lift m = [| M.fromList $(TH.lift $ M.toList m) |] diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 543f2f068a745..869ee8d31674d 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -4,12 +4,11 @@ module Hasura.RQL.Types.ScheduledTrigger ( ScheduleType(..) , CreateScheduledTrigger(..) , RetryConfST(..) - , DeleteScheduledTrigger(..) - , CancelScheduledEvent(..) , formatTime' ) where import Data.Time.Clock +import Data.Time.Clock.Units import Data.Time.Format import Data.Aeson import Data.Aeson.Casing @@ -23,46 +22,12 @@ import qualified Data.Text as T import qualified Data.Aeson as J import qualified Hasura.RQL.Types.EventTrigger as ET --- instance TH.Lift (Fixed E12) where --- lift x = [| MkFixed x' |] --- where --- x' = resolution x - --- instance TH.Lift NominalDiffTime where --- lift x = [| secondsToNominalDiffTime x'|] --- where --- x' = nominalDiffTimeToSeconds x - -instance NFData StepField -instance NFData RangeField -instance NFData SpecificField -instance NFData BaseField -instance NFData CronField -instance NFData MonthSpec -instance NFData DayOfMonthSpec -instance NFData DayOfWeekSpec -instance NFData HourSpec -instance NFData MinuteSpec -instance NFData CronSchedule - -instance Cacheable StepField -instance Cacheable RangeField -instance Cacheable SpecificField -instance Cacheable BaseField -instance Cacheable CronField -instance Cacheable MonthSpec -instance Cacheable DayOfMonthSpec -instance Cacheable DayOfWeekSpec -instance Cacheable 
HourSpec -instance Cacheable MinuteSpec -instance Cacheable CronSchedule - data RetryConfST = RetryConfST { rcstNumRetries :: !Int - , rcstIntervalSec :: !Int - , rcstTimeoutSec :: !Int - , rcstTolerance :: !NominalDiffTime + , rcstIntervalSec :: !Seconds + , rcstTimeoutSec :: !Seconds + , rcstTolerance :: !Seconds } deriving (Show, Eq, Generic) instance NFData RetryConfST @@ -74,9 +39,9 @@ defaultRetryConf :: RetryConfST defaultRetryConf = RetryConfST { rcstNumRetries = 1 - , rcstIntervalSec = 10 - , rcstTimeoutSec = 60 - , rcstTolerance = 21600 -- 6 hours + , rcstIntervalSec = seconds 10 + , rcstTimeoutSec = seconds 60 + , rcstTolerance = hours 6 } data ScheduleType = OneOff UTCTime | Cron CronSchedule @@ -94,7 +59,7 @@ data CreateScheduledTrigger , stSchedule :: !ScheduleType , stPayload :: !(Maybe J.Value) , stRetryConf :: !RetryConfST - , stHeaders :: !(Maybe [ET.HeaderConf]) + , stHeaders :: ![ET.HeaderConf] } deriving (Show, Eq, Generic) instance NFData CreateScheduledTrigger @@ -116,7 +81,7 @@ instance FromJSON CreateScheduledTrigger where stPayload <- o .:? "payload" stSchedule <- o .: "schedule" stRetryConf <- o .:? "retry_conf" .!= defaultRetryConf - stHeaders <- o .:? "headers" + stHeaders <- o .:? 
"headers" .!= [] stWebhookConf <- case (stWebhook, stWebhookFromEnv) of (Just value, Nothing) -> pure $ ET.WCValue value (Nothing, Just env) -> pure $ ET.WCEnv env @@ -126,18 +91,6 @@ instance FromJSON CreateScheduledTrigger where $(deriveToJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''CreateScheduledTrigger) -newtype DeleteScheduledTrigger - = DeleteScheduledTrigger { dst :: ET.TriggerName } - deriving (Show, Eq) - -$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''DeleteScheduledTrigger) - -newtype CancelScheduledEvent - = CancelScheduledEvent { cseId :: T.Text } - deriving (Show, Eq) - -$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''CancelScheduledEvent) - -- Supported time string formats for the API: -- (see FromJSON for ZonedTime: https://hackage.haskell.org/package/aeson-1.4.6.0/docs/src/Data.Aeson.Types.FromJSON.html#line-2050) diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 764540508339a..630e619a85f5f 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -90,8 +90,8 @@ data RQLQueryV1 | RQCreateScheduledTrigger !CreateScheduledTrigger | RQUpdateScheduledTrigger !CreateScheduledTrigger - | RQDeleteScheduledTrigger !DeleteScheduledTrigger - | RQCancelScheduledEvent !CancelScheduledEvent + | RQDeleteScheduledTrigger !TriggerName + | RQCancelScheduledEvent !EventId -- query collections, allow list related | RQCreateQueryCollection !CreateCollection From 8f040e0a6f514b56351eb738eadb39dd4922ad76 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 29 Jan 2020 15:48:22 +0530 Subject: [PATCH 029/195] use MonadMask for general bracket --- server/cabal.project.freeze | 275 +++++++++--------- server/graphql-engine.cabal | 1 + .../src-lib/Hasura/Eventing/EventTrigger.hs | 46 ++- 3 files changed, 167 insertions(+), 155 deletions(-) diff --git a/server/cabal.project.freeze b/server/cabal.project.freeze index 
3f90eae46dfbb..1dd007b8adb0d 100644 --- a/server/cabal.project.freeze +++ b/server/cabal.project.freeze @@ -1,97 +1,99 @@ constraints: any.Cabal ==2.4.0.1, - any.Glob ==0.9.3, + any.Glob ==0.10.0, any.HUnit ==1.6.0.0, any.Only ==0.1, any.QuickCheck ==2.12.6.1, QuickCheck +templatehaskell, - any.RSA ==2.3.1, + any.RSA ==2.4.1, any.SHA ==1.6.4.4, SHA -exe, any.Spock-core ==0.13.0.0, - any.StateVar ==1.1.1.1, + any.StateVar ==1.2, any.abstract-deque ==0.3, abstract-deque -usecas, any.abstract-par ==0.3.3, any.adjunctions ==4.4, - any.aeson ==1.4.2.0, + any.aeson ==1.4.6.0, aeson -bytestring-builder -cffi -developer -fast, - any.aeson-casing ==0.1.1.0, - any.ansi-terminal ==0.8.2, + any.aeson-casing ==0.2.0.0, + any.ansi-terminal ==0.10.2, ansi-terminal -example, - any.ansi-wl-pprint ==0.6.8.2, + any.ansi-wl-pprint ==0.6.9, ansi-wl-pprint -example, - any.appar ==0.1.7, + any.appar ==0.1.8, any.array ==0.5.3.0, - any.asn1-encoding ==0.9.5, - any.asn1-parse ==0.9.4, - any.asn1-types ==0.3.2, - any.async ==2.2.1, + any.asn1-encoding ==0.9.6, + any.asn1-parse ==0.9.5, + any.asn1-types ==0.3.3, + any.async ==2.2.2, async -bench, - any.attoparsec ==0.13.2.2, + any.attoparsec ==0.13.2.3, attoparsec -developer, any.attoparsec-iso8601 ==1.0.1.0, attoparsec-iso8601 -developer -fast, - any.authenticate-oauth ==1.6, - any.auto-update ==0.1.4.1, + any.authenticate-oauth ==1.6.0.1, + any.auto-update ==0.1.6, any.base ==4.12.0.0, any.base-compat ==0.10.5, any.base-compat-batteries ==0.10.5, - any.base-orphans ==0.8.1, + any.base-orphans ==0.8.2, any.base-prelude ==1.3, any.base16-bytestring ==0.1.1.6, - any.base64-bytestring ==1.0.0.2, - any.basement ==0.0.10, - any.bifunctors ==5.5.4, + any.base64-bytestring ==1.0.0.3, + any.basement ==0.0.11, + any.bifunctors ==5.5.6, bifunctors +semigroups +tagged, any.binary ==0.8.6.0, + any.binary-orphans ==1.0.1, any.binary-parser ==0.5.5, any.blaze-builder ==0.4.1.0, - any.blaze-html ==0.9.1.1, - any.blaze-markup ==0.8.2.2, + any.blaze-html 
==0.9.1.2, + any.blaze-markup ==0.8.2.3, any.bsb-http-chunked ==0.0.0.4, - any.byteable ==0.1.1, any.byteorder ==1.0.4, any.bytestring ==0.10.8.2, any.bytestring-builder ==0.10.8.2.0, bytestring-builder +bytestring_has_builder, - any.bytestring-strict-builder ==0.4.5.1, - any.bytestring-tree-builder ==0.2.7.2, + any.bytestring-strict-builder ==0.4.5.3, + any.bytestring-tree-builder ==0.2.7.3, any.cabal-doctest ==1.0.8, - any.call-stack ==0.1.0, - any.case-insensitive ==1.2.0.11, - any.cassava ==0.5.1.0, + any.call-stack ==0.2.0, + any.case-insensitive ==1.2.1.0, + any.cassava ==0.5.2.0, cassava -bytestring--lt-0_10_4, - any.cereal ==0.5.8.0, + any.cereal ==0.5.8.1, cereal -bytestring-builder, any.charset ==0.3.7.1, - any.clock ==0.7.2, + any.clock ==0.8, clock -llvm, any.cmdargs ==0.10.20, cmdargs +quotation -testprog, any.code-page ==0.2, any.colour ==2.3.5, - any.comonad ==5.0.5, + any.comonad ==5.0.6, comonad +containers +distributive +test-doctests, any.concise ==0.1.0.1, - any.concurrent-output ==1.10.9, - any.conduit ==1.3.1.1, - any.connection ==0.2.8, - any.constraints ==0.10.1, + any.concurrent-output ==1.10.11, + any.conduit ==1.3.1.2, + any.connection ==0.3.1, + any.constraints ==0.11.2, any.containers ==0.6.0.1, - any.contravariant ==1.5.1, + any.contravariant ==1.5.2, contravariant +semigroups +statevar +tagged, - any.contravariant-extras ==0.3.4, - any.cookie ==0.4.4, - any.criterion ==1.5.5.0, + any.contravariant-extras ==0.3.5.1, + any.cookie ==0.4.5, + any.criterion ==1.5.6.1, criterion -embed-data-files -fast, - any.criterion-measurement ==0.1.1.0, + any.criterion-measurement ==0.1.2.0, criterion-measurement -fast, + any.cron ==0.6.2, + cron -lib-werror, any.crypto-api ==0.13.3, crypto-api -all_cpolys, any.crypto-pubkey-types ==0.4.3, any.cryptohash-md5 ==0.11.100.1, any.cryptohash-sha1 ==0.11.100.1, - any.cryptonite ==0.25, + any.cryptonite ==0.26, cryptonite -check_alignment +integer-gmp -old_toolchain_inliner +support_aesni +support_deepseq 
-support_pclmuldq +support_rdrand -support_sse, any.data-bword ==0.1.0.1, any.data-checked ==0.3, @@ -100,117 +102,124 @@ constraints: any.Cabal ==2.4.0.1, any.data-default-instances-containers ==0.0.1, any.data-default-instances-dlist ==0.0.1, any.data-default-instances-old-locale ==0.0.1, - any.data-dword ==0.3.1.2, + any.data-dword ==0.3.1.3, any.data-endian ==0.1.1, any.data-has ==0.3.0.0, - any.data-serializer ==0.3.4, - any.data-textual ==0.3.0.2, + any.data-serializer ==0.3.4.1, + any.data-textual ==0.3.0.3, any.deepseq ==1.4.4.0, - any.deferred-folds ==0.9.10, + any.deferred-folds ==0.9.10.1, any.dense-linear-algebra ==0.1.0.0, any.dependent-map ==0.2.4.0, any.dependent-sum ==0.4, any.directory ==1.3.3.0, - any.distributive ==0.6, + any.distributive ==0.6.1, distributive +semigroups +tagged, - any.dlist ==0.8.0.6, + any.dlist ==0.8.0.7, any.easy-file ==0.2.2, any.either ==5.0.1.1, any.ekg-core ==0.1.1.6, any.ekg-json ==0.1.0.6, - any.entropy ==0.4.1.4, + any.entropy ==0.4.1.5, entropy -halvm, any.erf ==2.0.0.0, any.errors ==2.3.0, - any.exceptions ==0.10.2, - any.fast-logger ==2.4.15, - any.file-embed ==0.0.11, + any.exceptions ==0.10.4, + exceptions +transformers-0-4, + any.fail ==4.9.0.0, + any.fast-logger ==3.0.0, + any.file-embed ==0.0.11.1, any.filepath ==1.4.2.1, any.focus ==1.0.1.3, - any.foldl ==1.4.5, - any.free ==5.1.1, + any.foldl ==1.4.6, + any.free ==5.1.3, any.generic-arbitrary ==0.1.0, any.ghc-boot-th ==8.6.5, any.ghc-prim ==0.5.3, + graphql-engine +developer, any.happy ==1.19.12, happy +small_base, any.hashable ==1.2.7.0, hashable -examples +integer-gmp +sse2 -sse41, - any.hashtables ==1.2.3.1, - hashtables -bounds-checking -debug -portable -sse42 +unsafe-tricks, + any.hashtables ==1.2.3.4, + hashtables -bounds-checking -debug -detailed-profiling -portable -sse42 +unsafe-tricks, any.haskell-lexer ==1.0.2, - any.hasql ==1.3.0.5, - any.hasql-pool ==0.5.0.2, - any.hasql-transaction ==0.7.1, - any.hedgehog ==0.6.1, + any.hasql ==1.4.1, + 
any.hasql-pool ==0.5.1, + any.hasql-transaction ==1.0.0.1, + any.hedgehog ==1.0.2, any.hourglass ==0.2.12, any.hsc2hs ==0.68.6, hsc2hs -in-ghc-tree, - any.hspec ==2.6.1, - any.hspec-core ==2.6.1, - any.hspec-discover ==2.6.1, + any.hspec ==2.7.0, + any.hspec-core ==2.7.0, + any.hspec-discover ==2.7.0, any.hspec-expectations ==0.8.2, any.hspec-expectations-lifted ==0.10.0, - any.http-api-data ==0.4, + any.http-api-data ==0.4.1.1, http-api-data -use-text-show, - any.http-client ==0.5.14, + any.http-client ==0.6.4, http-client +network-uri, any.http-client-tls ==0.3.5.3, any.http-date ==0.0.8, any.http-types ==0.12.3, - any.http2 ==1.6.5, + any.http2 ==2.0.3, http2 -devel, any.hvect ==0.4.0.0, - any.insert-ordered-containers ==0.2.1.0, + any.indexed-profunctors ==0.1, + any.insert-ordered-containers ==0.2.3, any.integer-gmp ==1.0.2.0, any.integer-logarithms ==1.0.3, integer-logarithms -check-bounds +integer-gmp, any.invariant ==0.5.3, - any.iproute ==1.7.7, - any.jose ==0.8.0.0, + any.iproute ==1.7.8, + any.jose ==0.8.2.0, + jose -demos, any.js-flot ==0.8.3, any.js-jquery ==3.3.1, any.kan-extensions ==5.2, any.keys ==3.12.2, any.lens ==4.17.1, lens -benchmark-uniplate -dump-splices +inlining -j -old-inline-pragmas -safe +test-doctests +test-hunit +test-properties +test-templates +trustworthy, - any.lens-aeson ==1.0.2, + any.lens-aeson ==1.1, lens-aeson +test-doctests, - any.libyaml ==0.1.1.0, + any.libyaml ==0.1.1.1, libyaml -no-unicode -system-libyaml, any.lifted-async ==0.10.0.4, any.lifted-base ==0.2.3.12, - any.list-t ==1.0.3.1, + any.list-t ==1.0.4, any.loch-th ==0.2.2, - any.math-functions ==0.3.1.0, - math-functions -system-expm1, - any.memory ==0.14.18, + any.math-functions ==0.3.3.0, + math-functions +system-erf +system-expm1, + any.memory ==0.15.0, memory +support_basement +support_bytestring +support_deepseq +support_foundation, any.microstache ==1.0.1.1, any.mime-types ==0.1.0.9, any.mmorph ==1.1.3, any.monad-control ==1.0.2.3, - any.monad-par ==0.3.4.8, + 
any.monad-par ==0.3.5, monad-par -chaselev -newgeneric, any.monad-par-extras ==0.3.3, any.monad-time ==0.3.1.0, any.monad-validate ==1.2.0.0, - any.mono-traversable ==1.0.11.0, + any.mono-traversable ==1.0.15.1, any.mtl ==2.2.2, any.mtl-compat ==0.2.2, mtl-compat -two-point-one -two-point-two, - any.mustache ==2.3.0, - any.mwc-probability ==2.0.4, + any.mustache ==2.3.1, + any.mwc-probability ==2.1.0, any.mwc-random ==0.14.0.0, any.natural-transformation ==0.4, - any.network ==2.8.0.1, - any.network-byte-order ==0.0.0.0, + any.network ==3.1.1.1, + any.network-byte-order ==0.1.3.0, any.network-info ==0.2.0.10, - any.network-ip ==0.3.0.2, + any.network-ip ==0.3.0.3, any.network-uri ==2.6.1.0, any.old-locale ==1.0.0.7, any.old-time ==1.1.0.3, - any.optparse-applicative ==0.14.3.0, + any.optics-core ==0.2, + any.optics-extra ==0.2, + any.optparse-applicative ==0.15.1.0, any.parallel ==3.2.2.0, any.parsec ==3.1.13.0, any.parsers ==0.12.10, @@ -219,133 +228,137 @@ constraints: any.Cabal ==2.4.0.1, any.placeholders ==0.1, any.pointed ==5.0.1, pointed +comonad +containers +kan-extensions +semigroupoids +semigroups +stm +tagged +transformers +unordered-containers, - any.postgresql-binary ==0.12.1.2, + any.postgresql-binary ==0.12.2, any.postgresql-libpq ==0.9.4.2, postgresql-libpq -use-pkg-config, any.pretty ==1.1.3.6, any.pretty-show ==1.9.5, - any.prettyprinter ==1.2.1, + any.prettyprinter ==1.6.0, prettyprinter -buildreadme, - any.primitive ==0.6.4.0, - any.primitive-extras ==0.7.1, + any.primitive ==0.7.0.0, + any.primitive-extras ==0.8, + any.primitive-unlifted ==0.1.3.0, any.process ==1.6.5.0, - any.profunctors ==5.3, - any.protolude ==0.2.3, - any.psqueues ==0.2.7.1, + any.profunctors ==5.5.1, + any.protolude ==0.2.4, + any.psqueues ==0.2.7.2, any.quickcheck-instances ==0.3.19, quickcheck-instances -bytestring-builder, any.quickcheck-io ==0.2.0, any.random ==1.1, - any.reflection ==2.1.4, + any.reflection ==2.1.5, reflection -slow +template-haskell, - any.regex-base 
==0.93.2, - regex-base +newbase +splitbase, - any.regex-tdfa ==1.2.3.1, - regex-tdfa -devel, + any.regex-base ==0.94.0.0, + any.regex-tdfa ==1.3.1.0, + regex-tdfa -force-o2, any.reroute ==0.5.0.0, any.resource-pool ==0.2.3.2, resource-pool -developer, any.resourcet ==1.2.2, - any.retry ==0.7.7.0, + any.retry ==0.8.1.0, retry -lib-werror, any.rts ==1.0, - any.safe ==0.3.17, + any.safe ==0.3.18, any.scientific ==0.3.6.2, scientific -bytestring-builder -integer-simple, - any.semigroupoids ==5.3.2, + any.semigroupoids ==5.3.4, semigroupoids +comonad +containers +contravariant +distributive +doctests +tagged +unordered-containers, - any.semigroups ==0.18.5, - semigroups +binary +bytestring -bytestring-builder +containers +deepseq +hashable +tagged +text +transformers +unordered-containers, - any.semver ==0.3.3.1, + any.semigroups ==0.19.1, + semigroups +binary +bytestring -bytestring-builder +containers +deepseq +hashable +tagged +template-haskell +text +transformers +unordered-containers, + any.semver ==0.3.4, any.setenv ==0.1.1.3, - any.shakespeare ==2.0.22, + any.shakespeare ==2.0.24, shakespeare -test_coffee -test_export -test_roy, - any.simple-sendfile ==0.2.28, + any.simple-sendfile ==0.2.30, simple-sendfile +allow-bsd, - any.socks ==0.5.6, + any.socks ==0.6.1, any.split ==0.2.3.3, - any.statistics ==0.15.0.0, + any.statistics ==0.15.2.0, any.stm ==2.5.0.0, any.stm-containers ==1.1.0.4, - any.stm-hamt ==1.2.0.2, - any.streaming-commons ==0.2.1.0, + any.stm-hamt ==1.2.0.4, + any.streaming-commons ==0.2.1.2, streaming-commons -use-bytestring-builder, any.string-conversions ==0.4.0.1, any.superbuffer ==0.3.1.1, any.tagged ==0.8.6, tagged +deepseq +transformers, any.template-haskell ==2.14.0.0, + any.template-haskell-compat-v0208 ==0.1.2.1, any.terminal-size ==0.3.2.1, any.text ==1.2.3.1, - any.text-builder ==0.6.5, + any.text-builder ==0.6.6.1, any.text-conversions ==0.3.0, any.text-latin1 ==0.3.1, - any.text-printer ==0.5, - any.text-short ==0.1.2, + 
any.text-printer ==0.5.0.1, + any.text-short ==0.1.3, text-short -asserts, any.tf-random ==0.5, - any.th-abstraction ==0.2.11.0, - any.th-lift ==0.7.11, - any.th-lift-instances ==0.1.12, + any.th-abstraction ==0.3.1.0, + any.th-lift ==0.8.1, + any.th-lift-instances ==0.1.14, any.these ==0.7.6, any.time ==1.8.0.2, + any.time-compat ==1.9.2.2, + time-compat -old-locale, any.time-locale-compat ==0.1.1.5, time-locale-compat -old-locale, - any.tls ==1.4.1, + any.time-manager ==0.0.0, + any.tls ==1.5.3, tls +compat -hans +network, any.transformers ==0.5.6.2, any.transformers-base ==0.4.5.2, transformers-base +orphaninstances, - any.transformers-compat ==0.6.4, + any.transformers-compat ==0.6.5, transformers-compat -five +five-three -four +generic-deriving +mtl -three -two, - any.tuple-th ==0.2.5, + any.type-equality ==1, any.type-hint ==0.1, any.unix ==2.7.2.2, - any.unix-compat ==0.5.1, + any.unix-compat ==0.5.2, unix-compat -old-time, - any.unix-time ==0.4.5, + any.unix-time ==0.4.7, any.unliftio-core ==0.1.2.0, - any.unordered-containers ==0.2.9.0, + any.unordered-containers ==0.2.10.0, unordered-containers -debug, any.uri-encode ==1.5.0.5, uri-encode +network-uri -tools, any.utf8-string ==1.0.1.1, any.uuid ==1.3.13, any.uuid-types ==1.0.3, - any.vault ==0.3.1.2, + any.vault ==0.3.1.3, vault +useghc, any.vector ==0.12.0.3, vector +boundschecks -internalchecks -unsafechecks -wall, - any.vector-algorithms ==0.8.0.1, + any.vector-algorithms ==0.8.0.3, vector-algorithms +bench +boundschecks -internalchecks -llvm +properties -unsafechecks, any.vector-binary-instances ==0.2.5.1, - any.vector-builder ==0.3.7.2, + any.vector-builder ==0.3.8, any.vector-instances ==3.4, vector-instances +hashable, - any.vector-th-unbox ==0.2.1.6, - any.void ==0.7.2, + any.vector-th-unbox ==0.2.1.7, + any.void ==0.7.3, void -safe, - any.wai ==3.2.2, - any.wai-app-static ==3.1.6.3, + any.wai ==3.2.2.1, + any.wai-app-static ==3.1.7.1, wai-app-static -print, - any.wai-extra ==3.0.26, + 
any.wai-extra ==3.0.29, wai-extra -build-example, - any.wai-logger ==2.3.4, + any.wai-logger ==2.3.6, any.wai-websockets ==3.0.1.2, wai-websockets +example, - any.warp ==3.2.27, + any.warp ==3.3.8, warp +allow-sendfilefd -network-bytestring -warp-debug, - any.websockets ==0.12.5.3, + any.websockets ==0.12.7.0, websockets -example, any.wl-pprint-annotated ==0.1.0.1, any.word8 ==0.1.3, - any.wreq ==0.5.3.1, + any.wreq ==0.5.3.2, wreq -aws -developer +doctest -httpbin, any.x509 ==1.7.5, any.x509-store ==1.6.7, any.x509-system ==1.6.6, any.x509-validation ==1.6.11, - any.yaml ==0.11.0.0, + any.yaml ==0.11.2.0, yaml +no-examples +no-exe, - any.zlib ==0.6.2, + any.zlib ==0.6.2.1, zlib -non-blocking-ffi -pkg-config diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index bd9efb57c191a..d24456c10f42f 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -88,6 +88,7 @@ library , deepseq , dependent-map >=0.2.4 && <0.4 , dependent-sum >=0.4 && <0.5 + , exceptions -- `these >=1` is split into several different packages, but our current stack -- resolver has `these <1`; when we upgrade we just need to add an extra diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 4bb302bbfb2e6..7c44c2a25d31c 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -10,7 +10,7 @@ module Hasura.Eventing.EventTrigger import Control.Concurrent (threadDelay) import Control.Concurrent.Async (async, waitAny) import Control.Concurrent.STM.TVar -import Control.Exception (bracket_, try) +import Control.Monad.Catch (MonadMask, bracket_) import Control.Monad.STM (STM, atomically, retry) import Data.Aeson import Data.Aeson.Casing @@ -33,7 +33,6 @@ import qualified Data.Time.Clock as Time import qualified Database.PG.Query as Q import qualified Hasura.Logging as L import qualified Network.HTTP.Client as HTTP -import qualified 
Network.HTTP.Types as HTTP invocationVersion :: Version invocationVersion = "2" @@ -128,6 +127,7 @@ processEvent , Has (L.Logger L.Hasura) r , Has EventEngineCtx r , MonadIO m + , MonadMask m ) => LogEnvHeaders -> Q.PGPool -> IO SchemaCache -> Event -> m () processEvent logenv pool getSchemaCache e = do @@ -156,26 +156,23 @@ processEvent logenv pool getSchemaCache e = do either logQErr return finally withEventEngineCtx :: - ( MonadReader r m - , Has HTTP.Manager r - , Has (L.Logger L.Hasura) r - , MonadIO m - , MonadError HTTPErr m + ( MonadIO m + , MonadMask m ) => EventEngineCtx -> m HTTPResp -> m HTTPResp withEventEngineCtx eeCtx = bracket_ (incrementThreadCount eeCtx) (decrementThreadCount eeCtx) -incrementThreadCount :: EventEngineCtx -> IO () -incrementThreadCount (EventEngineCtx _ c maxT _ ) = atomically $ do +incrementThreadCount :: MonadIO m => EventEngineCtx -> m () +incrementThreadCount (EventEngineCtx _ c maxT _ ) = liftIO $ atomically $ do countThreads <- readTVar c if countThreads >= maxT then retry else modifyTVar' c (+1) -decrementThreadCount :: EventEngineCtx -> IO () -decrementThreadCount (EventEngineCtx _ c _ _) = atomically $ modifyTVar' c (\v -> v - 1) +decrementThreadCount :: MonadIO m => EventEngineCtx -> m () +decrementThreadCount (EventEngineCtx _ c _ _) = liftIO $ atomically $ modifyTVar' c (\v -> v - 1) createEventPayload :: RetryConf -> Event -> EventPayload createEventPayload retryConf e = EventPayload @@ -252,21 +249,22 @@ retryOrSetError e retryConf err = do mkInvocation :: EventPayload -> Int -> [HeaderConf] -> TBS.TByteString -> [HeaderConf] -> Invocation -mkInvocation ep status reqHeaders respBody respHeaders - = let resp = if isClientError status - then mkClientErr respBody - else mkResp status respBody respHeaders - in - Invocation - (epId ep) - status - (mkWebhookReq (toJSON ep) reqHeaders invocationVersion) - resp +mkInvocation ep status reqHeaders respBody respHeaders = + let resp = if isClientError status + then mkClientErr 
respBody + else mkResp status respBody respHeaders + in + Invocation + (epId ep) + status + (mkWebhookReq (toJSON ep) reqHeaders invocationVersion) + resp getEventTriggerInfoFromEvent :: SchemaCache -> Event -> Maybe EventTriggerInfo -getEventTriggerInfoFromEvent sc e = let table = eTable e - tableInfo = M.lookup table $ scTables sc - in M.lookup ( tmName $ eTrigger e) =<< (_tiEventTriggerInfoMap <$> tableInfo) +getEventTriggerInfoFromEvent sc e = + let table = eTable e + tableInfo = M.lookup table $ scTables sc + in M.lookup ( tmName $ eTrigger e) =<< (_tiEventTriggerInfoMap <$> tableInfo) fetchEvents :: Q.TxE QErr [Event] fetchEvents = From 3a4c3f582c1abe8ba317bf8d2a36bf6f0162b7b5 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 29 Jan 2020 16:42:52 +0530 Subject: [PATCH 030/195] fix tolerance time units --- server/src-lib/Data/Time/Clock/Units.hs | 4 ++-- server/src-lib/Hasura/Eventing/ScheduledTrigger.hs | 6 +++--- server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/server/src-lib/Data/Time/Clock/Units.hs b/server/src-lib/Data/Time/Clock/Units.hs index 3959505ffe9f6..181826c861511 100644 --- a/server/src-lib/Data/Time/Clock/Units.hs +++ b/server/src-lib/Data/Time/Clock/Units.hs @@ -51,8 +51,8 @@ type Seconds = DiffTime seconds :: DiffTime -> DiffTime seconds = id -diffTimeToSeconds :: DiffTime -> Integer -diffTimeToSeconds = (1000000000000 *) . diffTimeToPicoseconds +diffTimeToSeconds :: (Integral a) => DiffTime -> a +diffTimeToSeconds = (flip div 1000000000000) . fromInteger . 
diffTimeToPicoseconds newtype Days = Days { days :: DiffTime } deriving (Show, Eq, Ord) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 8ca89a4523ed2..1fa8067a019d1 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -200,10 +200,10 @@ processScheduledEvent :: -> m () processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEvent {..} = do currentTime <- liftIO getCurrentTime - if (toRational $ diffUTCTime currentTime seScheduledTime) > (toRational $ rcstTolerance stiRetryConf) + if diffUTCTime currentTime seScheduledTime > rcstTolerance stiRetryConf then processDead' else do - let timeoutSeconds = fromInteger . diffTimeToSeconds $ rcstTimeoutSec stiRetryConf + let timeoutSeconds = diffTimeToSeconds $ rcstTimeoutSec stiRetryConf httpTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) headers = map encodeHeader stiHeaders headers' = addDefaultHeaders headers @@ -258,7 +258,7 @@ retryOrMarkError se@ScheduledEvent{..} err = do markError else do currentTime <- liftIO getCurrentTime - let delay = fromMaybe (fromInteger . 
diffTimeToSeconds $ rcstIntervalSec seRetryConf) mRetryHeaderSeconds + let delay = fromMaybe (diffTimeToSeconds $ rcstIntervalSec seRetryConf) mRetryHeaderSeconds diff = fromIntegral delay retryTime = addUTCTime diff currentTime setRetry se retryTime diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 869ee8d31674d..c4a2515c1b80c 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -27,7 +27,7 @@ data RetryConfST { rcstNumRetries :: !Int , rcstIntervalSec :: !Seconds , rcstTimeoutSec :: !Seconds - , rcstTolerance :: !Seconds + , rcstTolerance :: !NominalDiffTime } deriving (Show, Eq, Generic) instance NFData RetryConfST @@ -41,7 +41,7 @@ defaultRetryConf = { rcstNumRetries = 1 , rcstIntervalSec = seconds 10 , rcstTimeoutSec = seconds 60 - , rcstTolerance = hours 6 + , rcstTolerance = 21600 -- 6 hours } data ScheduleType = OneOff UTCTime | Cron CronSchedule From 4784e19c294b0c9def5ee89c5d8e636f99dc9ac6 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Mon, 3 Feb 2020 13:19:20 +0530 Subject: [PATCH 031/195] fix tests (change in error msgs) after aeson update --- .../add_remote_schema_with_union_err_wrapped_type.yaml | 3 +-- server/tests-py/queries/v1/basic/query_args_as_string_err.yaml | 2 +- server/tests-py/queries/v1/basic/query_string_input_err.yaml | 2 +- .../select/limits/select_query_article_string_limit_error.yaml | 2 +- .../select_query_article_int_as_string_offset_error.yaml | 2 +- .../queries/v1/update/basic/person_error_no_where_clause.yaml | 2 +- 6 files changed, 6 insertions(+), 7 deletions(-) diff --git a/server/tests-py/queries/remote_schemas/add_remote_schema_with_union_err_wrapped_type.yaml b/server/tests-py/queries/remote_schemas/add_remote_schema_with_union_err_wrapped_type.yaml index 9ecbc5536fe2a..2a698512d8316 100644 --- 
a/server/tests-py/queries/remote_schemas/add_remote_schema_with_union_err_wrapped_type.yaml +++ b/server/tests-py/queries/remote_schemas/add_remote_schema_with_union_err_wrapped_type.yaml @@ -3,8 +3,7 @@ url: /v1/query status: 400 response: path: $.args - error: |- - Error in $.types[1].possibleTypes[0].name: expected Text, encountered Null + error: 'Error in $.types[1].possibleTypes[0].name: parsing Text failed, expected String, but encountered Null' code: remote-schema-error query: type: add_remote_schema diff --git a/server/tests-py/queries/v1/basic/query_args_as_string_err.yaml b/server/tests-py/queries/v1/basic/query_args_as_string_err.yaml index d7a65a9ad6d8a..c9f1a42cc485c 100644 --- a/server/tests-py/queries/v1/basic/query_args_as_string_err.yaml +++ b/server/tests-py/queries/v1/basic/query_args_as_string_err.yaml @@ -3,7 +3,7 @@ url: /v1/query status: 400 response: path: $ - error: expected Object, encountered String + error: parsing Object failed, expected Object, but encountered String code: parse-failed query: | type: count diff --git a/server/tests-py/queries/v1/basic/query_string_input_err.yaml b/server/tests-py/queries/v1/basic/query_string_input_err.yaml index 5b7a570c0e01d..5cec1b40aca63 100644 --- a/server/tests-py/queries/v1/basic/query_string_input_err.yaml +++ b/server/tests-py/queries/v1/basic/query_string_input_err.yaml @@ -3,7 +3,7 @@ url: /v1/query status: 400 response: path: $ - error: expected Object, encountered String + error: parsing Object failed, expected Object, but encountered String code: parse-failed query: | type: count diff --git a/server/tests-py/queries/v1/select/limits/select_query_article_string_limit_error.yaml b/server/tests-py/queries/v1/select/limits/select_query_article_string_limit_error.yaml index 4849f44163678..a4568ef74c201 100644 --- a/server/tests-py/queries/v1/select/limits/select_query_article_string_limit_error.yaml +++ b/server/tests-py/queries/v1/select/limits/select_query_article_string_limit_error.yaml @@ 
-3,7 +3,7 @@ url: /v1/query status: 400 response: code: parse-failed - error: 'expected Int, encountered String' + error: parsing Int failed, expected Number, but encountered String path: $.limit query: type: select diff --git a/server/tests-py/queries/v1/select/offset/select_query_article_int_as_string_offset_error.yaml b/server/tests-py/queries/v1/select/offset/select_query_article_int_as_string_offset_error.yaml index b627cab0f4e76..cfcbca9fb0d09 100644 --- a/server/tests-py/queries/v1/select/offset/select_query_article_int_as_string_offset_error.yaml +++ b/server/tests-py/queries/v1/select/offset/select_query_article_int_as_string_offset_error.yaml @@ -3,7 +3,7 @@ url: /v1/query status: 400 response: code: parse-failed - error: expected Int, encountered String + error: parsing Int failed, expected Number, but encountered String path: $.offset query: type: select diff --git a/server/tests-py/queries/v1/update/basic/person_error_no_where_clause.yaml b/server/tests-py/queries/v1/update/basic/person_error_no_where_clause.yaml index 0417ac69b78a6..d74781eb244f1 100644 --- a/server/tests-py/queries/v1/update/basic/person_error_no_where_clause.yaml +++ b/server/tests-py/queries/v1/update/basic/person_error_no_where_clause.yaml @@ -3,7 +3,7 @@ url: /v1/query status: 400 response: path: $ - error: key "where" not present + error: key "where" not found code: parse-failed query: type: update From f4535821f104b56913d9a71d0e13cdcf2df377c4 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 5 Feb 2020 16:29:42 +0530 Subject: [PATCH 032/195] change default retry value --- server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index c4a2515c1b80c..1d5b2eb3be10f 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -38,7 +38,7 @@ 
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''RetryConfST) defaultRetryConf :: RetryConfST defaultRetryConf = RetryConfST - { rcstNumRetries = 1 + { rcstNumRetries = 0 , rcstIntervalSec = seconds 10 , rcstTimeoutSec = seconds 60 , rcstTolerance = 21600 -- 6 hours From e4135fe33aa118f09b992588cc79f2802abfff52 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 5 Feb 2020 16:29:59 +0530 Subject: [PATCH 033/195] add api reference docs --- .../schema-metadata-api/event-triggers.rst | 28 ++ .../schema-metadata-api/index.rst | 21 ++ .../scheduled-triggers.rst | 309 ++++++++++++++++++ 3 files changed, 358 insertions(+) create mode 100644 docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/event-triggers.rst b/docs/graphql/manual/api-reference/schema-metadata-api/event-triggers.rst index 64c1166ce511f..55defa0ec15a2 100644 --- a/docs/graphql/manual/api-reference/schema-metadata-api/event-triggers.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/event-triggers.rst @@ -96,6 +96,10 @@ Args syntax - false - [ HeaderFromValue_ | HeaderFromEnv_ ] - List of headers to be sent with the webhook + * - retry_conf + - false + - RetryConf_ + - Retry configuration if event delivery fails * - replace - false - Boolean @@ -263,3 +267,27 @@ EventTriggerColumns "*" | [:ref:`PGColumn`] +.. _RetryConf: + +RetryConf +&&&&&&&&& + +.. list-table:: + :header-rows: 1 + + * - Key + - required + - Schema + - Description + * - num_retries + - false + - Integer + - Number of times to retry delivery. Default: 0 + * - interval_sec + - false + - Integer + - Number of seconds to wait between each retry. Default: 10 + * - timeout_sec + - false + - Integer + - Number of seconds to wait for response before timing out. 
Default: 60 diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/index.rst b/docs/graphql/manual/api-reference/schema-metadata-api/index.rst index 29be947141bb4..6fa7c482a4bad 100644 --- a/docs/graphql/manual/api-reference/schema-metadata-api/index.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/index.rst @@ -221,6 +221,26 @@ The various types of queries are listed in the following table: - 1 - Delete an existing event trigger + * - :ref:`create_scheduled_trigger` + - :ref:`create_scheduled_trigger_args ` + - 1 + - Create a scheduled trigger + + * - :ref:`update_scheduled_trigger` + - :ref:`update_scheduled_trigger_args ` + - 1 + - Update an existing scheduled trigger + + * - :ref:`delete_scheduled_trigger` + - :ref:`delete_scheduled_trigger_args ` + - 1 + - Delete an existing scheduled trigger + + * - :ref:`cancel_scheduled_event` + - :ref:`cancel_scheduled_event_args ` + - 1 + - Cancel a particular run of a scheduled trigger + * - :ref:`add_remote_schema` - :ref:`add_remote_schema_args ` - 1 @@ -389,6 +409,7 @@ See :doc:`../../deployment/graphql-engine-flags/reference` for info on setting t Permissions Computed Fields Event Triggers + Scheduled Triggers Remote Schemas Query Collections Manage Metadata diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst new file mode 100644 index 0000000000000..259b6724b6b37 --- /dev/null +++ b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst @@ -0,0 +1,309 @@ +.. meta:: + :description: Manage scheduled triggers with the Hasura schema/metadata API + :keywords: hasura, docs, schema/metadata API, API reference, scheduled trigger + +Schema/Metadata API Reference: Scheduled Triggers +================================================= + +.. 
contents:: Table of contents + :backlinks: none + :depth: 1 + :local: + +Scheduled triggers are used to invoke webhooks based on a timestamp or cron. + +.. _create_scheduled_trigger: + +create_scheduled_trigger +------------------------ + +``create_scheduled_trigger`` is used to create a new scheduled trigger. + +.. code-block:: http + + POST /v1/query HTTP/1.1 + Content-Type: application/json + X-Hasura-Role: admin + + { + "type" : "create_scheduled_trigger", + "args" : { + "name": "sample_cron", + "webhook": "https://httpbin.org/post", + "schedule": { + "type": "Cron", + "value": "* * * * *" + }, + "payload": { + "key1": "value1", + "key2": "value2" + } + } + } + +.. _create_scheduled_trigger_syntax: + +Args syntax +^^^^^^^^^^^ + +.. list-table:: + :header-rows: 1 + + * - Key + - Required + - Schema + - Description + * - name + - true + - TriggerName_ + - Name of the scheduled trigger + * - webhook + - false + - String + - Full url of webhook + * - webhook_from_env + - false + - String + - Environment variable which has the full url of webhook + * - schedule + - true + - ScheduleConf_ + - Schedule configuration + * - payload + - false + - JSON + - Any JSON payload which will be sent with the scheduled event + * - headers + - false + - [ HeaderFromValue_ | HeaderFromEnv_ ] + - List of headers to be sent with the webhook + * - retry_conf + - false + - RetryConfST_ + - Retry configuration if scheduled event delivery fails + + +.. _update_scheduled_trigger: + +update_scheduled_trigger +------------------------ + +``update_scheduled_trigger`` is used to update an existing scheduled trigger. + +.. _update_scheduled_trigger_syntax: + +Args syntax +^^^^^^^^^^^ + +.. 
list-table:: + :header-rows: 1 + + * - Key + - Required + - Schema + - Description + * - name + - true + - TriggerName_ + - Name of the scheduled trigger + * - webhook + - false + - String + - Full url of webhook + * - webhook_from_env + - false + - String + - Environment variable which has the full url of webhook + * - schedule + - true + - ScheduleConf_ + - Schedule configuration + * - payload + - false + - JSON + - Any JSON payload which will be sent with the scheduled event + * - headers + - false + - [ HeaderFromValue_ | HeaderFromEnv_ ] + - List of headers to be sent with the webhook + * - retry_conf + - false + - RetryConfST_ + - Retry configuration if scheduled event delivery fails + +.. _delete_scheduled_trigger: + +delete_scheduled_trigger +------------------------ + +``delete_scheduled_trigger`` is used to delete an existing scheduled trigger. + +.. code-block:: http + + POST /v1/query HTTP/1.1 + Content-Type: application/json + X-Hasura-Role: admin + + { + "type" : "delete_scheduled_trigger", + "args" : { + "name": "sample_cron" + } + } + +.. _delete_scheduled_trigger_syntax: + +Args syntax +^^^^^^^^^^^ + +.. list-table:: + :header-rows: 1 + + * - Key + - Required + - Schema + - Description + * - name + - true + - TriggerName_ + - Name of the scheduled trigger + +.. _cancel_scheduled_event: + +cancel_scheduled_event +---------------------- + +``cancel_scheduled_event`` is used to cancel a particular run of a scheduled trigger. + +.. code-block:: http + + POST /v1/query HTTP/1.1 + Content-Type: application/json + X-Hasura-Role: admin + + { + "type" : "cancel_scheduled_event", + "args" : { + "event_id": "237b604c-67f1-4aa8-8453-36855cfebfc4" + } + } + +.. _cancel_scheduled_event_syntax: + +Args syntax +^^^^^^^^^^^ + +.. list-table:: + :header-rows: 1 + + * - Key + - Required + - Schema + - Description + * - event_id + - true + - UUID + - Id of the scheduled event + +.. _TriggerName: + +TriggerName +&&&&&&&&&&& + +.. parsed-literal:: + + String + +.. 
_ScheduleConf: + +ScheduleConf +&&&&&&&&&&&& + +.. list-table:: + :header-rows: 1 + + * - Key + - required + - Schema + - Description + * - type + - true + - OneOff | Cron + - Type of scheduled trigger + * - value + - true + - String + - Timestamp in UTC or cron expression + +.. _HeaderFromValue: + +HeaderFromValue +&&&&&&&&&&&&&&& + +.. list-table:: + :header-rows: 1 + + * - Key + - required + - Schema + - Description + * - name + - true + - String + - Name of the header + * - value + - true + - String + - Value of the header + +.. _HeaderFromEnv: + +HeaderFromEnv +&&&&&&&&&&&&& + +.. list-table:: + :header-rows: 1 + + * - Key + - required + - Schema + - Description + * - name + - true + - String + - Name of the header + * - value_from_env + - true + - String + - Name of the environment variable which holds the value of the header + +.. _RetryConfST: + +RetryConfST +&&&&&&&&&&& + +.. list-table:: + :header-rows: 1 + + * - Key + - required + - Schema + - Description + * - num_retries + - false + - Integer + - Number of times to retry delivery. Default: 0 + * - interval_sec + - false + - Integer + - Number of seconds to wait between each retry. Default: 10 + * - timeout_sec + - false + - Integer + - Number of seconds to wait for response before timing out. Default: 60 + * - tolerance + - false + - Integer + - Number of minutes between scheduled time and actual delivery time that is acceptable. If the time difference is more than this, then the event is dropped. 
Default: 360 (6 hours) + + From a8477110067aa16cd699d562b8111ad5c034538d Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Mon, 10 Feb 2020 10:46:58 +0530 Subject: [PATCH 034/195] remove type family fanciness --- .../Hasura/Eventing/ScheduledTrigger.hs | 100 ++++++++---------- 1 file changed, 45 insertions(+), 55 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 1fa8067a019d1..d3503c77914d9 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -54,45 +54,35 @@ scheduledEventsTable = (TableName $ T.pack "hdb_scheduled_events") data ScheduledEventSeed - = ScheduledEventSeed - { sesName :: !TriggerName - , sesScheduledTime :: !UTCTime - } deriving (Show, Eq) + = ScheduledEventSeed + { sesName :: !TriggerName + , sesScheduledTime :: !UTCTime + } deriving (Show, Eq) -- | ScheduledEvents can be "partial" or "full" -- Partial represents the event as present in db -- Full represents the partial event combined with schema cache configuration elements -data SE_P = SE_PARTIAL | SE_FULL -type family Param (p :: k) x +data ScheduledEventPartial + = ScheduledEventPartial + { sepId :: !Text + , sepName :: !TriggerName + , sepScheduledTime :: !UTCTime + , sepTries :: !Int + } deriving (Show, Eq) -data ScheduledEvent (p :: SE_P) - = ScheduledEvent - { seId :: !Text - , seName :: !TriggerName - , seScheduledTime :: !UTCTime - , seTries :: !Int - , seWebhook :: !(Param p T.Text) - , sePayload :: !(Param p J.Value) - , seRetryConf :: !(Param p RetryConfST) - } +data ScheduledEventFull + = ScheduledEventFull + { sefId :: !Text + , sefName :: !TriggerName + , sefScheduledTime :: !UTCTime + , sefTries :: !Int + , sefWebhook :: !T.Text + , sefPayload :: !J.Value + , sefRetryConf :: !RetryConfST + } deriving (Show, Eq) -deriving instance Show (ScheduledEvent 'SE_PARTIAL) -deriving instance Show (ScheduledEvent 'SE_FULL) - -type 
instance Param 'SE_PARTIAL a = () -type instance Param 'SE_FULL a = a - --- empty splice to bring all the above definitions in scope -$(pure []) - -instance ( J.ToJSON (Param p T.Text) - , J.ToJSON (Param p J.Value) - , J.ToJSON (Param p Int) - , J.ToJSON (Param p RetryConfST) - ) => - J.ToJSON (ScheduledEvent p) where - toJSON = $(J.mkToJSON (J.aesonDrop 2 J.snakeCase) {J.omitNothingFields = True} ''ScheduledEvent) +$(J.deriveToJSON (J.aesonDrop 3 J.snakeCase) {J.omitNothingFields = True} ''ScheduledEventFull) runScheduledEventsGenerator :: L.Logger L.Hasura @@ -173,7 +163,7 @@ processScheduledQueue logger logEnv httpMgr pgpool getSC = case scheduledEventsE of Right partialEvents -> sequence_ $ - flip map partialEvents $ \(ScheduledEvent id' name st tries _ _ _) -> do + flip map partialEvents $ \(ScheduledEventPartial id' name st tries)-> do let sti' = Map.lookup name scheduledTriggersInfo case sti' of Nothing -> traceM "ERROR: couldn't find scheduled trigger in cache" @@ -181,7 +171,7 @@ processScheduledQueue logger logEnv httpMgr pgpool getSC = let webhook = wciCachedValue $ stiWebhookInfo sti payload = fromMaybe J.Null $ stiPayload sti retryConf = stiRetryConf sti - se = ScheduledEvent id' name st tries webhook payload retryConf + se = ScheduledEventFull id' name st tries webhook payload retryConf runReaderT (processScheduledEvent logEnv pgpool sti se) (logger, httpMgr) Left err -> traceShowM err threadDelay oneMinute @@ -196,21 +186,21 @@ processScheduledEvent :: => LogEnvHeaders -> Q.PGPool -> ScheduledTriggerInfo - -> ScheduledEvent 'SE_FULL + -> ScheduledEventFull -> m () -processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEvent {..} = do +processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEventFull {..} = do currentTime <- liftIO getCurrentTime - if diffUTCTime currentTime seScheduledTime > rcstTolerance stiRetryConf + if diffUTCTime currentTime sefScheduledTime > rcstTolerance stiRetryConf then processDead' 
else do let timeoutSeconds = diffTimeToSeconds $ rcstTimeoutSec stiRetryConf httpTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) headers = map encodeHeader stiHeaders headers' = addDefaultHeaders headers - extraLogCtx = ExtraLogContext seId + extraLogCtx = ExtraLogContext sefId res <- runExceptT $ - tryWebhook headers' httpTimeout sePayload (T.unpack seWebhook) (Just extraLogCtx) + tryWebhook headers' httpTimeout sefPayload (T.unpack sefWebhook) (Just extraLogCtx) let decodedHeaders = map (decodeHeader logEnv stiHeaders) headers' finally <- either (processError pgpool se decodedHeaders) @@ -223,7 +213,7 @@ processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEvent Left err -> logQErr err Right _ -> pure () -processError :: (MonadIO m) => Q.PGPool -> ScheduledEvent 'SE_FULL -> [HeaderConf] -> HTTPErr -> m (Either QErr ()) +processError :: (MonadIO m) => Q.PGPool -> ScheduledEventFull -> [HeaderConf] -> HTTPErr -> m (Either QErr ()) processError pgpool se decodedHeaders err = do let invocation = case err of HClient excp -> do @@ -246,11 +236,11 @@ processError pgpool se decodedHeaders err = do insertInvocation invocation retryOrMarkError se err -retryOrMarkError :: ScheduledEvent 'SE_FULL -> HTTPErr -> Q.TxE QErr () -retryOrMarkError se@ScheduledEvent{..} err = do +retryOrMarkError :: ScheduledEventFull -> HTTPErr -> Q.TxE QErr () +retryOrMarkError se@ScheduledEventFull {..} err = do let mRetryHeader = getRetryAfterHeaderFromHTTPErr err mRetryHeaderSeconds = join $ parseRetryHeaderValue <$> mRetryHeader - triesExhausted = seTries >= rcstNumRetries seRetryConf + triesExhausted = sefTries >= rcstNumRetries sefRetryConf noRetryHeader = isNothing mRetryHeaderSeconds -- current_try = tries + 1 , allowed_total_tries = rcNumRetries retryConf + 1 if triesExhausted && noRetryHeader @@ -258,7 +248,7 @@ retryOrMarkError se@ScheduledEvent{..} err = do markError else do currentTime <- liftIO getCurrentTime - let delay = fromMaybe 
(diffTimeToSeconds $ rcstIntervalSec seRetryConf) mRetryHeaderSeconds + let delay = fromMaybe (diffTimeToSeconds $ rcstIntervalSec sefRetryConf) mRetryHeaderSeconds diff = fromIntegral delay retryTime = addUTCTime diff currentTime setRetry se retryTime @@ -270,9 +260,9 @@ retryOrMarkError se@ScheduledEvent{..} err = do UPDATE hdb_catalog.hdb_scheduled_events SET error = 't', locked = 'f' WHERE id = $1 - |] (Identity seId) True + |] (Identity sefId) True -processSuccess :: (MonadIO m) => Q.PGPool -> ScheduledEvent 'SE_FULL -> [HeaderConf] -> HTTPResp -> m (Either QErr ()) +processSuccess :: (MonadIO m) => Q.PGPool -> ScheduledEventFull -> [HeaderConf] -> HTTPResp -> m (Either QErr ()) processSuccess pgpool se decodedHeaders resp = do let respBody = hrsBody resp respHeaders = hrsHeaders resp @@ -291,9 +281,9 @@ processSuccess pgpool se decodedHeaders resp = do UPDATE hdb_catalog.hdb_scheduled_events SET delivered = 't', locked = 'f' WHERE id = $1 - |] (Identity $ seId se) True + |] (Identity $ sefId se) True -processDead :: (MonadIO m) => Q.PGPool -> ScheduledEvent 'SE_FULL -> m (Either QErr ()) +processDead :: (MonadIO m) => Q.PGPool -> ScheduledEventFull -> m (Either QErr ()) processDead pgpool se = liftIO $ runExceptT $ Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) markDead @@ -305,18 +295,18 @@ processDead pgpool se = UPDATE hdb_catalog.hdb_scheduled_events SET dead = 't', locked = 'f' WHERE id = $1 - |] (Identity $ seId se) False + |] (Identity $ sefId se) False -setRetry :: ScheduledEvent 'SE_FULL -> UTCTime -> Q.TxE QErr () +setRetry :: ScheduledEventFull -> UTCTime -> Q.TxE QErr () setRetry se time = Q.unitQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.hdb_scheduled_events SET next_retry_at = $1, locked = 'f' WHERE id = $2 - |] (time, seId se) True + |] (time, sefId se) True mkInvocation - :: ScheduledEvent 'SE_FULL -> Int -> [HeaderConf] -> TBS.TByteString -> [HeaderConf] + :: ScheduledEventFull -> Int -> [HeaderConf] -> TBS.TByteString -> 
[HeaderConf] -> Invocation mkInvocation se status reqHeaders respBody respHeaders = let resp = if isClientError status @@ -324,7 +314,7 @@ mkInvocation se status reqHeaders respBody respHeaders else mkResp status respBody respHeaders in Invocation - (seId se) + (sefId se) status (mkWebhookReq (J.toJSON se) reqHeaders invocationVersion) resp @@ -345,7 +335,7 @@ insertInvocation invo = do WHERE id = $1 |] (Identity $ iEventId invo) True -getScheduledEvents :: Q.TxE QErr [ScheduledEvent 'SE_PARTIAL] +getScheduledEvents :: Q.TxE QErr [ScheduledEventPartial] getScheduledEvents = do partialSchedules <- map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.hdb_scheduled_events @@ -366,4 +356,4 @@ getScheduledEvents = do RETURNING id, name, scheduled_time, tries |] () True pure $ partialSchedules - where uncurryEvent (i, n, st, tries) = ScheduledEvent i n st tries () () () + where uncurryEvent (i, n, st, tries) = ScheduledEventPartial i n st tries From f252082c58b104a6a0c21ae202595e3b00e4ea10 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Mon, 10 Feb 2020 11:17:43 +0530 Subject: [PATCH 035/195] remove traces --- .../src-lib/Hasura/Eventing/EventTrigger.hs | 15 +++++++- server/src-lib/Hasura/Eventing/HTTP.hs | 15 -------- .../Hasura/Eventing/ScheduledTrigger.hs | 34 +++++++++++-------- server/src-lib/Hasura/Logging.hs | 6 ++++ 4 files changed, 39 insertions(+), 31 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 7c44c2a25d31c..4beabc34ec345 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -37,6 +37,19 @@ import qualified Network.HTTP.Client as HTTP invocationVersion :: Version invocationVersion = "2" +newtype EventTriggerInternalErr + = EventTriggerInternalErr QErr + deriving (Show, Eq) + +instance L.ToEngineLog EventTriggerInternalErr L.Hasura where + toEngineLog (EventTriggerInternalErr qerr) = 
+ (L.LevelError, L.eventTriggerLogType, toJSON qerr) + +logQErr :: ( MonadReader r m, Has (L.Logger L.Hasura) r, MonadIO m) => QErr -> m () +logQErr err = do + logger :: L.Logger L.Hasura <- asks getter + L.unLogger logger $ EventTriggerInternalErr err + data Event = Event { eId :: EventId @@ -107,7 +120,7 @@ pushEvents logger pool eectx = forever $ do let EventEngineCtx q _ _ fetchI = eectx eventsOrError <- runExceptT $ Q.runTx pool (Q.RepeatableRead, Just Q.ReadWrite) fetchEvents case eventsOrError of - Left err -> L.unLogger logger $ EventInternalErr err + Left err -> L.unLogger logger $ EventTriggerInternalErr err Right events -> atomically $ mapM_ (TQ.writeTQueue q) events threadDelay (fetchI * 1000) diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index 48aab5dba2578..a00c427849edc 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -17,9 +17,7 @@ module Hasura.Eventing.HTTP , mkClientErr , TriggerMetadata(..) , DeliveryInfo(..) - , logQErr , logHTTPErr - , EventInternalErr(..) 
, mkWebhookReq , mkResp , toInt64 @@ -54,7 +52,6 @@ import Data.Int (Int64) import Hasura.Logging import Hasura.Prelude import Hasura.RQL.DDL.Headers -import Hasura.RQL.Types.Error (QErr) import Hasura.RQL.Types.EventTrigger type LogEnvHeaders = Bool @@ -248,13 +245,6 @@ tryWebhook headers timeout payload webhook extraLogCtx = do eitherResp <- runHTTP manager req extraLogCtx onLeft eitherResp throwError -newtype EventInternalErr - = EventInternalErr QErr - deriving (Show, Eq) - -instance L.ToEngineLog EventInternalErr L.Hasura where - toEngineLog (EventInternalErr qerr) = (L.LevelError, L.eventTriggerLogType, J.toJSON qerr) - data TriggerMetadata = TriggerMetadata { tmName :: TriggerName } deriving (Show, Eq) @@ -289,11 +279,6 @@ mkMaybe :: [a] -> Maybe [a] mkMaybe [] = Nothing mkMaybe x = Just x -logQErr :: ( MonadReader r m, Has (L.Logger L.Hasura) r, MonadIO m) => QErr -> m () -logQErr err = do - logger :: L.Logger L.Hasura <- asks getter - L.unLogger logger $ EventInternalErr err - logHTTPErr :: ( MonadReader r m , Has (L.Logger L.Hasura) r diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index d3503c77914d9..da47a6d4c9174 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -33,8 +33,6 @@ import qualified Network.HTTP.Client as HTTP import qualified Text.Builder as TB (run) import qualified Data.HashMap.Strict as Map -import Debug.Trace - invocationVersion :: Version invocationVersion = "1" @@ -44,8 +42,18 @@ oneSecond = 1000000 oneMinute :: Int oneMinute = 60 * oneSecond -oneHour :: Int -oneHour = 60 * oneMinute +newtype ScheduledTriggerInternalErr + = ScheduledTriggerInternalErr QErr + deriving (Show, Eq) + +instance L.ToEngineLog ScheduledTriggerInternalErr L.Hasura where + toEngineLog (ScheduledTriggerInternalErr qerr) = + (L.LevelError, L.scheduledTriggerLogType, J.toJSON qerr) + +logQErr :: ( MonadReader r m, Has 
(L.Logger L.Hasura) r, MonadIO m) => QErr -> m () +logQErr err = do + logger :: L.Logger L.Hasura <- asks getter + L.unLogger logger $ ScheduledTriggerInternalErr err scheduledEventsTable :: QualifiedTable scheduledEventsTable = @@ -59,10 +67,6 @@ data ScheduledEventSeed , sesScheduledTime :: !UTCTime } deriving (Show, Eq) --- | ScheduledEvents can be "partial" or "full" --- Partial represents the event as present in db --- Full represents the partial event combined with schema cache configuration elements - data ScheduledEventPartial = ScheduledEventPartial { sepId :: !Text @@ -91,7 +95,6 @@ runScheduledEventsGenerator :: -> IO () runScheduledEventsGenerator logger pgpool getSC = do forever $ do - traceM "entering scheduled events generator" sc <- getSC let scheduledTriggers = Map.elems $ scScheduledTriggers sc runExceptT @@ -101,8 +104,8 @@ runScheduledEventsGenerator logger pgpool getSC = do (insertScheduledEventsFor scheduledTriggers) ) >>= \case Right _ -> pure () Left err -> - L.unLogger logger $ EventInternalErr $ err500 Unexpected (T.pack $ show err) - threadDelay oneHour + L.unLogger logger $ ScheduledTriggerInternalErr $ err500 Unexpected (T.pack $ show err) + threadDelay oneMinute insertScheduledEventsFor :: [ScheduledTriggerInfo] -> Q.TxE QErr () insertScheduledEventsFor scheduledTriggers = do @@ -132,7 +135,7 @@ generateScheduledEventsFrom time ScheduledTriggerInfo{..} = Cron cron -> generateScheduleTimesBetween time - (addUTCTime nominalDay time) + (addUTCTime nominalDay time) -- by default, generate events for one day cron in map (ScheduledEventSeed stiName) events @@ -155,7 +158,6 @@ processScheduledQueue -> IO () processScheduledQueue logger logEnv httpMgr pgpool getSC = forever $ do - traceM "entering processor queue" scheduledTriggersInfo <- scScheduledTriggers <$> getSC scheduledEventsE <- runExceptT $ @@ -166,14 +168,16 @@ processScheduledQueue logger logEnv httpMgr pgpool getSC = flip map partialEvents $ \(ScheduledEventPartial id' name st 
tries)-> do let sti' = Map.lookup name scheduledTriggersInfo case sti' of - Nothing -> traceM "ERROR: couldn't find scheduled trigger in cache" + Nothing -> L.unLogger logger $ ScheduledTriggerInternalErr $ + err500 Unexpected "could not find scheduled trigger in cache" Just sti -> do let webhook = wciCachedValue $ stiWebhookInfo sti payload = fromMaybe J.Null $ stiPayload sti retryConf = stiRetryConf sti se = ScheduledEventFull id' name st tries webhook payload retryConf runReaderT (processScheduledEvent logEnv pgpool sti se) (logger, httpMgr) - Left err -> traceShowM err + Left err -> L.unLogger logger $ ScheduledTriggerInternalErr $ + err500 Unexpected $ "could not fetch scheduled events: " <> (T.pack $ show err) threadDelay oneMinute processScheduledEvent :: diff --git a/server/src-lib/Hasura/Logging.hs b/server/src-lib/Hasura/Logging.hs index 8058b69f96002..88777dc5a4bc7 100644 --- a/server/src-lib/Hasura/Logging.hs +++ b/server/src-lib/Hasura/Logging.hs @@ -19,6 +19,7 @@ module Hasura.Logging , mkLoggerCtx , cleanLoggerCtx , eventTriggerLogType + , scheduledTriggerLogType , EnabledLogTypes (..) 
, defaultEnabledEngineLogTypes , isEngineLogTypeEnabled @@ -94,6 +95,7 @@ data InternalLogTypes = ILTUnstructured -- ^ mostly for debug logs - see @debugT@, @debugBS@ and @debugLBS@ functions | ILTEventTrigger + | ILTScheduledTrigger | ILTWsServer -- ^ internal logs for the websocket server | ILTPgClient @@ -110,6 +112,7 @@ instance J.ToJSON InternalLogTypes where toJSON = \case ILTUnstructured -> "unstructured" ILTEventTrigger -> "event-trigger" + ILTScheduledTrigger -> "scheduled-trigger" ILTWsServer -> "ws-server" ILTPgClient -> "pg-client" ILTMetadata -> "metadata" @@ -266,3 +269,6 @@ mkLogger (LoggerCtx loggerSet serverLogLevel timeGetter enabledLogTypes) = Logge eventTriggerLogType :: EngineLogType Hasura eventTriggerLogType = ELTInternal ILTEventTrigger + +scheduledTriggerLogType :: EngineLogType Hasura +scheduledTriggerLogType = ELTInternal ILTScheduledTrigger From a1e453b246afa4600b821e5f257bd5222c03a1b2 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 11 Feb 2020 13:26:24 +0530 Subject: [PATCH 036/195] set lower bound for cron package --- server/cabal.project.freeze | 30 +++++++++++++------------- server/graphql-engine.cabal | 2 +- server/src-lib/Network/URI/Extended.hs | 5 +---- 3 files changed, 17 insertions(+), 20 deletions(-) diff --git a/server/cabal.project.freeze b/server/cabal.project.freeze index 1dd007b8adb0d..37e82168fc848 100644 --- a/server/cabal.project.freeze +++ b/server/cabal.project.freeze @@ -16,7 +16,7 @@ constraints: any.Cabal ==2.4.0.1, any.aeson ==1.4.6.0, aeson -bytestring-builder -cffi -developer -fast, any.aeson-casing ==0.2.0.0, - any.ansi-terminal ==0.10.2, + any.ansi-terminal ==0.10.3, ansi-terminal -example, any.ansi-wl-pprint ==0.6.9, ansi-wl-pprint -example, @@ -41,7 +41,7 @@ constraints: any.Cabal ==2.4.0.1, any.base16-bytestring ==0.1.1.6, any.base64-bytestring ==1.0.0.3, any.basement ==0.0.11, - any.bifunctors ==5.5.6, + any.bifunctors ==5.5.7, bifunctors +semigroups +tagged, any.binary ==0.8.6.0, 
any.binary-orphans ==1.0.1, @@ -76,7 +76,7 @@ constraints: any.Cabal ==2.4.0.1, any.concurrent-output ==1.10.11, any.conduit ==1.3.1.2, any.connection ==0.3.1, - any.constraints ==0.11.2, + any.constraints ==0.12, any.containers ==0.6.0.1, any.contravariant ==1.5.2, contravariant +semigroups +statevar +tagged, @@ -143,7 +143,7 @@ constraints: any.Cabal ==2.4.0.1, hashable -examples +integer-gmp +sse2 -sse41, any.hashtables ==1.2.3.4, hashtables -bounds-checking -debug -detailed-profiling -portable -sse42 +unsafe-tricks, - any.haskell-lexer ==1.0.2, + any.haskell-lexer ==1.1, any.hasql ==1.4.1, any.hasql-pool ==0.5.1, any.hasql-transaction ==1.0.0.1, @@ -178,14 +178,14 @@ constraints: any.Cabal ==2.4.0.1, any.js-flot ==0.8.3, any.js-jquery ==3.3.1, any.kan-extensions ==5.2, - any.keys ==3.12.2, + any.keys ==3.12.3, any.lens ==4.17.1, lens -benchmark-uniplate -dump-splices +inlining -j -old-inline-pragmas -safe +test-doctests +test-hunit +test-properties +test-templates +trustworthy, any.lens-aeson ==1.1, lens-aeson +test-doctests, - any.libyaml ==0.1.1.1, + any.libyaml ==0.1.2, libyaml -no-unicode -system-libyaml, - any.lifted-async ==0.10.0.4, + any.lifted-async ==0.10.0.5, any.lifted-base ==0.2.3.12, any.list-t ==1.0.4, any.loch-th ==0.2.2, @@ -207,14 +207,14 @@ constraints: any.Cabal ==2.4.0.1, any.mtl-compat ==0.2.2, mtl-compat -two-point-one -two-point-two, any.mustache ==2.3.1, - any.mwc-probability ==2.1.0, + any.mwc-probability ==2.2.0, any.mwc-random ==0.14.0.0, any.natural-transformation ==0.4, any.network ==3.1.1.1, - any.network-byte-order ==0.1.3.0, + any.network-byte-order ==0.1.4.0, any.network-info ==0.2.0.10, any.network-ip ==0.3.0.3, - any.network-uri ==2.6.1.0, + any.network-uri ==2.6.2.0, any.old-locale ==1.0.0.7, any.old-time ==1.1.0.3, any.optics-core ==0.2, @@ -232,8 +232,8 @@ constraints: any.Cabal ==2.4.0.1, any.postgresql-libpq ==0.9.4.2, postgresql-libpq -use-pkg-config, any.pretty ==1.1.3.6, - any.pretty-show ==1.9.5, - any.prettyprinter 
==1.6.0, + any.pretty-show ==1.10, + any.prettyprinter ==1.6.1, prettyprinter -buildreadme, any.primitive ==0.7.0.0, any.primitive-extras ==0.8, @@ -294,7 +294,7 @@ constraints: any.Cabal ==2.4.0.1, any.text-short ==0.1.3, text-short -asserts, any.tf-random ==0.5, - any.th-abstraction ==0.3.1.0, + any.th-abstraction ==0.3.2.0, any.th-lift ==0.8.1, any.th-lift-instances ==0.1.14, any.these ==0.7.6, @@ -327,7 +327,7 @@ constraints: any.Cabal ==2.4.0.1, any.uuid-types ==1.0.3, any.vault ==0.3.1.3, vault +useghc, - any.vector ==0.12.0.3, + any.vector ==0.12.1.2, vector +boundschecks -internalchecks -unsafechecks -wall, any.vector-algorithms ==0.8.0.3, vector-algorithms +bench +boundschecks -internalchecks -llvm +properties -unsafechecks, @@ -346,7 +346,7 @@ constraints: any.Cabal ==2.4.0.1, any.wai-logger ==2.3.6, any.wai-websockets ==3.0.1.2, wai-websockets +example, - any.warp ==3.3.8, + any.warp ==3.3.9, warp +allow-sendfilefd -network-bytestring -warp-debug, any.websockets ==0.12.7.0, websockets -example, diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index d24456c10f42f..9ddb1d14738ed 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -194,7 +194,7 @@ library , generic-arbitrary -- scheduled triggers - , cron + , cron >= 0.6.2 exposed-modules: Control.Arrow.Extended , Control.Arrow.Trans diff --git a/server/src-lib/Network/URI/Extended.hs b/server/src-lib/Network/URI/Extended.hs index 798e5794ac16c..5202691143523 100644 --- a/server/src-lib/Network/URI/Extended.hs +++ b/server/src-lib/Network/URI/Extended.hs @@ -9,10 +9,9 @@ import Data.Aeson import Data.Aeson.Types import Data.Hashable import Hasura.Prelude -import Language.Haskell.TH.Syntax (Lift) import Network.URI -import qualified Data.Text as T +import qualified Data.Text as T instance {-# INCOHERENT #-} FromJSON URI where parseJSON (String uri) = do @@ -26,7 +25,5 @@ instance {-# INCOHERENT #-} ToJSON URI where instance {-# INCOHERENT #-} ToJSONKey URI 
where toJSONKey = toJSONKeyText (T.pack . show) -instance Lift URI - instance Hashable URI where hashWithSalt i = hashWithSalt i . (T.pack . show) From edc35be867578aced639a33398c5581614322e3f Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 11 Feb 2020 13:36:28 +0530 Subject: [PATCH 037/195] use round since 'DiffTime' works in seconds --- server/src-lib/Data/Time/Clock/Units.hs | 4 ---- server/src-lib/Hasura/Eventing/ScheduledTrigger.hs | 5 ++--- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/server/src-lib/Data/Time/Clock/Units.hs b/server/src-lib/Data/Time/Clock/Units.hs index 181826c861511..5e81ff656d25c 100644 --- a/server/src-lib/Data/Time/Clock/Units.hs +++ b/server/src-lib/Data/Time/Clock/Units.hs @@ -37,7 +37,6 @@ module Data.Time.Clock.Units , Milliseconds(..) , Microseconds(..) , Nanoseconds(..) - , diffTimeToSeconds ) where import Prelude @@ -51,9 +50,6 @@ type Seconds = DiffTime seconds :: DiffTime -> DiffTime seconds = id -diffTimeToSeconds :: (Integral a) => DiffTime -> a -diffTimeToSeconds = (flip div 1000000000000) . fromInteger . 
diffTimeToPicoseconds - newtype Days = Days { days :: DiffTime } deriving (Show, Eq, Ord) deriving (Num, Fractional, Real, RealFrac) via (TimeUnit (SecondsP 86400)) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index da47a6d4c9174..64fab351a14f2 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -10,7 +10,6 @@ module Hasura.Eventing.ScheduledTrigger import Control.Concurrent (threadDelay) import Data.Has import Data.Time.Clock -import Data.Time.Clock.Units import Hasura.Eventing.HTTP import Hasura.Prelude import Hasura.RQL.DDL.Headers @@ -197,7 +196,7 @@ processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEventF if diffUTCTime currentTime sefScheduledTime > rcstTolerance stiRetryConf then processDead' else do - let timeoutSeconds = diffTimeToSeconds $ rcstTimeoutSec stiRetryConf + let timeoutSeconds = round $ rcstTimeoutSec stiRetryConf httpTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) headers = map encodeHeader stiHeaders headers' = addDefaultHeaders headers @@ -252,7 +251,7 @@ retryOrMarkError se@ScheduledEventFull {..} err = do markError else do currentTime <- liftIO getCurrentTime - let delay = fromMaybe (diffTimeToSeconds $ rcstIntervalSec sefRetryConf) mRetryHeaderSeconds + let delay = fromMaybe (round $ rcstIntervalSec sefRetryConf) mRetryHeaderSeconds diff = fromIntegral delay retryTime = addUTCTime diff currentTime setRetry se retryTime From c1aa809fb77401390f7d14be05b0ec71bd961133 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 11 Feb 2020 13:44:59 +0530 Subject: [PATCH 038/195] use unfoldr --- server/src-lib/Hasura/Eventing/ScheduledTrigger.hs | 7 +++---- server/src-lib/Hasura/Incremental/Internal/Dependency.hs | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs 
b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 64fab351a14f2..d49ae682da870 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -7,8 +7,10 @@ module Hasura.Eventing.ScheduledTrigger , runScheduledEventsGenerator ) where +import Control.Arrow.Extended (dup) import Control.Concurrent (threadDelay) import Data.Has +import Data.List (unfoldr) import Data.Time.Clock import Hasura.Eventing.HTTP import Hasura.Prelude @@ -142,10 +144,7 @@ generateScheduledEventsFrom time ScheduledTriggerInfo{..} = generateScheduleTimesBetween :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] generateScheduleTimesBetween from till cron = takeWhile (<= till) $ go from where - go init = - case nextMatch cron init of - Nothing -> [] - Just next -> next : (go next) + go = unfoldr (fmap dup . nextMatch cron) processScheduledQueue :: HasVersion diff --git a/server/src-lib/Hasura/Incremental/Internal/Dependency.hs b/server/src-lib/Hasura/Incremental/Internal/Dependency.hs index aba0f360be3a5..0f1c23c03ffd8 100644 --- a/server/src-lib/Hasura/Incremental/Internal/Dependency.hs +++ b/server/src-lib/Hasura/Incremental/Internal/Dependency.hs @@ -20,9 +20,9 @@ import Data.Time.Clock import Data.Vector (Vector) import GHC.Generics ((:*:) (..), (:+:) (..), Generic (..), K1 (..), M1 (..), U1 (..), V1) +import System.Cron.Types import Hasura.Incremental.Select -import System.Cron.Types -- | A 'Dependency' represents a value that a 'Rule' can /conditionally/ depend on. A 'Dependency' -- is created using 'newDependency', and it can be “opened” again using 'dependOn'. 
What makes a From 47d260105926dfae50056adf15845e7d3184c7ad Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 11 Feb 2020 19:01:18 +0530 Subject: [PATCH 039/195] add docs and remove unnecessary pragmas --- .../src-lib/Hasura/Eventing/ScheduledTrigger.hs | 17 ++++++++++++++--- .../Hasura/RQL/Types/ScheduledTrigger.hs | 1 + 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index d49ae682da870..82e555f4fbe30 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -1,7 +1,18 @@ -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE PolyKinds #-} -{-# LANGUAGE UndecidableInstances #-} +{-| += Scheduled Triggers +This module implements the functionality of invoking webhooks during specified time events aka scheduled events. +Scheduled events are modeled using rows in Postgres with a @timestamp@ column. + +== Implementation + +During startup, two threads are started: + +1. Generator: Fetches the list of scheduled triggers from cache and generates scheduled events +for the next @x@ hours (default: 24). This effectively corresponds to doing an INSERT with values containing specific timestamp. +2. Processor: Fetches the scheduled events from db which are @<=NOW()@ and not delivered and delivers them. +The delivery mechanism is similar to Event Triggers. 
+-} module Hasura.Eventing.ScheduledTrigger ( processScheduledQueue , runScheduledEventsGenerator diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 1d5b2eb3be10f..5a2cd022a8fdc 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -1,5 +1,6 @@ {-# OPTIONS_GHC -fno-warn-orphans #-} +-- | These are types for Scheduled Trigger definition; see "Hasura.Eventing.ScheduledTrigger" module Hasura.RQL.Types.ScheduledTrigger ( ScheduleType(..) , CreateScheduledTrigger(..) From 245e296af5253359a682e9ab78466b50a61ab03e Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 12 Feb 2020 16:01:15 +0530 Subject: [PATCH 040/195] move orphaned instances to Hasura.RQL.Instances --- server/src-lib/Hasura/Eventing/ScheduledTrigger.hs | 2 +- server/src-lib/Hasura/RQL/Instances.hs | 9 +++++++++ server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs | 10 ---------- 3 files changed, 10 insertions(+), 11 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 82e555f4fbe30..c929a729c7a42 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -11,7 +11,7 @@ During startup, two threads are started: 1. Generator: Fetches the list of scheduled triggers from cache and generates scheduled events for the next @x@ hours (default: 24). This effectively corresponds to doing an INSERT with values containing specific timestamp. 2. Processor: Fetches the scheduled events from db which are @<=NOW()@ and not delivered and delivers them. -The delivery mechanism is similar to Event Triggers. 
+The delivery mechanism is similar to Event Triggers; see "Hasura.Eventing.EventTrigger" -} module Hasura.Eventing.ScheduledTrigger ( processScheduledQueue diff --git a/server/src-lib/Hasura/RQL/Instances.hs b/server/src-lib/Hasura/RQL/Instances.hs index 41296e8868e36..83737c2c2cd28 100644 --- a/server/src-lib/Hasura/RQL/Instances.hs +++ b/server/src-lib/Hasura/RQL/Instances.hs @@ -4,6 +4,7 @@ module Hasura.RQL.Instances where import Hasura.Prelude +import qualified Data.Aeson as J import qualified Data.HashMap.Strict as M import qualified Data.HashSet as S import qualified Language.GraphQL.Draft.Syntax as G @@ -12,6 +13,7 @@ import qualified Language.Haskell.TH.Syntax as TH import Data.Functor.Product import Data.GADT.Compare import Instances.TH.Lift () +import System.Cron.Parser import System.Cron.Types instance NFData G.Argument @@ -77,3 +79,10 @@ instance (GCompare f, GCompare g) => GCompare (Product f g) where GEQ -> GEQ GGT -> GGT GGT -> GGT + +instance J.FromJSON CronSchedule where + parseJSON = J.withText "CronSchedule" $ \t -> + either fail pure $ parseCronSchedule t + +instance J.ToJSON CronSchedule where + toJSON = J.String . serializeCronSchedule diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 5a2cd022a8fdc..0901d2f633124 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -1,5 +1,3 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} - -- | These are types for Scheduled Trigger definition; see "Hasura.Eventing.ScheduledTrigger" module Hasura.RQL.Types.ScheduledTrigger ( ScheduleType(..) 
@@ -16,7 +14,6 @@ import Data.Aeson.Casing import Data.Aeson.TH import Hasura.Prelude import System.Cron.Types -import System.Cron.Parser import Hasura.Incremental import qualified Data.Text as T @@ -66,13 +63,6 @@ data CreateScheduledTrigger instance NFData CreateScheduledTrigger instance Cacheable CreateScheduledTrigger -instance FromJSON CronSchedule where - parseJSON = withText "CronSchedule" $ \t -> - either fail pure $ parseCronSchedule t - -instance ToJSON CronSchedule where - toJSON = J.String . serializeCronSchedule - instance FromJSON CreateScheduledTrigger where parseJSON = withObject "CreateScheduledTrigger" $ \o -> do From c8638e570285b3dd95f505127aea92529e7aa80d Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 12 Feb 2020 22:59:57 +0530 Subject: [PATCH 041/195] create one-off event during creation --- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 20 +++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 7157f81f1a6a4..cc13f10107f54 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -23,13 +23,21 @@ runCreateScheduledTrigger q = do return successMsg addScheduledTriggerToCatalog :: (MonadTx m) => CreateScheduledTrigger -> m () -addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ +addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ do Q.unitQE defaultTxErrorHandler - [Q.sql| - INSERT into hdb_catalog.hdb_scheduled_trigger - (name, webhook_conf, schedule, payload, retry_conf) - VALUES ($1, $2, $3, $4, $5) - |] (stName, Q.AltJ stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False + [Q.sql| + INSERT into hdb_catalog.hdb_scheduled_trigger + (name, webhook_conf, schedule, payload, retry_conf) + VALUES ($1, $2, $3, $4, $5) + |] (stName, Q.AltJ stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> 
stPayload, Q.AltJ stRetryConf) False + case stSchedule of + Cron _ -> pure () + OneOff timestamp -> Q.unitQE defaultTxErrorHandler + [Q.sql| + INSERT into hdb_catalog.hdb_scheduled_events + (name, scheduled_time) + VALUES ($1, $2) + |] (stName, timestamp) False resolveScheduledTrigger :: (QErrM m, MonadIO m) From 2c0dcafa4358023322e917802a2b65bb64e49c09 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Thu, 13 Feb 2020 18:36:12 +0530 Subject: [PATCH 042/195] only include scheduled triggers in metadata if specified in api --- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 37 ++++++++++--------- .../Hasura/RQL/Types/ScheduledTrigger.hs | 16 ++++++++ server/src-lib/Hasura/Server/Query.hs | 4 +- server/src-rsr/initialise.sql | 3 +- server/src-rsr/migrations/30_to_31.sql | 3 +- 5 files changed, 42 insertions(+), 21 deletions(-) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index cc13f10107f54..53cba93a98281 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -16,28 +16,29 @@ import Hasura.RQL.Types import qualified Database.PG.Query as Q -runCreateScheduledTrigger :: (CacheRWM m, MonadTx m) => CreateScheduledTrigger -> m EncJSON -runCreateScheduledTrigger q = do - addScheduledTriggerToCatalog q - buildSchemaCacheFor $ MOScheduledTrigger $ stName q +runCreateScheduledTrigger :: (CacheRWM m, MonadTx m) => CreateScheduledTriggerWith -> m EncJSON +runCreateScheduledTrigger (CreateScheduledTriggerWith definition includeInMetadata) = do + addScheduledTriggerToCatalog definition includeInMetadata + buildSchemaCacheFor $ MOScheduledTrigger $ stName definition return successMsg -addScheduledTriggerToCatalog :: (MonadTx m) => CreateScheduledTrigger -> m () -addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ do +addScheduledTriggerToCatalog :: (MonadTx m) => CreateScheduledTrigger -> Bool -> m () +addScheduledTriggerToCatalog 
CreateScheduledTrigger {..} includeInMetadata = liftTx $ do Q.unitQE defaultTxErrorHandler [Q.sql| INSERT into hdb_catalog.hdb_scheduled_trigger - (name, webhook_conf, schedule, payload, retry_conf) + (name, webhook_conf, schedule, payload, retry_conf, include_in_metadata) VALUES ($1, $2, $3, $4, $5) - |] (stName, Q.AltJ stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False + |] (stName, Q.AltJ stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf + , includeInMetadata) False case stSchedule of - Cron _ -> pure () OneOff timestamp -> Q.unitQE defaultTxErrorHandler [Q.sql| INSERT into hdb_catalog.hdb_scheduled_events (name, scheduled_time) VALUES ($1, $2) |] (stName, timestamp) False + _ -> pure () resolveScheduledTrigger :: (QErrM m, MonadIO m) @@ -55,23 +56,25 @@ resolveScheduledTrigger CreateScheduledTrigger {..} = do headerInfo pure stInfo -runUpdateScheduledTrigger :: (CacheRWM m, MonadTx m) => CreateScheduledTrigger -> m EncJSON -runUpdateScheduledTrigger q = do - updateScheduledTriggerInCatalog q - buildSchemaCacheFor $ MOScheduledTrigger $ stName q +runUpdateScheduledTrigger :: (CacheRWM m, MonadTx m) => CreateScheduledTriggerWith -> m EncJSON +runUpdateScheduledTrigger (CreateScheduledTriggerWith definition includeInMetadata) = do + updateScheduledTriggerInCatalog definition includeInMetadata + buildSchemaCacheFor $ MOScheduledTrigger $ stName definition return successMsg -updateScheduledTriggerInCatalog :: (MonadTx m) => CreateScheduledTrigger -> m () -updateScheduledTriggerInCatalog CreateScheduledTrigger {..} = liftTx $ do +updateScheduledTriggerInCatalog :: (MonadTx m) => CreateScheduledTrigger -> Bool -> m () +updateScheduledTriggerInCatalog CreateScheduledTrigger {..} includeInMetadata = liftTx $ do Q.unitQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.hdb_scheduled_trigger SET webhook_conf = $2, schedule = $3, payload = $4, - retry_conf = $5 + retry_conf = $5, + include_in_metadata = $6 WHERE name 
= $1 - |] (stName, Q.AltJ stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf) False + |] (stName, Q.AltJ stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf + , includeInMetadata) False -- since the scheduled trigger is updated, clear all its future events which are not retries Q.unitQE defaultTxErrorHandler [Q.sql| diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 0901d2f633124..e0eeb4c79e495 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -1,6 +1,7 @@ -- | These are types for Scheduled Trigger definition; see "Hasura.Eventing.ScheduledTrigger" module Hasura.RQL.Types.ScheduledTrigger ( ScheduleType(..) + , CreateScheduledTriggerWith(..) , CreateScheduledTrigger(..) , RetryConfST(..) , formatTime' @@ -82,6 +83,21 @@ instance FromJSON CreateScheduledTrigger where $(deriveToJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''CreateScheduledTrigger) +data CreateScheduledTriggerWith + = CreateScheduledTriggerWith + { stwDefinition :: !CreateScheduledTrigger + , stwIncludeInMetadata :: !Bool + } deriving (Show, Eq, Generic) + +instance FromJSON CreateScheduledTriggerWith where + parseJSON = + withObject "CreateScheduledTriggerWith" $ \o -> do + stwDefinition <- o .: "definition" + stwIncludeInMetadata <- o .: "include_in_metadata" + pure CreateScheduledTriggerWith {..} + +$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''CreateScheduledTriggerWith) + -- Supported time string formats for the API: -- (see FromJSON for ZonedTime: https://hackage.haskell.org/package/aeson-1.4.6.0/docs/src/Data.Aeson.Types.FromJSON.html#line-2050) diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 630e619a85f5f..bdc783ad0a525 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -88,8 
+88,8 @@ data RQLQueryV1 | RQRedeliverEvent !RedeliverEventQuery | RQInvokeEventTrigger !InvokeEventTriggerQuery - | RQCreateScheduledTrigger !CreateScheduledTrigger - | RQUpdateScheduledTrigger !CreateScheduledTrigger + | RQCreateScheduledTrigger !CreateScheduledTriggerWith + | RQUpdateScheduledTrigger !CreateScheduledTriggerWith | RQDeleteScheduledTrigger !TriggerName | RQCancelScheduledEvent !EventId diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index 36bb5083d87b0..092ce48ee804e 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -670,7 +670,8 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger webhook_conf JSON NOT NULL, schedule JSON NOT NULL, payload JSON, - retry_conf JSON + retry_conf JSON, + include_in_metadata BOOLEAN NOT NULL DEFAULT FALSE ); CREATE TABLE hdb_catalog.hdb_scheduled_events diff --git a/server/src-rsr/migrations/30_to_31.sql b/server/src-rsr/migrations/30_to_31.sql index 4ead50aaa351b..a173868a8d9f9 100644 --- a/server/src-rsr/migrations/30_to_31.sql +++ b/server/src-rsr/migrations/30_to_31.sql @@ -4,7 +4,8 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger webhook_conf JSON NOT NULL, schedule JSON NOT NULL, payload JSON, - retry_conf JSON + retry_conf JSON, + include_in_metadata BOOLEAN NOT NULL DEFAULT FALSE ); CREATE TABLE hdb_catalog.hdb_scheduled_events From 2d87c7bdfee710d1d81b128c201331071fc9e476 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Thu, 13 Feb 2020 19:58:50 +0530 Subject: [PATCH 043/195] add scheduled triggers to metadata apis --- server/graphql-engine.cabal | 1 + .../src-lib/Hasura/Eventing/EventTrigger.hs | 19 ++-- server/src-lib/Hasura/Eventing/HTTP.hs | 97 ++++++++++++------- .../Hasura/Eventing/ScheduledTrigger.hs | 15 +-- server/src-lib/Hasura/RQL/DDL/Metadata.hs | 39 +++++++- .../Hasura/RQL/DDL/Metadata/Generator.hs | 63 ++++++++---- .../src-lib/Hasura/RQL/DDL/Metadata/Types.hs | 22 +++-- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 13 +-- 
.../src-lib/Hasura/RQL/Types/EventTrigger.hs | 15 +-- .../Hasura/RQL/Types/ScheduledTrigger.hs | 10 +- server/src-rsr/initialise.sql | 2 +- server/src-rsr/migrations/30_to_31.sql | 2 +- 12 files changed, 185 insertions(+), 113 deletions(-) diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index 9ddb1d14738ed..4c5068ae09ab1 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -192,6 +192,7 @@ library -- testing , QuickCheck , generic-arbitrary + , quickcheck-instances -- scheduled triggers , cron >= 0.6.2 diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 4beabc34ec345..0d4e5f38f47b1 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -161,7 +161,8 @@ processEvent logenv pool getSchemaCache e = do ep = createEventPayload retryConf e extraLogCtx = ExtraLogContext (epId ep) res <- runExceptT $ withEventEngineCtx eventEngineCtx $ - tryWebhook headers respTimeout (toJSON ep) webhook (Just extraLogCtx) + tryWebhook headers respTimeout (toJSON ep) webhook + logHTTPForET res (Just extraLogCtx) let decodedHeaders = map (decodeHeader logenv headerInfos) headers finally <- either (processError pool e retryConf decodedHeaders ep) @@ -173,8 +174,8 @@ withEventEngineCtx :: , MonadMask m ) => EventEngineCtx - -> m HTTPResp - -> m HTTPResp + -> m (HTTPResp a) + -> m (HTTPResp a) withEventEngineCtx eeCtx = bracket_ (incrementThreadCount eeCtx) (decrementThreadCount eeCtx) incrementThreadCount :: MonadIO m => EventEngineCtx -> m () @@ -202,7 +203,7 @@ createEventPayload retryConf e = EventPayload processSuccess :: ( MonadIO m ) - => Q.PGPool -> Event -> [HeaderConf] -> EventPayload -> HTTPResp + => Q.PGPool -> Event -> [HeaderConf] -> EventPayload -> HTTPResp a -> m (Either QErr ()) processSuccess pool e decodedHeaders ep resp = do let respBody = hrsBody resp @@ -214,14 +215,10 @@ processSuccess pool e 
decodedHeaders ep resp = do setSuccess e processError - :: ( MonadIO m - , MonadReader r m - , Has (L.Logger L.Hasura) r - ) - => Q.PGPool -> Event -> RetryConf -> [HeaderConf] -> EventPayload -> HTTPErr + :: (MonadIO m) + => Q.PGPool -> Event -> RetryConf -> [HeaderConf] -> EventPayload -> HTTPErr a -> m (Either QErr ()) processError pool e retryConf decodedHeaders ep err = do - logHTTPErr err let invocation = case err of HClient excp -> do let errMsg = TBS.fromLBS $ encode $ show excp @@ -241,7 +238,7 @@ processError pool e retryConf decodedHeaders ep err = do insertInvocation invocation retryOrSetError e retryConf err -retryOrSetError :: Event -> RetryConf -> HTTPErr -> Q.TxE QErr () +retryOrSetError :: Event -> RetryConf -> HTTPErr a -> Q.TxE QErr () retryOrSetError e retryConf err = do let mretryHeader = getRetryAfterHeaderFromHTTPErr err tries = eTries e diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index a00c427849edc..953213dc9cdf6 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE DataKinds #-} + module Hasura.Eventing.HTTP ( HTTPErr(..) , HTTPResp(..) @@ -5,6 +7,8 @@ module Hasura.Eventing.HTTP , runHTTP , isNetworkError , isNetworkErrorHC + , logHTTPForET + , logHTTPForST , ExtraLogContext(..) , EventId , Invocation(..) @@ -17,7 +21,6 @@ module Hasura.Eventing.HTTP , mkClientErr , TriggerMetadata(..) , DeliveryInfo(..) 
- , logHTTPErr , mkWebhookReq , mkResp , toInt64 @@ -80,6 +83,8 @@ $(J.deriveToJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''ClientE type Version = T.Text +data TriggerTypes = ET | ST + data Response = ResponseType1 WebhookResponse | ResponseType2 ClientError instance J.ToJSON Response where @@ -107,7 +112,7 @@ data ExtraLogContext $(J.deriveJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''ExtraLogContext) -data HTTPResp +data HTTPResp (a :: TriggerTypes) = HTTPResp { hrsStatus :: !Int , hrsHeaders :: ![HeaderConf] @@ -116,17 +121,20 @@ data HTTPResp $(J.deriveToJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''HTTPResp) -instance ToEngineLog HTTPResp Hasura where +instance ToEngineLog (HTTPResp 'ET) Hasura where toEngineLog resp = (LevelInfo, eventTriggerLogType, J.toJSON resp) -data HTTPErr +instance ToEngineLog (HTTPResp 'ST) Hasura where + toEngineLog resp = (LevelInfo, scheduledTriggerLogType, J.toJSON resp) + +data HTTPErr (a :: TriggerTypes) = HClient !HTTP.HttpException | HParse !HTTP.Status !String - | HStatus !HTTPResp + | HStatus !(HTTPResp a) | HOther !String deriving (Show) -instance J.ToJSON HTTPErr where +instance J.ToJSON (HTTPErr a) where toJSON err = toObj $ case err of (HClient e) -> ("client", J.toJSON $ show e) (HParse st e) -> @@ -140,11 +148,14 @@ instance J.ToJSON HTTPErr where toObj :: (T.Text, J.Value) -> J.Value toObj (k, v) = J.object [ "type" J..= k , "detail" J..= v] --- encapsulates a http operation -instance ToEngineLog HTTPErr Hasura where + +instance ToEngineLog (HTTPErr 'ET) Hasura where toEngineLog err = (LevelError, eventTriggerLogType, J.toJSON err) -mkHTTPResp :: HTTP.Response LBS.ByteString -> HTTPResp +instance ToEngineLog (HTTPErr 'ST) Hasura where + toEngineLog err = (LevelError, scheduledTriggerLogType, J.toJSON err) + +mkHTTPResp :: HTTP.Response LBS.ByteString -> HTTPResp a mkHTTPResp resp = HTTPResp { hrsStatus = HTTP.statusCode $ HTTP.responseStatus resp @@ -156,18 +167,21 @@ 
mkHTTPResp resp = decodeHeader (hdrName, hdrVal) = HeaderConf (decodeBS $ CI.original hdrName) (HVValue (decodeBS hdrVal)) -data HTTPRespExtra +data HTTPRespExtra (a :: TriggerTypes) = HTTPRespExtra - { _hreResponse :: HTTPResp + { _hreResponse :: Either (HTTPErr a) (HTTPResp a) , _hreContext :: Maybe ExtraLogContext } $(J.deriveToJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''HTTPRespExtra) -instance ToEngineLog HTTPRespExtra Hasura where +instance ToEngineLog (HTTPRespExtra 'ET) Hasura where toEngineLog resp = (LevelInfo, eventTriggerLogType, J.toJSON resp) -isNetworkError :: HTTPErr -> Bool +instance ToEngineLog (HTTPRespExtra 'ST) Hasura where + toEngineLog resp = (LevelInfo, scheduledTriggerLogType, J.toJSON resp) + +isNetworkError :: HTTPErr a -> Bool isNetworkError = \case HClient he -> isNetworkErrorHC he _ -> False @@ -179,7 +193,7 @@ isNetworkErrorHC = \case HTTP.HttpExceptionRequest _ HTTP.ResponseTimeout -> True _ -> False -anyBodyParser :: HTTP.Response LBS.ByteString -> Either HTTPErr HTTPResp +anyBodyParser :: HTTP.Response LBS.ByteString -> Either (HTTPErr a) (HTTPResp a) anyBodyParser resp = do let httpResp = mkHTTPResp resp if respCode >= HTTP.status200 && respCode < HTTP.status300 @@ -202,34 +216,43 @@ $(J.deriveJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''HTTPReq) instance ToEngineLog HTTPReq Hasura where toEngineLog req = (LevelInfo, eventTriggerLogType, J.toJSON req) -runHTTP +logHTTPForET + :: ( MonadReader r m + , Has (Logger Hasura) r + , MonadIO m + ) + => Either (HTTPErr 'ET) (HTTPResp 'ET) -> Maybe ExtraLogContext -> m () +logHTTPForET eitherResp extraLogCtx = do + logger :: Logger Hasura <- asks getter + unLogger logger $ HTTPRespExtra eitherResp extraLogCtx + +logHTTPForST :: ( MonadReader r m , Has (Logger Hasura) r , MonadIO m ) - => HTTP.Manager -> HTTP.Request -> Maybe ExtraLogContext -> m (Either HTTPErr HTTPResp) -runHTTP manager req exLog = do + => Either (HTTPErr 'ST) (HTTPResp 'ST) -> Maybe 
ExtraLogContext -> m () +logHTTPForST eitherResp extraLogCtx = do logger :: Logger Hasura <- asks getter + unLogger logger $ HTTPRespExtra eitherResp extraLogCtx + +runHTTP :: (MonadIO m) => HTTP.Manager -> HTTP.Request -> m (Either (HTTPErr a) (HTTPResp a)) +runHTTP manager req = do res <- liftIO $ try $ HTTP.httpLbs req manager - case res of - Left e -> unLogger logger $ HClient e - Right resp -> unLogger logger $ HTTPRespExtra (mkHTTPResp resp) exLog return $ either (Left . HClient) anyBodyParser res tryWebhook :: ( MonadReader r m , Has HTTP.Manager r - , Has (L.Logger L.Hasura) r , MonadIO m - , MonadError HTTPErr m + , MonadError (HTTPErr a) m ) => [HTTP.Header] -> HTTP.ResponseTimeout -> J.Value -> String - -> Maybe ExtraLogContext - -> m HTTPResp -tryWebhook headers timeout payload webhook extraLogCtx = do + -> m (HTTPResp a) +tryWebhook headers timeout payload webhook = do initReqE <- liftIO $ try $ HTTP.parseRequest webhook manager <- asks getter case initReqE of @@ -242,7 +265,7 @@ tryWebhook headers timeout payload webhook extraLogCtx = do , HTTP.requestBody = HTTP.RequestBodyLBS (J.encode payload) , HTTP.responseTimeout = timeout } - eitherResp <- runHTTP manager req extraLogCtx + eitherResp <- runHTTP manager req onLeft eitherResp throwError data TriggerMetadata @@ -279,15 +302,15 @@ mkMaybe :: [a] -> Maybe [a] mkMaybe [] = Nothing mkMaybe x = Just x -logHTTPErr - :: ( MonadReader r m - , Has (L.Logger L.Hasura) r - , MonadIO m - ) - => HTTPErr -> m () -logHTTPErr err = do - logger :: L.Logger L.Hasura <- asks getter - L.unLogger logger err +-- logHTTPErr +-- :: ( MonadReader r m +-- , Has (L.Logger L.Hasura) r +-- , MonadIO m +-- ) +-- => HTTPErr a -> m () +-- logHTTPErr err = do +-- logger :: L.Logger L.Hasura <- asks getter +-- L.unLogger logger err toInt64 :: (Integral a) => a -> Int64 toInt64 = fromIntegral @@ -315,11 +338,11 @@ decodeHeader logenv headerInfos (hdrName, hdrVal) where decodeBS = TE.decodeUtf8With TE.lenientDecode 
-getRetryAfterHeaderFromHTTPErr :: HTTPErr -> Maybe Text +getRetryAfterHeaderFromHTTPErr :: HTTPErr a -> Maybe Text getRetryAfterHeaderFromHTTPErr (HStatus resp) = getRetryAfterHeaderFromResp resp getRetryAfterHeaderFromHTTPErr _ = Nothing -getRetryAfterHeaderFromResp :: HTTPResp -> Maybe Text +getRetryAfterHeaderFromResp :: HTTPResp a -> Maybe Text getRetryAfterHeaderFromResp resp = let mHeader = find diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index c929a729c7a42..521fbda1163c4 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -211,9 +211,8 @@ processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEventF headers = map encodeHeader stiHeaders headers' = addDefaultHeaders headers extraLogCtx = ExtraLogContext sefId - res <- - runExceptT $ - tryWebhook headers' httpTimeout sefPayload (T.unpack sefWebhook) (Just extraLogCtx) + res <- runExceptT $ tryWebhook headers' httpTimeout sefPayload (T.unpack sefWebhook) + logHTTPForST res (Just extraLogCtx) let decodedHeaders = map (decodeHeader logEnv stiHeaders) headers' finally <- either (processError pgpool se decodedHeaders) @@ -226,7 +225,9 @@ processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEventF Left err -> logQErr err Right _ -> pure () -processError :: (MonadIO m) => Q.PGPool -> ScheduledEventFull -> [HeaderConf] -> HTTPErr -> m (Either QErr ()) +processError + :: (MonadIO m) + => Q.PGPool -> ScheduledEventFull -> [HeaderConf] -> HTTPErr a -> m (Either QErr ()) processError pgpool se decodedHeaders err = do let invocation = case err of HClient excp -> do @@ -249,7 +250,7 @@ processError pgpool se decodedHeaders err = do insertInvocation invocation retryOrMarkError se err -retryOrMarkError :: ScheduledEventFull -> HTTPErr -> Q.TxE QErr () +retryOrMarkError :: ScheduledEventFull -> HTTPErr a -> Q.TxE QErr () retryOrMarkError 
se@ScheduledEventFull {..} err = do let mRetryHeader = getRetryAfterHeaderFromHTTPErr err mRetryHeaderSeconds = join $ parseRetryHeaderValue <$> mRetryHeader @@ -275,7 +276,9 @@ retryOrMarkError se@ScheduledEventFull {..} err = do WHERE id = $1 |] (Identity sefId) True -processSuccess :: (MonadIO m) => Q.PGPool -> ScheduledEventFull -> [HeaderConf] -> HTTPResp -> m (Either QErr ()) +processSuccess + :: (MonadIO m) + => Q.PGPool -> ScheduledEventFull -> [HeaderConf] -> HTTPResp a -> m (Either QErr ()) processSuccess pgpool se decodedHeaders resp = do let respBody = hrsBody resp respHeaders = hrsHeaders resp diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata.hs b/server/src-lib/Hasura/RQL/DDL/Metadata.hs index 44a2f3fe9b5ec..ea7b6f61bf7db 100644 --- a/server/src-lib/Hasura/RQL/DDL/Metadata.hs +++ b/server/src-lib/Hasura/RQL/DDL/Metadata.hs @@ -28,7 +28,8 @@ import Hasura.RQL.DDL.Metadata.Types import Hasura.RQL.DDL.Permission.Internal (dropPermFromCatalog) import Hasura.RQL.DDL.RemoteSchema (addRemoteSchemaP2, removeRemoteSchemaFromCatalog) -import Hasura.RQL.DDL.ScheduledTrigger (deleteScheduledTriggerFromCatalog) +import Hasura.RQL.DDL.ScheduledTrigger (addScheduledTriggerToCatalog, + deleteScheduledTriggerFromCatalog) import Hasura.RQL.Types import Hasura.Server.Version (HasVersion) import Hasura.SQL.Types @@ -51,6 +52,7 @@ clearMetadata = Q.catchE defaultTxErrorHandler $ do Q.unitQ "DELETE FROM hdb_catalog.remote_schemas" () False Q.unitQ "DELETE FROM hdb_catalog.hdb_allowlist" () False Q.unitQ "DELETE FROM hdb_catalog.hdb_query_collection WHERE is_system_defined <> 'true'" () False + Q.unitQ "DELETE FROM hdb_catalog.hdb_scheduled_trigger WHERE include_in_metadata" () False runClearMetadata :: (MonadTx m, CacheRWM m) @@ -63,8 +65,8 @@ runClearMetadata _ = do applyQP1 :: (QErrM m) => ReplaceMetadata -> m () -applyQP1 (ReplaceMetadata _ tables functionsMeta schemas collections allowlist) = do - +applyQP1 (ReplaceMetadata _ tables functionsMeta schemas collections 
allowlist scheduledTriggers) + = do withPathK "tables" $ do checkMultipleDecls "tables" $ map _tmTable tables @@ -105,6 +107,9 @@ applyQP1 (ReplaceMetadata _ tables functionsMeta schemas collections allowlist) withPathK "allowlist" $ checkMultipleDecls "allow list" $ map Collection._crCollection allowlist + withPathK "scheduled_triggers" $ + checkMultipleDecls "scheduled triggers" $ map stName scheduledTriggers + where withTableName qt = withPathK (qualObjectToText qt) @@ -127,7 +132,8 @@ applyQP2 ) => ReplaceMetadata -> m EncJSON -applyQP2 (ReplaceMetadata _ tables functionsMeta schemas collections allowlist) = do +applyQP2 (ReplaceMetadata _ tables functionsMeta schemas collections allowlist scheduledTriggers) + = do liftTx clearMetadata buildSchemaCacheStrict @@ -193,6 +199,9 @@ applyQP2 (ReplaceMetadata _ tables functionsMeta schemas collections allowlist) withPathK "remote_schemas" $ indexedMapM_ (void . addRemoteSchemaP2) schemas + withPathK "scheduled_triggers" $ + indexedForM_ scheduledTriggers $ \st -> liftTx $ addScheduledTriggerToCatalog st True + buildSchemaCacheStrict return successMsg @@ -265,8 +274,10 @@ fetchMetadata = do -- fetch allow list allowlist <- map Collection.CollectionReq <$> fetchAllowlists + scheduledTriggers <- fetchScheduledTriggers + return $ ReplaceMetadata currentMetadataVersion (HMIns.elems postRelMap) functions - remoteSchemas collections allowlist + remoteSchemas collections allowlist scheduledTriggers where @@ -373,6 +384,24 @@ fetchMetadata = do , ComputedFieldMeta name definition comment ) + fetchScheduledTriggers = + map uncurrySchedule <$> Q.listQE defaultTxErrorHandler + [Q.sql| + SELECT st.name, st.webhook_conf, st.schedule_conf, st.payload, st.retry_conf + FROM hdb_catalog.hdb_scheduled_trigger st + WHERE include_in_metadata + |] () False + where + uncurrySchedule (n, wc, sc, p, rc) = + CreateScheduledTrigger + { stName = n, + stWebhook = Q.getAltJ wc, + stSchedule = Q.getAltJ sc, + stPayload = Q.getAltJ <$> p, + 
stRetryConf = Q.getAltJ rc, + stHeaders = [] + } + runExportMetadata :: (QErrM m, MonadTx m) => ExportMetadata -> m EncJSON diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata/Generator.hs b/server/src-lib/Hasura/RQL/DDL/Metadata/Generator.hs index 033dc7714b487..d599178dab753 100644 --- a/server/src-lib/Hasura/RQL/DDL/Metadata/Generator.hs +++ b/server/src-lib/Hasura/RQL/DDL/Metadata/Generator.hs @@ -3,32 +3,36 @@ module Hasura.RQL.DDL.Metadata.Generator (genReplaceMetadata) where -import Hasura.GraphQL.Utils (simpleGraphQLQuery) +import Hasura.GraphQL.Utils (simpleGraphQLQuery) import Hasura.Prelude import Hasura.RQL.DDL.Headers import Hasura.RQL.DDL.Metadata.Types import Hasura.RQL.Types import Hasura.Server.Utils import Hasura.SQL.Types - -import qualified Hasura.GraphQL.Context as GC -import qualified Hasura.RQL.DDL.ComputedField as ComputedField -import qualified Hasura.RQL.DDL.Permission as Permission -import qualified Hasura.RQL.DDL.Permission.Internal as Permission -import qualified Hasura.RQL.DDL.QueryCollection as Collection -import qualified Hasura.RQL.DDL.Relationship as Relationship -import qualified Hasura.RQL.DDL.Schema as Schema - -import qualified Data.Aeson as J -import qualified Data.HashMap.Strict as HM -import qualified Data.Text as T -import qualified Data.Vector as V -import qualified Language.GraphQL.Draft.Parser as G -import qualified Language.GraphQL.Draft.Syntax as G -import qualified Language.Haskell.TH.Syntax as TH -import qualified Network.URI as N +import System.Cron.Types + +import qualified Hasura.GraphQL.Context as GC +import qualified Hasura.RQL.DDL.ComputedField as ComputedField +import qualified Hasura.RQL.DDL.Permission as Permission +import qualified Hasura.RQL.DDL.Permission.Internal as Permission +import qualified Hasura.RQL.DDL.QueryCollection as Collection +import qualified Hasura.RQL.DDL.Relationship as Relationship +import qualified Hasura.RQL.DDL.Schema as Schema + +import qualified Data.Aeson as J +import qualified 
Data.Text as T +import qualified Data.Vector as V +import qualified Language.GraphQL.Draft.Parser as G +import qualified Language.GraphQL.Draft.Syntax as G +import qualified Language.Haskell.TH.Syntax as TH +import qualified Network.URI as N +import qualified System.Cron.Parser as Cr import Test.QuickCheck +import Test.QuickCheck.Instances.Semigroup () +import Test.QuickCheck.Instances.Time () +import Test.QuickCheck.Instances.UnorderedContainers () genReplaceMetadata :: Gen ReplaceMetadata genReplaceMetadata = do @@ -39,15 +43,13 @@ genReplaceMetadata = do <*> arbitrary <*> arbitrary <*> arbitrary + <*> arbitrary where genFunctionsMetadata :: MetadataVersion -> Gen FunctionsMetadata genFunctionsMetadata = \case MVVersion1 -> FMVersion1 <$> arbitrary MVVersion2 -> FMVersion2 <$> arbitrary -instance (Eq k, Hashable k, Arbitrary k, Arbitrary v) => Arbitrary (HM.HashMap k v) where - arbitrary = HM.fromList <$> arbitrary - instance Arbitrary G.Name where arbitrary = G.Name . T.pack <$> listOf1 (elements ['a'..'z']) @@ -195,3 +197,22 @@ instance Arbitrary Collection.CreateCollection where instance Arbitrary Collection.CollectionReq where arbitrary = genericArbitrary + +instance Arbitrary CreateScheduledTrigger where + arbitrary = genericArbitrary + +instance Arbitrary WebhookConf where + arbitrary = genericArbitrary + +instance Arbitrary ScheduleType where + arbitrary = genericArbitrary + +instance Arbitrary RetryConfST where + arbitrary = genericArbitrary + +instance Arbitrary CronSchedule where + arbitrary = elements sampleCronSchedules + +-- TODO: add more +sampleCronSchedules :: [CronSchedule] +sampleCronSchedules = rights $ map Cr.parseCronSchedule [ "* * * * *" ] diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs b/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs index b83b419f46fcd..7901bfbd2bb7e 100644 --- a/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs +++ b/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs @@ -161,13 +161,14 @@ instance FromJSON 
ClearMetadata where data ReplaceMetadata = ReplaceMetadata - { aqVersion :: !MetadataVersion - , aqTables :: ![TableMeta] - , aqFunctions :: !FunctionsMetadata - , aqRemoteSchemas :: ![AddRemoteSchemaQuery] - , aqQueryCollections :: ![Collection.CreateCollection] - , aqAllowlist :: ![Collection.CollectionReq] - } deriving (Show, Eq, Lift) + { aqVersion :: !MetadataVersion + , aqTables :: ![TableMeta] + , aqFunctions :: !FunctionsMetadata + , aqRemoteSchemas :: ![AddRemoteSchemaQuery] + , aqQueryCollections :: ![Collection.CreateCollection] + , aqAllowlist :: ![Collection.CollectionReq] + , aqScheduledTriggers :: ![CreateScheduledTrigger] + } deriving (Show, Eq) instance FromJSON ReplaceMetadata where parseJSON = withObject "Object" $ \o -> do @@ -178,6 +179,7 @@ instance FromJSON ReplaceMetadata where <*> o .:? "remote_schemas" .!= [] <*> o .:? "query_collections" .!= [] <*> o .:? "allow_list" .!= [] + <*> o .:? "scheduled_triggers" .!= [] where parseFunctions version maybeValue = case version of @@ -240,11 +242,13 @@ replaceMetadataToOrdJSON ( ReplaceMetadata remoteSchemas queryCollections allowlist + scheduledTriggers ) = AO.object $ [versionPair, tablesPair] <> catMaybes [ functionsPair , remoteSchemasPair , queryCollectionsPair , allowlistPair + , scheduledTriggersPair ] where versionPair = ("version", AO.toOrdered version) @@ -257,6 +261,8 @@ replaceMetadataToOrdJSON ( ReplaceMetadata allowlistPair = listToMaybeOrdPair "allowlist" AO.toOrdered allowlist + scheduledTriggersPair = listToMaybeOrdPair "scheduled_triggers" scheduledTriggerQToOrdJSON scheduledTriggers + tableMetaToOrdJSON :: TableMeta -> AO.Value tableMetaToOrdJSON ( TableMeta table @@ -403,6 +409,8 @@ replaceMetadataToOrdJSON ( ReplaceMetadata , ("definition", AO.toOrdered definition) ] <> catMaybes [maybeCommentToMaybeOrdPair comment] + scheduledTriggerQToOrdJSON :: CreateScheduledTrigger -> AO.Value + scheduledTriggerQToOrdJSON = AO.toOrdered -- Utility functions listToMaybeOrdPair :: Text -> (a 
-> AO.Value) -> [a] -> Maybe (Text, AO.Value) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 53cba93a98281..4ebff5067b422 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -3,6 +3,7 @@ module Hasura.RQL.DDL.ScheduledTrigger , runUpdateScheduledTrigger , runDeleteScheduledTrigger , runCancelScheduledEvent + , addScheduledTriggerToCatalog , resolveScheduledTrigger , deleteScheduledTriggerFromCatalog ) where @@ -27,9 +28,9 @@ addScheduledTriggerToCatalog CreateScheduledTrigger {..} includeInMetadata = lif Q.unitQE defaultTxErrorHandler [Q.sql| INSERT into hdb_catalog.hdb_scheduled_trigger - (name, webhook_conf, schedule, payload, retry_conf, include_in_metadata) - VALUES ($1, $2, $3, $4, $5) - |] (stName, Q.AltJ stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf + (name, webhook_conf, schedule_conf, payload, retry_conf, include_in_metadata) + VALUES ($1, $2, $3, $4, $5, $6) + |] (stName, Q.AltJ stWebhook, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf , includeInMetadata) False case stSchedule of OneOff timestamp -> Q.unitQE defaultTxErrorHandler @@ -44,7 +45,7 @@ resolveScheduledTrigger :: (QErrM m, MonadIO m) => CreateScheduledTrigger -> m ScheduledTriggerInfo resolveScheduledTrigger CreateScheduledTrigger {..} = do - webhookInfo <- getWebhookInfoFromConf stWebhookConf + webhookInfo <- getWebhookInfoFromConf stWebhook headerInfo <- getHeaderInfosFromConf stHeaders let stInfo = ScheduledTriggerInfo @@ -68,12 +69,12 @@ updateScheduledTriggerInCatalog CreateScheduledTrigger {..} includeInMetadata = [Q.sql| UPDATE hdb_catalog.hdb_scheduled_trigger SET webhook_conf = $2, - schedule = $3, + schedule_conf = $3, payload = $4, retry_conf = $5, include_in_metadata = $6 WHERE name = $1 - |] (stName, Q.AltJ stWebhookConf, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf + |] (stName, 
Q.AltJ stWebhook, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf , includeInMetadata) False -- since the scheduled trigger is updated, clear all its future events which are not retries Q.unitQE defaultTxErrorHandler diff --git a/server/src-lib/Hasura/RQL/Types/EventTrigger.hs b/server/src-lib/Hasura/RQL/Types/EventTrigger.hs index 98421d6ff6992..cc09bef6c60a1 100644 --- a/server/src-lib/Hasura/RQL/Types/EventTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/EventTrigger.hs @@ -109,18 +109,13 @@ instance NFData WebhookConf instance Cacheable WebhookConf instance ToJSON WebhookConf where - toJSON (WCValue w) = object ["webhook" .= w ] - toJSON (WCEnv wEnv) = object ["webhook_from_env" .= wEnv ] + toJSON (WCValue w) = String w + toJSON (WCEnv wEnv) = object ["from_env" .= wEnv ] instance FromJSON WebhookConf where - parseJSON = withObject "WebhookConf" \o -> do - webhook <- o .:? "webhook" - webhookFromEnv <- o .:? "webhook_from_env" - case (webhook, webhookFromEnv) of - (Just value, Nothing) -> pure $ WCValue value - (Nothing, Just env) -> pure $ WCEnv env - (Just _, Just _) -> fail "only one of webhook or webhook_from_env should be given" - (Nothing, Nothing) -> fail "must provide webhook or webhook_from_env" + parseJSON (Object o) = WCEnv <$> o .: "from_env" + parseJSON (String t) = pure $ WCValue t + parseJSON _ = fail "one of string or object must be provided for webhook" data WebhookConfInfo = WebhookConfInfo diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index e0eeb4c79e495..373e3f8a52ab9 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -54,7 +54,7 @@ $(deriveJSON defaultOptions{sumEncoding=TaggedObject "type" "value"} ''ScheduleT data CreateScheduledTrigger = CreateScheduledTrigger { stName :: !ET.TriggerName - , stWebhookConf :: !ET.WebhookConf + , stWebhook :: !ET.WebhookConf , stSchedule :: 
!ScheduleType , stPayload :: !(Maybe J.Value) , stRetryConf :: !RetryConfST @@ -68,17 +68,11 @@ instance FromJSON CreateScheduledTrigger where parseJSON = withObject "CreateScheduledTrigger" $ \o -> do stName <- o .: "name" - stWebhook <- o .:? "webhook" - stWebhookFromEnv <- o .:? "webhook_from_env" + stWebhook <- o .: "webhook" stPayload <- o .:? "payload" stSchedule <- o .: "schedule" stRetryConf <- o .:? "retry_conf" .!= defaultRetryConf stHeaders <- o .:? "headers" .!= [] - stWebhookConf <- case (stWebhook, stWebhookFromEnv) of - (Just value, Nothing) -> pure $ ET.WCValue value - (Nothing, Just env) -> pure $ ET.WCEnv env - (Just _, Just _) -> fail "only one of webhook or webhook_from_env should be given" - (Nothing, Nothing) -> fail "must provide webhook or webhook_from_env" pure CreateScheduledTrigger {..} $(deriveToJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''CreateScheduledTrigger) diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index 092ce48ee804e..ef8f7259ad6ed 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -668,7 +668,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger ( name TEXT PRIMARY KEY, webhook_conf JSON NOT NULL, - schedule JSON NOT NULL, + schedule_conf JSON NOT NULL, payload JSON, retry_conf JSON, include_in_metadata BOOLEAN NOT NULL DEFAULT FALSE diff --git a/server/src-rsr/migrations/30_to_31.sql b/server/src-rsr/migrations/30_to_31.sql index a173868a8d9f9..5c2e00d8a4f55 100644 --- a/server/src-rsr/migrations/30_to_31.sql +++ b/server/src-rsr/migrations/30_to_31.sql @@ -2,7 +2,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger ( name TEXT PRIMARY KEY, webhook_conf JSON NOT NULL, - schedule JSON NOT NULL, + schedule_conf JSON NOT NULL, payload JSON, retry_conf JSON, include_in_metadata BOOLEAN NOT NULL DEFAULT FALSE From 1db07feb4446cf6796e025cc41c7e7b26312c895 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Fri, 14 Feb 2020 18:24:32 +0530 Subject: [PATCH 
044/195] implement `track_scheduled_trigger` api and remove `include_in_metadata` from create api --- server/src-lib/Hasura/RQL/DDL/Metadata.hs | 7 ++- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 52 ++++++++++++------- .../Hasura/RQL/Types/ScheduledTrigger.hs | 16 ------ server/src-lib/Hasura/Server/Query.hs | 8 ++- 4 files changed, 45 insertions(+), 38 deletions(-) diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata.hs b/server/src-lib/Hasura/RQL/DDL/Metadata.hs index ea7b6f61bf7db..c177edf96b5ae 100644 --- a/server/src-lib/Hasura/RQL/DDL/Metadata.hs +++ b/server/src-lib/Hasura/RQL/DDL/Metadata.hs @@ -29,7 +29,8 @@ import Hasura.RQL.DDL.Permission.Internal (dropPermFromCatalog) import Hasura.RQL.DDL.RemoteSchema (addRemoteSchemaP2, removeRemoteSchemaFromCatalog) import Hasura.RQL.DDL.ScheduledTrigger (addScheduledTriggerToCatalog, - deleteScheduledTriggerFromCatalog) + deleteScheduledTriggerFromCatalog, + trackScheduledTriggerInCatalog) import Hasura.RQL.Types import Hasura.Server.Version (HasVersion) import Hasura.SQL.Types @@ -200,7 +201,9 @@ applyQP2 (ReplaceMetadata _ tables functionsMeta schemas collections allowlist s indexedMapM_ (void . 
addRemoteSchemaP2) schemas withPathK "scheduled_triggers" $ - indexedForM_ scheduledTriggers $ \st -> liftTx $ addScheduledTriggerToCatalog st True + indexedForM_ scheduledTriggers $ \st -> liftTx $ do + addScheduledTriggerToCatalog st + trackScheduledTriggerInCatalog (stName st) buildSchemaCacheStrict return successMsg diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 4ebff5067b422..78e5f623b8c1f 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -3,9 +3,11 @@ module Hasura.RQL.DDL.ScheduledTrigger , runUpdateScheduledTrigger , runDeleteScheduledTrigger , runCancelScheduledEvent + , runTrackScheduledTrigger , addScheduledTriggerToCatalog - , resolveScheduledTrigger , deleteScheduledTriggerFromCatalog + , trackScheduledTriggerInCatalog + , resolveScheduledTrigger ) where import Hasura.Db @@ -17,21 +19,21 @@ import Hasura.RQL.Types import qualified Database.PG.Query as Q -runCreateScheduledTrigger :: (CacheRWM m, MonadTx m) => CreateScheduledTriggerWith -> m EncJSON -runCreateScheduledTrigger (CreateScheduledTriggerWith definition includeInMetadata) = do - addScheduledTriggerToCatalog definition includeInMetadata - buildSchemaCacheFor $ MOScheduledTrigger $ stName definition +runCreateScheduledTrigger :: (CacheRWM m, MonadTx m) => CreateScheduledTrigger -> m EncJSON +runCreateScheduledTrigger q = do + addScheduledTriggerToCatalog q + buildSchemaCacheFor $ MOScheduledTrigger $ stName q return successMsg -addScheduledTriggerToCatalog :: (MonadTx m) => CreateScheduledTrigger -> Bool -> m () -addScheduledTriggerToCatalog CreateScheduledTrigger {..} includeInMetadata = liftTx $ do +addScheduledTriggerToCatalog :: (MonadTx m) => CreateScheduledTrigger -> m () +addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ do Q.unitQE defaultTxErrorHandler [Q.sql| INSERT into hdb_catalog.hdb_scheduled_trigger - (name, 
webhook_conf, schedule_conf, payload, retry_conf, include_in_metadata) - VALUES ($1, $2, $3, $4, $5, $6) + (name, webhook_conf, schedule_conf, payload, retry_conf) + VALUES ($1, $2, $3, $4, $5) |] (stName, Q.AltJ stWebhook, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf - , includeInMetadata) False + ) False case stSchedule of OneOff timestamp -> Q.unitQE defaultTxErrorHandler [Q.sql| @@ -57,14 +59,14 @@ resolveScheduledTrigger CreateScheduledTrigger {..} = do headerInfo pure stInfo -runUpdateScheduledTrigger :: (CacheRWM m, MonadTx m) => CreateScheduledTriggerWith -> m EncJSON -runUpdateScheduledTrigger (CreateScheduledTriggerWith definition includeInMetadata) = do - updateScheduledTriggerInCatalog definition includeInMetadata - buildSchemaCacheFor $ MOScheduledTrigger $ stName definition +runUpdateScheduledTrigger :: (CacheRWM m, MonadTx m) => CreateScheduledTrigger -> m EncJSON +runUpdateScheduledTrigger q = do + updateScheduledTriggerInCatalog q + buildSchemaCacheFor $ MOScheduledTrigger $ stName q return successMsg -updateScheduledTriggerInCatalog :: (MonadTx m) => CreateScheduledTrigger -> Bool -> m () -updateScheduledTriggerInCatalog CreateScheduledTrigger {..} includeInMetadata = liftTx $ do +updateScheduledTriggerInCatalog :: (MonadTx m) => CreateScheduledTrigger -> m () +updateScheduledTriggerInCatalog CreateScheduledTrigger {..} = liftTx $ do Q.unitQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.hdb_scheduled_trigger @@ -72,10 +74,9 @@ updateScheduledTriggerInCatalog CreateScheduledTrigger {..} includeInMetadata = schedule_conf = $3, payload = $4, retry_conf = $5, - include_in_metadata = $6 WHERE name = $1 |] (stName, Q.AltJ stWebhook, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf - , includeInMetadata) False + ) False -- since the scheduled trigger is updated, clear all its future events which are not retries Q.unitQE defaultTxErrorHandler [Q.sql| @@ -111,3 +112,18 @@ deleteScheduledEventFromCatalog seId = liftTx $ do 
WHERE id = $1 RETURNING count(*) |] (Identity seId) False + +runTrackScheduledTrigger :: (MonadTx m) => TriggerName -> m EncJSON +runTrackScheduledTrigger stName = do + trackScheduledTriggerInCatalog stName + return successMsg + +trackScheduledTriggerInCatalog :: (MonadTx m) => TriggerName -> m () +trackScheduledTriggerInCatalog stName = liftTx $ do + Q.unitQE defaultTxErrorHandler + [Q.sql| + UPDATE hdb_catalog.hdb_scheduled_trigger + SET include_in_metadata = 't' + WHERE name = $1 + |] (Identity stName) False + diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 373e3f8a52ab9..52017fe2b564b 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -1,7 +1,6 @@ -- | These are types for Scheduled Trigger definition; see "Hasura.Eventing.ScheduledTrigger" module Hasura.RQL.Types.ScheduledTrigger ( ScheduleType(..) - , CreateScheduledTriggerWith(..) , CreateScheduledTrigger(..) , RetryConfST(..) 
, formatTime' @@ -77,21 +76,6 @@ instance FromJSON CreateScheduledTrigger where $(deriveToJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''CreateScheduledTrigger) -data CreateScheduledTriggerWith - = CreateScheduledTriggerWith - { stwDefinition :: !CreateScheduledTrigger - , stwIncludeInMetadata :: !Bool - } deriving (Show, Eq, Generic) - -instance FromJSON CreateScheduledTriggerWith where - parseJSON = - withObject "CreateScheduledTriggerWith" $ \o -> do - stwDefinition <- o .: "definition" - stwIncludeInMetadata <- o .: "include_in_metadata" - pure CreateScheduledTriggerWith {..} - -$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''CreateScheduledTriggerWith) - -- Supported time string formats for the API: -- (see FromJSON for ZonedTime: https://hackage.haskell.org/package/aeson-1.4.6.0/docs/src/Data.Aeson.Types.FromJSON.html#line-2050) diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index bdc783ad0a525..bf8df96ce623a 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -88,10 +88,11 @@ data RQLQueryV1 | RQRedeliverEvent !RedeliverEventQuery | RQInvokeEventTrigger !InvokeEventTriggerQuery - | RQCreateScheduledTrigger !CreateScheduledTriggerWith - | RQUpdateScheduledTrigger !CreateScheduledTriggerWith + | RQCreateScheduledTrigger !CreateScheduledTrigger + | RQUpdateScheduledTrigger !CreateScheduledTrigger | RQDeleteScheduledTrigger !TriggerName | RQCancelScheduledEvent !EventId + | RQTrackScheduledTrigger !TriggerName -- query collections, allow list related | RQCreateQueryCollection !CreateCollection @@ -254,6 +255,7 @@ queryModifiesSchemaCache (RQV1 qi) = case qi of RQUpdateScheduledTrigger _ -> True RQDeleteScheduledTrigger _ -> True RQCancelScheduledEvent _ -> False + RQTrackScheduledTrigger _ -> False RQCreateQueryCollection _ -> True RQDropQueryCollection _ -> True @@ -377,6 +379,7 @@ runQueryM rq = withPathK "args" $ case rq of 
RQUpdateScheduledTrigger q -> runUpdateScheduledTrigger q RQDeleteScheduledTrigger q -> runDeleteScheduledTrigger q RQCancelScheduledEvent q -> runCancelScheduledEvent q + RQTrackScheduledTrigger q -> runTrackScheduledTrigger q RQCreateQueryCollection q -> runCreateCollection q RQDropQueryCollection q -> runDropCollection q @@ -455,6 +458,7 @@ requiresAdmin = \case RQUpdateScheduledTrigger _ -> True RQDeleteScheduledTrigger _ -> True RQCancelScheduledEvent _ -> True + RQTrackScheduledTrigger _ -> True RQCreateQueryCollection _ -> True RQDropQueryCollection _ -> True From 6cea9c4631decf7ff2a452b6191275b399520c91 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Fri, 14 Feb 2020 18:40:19 +0530 Subject: [PATCH 045/195] update docs --- .../scheduled-triggers.rst | 75 +++++++++++++++---- 1 file changed, 61 insertions(+), 14 deletions(-) diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst index 259b6724b6b37..4b04db399003c 100644 --- a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst @@ -58,13 +58,9 @@ Args syntax - TriggerName_ - Name of the scheduled trigger * - webhook - - false - - String - - Full url of webhook - * - webhook_from_env - - false - - String - - Environment variable which has the full url of webhook + - true + - Text | UrlFromEnv_ + - Url of webhook or environment variable which has the url * - schedule - true - ScheduleConf_ @@ -107,13 +103,9 @@ Args syntax - TriggerName_ - Name of the scheduled trigger * - webhook - - false - - String - - Full url of webhook - * - webhook_from_env - - false - - String - - Environment variable which has the full url of webhook + - true + - Text | UrlFromEnv_ + - Url of webhook or environment variable which has the url * - schedule - true - ScheduleConf_ @@ -204,6 +196,44 @@ Args syntax - 
true - UUID - Id of the scheduled event + +.. _track_scheduled_trigger: + +track_scheduled_trigger +----------------------- + +``track_scheduled_trigger`` is used to track a scheduled trigger in metadata so it can be exported/imported. + +.. code-block:: http + + POST /v1/query HTTP/1.1 + Content-Type: application/json + X-Hasura-Role: admin + + { + "type" : "track_scheduled_trigger", + "args" : { + "name": "sample_cron" + } + } + +.. _track_scheduled_trigger_syntax: + +Args syntax +^^^^^^^^^^^ + +.. list-table:: + :header-rows: 1 + + * - Key + - Required + - Schema + - Description + * - name + - true + - TriggerName_ + - Name of the scheduled trigger + .. _TriggerName: @@ -214,6 +244,23 @@ TriggerName String +.. _UrlFromEnv: + +UrlFromEnv +&&&&&&&&&& + +.. list-table:: + :header-rows: 1 + + * - Key + - required + - Schema + - Description + * - from_env + - true + - String + - Name of the environment variable which has the url + .. _ScheduleConf: ScheduleConf From b3aa0b64c17987aae7bc37a90e72d8394270a156 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Fri, 14 Feb 2020 18:44:34 +0530 Subject: [PATCH 046/195] few bug fixes --- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 21 +++++++++---------- server/src-lib/Hasura/RQL/Types/Catalog.hs | 5 ++--- .../Hasura/RQL/Types/ScheduledTrigger.hs | 15 +++++++++++++ server/src-lib/Hasura/Server/Query.hs | 6 +++--- server/src-rsr/catalog_metadata.sql | 2 +- 5 files changed, 31 insertions(+), 18 deletions(-) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 78e5f623b8c1f..41eb72ca5bb03 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -73,7 +73,7 @@ updateScheduledTriggerInCatalog CreateScheduledTrigger {..} = liftTx $ do SET webhook_conf = $2, schedule_conf = $3, payload = $4, - retry_conf = $5, + retry_conf = $5 WHERE name = $1 |] (stName, Q.AltJ stWebhook, Q.AltJ stSchedule, Q.AltJ 
<$> stPayload, Q.AltJ stRetryConf ) False @@ -84,8 +84,8 @@ updateScheduledTriggerInCatalog CreateScheduledTrigger {..} = liftTx $ do WHERE name = $1 AND scheduled_time > now() AND tries = 0 |] (Identity stName) False -runDeleteScheduledTrigger :: (CacheRWM m, MonadTx m) => TriggerName -> m EncJSON -runDeleteScheduledTrigger stName = do +runDeleteScheduledTrigger :: (CacheRWM m, MonadTx m) => ScheduledTriggerName -> m EncJSON +runDeleteScheduledTrigger (ScheduledTriggerName stName) = do deleteScheduledTriggerFromCatalog stName return successMsg @@ -97,9 +97,9 @@ deleteScheduledTriggerFromCatalog stName = liftTx $ do WHERE name = $1 |] (Identity stName) False -runCancelScheduledEvent :: (MonadTx m) => EventId -> m EncJSON -runCancelScheduledEvent se = do - affectedRows <- deleteScheduledEventFromCatalog se +runCancelScheduledEvent :: (MonadTx m) => ScheduledEventId -> m EncJSON +runCancelScheduledEvent (ScheduledEventId seId) = do + affectedRows <- deleteScheduledEventFromCatalog seId if | affectedRows == 1 -> pure successMsg | affectedRows == 0 -> throw400 NotFound "scheduled event not found" | otherwise -> throw500 "unexpected: more than one scheduled events cancelled" @@ -108,13 +108,12 @@ deleteScheduledEventFromCatalog :: (MonadTx m) => EventId -> m Int deleteScheduledEventFromCatalog seId = liftTx $ do (runIdentity . 
Q.getRow) <$> Q.withQE defaultTxErrorHandler [Q.sql| - DELETE FROM hdb_catalog.hdb_scheduled_events - WHERE id = $1 - RETURNING count(*) + WITH "cte" AS (DELETE FROM hdb_catalog.hdb_scheduled_events WHERE id = $1 RETURNING *) + SELECT count(*) FROM "cte" |] (Identity seId) False -runTrackScheduledTrigger :: (MonadTx m) => TriggerName -> m EncJSON -runTrackScheduledTrigger stName = do +runTrackScheduledTrigger :: (MonadTx m) => ScheduledTriggerName -> m EncJSON +runTrackScheduledTrigger (ScheduledTriggerName stName) = do trackScheduledTriggerInCatalog stName return successMsg diff --git a/server/src-lib/Hasura/RQL/Types/Catalog.hs b/server/src-lib/Hasura/RQL/Types/Catalog.hs index a3d6b1eaf7f4a..e736ba844714d 100644 --- a/server/src-lib/Hasura/RQL/Types/Catalog.hs +++ b/server/src-lib/Hasura/RQL/Types/Catalog.hs @@ -139,15 +139,14 @@ instance NFData CatalogFunction instance Cacheable CatalogFunction $(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogFunction) - data CatalogScheduledTrigger = CatalogScheduledTrigger { _cstName :: !TriggerName , _cstWebhookConf :: !WebhookConf - , _cstSchedule :: !ScheduleType + , _cstScheduleConf :: !ScheduleType , _cstPayload :: !(Maybe Value) , _cstRetryConf :: !RetryConfST - , _cstHeaders :: !(Maybe [HeaderConf]) + , _cstHeaderConf :: !(Maybe [HeaderConf]) } deriving (Show, Eq, Generic) instance NFData CatalogScheduledTrigger instance Cacheable CatalogScheduledTrigger diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 52017fe2b564b..3032e94d0145c 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -1,6 +1,8 @@ -- | These are types for Scheduled Trigger definition; see "Hasura.Eventing.ScheduledTrigger" module Hasura.RQL.Types.ScheduledTrigger ( ScheduleType(..) + , ScheduledTriggerName(..) + , ScheduledEventId(..) , CreateScheduledTrigger(..) , RetryConfST(..) 
, formatTime' @@ -76,6 +78,19 @@ instance FromJSON CreateScheduledTrigger where $(deriveToJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''CreateScheduledTrigger) +newtype ScheduledTriggerName + = ScheduledTriggerName { unName :: ET.TriggerName } + deriving (Show, Eq) + +$(deriveJSON (aesonDrop 2 snakeCase) ''ScheduledTriggerName) + +newtype ScheduledEventId + = ScheduledEventId{ unEventId:: ET.EventId} + deriving (Show, Eq) + +$(deriveJSON (aesonDrop 2 snakeCase) ''ScheduledEventId) + + -- Supported time string formats for the API: -- (see FromJSON for ZonedTime: https://hackage.haskell.org/package/aeson-1.4.6.0/docs/src/Data.Aeson.Types.FromJSON.html#line-2050) diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index bf8df96ce623a..951a8af75937b 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -90,9 +90,9 @@ data RQLQueryV1 | RQCreateScheduledTrigger !CreateScheduledTrigger | RQUpdateScheduledTrigger !CreateScheduledTrigger - | RQDeleteScheduledTrigger !TriggerName - | RQCancelScheduledEvent !EventId - | RQTrackScheduledTrigger !TriggerName + | RQDeleteScheduledTrigger !ScheduledTriggerName + | RQCancelScheduledEvent !ScheduledEventId + | RQTrackScheduledTrigger !ScheduledTriggerName -- query collections, allow list related | RQCreateQueryCollection !CreateCollection diff --git a/server/src-rsr/catalog_metadata.sql b/server/src-rsr/catalog_metadata.sql index fd4a478371b9f..c28161582af31 100644 --- a/server/src-rsr/catalog_metadata.sql +++ b/server/src-rsr/catalog_metadata.sql @@ -179,7 +179,7 @@ from json_build_object( 'name', name, 'webhook_conf', webhook_conf :: json, - 'schedule', schedule :: json, + 'schedule_conf', schedule_conf :: json, 'payload', payload :: json, 'retry_conf', retry_conf :: json ) From 74c7d744279e5364eaed4d032ffd827d0b4f1c62 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Fri, 14 Feb 2020 19:35:40 +0530 Subject: [PATCH 047/195] add 
`untrack_scheduled_trigger` api --- .../schema-metadata-api/index.rst | 10 +++++ .../scheduled-triggers.rst | 41 ++++++++++++++++++- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 14 +++++++ server/src-lib/Hasura/Server/Query.hs | 14 ++++--- 4 files changed, 72 insertions(+), 7 deletions(-) diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/index.rst b/docs/graphql/manual/api-reference/schema-metadata-api/index.rst index 6fa7c482a4bad..c4386908a0db4 100644 --- a/docs/graphql/manual/api-reference/schema-metadata-api/index.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/index.rst @@ -241,6 +241,16 @@ The various types of queries are listed in the following table: - 1 - Cancel a particular run of a scheduled trigger + * - :ref:`track_scheduled_trigger` + - :ref:`track_scheduled_trigger_args ` + - 1 + - Track an existing scheduled trigger so it can be exported in metadata + + * - :ref:`untrack_scheduled_trigger` + - :ref:`untrack_scheduled_trigger_args ` + - 1 + - Untrack an existing scheduled trigger so it won't be exported in metadata + * - :ref:`add_remote_schema` - :ref:`add_remote_schema_args ` - 1 diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst index 4b04db399003c..0605b4aee1b3f 100644 --- a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst @@ -202,7 +202,7 @@ Args syntax track_scheduled_trigger ----------------------- -``track_scheduled_trigger`` is used to track a scheduled trigger in metadata so it can be exported/imported. +``track_scheduled_trigger`` is used to track a scheduled trigger so it can be exported in metadata. By default, scheduled triggers are untracked. .. code-block:: http @@ -234,7 +234,44 @@ Args syntax - TriggerName_ - Name of the scheduled trigger - +.. 
_untrack_scheduled_trigger: + +untrack_scheduled_trigger +------------------------- + +``untrack_scheduled_trigger`` is used to untrack a scheduled trigger so it won't be exported in metadata. By default, scheduled triggers are untracked. + +.. code-block:: http + + POST /v1/query HTTP/1.1 + Content-Type: application/json + X-Hasura-Role: admin + + { + "type" : "untrack_scheduled_trigger", + "args" : { + "name": "sample_cron" + } + } + +.. _untrack_scheduled_trigger_syntax: + +Args syntax +^^^^^^^^^^^ + +.. list-table:: + :header-rows: 1 + + * - Key + - Required + - Schema + - Description + * - name + - true + - TriggerName_ + - Name of the scheduled trigger + + .. _TriggerName: TriggerName diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 41eb72ca5bb03..12baa89f05504 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -4,6 +4,7 @@ module Hasura.RQL.DDL.ScheduledTrigger , runDeleteScheduledTrigger , runCancelScheduledEvent , runTrackScheduledTrigger + , runUntrackScheduledTrigger , addScheduledTriggerToCatalog , deleteScheduledTriggerFromCatalog , trackScheduledTriggerInCatalog @@ -126,3 +127,16 @@ trackScheduledTriggerInCatalog stName = liftTx $ do WHERE name = $1 |] (Identity stName) False +runUntrackScheduledTrigger :: (MonadTx m) => ScheduledTriggerName -> m EncJSON +runUntrackScheduledTrigger (ScheduledTriggerName stName) = do + untrackScheduledTriggerInCatalog stName + return successMsg + +untrackScheduledTriggerInCatalog :: (MonadTx m) => TriggerName -> m () +untrackScheduledTriggerInCatalog stName = liftTx $ do + Q.unitQE defaultTxErrorHandler + [Q.sql| + UPDATE hdb_catalog.hdb_scheduled_trigger + SET include_in_metadata = 'f' + WHERE name = $1 + |] (Identity stName) False diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 951a8af75937b..05a652942bbc7 100644 --- 
a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -88,11 +88,12 @@ data RQLQueryV1 | RQRedeliverEvent !RedeliverEventQuery | RQInvokeEventTrigger !InvokeEventTriggerQuery - | RQCreateScheduledTrigger !CreateScheduledTrigger - | RQUpdateScheduledTrigger !CreateScheduledTrigger - | RQDeleteScheduledTrigger !ScheduledTriggerName - | RQCancelScheduledEvent !ScheduledEventId - | RQTrackScheduledTrigger !ScheduledTriggerName + | RQCreateScheduledTrigger !CreateScheduledTrigger + | RQUpdateScheduledTrigger !CreateScheduledTrigger + | RQDeleteScheduledTrigger !ScheduledTriggerName + | RQCancelScheduledEvent !ScheduledEventId + | RQTrackScheduledTrigger !ScheduledTriggerName + | RQUntrackScheduledTrigger !ScheduledTriggerName -- query collections, allow list related | RQCreateQueryCollection !CreateCollection @@ -256,6 +257,7 @@ queryModifiesSchemaCache (RQV1 qi) = case qi of RQDeleteScheduledTrigger _ -> True RQCancelScheduledEvent _ -> False RQTrackScheduledTrigger _ -> False + RQUntrackScheduledTrigger _ -> False RQCreateQueryCollection _ -> True RQDropQueryCollection _ -> True @@ -380,6 +382,7 @@ runQueryM rq = withPathK "args" $ case rq of RQDeleteScheduledTrigger q -> runDeleteScheduledTrigger q RQCancelScheduledEvent q -> runCancelScheduledEvent q RQTrackScheduledTrigger q -> runTrackScheduledTrigger q + RQUntrackScheduledTrigger q -> runUntrackScheduledTrigger q RQCreateQueryCollection q -> runCreateCollection q RQDropQueryCollection q -> runDropCollection q @@ -459,6 +462,7 @@ requiresAdmin = \case RQDeleteScheduledTrigger _ -> True RQCancelScheduledEvent _ -> True RQTrackScheduledTrigger _ -> True + RQUntrackScheduledTrigger _ -> True RQCreateQueryCollection _ -> True RQDropQueryCollection _ -> True From 90d0d1c219c9f0a2850fc93b7643194a5fb63f7f Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Fri, 14 Feb 2020 21:34:49 +0530 Subject: [PATCH 048/195] change OneOff to AdHoc type and implement `create_scheduled_event` api 
--- .../schema-metadata-api/index.rst | 5 ++ .../scheduled-triggers.rst | 53 +++++++++++++++++-- .../Hasura/Eventing/ScheduledTrigger.hs | 13 ++--- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 20 ++++--- .../Hasura/RQL/Types/ScheduledTrigger.hs | 12 ++++- server/src-lib/Hasura/Server/Query.hs | 4 ++ server/src-rsr/initialise.sql | 1 + server/src-rsr/migrations/30_to_31.sql | 1 + 8 files changed, 91 insertions(+), 18 deletions(-) diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/index.rst b/docs/graphql/manual/api-reference/schema-metadata-api/index.rst index c4386908a0db4..26b19af66d624 100644 --- a/docs/graphql/manual/api-reference/schema-metadata-api/index.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/index.rst @@ -236,6 +236,11 @@ The various types of queries are listed in the following table: - 1 - Delete an existing scheduled trigger + * - :ref:`create_scheduled_event` + - :ref:`create_scheduled_event_args ` + - 1 + - Create a new run of a scheduled trigger + * - :ref:`cancel_scheduled_event` - :ref:`cancel_scheduled_event_args ` - 1 diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst index 0605b4aee1b3f..361f393de496a 100644 --- a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst @@ -160,6 +160,53 @@ Args syntax - TriggerName_ - Name of the scheduled trigger +.. _create_scheduled_event: + +create_scheduled_event +---------------------- + +``create_scheduled_event`` is used to create a new scheduled event with given timestamp and payload + +.. code-block:: http + + POST /v1/query HTTP/1.1 + Content-Type: application/json + X-Hasura-Role: admin + + { + "type" : "create_scheduled_event", + "args" : { + "name": "sample-adhoc", + "timestamp": "2020-02-14 22:00:00 Z", + "payload": { "k" : "v"} + } + } + +.. 
_create_scheduled_event_syntax: + +Args syntax +^^^^^^^^^^^ + +.. list-table:: + :header-rows: 1 + + * - Key + - Required + - Schema + - Description + * - name + - true + - Text + - Name of the scheduled trigger + * - timestamp + - true + - UTCTime + - UTC Timestamp to invoke the trigger in ISO8601 format + * - payload + - false + - Object + - Any object to send with the trigger, will override configured payload + .. _cancel_scheduled_event: cancel_scheduled_event @@ -312,12 +359,12 @@ ScheduleConf - Description * - type - true - - OneOff | Cron + - Cron | AdHoc - Type of scheduled trigger * - value - - true + - false - String - - Timestamp in UTC or cron expression + - Cron expression (if type is Cron) .. _HeaderFromValue: diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 521fbda1163c4..39d1aded399ff 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -84,6 +84,7 @@ data ScheduledEventPartial { sepId :: !Text , sepName :: !TriggerName , sepScheduledTime :: !UTCTime + , sepPayload :: !(Maybe J.Value) , sepTries :: !Int } deriving (Show, Eq) @@ -143,7 +144,7 @@ generateScheduledEventsFrom :: UTCTime -> ScheduledTriggerInfo-> [ScheduledEvent generateScheduledEventsFrom time ScheduledTriggerInfo{..} = let events = case stiSchedule of - OneOff _ -> empty -- one-off scheduled events are generated during creation + AdHoc -> empty -- ad-hoc scheduled events are created through 'create_scheduled_event' API Cron cron -> generateScheduleTimesBetween time @@ -174,16 +175,16 @@ processScheduledQueue logger logEnv httpMgr pgpool getSC = case scheduledEventsE of Right partialEvents -> sequence_ $ - flip map partialEvents $ \(ScheduledEventPartial id' name st tries)-> do + flip map partialEvents $ \(ScheduledEventPartial id' name st payload tries)-> do let sti' = Map.lookup name scheduledTriggersInfo case sti' of Nothing -> L.unLogger 
logger $ ScheduledTriggerInternalErr $ err500 Unexpected "could not find scheduled trigger in cache" Just sti -> do let webhook = wciCachedValue $ stiWebhookInfo sti - payload = fromMaybe J.Null $ stiPayload sti + payload' = fromMaybe (fromMaybe J.Null $ stiPayload sti) payload -- override if neccessary retryConf = stiRetryConf sti - se = ScheduledEventFull id' name st tries webhook payload retryConf + se = ScheduledEventFull id' name st tries webhook payload' retryConf runReaderT (processScheduledEvent logEnv pgpool sti se) (logger, httpMgr) Left err -> L.unLogger logger $ ScheduledTriggerInternalErr $ err500 Unexpected $ "could not fetch scheduled events: " <> (T.pack $ show err) @@ -369,7 +370,7 @@ getScheduledEvents = do ) FOR UPDATE SKIP LOCKED ) - RETURNING id, name, scheduled_time, tries + RETURNING id, name, scheduled_time, additional_payload, tries |] () True pure $ partialSchedules - where uncurryEvent (i, n, st, tries) = ScheduledEventPartial i n st tries + where uncurryEvent (i, n, st, p, tries) = ScheduledEventPartial i n st (Q.getAltJ <$> p) tries diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 12baa89f05504..9661f012f4592 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -2,6 +2,7 @@ module Hasura.RQL.DDL.ScheduledTrigger ( runCreateScheduledTrigger , runUpdateScheduledTrigger , runDeleteScheduledTrigger + , runCreateScheduledEvent , runCancelScheduledEvent , runTrackScheduledTrigger , runUntrackScheduledTrigger @@ -35,14 +36,6 @@ addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ do VALUES ($1, $2, $3, $4, $5) |] (stName, Q.AltJ stWebhook, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf ) False - case stSchedule of - OneOff timestamp -> Q.unitQE defaultTxErrorHandler - [Q.sql| - INSERT into hdb_catalog.hdb_scheduled_events - (name, scheduled_time) - VALUES ($1, $2) - |] (stName, 
timestamp) False - _ -> pure () resolveScheduledTrigger :: (QErrM m, MonadIO m) @@ -88,6 +81,7 @@ updateScheduledTriggerInCatalog CreateScheduledTrigger {..} = liftTx $ do runDeleteScheduledTrigger :: (CacheRWM m, MonadTx m) => ScheduledTriggerName -> m EncJSON runDeleteScheduledTrigger (ScheduledTriggerName stName) = do deleteScheduledTriggerFromCatalog stName + withNewInconsistentObjsCheck buildSchemaCache return successMsg deleteScheduledTriggerFromCatalog :: (MonadTx m) => TriggerName -> m () @@ -98,6 +92,16 @@ deleteScheduledTriggerFromCatalog stName = liftTx $ do WHERE name = $1 |] (Identity stName) False +runCreateScheduledEvent :: (MonadTx m) => CreateScheduledEvent -> m EncJSON +runCreateScheduledEvent CreateScheduledEvent{..} = do + liftTx $ Q.unitQE defaultTxErrorHandler + [Q.sql| + INSERT into hdb_catalog.hdb_scheduled_events + (name, scheduled_time, additional_payload) + VALUES ($1, $2, $3) + |] (steName, steTimestamp, Q.AltJ <$> stePayload) False + pure successMsg + runCancelScheduledEvent :: (MonadTx m) => ScheduledEventId -> m EncJSON runCancelScheduledEvent (ScheduledEventId seId) = do affectedRows <- deleteScheduledEventFromCatalog seId diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 3032e94d0145c..8de22c00bddec 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -4,6 +4,7 @@ module Hasura.RQL.Types.ScheduledTrigger , ScheduledTriggerName(..) , ScheduledEventId(..) , CreateScheduledTrigger(..) + , CreateScheduledEvent(..) , RetryConfST(..) 
, formatTime' ) where @@ -44,7 +45,7 @@ defaultRetryConf = , rcstTolerance = 21600 -- 6 hours } -data ScheduleType = OneOff UTCTime | Cron CronSchedule +data ScheduleType = Cron CronSchedule | AdHoc deriving (Show, Eq, Generic) instance NFData ScheduleType @@ -78,6 +79,15 @@ instance FromJSON CreateScheduledTrigger where $(deriveToJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''CreateScheduledTrigger) +data CreateScheduledEvent + = CreateScheduledEvent + { steName :: !ET.TriggerName + , steTimestamp :: !UTCTime + , stePayload :: !(Maybe J.Value) + } deriving (Show, Eq, Generic) + +$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''CreateScheduledEvent) + newtype ScheduledTriggerName = ScheduledTriggerName { unName :: ET.TriggerName } deriving (Show, Eq) diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 05a652942bbc7..4e2925a91af76 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -91,6 +91,7 @@ data RQLQueryV1 | RQCreateScheduledTrigger !CreateScheduledTrigger | RQUpdateScheduledTrigger !CreateScheduledTrigger | RQDeleteScheduledTrigger !ScheduledTriggerName + | RQCreateScheduledEvent !CreateScheduledEvent | RQCancelScheduledEvent !ScheduledEventId | RQTrackScheduledTrigger !ScheduledTriggerName | RQUntrackScheduledTrigger !ScheduledTriggerName @@ -255,6 +256,7 @@ queryModifiesSchemaCache (RQV1 qi) = case qi of RQCreateScheduledTrigger _ -> True RQUpdateScheduledTrigger _ -> True RQDeleteScheduledTrigger _ -> True + RQCreateScheduledEvent _ -> False RQCancelScheduledEvent _ -> False RQTrackScheduledTrigger _ -> False RQUntrackScheduledTrigger _ -> False @@ -380,6 +382,7 @@ runQueryM rq = withPathK "args" $ case rq of RQCreateScheduledTrigger q -> runCreateScheduledTrigger q RQUpdateScheduledTrigger q -> runUpdateScheduledTrigger q RQDeleteScheduledTrigger q -> runDeleteScheduledTrigger q + RQCreateScheduledEvent q -> runCreateScheduledEvent q 
RQCancelScheduledEvent q -> runCancelScheduledEvent q RQTrackScheduledTrigger q -> runTrackScheduledTrigger q RQUntrackScheduledTrigger q -> runUntrackScheduledTrigger q @@ -460,6 +463,7 @@ requiresAdmin = \case RQCreateScheduledTrigger _ -> True RQUpdateScheduledTrigger _ -> True RQDeleteScheduledTrigger _ -> True + RQCreateScheduledEvent _ -> True RQCancelScheduledEvent _ -> True RQTrackScheduledTrigger _ -> True RQUntrackScheduledTrigger _ -> True diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index ef8f7259ad6ed..9d37795dd9779 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -679,6 +679,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events id TEXT DEFAULT gen_random_uuid() UNIQUE, name TEXT, scheduled_time TIMESTAMP NOT NULL, + additional_payload JSON, delivered BOOLEAN NOT NULL DEFAULT FALSE, error BOOLEAN NOT NULL DEFAULT FALSE, tries INTEGER NOT NULL DEFAULT 0, diff --git a/server/src-rsr/migrations/30_to_31.sql b/server/src-rsr/migrations/30_to_31.sql index 5c2e00d8a4f55..e61d6d530a036 100644 --- a/server/src-rsr/migrations/30_to_31.sql +++ b/server/src-rsr/migrations/30_to_31.sql @@ -13,6 +13,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events id TEXT DEFAULT gen_random_uuid() UNIQUE, name TEXT, scheduled_time TIMESTAMP NOT NULL, + additional_payload JSON, delivered BOOLEAN NOT NULL DEFAULT FALSE, error BOOLEAN NOT NULL DEFAULT FALSE, tries INTEGER NOT NULL DEFAULT 0, From 98a490bb4aac1d51c6184800887faae8605ebbbe Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Sun, 16 Feb 2020 09:51:06 +0530 Subject: [PATCH 049/195] adhoc trigger takes an optional timestamp value --- .../schema-metadata-api/scheduled-triggers.rst | 6 +++--- .../src-lib/Hasura/Eventing/ScheduledTrigger.hs | 2 +- .../src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs | 8 ++++++++ .../src-lib/Hasura/RQL/Types/ScheduledTrigger.hs | 16 ++++++++++++++-- 4 files changed, 26 insertions(+), 6 deletions(-) diff --git 
a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst index 361f393de496a..d2cd664a5385b 100644 --- a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst @@ -31,7 +31,7 @@ create_scheduled_trigger "name": "sample_cron", "webhook": "https://httpbin.org/post", "schedule": { - "type": "Cron", + "type": "cron", "value": "* * * * *" }, "payload": { @@ -359,12 +359,12 @@ ScheduleConf - Description * - type - true - - Cron | AdHoc + - cron | adhoc - Type of scheduled trigger * - value - false - String - - Cron expression (if type is Cron) + - If type is cron, then cron expression. If type is adhoc, then optional timestamp .. _HeaderFromValue: diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 39d1aded399ff..ac3e5d65c0721 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -144,7 +144,7 @@ generateScheduledEventsFrom :: UTCTime -> ScheduledTriggerInfo-> [ScheduledEvent generateScheduledEventsFrom time ScheduledTriggerInfo{..} = let events = case stiSchedule of - AdHoc -> empty -- ad-hoc scheduled events are created through 'create_scheduled_event' API + AdHoc _ -> empty -- ad-hoc scheduled events are created through 'create_scheduled_event' API Cron cron -> generateScheduleTimesBetween time diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 9661f012f4592..5f3b3acfe41ec 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -36,6 +36,14 @@ addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ do VALUES ($1, $2, $3, $4, $5) |] (stName, Q.AltJ stWebhook, Q.AltJ stSchedule, 
Q.AltJ <$> stPayload, Q.AltJ stRetryConf ) False + case stSchedule of + AdHoc (Just timestamp) -> Q.unitQE defaultTxErrorHandler + [Q.sql| + INSERT into hdb_catalog.hdb_scheduled_events + (name, scheduled_time) + VALUES ($1, $2) + |] (stName, timestamp) False + _ -> pure () resolveScheduledTrigger :: (QErrM m, MonadIO m) diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 8de22c00bddec..8ba74299819df 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -45,13 +45,25 @@ defaultRetryConf = , rcstTolerance = 21600 -- 6 hours } -data ScheduleType = Cron CronSchedule | AdHoc +data ScheduleType = Cron CronSchedule | AdHoc (Maybe UTCTime) deriving (Show, Eq, Generic) instance NFData ScheduleType instance Cacheable ScheduleType -$(deriveJSON defaultOptions{sumEncoding=TaggedObject "type" "value"} ''ScheduleType) +instance FromJSON ScheduleType where + parseJSON = + withObject "ScheduleType" $ \o -> do + type' <- o .: "type" + case type' of + String "cron" -> Cron <$> o .: "value" + String "adhoc" -> AdHoc <$> o .:? 
"value" + _ -> fail "expected type to be cron or adhoc" + +instance ToJSON ScheduleType where + toJSON (Cron cs) = object ["type" .= String "cron", "value" .= toJSON cs] + toJSON (AdHoc (Just ts)) = object ["type" .= String "adhoc", "value" .= toJSON ts] + toJSON (AdHoc Nothing) = object ["type" .= String "adhoc"] data CreateScheduledTrigger = CreateScheduledTrigger From 748ea4e5b362c9e8dd46e5ab0ac03dd198caefa0 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Mon, 17 Feb 2020 18:19:49 +0530 Subject: [PATCH 050/195] change logic of creating upcoming events (from next day to next 100) --- .../Hasura/Eventing/ScheduledTrigger.hs | 82 +++++++++++++++---- server/src-rsr/initialise.sql | 13 +++ server/src-rsr/migrations/30_to_31.sql | 13 +++ 3 files changed, 91 insertions(+), 17 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index ac3e5d65c0721..6b7d3fc09b59d 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -9,7 +9,8 @@ Scheduled events are modeled using rows in Postgres with a @timestamp@ column. During startup, two threads are started: 1. Generator: Fetches the list of scheduled triggers from cache and generates scheduled events -for the next @x@ hours (default: 24). This effectively corresponds to doing an INSERT with values containing specific timestamp. +if there are less than 100 upcoming events. +This effectively corresponds to doing an INSERT with values containing specific timestamp. 2. Processor: Fetches the scheduled events from db which are @<=NOW()@ and not delivered and delivers them. 
The delivery mechanism is similar to Event Triggers; see "Hasura.Eventing.EventTrigger" -} @@ -73,6 +74,13 @@ scheduledEventsTable = hdbCatalogSchema (TableName $ T.pack "hdb_scheduled_events") +data ScheduledTriggerStats + = ScheduledTriggerStats + { stsName :: !TriggerName + , stsUpcomingEventsCount :: !Int + , stsMaxScheduledTime :: !UTCTime + } deriving (Show, Eq) + data ScheduledEventSeed = ScheduledEventSeed { sesName :: !TriggerName @@ -109,21 +117,54 @@ runScheduledEventsGenerator :: runScheduledEventsGenerator logger pgpool getSC = do forever $ do sc <- getSC + -- get scheduled triggers from cache let scheduledTriggers = Map.elems $ scScheduledTriggers sc + + -- get scheduled trigger stats from db runExceptT - (Q.runTx - pgpool - (Q.ReadCommitted, Just Q.ReadWrite) - (insertScheduledEventsFor scheduledTriggers) ) >>= \case - Right _ -> pure () - Left err -> - L.unLogger logger $ ScheduledTriggerInternalErr $ err500 Unexpected (T.pack $ show err) + (Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadOnly) getScheduledTriggerStats) >>= \case + Left err -> L.unLogger logger $ + ScheduledTriggerInternalErr $ err500 Unexpected (T.pack $ show err) + Right scheduledTriggerStats -> do + + -- join scheduled triggers with stats and produce @[(ScheduledTriggerInfo, ScheduledTriggerStats)]@ + scheduledTriggersWithStats' <- mapM (withStats scheduledTriggerStats) scheduledTriggers + let scheduledTriggersWithStats = catMaybes scheduledTriggersWithStats' + + -- filter out scheduled trigger which have more than 100 upcoming events already + let scheduledTriggersForHydration = + filter (\(_sti, stats) -> stsUpcomingEventsCount stats < 100) scheduledTriggersWithStats + + -- insert scheduled events for scheduled triggers that need hydration + runExceptT + (Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadWrite) $ + insertScheduledEventsFor scheduledTriggersForHydration) >>= \case + Right _ -> pure () + Left err -> + L.unLogger logger $ ScheduledTriggerInternalErr $ err500 Unexpected 
(T.pack $ show err) threadDelay oneMinute + where + getScheduledTriggerStats = liftTx $ do + map uncurryStats <$> + Q.listQE defaultTxErrorHandler + [Q.sql| + SELECT name, upcoming_events_count, max_scheduled_time + FROM hdb_catalog.hdb_scheduled_events_stats + |] () True + uncurryStats (n, count, maxTs) = ScheduledTriggerStats n count maxTs + withStats stStats sti = do + let mStats = find (\ScheduledTriggerStats{stsName} -> stsName == stiName sti) stStats + case mStats of + Nothing -> do + L.unLogger logger $ + ScheduledTriggerInternalErr $ err500 Unexpected "could not find scheduled trigger in stats" + pure Nothing + Just stats -> pure $ Just (sti, stats) -insertScheduledEventsFor :: [ScheduledTriggerInfo] -> Q.TxE QErr () -insertScheduledEventsFor scheduledTriggers = do - currentTime <- liftIO getCurrentTime - let scheduledEvents = concatMap (generateScheduledEventsFrom currentTime) scheduledTriggers +insertScheduledEventsFor :: [(ScheduledTriggerInfo, ScheduledTriggerStats)] -> Q.TxE QErr () +insertScheduledEventsFor scheduledTriggersWithStats = do + let scheduledEvents = flip concatMap scheduledTriggersWithStats $ \(sti, stats) -> + generateScheduledEventsFrom (stsMaxScheduledTime stats) sti case scheduledEvents of [] -> pure () events -> do @@ -146,18 +187,25 @@ generateScheduledEventsFrom time ScheduledTriggerInfo{..} = case stiSchedule of AdHoc _ -> empty -- ad-hoc scheduled events are created through 'create_scheduled_event' API Cron cron -> - generateScheduleTimesBetween + generateScheduleTimes time - (addUTCTime nominalDay time) -- by default, generate events for one day + 100 -- by default, generate next 100 events cron in map (ScheduledEventSeed stiName) events --- | Generates events @(from, till]@ according to 'CronSchedule' -generateScheduleTimesBetween :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] -generateScheduleTimesBetween from till cron = takeWhile (<= till) $ go from + +-- | Generates next @n events starting @from according to 
'CronSchedule' +generateScheduleTimes :: UTCTime -> Int -> CronSchedule -> [UTCTime] +generateScheduleTimes from n cron = take n $ go from where go = unfoldr (fmap dup . nextMatch cron) +-- | Generates events @(from, till]@ according to 'CronSchedule' +-- generateScheduleTimesFrom :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] +-- generateScheduleTimesFrom from till cron = takeWhile (<= till) $ go from +-- where +-- go = unfoldr (fmap dup . nextMatch cron) + processScheduledQueue :: HasVersion => L.Logger L.Hasura diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index 9d37795dd9779..a3b36332b29de 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -704,3 +704,16 @@ CREATE TABLE hdb_catalog.hdb_scheduled_event_invocation_logs FOREIGN KEY (event_id) REFERENCES hdb_catalog.hdb_scheduled_events (id) ON DELETE CASCADE ); + +CREATE VIEW hdb_catalog.hdb_scheduled_events_stats AS + SELECT st.name, + COALESCE(ste.upcoming_events_count,0) as upcoming_events_count, + COALESCE(ste.max_scheduled_time, now()) as max_scheduled_time + FROM hdb_catalog.hdb_scheduled_trigger st + LEFT JOIN + ( SELECT name, count(*) as upcoming_events_count, max(scheduled_time) as max_scheduled_time + FROM hdb_catalog.hdb_scheduled_events + WHERE tries = 0 + GROUP BY name + ) ste + ON st.name = ste.name; diff --git a/server/src-rsr/migrations/30_to_31.sql b/server/src-rsr/migrations/30_to_31.sql index e61d6d530a036..7f79e236cf90a 100644 --- a/server/src-rsr/migrations/30_to_31.sql +++ b/server/src-rsr/migrations/30_to_31.sql @@ -38,3 +38,16 @@ CREATE TABLE hdb_catalog.hdb_scheduled_event_invocation_logs FOREIGN KEY (event_id) REFERENCES hdb_catalog.hdb_scheduled_events (id) ON DELETE CASCADE ); + +CREATE VIEW hdb_catalog.hdb_scheduled_events_stats AS + SELECT st.name, + COALESCE(ste.upcoming_events_count,0) as upcoming_events_count, + COALESCE(ste.max_scheduled_time, now()) as max_scheduled_time + FROM hdb_catalog.hdb_scheduled_trigger st + 
LEFT JOIN + ( SELECT name, count(*) as upcoming_events_count, max(scheduled_time) as max_scheduled_time + FROM hdb_catalog.hdb_scheduled_events + WHERE tries = 0 + GROUP BY name + ) ste + ON st.name = ste.name; From 6fb69e0740db383747264008ec374ad83610f277 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Mon, 17 Feb 2020 19:17:01 +0530 Subject: [PATCH 051/195] create scheduled events for cron during create also --- .../Hasura/Eventing/ScheduledTrigger.hs | 28 ++++++++++++++----- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 8 ++++++ 2 files changed, 29 insertions(+), 7 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 6b7d3fc09b59d..a37b2bd56e01c 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -17,6 +17,10 @@ The delivery mechanism is similar to Event Triggers; see "Hasura.Eventing.EventT module Hasura.Eventing.ScheduledTrigger ( processScheduledQueue , runScheduledEventsGenerator + + , ScheduledEventSeed(..) + , generateScheduleTimes + , insertScheduledEvents ) where import Control.Arrow.Extended (dup) @@ -181,19 +185,29 @@ insertScheduledEventsFor scheduledTriggersWithStats = do toArr (ScheduledEventSeed n t) = [(triggerNameToTxt n), (formatTime' t)] toTupleExp = TupleExp . map SELit +insertScheduledEvents :: [ScheduledEventSeed] -> Q.TxE QErr () +insertScheduledEvents events = do + let insertScheduledEventsSql = TB.run $ toSQL + SQLInsert + { siTable = scheduledEventsTable + , siCols = map (PGCol . T.pack) ["name", "scheduled_time"] + , siValues = ValuesExp $ map (toTupleExp . toArr) events + , siConflict = Just $ DoNothing Nothing + , siRet = Nothing + } + Q.unitQE defaultTxErrorHandler (Q.fromText insertScheduledEventsSql) () False + where + toArr (ScheduledEventSeed n t) = [(triggerNameToTxt n), (formatTime' t)] + toTupleExp = TupleExp . 
map SELit + generateScheduledEventsFrom :: UTCTime -> ScheduledTriggerInfo-> [ScheduledEventSeed] -generateScheduledEventsFrom time ScheduledTriggerInfo{..} = +generateScheduledEventsFrom startTime ScheduledTriggerInfo{..} = let events = case stiSchedule of AdHoc _ -> empty -- ad-hoc scheduled events are created through 'create_scheduled_event' API - Cron cron -> - generateScheduleTimes - time - 100 -- by default, generate next 100 events - cron + Cron cron -> generateScheduleTimes startTime 100 cron -- by default, generate next 100 events in map (ScheduledEventSeed stiName) events - -- | Generates next @n events starting @from according to 'CronSchedule' generateScheduleTimes :: UTCTime -> Int -> CronSchedule -> [UTCTime] generateScheduleTimes from n cron = take n $ go from diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 5f3b3acfe41ec..94da7d5124a99 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -19,7 +19,10 @@ import Hasura.RQL.DDL.EventTrigger ( getWebhookInfoFromConf , getHeaderInfosFromConf) import Hasura.RQL.Types +import Hasura.Eventing.ScheduledTrigger + import qualified Database.PG.Query as Q +import qualified Data.Time.Clock as C runCreateScheduledTrigger :: (CacheRWM m, MonadTx m) => CreateScheduledTrigger -> m EncJSON runCreateScheduledTrigger q = do @@ -43,6 +46,11 @@ addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ do (name, scheduled_time) VALUES ($1, $2) |] (stName, timestamp) False + Cron cron -> do + currentTime <- liftIO C.getCurrentTime + let scheduleTimes = generateScheduleTimes currentTime 100 cron -- generate next 100 events + events = map (ScheduledEventSeed stName) scheduleTimes + insertScheduledEvents events _ -> pure () resolveScheduledTrigger From d8ad5dbf646cf93a0beb07f6ecdc4e8609a11bb6 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Mon, 17 Feb 2020 19:25:27 +0530 
Subject: [PATCH 052/195] mark column as `cancelled` instead of deleting row for `cancel_scheduled_event` api --- server/src-lib/Hasura/Eventing/ScheduledTrigger.hs | 1 + server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs | 3 ++- server/src-rsr/initialise.sql | 1 + server/src-rsr/migrations/30_to_31.sql | 1 + 4 files changed, 5 insertions(+), 1 deletion(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index a37b2bd56e01c..9b54ce6de36ae 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -422,6 +422,7 @@ getScheduledEvents = do WHERE id IN ( SELECT t.id FROM hdb_catalog.hdb_scheduled_events t WHERE ( t.locked = 'f' + and t.cancelled = 'f' and t.delivered = 'f' and t.error = 'f' and ( diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 94da7d5124a99..9a86da1bc5d62 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -129,7 +129,8 @@ deleteScheduledEventFromCatalog :: (MonadTx m) => EventId -> m Int deleteScheduledEventFromCatalog seId = liftTx $ do (runIdentity . 
Q.getRow) <$> Q.withQE defaultTxErrorHandler [Q.sql| - WITH "cte" AS (DELETE FROM hdb_catalog.hdb_scheduled_events WHERE id = $1 RETURNING *) + WITH "cte" AS + (UPDATE hdb_catalog.hdb_scheduled_events SET cancelled = 't' WHERE id = $1 RETURNING *) SELECT count(*) FROM "cte" |] (Identity seId) False diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index a3b36332b29de..6cbbb7da18e19 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -680,6 +680,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events name TEXT, scheduled_time TIMESTAMP NOT NULL, additional_payload JSON, + cancelled BOOLEAN NOT NULL DEFAULT FALSE, delivered BOOLEAN NOT NULL DEFAULT FALSE, error BOOLEAN NOT NULL DEFAULT FALSE, tries INTEGER NOT NULL DEFAULT 0, diff --git a/server/src-rsr/migrations/30_to_31.sql b/server/src-rsr/migrations/30_to_31.sql index 7f79e236cf90a..a89eb1e7d807c 100644 --- a/server/src-rsr/migrations/30_to_31.sql +++ b/server/src-rsr/migrations/30_to_31.sql @@ -13,6 +13,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events id TEXT DEFAULT gen_random_uuid() UNIQUE, name TEXT, scheduled_time TIMESTAMP NOT NULL, + cancelled BOOLEAN NOT NULL DEFAULT FALSE, additional_payload JSON, delivered BOOLEAN NOT NULL DEFAULT FALSE, error BOOLEAN NOT NULL DEFAULT FALSE, From 3f8c6ea401f321c2c52c56213fa2872bff5c2dae Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 18 Feb 2020 14:43:36 +0530 Subject: [PATCH 053/195] validate (non)existence of scheduled trigger in DDLs --- .../Hasura/RQL/DDL/ScheduledTrigger.hs | 38 +++++++++++++++++-- 1 file changed, 35 insertions(+), 3 deletions(-) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index 9a86da1bc5d62..c561034c7afa6 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -23,12 +23,21 @@ import Hasura.Eventing.ScheduledTrigger import qualified 
Database.PG.Query as Q import qualified Data.Time.Clock as C +import qualified Data.HashMap.Strict as Map runCreateScheduledTrigger :: (CacheRWM m, MonadTx m) => CreateScheduledTrigger -> m EncJSON runCreateScheduledTrigger q = do + addScheduledTriggerP1 addScheduledTriggerToCatalog q buildSchemaCacheFor $ MOScheduledTrigger $ stName q return successMsg + where + addScheduledTriggerP1 = do + stMap <- scScheduledTriggers <$> askSchemaCache + case Map.lookup (stName q) stMap of + Nothing -> pure () + Just _ -> throw400 AlreadyExists $ + "scheduled trigger with name: " <> (triggerNameToTxt $ stName q) <> " already exists" addScheduledTriggerToCatalog :: (MonadTx m) => CreateScheduledTrigger -> m () addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ do @@ -71,9 +80,12 @@ resolveScheduledTrigger CreateScheduledTrigger {..} = do runUpdateScheduledTrigger :: (CacheRWM m, MonadTx m) => CreateScheduledTrigger -> m EncJSON runUpdateScheduledTrigger q = do + updateScheduledTriggerP1 (stName q) updateScheduledTriggerInCatalog q buildSchemaCacheFor $ MOScheduledTrigger $ stName q return successMsg + where + updateScheduledTriggerP1 = checkExists updateScheduledTriggerInCatalog :: (MonadTx m) => CreateScheduledTrigger -> m () updateScheduledTriggerInCatalog CreateScheduledTrigger {..} = liftTx $ do @@ -96,9 +108,12 @@ updateScheduledTriggerInCatalog CreateScheduledTrigger {..} = liftTx $ do runDeleteScheduledTrigger :: (CacheRWM m, MonadTx m) => ScheduledTriggerName -> m EncJSON runDeleteScheduledTrigger (ScheduledTriggerName stName) = do + deleteScheduledTriggerP1 stName deleteScheduledTriggerFromCatalog stName withNewInconsistentObjsCheck buildSchemaCache return successMsg + where + deleteScheduledTriggerP1 = checkExists deleteScheduledTriggerFromCatalog :: (MonadTx m) => TriggerName -> m () deleteScheduledTriggerFromCatalog stName = liftTx $ do @@ -108,8 +123,9 @@ deleteScheduledTriggerFromCatalog stName = liftTx $ do WHERE name = $1 |] (Identity stName) 
False -runCreateScheduledEvent :: (MonadTx m) => CreateScheduledEvent -> m EncJSON +runCreateScheduledEvent :: (CacheRM m, MonadTx m) => CreateScheduledEvent -> m EncJSON runCreateScheduledEvent CreateScheduledEvent{..} = do + createScheduledEventP1 steName liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql| INSERT into hdb_catalog.hdb_scheduled_events @@ -117,6 +133,8 @@ runCreateScheduledEvent CreateScheduledEvent{..} = do VALUES ($1, $2, $3) |] (steName, steTimestamp, Q.AltJ <$> stePayload) False pure successMsg + where + createScheduledEventP1 = checkExists runCancelScheduledEvent :: (MonadTx m) => ScheduledEventId -> m EncJSON runCancelScheduledEvent (ScheduledEventId seId) = do @@ -134,10 +152,13 @@ deleteScheduledEventFromCatalog seId = liftTx $ do SELECT count(*) FROM "cte" |] (Identity seId) False -runTrackScheduledTrigger :: (MonadTx m) => ScheduledTriggerName -> m EncJSON +runTrackScheduledTrigger :: (CacheRM m, MonadTx m) => ScheduledTriggerName -> m EncJSON runTrackScheduledTrigger (ScheduledTriggerName stName) = do + trackScheduledTriggerP1 stName trackScheduledTriggerInCatalog stName return successMsg + where + trackScheduledTriggerP1 = checkExists trackScheduledTriggerInCatalog :: (MonadTx m) => TriggerName -> m () trackScheduledTriggerInCatalog stName = liftTx $ do @@ -148,10 +169,13 @@ trackScheduledTriggerInCatalog stName = liftTx $ do WHERE name = $1 |] (Identity stName) False -runUntrackScheduledTrigger :: (MonadTx m) => ScheduledTriggerName -> m EncJSON +runUntrackScheduledTrigger :: (CacheRM m, MonadTx m) => ScheduledTriggerName -> m EncJSON runUntrackScheduledTrigger (ScheduledTriggerName stName) = do + untrackScheduledTriggerP1 stName untrackScheduledTriggerInCatalog stName return successMsg + where + untrackScheduledTriggerP1 = checkExists untrackScheduledTriggerInCatalog :: (MonadTx m) => TriggerName -> m () untrackScheduledTriggerInCatalog stName = liftTx $ do @@ -161,3 +185,11 @@ untrackScheduledTriggerInCatalog stName = liftTx $ do SET 
include_in_metadata = 'f' WHERE name = $1 |] (Identity stName) False + +checkExists :: (CacheRM m, MonadError QErr m) => TriggerName -> m () +checkExists name = do + stMap <- scScheduledTriggers <$> askSchemaCache + void $ onNothing (Map.lookup name stMap) notExistsErr + where + notExistsErr= throw400 NotExists $ + "scheduled trigger with name: " <> (triggerNameToTxt name) <> " does not exist" From 818344a7510ac01ad90b4cb8d752a9c8c1ed8672 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 25 Feb 2020 11:39:50 +0530 Subject: [PATCH 054/195] add downgrade 33_to_32 --- server/src-rsr/migrations/33_to_32.sql | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 server/src-rsr/migrations/33_to_32.sql diff --git a/server/src-rsr/migrations/33_to_32.sql b/server/src-rsr/migrations/33_to_32.sql new file mode 100644 index 0000000000000..9658d3c480d1b --- /dev/null +++ b/server/src-rsr/migrations/33_to_32.sql @@ -0,0 +1,4 @@ +DROP VIEW hdb_catalog.hdb_scheduled_events_stats; +DROP TABLE hdb_catalog.hdb_scheduled_event_invocation_logs; +DROP TABLE hdb_catalog.hdb_scheduled_events; +DROP TABLE hdb_catalog.hdb_scheduled_trigger; From bf5d4e839bbeab1f1fb4d5ef628fbcb7327c802e Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Tue, 25 Feb 2020 13:27:30 +0530 Subject: [PATCH 055/195] add a new column 'header_conf' in the scheduled_trigger table (#78) While an event of the ST is fired, the headers will also be sent --- server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs | 6 +++--- server/src-rsr/catalog_metadata.sql | 3 ++- server/src-rsr/initialise.sql | 1 + server/src-rsr/migrations/32_to_33.sql | 1 + 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index c561034c7afa6..dc7dae82d6fcb 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -44,10 +44,10 @@ 
addScheduledTriggerToCatalog CreateScheduledTrigger {..} = liftTx $ do Q.unitQE defaultTxErrorHandler [Q.sql| INSERT into hdb_catalog.hdb_scheduled_trigger - (name, webhook_conf, schedule_conf, payload, retry_conf) - VALUES ($1, $2, $3, $4, $5) + (name, webhook_conf, schedule_conf, payload, retry_conf, header_conf) + VALUES ($1, $2, $3, $4, $5, $6) |] (stName, Q.AltJ stWebhook, Q.AltJ stSchedule, Q.AltJ <$> stPayload, Q.AltJ stRetryConf - ) False + ,Q.AltJ stHeaders) False case stSchedule of AdHoc (Just timestamp) -> Q.unitQE defaultTxErrorHandler [Q.sql| diff --git a/server/src-rsr/catalog_metadata.sql b/server/src-rsr/catalog_metadata.sql index fefb323a175e8..c16f86be5d0b6 100644 --- a/server/src-rsr/catalog_metadata.sql +++ b/server/src-rsr/catalog_metadata.sql @@ -216,7 +216,8 @@ from 'webhook_conf', webhook_conf :: json, 'schedule_conf', schedule_conf :: json, 'payload', payload :: json, - 'retry_conf', retry_conf :: json + 'retry_conf', retry_conf :: json, + 'header_conf', header_conf :: json ) ), '[]' diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index dcaa522ffb8ec..b2a6372f31f3b 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -726,6 +726,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger schedule_conf JSON NOT NULL, payload JSON, retry_conf JSON, + header_conf JSON, include_in_metadata BOOLEAN NOT NULL DEFAULT FALSE ); diff --git a/server/src-rsr/migrations/32_to_33.sql b/server/src-rsr/migrations/32_to_33.sql index a89eb1e7d807c..7001e74d61ce6 100644 --- a/server/src-rsr/migrations/32_to_33.sql +++ b/server/src-rsr/migrations/32_to_33.sql @@ -5,6 +5,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_trigger schedule_conf JSON NOT NULL, payload JSON, retry_conf JSON, + header_conf JSON, include_in_metadata BOOLEAN NOT NULL DEFAULT FALSE ); From b4fab749ceece69c3d6ac60d335e53eb373f1786 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 25 Feb 2020 14:58:05 +0530 Subject: [PATCH 056/195] 
no-op --- .../schema-metadata-api/index.rst | 8 +-- .../scheduled-triggers.rst | 6 +- server/graphql-engine.cabal | 3 +- .../src-lib/Hasura/Eventing/EventTrigger.hs | 3 +- server/src-lib/Hasura/Eventing/HTTP.hs | 58 ++++++------------- .../Hasura/Eventing/ScheduledTrigger.hs | 3 +- server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs | 14 +++-- server/src-lib/Hasura/RQL/Types.hs | 10 ---- server/src-lib/Hasura/RQL/Types/Catalog.hs | 2 +- server/src-lib/Hasura/RQL/Types/Common.hs | 6 ++ .../Hasura/RQL/Types/ScheduledTrigger.hs | 7 ++- 11 files changed, 53 insertions(+), 67 deletions(-) diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/index.rst b/docs/graphql/manual/api-reference/schema-metadata-api/index.rst index fdfe8dd7bb233..83972146a8aa8 100644 --- a/docs/graphql/manual/api-reference/schema-metadata-api/index.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/index.rst @@ -239,22 +239,22 @@ The various types of queries are listed in the following table: * - :ref:`create_scheduled_event` - :ref:`create_scheduled_event_args ` - 1 - - Create a new run of a scheduled trigger + - Create a new event for a scheduled trigger * - :ref:`cancel_scheduled_event` - :ref:`cancel_scheduled_event_args ` - 1 - - Cancel a particular run of a scheduled trigger + - Cancel a particular event of a scheduled trigger * - :ref:`track_scheduled_trigger` - :ref:`track_scheduled_trigger_args ` - 1 - - Track an existing scheduled trigger so it can be exported in metadata + - Scheduled triggers need to be tracked so they can be exported to metadata. More details in API reference * - :ref:`untrack_scheduled_trigger` - :ref:`untrack_scheduled_trigger_args ` - 1 - - Untrack an existing scheduled trigger so it won't be exported in metadata + - Untrack an already tracked scheduled trigger. 
More details in API reference * - :ref:`add_remote_schema` - :ref:`add_remote_schema_args ` diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst index d2cd664a5385b..33a722dba0138 100644 --- a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst @@ -249,7 +249,9 @@ Args syntax track_scheduled_trigger ----------------------- -``track_scheduled_trigger`` is used to track a scheduled trigger so it can be exported in metadata. By default, scheduled triggers are untracked. +``track_scheduled_trigger`` is used to enable export of scheduled trigger in metadata. By default, scheduled triggers are untracked. +This is because different Scheduled Triggers can have different configurations which may be related to user data and hence may not make +sense to be included in the metadata. For example, a one-time scheduled event which has a payload of some user-id should not be in metadata. .. code-block:: http @@ -286,7 +288,9 @@ Args syntax untrack_scheduled_trigger ------------------------- -``untrack_scheduled_trigger`` is used to untrack a scheduled trigger so it won't be exported in metadata. By default, scheduled triggers are untracked. +``untrack_scheduled_trigger`` is used to disable export of scheduled trigger in metadata. See track_scheduled_trigger_ . .. 
code-block:: http diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index 905f7df25dc20..6d33af126b419 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -30,7 +30,7 @@ common common-all default-language: Haskell2010 default-extensions: - ApplicativeDo BangPatterns BlockArguments ConstraintKinds DefaultSignatures DeriveDataTypeable + ApplicativeDo BangPatterns BlockArguments ConstraintKinds DataKinds DefaultSignatures DeriveDataTypeable DeriveFoldable DeriveFunctor DeriveGeneric DeriveLift DeriveTraversable DerivingVia EmptyCase FlexibleContexts FlexibleInstances FunctionalDependencies GeneralizedNewtypeDeriving InstanceSigs LambdaCase MultiParamTypeClasses MultiWayIf NamedFieldPuns NoImplicitPrelude @@ -279,7 +279,6 @@ library , Hasura.RQL.Types.QueryCollection , Hasura.RQL.Types.Action , Hasura.RQL.Types.RemoteSchema - , Hasura.RQL.Types.Helpers , Hasura.RQL.Types.ScheduledTrigger , Hasura.RQL.DDL.ComputedField , Hasura.RQL.DDL.Relationship diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 74c6d7780c1af..9200ac5d76672 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -16,6 +16,7 @@ import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH import Data.Has +import Data.Int (Int64) import Data.Time.Clock import Hasura.Eventing.HTTP import Hasura.HTTP @@ -307,7 +308,7 @@ insertInvocation invo = do INSERT INTO hdb_catalog.event_invocation_logs (event_id, status, request, response) VALUES ($1, $2, $3, $4) |] ( iEventId invo - , toInt64 $ iStatus invo + , fromIntegral $ iStatus invo :: Int64 , Q.AltJ $ toJSON $ iRequest invo , Q.AltJ $ toJSON $ iResponse invo) True Q.unitQE defaultTxErrorHandler [Q.sql| diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index 953213dc9cdf6..794120d04135c 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ 
b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -1,5 +1,3 @@ -{-# LANGUAGE DataKinds #-} - module Hasura.Eventing.HTTP ( HTTPErr(..) , HTTPResp(..) @@ -23,7 +21,6 @@ module Hasura.Eventing.HTTP , DeliveryInfo(..) , mkWebhookReq , mkResp - , toInt64 , LogEnvHeaders , encodeHeader , decodeHeader @@ -51,7 +48,6 @@ import Control.Monad.IO.Class (MonadIO, liftIO) import Control.Monad.Reader (MonadReader) import Data.Either import Data.Has -import Data.Int (Int64) import Hasura.Logging import Hasura.Prelude import Hasura.RQL.DDL.Headers @@ -65,7 +61,7 @@ retryAfterHeader = "Retry-After" data WebhookRequest = WebhookRequest { _rqPayload :: J.Value - , _rqHeaders :: Maybe [HeaderConf] + , _rqHeaders :: [HeaderConf] , _rqVersion :: T.Text } $(J.deriveToJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''WebhookRequest) @@ -73,7 +69,7 @@ $(J.deriveToJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''Webhook data WebhookResponse = WebhookResponse { _wrsBody :: TBS.TByteString - , _wrsHeaders :: Maybe [HeaderConf] + , _wrsHeaders :: [HeaderConf] , _wrsStatus :: Int } $(J.deriveToJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''WebhookResponse) @@ -83,16 +79,17 @@ $(J.deriveToJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''ClientE type Version = T.Text -data TriggerTypes = ET | ST +-- | There are two types of events: Event (for event triggers) and Scheduled (for scheduled triggers) +data TriggerTypes = Event | Scheduled -data Response = ResponseType1 WebhookResponse | ResponseType2 ClientError +data Response = ResponseHTTP WebhookResponse | ResponseError ClientError instance J.ToJSON Response where - toJSON (ResponseType1 resp) = J.object + toJSON (ResponseHTTP resp) = J.object [ "type" J..= J.String "webhook_response" , "data" J..= J.toJSON resp ] - toJSON (ResponseType2 err ) = J.object + toJSON (ResponseError err) = J.object [ "type" J..= J.String "client_error" , "data" J..= J.toJSON err ] @@ -121,10 +118,10 @@ data HTTPResp (a :: 
TriggerTypes) $(J.deriveToJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''HTTPResp) -instance ToEngineLog (HTTPResp 'ET) Hasura where +instance ToEngineLog (HTTPResp 'Event) Hasura where toEngineLog resp = (LevelInfo, eventTriggerLogType, J.toJSON resp) -instance ToEngineLog (HTTPResp 'ST) Hasura where +instance ToEngineLog (HTTPResp 'Scheduled) Hasura where toEngineLog resp = (LevelInfo, scheduledTriggerLogType, J.toJSON resp) data HTTPErr (a :: TriggerTypes) @@ -149,10 +146,10 @@ instance J.ToJSON (HTTPErr a) where toObj (k, v) = J.object [ "type" J..= k , "detail" J..= v] -instance ToEngineLog (HTTPErr 'ET) Hasura where +instance ToEngineLog (HTTPErr 'Event) Hasura where toEngineLog err = (LevelError, eventTriggerLogType, J.toJSON err) -instance ToEngineLog (HTTPErr 'ST) Hasura where +instance ToEngineLog (HTTPErr 'Scheduled) Hasura where toEngineLog err = (LevelError, scheduledTriggerLogType, J.toJSON err) mkHTTPResp :: HTTP.Response LBS.ByteString -> HTTPResp a @@ -175,10 +172,10 @@ data HTTPRespExtra (a :: TriggerTypes) $(J.deriveToJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''HTTPRespExtra) -instance ToEngineLog (HTTPRespExtra 'ET) Hasura where +instance ToEngineLog (HTTPRespExtra 'Event) Hasura where toEngineLog resp = (LevelInfo, eventTriggerLogType, J.toJSON resp) -instance ToEngineLog (HTTPRespExtra 'ST) Hasura where +instance ToEngineLog (HTTPRespExtra 'Scheduled) Hasura where toEngineLog resp = (LevelInfo, scheduledTriggerLogType, J.toJSON resp) isNetworkError :: HTTPErr a -> Bool @@ -221,7 +218,7 @@ logHTTPForET , Has (Logger Hasura) r , MonadIO m ) - => Either (HTTPErr 'ET) (HTTPResp 'ET) -> Maybe ExtraLogContext -> m () + => Either (HTTPErr 'Event) (HTTPResp 'Event) -> Maybe ExtraLogContext -> m () logHTTPForET eitherResp extraLogCtx = do logger :: Logger Hasura <- asks getter unLogger logger $ HTTPRespExtra eitherResp extraLogCtx @@ -231,7 +228,7 @@ logHTTPForST , Has (Logger Hasura) r , MonadIO m ) - => Either (HTTPErr 
'ST) (HTTPResp 'ST) -> Maybe ExtraLogContext -> m () + => Either (HTTPErr 'Scheduled) (HTTPResp 'Scheduled) -> Maybe ExtraLogContext -> m () logHTTPForST eitherResp extraLogCtx = do logger :: Logger Hasura <- asks getter unLogger logger $ HTTPRespExtra eitherResp extraLogCtx @@ -284,37 +281,20 @@ $(J.deriveJSON (J.aesonDrop 2 J.snakeCase){J.omitNothingFields=True} ''DeliveryI mkResp :: Int -> TBS.TByteString -> [HeaderConf] -> Response mkResp status payload headers = - let wr = WebhookResponse payload (mkMaybe headers) status - in ResponseType1 wr + let wr = WebhookResponse payload headers status + in ResponseHTTP wr mkClientErr :: TBS.TByteString -> Response mkClientErr message = let cerr = ClientError message - in ResponseType2 cerr + in ResponseError cerr mkWebhookReq :: J.Value -> [HeaderConf] -> Version -> WebhookRequest -mkWebhookReq payload headers = WebhookRequest payload (mkMaybe headers) +mkWebhookReq payload headers = WebhookRequest payload headers isClientError :: Int -> Bool isClientError status = status >= 1000 -mkMaybe :: [a] -> Maybe [a] -mkMaybe [] = Nothing -mkMaybe x = Just x - --- logHTTPErr --- :: ( MonadReader r m --- , Has (L.Logger L.Hasura) r --- , MonadIO m --- ) --- => HTTPErr a -> m () --- logHTTPErr err = do --- logger :: L.Logger L.Hasura <- asks getter --- L.unLogger logger err - -toInt64 :: (Integral a) => a -> Int64 -toInt64 = fromIntegral - encodeHeader :: EventHeaderInfo -> HTTP.Header encodeHeader (EventHeaderInfo hconf cache) = let (HeaderConf name _) = hconf diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 51bdc532216d2..8b0cfe22a27e2 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -26,6 +26,7 @@ module Hasura.Eventing.ScheduledTrigger import Control.Arrow.Extended (dup) import Control.Concurrent (threadDelay) import Data.Has +import Data.Int (Int64) import Data.List (unfoldr) 
import Data.Time.Clock import Hasura.Eventing.HTTP @@ -405,7 +406,7 @@ insertInvocation invo = do (event_id, status, request, response) VALUES ($1, $2, $3, $4) |] ( iEventId invo - , toInt64 $ iStatus invo + , fromIntegral $ iStatus invo :: Int64 , Q.AltJ $ J.toJSON $ iRequest invo , Q.AltJ $ J.toJSON $ iResponse invo) True Q.unitQE defaultTxErrorHandler [Q.sql| diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs index ed9115f9bb4fb..5020e2e0e411a 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs @@ -279,11 +279,17 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do -- scheduled triggers scheduledTriggersMap <- (mapFromL _cstName scheduledTriggers >- returnA) - >-> (| Inc.keyed (\_ (CatalogScheduledTrigger n wc s p rc h ) -> do - let q = CreateScheduledTrigger n wc s p rc (fromMaybe [] h) + >-> (| Inc.keyed (\_ (CatalogScheduledTrigger{..}) -> do + let q = CreateScheduledTrigger + _cstName + _cstWebhookConf + _cstScheduleConf + _cstPayload + (fromMaybe defaultRetryConfST _cstRetryConf) + (fromMaybe [] _cstHeaderConf) definition = toJSON q - triggerName = triggerNameToTxt n - metadataObject = MetadataObject (MOScheduledTrigger n) definition + triggerName = triggerNameToTxt _cstName + metadataObject = MetadataObject (MOScheduledTrigger _cstName) definition addScheduledTriggerContext e = "in scheduled trigger " <> triggerName <> ": " <> e (| withRecordInconsistency ( (| modifyErrA (bindErrorA -< resolveScheduledTrigger q) diff --git a/server/src-lib/Hasura/RQL/Types.hs b/server/src-lib/Hasura/RQL/Types.hs index 7720604b5f286..78198d215d600 100644 --- a/server/src-lib/Hasura/RQL/Types.hs +++ b/server/src-lib/Hasura/RQL/Types.hs @@ -31,8 +31,6 @@ module Hasura.RQL.Types , askEventTriggerInfo , askTabInfoFromTrigger - , adminOnly - , HeaderObj , liftMaybe @@ -53,7 +51,6 @@ import Hasura.RQL.Types.DML as R import Hasura.RQL.Types.Error 
as R import Hasura.RQL.Types.EventTrigger as R import Hasura.RQL.Types.Function as R -import Hasura.RQL.Types.Helpers as R import Hasura.RQL.Types.Metadata as R import Hasura.RQL.Types.Permission as R import Hasura.RQL.Types.QueryCollection as R @@ -291,11 +288,4 @@ askFieldInfo m f = askCurRole :: (UserInfoM m) => m RoleName askCurRole = userRole <$> askUserInfo -adminOnly :: (UserInfoM m, QErrM m) => m () -adminOnly = do - curRole <- askCurRole - unless (curRole == adminRole) $ throw400 AccessDenied errMsg - where - errMsg = "restricted access : admin only" - type HeaderObj = M.HashMap T.Text T.Text diff --git a/server/src-lib/Hasura/RQL/Types/Catalog.hs b/server/src-lib/Hasura/RQL/Types/Catalog.hs index a894bcf8875b3..14309a9cd7476 100644 --- a/server/src-lib/Hasura/RQL/Types/Catalog.hs +++ b/server/src-lib/Hasura/RQL/Types/Catalog.hs @@ -149,7 +149,7 @@ data CatalogScheduledTrigger , _cstWebhookConf :: !WebhookConf , _cstScheduleConf :: !ScheduleType , _cstPayload :: !(Maybe Value) - , _cstRetryConf :: !RetryConfST + , _cstRetryConf :: !(Maybe RetryConfST) , _cstHeaderConf :: !(Maybe [HeaderConf]) } deriving (Show, Eq, Generic) instance NFData CatalogScheduledTrigger diff --git a/server/src-lib/Hasura/RQL/Types/Common.hs b/server/src-lib/Hasura/RQL/Types/Common.hs index fccf29a13a298..57d271af2c159 100644 --- a/server/src-lib/Hasura/RQL/Types/Common.hs +++ b/server/src-lib/Hasura/RQL/Types/Common.hs @@ -33,8 +33,11 @@ module Hasura.RQL.Types.Common , SystemDefined(..) 
, isSystemDefined + + , successMsg ) where +import Hasura.EncJSON import Hasura.Incremental (Cacheable) import Hasura.Prelude import Hasura.SQL.Types @@ -235,3 +238,6 @@ newtype SystemDefined = SystemDefined { unSystemDefined :: Bool } isSystemDefined :: SystemDefined -> Bool isSystemDefined = unSystemDefined + +successMsg :: EncJSON +successMsg = "{\"message\":\"success\"}" diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index dce5ce6512436..53f229ac0b876 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -7,6 +7,7 @@ module Hasura.RQL.Types.ScheduledTrigger , CreateScheduledEvent(..) , RetryConfST(..) , formatTime' + , defaultRetryConfST ) where import Data.Time.Clock @@ -36,8 +37,8 @@ instance Cacheable RetryConfST $(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''RetryConfST) -defaultRetryConf :: RetryConfST -defaultRetryConf = +defaultRetryConfST :: RetryConfST +defaultRetryConfST = RetryConfST { rcstNumRetries = 0 , rcstIntervalSec = seconds 10 @@ -85,7 +86,7 @@ instance FromJSON CreateScheduledTrigger where stWebhook <- o .: "webhook" stPayload <- o .:? "payload" stSchedule <- o .: "schedule" - stRetryConf <- o .:? "retry_conf" .!= defaultRetryConf + stRetryConf <- o .:? "retry_conf" .!= defaultRetryConfST stHeaders <- o .:? "headers" .!= [] pure CreateScheduledTrigger {..} From 0ca80a75fda645c2c7f1d3ab658f535d0bec6709 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Tue, 25 Feb 2020 15:02:10 +0530 Subject: [PATCH 057/195] add tests for scheduled triggers (#79) - Make a new scheduled events webhook server on port 5594 - Setting the session timezone of the db to UTC so that "SELECT NOW()" returns UTC time. 
- Cron's schedule is set from 30 mins from the time the tests run, so it will not be run(to avoid race condition between the cron ST and the adhoc ST) - The tests will run for a maximum of a minute(until the event is fired) --- server/tests-py/conftest.py | 10 ++ server/tests-py/context.py | 3 + server/tests-py/requirements.txt | 1 + server/tests-py/test_scheduled_triggers.py | 191 +++++++++++++++++++++ 4 files changed, 205 insertions(+) create mode 100644 server/tests-py/test_scheduled_triggers.py diff --git a/server/tests-py/conftest.py b/server/tests-py/conftest.py index 77613c34c06cf..e3cdaa15bad2d 100644 --- a/server/tests-py/conftest.py +++ b/server/tests-py/conftest.py @@ -266,6 +266,16 @@ def actions_webhook(hge_ctx): webhook_httpd.server_close() web_server.join() +@pytest.fixture(scope='class') +def scheduled_triggers_evts_webhook(request): + webhook_httpd = EvtsWebhookServer(server_address=('127.0.0.1', 5594)) + web_server = threading.Thread(target=webhook_httpd.serve_forever) + web_server.start() + yield webhook_httpd + webhook_httpd.shutdown() + webhook_httpd.server_close() + web_server.join() + @pytest.fixture(scope='class') def ws_client(request, hge_ctx): """ diff --git a/server/tests-py/context.py b/server/tests-py/context.py index aa2aff5fbd02f..fb50d224c5d29 100644 --- a/server/tests-py/context.py +++ b/server/tests-py/context.py @@ -351,6 +351,9 @@ def get_error_queue_size(self): sz = sz + 1 return sz + def is_queue_empty(self): + return self.resp_queue.empty + def teardown(self): self.evt_trggr_httpd.shutdown() self.evt_trggr_httpd.server_close() diff --git a/server/tests-py/requirements.txt b/server/tests-py/requirements.txt index 7f4e0fdd245a7..fc40e50583e06 100644 --- a/server/tests-py/requirements.txt +++ b/server/tests-py/requirements.txt @@ -5,6 +5,7 @@ attrs==19.3.0 certifi==2019.9.11 cffi==1.13.2 chardet==3.0.4 +croniter==0.3.31 cryptography==2.8 execnet==1.7.1 graphene==2.1.8 diff --git a/server/tests-py/test_scheduled_triggers.py 
b/server/tests-py/test_scheduled_triggers.py new file mode 100644 index 0000000000000..61d669a52ca96 --- /dev/null +++ b/server/tests-py/test_scheduled_triggers.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python3 + +import pytest +from datetime import datetime,timedelta +from croniter import croniter +from validate import validate_event_webhook,validate_event_headers +from queue import Empty +import time + +def stringify_datetime(dt): + return dt.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + +def get_events_of_scheduled_trigger(hge_ctx,trigger_name): + events_count_sql = ''' + select count(*) from hdb_catalog.hdb_scheduled_events where name = '{}' + '''.format(trigger_name) + q = { + "type":"run_sql", + "args":{ + "sql":events_count_sql + } + } + return hge_ctx.v1q(q) + +class TestScheduledTriggerCron(object): + + cron_trigger_name = "a_scheduled_trigger" + webhook_payload = {"foo":"baz"} + webhook_path = "/hello" + url = '/v1/query' + + def test_create_cron_schedule_triggers(self,hge_ctx): + # setting the time zone to 'UTC' because everything + # (utcnow,now,cronschedule) will all be based on UTC. + q = { + "type":"run_sql", + "args":{ + "sql":"set time zone 'UTC'" + } + } + st,resp = hge_ctx.v1q(q) + assert st == 200,resp + # setting the test to be after 30 mins, to make sure that + # any of the events are not triggered. 
+ min_after_30_mins = (datetime.utcnow() + timedelta(minutes=30)).minute + TestScheduledTriggerCron.cron_schedule = "{} * * * *".format(min_after_30_mins) + + cron_st_api_query = { + "type":"create_scheduled_trigger", + "args":{ + "name":self.cron_trigger_name, + "webhook":"http://127.0.0.1:5594" + "/foo", + "schedule":{ + "type":"cron", + "value":self.cron_schedule + }, + "headers":[ + { + "name":"foo", + "value":"baz" + } + ], + "payload":self.webhook_payload + } + } + headers = {} + if hge_ctx.hge_key is not None: + headers['X-Hasura-Admin-Secret'] = hge_ctx.hge_key + cron_st_code,cron_st_resp,_ = hge_ctx.anyq(self.url,cron_st_api_query,headers) + TestScheduledTriggerCron.init_time = datetime.utcnow() # the cron events will be generated based on the current time, they will not be exactly the same though(the server now and now here) + assert cron_st_code == 200 + assert cron_st_resp['message'] == 'success' + + def test_check_generated_cron_scheduled_events(self,hge_ctx): + future_schedule_timestamps = [] + iter = croniter(self.cron_schedule,self.init_time) + for i in range(100): + future_schedule_timestamps.append(iter.next(datetime)) + sql = ''' + select scheduled_time from hdb_catalog.hdb_scheduled_events where + name = '{}' order by scheduled_time asc; + ''' + q = { + "type":"run_sql", + "args":{ + "sql":sql.format(self.cron_trigger_name) + } + } + st,resp = hge_ctx.v1q(q) + assert st == 200 + ts_resp = resp['result'][1:] + assert len(ts_resp) == 100 # 100 events are generated in a cron ST + scheduled_events_ts = [] + for ts in ts_resp: + datetime_ts = datetime.strptime(ts[0],"%Y-%m-%d %H:%M:%S") + scheduled_events_ts.append(datetime_ts) + assert future_schedule_timestamps == scheduled_events_ts + + def test_delete_cron_scheduled_trigger(self,hge_ctx): + q = { + "type":"delete_scheduled_trigger", + "args":{ + "name":self.cron_trigger_name + } + } + st,resp = hge_ctx.v1q(q) + assert st == 200,resp + +class ScheduledEventNotFound(Exception): + pass + 
+@pytest.mark.usefixtures("scheduled_triggers_evts_webhook") +class TestScheduledTriggerAdhoc(object): + + adhoc_trigger_name = "adhoc_trigger" + webhook_path = "/hello" + # maximum wait time is retries * interval_in_secs = 60 secs + webhook_payload = {"foo":"baz"} + url = "/v1/query" + + @classmethod + def dir(cls): + return 'queries/scheduled_triggers' + + def poll_until_scheduled_event_found(self,scheduled_triggers_evts_webhook,retries=12,interval_in_secs=5.0): + while (retries > 0): + try: + ev_full = scheduled_triggers_evts_webhook.get_event(3) + return ev_full + except Empty: + retries = retries - 1 + time.sleep(interval_in_secs) + raise ScheduledEventNotFound # retries exhausted + + def test_create_adhoc_scheduled_trigger(self,hge_ctx,scheduled_triggers_evts_webhook): + q = { + "type":"run_sql", + "args":{ + "sql":"set time zone 'UTC'" + } + } + st,resp = hge_ctx.v1q(q) + current_time = datetime.utcnow() + current_time_str = stringify_datetime(current_time) + adhoc_st_api_query = { + "type":"create_scheduled_trigger", + "args":{ + "name":self.adhoc_trigger_name, + "webhook":"http://127.0.0.1:5594" + self.webhook_path, + "schedule":{ + "type":"adhoc", + "value":current_time_str + }, + "payload":self.webhook_payload, + "headers":[ + { + "name":"header-1", + "value":"header-1-value" + } + ] + } + } + headers = {} + if hge_ctx.hge_key is not None: + headers['X-Hasura-Admin-Secret'] = hge_ctx.hge_key + adhoc_st_code,adhoc_st_resp,_ = hge_ctx.anyq(self.url,adhoc_st_api_query,headers) + assert adhoc_st_resp['message'] == 'success' + assert adhoc_st_code == 200 + + def test_check_adhoc_generated_event(self,hge_ctx,scheduled_triggers_evts_webhook): + adhoc_event_st,adhoc_event_resp = get_events_of_scheduled_trigger(hge_ctx,self.adhoc_trigger_name) + assert int(adhoc_event_resp['result'][1][0]) == 1 # An adhoc ST should create exactly one schedule event + + def test_check_adhoc_webhook_event(self,hge_ctx,scheduled_triggers_evts_webhook): + ev_full = 
self.poll_until_scheduled_event_found(scheduled_triggers_evts_webhook) + validate_event_webhook(ev_full['path'],'/hello') + validate_event_headers(ev_full['headers'],{"header-1":"header-1-value"}) + assert ev_full['body'] == self.webhook_payload + time.sleep(1.0) # sleep for 1s more to check if any other events were also fired + + assert scheduled_triggers_evts_webhook.is_queue_empty() + + def test_delete_adhoc_scheduled_trigger(self,hge_ctx,scheduled_triggers_evts_webhook): + q = { + "type":"delete_scheduled_trigger", + "args":{ + "name":self.adhoc_trigger_name + } + } + st,resp = hge_ctx.v1q(q) + assert st == 200,resp From 5ea32c8ec7f7c2029ed60ec80532f756f6735aed Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 25 Feb 2020 15:23:24 +0530 Subject: [PATCH 058/195] rm unneeded file --- server/src-lib/Hasura/RQL/Types/Helpers.hs | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 server/src-lib/Hasura/RQL/Types/Helpers.hs diff --git a/server/src-lib/Hasura/RQL/Types/Helpers.hs b/server/src-lib/Hasura/RQL/Types/Helpers.hs deleted file mode 100644 index e3dd28010a460..0000000000000 --- a/server/src-lib/Hasura/RQL/Types/Helpers.hs +++ /dev/null @@ -1,8 +0,0 @@ -module Hasura.RQL.Types.Helpers where - -import Hasura.EncJSON - -successMsg :: EncJSON -successMsg = "{\"message\":\"success\"}" - - From cdf5641915e4b9f70ae78e7b47116c3644075c7a Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 25 Feb 2020 15:30:47 +0530 Subject: [PATCH 059/195] fix export of headers in scheduled triggers metadata --- server/src-lib/Hasura/RQL/DDL/Metadata.hs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata.hs b/server/src-lib/Hasura/RQL/DDL/Metadata.hs index 49bff88635de6..0997d72f6ba82 100644 --- a/server/src-lib/Hasura/RQL/DDL/Metadata.hs +++ b/server/src-lib/Hasura/RQL/DDL/Metadata.hs @@ -407,19 +407,19 @@ fetchMetadata = do fetchScheduledTriggers = map uncurrySchedule <$> Q.listQE 
defaultTxErrorHandler [Q.sql| - SELECT st.name, st.webhook_conf, st.schedule_conf, st.payload, st.retry_conf + SELECT st.name, st.webhook_conf, st.schedule_conf, st.payload, st.retry_conf, st.header_conf FROM hdb_catalog.hdb_scheduled_trigger st WHERE include_in_metadata |] () False where - uncurrySchedule (n, wc, sc, p, rc) = + uncurrySchedule (n, wc, sc, p, rc, hc) = CreateScheduledTrigger { stName = n, stWebhook = Q.getAltJ wc, stSchedule = Q.getAltJ sc, stPayload = Q.getAltJ <$> p, stRetryConf = Q.getAltJ rc, - stHeaders = [] + stHeaders = Q.getAltJ hc } fetchCustomTypes :: Q.TxE QErr CustomTypes From d340f6c46a7c912e6ce25e616ecc4da4d9e1b5c9 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Tue, 25 Feb 2020 16:04:20 +0530 Subject: [PATCH 060/195] change the data type of scheduled_time to TIMESTAMPTZ (#80) --- server/src-rsr/initialise.sql | 4 ++-- server/src-rsr/migrations/32_to_33.sql | 4 ++-- server/tests-py/test_scheduled_triggers.py | 15 ++++----------- 3 files changed, 8 insertions(+), 15 deletions(-) diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index b2a6372f31f3b..f8b8a5069f0e9 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -734,7 +734,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events ( id TEXT DEFAULT gen_random_uuid() UNIQUE, name TEXT, - scheduled_time TIMESTAMP NOT NULL, + scheduled_time TIMESTAMPTZ NOT NULL, additional_payload JSON, cancelled BOOLEAN NOT NULL DEFAULT FALSE, delivered BOOLEAN NOT NULL DEFAULT FALSE, @@ -743,7 +743,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events created_at TIMESTAMP DEFAULT NOW(), locked BOOLEAN NOT NULL DEFAULT FALSE, dead BOOLEAN NOT NULL DEFAULT FALSE, - next_retry_at TIMESTAMP, + next_retry_at TIMESTAMPTZ, PRIMARY KEY (name, scheduled_time), FOREIGN KEY (name) REFERENCES hdb_catalog.hdb_scheduled_trigger(name) diff --git a/server/src-rsr/migrations/32_to_33.sql b/server/src-rsr/migrations/32_to_33.sql index 7001e74d61ce6..78ed54491ee65 
100644 --- a/server/src-rsr/migrations/32_to_33.sql +++ b/server/src-rsr/migrations/32_to_33.sql @@ -13,7 +13,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events ( id TEXT DEFAULT gen_random_uuid() UNIQUE, name TEXT, - scheduled_time TIMESTAMP NOT NULL, + scheduled_time TIMESTAMPTZ NOT NULL, cancelled BOOLEAN NOT NULL DEFAULT FALSE, additional_payload JSON, delivered BOOLEAN NOT NULL DEFAULT FALSE, @@ -22,7 +22,7 @@ CREATE TABLE hdb_catalog.hdb_scheduled_events created_at TIMESTAMP DEFAULT NOW(), locked BOOLEAN NOT NULL DEFAULT FALSE, dead BOOLEAN NOT NULL DEFAULT FALSE, - next_retry_at TIMESTAMP, + next_retry_at TIMESTAMPTZ, PRIMARY KEY (name, scheduled_time), FOREIGN KEY (name) REFERENCES hdb_catalog.hdb_scheduled_trigger(name) diff --git a/server/tests-py/test_scheduled_triggers.py b/server/tests-py/test_scheduled_triggers.py index 61d669a52ca96..72efe11ec8005 100644 --- a/server/tests-py/test_scheduled_triggers.py +++ b/server/tests-py/test_scheduled_triggers.py @@ -30,16 +30,6 @@ class TestScheduledTriggerCron(object): url = '/v1/query' def test_create_cron_schedule_triggers(self,hge_ctx): - # setting the time zone to 'UTC' because everything - # (utcnow,now,cronschedule) will all be based on UTC. - q = { - "type":"run_sql", - "args":{ - "sql":"set time zone 'UTC'" - } - } - st,resp = hge_ctx.v1q(q) - assert st == 200,resp # setting the test to be after 30 mins, to make sure that # any of the events are not triggered. 
min_after_30_mins = (datetime.utcnow() + timedelta(minutes=30)).minute @@ -76,8 +66,11 @@ def test_check_generated_cron_scheduled_events(self,hge_ctx): iter = croniter(self.cron_schedule,self.init_time) for i in range(100): future_schedule_timestamps.append(iter.next(datetime)) + # Get timestamps in UTC from the db to compare it with + # the croniter generated timestamps sql = ''' - select scheduled_time from hdb_catalog.hdb_scheduled_events where + select timezone('utc',scheduled_time) as scheduled_time + from hdb_catalog.hdb_scheduled_events where name = '{}' order by scheduled_time asc; ''' q = { From 888fa599c709f4aab0e8bbf0ee0776a0dda4cc59 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 25 Feb 2020 18:31:04 +0530 Subject: [PATCH 061/195] no-op --- .../Hasura/Eventing/ScheduledTrigger.hs | 49 +++++++------------ 1 file changed, 17 insertions(+), 32 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 8b0cfe22a27e2..dcddfa85b7518 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -215,12 +215,6 @@ generateScheduleTimes from n cron = take n $ go from where go = unfoldr (fmap dup . nextMatch cron) --- | Generates events @(from, till]@ according to 'CronSchedule' --- generateScheduleTimesFrom :: UTCTime -> UTCTime -> CronSchedule -> [UTCTime] --- generateScheduleTimesFrom from till cron = takeWhile (<= till) $ go from --- where --- go = unfoldr (fmap dup . 
nextMatch cron) - processScheduledQueue :: HasVersion => L.Logger L.Hasura @@ -237,20 +231,16 @@ processScheduledQueue logger logEnv httpMgr pgpool getSC = Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadWrite) getScheduledEvents case scheduledEventsE of Right partialEvents -> - sequence_ $ - flip map partialEvents $ \(ScheduledEventPartial id' name st payload tries)-> do - let sti' = Map.lookup name scheduledTriggersInfo - case sti' of + for_ partialEvents $ \(ScheduledEventPartial id' name st payload tries)-> do + case Map.lookup name scheduledTriggersInfo of Nothing -> L.unLogger logger $ ScheduledTriggerInternalErr $ err500 Unexpected "could not find scheduled trigger in cache" - Just sti -> do - let webhook = wciCachedValue $ stiWebhookInfo sti - payload' = fromMaybe (fromMaybe J.Null $ stiPayload sti) payload -- override if neccessary - retryConf = stiRetryConf sti - se = ScheduledEventFull id' name st tries webhook payload' retryConf - runReaderT (processScheduledEvent logEnv pgpool sti se) (logger, httpMgr) - Left err -> L.unLogger logger $ ScheduledTriggerInternalErr $ - err500 Unexpected $ "could not fetch scheduled events: " <> (T.pack $ show err) + Just stInfo@ScheduledTriggerInfo{..} -> do + let webhook = wciCachedValue stiWebhookInfo + payload' = fromMaybe (fromMaybe J.Null stiPayload) payload -- override if neccessary + scheduledEvent = ScheduledEventFull id' name st tries webhook payload' stiRetryConf + runReaderT (processScheduledEvent logEnv pgpool stInfo scheduledEvent) (logger, httpMgr) + Left err -> L.unLogger logger $ ScheduledTriggerInternalErr err threadDelay oneMinute processScheduledEvent :: @@ -268,26 +258,23 @@ processScheduledEvent :: processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEventFull {..} = do currentTime <- liftIO getCurrentTime if diffUTCTime currentTime sefScheduledTime > rcstTolerance stiRetryConf - then processDead' + then + processDead pgpool se >>= \case + Left err -> logQErr err + Right _ -> pure () 
else do let timeoutSeconds = round $ rcstTimeoutSec stiRetryConf httpTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) - headers = map encodeHeader stiHeaders - headers' = addDefaultHeaders headers + headers = addDefaultHeaders $ map encodeHeader stiHeaders extraLogCtx = ExtraLogContext sefId - res <- runExceptT $ tryWebhook headers' httpTimeout sefPayload (T.unpack sefWebhook) + res <- runExceptT $ tryWebhook headers httpTimeout sefPayload (T.unpack sefWebhook) logHTTPForST res (Just extraLogCtx) - let decodedHeaders = map (decodeHeader logEnv stiHeaders) headers' + let decodedHeaders = map (decodeHeader logEnv stiHeaders) headers finally <- either (processError pgpool se decodedHeaders) (processSuccess pgpool se decodedHeaders) res either logQErr return finally - where - processDead' = - processDead pgpool se >>= \case - Left err -> logQErr err - Right _ -> pure () processError :: (MonadIO m) @@ -317,10 +304,9 @@ processError pgpool se decodedHeaders err = do retryOrMarkError :: ScheduledEventFull -> HTTPErr a -> Q.TxE QErr () retryOrMarkError se@ScheduledEventFull {..} err = do let mRetryHeader = getRetryAfterHeaderFromHTTPErr err - mRetryHeaderSeconds = join $ parseRetryHeaderValue <$> mRetryHeader + mRetryHeaderSeconds = parseRetryHeaderValue =<< mRetryHeader triesExhausted = sefTries >= rcstNumRetries sefRetryConf noRetryHeader = isNothing mRetryHeaderSeconds - -- current_try = tries + 1 , allowed_total_tries = rcNumRetries retryConf + 1 if triesExhausted && noRetryHeader then do markError @@ -417,7 +403,7 @@ insertInvocation invo = do getScheduledEvents :: Q.TxE QErr [ScheduledEventPartial] getScheduledEvents = do - partialSchedules <- map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| + map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.hdb_scheduled_events SET locked = 't' WHERE id IN ( SELECT t.id @@ -436,5 +422,4 @@ getScheduledEvents = do ) RETURNING id, name, scheduled_time, additional_payload, tries |] 
() True - pure $ partialSchedules where uncurryEvent (i, n, st, p, tries) = ScheduledEventPartial i n st (Q.getAltJ <$> p) tries From b5ac6834625432e1e5c569ec9dd448081f884fb4 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Thu, 27 Feb 2020 18:37:22 +0530 Subject: [PATCH 062/195] track hdb_catalog tables for scheduled triggers --- server/src-lib/Hasura/Server/Migrate.hs | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/server/src-lib/Hasura/Server/Migrate.hs b/server/src-lib/Hasura/Server/Migrate.hs index 73036d0ac7e06..537a0cea3fa50 100644 --- a/server/src-lib/Hasura/Server/Migrate.hs +++ b/server/src-lib/Hasura/Server/Migrate.hs @@ -416,6 +416,16 @@ recreateSystemMetadata = do , arrayRel $$(nonEmptyText "permissions") $ manualConfig "hdb_catalog" "hdb_permission_agg" [("role_name", "role_name")] ] + , table "hdb_catalog" "hdb_scheduled_trigger" + [ arrayRel $$(nonEmptyText "scheduled_events") $ RUFKeyOn $ + ArrRelUsingFKeyOn (QualifiedObject "hdb_catalog" "hdb_scheduled_events") "name" + ] + , table "hdb_catalog" "hdb_scheduled_events" + [ objectRel $$(nonEmptyText "scheduled_trigger") $ RUFKeyOn "name" + , arrayRel $$(nonEmptyText "logs") $ RUFKeyOn $ + ArrRelUsingFKeyOn (QualifiedObject "hdb_catalog" "hdb_scheduled_event_invocation_logs") "event_id" ] + , table "hdb_catalog" "hdb_scheduled_event_invocation_logs" + [ objectRel $$(nonEmptyText "scheduled_event") $ RUFKeyOn "event_id" ] ] tableNameMapping = From c8f2e1cb168e40360635f8f04ec3abc2b75c1144 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan Date: Thu, 27 Feb 2020 18:42:26 +0530 Subject: [PATCH 063/195] doc improvements from review Co-Authored-By: Marion Schleifer --- .../schema-metadata-api/scheduled-triggers.rst | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst index 33a722dba0138..a819bc38832b3 100644 
--- a/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/scheduled-triggers.rst @@ -60,7 +60,7 @@ Args syntax * - webhook - true - Text | UrlFromEnv_ - - Url of webhook or environment variable which has the url + - URL of webhook or environment variable which has the URL * - schedule - true - ScheduleConf_ @@ -105,7 +105,7 @@ Args syntax * - webhook - true - Text | UrlFromEnv_ - - Url of webhook or environment variable which has the url + - URL of webhook or environment variable which has the URL * - schedule - true - ScheduleConf_ @@ -165,7 +165,7 @@ Args syntax create_scheduled_event ---------------------- -``create_scheduled_event`` is used to create a new scheduled event with given timestamp and payload +``create_scheduled_event`` is used to create a new scheduled event with a given timestamp and optional payload. .. code-block:: http @@ -242,7 +242,7 @@ Args syntax * - event_id - true - UUID - - Id of the scheduled event + - ID of the scheduled event .. _track_scheduled_trigger: @@ -345,7 +345,7 @@ UrlFromEnv * - from_env - true - String - - Name of the environment variable which has the url + - Name of the environment variable which has the URL .. _ScheduleConf: @@ -439,4 +439,3 @@ RetryConfST - Integer - Number of minutes between scheduled time and actual delivery time that is acceptable. If the time difference is more than this, then the event is dropped. 
Default: 360 (6 hours) - From 4f4e0991ca7988f155fa0b870356f904428173f3 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 3 Mar 2020 12:14:17 +0530 Subject: [PATCH 064/195] switch `Either a b` to `MonadError` --- .../Hasura/Eventing/ScheduledTrigger.hs | 61 +++++++++---------- 1 file changed, 29 insertions(+), 32 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index dcddfa85b7518..aaf6c6966acac 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -68,11 +68,6 @@ instance L.ToEngineLog ScheduledTriggerInternalErr L.Hasura where toEngineLog (ScheduledTriggerInternalErr qerr) = (L.LevelError, L.scheduledTriggerLogType, J.toJSON qerr) -logQErr :: ( MonadReader r m, Has (L.Logger L.Hasura) r, MonadIO m) => QErr -> m () -logQErr err = do - logger :: L.Logger L.Hasura <- asks getter - L.unLogger logger $ ScheduledTriggerInternalErr err - scheduledEventsTable :: QualifiedTable scheduledEventsTable = QualifiedObject @@ -233,15 +228,19 @@ processScheduledQueue logger logEnv httpMgr pgpool getSC = Right partialEvents -> for_ partialEvents $ \(ScheduledEventPartial id' name st payload tries)-> do case Map.lookup name scheduledTriggersInfo of - Nothing -> L.unLogger logger $ ScheduledTriggerInternalErr $ + Nothing -> logInternalError $ err500 Unexpected "could not find scheduled trigger in cache" Just stInfo@ScheduledTriggerInfo{..} -> do let webhook = wciCachedValue stiWebhookInfo payload' = fromMaybe (fromMaybe J.Null stiPayload) payload -- override if neccessary scheduledEvent = ScheduledEventFull id' name st tries webhook payload' stiRetryConf - runReaderT (processScheduledEvent logEnv pgpool stInfo scheduledEvent) (logger, httpMgr) - Left err -> L.unLogger logger $ ScheduledTriggerInternalErr err + finally <- runExceptT $ + runReaderT (processScheduledEvent logEnv pgpool stInfo scheduledEvent) (logger, httpMgr) + 
either logInternalError pure finally + Left err -> logInternalError err threadDelay oneMinute + where + logInternalError err = L.unLogger logger $ ScheduledTriggerInternalErr err processScheduledEvent :: ( MonadReader r m @@ -249,6 +248,7 @@ processScheduledEvent :: , Has (L.Logger L.Hasura) r , HasVersion , MonadIO m + , MonadError QErr m ) => LogEnvHeaders -> Q.PGPool @@ -258,10 +258,7 @@ processScheduledEvent :: processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEventFull {..} = do currentTime <- liftIO getCurrentTime if diffUTCTime currentTime sefScheduledTime > rcstTolerance stiRetryConf - then - processDead pgpool se >>= \case - Left err -> logQErr err - Right _ -> pure () + then processDead pgpool se else do let timeoutSeconds = round $ rcstTimeoutSec stiRetryConf httpTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 1000000) @@ -270,15 +267,14 @@ processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEventF res <- runExceptT $ tryWebhook headers httpTimeout sefPayload (T.unpack sefWebhook) logHTTPForST res (Just extraLogCtx) let decodedHeaders = map (decodeHeader logEnv stiHeaders) headers - finally <- either - (processError pgpool se decodedHeaders) - (processSuccess pgpool se decodedHeaders) - res - either logQErr return finally + either + (processError pgpool se decodedHeaders) + (processSuccess pgpool se decodedHeaders) + res processError - :: (MonadIO m) - => Q.PGPool -> ScheduledEventFull -> [HeaderConf] -> HTTPErr a -> m (Either QErr ()) + :: (MonadIO m, MonadError QErr m) + => Q.PGPool -> ScheduledEventFull -> [HeaderConf] -> HTTPErr a -> m () processError pgpool se decodedHeaders err = do let invocation = case err of HClient excp -> do @@ -295,11 +291,10 @@ processError pgpool se decodedHeaders err = do HOther detail -> do let errMsg = (TBS.fromLBS $ J.encode detail) mkInvocation se 500 decodedHeaders errMsg [] - liftIO $ - runExceptT $ + liftExceptTIO $ Q.runTx pgpool (Q.RepeatableRead, Just 
Q.ReadWrite) $ do - insertInvocation invocation - retryOrMarkError se err + insertInvocation invocation + retryOrMarkError se err retryOrMarkError :: ScheduledEventFull -> HTTPErr a -> Q.TxE QErr () retryOrMarkError se@ScheduledEventFull {..} err = do @@ -327,18 +322,17 @@ retryOrMarkError se@ScheduledEventFull {..} err = do |] (Identity sefId) True processSuccess - :: (MonadIO m) - => Q.PGPool -> ScheduledEventFull -> [HeaderConf] -> HTTPResp a -> m (Either QErr ()) + :: (MonadIO m, MonadError QErr m) + => Q.PGPool -> ScheduledEventFull -> [HeaderConf] -> HTTPResp a -> m () processSuccess pgpool se decodedHeaders resp = do let respBody = hrsBody resp respHeaders = hrsHeaders resp respStatus = hrsStatus resp invocation = mkInvocation se respStatus decodedHeaders respBody respHeaders - liftIO $ - runExceptT $ + liftExceptTIO $ Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) $ do - insertInvocation invocation - markSuccess + insertInvocation invocation + markSuccess where markSuccess = Q.unitQE @@ -349,10 +343,10 @@ processSuccess pgpool se decodedHeaders resp = do WHERE id = $1 |] (Identity $ sefId se) True -processDead :: (MonadIO m) => Q.PGPool -> ScheduledEventFull -> m (Either QErr ()) +processDead :: (MonadIO m, MonadError QErr m) => Q.PGPool -> ScheduledEventFull -> m () processDead pgpool se = - liftIO $ - runExceptT $ Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) markDead + liftExceptTIO $ + Q.runTx pgpool (Q.RepeatableRead, Just Q.ReadWrite) markDead where markDead = Q.unitQE @@ -423,3 +417,6 @@ getScheduledEvents = do RETURNING id, name, scheduled_time, additional_payload, tries |] () True where uncurryEvent (i, n, st, p, tries) = ScheduledEventPartial i n st (Q.getAltJ <$> p) tries + +liftExceptTIO :: (MonadError e m, MonadIO m) => ExceptT e IO a -> m a +liftExceptTIO m = liftEither =<< liftIO (runExceptT m) From 31fb11d385c896ad5bb3c1654f927bc0edbad114 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Tue, 3 Mar 2020 19:25:42 +0530 
Subject: [PATCH 065/195] incorporate NominalDiffTime clock units in `Data.Time.Clock.Units` --- server/cabal.project.freeze | 58 +-- server/graphql-engine.cabal | 5 +- server/src-lib/Data/Time/Clock/Units.hs | 350 +++++++++++++++--- .../src-lib/Hasura/Eventing/EventTrigger.hs | 3 +- server/src-lib/Hasura/Prelude.hs | 7 +- .../Hasura/RQL/Types/ScheduledTrigger.hs | 6 +- server/src-lib/Hasura/Server/Logging.hs | 4 +- .../Hasura/Server/Telemetry/Counters.hs | 10 +- 8 files changed, 352 insertions(+), 91 deletions(-) diff --git a/server/cabal.project.freeze b/server/cabal.project.freeze index 2f25cc90728c6..4e05cf0f6e587 100644 --- a/server/cabal.project.freeze +++ b/server/cabal.project.freeze @@ -1,8 +1,9 @@ -constraints: any.Cabal ==2.4.0.1, +constraints: any.Cabal ==3.0.0.0, + Cabal -bundled-binary-generic, any.Glob ==0.10.0, any.HUnit ==1.6.0.0, any.Only ==0.1, - any.QuickCheck ==2.12.6.1, + any.QuickCheck ==2.13.2, QuickCheck +templatehaskell, any.RSA ==2.4.1, any.SHA ==1.6.4.4, @@ -25,6 +26,7 @@ constraints: any.Cabal ==2.4.0.1, any.asn1-encoding ==0.9.6, any.asn1-parse ==0.9.5, any.asn1-types ==0.3.3, + any.assoc ==1.0.1, any.async ==2.2.2, async -bench, any.attoparsec ==0.13.2.3, @@ -34,8 +36,8 @@ constraints: any.Cabal ==2.4.0.1, any.authenticate-oauth ==1.6.0.1, any.auto-update ==0.1.6, any.base ==4.12.0.0, - any.base-compat ==0.10.5, - any.base-compat-batteries ==0.10.5, + any.base-compat ==0.11.1, + any.base-compat-batteries ==0.11.1, any.base-orphans ==0.8.2, any.base-prelude ==1.3, any.base16-bytestring ==0.1.1.6, @@ -82,7 +84,7 @@ constraints: any.Cabal ==2.4.0.1, contravariant +semigroups +statevar +tagged, any.contravariant-extras ==0.3.5.1, any.cookie ==0.4.5, - any.criterion ==1.5.6.1, + any.criterion ==1.5.6.2, criterion -embed-data-files -fast, any.criterion-measurement ==0.1.2.0, criterion-measurement -fast, @@ -112,7 +114,7 @@ constraints: any.Cabal ==2.4.0.1, any.dense-linear-algebra ==0.1.0.0, any.dependent-map ==0.2.4.0, any.dependent-sum 
==0.4, - any.directory ==1.3.3.0, + any.directory ==1.3.6.0, any.distributive ==0.6.1, distributive +semigroups +tagged, any.dlist ==0.8.0.7, @@ -128,7 +130,7 @@ constraints: any.Cabal ==2.4.0.1, exceptions +transformers-0-4, any.fail ==4.9.0.0, any.fast-logger ==3.0.1, - any.file-embed ==0.0.11.1, + any.file-embed ==0.0.11.2, any.filepath ==1.4.2.1, any.focus ==1.0.1.3, any.foldl ==1.4.6, @@ -139,7 +141,7 @@ constraints: any.Cabal ==2.4.0.1, graphql-engine +developer, any.happy ==1.19.12, happy +small_base, - any.hashable ==1.2.7.0, + any.hashable ==1.3.0.0, hashable -examples +integer-gmp +sse2 -sse41, any.hashtables ==1.2.3.4, hashtables -bounds-checking -debug -detailed-profiling -portable -sse42 +unsafe-tricks, @@ -151,9 +153,9 @@ constraints: any.Cabal ==2.4.0.1, any.hourglass ==0.2.12, any.hsc2hs ==0.68.6, hsc2hs -in-ghc-tree, - any.hspec ==2.7.0, - any.hspec-core ==2.7.0, - any.hspec-discover ==2.7.0, + any.hspec ==2.7.1, + any.hspec-core ==2.7.1, + any.hspec-discover ==2.7.1, any.hspec-expectations ==0.8.2, any.hspec-expectations-lifted ==0.10.0, any.http-api-data ==0.4.1.1, @@ -178,8 +180,7 @@ constraints: any.Cabal ==2.4.0.1, any.js-flot ==0.8.3, any.js-jquery ==3.3.1, any.kan-extensions ==5.2, - any.keys ==3.12.3, - any.lens ==4.17.1, + any.lens ==4.18.1, lens -benchmark-uniplate -dump-splices +inlining -j -old-inline-pragmas -safe +test-doctests +test-hunit +test-properties +test-templates +trustworthy, any.lens-aeson ==1.1, lens-aeson +test-doctests, @@ -214,7 +215,7 @@ constraints: any.Cabal ==2.4.0.1, any.network-byte-order ==0.1.4.0, any.network-info ==0.2.0.10, any.network-ip ==0.3.0.3, - any.network-uri ==2.6.2.0, + any.network-uri ==2.6.3.0, any.old-locale ==1.0.0.7, any.old-time ==1.1.0.3, any.optics-core ==0.2, @@ -226,8 +227,6 @@ constraints: any.Cabal ==2.4.0.1, parsers +attoparsec +binary +parsec, any.pem ==0.2.4, any.placeholders ==0.1, - any.pointed ==5.0.1, - pointed +comonad +containers +kan-extensions +semigroupoids +semigroups +stm 
+tagged +transformers +unordered-containers, any.postgresql-binary ==0.12.2, any.postgresql-libpq ==0.9.4.2, postgresql-libpq -use-pkg-config, @@ -235,14 +234,14 @@ constraints: any.Cabal ==2.4.0.1, any.pretty-show ==1.10, any.prettyprinter ==1.6.1, prettyprinter -buildreadme, - any.primitive ==0.7.0.0, + any.primitive ==0.7.0.1, any.primitive-extras ==0.8, any.primitive-unlifted ==0.1.3.0, - any.process ==1.6.5.0, + any.process ==1.6.8.0, any.profunctors ==5.5.2, any.protolude ==0.2.4, any.psqueues ==0.2.7.2, - any.quickcheck-instances ==0.3.19, + any.quickcheck-instances ==0.3.22, quickcheck-instances -bytestring-builder, any.quickcheck-io ==0.2.0, any.random ==1.1, @@ -254,13 +253,15 @@ constraints: any.Cabal ==2.4.0.1, any.reroute ==0.5.0.0, any.resource-pool ==0.2.3.2, resource-pool -developer, - any.resourcet ==1.2.2, + any.resourcet ==1.2.3, any.retry ==0.8.1.0, retry -lib-werror, any.rts ==1.0, any.safe ==0.3.18, any.scientific ==0.3.6.2, scientific -bytestring-builder -integer-simple, + any.semialign ==1.1, + semialign +semigroupoids, any.semigroupoids ==5.3.4, semigroupoids +comonad +containers +contravariant +distributive +doctests +tagged +unordered-containers, any.semigroups ==0.19.1, @@ -272,7 +273,9 @@ constraints: any.Cabal ==2.4.0.1, any.simple-sendfile ==0.2.30, simple-sendfile +allow-bsd, any.socks ==0.6.1, - any.split ==0.2.3.3, + any.split ==0.2.3.4, + any.splitmix ==0.0.4, + splitmix -optimised-mixer +random, any.statistics ==0.15.2.0, any.stm ==2.5.0.0, any.stm-containers ==1.1.0.4, @@ -297,14 +300,15 @@ constraints: any.Cabal ==2.4.0.1, any.th-abstraction ==0.3.2.0, any.th-lift ==0.8.1, any.th-lift-instances ==0.1.14, - any.these ==0.7.6, - any.time ==1.8.0.2, + any.these ==1.0.1, + these +aeson +assoc +quickcheck +semigroupoids, + any.time ==1.9.3, any.time-compat ==1.9.2.2, time-compat -old-locale, any.time-locale-compat ==0.1.1.5, time-locale-compat -old-locale, any.time-manager ==0.0.0, - any.tls ==1.5.3, + any.tls ==1.5.4, tls +compat 
-hans +network, any.transformers ==0.5.6.2, any.transformers-base ==0.4.5.2, @@ -317,7 +321,7 @@ constraints: any.Cabal ==2.4.0.1, any.unix-compat ==0.5.2, unix-compat -old-time, any.unix-time ==0.4.7, - any.unliftio-core ==0.1.2.0, + any.unliftio-core ==0.2.0.1, any.unordered-containers ==0.2.10.0, unordered-containers -debug, any.uri-encode ==1.5.0.5, @@ -333,15 +337,13 @@ constraints: any.Cabal ==2.4.0.1, vector-algorithms +bench +boundschecks -internalchecks -llvm +properties -unsafechecks, any.vector-binary-instances ==0.2.5.1, any.vector-builder ==0.3.8, - any.vector-instances ==3.4, - vector-instances +hashable, any.vector-th-unbox ==0.2.1.7, any.void ==0.7.3, void -safe, any.wai ==3.2.2.1, any.wai-app-static ==3.1.7.1, wai-app-static -print, - any.wai-extra ==3.0.29, + any.wai-extra ==3.0.29.1, wai-extra -build-example, any.wai-logger ==2.3.6, any.wai-websockets ==3.0.1.2, diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index 6d33af126b419..2f5519a648c62 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -70,7 +70,7 @@ library , http-types , attoparsec , attoparsec-iso8601 >= 1.0 - , time + , time >= 1.9 , scientific , Spock-core , split @@ -93,7 +93,8 @@ library -- `these >=1` is split into several different packages, but our current stack -- resolver has `these <1`; when we upgrade we just need to add an extra -- dependency on `semialign` - , these >=0.7.1 && <0.8 + , these + , semialign -- Encoder related , uuid diff --git a/server/src-lib/Data/Time/Clock/Units.hs b/server/src-lib/Data/Time/Clock/Units.hs index f45eb17e5a05a..f01adf416c8a9 100644 --- a/server/src-lib/Data/Time/Clock/Units.hs +++ b/server/src-lib/Data/Time/Clock/Units.hs @@ -1,7 +1,8 @@ -{-# LANGUAGE AllowAmbiguousTypes #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DerivingVia #-} -{-# LANGUAGE TypeOperators #-} +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DerivingVia #-} +{-# LANGUAGE 
TypeFamilyDependencies #-} +{-# LANGUAGE TypeOperators #-} {-| Types for time intervals of various units. Each newtype wraps 'DiffTime', but they have different 'Num' instances. The intent is to use the record selectors to write literals with @@ -25,21 +26,21 @@ You can also go the other way using the constructors rather than the selectors: 0.5 @ -NOTE: the 'Real' and 'Fractional' instances just essentially add or strip the unit label (as -above), so you can't use 'realToFrac' to convert between the units types here. Instead try +NOTE: the 'Real' and 'Fractional' instances just essentially add or strip the unit label (as +above), so you can't use 'realToFrac' to convert between the units types here. Instead try 'fromUnits' which is less of a foot-gun. The 'Read' instances for these types mirror the behavior of the 'RealFrac' instance wrt numeric literals for convenient serialization (e.g. when working with env vars): @ ->>> read "1.2" :: Milliseconds +>>> read "1.2" :: Milliseconds Milliseconds {milliseconds = 0.0012s} @ Generally, if you need to pass around a duration between functions you should use 'DiffTime' directly. However if storing a duration in a type that will be serialized, e.g. one having -a 'ToJSON' instance, it is better to use one of these explicit wrapper types so that it's +a 'ToJSON' instance, it is better to use one of these explicit wrapper types so that it's obvious what units will be used. -} module Data.Time.Clock.Units ( Days(..) @@ -50,7 +51,8 @@ module Data.Time.Clock.Units , Microseconds(..) , Nanoseconds(..) -- * Converting between units - , Duration(..) + , Duration'(..) + , DurationType(..) 
, fromUnits -- * Reexports -- | We use 'DiffTime' as the standard type for unit-agnostic duration in our @@ -69,6 +71,7 @@ import Prelude import Control.Arrow (first) import Data.Aeson +import Data.Fixed import Data.Hashable import Data.Proxy import Data.Time.Clock @@ -76,49 +79,268 @@ import GHC.TypeLits import Numeric (readFloat) -newtype Seconds = Seconds { seconds :: DiffTime } - -- NOTE: we want Show to give a pastable data structure string, even - -- though Read is custom. - deriving (Duration, Show, Eq, Ord, ToJSON, FromJSON) - deriving (Read, Num, Fractional, Real, Hashable, RealFrac) via (TimeUnit (SecondsP 1)) +data DurationType = Absolute | Calendar + +type family Duration a = r | r -> a where + Duration 'Absolute = DiffTime + Duration 'Calendar = NominalDiffTime + +newtype Seconds t = Seconds { seconds :: Duration t} + +deriving instance Duration' (Seconds 'Absolute) +deriving instance Duration' (Seconds 'Calendar) + +-- NOTE: we want Show to give a pastable data structure string, even +-- though Read is custom. 
+deriving instance Show (Seconds 'Absolute) +deriving instance Show (Seconds 'Calendar) + +deriving instance Eq (Seconds 'Absolute) +deriving instance Eq (Seconds 'Calendar) + +deriving instance Ord (Seconds 'Absolute) +deriving instance Ord (Seconds 'Calendar) + +deriving instance ToJSON (Seconds 'Absolute) +deriving instance ToJSON (Seconds 'Calendar) + +deriving instance FromJSON (Seconds 'Absolute) +deriving instance FromJSON (Seconds 'Calendar) + +deriving via (TimeUnit (SecondsP 1) 'Absolute) instance Read (Seconds 'Absolute) +deriving via (TimeUnit (SecondsP 1) 'Calendar) instance Read (Seconds 'Calendar) + +deriving via (TimeUnit (SecondsP 1) 'Absolute) instance Num (Seconds 'Absolute) +deriving via (TimeUnit (SecondsP 1) 'Calendar) instance Num (Seconds 'Calendar) + +deriving via (TimeUnit (SecondsP 1) 'Absolute) instance Fractional (Seconds 'Absolute) +deriving via (TimeUnit (SecondsP 1) 'Calendar) instance Fractional (Seconds 'Calendar) + +deriving via (TimeUnit (SecondsP 1) 'Absolute) instance Real (Seconds 'Absolute) +deriving via (TimeUnit (SecondsP 1) 'Calendar) instance Real (Seconds 'Calendar) + +deriving via (TimeUnit (SecondsP 1) 'Absolute) instance Hashable (Seconds 'Absolute) +deriving via (TimeUnit (SecondsP 1) 'Calendar) instance Hashable (Seconds 'Calendar) + +deriving via (TimeUnit (SecondsP 1) 'Absolute) instance RealFrac (Seconds 'Absolute) +deriving via (TimeUnit (SecondsP 1) 'Calendar) instance RealFrac (Seconds 'Calendar) + -- TODO if needed: deriving (ToJSON, FromJSON) via (TimeUnit ..) making sure -- to copy Aeson instances (with withBoundedScientific), and e.g. 
-- toJSON (5 :: Minutes) == Number 5 -newtype Days = Days { days :: DiffTime } - deriving (Duration, Show, Eq, Ord) - deriving (Read, Num, Fractional, Real, Hashable, RealFrac) via (TimeUnit (SecondsP 86400)) +newtype Days t = Days { days :: Duration t} + +deriving instance Duration' (Days 'Absolute) +deriving instance Duration' (Days 'Calendar) + +deriving instance Show (Days 'Absolute) +deriving instance Show (Days 'Calendar) + +deriving instance Eq (Days 'Absolute) +deriving instance Eq (Days 'Calendar) + +deriving instance Ord (Days 'Absolute) +deriving instance Ord (Days 'Calendar) + +deriving via (TimeUnit (SecondsP 86400) 'Absolute) instance Read (Days 'Absolute) +deriving via (TimeUnit (SecondsP 86400) 'Calendar) instance Read (Days 'Calendar) + +deriving via (TimeUnit (SecondsP 86400) 'Absolute) instance Num (Days 'Absolute) +deriving via (TimeUnit (SecondsP 86400) 'Calendar) instance Num (Days 'Calendar) + +deriving via (TimeUnit (SecondsP 86400) 'Absolute) instance Fractional (Days 'Absolute) +deriving via (TimeUnit (SecondsP 86400) 'Calendar) instance Fractional (Days 'Calendar) + +deriving via (TimeUnit (SecondsP 86400) 'Absolute) instance Real (Days 'Absolute) +deriving via (TimeUnit (SecondsP 86400) 'Calendar) instance Real (Days 'Calendar) + +deriving via (TimeUnit (SecondsP 86400) 'Absolute) instance Hashable (Days 'Absolute) +deriving via (TimeUnit (SecondsP 86400) 'Calendar) instance Hashable (Days 'Calendar) + +deriving via (TimeUnit (SecondsP 86400) 'Absolute) instance RealFrac (Days 'Absolute) +deriving via (TimeUnit (SecondsP 86400) 'Calendar) instance RealFrac (Days 'Calendar) + +newtype Hours t = Hours { hours :: Duration t} + +deriving instance Duration' (Hours 'Absolute) +deriving instance Duration' (Hours 'Calendar) + +deriving instance Show (Hours 'Absolute) +deriving instance Show (Hours 'Calendar) + +deriving instance Eq (Hours 'Absolute) +deriving instance Eq (Hours 'Calendar) + +deriving instance Ord (Hours 'Absolute) +deriving 
instance Ord (Hours 'Calendar) + +deriving via (TimeUnit (SecondsP 3600) 'Absolute) instance Read (Hours 'Absolute) +deriving via (TimeUnit (SecondsP 3600) 'Calendar) instance Read (Hours 'Calendar) + +deriving via (TimeUnit (SecondsP 3600) 'Absolute) instance Num (Hours 'Absolute) +deriving via (TimeUnit (SecondsP 3600) 'Calendar) instance Num (Hours 'Calendar) + +deriving via (TimeUnit (SecondsP 3600) 'Absolute) instance Fractional (Hours 'Absolute) +deriving via (TimeUnit (SecondsP 3600) 'Calendar) instance Fractional (Hours 'Calendar) + +deriving via (TimeUnit (SecondsP 3600) 'Absolute) instance Real (Hours 'Absolute) +deriving via (TimeUnit (SecondsP 3600) 'Calendar) instance Real (Hours 'Calendar) + +deriving via (TimeUnit (SecondsP 3600) 'Absolute) instance Hashable (Hours 'Absolute) +deriving via (TimeUnit (SecondsP 3600) 'Calendar) instance Hashable (Hours 'Calendar) + +deriving via (TimeUnit (SecondsP 3600) 'Absolute) instance RealFrac (Hours 'Absolute) +deriving via (TimeUnit (SecondsP 3600) 'Calendar) instance RealFrac (Hours 'Calendar) + +newtype Minutes t = Minutes { minutes :: Duration t} + +deriving instance Duration' (Minutes 'Absolute) +deriving instance Duration' (Minutes 'Calendar) + +deriving instance Show (Minutes 'Absolute) +deriving instance Show (Minutes 'Calendar) + +deriving instance Eq (Minutes 'Absolute) +deriving instance Eq (Minutes 'Calendar) + +deriving instance Ord (Minutes 'Absolute) +deriving instance Ord (Minutes 'Calendar) + +deriving via (TimeUnit (SecondsP 60) 'Absolute) instance Read (Minutes 'Absolute) +deriving via (TimeUnit (SecondsP 60) 'Calendar) instance Read (Minutes 'Calendar) + +deriving via (TimeUnit (SecondsP 60) 'Absolute) instance Num (Minutes 'Absolute) +deriving via (TimeUnit (SecondsP 60) 'Calendar) instance Num (Minutes 'Calendar) + +deriving via (TimeUnit (SecondsP 60) 'Absolute) instance Fractional (Minutes 'Absolute) +deriving via (TimeUnit (SecondsP 60) 'Calendar) instance Fractional (Minutes 'Calendar) 
+ +deriving via (TimeUnit (SecondsP 60) 'Absolute) instance Real (Minutes 'Absolute) +deriving via (TimeUnit (SecondsP 60) 'Calendar) instance Real (Minutes 'Calendar) + +deriving via (TimeUnit (SecondsP 60) 'Absolute) instance Hashable (Minutes 'Absolute) +deriving via (TimeUnit (SecondsP 60) 'Calendar) instance Hashable (Minutes 'Calendar) + +deriving via (TimeUnit (SecondsP 60) 'Absolute) instance RealFrac (Minutes 'Absolute) +deriving via (TimeUnit (SecondsP 60) 'Calendar) instance RealFrac (Minutes 'Calendar) + +newtype Milliseconds t = Milliseconds { milliseconds :: Duration t} + +deriving instance Duration' (Milliseconds 'Absolute) +deriving instance Duration' (Milliseconds 'Calendar) + +deriving instance Show (Milliseconds 'Absolute) +deriving instance Show (Milliseconds 'Calendar) -newtype Hours = Hours { hours :: DiffTime } - deriving (Duration, Show, Eq, Ord) - deriving (Read, Num, Fractional, Real, Hashable, RealFrac) via (TimeUnit (SecondsP 3600)) +deriving instance Eq (Milliseconds 'Absolute) +deriving instance Eq (Milliseconds 'Calendar) -newtype Minutes = Minutes { minutes :: DiffTime } - deriving (Duration, Show, Eq, Ord) - deriving (Read, Num, Fractional, Real, Hashable, RealFrac) via (TimeUnit (SecondsP 60)) +deriving instance Ord (Milliseconds 'Absolute) +deriving instance Ord (Milliseconds 'Calendar) -newtype Milliseconds = Milliseconds { milliseconds :: DiffTime } - deriving (Duration, Show, Eq, Ord) - deriving (Read, Num, Fractional, Real, Hashable, RealFrac) via (TimeUnit 1000000000) +deriving via (TimeUnit 1000000000 'Absolute) instance Read (Milliseconds 'Absolute) +deriving via (TimeUnit 1000000000 'Calendar) instance Read (Milliseconds 'Calendar) -newtype Microseconds = Microseconds { microseconds :: DiffTime } - deriving (Duration, Show, Eq, Ord) - deriving (Read, Num, Fractional, Real, Hashable, RealFrac) via (TimeUnit 1000000) +deriving via (TimeUnit 1000000000 'Absolute) instance Num (Milliseconds 'Absolute) +deriving via (TimeUnit 
1000000000 'Calendar) instance Num (Milliseconds 'Calendar) -newtype Nanoseconds = Nanoseconds { nanoseconds :: DiffTime } - deriving (Duration, Show, Eq, Ord) - deriving (Read, Num, Fractional, Real, Hashable, RealFrac) via (TimeUnit 1000) +deriving via (TimeUnit 1000000000 'Absolute) instance Fractional (Milliseconds 'Absolute) +deriving via (TimeUnit 1000000000 'Calendar) instance Fractional (Milliseconds 'Calendar) + +deriving via (TimeUnit 1000000000 'Absolute) instance Real (Milliseconds 'Absolute) +deriving via (TimeUnit 1000000000 'Calendar) instance Real (Milliseconds 'Calendar) + +deriving via (TimeUnit 1000000000 'Absolute) instance Hashable (Milliseconds 'Absolute) +deriving via (TimeUnit 1000000000 'Calendar) instance Hashable (Milliseconds 'Calendar) + +deriving via (TimeUnit 1000000000 'Absolute) instance RealFrac (Milliseconds 'Absolute) +deriving via (TimeUnit 1000000000 'Calendar) instance RealFrac (Milliseconds 'Calendar) + + +newtype Microseconds t = Microseconds { microseconds :: Duration t} + +deriving instance Duration' (Microseconds 'Absolute) +deriving instance Duration' (Microseconds 'Calendar) + +deriving instance Show (Microseconds 'Absolute) +deriving instance Show (Microseconds 'Calendar) + +deriving instance Eq (Microseconds 'Absolute) +deriving instance Eq (Microseconds 'Calendar) + +deriving instance Ord (Microseconds 'Absolute) +deriving instance Ord (Microseconds 'Calendar) + +deriving via (TimeUnit 1000000 'Absolute) instance Read (Microseconds 'Absolute) +deriving via (TimeUnit 1000000 'Calendar) instance Read (Microseconds 'Calendar) + +deriving via (TimeUnit 1000000 'Absolute) instance Num (Microseconds 'Absolute) +deriving via (TimeUnit 1000000 'Calendar) instance Num (Microseconds 'Calendar) + +deriving via (TimeUnit 1000000 'Absolute) instance Fractional (Microseconds 'Absolute) +deriving via (TimeUnit 1000000 'Calendar) instance Fractional (Microseconds 'Calendar) + +deriving via (TimeUnit 1000000 'Absolute) instance Real 
(Microseconds 'Absolute) +deriving via (TimeUnit 1000000 'Calendar) instance Real (Microseconds 'Calendar) + +deriving via (TimeUnit 1000000 'Absolute) instance Hashable (Microseconds 'Absolute) +deriving via (TimeUnit 1000000 'Calendar) instance Hashable (Microseconds 'Calendar) + +deriving via (TimeUnit 1000000 'Absolute) instance RealFrac (Microseconds 'Absolute) +deriving via (TimeUnit 1000000 'Calendar) instance RealFrac (Microseconds 'Calendar) + + +newtype Nanoseconds t = Nanoseconds { nanoseconds :: Duration t} + +deriving instance Duration' (Nanoseconds 'Absolute) +deriving instance Duration' (Nanoseconds 'Calendar) + +deriving instance Show (Nanoseconds 'Absolute) +deriving instance Show (Nanoseconds 'Calendar) + +deriving instance Eq (Nanoseconds 'Absolute) +deriving instance Eq (Nanoseconds 'Calendar) + +deriving instance Ord (Nanoseconds 'Absolute) +deriving instance Ord (Nanoseconds 'Calendar) + +deriving via (TimeUnit 1000 'Absolute) instance Read (Nanoseconds 'Absolute) +deriving via (TimeUnit 1000 'Calendar) instance Read (Nanoseconds 'Calendar) + +deriving via (TimeUnit 1000 'Absolute) instance Num (Nanoseconds 'Absolute) +deriving via (TimeUnit 1000 'Calendar) instance Num (Nanoseconds 'Calendar) + +deriving via (TimeUnit 1000 'Absolute) instance Fractional (Nanoseconds 'Absolute) +deriving via (TimeUnit 1000 'Calendar) instance Fractional (Nanoseconds 'Calendar) + +deriving via (TimeUnit 1000 'Absolute) instance Real (Nanoseconds 'Absolute) +deriving via (TimeUnit 1000 'Calendar) instance Real (Nanoseconds 'Calendar) + +deriving via (TimeUnit 1000 'Absolute) instance Hashable (Nanoseconds 'Absolute) +deriving via (TimeUnit 1000 'Calendar) instance Hashable (Nanoseconds 'Calendar) + +deriving via (TimeUnit 1000 'Absolute) instance RealFrac (Nanoseconds 'Absolute) +deriving via (TimeUnit 1000 'Calendar) instance RealFrac (Nanoseconds 'Calendar) -- Internal for deriving via -newtype TimeUnit (picosPerUnit :: Nat) = TimeUnit DiffTime - deriving 
(Show, Eq, Ord) +newtype TimeUnit (picosPerUnit :: Nat) t = TimeUnit (Duration t) + +deriving instance Show (TimeUnit picosPerUnit 'Absolute) +deriving instance Show (TimeUnit picosPerUnit 'Calendar) + +deriving instance Eq (TimeUnit picosPerUnit 'Absolute) +deriving instance Eq (TimeUnit picosPerUnit 'Calendar) + +deriving instance Ord (TimeUnit picosPerUnit 'Absolute) +deriving instance Ord (TimeUnit picosPerUnit 'Calendar) type SecondsP n = n GHC.TypeLits.* 1000000000000 natNum :: forall n a. (KnownNat n, Num a) => a natNum = fromInteger $ natVal (Proxy @n) -instance (KnownNat picosPerUnit) => Num (TimeUnit picosPerUnit) where +instance (KnownNat picosPerUnit) => Num (TimeUnit picosPerUnit 'Absolute) where TimeUnit a + TimeUnit b = TimeUnit $ a + b TimeUnit a - TimeUnit b = TimeUnit $ a - b TimeUnit a * TimeUnit b = TimeUnit . picosecondsToDiffTime $ @@ -128,18 +350,18 @@ instance (KnownNat picosPerUnit) => Num (TimeUnit picosPerUnit) where signum (TimeUnit a) = TimeUnit $ signum a fromInteger a = TimeUnit . picosecondsToDiffTime $ a * natNum @picosPerUnit -instance (KnownNat picosPerUnit) => Read (TimeUnit picosPerUnit) where +instance (KnownNat picosPerUnit) => Read (TimeUnit picosPerUnit 'Absolute) where readsPrec _ = map (first fromRational) . readFloat -instance (KnownNat picosPerUnit) => Fractional (TimeUnit picosPerUnit) where +instance (KnownNat picosPerUnit) => Fractional (TimeUnit picosPerUnit 'Absolute) where TimeUnit a / TimeUnit b = TimeUnit . picosecondsToDiffTime $ diffTimeToPicoseconds a * natNum @picosPerUnit `div` diffTimeToPicoseconds b fromRational a = TimeUnit . 
picosecondsToDiffTime $ round (a * natNum @picosPerUnit) -instance (KnownNat picosPerUnit) => Real (TimeUnit picosPerUnit) where +instance (KnownNat picosPerUnit) => Real (TimeUnit picosPerUnit 'Absolute) where toRational (TimeUnit a) = toRational (diffTimeToPicoseconds a) / natNum @picosPerUnit -instance (KnownNat picosPerUnit) => RealFrac (TimeUnit picosPerUnit) where +instance (KnownNat picosPerUnit) => RealFrac (TimeUnit picosPerUnit 'Absolute) where properFraction a = (i, a - fromIntegral i) where i = truncate a truncate = truncate . toRational @@ -148,24 +370,62 @@ instance (KnownNat picosPerUnit) => RealFrac (TimeUnit picosPerUnit) where floor = floor . toRational -- we can ignore unit: -instance Hashable (TimeUnit a) where - hashWithSalt salt (TimeUnit dt) = hashWithSalt salt $ +instance Hashable (TimeUnit a 'Absolute) where + hashWithSalt salt (TimeUnit dt) = hashWithSalt salt $ (realToFrac :: DiffTime -> Double) dt +-- instances for Calendar +instance (KnownNat picosPerUnit) => Num (TimeUnit picosPerUnit 'Calendar) where + TimeUnit a + TimeUnit b = TimeUnit $ a + b + TimeUnit a - TimeUnit b = TimeUnit $ a - b + TimeUnit a * TimeUnit b = TimeUnit . secondsToNominalDiffTime . toFixedPico $ + (toRational $ nominalDiffTimeToSeconds a * nominalDiffTimeToSeconds b) `div'` (natNum @picosPerUnit) + negate (TimeUnit a) = TimeUnit $ negate a + abs (TimeUnit a) = TimeUnit $ abs a + signum (TimeUnit a) = TimeUnit $ signum a + fromInteger a = TimeUnit . secondsToNominalDiffTime . toFixedPico $ a * natNum @picosPerUnit + +instance (KnownNat picosPerUnit) => Read (TimeUnit picosPerUnit 'Calendar) where + readsPrec _ = map (first fromRational) . readFloat + +instance (KnownNat picosPerUnit) => Fractional (TimeUnit picosPerUnit 'Calendar) where + TimeUnit a / TimeUnit b = TimeUnit . secondsToNominalDiffTime . 
toFixedPico $ fromInteger $ + ((toRational $ nominalDiffTimeToSeconds a) * natNum @picosPerUnit) `div'` (toRational $ nominalDiffTimeToSeconds b) + fromRational a = TimeUnit . secondsToNominalDiffTime . toFixedPico $ round (a * natNum @picosPerUnit) + +instance (KnownNat picosPerUnit) => Real (TimeUnit picosPerUnit 'Calendar) where + toRational (TimeUnit a) = toRational (nominalDiffTimeToSeconds a) / natNum @picosPerUnit + +instance (KnownNat picosPerUnit) => RealFrac (TimeUnit picosPerUnit 'Calendar) where + properFraction a = (i, a - fromIntegral i) + where i = truncate a + truncate = truncate . toRational + round = round . toRational + ceiling = ceiling . toRational + floor = floor . toRational + +-- we can ignore unit: +instance Hashable (TimeUnit a 'Calendar) where + hashWithSalt salt (TimeUnit dt) = hashWithSalt salt $ + (realToFrac :: NominalDiffTime -> Double) dt + -- | Duration types isomorphic to 'DiffTime', powering 'fromUnits'. -class Duration d where +class Duration' d where fromDiffTime :: DiffTime -> d toDiffTime :: d -> DiffTime -instance Duration DiffTime where +instance Duration' DiffTime where fromDiffTime = id toDiffTime = id -instance Duration NominalDiffTime where +instance Duration' NominalDiffTime where fromDiffTime = realToFrac toDiffTime = realToFrac -- | Safe conversion between duration units. -fromUnits :: (Duration x, Duration y)=> x -> y +fromUnits :: (Duration' x, Duration' y)=> x -> y fromUnits = fromDiffTime . 
toDiffTime + +toFixedPico :: Integer -> Pico +toFixedPico = fromIntegral diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 9200ac5d76672..793e88aa74dbb 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -96,10 +96,9 @@ data EventEngineCtx defaultMaxEventThreads :: Int defaultMaxEventThreads = 100 -defaultFetchIntervalMilliSec :: Milliseconds +defaultFetchIntervalMilliSec :: (Milliseconds 'Absolute) defaultFetchIntervalMilliSec = 1000 - initEventEngineCtx :: Int -> DiffTime -> STM EventEngineCtx initEventEngineCtx maxT fetchI = do q <- TQ.newTQueue diff --git a/server/src-lib/Hasura/Prelude.hs b/server/src-lib/Hasura/Prelude.hs index 37c55f3e7d981..ebde232beb8bb 100644 --- a/server/src-lib/Hasura/Prelude.hs +++ b/server/src-lib/Hasura/Prelude.hs @@ -28,8 +28,7 @@ import Control.Monad.Identity as M import Control.Monad.Reader as M import Control.Monad.State.Strict as M import Control.Monad.Writer.Strict as M (MonadWriter (..), WriterT (..)) -import Data.Align as M (Align (align, alignWith)) -import Data.Align.Key as M (AlignWithKey (..)) +import Data.Align as M (Semialign (align, alignWith)) import Data.Bool as M (bool) import Data.Data as M (Data (..)) import Data.Either as M (lefts, partitionEithers, rights) @@ -42,7 +41,7 @@ import Data.HashSet as M (HashSet) import Data.List as M (find, findIndex, foldl', group, intercalate, intersect, lookup, sort, sortBy, sortOn, union, unionBy, (\\)) -import Data.List.NonEmpty as M (NonEmpty(..)) +import Data.List.NonEmpty as M (NonEmpty (..)) import Data.Maybe as M (catMaybes, fromMaybe, isJust, isNothing, listToMaybe, mapMaybe, maybeToList) import Data.Ord as M (comparing) @@ -52,13 +51,13 @@ import Data.String as M (IsString) import Data.Text as M (Text) import Data.These as M (These (..), fromThese, mergeThese, mergeTheseWith, these) +import Data.Time.Clock.Units import Data.Traversable as M 
(for) import Data.Word as M (Word64) import GHC.Generics as M (Generic) import Prelude as M hiding (fail, init, lookup) import Test.QuickCheck.Arbitrary.Generic as M import Text.Read as M (readEither, readMaybe) -import Data.Time.Clock.Units import qualified Data.ByteString as B import qualified Data.HashMap.Strict as Map diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 53f229ac0b876..a1b8e87baa8c9 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -12,7 +12,7 @@ module Hasura.RQL.Types.ScheduledTrigger import Data.Time.Clock import Data.Time.Clock.Units -import Data.Time.Format +import Data.Time.Format.ISO8601 import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH @@ -43,7 +43,7 @@ defaultRetryConfST = { rcstNumRetries = 0 , rcstIntervalSec = seconds 10 , rcstTimeoutSec = seconds 60 - , rcstTolerance = 21600 -- 6 hours + , rcstTolerance = hours 6 } data ScheduleType = Cron CronSchedule | AdHoc (Maybe UTCTime) @@ -123,4 +123,4 @@ $(deriveJSON (aesonDrop 2 snakeCase) ''ScheduledEventId) -- The Z may be replaced with a time zone offset of the form +0000 or -08:00, -- where the first two digits are hours, the : is optional and the second two digits (also optional) are minutes. formatTime' :: UTCTime -> T.Text -formatTime'= T.pack . formatTime defaultTimeLocale "%Y-%m-%d %H:%M:%S Z" +formatTime'= T.pack . iso8601Show diff --git a/server/src-lib/Hasura/Server/Logging.hs b/server/src-lib/Hasura/Server/Logging.hs index adab6bf5e1c48..9b9eb942b12c9 100644 --- a/server/src-lib/Hasura/Server/Logging.hs +++ b/server/src-lib/Hasura/Server/Logging.hs @@ -191,9 +191,9 @@ data OperationLog { olRequestId :: !RequestId , olUserVars :: !(Maybe UserVars) , olResponseSize :: !(Maybe Int64) - , olRequestReadTime :: !(Maybe Seconds) + , olRequestReadTime :: !(Maybe (Seconds 'Absolute)) -- ^ Request IO wait time, i.e. 
time spent reading the full request from the socket. - , olQueryExecutionTime :: !(Maybe Seconds) + , olQueryExecutionTime :: !(Maybe (Seconds 'Absolute)) -- ^ Service time, not including request IO wait time. , olQuery :: !(Maybe Value) , olRawQuery :: !(Maybe Text) diff --git a/server/src-lib/Hasura/Server/Telemetry/Counters.hs b/server/src-lib/Hasura/Server/Telemetry/Counters.hs index 6d8bf1ba60a20..042064c210a6e 100644 --- a/server/src-lib/Hasura/Server/Telemetry/Counters.hs +++ b/server/src-lib/Hasura/Server/Telemetry/Counters.hs @@ -49,9 +49,9 @@ instance Hashable RequestDimensions -- | Accumulated time metrics. data RequestTimings = RequestTimings { - telemTimeIO :: !Seconds + telemTimeIO :: !(Seconds 'Absolute) -- ^ Time spent waiting on PG/remote http calls - , telemTimeTot :: !Seconds + , telemTimeTot :: !(Seconds 'Absolute) -- ^ Total service time for request (including 'telemTimeIO') } @@ -62,8 +62,8 @@ instance Semigroup RequestTimings where -- | 'RequestTimings' along with the count data RequestTimingsCount = RequestTimingsCount { - telemTimeIO :: !Seconds - , telemTimeTot :: !Seconds + telemTimeIO :: !(Seconds 'Absolute) + , telemTimeTot :: !(Seconds 'Absolute) , telemCount :: !Word -- ^ The number of requests that have contributed to the accumulated timings above. -- So e.g. @telemTimeTot / count@ would give the mean service time. @@ -125,7 +125,7 @@ instance A.FromJSON Transport -- | The timings and counts here were from requests with total time longer than -- 'bucketGreaterThan' (but less than any larger bucket cutoff times). 
-newtype RunningTimeBucket = RunningTimeBucket { bucketGreaterThan :: Seconds } +newtype RunningTimeBucket = RunningTimeBucket { bucketGreaterThan :: Seconds 'Absolute } deriving (Fractional, Num, Ord, Eq, Show, Generic, A.ToJSON, A.FromJSON, Hashable) From 4fe45bcc3c004965d2318d839e3a2b3a48d5c688 Mon Sep 17 00:00:00 2001 From: Alexis King Date: Tue, 3 Mar 2020 17:30:30 -0600 Subject: [PATCH 066/195] Constrain setup.Cabal to be compatible with cabal-install 2.4 --- server/cabal.project | 5 +++++ server/cabal.project.ci | 2 ++ server/cabal.project.freeze | 3 +-- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/server/cabal.project b/server/cabal.project index a74125f55a132..5d58386651a5f 100644 --- a/server/cabal.project +++ b/server/cabal.project @@ -14,6 +14,11 @@ -- See: https://www.haskell.org/cabal/users-guide/nix-local-build.html#configuring-builds-with-cabal-project packages: . +constraints: + -- We build with cabal-install 2.4 in CI, so ensure we don’t end up with a + -- freeze file that forces an incompatible version for Setup.hs scripts. + setup.Cabal <2.6 + package * optimization: 2 diff --git a/server/cabal.project.ci b/server/cabal.project.ci index d65b970281751..5bc6968f6d998 100644 --- a/server/cabal.project.ci +++ b/server/cabal.project.ci @@ -1,5 +1,7 @@ -- The project configuration used when building in CI. 
+reject-unconstrained-dependencies: all + package graphql-engine ghc-options: -j3 -Werror tests: true diff --git a/server/cabal.project.freeze b/server/cabal.project.freeze index 4e05cf0f6e587..f109add264531 100644 --- a/server/cabal.project.freeze +++ b/server/cabal.project.freeze @@ -1,4 +1,4 @@ -constraints: any.Cabal ==3.0.0.0, +constraints: any.Cabal ==2.4.1.0, Cabal -bundled-binary-generic, any.Glob ==0.10.0, any.HUnit ==1.6.0.0, @@ -138,7 +138,6 @@ constraints: any.Cabal ==3.0.0.0, any.generic-arbitrary ==0.1.0, any.ghc-boot-th ==8.6.5, any.ghc-prim ==0.5.3, - graphql-engine +developer, any.happy ==1.19.12, happy +small_base, any.hashable ==1.3.0.0, From c0774b0b64d4a2c1b8aa538226ebf92b8d239164 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Wed, 4 Mar 2020 10:13:57 +0530 Subject: [PATCH 067/195] clean metadata export for ST * don't export the default meta data values of the scheduled trigger - omit the retry_conf and header value if they have default values * make some changes in helper fns of exporting metadata of ST --- server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs b/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs index e8ca3a48e9f79..f4fd8ee0027af 100644 --- a/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs +++ b/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs @@ -425,7 +425,17 @@ replaceMetadataToOrdJSON ( ReplaceMetadata ] <> catMaybes [maybeCommentToMaybeOrdPair comment] scheduledTriggerQToOrdJSON :: CreateScheduledTrigger -> AO.Value - scheduledTriggerQToOrdJSON = AO.toOrdered + scheduledTriggerQToOrdJSON (CreateScheduledTrigger name webhook schedule payload retryConf headers) = + AO.object $ [ ("name", AO.toOrdered name) + , ("webhook", AO.toOrdered webhook) + , ("schedule", AO.toOrdered schedule) + ] <> catMaybes [ maybeAnyToMaybeOrdPair "payload" AO.toOrdered payload + , maybeAnyToMaybeOrdPair "retry_conf" 
AO.toOrdered (maybeRetryConfiguration retryConf) + , maybeAnyToMaybeOrdPair "headers" AO.toOrdered (maybeHeader headers)] + where + maybeRetryConfiguration rc = if rc == defaultRetryConfST then Nothing else Just rc + + maybeHeader headerConf = if headerConf == [] then Nothing else Just headerConf customTypesToOrdJSON :: CustomTypes -> AO.Value customTypesToOrdJSON (CustomTypes inpObjs objs scalars enums) = From ac54a45a06bfb758bf24d5639af41503c3f01652 Mon Sep 17 00:00:00 2001 From: Tirumarai Selvan A Date: Wed, 4 Mar 2020 14:06:21 +0530 Subject: [PATCH 068/195] [skip ci] wip: unit tests for `Duration 'Calendar` --- server/src-lib/Data/Time/Clock/Units.hs | 1 + server/src-test/Data/TimeSpec.hs | 66 +++++++++++++++++-------- 2 files changed, 46 insertions(+), 21 deletions(-) diff --git a/server/src-lib/Data/Time/Clock/Units.hs b/server/src-lib/Data/Time/Clock/Units.hs index f01adf416c8a9..480e21160eb3a 100644 --- a/server/src-lib/Data/Time/Clock/Units.hs +++ b/server/src-lib/Data/Time/Clock/Units.hs @@ -51,6 +51,7 @@ module Data.Time.Clock.Units , Microseconds(..) , Nanoseconds(..) -- * Converting between units + , Duration , Duration'(..) , DurationType(..) , fromUnits diff --git a/server/src-test/Data/TimeSpec.hs b/server/src-test/Data/TimeSpec.hs index 19acbaaaf5201..4f92c2c9f7a2f 100644 --- a/server/src-test/Data/TimeSpec.hs +++ b/server/src-test/Data/TimeSpec.hs @@ -1,39 +1,63 @@ module Data.TimeSpec (spec) where -- | Time-related properties we care about. 
-import Prelude -import Data.Time.Clock.Units -import Data.Time -import Data.Aeson -import Test.Hspec +import Data.Aeson +import Data.Time +import Data.Time.Clock.Units +import Prelude +import Test.Hspec spec :: Spec spec = do - timeUnitsSpec + timeAbsoluteUnitsSpec + timeCalendarUnitsSpec diffTimeSpec -timeUnitsSpec :: Spec -timeUnitsSpec = - describe "time units" $ do +timeAbsoluteUnitsSpec :: Spec +timeAbsoluteUnitsSpec = + describe "time absolute units" $ do it "converts correctly" $ do - seconds 123 `shouldBe` 123 - milliseconds 123 `shouldBe` 0.123 - microseconds 123 `shouldBe` 0.000123 - nanoseconds 123 `shouldBe` 0.000000123 + (seconds 123 :: Duration 'Absolute) `shouldBe` 123 + (milliseconds 123 :: Duration 'Absolute) `shouldBe` 0.123 + (microseconds 123 :: Duration 'Absolute) `shouldBe` 0.000123 + (nanoseconds 123 :: Duration 'Absolute) `shouldBe` 0.000000123 it "has a correct Read instance" $ do - seconds (read "123") `shouldBe` 123 - milliseconds (read "123") `shouldBe` 0.123 - microseconds (read "123") `shouldBe` 0.000123 - nanoseconds (read "123") `shouldBe` 0.000000123 + (seconds (read "123") :: Duration 'Absolute) `shouldBe` 123 + (milliseconds (read "123") :: Duration 'Absolute) `shouldBe` 0.123 + (microseconds (read "123") :: Duration 'Absolute) `shouldBe` 0.000123 + (nanoseconds (read "123") :: Duration 'Absolute) `shouldBe` 0.000000123 it "JSON serializes as proper units" $ do - toJSON (1 :: Seconds) `shouldBe` Number 1 - decode "1.0" `shouldBe` Just (1 :: Seconds) + toJSON (1 :: Seconds 'Absolute) `shouldBe` Number 1 + decode "1.0" `shouldBe` Just (1 :: Seconds 'Absolute) it "converts with fromUnits" $ do - fromUnits (2 :: Minutes) `shouldBe` (120 :: NominalDiffTime) - fromUnits (60 :: Seconds) `shouldBe` (1 :: Minutes) + fromUnits (2 :: Minutes 'Absolute) `shouldBe` (120 :: NominalDiffTime) + fromUnits (60 :: Seconds 'Absolute) `shouldBe` (1 :: Minutes 'Calendar) + +timeCalendarUnitsSpec :: Spec +timeCalendarUnitsSpec = + describe "time 
calendar units" $ do + it "converts correctly" $ do + (seconds 123 :: Duration 'Calendar) `shouldBe` 123 + (milliseconds 123 :: Duration 'Calendar) `shouldBe` 0.123 + (microseconds 123 :: Duration 'Calendar) `shouldBe` 0.000123 + (nanoseconds 123 :: Duration 'Calendar) `shouldBe` 0.000000123 + + it "has a correct Read instance" $ do + (seconds (read "123") :: Duration 'Calendar) `shouldBe` 123 + (milliseconds (read "123") :: Duration 'Calendar) `shouldBe` 0.123 + (microseconds (read "123") :: Duration 'Calendar) `shouldBe` 0.000123 + (nanoseconds (read "123") :: Duration 'Calendar) `shouldBe` 0.000000123 + + it "JSON serializes as proper units" $ do + toJSON (1 :: Seconds 'Calendar) `shouldBe` Number 1 + decode "1.0" `shouldBe` Just (1 :: Seconds 'Calendar) + + it "converts with fromUnits" $ do + fromUnits (2 :: Minutes 'Calendar) `shouldBe` (120 :: DiffTime) + fromUnits (60 :: Seconds 'Calendar) `shouldBe` (1 :: Minutes 'Absolute) diffTimeSpec :: Spec diffTimeSpec = From 2885b9ab880ac7679520adcdfed78baecdf7796b Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Wed, 4 Mar 2020 18:04:48 +0530 Subject: [PATCH 069/195] convert PicoSeconds to Seconds in the toFixedPico function --- server/src-lib/Data/Time/Clock/Units.hs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/server/src-lib/Data/Time/Clock/Units.hs b/server/src-lib/Data/Time/Clock/Units.hs index 480e21160eb3a..650dee6d1c0f6 100644 --- a/server/src-lib/Data/Time/Clock/Units.hs +++ b/server/src-lib/Data/Time/Clock/Units.hs @@ -377,6 +377,12 @@ instance Hashable (TimeUnit a 'Absolute) where -- instances for Calendar +-- NOTE: secondsToNominalDiffTime :: Pico -> NominalDiffTime +-- http://hackage.haskell.org/package/time-1.9.3/docs/Data-Time-Clock.html +-- If a Pico value is supplied to the function then the output is also in Pico +-- and when the Pico value is converted into seconds(divide by 10^12) +-- then the output is in Seconds +-- So, the function `toFixedPico` 
converts Pico into Seconds instance (KnownNat picosPerUnit) => Num (TimeUnit picosPerUnit 'Calendar) where TimeUnit a + TimeUnit b = TimeUnit $ a + b TimeUnit a - TimeUnit b = TimeUnit $ a - b @@ -428,5 +434,7 @@ instance Duration' NominalDiffTime where fromUnits :: (Duration' x, Duration' y)=> x -> y fromUnits = fromDiffTime . toDiffTime +-- | The input to this function is the number of picos in Integer +-- and then converting it into seconds. toFixedPico :: Integer -> Pico -toFixedPico = fromIntegral +toFixedPico = (/1000000000000) . fromIntegral From 92250771793a1f558d94725db17f0afc13fbf333 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Thu, 12 Mar 2020 14:17:11 +0530 Subject: [PATCH 070/195] modified the logic of consuming scheduled events - made it similar to the logic being used in the event-triggers --- server/src-lib/Hasura/App.hs | 4 +- .../src-lib/Hasura/Eventing/EventTrigger.hs | 11 -- .../Hasura/Eventing/ScheduledTrigger.hs | 108 ++++++++++++++---- 3 files changed, 86 insertions(+), 37 deletions(-) diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index 2362d7b4db3a3..2917350fc1a43 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -261,8 +261,10 @@ runHGEServer ServeOptions{..} InitCtx{..} initTime = do _asyncActionsThread <- C.forkImmortal "asyncActionsProcessor" logger $ liftIO $ asyncActionsProcessor (_scrCache cacheRef) _icPgPool _icHttpManager + maxSTEvThreads <- liftIO $ getFromEnv defaultMaxScheduledEventThreads "HASURA_GRAPHQL_SCHEDULED_EVENTS_HTTP_POOL_SIZE" + scEventEngineCtx <- liftIO $ atomically $ initScheduledEventEngineCtx maxSTEvThreads void $ liftIO $ C.forkIO $ runScheduledEventsGenerator logger _icPgPool (getSCFromRef cacheRef) - void $ liftIO $ C.forkIO $ processScheduledQueue logger logEnvHeaders _icHttpManager _icPgPool (getSCFromRef cacheRef) + void $ liftIO $ C.forkImmortal "processScheduledQueue" logger $ processScheduledQueue logger logEnvHeaders _icHttpManager 
_icPgPool (getSCFromRef cacheRef) scEventEngineCtx -- start a background thread to check for updates _updateThread <- C.forkImmortal "checkForUpdates" logger $ liftIO $ diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index bcb99950bf165..2353e910c4af5 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -298,17 +298,6 @@ logQErr err = do logger :: L.Logger L.Hasura <- asks getter L.unLogger logger $ EventInternalErr err --- TODO: Implement this function --- logHTTPErr --- :: ( MonadReader r m --- , Has (L.Logger L.Hasura) r --- , MonadIO m --- ) --- => HTTPErr a -> m () --- logHTTPErr err = do --- logger :: L.Logger L.Hasura <- asks getter --- L.unLogger logger $ err - getEventTriggerInfoFromEvent :: SchemaCache -> Event -> Maybe EventTriggerInfo getEventTriggerInfoFromEvent sc e = let table = eTable e tableInfo = M.lookup table $ scTables sc diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 6e3647e69a847..4c53a54b2c4e6 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -21,13 +21,21 @@ module Hasura.Eventing.ScheduledTrigger , ScheduledEventSeed(..) 
, generateScheduleTimes , insertScheduledEvents + , defaultMaxScheduledEventThreads + , initScheduledEventEngineCtx ) where +import Control.Concurrent.Extended (sleep) +import Control.Concurrent.Async (wait, withAsync, async, link) +import Control.Concurrent.STM.TVar +import Control.Exception.Lifted (finally, mask_) +import Control.Monad.STM import Control.Arrow.Extended (dup) import Control.Concurrent (threadDelay) import Data.Has import Data.Int (Int64) import Data.List (unfoldr) +import Data.String import Data.Time.Clock import Hasura.Eventing.HTTP import Hasura.Prelude @@ -109,6 +117,19 @@ data ScheduledEventFull $(J.deriveToJSON (J.aesonDrop 3 J.snakeCase) {J.omitNothingFields = True} ''ScheduledEventFull) +defaultMaxScheduledEventThreads :: Int +defaultMaxScheduledEventThreads = 10 + +data ScheduledEventEngineCtx + = ScheduledEventEngineCtx + { _seeCtxEventThreadsCapacity :: TVar Int + } + +initScheduledEventEngineCtx :: Int -> STM ScheduledEventEngineCtx +initScheduledEventEngineCtx maxT = do + _seeCtxEventThreadsCapacity <- newTVar maxT + return $ ScheduledEventEngineCtx{..} + runScheduledEventsGenerator :: L.Logger L.Hasura -> Q.PGPool @@ -217,30 +238,65 @@ processScheduledQueue -> HTTP.Manager -> Q.PGPool -> IO SchemaCache - -> IO () -processScheduledQueue logger logEnv httpMgr pgpool getSC = - forever $ do - scheduledTriggersInfo <- scScheduledTriggers <$> getSC - scheduledEventsE <- - runExceptT $ - Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadWrite) getScheduledEvents - case scheduledEventsE of - Right partialEvents -> - for_ partialEvents $ \(ScheduledEventPartial id' name st payload tries)-> do - case Map.lookup name scheduledTriggersInfo of - Nothing -> logInternalError $ - err500 Unexpected "could not find scheduled trigger in cache" - Just stInfo@ScheduledTriggerInfo{..} -> do - let webhook = wciCachedValue stiWebhookInfo - payload' = fromMaybe (fromMaybe J.Null stiPayload) payload -- override if neccessary - scheduledEvent = ScheduledEventFull id' 
name st tries webhook payload' stiRetryConf - finally <- runExceptT $ - runReaderT (processScheduledEvent logEnv pgpool stInfo scheduledEvent) (logger, httpMgr) - either logInternalError pure finally - Left err -> logInternalError err - threadDelay oneMinute + -> ScheduledEventEngineCtx + -> IO void +processScheduledQueue logger logenv httpMgr pgpool getSC ScheduledEventEngineCtx{..} = do + events0 <- popEventsBatch + go events0 0 False where + fetchBatchSize = 100 logInternalError err = L.unLogger logger $ ScheduledTriggerInternalErr err + popEventsBatch = do + let run = runExceptT . Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadWrite) + run (getScheduledEvents fetchBatchSize) >>= \case + Left err -> do + logInternalError err + return [] + Right events -> return events + + go :: [ScheduledEventPartial] -> Int -> Bool -> IO void + go events !fullFetchCount !alreadyWarned = do + when (null events) $ sleep (fromIntegral oneMinute) + scheduledTriggersInfo <- scScheduledTriggers <$> getSC + eventsNext <- withAsync popEventsBatch $ \eventsNextA -> do + forM_ events $ \(ScheduledEventPartial id' name st payload tries) -> do + case Map.lookup name scheduledTriggersInfo of + Nothing -> logInternalError $ err500 Unexpected "could not find scheduled trigger in cache" + Just stInfo@ScheduledTriggerInfo{..} -> + mask_ $ do + atomically $ do + capacity <- readTVar _seeCtxEventThreadsCapacity + check $ capacity > 0 + writeTVar _seeCtxEventThreadsCapacity (capacity - 1) + let restoreCapacity = liftIO $ atomically $ modifyTVar' _seeCtxEventThreadsCapacity (+ 1) + webhook = wciCachedValue stiWebhookInfo + payload' = fromMaybe (fromMaybe J.Null stiPayload) payload + scheduledEvent = ScheduledEventFull id' name st tries webhook payload' stiRetryConf + t <- async $ runExceptT $ flip runReaderT (logger, httpMgr) $ + (processScheduledEvent logenv pgpool stInfo scheduledEvent) `finally` restoreCapacity + link t + + wait eventsNextA + + let lenEvents = length events + if | lenEvents == 
fetchBatchSize -> do + -- If we've seen N fetches in a row from the DB come back full (i.e. only limited + -- by our LIMIT clause), then we say we're clearly falling behind: + let clearlyBehind = fullFetchCount >= 3 + unless alreadyWarned $ + when clearlyBehind $ + L.unLogger logger $ L.UnstructuredLog L.LevelWarn $ fromString $ + "Events processor may not be keeping up with events generated in postgres, " <> + "or we're working on a backlog of events. Consider increasing " <> + "HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE" + go eventsNext (fullFetchCount+1) (alreadyWarned || clearlyBehind) + + | otherwise -> do + when (lenEvents /= fetchBatchSize && alreadyWarned) $ + -- emit as warning in case users are only logging warning severity and saw above + L.unLogger logger $ L.UnstructuredLog L.LevelWarn $ fromString $ + "It looks like the events processor is keeping up again." + go eventsNext 0 False processScheduledEvent :: ( MonadReader r m @@ -395,8 +451,8 @@ insertInvocation invo = do WHERE id = $1 |] (Identity $ iEventId invo) True -getScheduledEvents :: Q.TxE QErr [ScheduledEventPartial] -getScheduledEvents = do +getScheduledEvents :: Int -> Q.TxE QErr [ScheduledEventPartial] +getScheduledEvents limitI = do map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.hdb_scheduled_events SET locked = 't' @@ -412,11 +468,13 @@ getScheduledEvents = do ) and t.dead = 'f' ) + LIMIT $1 FOR UPDATE SKIP LOCKED ) RETURNING id, name, scheduled_time, additional_payload, tries - |] () True + |] (Identity limit) True where uncurryEvent (i, n, st, p, tries) = ScheduledEventPartial i n st (Q.getAltJ <$> p) tries + limit = fromIntegral limitI :: Word64 liftExceptTIO :: (MonadError e m, MonadIO m) => ExceptT e IO a -> m a liftExceptTIO m = liftEither =<< liftIO (runExceptT m) From 73db7d6329e78ad255e6638f9a4686bf84e6216a Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Thu, 12 Mar 2020 15:27:50 +0530 Subject: [PATCH 071/195] fix the warnings in the 
EventTrigger and ScheduledTrigger files --- server/src-lib/Hasura/App.hs | 3 ++- .../src-lib/Hasura/Eventing/EventTrigger.hs | 22 +------------------ 2 files changed, 3 insertions(+), 22 deletions(-) diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index 2917350fc1a43..83a162116167a 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -263,7 +263,8 @@ runHGEServer ServeOptions{..} InitCtx{..} initTime = do maxSTEvThreads <- liftIO $ getFromEnv defaultMaxScheduledEventThreads "HASURA_GRAPHQL_SCHEDULED_EVENTS_HTTP_POOL_SIZE" scEventEngineCtx <- liftIO $ atomically $ initScheduledEventEngineCtx maxSTEvThreads - void $ liftIO $ C.forkIO $ runScheduledEventsGenerator logger _icPgPool (getSCFromRef cacheRef) + + void $ liftIO $ runScheduledEventsGenerator logger _icPgPool (getSCFromRef cacheRef) void $ liftIO $ C.forkImmortal "processScheduledQueue" logger $ processScheduledQueue logger logEnvHeaders _icHttpManager _icPgPool (getSCFromRef cacheRef) scEventEngineCtx -- start a background thread to check for updates diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 2353e910c4af5..56a37063ca685 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -12,7 +12,7 @@ module Hasura.Eventing.EventTrigger import Control.Concurrent.Extended (sleep) import Control.Concurrent.Async (wait, withAsync, async, link) import Control.Concurrent.STM.TVar -import Control.Exception.Lifted (finally, mask_, try) +import Control.Exception.Lifted (finally, mask_) import Control.Monad.STM import Data.Aeson import Data.Aeson.Casing @@ -30,19 +30,13 @@ import Hasura.RQL.Types import Hasura.Server.Version (HasVersion) import Hasura.SQL.Types -import qualified Data.ByteString as BS -import qualified Data.CaseInsensitive as CI import qualified Data.HashMap.Strict as M import qualified Data.TByteString as TBS import qualified 
Data.Text as T -import qualified Data.Text.Encoding as T -import qualified Data.Text.Encoding as TE -import qualified Data.Text.Encoding.Error as TE import qualified Data.Time.Clock as Time import qualified Database.PG.Query as Q import qualified Hasura.Logging as L import qualified Network.HTTP.Client as HTTP -import qualified Network.HTTP.Types as HTTP invocationVersion :: Version invocationVersion = "2" @@ -81,9 +75,6 @@ defaultMaxEventThreads = 100 defaultFetchIntervalMilliSec :: (Milliseconds 'Absolute) defaultFetchIntervalMilliSec = 1000 -retryAfterHeader :: CI.CI T.Text -retryAfterHeader = "Retry-After" - initEventEngineCtx :: Int -> DiffTime -> STM EventEngineCtx initEventEngineCtx maxT _eeCtxFetchInterval = do _eeCtxEventThreadsCapacity <- newTVar maxT @@ -266,13 +257,6 @@ retryOrSetError e retryConf err = do getRetryAfterHeaderFromError (HStatus resp) = getRetryAfterHeaderFromResp resp getRetryAfterHeaderFromError _ = Nothing - getRetryAfterHeaderFromResp resp - = let mHeader = find (\(HeaderConf name _) - -> CI.mk name == retryAfterHeader) (hrsHeaders resp) - in case mHeader of - Just (HeaderConf _ (HVValue value)) -> Just value - _ -> Nothing - parseRetryHeader = mfilter (> 0) . readMaybe . 
T.unpack mkInvo @@ -289,10 +273,6 @@ mkInvo ep status reqHeaders respBody respHeaders (mkWebhookReq (toJSON ep) reqHeaders invocationVersion) resp -mkMaybe :: [a] -> Maybe [a] -mkMaybe [] = Nothing -mkMaybe x = Just x - logQErr :: ( MonadReader r m, Has (L.Logger L.Hasura) r, MonadIO m) => QErr -> m () logQErr err = do logger :: L.Logger L.Hasura <- asks getter From e80cc55a6b6a310cf0c5f2d6021fce14cd901238 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Thu, 12 Mar 2020 15:50:21 +0530 Subject: [PATCH 072/195] use "scheduled events" in the logs instead of "events" --- server/src-lib/Hasura/Eventing/ScheduledTrigger.hs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 4c53a54b2c4e6..d8c671740ad05 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -286,16 +286,16 @@ processScheduledQueue logger logenv httpMgr pgpool getSC ScheduledEventEngineCtx unless alreadyWarned $ when clearlyBehind $ L.unLogger logger $ L.UnstructuredLog L.LevelWarn $ fromString $ - "Events processor may not be keeping up with events generated in postgres, " <> - "or we're working on a backlog of events. Consider increasing " <> - "HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE" + "Scheduled Events processor may not be keeping up with events generated in postgres, " <> + "or we're working on a backlog of scheduled events. Consider increasing " <> + "HASURA_GRAPHQL_SCHEDULED_EVENTS_HTTP_POOL_SIZE" go eventsNext (fullFetchCount+1) (alreadyWarned || clearlyBehind) | otherwise -> do when (lenEvents /= fetchBatchSize && alreadyWarned) $ -- emit as warning in case users are only logging warning severity and saw above L.unLogger logger $ L.UnstructuredLog L.LevelWarn $ fromString $ - "It looks like the events processor is keeping up again." 
+ "It looks like the scheduled events processor is keeping up again." go eventsNext 0 False processScheduledEvent :: From 070bd19aeacf0a2c4919490b795eeb2d9c05a38f Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Thu, 12 Mar 2020 15:50:56 +0530 Subject: [PATCH 073/195] add documentation for the processScheduledQueue function --- server/src-lib/Hasura/App.hs | 2 +- server/src-lib/Hasura/Eventing/ScheduledTrigger.hs | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index 83a162116167a..33994b6a7a0f7 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -264,7 +264,7 @@ runHGEServer ServeOptions{..} InitCtx{..} initTime = do maxSTEvThreads <- liftIO $ getFromEnv defaultMaxScheduledEventThreads "HASURA_GRAPHQL_SCHEDULED_EVENTS_HTTP_POOL_SIZE" scEventEngineCtx <- liftIO $ atomically $ initScheduledEventEngineCtx maxSTEvThreads - void $ liftIO $ runScheduledEventsGenerator logger _icPgPool (getSCFromRef cacheRef) + void $ liftIO $ C.forkIO $ runScheduledEventsGenerator logger _icPgPool (getSCFromRef cacheRef) void $ liftIO $ C.forkImmortal "processScheduledQueue" logger $ processScheduledQueue logger logEnvHeaders _icHttpManager _icPgPool (getSCFromRef cacheRef) scEventEngineCtx -- start a background thread to check for updates diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index d8c671740ad05..a06e370ab070c 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -231,6 +231,11 @@ generateScheduleTimes from n cron = take n $ go from where go = unfoldr (fmap dup . nextMatch cron) +-- | processScheduledQueue works like processEventQueue(defined in Eventing/EventTrigger.hs) +-- | Here, the sleep time is hard-coded to 1 minute unlike in processEventQueue where it can +-- | be a value that can be set by the user. 
+-- | The number of threads to be spawned can be set through the `HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE`, +-- | by default it's set to 10 processScheduledQueue :: HasVersion => L.Logger L.Hasura From 3c85ea1e68f0097f2b59947d442f384b4ddb81ba Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Thu, 12 Mar 2020 18:24:39 +0530 Subject: [PATCH 074/195] log the HTTP error in processEventQueue --- server/src-lib/Hasura/Eventing/EventTrigger.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 56a37063ca685..7aacfb854b4e0 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -179,6 +179,7 @@ processEventQueue logger logenv httpMgr pool getSchemaCache EventEngineCtx{..} = headers = addDefaultHeaders etHeaders ep = createEventPayload retryConf e res <- runExceptT $ tryWebhook headers responseTimeout (toJSON ep) webhook + logHTTPForET res Nothing let decodedHeaders = map (decodeHeader logenv headerInfos) headers either (processError pool e retryConf decodedHeaders ep) @@ -216,7 +217,6 @@ processError => Q.PGPool -> Event -> RetryConf -> [HeaderConf] -> EventPayload -> HTTPErr a -> m (Either QErr ()) processError pool e retryConf decodedHeaders ep err = do --- logHTTPErr err === Need to implement this function let invocation = case err of HClient excp -> do let errMsg = TBS.fromLBS $ encode $ show excp From 28320ec2d174839d4aa47268558c97f758042dc8 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Fri, 13 Mar 2020 13:13:15 +0530 Subject: [PATCH 075/195] revert the Scheduled Triggers logic to as it was earlier --- server/src-lib/Hasura/App.hs | 5 +- .../Hasura/Eventing/ScheduledTrigger.hs | 111 ++++-------------- 2 files changed, 25 insertions(+), 91 deletions(-) diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index 33994b6a7a0f7..2362d7b4db3a3 100644 --- 
a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -261,11 +261,8 @@ runHGEServer ServeOptions{..} InitCtx{..} initTime = do _asyncActionsThread <- C.forkImmortal "asyncActionsProcessor" logger $ liftIO $ asyncActionsProcessor (_scrCache cacheRef) _icPgPool _icHttpManager - maxSTEvThreads <- liftIO $ getFromEnv defaultMaxScheduledEventThreads "HASURA_GRAPHQL_SCHEDULED_EVENTS_HTTP_POOL_SIZE" - scEventEngineCtx <- liftIO $ atomically $ initScheduledEventEngineCtx maxSTEvThreads - void $ liftIO $ C.forkIO $ runScheduledEventsGenerator logger _icPgPool (getSCFromRef cacheRef) - void $ liftIO $ C.forkImmortal "processScheduledQueue" logger $ processScheduledQueue logger logEnvHeaders _icHttpManager _icPgPool (getSCFromRef cacheRef) scEventEngineCtx + void $ liftIO $ C.forkIO $ processScheduledQueue logger logEnvHeaders _icHttpManager _icPgPool (getSCFromRef cacheRef) -- start a background thread to check for updates _updateThread <- C.forkImmortal "checkForUpdates" logger $ liftIO $ diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index a06e370ab070c..265cf176c38d9 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -21,21 +21,13 @@ module Hasura.Eventing.ScheduledTrigger , ScheduledEventSeed(..) 
, generateScheduleTimes , insertScheduledEvents - , defaultMaxScheduledEventThreads - , initScheduledEventEngineCtx ) where -import Control.Concurrent.Extended (sleep) -import Control.Concurrent.Async (wait, withAsync, async, link) -import Control.Concurrent.STM.TVar -import Control.Exception.Lifted (finally, mask_) -import Control.Monad.STM import Control.Arrow.Extended (dup) import Control.Concurrent (threadDelay) import Data.Has import Data.Int (Int64) import Data.List (unfoldr) -import Data.String import Data.Time.Clock import Hasura.Eventing.HTTP import Hasura.Prelude @@ -117,19 +109,6 @@ data ScheduledEventFull $(J.deriveToJSON (J.aesonDrop 3 J.snakeCase) {J.omitNothingFields = True} ''ScheduledEventFull) -defaultMaxScheduledEventThreads :: Int -defaultMaxScheduledEventThreads = 10 - -data ScheduledEventEngineCtx - = ScheduledEventEngineCtx - { _seeCtxEventThreadsCapacity :: TVar Int - } - -initScheduledEventEngineCtx :: Int -> STM ScheduledEventEngineCtx -initScheduledEventEngineCtx maxT = do - _seeCtxEventThreadsCapacity <- newTVar maxT - return $ ScheduledEventEngineCtx{..} - runScheduledEventsGenerator :: L.Logger L.Hasura -> Q.PGPool @@ -231,11 +210,6 @@ generateScheduleTimes from n cron = take n $ go from where go = unfoldr (fmap dup . nextMatch cron) --- | processScheduledQueue works like processEventQueue(defined in Eventing/EventTrigger.hs) --- | Here, the sleep time is hard-coded to 1 minute unlike in processEventQueue where it can --- | be a value that can be set by the user. 
--- | The number of threads to be spawned can be set through the `HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE`, --- | by default it's set to 10 processScheduledQueue :: HasVersion => L.Logger L.Hasura @@ -243,65 +217,30 @@ processScheduledQueue -> HTTP.Manager -> Q.PGPool -> IO SchemaCache - -> ScheduledEventEngineCtx -> IO void -processScheduledQueue logger logenv httpMgr pgpool getSC ScheduledEventEngineCtx{..} = do - events0 <- popEventsBatch - go events0 0 False +processScheduledQueue logger logEnv httpMgr pgpool getSC = + forever $ do + scheduledTriggersInfo <- scScheduledTriggers <$> getSC + scheduledEventsE <- + runExceptT $ + Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadWrite) getScheduledEvents + case scheduledEventsE of + Right partialEvents -> + for_ partialEvents $ \(ScheduledEventPartial id' name st payload tries)-> do + case Map.lookup name scheduledTriggersInfo of + Nothing -> logInternalError $ + err500 Unexpected "could not find scheduled trigger in cache" + Just stInfo@ScheduledTriggerInfo{..} -> do + let webhook = wciCachedValue stiWebhookInfo + payload' = fromMaybe (fromMaybe J.Null stiPayload) payload -- override if neccessary + scheduledEvent = ScheduledEventFull id' name st tries webhook payload' stiRetryConf + finally <- runExceptT $ + runReaderT (processScheduledEvent logEnv pgpool stInfo scheduledEvent) (logger, httpMgr) + either logInternalError pure finally + Left err -> logInternalError err + threadDelay oneMinute where - fetchBatchSize = 100 logInternalError err = L.unLogger logger $ ScheduledTriggerInternalErr err - popEventsBatch = do - let run = runExceptT . 
Q.runTx pgpool (Q.ReadCommitted, Just Q.ReadWrite) - run (getScheduledEvents fetchBatchSize) >>= \case - Left err -> do - logInternalError err - return [] - Right events -> return events - - go :: [ScheduledEventPartial] -> Int -> Bool -> IO void - go events !fullFetchCount !alreadyWarned = do - when (null events) $ sleep (fromIntegral oneMinute) - scheduledTriggersInfo <- scScheduledTriggers <$> getSC - eventsNext <- withAsync popEventsBatch $ \eventsNextA -> do - forM_ events $ \(ScheduledEventPartial id' name st payload tries) -> do - case Map.lookup name scheduledTriggersInfo of - Nothing -> logInternalError $ err500 Unexpected "could not find scheduled trigger in cache" - Just stInfo@ScheduledTriggerInfo{..} -> - mask_ $ do - atomically $ do - capacity <- readTVar _seeCtxEventThreadsCapacity - check $ capacity > 0 - writeTVar _seeCtxEventThreadsCapacity (capacity - 1) - let restoreCapacity = liftIO $ atomically $ modifyTVar' _seeCtxEventThreadsCapacity (+ 1) - webhook = wciCachedValue stiWebhookInfo - payload' = fromMaybe (fromMaybe J.Null stiPayload) payload - scheduledEvent = ScheduledEventFull id' name st tries webhook payload' stiRetryConf - t <- async $ runExceptT $ flip runReaderT (logger, httpMgr) $ - (processScheduledEvent logenv pgpool stInfo scheduledEvent) `finally` restoreCapacity - link t - - wait eventsNextA - - let lenEvents = length events - if | lenEvents == fetchBatchSize -> do - -- If we've seen N fetches in a row from the DB come back full (i.e. only limited - -- by our LIMIT clause), then we say we're clearly falling behind: - let clearlyBehind = fullFetchCount >= 3 - unless alreadyWarned $ - when clearlyBehind $ - L.unLogger logger $ L.UnstructuredLog L.LevelWarn $ fromString $ - "Scheduled Events processor may not be keeping up with events generated in postgres, " <> - "or we're working on a backlog of scheduled events. 
Consider increasing " <> - "HASURA_GRAPHQL_SCHEDULED_EVENTS_HTTP_POOL_SIZE" - go eventsNext (fullFetchCount+1) (alreadyWarned || clearlyBehind) - - | otherwise -> do - when (lenEvents /= fetchBatchSize && alreadyWarned) $ - -- emit as warning in case users are only logging warning severity and saw above - L.unLogger logger $ L.UnstructuredLog L.LevelWarn $ fromString $ - "It looks like the scheduled events processor is keeping up again." - go eventsNext 0 False processScheduledEvent :: ( MonadReader r m @@ -456,8 +395,8 @@ insertInvocation invo = do WHERE id = $1 |] (Identity $ iEventId invo) True -getScheduledEvents :: Int -> Q.TxE QErr [ScheduledEventPartial] -getScheduledEvents limitI = do +getScheduledEvents :: Q.TxE QErr [ScheduledEventPartial] +getScheduledEvents = do map uncurryEvent <$> Q.listQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.hdb_scheduled_events SET locked = 't' @@ -473,13 +412,11 @@ getScheduledEvents limitI = do ) and t.dead = 'f' ) - LIMIT $1 FOR UPDATE SKIP LOCKED ) RETURNING id, name, scheduled_time, additional_payload, tries - |] (Identity limit) True + |] () True where uncurryEvent (i, n, st, p, tries) = ScheduledEventPartial i n st (Q.getAltJ <$> p) tries - limit = fromIntegral limitI :: Word64 liftExceptTIO :: (MonadError e m, MonadIO m) => ExceptT e IO a -> m a liftExceptTIO m = liftEither =<< liftIO (runExceptT m) From 4c09639c18644438d4d23b1308c6f83b9fda2bce Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Fri, 13 Mar 2020 13:44:48 +0530 Subject: [PATCH 076/195] fork new threads using forkImmortal instead of forkIO for ST threads --- server/src-lib/Hasura/App.hs | 4 ++-- server/src-lib/Hasura/Eventing/ScheduledTrigger.hs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index 2362d7b4db3a3..c6b656680c326 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -261,8 +261,8 @@ runHGEServer ServeOptions{..} 
InitCtx{..} initTime = do _asyncActionsThread <- C.forkImmortal "asyncActionsProcessor" logger $ liftIO $ asyncActionsProcessor (_scrCache cacheRef) _icPgPool _icHttpManager - void $ liftIO $ C.forkIO $ runScheduledEventsGenerator logger _icPgPool (getSCFromRef cacheRef) - void $ liftIO $ C.forkIO $ processScheduledQueue logger logEnvHeaders _icHttpManager _icPgPool (getSCFromRef cacheRef) + void $ liftIO $ C.forkImmortal "runScheduledEventsGenerator" logger $ runScheduledEventsGenerator logger _icPgPool (getSCFromRef cacheRef) + void $ liftIO $ C.forkImmortal "processScheduledQueue" logger $ processScheduledQueue logger logEnvHeaders _icHttpManager _icPgPool (getSCFromRef cacheRef) -- start a background thread to check for updates _updateThread <- C.forkImmortal "checkForUpdates" logger $ liftIO $ diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index 265cf176c38d9..b87bfe606b7ce 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -113,7 +113,7 @@ runScheduledEventsGenerator :: L.Logger L.Hasura -> Q.PGPool -> IO SchemaCache - -> IO () + -> IO void runScheduledEventsGenerator logger pgpool getSC = do forever $ do sc <- getSC From ffd52cbaae2caf4b68bac8e276c85deb96cd04a1 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Fri, 13 Mar 2020 13:51:36 +0530 Subject: [PATCH 077/195] remove the commented functions from Eventing/HTTP.hs --- server/src-lib/Hasura/Eventing/HTTP.hs | 45 -------------------------- 1 file changed, 45 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index 0df096624a713..1fba0c2a5058d 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -279,24 +279,6 @@ runHTTP manager req = do res <- liftIO $ try $ HTTP.httpLbs req manager return $ either (Left . 
HClient) anyBodyParser res --- -- | Like 'HTTP.httpLbs' but we catch 'HTTP.HttpException' and return all known --- -- error-like conditions as 'HTTPErr'. --- runHTTP --- :: ( MonadReader r m --- , Has (Logger Hasura) r --- , Has HTTP.Manager r --- , MonadIO m --- ) --- => HTTP.Request -> Maybe ExtraContext -> m (Either (HTTPErr a) (HTTPResp a)) --- runHTTP req exLog = do --- logger :: Logger Hasura <- asks getter --- manager <- asks getter --- res <- liftIO $ try $ HTTP.httpLbs req manager --- -- case res of --- -- Left e -> unLogger logger $ HClient e --- -- Right resp -> unLogger logger $ HTTPRespExtra (mkHTTPResp resp) exLog --- return $ either (Left . HClient) anyBodyParser res - tryWebhook :: ( MonadReader r m , Has HTTP.Manager r @@ -324,33 +306,6 @@ tryWebhook headers timeout payload webhook = do eitherResp <- runHTTP manager req onLeft eitherResp throwError --- These run concurrently on their respective EventPayloads --- tryWebhook --- :: ( Has (L.Logger L.Hasura) r --- , Has HTTP.Manager r --- , MonadReader r m --- , MonadIO m --- , MonadError (HTTPErr a) m --- ) --- => [HTTP.Header] -> HTTP.ResponseTimeout -> EventPayload -> String --- -> m (HTTPResp a) --- tryWebhook headers responseTimeout ep webhook = do --- let context = ExtraContext (epCreatedAt ep) (epId ep) --- initReqE <- liftIO $ try $ HTTP.parseRequest webhook --- case initReqE of --- Left excp -> throwError $ HClient excp --- Right initReq -> do --- let req = initReq --- { HTTP.method = "POST" --- , HTTP.requestHeaders = headers --- , HTTP.requestBody = HTTP.RequestBodyLBS (encode ep) --- , HTTP.responseTimeout = responseTimeout --- } - --- eitherResp <- runHTTP req (Just context) --- onLeft eitherResp throwError - - mkResp :: Int -> TBS.TByteString -> [HeaderConf] -> Response mkResp status payload headers = let wr = WebhookResponse payload headers status From b4fd78d6491addf438241f5d5263da2219deed99 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Fri, 13 Mar 2020 14:05:58 +0530 
Subject: [PATCH 078/195] refactor ExtraContext in Eventing back to ExtraLogContext --- server/src-lib/Hasura/Eventing/EventTrigger.hs | 3 ++- server/src-lib/Hasura/Eventing/HTTP.hs | 16 ++++++++-------- .../src-lib/Hasura/Eventing/ScheduledTrigger.hs | 4 ++-- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 7aacfb854b4e0..12f4ed2198528 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -178,8 +178,9 @@ processEventQueue logger logenv httpMgr pool getSchemaCache EventEngineCtx{..} = etHeaders = map encodeHeader headerInfos headers = addDefaultHeaders etHeaders ep = createEventPayload retryConf e + extraLogCtx = ExtraLogContext Nothing (epId ep) -- avoiding getting current time here to avoid another IO call with each event call res <- runExceptT $ tryWebhook headers responseTimeout (toJSON ep) webhook - logHTTPForET res Nothing + logHTTPForET res extraLogCtx let decodedHeaders = map (decodeHeader logenv headerInfos) headers either (processError pool e retryConf decodedHeaders ep) diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index 1fba0c2a5058d..757406bfd90a1 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -7,7 +7,7 @@ module Hasura.Eventing.HTTP , isNetworkErrorHC , logHTTPForET , logHTTPForST - , ExtraContext(..) + , ExtraLogContext(..) , EventId , Invocation(..) 
, Version @@ -106,13 +106,13 @@ data Invocation , iResponse :: Response } -data ExtraContext - = ExtraContext - { elEventCreatedAt :: Time.UTCTime +data ExtraLogContext + = ExtraLogContext + { elEventCreatedAt :: Maybe Time.UTCTime , elEventId :: EventId } deriving (Show, Eq) -$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''ExtraContext) +$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''ExtraLogContext) data HTTPResp (a :: TriggerTypes) = HTTPResp @@ -208,7 +208,7 @@ mkHTTPResp resp = data HTTPRespExtra (a :: TriggerTypes) = HTTPRespExtra { _hreResponse :: Either (HTTPErr a) (HTTPResp a) - , _hreContext :: Maybe ExtraContext + , _hreContext :: ExtraLogContext } $(deriveToJSON (aesonDrop 4 snakeCase){omitNothingFields=True} ''HTTPRespExtra) @@ -259,7 +259,7 @@ logHTTPForET , Has (Logger Hasura) r , MonadIO m ) - => Either (HTTPErr 'Event) (HTTPResp 'Event) -> Maybe ExtraContext -> m () + => Either (HTTPErr 'Event) (HTTPResp 'Event) -> ExtraLogContext -> m () logHTTPForET eitherResp extraLogCtx = do logger :: Logger Hasura <- asks getter unLogger logger $ HTTPRespExtra eitherResp extraLogCtx @@ -269,7 +269,7 @@ logHTTPForST , Has (Logger Hasura) r , MonadIO m ) - => Either (HTTPErr 'Scheduled) (HTTPResp 'Scheduled) -> Maybe ExtraContext -> m () + => Either (HTTPErr 'Scheduled) (HTTPResp 'Scheduled) -> ExtraLogContext -> m () logHTTPForST eitherResp extraLogCtx = do logger :: Logger Hasura <- asks getter unLogger logger $ HTTPRespExtra eitherResp extraLogCtx diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index b87bfe606b7ce..0085e8f936671 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -263,9 +263,9 @@ processScheduledEvent logEnv pgpool ScheduledTriggerInfo {..} se@ScheduledEventF let timeoutSeconds = round $ rcstTimeoutSec stiRetryConf httpTimeout = HTTP.responseTimeoutMicro (timeoutSeconds * 
1000000) headers = addDefaultHeaders $ map encodeHeader stiHeaders - extraLogCtx = ExtraContext currentTime sefId + extraLogCtx = ExtraLogContext (Just currentTime) sefId res <- runExceptT $ tryWebhook headers httpTimeout sefPayload (T.unpack sefWebhook) - logHTTPForST res (Just extraLogCtx) + logHTTPForST res extraLogCtx let decodedHeaders = map (decodeHeader logEnv stiHeaders) headers either (processError pgpool se decodedHeaders) From 89a02a91202d6deea39429eb0dfe46701c8e48a1 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Fri, 13 Mar 2020 16:03:14 +0530 Subject: [PATCH 079/195] refactor the Eventing/HTTP file - move event trigger specific things to EventTrigger.hs - refactor mkInvo to mkInvocation --- .../src-lib/Hasura/Eventing/EventTrigger.hs | 55 +++++++++++++++---- server/src-lib/Hasura/Eventing/HTTP.hs | 40 -------------- 2 files changed, 44 insertions(+), 51 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 12f4ed2198528..7c029679642c4 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -41,6 +41,12 @@ import qualified Network.HTTP.Client as HTTP invocationVersion :: Version invocationVersion = "2" +data TriggerMetadata + = TriggerMetadata { tmName :: TriggerName } + deriving (Show, Eq) + +$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''TriggerMetadata) + newtype EventInternalErr = EventInternalErr QErr deriving (Show, Eq) @@ -69,6 +75,36 @@ data EventEngineCtx , _eeCtxFetchInterval :: DiffTime } +data DeliveryInfo + = DeliveryInfo + { diCurrentRetry :: Int + , diMaxRetries :: Int + } deriving (Show, Eq) + +$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''DeliveryInfo) + +newtype QualifiedTableStrict = QualifiedTableStrict + { getQualifiedTable :: QualifiedTable + } deriving (Show, Eq) + +instance ToJSON QualifiedTableStrict where + toJSON (QualifiedTableStrict 
(QualifiedObject sn tn)) = + object [ "schema" .= sn + , "name" .= tn + ] + +data EventPayload + = EventPayload + { epId :: EventId + , epTable :: QualifiedTableStrict + , epTrigger :: TriggerMetadata + , epEvent :: Value + , epDeliveryInfo :: DeliveryInfo + , epCreatedAt :: Time.UTCTime + } deriving (Show, Eq) + +$(deriveToJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''EventPayload) + defaultMaxEventThreads :: Int defaultMaxEventThreads = 100 @@ -208,7 +244,7 @@ processSuccess pool e decodedHeaders ep resp = do let respBody = hrsBody resp respHeaders = hrsHeaders resp respStatus = hrsStatus resp - invocation = mkInvo ep respStatus decodedHeaders respBody respHeaders + invocation = mkInvocation ep respStatus decodedHeaders respBody respHeaders liftIO $ runExceptT $ Q.runTx pool (Q.RepeatableRead, Just Q.ReadWrite) $ do insertInvocation invocation setSuccess e @@ -221,18 +257,18 @@ processError pool e retryConf decodedHeaders ep err = do let invocation = case err of HClient excp -> do let errMsg = TBS.fromLBS $ encode $ show excp - mkInvo ep 1000 decodedHeaders errMsg [] + mkInvocation ep 1000 decodedHeaders errMsg [] HParse _ detail -> do let errMsg = TBS.fromLBS $ encode detail - mkInvo ep 1001 decodedHeaders errMsg [] + mkInvocation ep 1001 decodedHeaders errMsg [] HStatus errResp -> do let respPayload = hrsBody errResp respHeaders = hrsHeaders errResp respStatus = hrsStatus errResp - mkInvo ep respStatus decodedHeaders respPayload respHeaders + mkInvocation ep respStatus decodedHeaders respPayload respHeaders HOther detail -> do let errMsg = (TBS.fromLBS $ encode detail) - mkInvo ep 500 decodedHeaders errMsg [] + mkInvocation ep 500 decodedHeaders errMsg [] liftIO $ runExceptT $ Q.runTx pool (Q.RepeatableRead, Just Q.ReadWrite) $ do insertInvocation invocation retryOrSetError e retryConf err @@ -260,10 +296,10 @@ retryOrSetError e retryConf err = do parseRetryHeader = mfilter (> 0) . readMaybe . 
T.unpack -mkInvo +mkInvocation :: EventPayload -> Int -> [HeaderConf] -> TBS.TByteString -> [HeaderConf] -> Invocation -mkInvo ep status reqHeaders respBody respHeaders +mkInvocation ep status reqHeaders respBody respHeaders = let resp = if isClientError status then mkClientErr respBody else mkResp status respBody respHeaders @@ -325,7 +361,7 @@ insertInvocation invo = do INSERT INTO hdb_catalog.event_invocation_logs (event_id, status, request, response) VALUES ($1, $2, $3, $4) |] ( iEventId invo - , toInt64 $ iStatus invo + , fromIntegral $ iStatus invo :: Int64 , Q.AltJ $ toJSON $ iRequest invo , Q.AltJ $ toJSON $ iResponse invo) True Q.unitQE defaultTxErrorHandler [Q.sql| @@ -363,6 +399,3 @@ unlockAllEvents = SET locked = 'f' WHERE locked = 't' |] () False - -toInt64 :: (Integral a) => a -> Int64 -toInt64 = fromIntegral diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index 757406bfd90a1..d54bbf3eddfb8 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -17,8 +17,6 @@ module Hasura.Eventing.HTTP , ClientError(..) , isClientError , mkClientErr - , TriggerMetadata(..) - , DeliveryInfo(..) , mkWebhookReq , mkResp , LogEnvHeaders @@ -27,8 +25,6 @@ module Hasura.Eventing.HTTP , getRetryAfterHeaderFromHTTPErr , getRetryAfterHeaderFromResp , parseRetryHeaderValue - , EventPayload(..) - , QualifiedTableStrict(..) 
) where import qualified Data.ByteString as BS @@ -157,42 +153,6 @@ instance ToEngineLog (HTTPErr 'Event) Hasura where instance ToEngineLog (HTTPErr 'Scheduled) Hasura where toEngineLog err = (LevelError, scheduledTriggerLogType, toJSON err) -data DeliveryInfo - = DeliveryInfo - { diCurrentRetry :: Int - , diMaxRetries :: Int - } deriving (Show, Eq) - -$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''DeliveryInfo) - -data TriggerMetadata - = TriggerMetadata { tmName :: TriggerName } - deriving (Show, Eq) - -$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''TriggerMetadata) - -newtype QualifiedTableStrict = QualifiedTableStrict - { getQualifiedTable :: QualifiedTable - } deriving (Show, Eq) - -instance ToJSON QualifiedTableStrict where - toJSON (QualifiedTableStrict (QualifiedObject sn tn)) = - object [ "schema" .= sn - , "name" .= tn - ] - -data EventPayload - = EventPayload - { epId :: EventId - , epTable :: QualifiedTableStrict - , epTrigger :: TriggerMetadata - , epEvent :: Value - , epDeliveryInfo :: DeliveryInfo - , epCreatedAt :: Time.UTCTime - } deriving (Show, Eq) - -$(deriveToJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''EventPayload) - mkHTTPResp :: HTTP.Response LBS.ByteString -> HTTPResp a mkHTTPResp resp = HTTPResp From b0b1c7c21ccd84c631afb13f6a8a536b5af60281 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Fri, 13 Mar 2020 18:24:05 +0530 Subject: [PATCH 080/195] use bracket_ to do the async stuff in event triggers --- .../src-lib/Hasura/Eventing/EventTrigger.hs | 41 +++++++++++-------- 1 file changed, 24 insertions(+), 17 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 7c029679642c4..4aa4745aea695 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -10,10 +10,10 @@ module Hasura.Eventing.EventTrigger ) where import Control.Concurrent.Extended (sleep) -import 
Control.Concurrent.Async (wait, withAsync, async, link) +import Control.Concurrent.Async (wait, withAsync) import Control.Concurrent.STM.TVar -import Control.Exception.Lifted (finally, mask_) import Control.Monad.STM +import Control.Monad.Catch (MonadMask, bracket_) import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH @@ -129,7 +129,7 @@ processEventQueue :: (HasVersion) => L.Logger L.Hasura -> LogEnvHeaders -> HTTP.Manager-> Q.PGPool -> IO SchemaCache -> EventEngineCtx -> IO void -processEventQueue logger logenv httpMgr pool getSchemaCache EventEngineCtx{..} = do +processEventQueue logger logenv httpMgr pool getSchemaCache eeCtx@EventEngineCtx{..} = do events0 <- popEventsBatch go events0 0 False where @@ -155,20 +155,9 @@ processEventQueue logger logenv httpMgr pool getSchemaCache EventEngineCtx{..} = -- worth the effort for something more fine-tuned eventsNext <- withAsync popEventsBatch $ \eventsNextA -> do -- process approximately in order, minding HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE: - forM_ events $ \event -> - mask_ $ do - atomically $ do -- block until < HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE threads: - capacity <- readTVar _eeCtxEventThreadsCapacity - check $ capacity > 0 - writeTVar _eeCtxEventThreadsCapacity (capacity - 1) - -- since there is some capacity in our worker threads, we can launch another: - let restoreCapacity = liftIO $ atomically $ - modifyTVar' _eeCtxEventThreadsCapacity (+ 1) - t <- async $ flip runReaderT (logger, httpMgr) $ - processEvent event `finally` restoreCapacity - link t - - -- return when next batch ready; some 'processEvent' threads may be running. 
+ forM_ events $ \event -> do + res <- runReaderT (withEventEngineCtx eeCtx $ (processEvent event)) (logger, httpMgr) + return res wait eventsNextA let lenEvents = length events @@ -223,6 +212,24 @@ processEventQueue logger logenv httpMgr pool getSchemaCache EventEngineCtx{..} = (processSuccess pool e decodedHeaders ep) res >>= flip onLeft logQErr +withEventEngineCtx :: + ( MonadIO m + , MonadMask m + ) + => EventEngineCtx -> m () -> m () +withEventEngineCtx eeCtx = bracket_ (decrementThreadCount eeCtx) (incrementThreadCount eeCtx) + +incrementThreadCount :: MonadIO m => EventEngineCtx -> m () +incrementThreadCount (EventEngineCtx c _) = liftIO $ atomically $ modifyTVar' c (+1) + +decrementThreadCount :: MonadIO m => EventEngineCtx -> m () +decrementThreadCount (EventEngineCtx c _) = liftIO $ atomically $ do + countThreads <- readTVar c + if countThreads > 0 + then modifyTVar' c (\v -> v - 1) + else retry + + createEventPayload :: RetryConf -> Event -> EventPayload createEventPayload retryConf e = EventPayload { epId = eId e From 72ba58b5e7108de74ea433d1dae17cdce7e483e1 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Sun, 15 Mar 2020 13:04:02 +0530 Subject: [PATCH 081/195] refactor the processEventQueue function --- server/src-lib/Hasura/Eventing/EventTrigger.hs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index 4aa4745aea695..f12f2bf6581a1 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -155,9 +155,8 @@ processEventQueue logger logenv httpMgr pool getSchemaCache eeCtx@EventEngineCtx -- worth the effort for something more fine-tuned eventsNext <- withAsync popEventsBatch $ \eventsNextA -> do -- process approximately in order, minding HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE: - forM_ events $ \event -> do - res <- runReaderT (withEventEngineCtx eeCtx $ (processEvent 
event)) (logger, httpMgr) - return res + forM_ events $ \event -> + runReaderT (withEventEngineCtx eeCtx $ (processEvent event)) (logger, httpMgr) wait eventsNextA let lenEvents = length events From fcb03d7cb5f87b53e2f9a7d33ef201dba8036065 Mon Sep 17 00:00:00 2001 From: Karthikeyan Chinnakonda Date: Mon, 16 Mar 2020 12:11:11 +0530 Subject: [PATCH 082/195] undo all the unrelated js file changes --- .../src/components/Services/Actions/Codegen/Codegen.js | 4 +--- console/src/components/Services/Actions/Landing/Main.js | 4 +--- console/src/components/Services/Data/RawSQL/RawSQL.js | 8 ++------ 3 files changed, 4 insertions(+), 12 deletions(-) diff --git a/console/src/components/Services/Actions/Codegen/Codegen.js b/console/src/components/Services/Actions/Codegen/Codegen.js index c689f92cd7d6c..95e3ae4d931f9 100644 --- a/console/src/components/Services/Actions/Codegen/Codegen.js +++ b/console/src/components/Services/Actions/Codegen/Codegen.js @@ -67,9 +67,7 @@ const Codegen = ({ allActions, allTypes, currentAction }) => { const getDrodown = () => { return ( { }; const renderWheres = (whereAnd, tableSchema, dispatch) => { - const styles = require('../../../Common/FilterQuery/FilterQuery.scss'); return whereAnd.map((clause, i) => { const colName = Object.keys(clause)[0]; const opName = Object.keys(clause[colName])[0]; @@ -110,7 +122,7 @@ const renderWheres = (whereAnd, tableSchema, dispatch) => { return (
- {renderCols(colName, tableSchema, dSetFilterCol, 'filter', i)} + {renderCols(colName, tableSchema, dSetFilterCol, 'filter', i, [])}
{renderOps(opName, dSetFilterOp, i)}
@@ -134,7 +146,7 @@ const renderWheres = (whereAnd, tableSchema, dispatch) => { }; const renderSorts = (orderBy, tableSchema, dispatch) => { - const styles = require('../../../Common/FilterQuery/FilterQuery.scss'); + const currentOrderBy = orderBy.map(o => o.column); return orderBy.map((c, i) => { const dSetOrderCol = e => { dispatch(setOrderCol(e.target.value, i)); @@ -158,7 +170,14 @@ const renderSorts = (orderBy, tableSchema, dispatch) => { return (
- {renderCols(c.column, tableSchema, dSetOrderCol, 'sort', i)} + {renderCols( + c.column, + tableSchema, + dSetOrderCol, + 'sort', + i, + currentOrderBy + )}
); - }); - }; + }; - const getViewRootFieldsSection = () => { - const existingRootFields = getTableCustomRootFields(tableSchema); + const label = () => { + return ( + + {columnName} + {existingCustomColumnNames[columnName] + ? ` → ${existingCustomColumnNames[columnName]}` + : ''} + + ); + }; + + const saveFunc = toggle => { + dispatch( + setViewCustomColumnNames( + customColumnNames, + tableName, + currentSchema, + toggle + ) + ); + }; return ( - -

- Custom GraphQL Root Fields - -

- + -
-
+
); - }; - - const modifyBtn = ( - - ); - - const untrackBtn = ( - - ); + }); + }; - const deleteBtn = ( - - ); + const getViewRootFieldsSection = () => { + const existingRootFields = getTableCustomRootFields(tableSchema); return ( -
- +

+ Custom GraphQL Root Fields + +

+ -
-
-
- -

Columns

- {getViewColumnsSection()} -
-

- View Definition: - {modifyBtn} -

- -
- {getViewRootFieldsSection()} - {untrackBtn} - {deleteBtn} -
-
-
-
{alert}
+
+ + ); + }; + + const modifyViewOnClick = () => { + modifyViewDefinition(tableName); + }; + const modifyBtn = ( + + ); + + const untrackOnclick = () => { + const confirmMessage = `This will remove the view "${tableName}" from the GraphQL schema`; + const isOk = getConfirmation(confirmMessage); + if (isOk) { + dispatch(untrackTableSql(tableName)); + } + }; + + const untrackBtn = ( + + ); + + const deleteOnClick = () => { + const confirmMessage = `This will permanently delete the view "${tableName}" from the database`; + const isOk = getConfirmation(confirmMessage, true, tableName); + if (isOk) { + dispatch(deleteViewSql(tableName)); + } + }; + const deleteBtn = ( + + ); + + return ( +
+ +
+
+
+ +

Columns

+ {getViewColumnsSection()} +
+

+ View Definition: + {modifyBtn} +

+ +
+ {getViewRootFieldsSection()} + {untrackBtn} + {deleteBtn} +
+
+
{alert}
- ); - } -} +
+ ); +}; ModifyView.propTypes = { sql: PropTypes.string.isRequired, diff --git a/console/src/components/Services/Data/TableModify/utils.js b/console/src/components/Services/Data/TableModify/utils.js index f5e0e6ed21e09..976b53de3d035 100644 --- a/console/src/components/Services/Data/TableModify/utils.js +++ b/console/src/components/Services/Data/TableModify/utils.js @@ -82,4 +82,15 @@ export const sanitiseRootFields = rootFields => { return santisedRootFields; }; +export const sanitiseColumnNames = columnNames => { + const sanitised = {}; + Object.keys(columnNames).forEach(c => { + const trimmedCustomName = columnNames[c] ? columnNames[c].trim() : null; + if (trimmedCustomName) { + sanitised[c] = trimmedCustomName; + } + }); + return sanitised; +}; + export { convertArrayToJson, getValidAlterOptions, fetchColumnCastsQuery }; diff --git a/console/src/components/Services/Data/TablePermissions/Actions.js b/console/src/components/Services/Data/TablePermissions/Actions.js index 4566d177f8abf..61947103f061e 100644 --- a/console/src/components/Services/Data/TablePermissions/Actions.js +++ b/console/src/components/Services/Data/TablePermissions/Actions.js @@ -407,7 +407,7 @@ const permRemoveMultipleRoles = tableSchema => { }; }; -const applySamePermissionsBulk = tableSchema => { +const applySamePermissionsBulk = (tableSchema, arePermissionsModified) => { return (dispatch, getState) => { const permissionsUpQueries = []; const permissionsDownQueries = []; @@ -420,30 +420,29 @@ const applySamePermissionsBulk = tableSchema => { const currentQueryType = permissionsState.query; const toBeAppliedPermission = permissionsState[currentQueryType]; - const mainApplyTo = { - table: table, - action: currentQueryType, - role: permissionsState.role, - }; + const permApplyToList = permissionsState.applySamePermissions.filter( + applyTo => applyTo.table && applyTo.action && applyTo.role + ); - const permApplyToList = permissionsState.applySamePermissions - .filter(applyTo => applyTo.table 
&& applyTo.action && applyTo.role) - .concat([mainApplyTo]); + if (arePermissionsModified) { + const mainApplyTo = { + table: table, + action: currentQueryType, + role: permissionsState.role, + }; - let currentPermissions = []; - allSchemas.forEach(tSchema => { - currentPermissions = currentPermissions.concat(tSchema.permissions); - }); + permApplyToList.push(mainApplyTo); + } permApplyToList.map(applyTo => { - const currTableSchema = allSchemas.find( - tSchema => - tSchema.table_name === applyTo.table && - tSchema.table_schema === currentSchema + const currTableSchema = findTable( + allSchemas, + generateTableDef(applyTo.table, currentSchema) + ); + + const currentPermPermission = currTableSchema.permissions.find( + el => el.role_name === applyTo.role ); - const currentPermPermission = currTableSchema.permissions.find(el => { - return el.role_name === applyTo.role; - }); if ( currentPermPermission && @@ -457,6 +456,7 @@ const applySamePermissionsBulk = tableSchema => { role: applyTo.role, }, }; + const createQuery = { type: 'create_' + applyTo.action + '_permission', args: { @@ -465,8 +465,9 @@ const applySamePermissionsBulk = tableSchema => { permission: currentPermPermission.permissions[applyTo.action], }, }; + permissionsUpQueries.push(deleteQuery); - permissionsDownQueries.push(createQuery); + permissionsDownQueries.unshift(createQuery); } // modify query depending on table and action @@ -475,6 +476,7 @@ const applySamePermissionsBulk = tableSchema => { sanitizedPermission.columns = []; sanitizedPermission.set = {}; } + if (applyTo.action === 'insert' && currentQueryType !== 'insert') { sanitizedPermission.check = sanitizedPermission.filter; } else if (applyTo.action !== 'insert' && currentQueryType === 'insert') { @@ -498,7 +500,7 @@ const applySamePermissionsBulk = tableSchema => { }, }; permissionsUpQueries.push(createQuery); - permissionsDownQueries.push(deleteQuery); + permissionsDownQueries.unshift(deleteQuery); }); // Apply migration diff --git 
a/console/src/components/Services/Data/TablePermissions/Permissions.js b/console/src/components/Services/Data/TablePermissions/Permissions.js index 9835fba2686ce..6679359c0d328 100644 --- a/console/src/components/Services/Data/TablePermissions/Permissions.js +++ b/console/src/components/Services/Data/TablePermissions/Permissions.js @@ -526,9 +526,24 @@ class Permissions extends Component { const query = permissionsState.query; - const noPermissions = !permissionsState[query]; + const rolePermissions = tableSchema.permissions.find( + p => p.role_name === permissionsState.role + ); + + const currQueryPermissions = rolePermissions + ? rolePermissions.permissions[permissionsState.query] + : undefined; + + const newQueryPermissions = permissionsState[query]; + + const noPermissions = !newQueryPermissions; + const noPermissionsMsg = 'Set row permissions first'; + const permsChanged = + JSON.stringify(newQueryPermissions) !== + JSON.stringify(currQueryPermissions); + let sectionClasses = styles.editPermsSection; if (noPermissions) { sectionClasses += ' ' + styles.disabled; @@ -1523,7 +1538,7 @@ class Permissions extends Component { const confirmMessage = 'This will overwrite any existing permissions'; const isOk = getConfirmation(confirmMessage); if (isOk) { - dispatch(applySamePermissionsBulk(tableSchema)); + dispatch(applySamePermissionsBulk(tableSchema, permsChanged)); } }; @@ -1717,19 +1732,7 @@ class Permissions extends Component { {value} ); - - const rolePermissions = tableSchema.permissions.find( - p => p.role_name === permissionsState.role - ); - const currQueryPermissions = rolePermissions - ? 
rolePermissions.permissions[permissionsState.query] - : undefined; - const newQueryPermissions = permissionsState[permissionsState.query]; - const applySameSelected = permissionsState.applySamePermissions.length; - const permsChanged = - JSON.stringify(newQueryPermissions) !== - JSON.stringify(currQueryPermissions); const disableSave = applySameSelected || !permsChanged; const disableRemoveAccess = !currQueryPermissions; diff --git a/console/src/components/Services/Data/TableRelationships/Actions.js b/console/src/components/Services/Data/TableRelationships/Actions.js index d0aeb6188d5b4..d9a5f85150fb2 100644 --- a/console/src/components/Services/Data/TableRelationships/Actions.js +++ b/console/src/components/Services/Data/TableRelationships/Actions.js @@ -204,9 +204,7 @@ const deleteRelMigrate = relMeta => (dispatch, getState) => { const relChangesDown = [upQuery]; // Apply migrations - const migrationName = `drop_relationship_${relMeta.relName}_${ - relMeta.lSchema - }_table_${relMeta.lTable}`; + const migrationName = `drop_relationship_${relMeta.relName}_${relMeta.lSchema}_table_${relMeta.lTable}`; const requestMsg = 'Deleting Relationship...'; const successMsg = 'Relationship deleted'; @@ -250,9 +248,7 @@ const addRelNewFromStateMigrate = () => (dispatch, getState) => { const relChangesDown = [downQuery]; // Apply migrations - const migrationName = `add_relationship_${state.name}_table_${ - state.lSchema - }_${state.lTable}`; + const migrationName = `add_relationship_${state.name}_table_${state.lSchema}_${state.lTable}`; const requestMsg = 'Adding Relationship...'; const successMsg = 'Relationship created'; @@ -568,9 +564,7 @@ const autoAddRelName = obj => (dispatch, getState) => { const relChangesDown = [obj.downQuery]; // Apply migrations - const migrationName = `add_relationship_${relName}_table_${currentSchema}_${ - obj.data.tableName - }`; + const migrationName = `add_relationship_${relName}_table_${currentSchema}_${obj.data.tableName}`; const requestMsg = 
'Adding Relationship...'; const successMsg = 'Relationship created'; diff --git a/console/src/components/Services/Data/TableRelationships/ManualRelationshipSelector.js b/console/src/components/Services/Data/TableRelationships/ManualRelationshipSelector.js index 5d5a06fb33793..7753c5c2297b6 100644 --- a/console/src/components/Services/Data/TableRelationships/ManualRelationshipSelector.js +++ b/console/src/components/Services/Data/TableRelationships/ManualRelationshipSelector.js @@ -91,17 +91,17 @@ const ManualRelationshipSelector = ({ disabled={!relAdd.relType || !relAdd.relName} > {// default unselected option - relAdd.rSchema === '' && ( - - )} + relAdd.rSchema === '' && ( + + )} {// all reference schema options - schemaList.map((rs, j) => ( - - ))} + schemaList.map((rs, j) => ( + + ))}
); @@ -208,16 +208,12 @@ const ManualRelationshipSelector = ({ return (
new RegExp(postgresFunctionTester).test(str); + +export const getEstimateCountQuery = (schemaName, tableName) => { + return ` +SELECT + reltuples::BIGINT +FROM + pg_class +WHERE + oid = (quote_ident('${schemaName}') || '.' || quote_ident('${tableName}'))::regclass::oid + AND relname = '${tableName}'; +`; +}; diff --git a/console/src/components/Services/EventTrigger/Common/InvokeManualTrigger/InvokeManualTrigger.js b/console/src/components/Services/EventTrigger/Common/InvokeManualTrigger/InvokeManualTrigger.js index 6e660d7ab45ac..7ddc8a8b2441e 100644 --- a/console/src/components/Services/EventTrigger/Common/InvokeManualTrigger/InvokeManualTrigger.js +++ b/console/src/components/Services/EventTrigger/Common/InvokeManualTrigger/InvokeManualTrigger.js @@ -139,14 +139,10 @@ class InvokeManualTrigger extends React.Component { getEventIdErrorText('Unable to invoke trigger'); const getEventData = () => (
Event ID - {eventInfo} diff --git a/console/src/components/Services/EventTrigger/Modify/HeadersEditor.js b/console/src/components/Services/EventTrigger/Modify/HeadersEditor.js index 7401286237e4b..cc107569b03e9 100644 --- a/console/src/components/Services/EventTrigger/Modify/HeadersEditor.js +++ b/console/src/components/Services/EventTrigger/Modify/HeadersEditor.js @@ -79,9 +79,7 @@ class HeadersEditor extends React.Component {
{ dispatch(setHeaderKey(e.target.value, i)); diff --git a/console/src/components/Services/EventTrigger/Modify/RetryConfEditor.js b/console/src/components/Services/EventTrigger/Modify/RetryConfEditor.js index 4bd21a58419b5..9d1bbce188197 100644 --- a/console/src/components/Services/EventTrigger/Modify/RetryConfEditor.js +++ b/console/src/components/Services/EventTrigger/Modify/RetryConfEditor.js @@ -94,9 +94,7 @@ class RetryConfEditor extends React.Component { dispatch(setRetryNum(e.target.value))} />
@@ -108,9 +106,7 @@ class RetryConfEditor extends React.Component {
dispatch(setRetryInterval(e.target.value))} /> @@ -123,9 +119,7 @@ class RetryConfEditor extends React.Component {
dispatch(setRetryTimeout(e.target.value))} /> diff --git a/console/src/components/Services/EventTrigger/PendingEvents/FilterQuery.js b/console/src/components/Services/EventTrigger/PendingEvents/FilterQuery.js index bd6d854e4135b..6eab7d9c58fca 100644 --- a/console/src/components/Services/EventTrigger/PendingEvents/FilterQuery.js +++ b/console/src/components/Services/EventTrigger/PendingEvents/FilterQuery.js @@ -207,17 +207,13 @@ class FilterQuery extends Component { >
Filter {renderWheres(whereAnd, triggerSchema, dispatch)}
Sort {renderSorts(orderBy, triggerSchema, dispatch)} diff --git a/console/src/components/Services/EventTrigger/PendingEvents/ViewTable.js b/console/src/components/Services/EventTrigger/PendingEvents/ViewTable.js index 7cfb5f363a5da..9a064f284cbc7 100644 --- a/console/src/components/Services/EventTrigger/PendingEvents/ViewTable.js +++ b/console/src/components/Services/EventTrigger/PendingEvents/ViewTable.js @@ -6,6 +6,7 @@ import TableHeader from '../TableCommon/TableHeader'; import ViewRows from './ViewRows'; import { NotFoundError } from '../../../Error/PageNotFound'; +/* const genHeadings = headings => { if (headings.length === 0) { return []; @@ -60,6 +61,7 @@ const genRow = (row, headings) => { throw 'Incomplete pattern match'; // eslint-disable-line no-throw-literal }; +*/ class ViewTable extends Component { constructor(props) { diff --git a/console/src/components/Services/EventTrigger/ProcessedEvents/FilterQuery.js b/console/src/components/Services/EventTrigger/ProcessedEvents/FilterQuery.js index 602a0c2f5b9f9..ae83f676c51a1 100644 --- a/console/src/components/Services/EventTrigger/ProcessedEvents/FilterQuery.js +++ b/console/src/components/Services/EventTrigger/ProcessedEvents/FilterQuery.js @@ -207,17 +207,13 @@ class FilterQuery extends Component { >
Filter {renderWheres(whereAnd, triggerSchema, dispatch)}
Sort {renderSorts(orderBy, triggerSchema, dispatch)} diff --git a/console/src/components/Services/EventTrigger/ProcessedEvents/ViewRows.js b/console/src/components/Services/EventTrigger/ProcessedEvents/ViewRows.js index 9def1e1110339..9b6a2eb40dfaf 100644 --- a/console/src/components/Services/EventTrigger/ProcessedEvents/ViewRows.js +++ b/console/src/components/Services/EventTrigger/ProcessedEvents/ViewRows.js @@ -437,26 +437,26 @@ const ViewRows = ({ > {finalResponse.status_code ? [ - 'Status Code: ', - verifySuccessStatus( - finalResponse.status_code - ) - ? successIcon - : failureIcon, - finalResponse.status_code, - ' ', - -