;
return (
diff --git a/server/graphiql/src/graphiql-vars-live.js b/server/graphiql/src/graphiql-vars-live.js
new file mode 100644
index 0000000000000..b43be1dcb3a15
--- /dev/null
+++ b/server/graphiql/src/graphiql-vars-live.js
@@ -0,0 +1,53 @@
+var query = `
+subscription live_reaction {
+ country {
+ name
+ votes {
+ count: vote_count
+ }
+ comments {
+ comment
+ }
+ }
+}
+
+mutation vote_for_france {
+ insert_country_vote (objects: {country: "France"}) {
+ affected_rows
+ }
+}
+
+mutation vote_for_croatia {
+ insert_country_vote (objects: {country: "Croatia"}) {
+ affected_rows
+ }
+}
+
+mutation comment_on_france {
+ insert_comment (
+ objects: {
+ country: "France"
+ comment: "rot in hell"
+ }
+ ) {
+ affected_rows
+ }
+}
+
+mutation comment_on_croatia {
+ insert_comment (
+ objects: {
+ country: "Croatia"
+ comment: "beautiful football"
+ }
+ ) {
+ affected_rows
+ }
+}
+`;
+var variables = `
+{
+}
+`;
+exports.query = query;
+exports.variables = variables;
diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal
index add39a78ea450..d8314ee2ea7f2 100644
--- a/server/graphql-engine.cabal
+++ b/server/graphql-engine.cabal
@@ -105,7 +105,21 @@ library
, yaml
, template-haskell >= 2.11
+ -- websockets interface related
+ , websockets
+ , wai-websockets
+ , hashtables
+ , stm
+ , stm-containers
+ , list-t
+ , async
+
+ -- logging related
+ , base64-bytestring >= 1.0
+ , auto-update
+
exposed-modules: Hasura.Server.App
+ , Hasura.Server.Auth
, Hasura.Server.Init
, Hasura.Server.Middleware
, Hasura.Server.Logging
@@ -139,14 +153,20 @@ library
, Hasura.RQL.DML.QueryTemplate
, Hasura.RQL.GBoolExp
- , Hasura.GraphQL.Execute
- , Hasura.GraphQL.Execute.Result
+ , Hasura.GraphQL.Transport.HTTP.Protocol
+ , Hasura.GraphQL.Transport.HTTP
+ , Hasura.GraphQL.Transport.WebSocket.Protocol
+ , Hasura.GraphQL.Transport.WebSocket.Server
+ , Hasura.GraphQL.Transport.WebSocket
, Hasura.GraphQL.Schema
, Hasura.GraphQL.Utils
+ , Hasura.GraphQL.Validate
, Hasura.GraphQL.Validate.Types
, Hasura.GraphQL.Validate.Context
, Hasura.GraphQL.Validate.Field
, Hasura.GraphQL.Validate.InputValue
+ , Hasura.GraphQL.Resolve
+ , Hasura.GraphQL.Resolve.LiveQuery
, Hasura.GraphQL.Resolve.BoolExp
, Hasura.GraphQL.Resolve.Context
, Hasura.GraphQL.Resolve.InputValue
@@ -156,6 +176,7 @@ library
, Data.Text.Extended
, Data.Sequence.NonEmpty
+ , Data.TByteString
, Data.HashMap.Strict.InsOrd.Extended
, Hasura.SQL.DML
@@ -164,6 +185,7 @@ library
, Hasura.SQL.GeoJSON
, Hasura.SQL.Time
, Hasura.Prelude
+ , Hasura.Logging
, Ops
, TH
@@ -178,7 +200,7 @@ executable graphql-engine
default-language: Haskell2010
hs-source-dirs: src-exec
build-depends: base
- , Spock-core >= 0.11
+ , warp >= 3.2
, graphql-engine
, aeson >= 1.0
, bytestring >= 0.10
diff --git a/server/src-exec/Main.hs b/server/src-exec/Main.hs
index 237dec6a3c61c..4b5124fd5e2fe 100644
--- a/server/src-exec/Main.hs
+++ b/server/src-exec/Main.hs
@@ -6,10 +6,10 @@ module Main where
import Ops
import Data.Time.Clock (getCurrentTime)
+import qualified Network.Wai.Handler.Warp as Warp
import Options.Applicative
import System.Environment (lookupEnv)
import System.Exit (exitFailure)
-import Web.Spock.Core (runSpockNoBanner, spockT)
import qualified Data.Aeson as A
import qualified Data.ByteString.Char8 as BC
@@ -18,11 +18,12 @@ import qualified Data.ByteString.Lazy.Char8 as BLC
import qualified Data.Text as T
import qualified Data.Yaml as Y
+import Hasura.Logging (mkLoggerCtx, defaultLoggerSettings)
import Hasura.Prelude
import Hasura.RQL.DDL.Metadata (fetchMetadata)
-import Hasura.Server.App (AuthMode (..), app, ravenLogGen)
+import Hasura.Server.App (mkWaiApp)
+import Hasura.Server.Auth (AuthMode (..))
import Hasura.Server.Init
-import Hasura.Server.Logging (withStdoutLogger)
import qualified Database.PG.Query as Q
@@ -99,12 +100,13 @@ mkAuthMode mAccessKey mWebHook =
(Just key, Just hook) -> return $ AMAccessKeyAndHook key hook
main :: IO ()
-main = withStdoutLogger ravenLogGen $ \rlogger -> do
+main = do
(RavenOptions rci ravenMode) <- parseArgs
mEnvDbUrl <- lookupEnv "HASURA_GRAPHQL_DATABASE_URL"
ci <- either ((>> exitFailure) . putStrLn . connInfoErrModifier)
return $ mkConnInfo mEnvDbUrl rci
printConnInfo ci
+ loggerCtx <- mkLoggerCtx defaultLoggerSettings
case ravenMode of
ROServe (ServeOptions port cp isoL mRootDir mAccessKey corsCfg mWebHook enableConsole) -> do
mFinalAccessKey <- considerEnv "HASURA_GRAPHQL_ACCESS_KEY" mAccessKey
@@ -117,9 +119,11 @@ main = withStdoutLogger ravenLogGen $ \rlogger -> do
initialise ci
migrate ci
pool <- Q.initPGPool ci cp
- runSpockNoBanner port $ do
- putStrLn $ "server: running on port " ++ show port
- spockT id $ app isoL mRootDir rlogger pool am finalCorsCfg enableConsole
+ putStrLn $ "server: running on port " ++ show port
+ app <- mkWaiApp isoL mRootDir loggerCtx pool am finalCorsCfg enableConsole
+ let warpSettings = Warp.setPort port Warp.defaultSettings
+ -- Warp.setHost "*" Warp.defaultSettings
+ Warp.runSettings warpSettings app
ROExport -> do
res <- runTx ci fetchMetadata
either ((>> exitFailure) . printJSON) printJSON res
diff --git a/server/src-lib/Data/TByteString.hs b/server/src-lib/Data/TByteString.hs
new file mode 100644
index 0000000000000..f5129542f2691
--- /dev/null
+++ b/server/src-lib/Data/TByteString.hs
@@ -0,0 +1,41 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Data.TByteString
+ ( TByteString
+ , fromText
+ , fromBS
+ , fromLBS
+ ) where
+
+import qualified Data.Aeson as J
+import qualified Data.ByteString as B
+import qualified Data.ByteString.Base64 as B64
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Text as T
+import qualified Data.Text.Encoding as TE
+
+import Data.Bool (bool)
+import Prelude
+
+newtype TByteString
+ = TByteString (Bool, T.Text)
+ deriving (Show, Eq)
+
+instance J.ToJSON TByteString where
+ toJSON (TByteString (isBase64, t)) =
+ bool (J.toJSON t) (J.toJSON ["Base64", t]) isBase64
+
+fromText :: T.Text -> TByteString
+fromText t = TByteString (False, t)
+
+fromBS :: B.ByteString -> TByteString
+fromBS bs =
+ TByteString $
+ -- if the bs is not utf-8 encoded, encode it to Base64
+ case TE.decodeUtf8' bs of
+ Left _ -> (True, TE.decodeUtf8 $ B64.encode bs)
+ Right t -> (False, t)
+
+fromLBS :: BL.ByteString -> TByteString
+fromLBS =
+ fromBS . BL.toStrict
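+
+-- A rough illustration of the encoding above (results shown as JSON):
+--
+-- >  J.encode (fromText "hello")   ==>  "hello"
+-- >  J.encode (fromBS "\255\254")  ==>  ["Base64","//4="]
+--
+-- decodeUtf8' rejects the 0xFF 0xFE bytes, so fromBS falls back to Base64.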
diff --git a/server/src-lib/Hasura/GraphQL/Execute.hs b/server/src-lib/Hasura/GraphQL/Execute.hs
deleted file mode 100644
index cda39bc23f4ab..0000000000000
--- a/server/src-lib/Hasura/GraphQL/Execute.hs
+++ /dev/null
@@ -1,249 +0,0 @@
-{-# LANGUAGE FlexibleContexts #-}
-{-# LANGUAGE NoImplicitPrelude #-}
-{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE TemplateHaskell #-}
-
-module Hasura.GraphQL.Execute
- ( validateGQ
- , GraphQLRequest
- , runGQ
- ) where
-
-import Data.Has
-import Hasura.Prelude
-
-import qualified Data.Aeson as J
-import qualified Data.Aeson.Casing as J
-import qualified Data.Aeson.TH as J
-import qualified Data.ByteString.Lazy as BL
-import qualified Data.HashMap.Strict as Map
-import qualified Data.Text as T
-import qualified Data.Vector as V
-import qualified Database.PG.Query as Q
-import qualified Language.GraphQL.Draft.Parser as G
-import qualified Language.GraphQL.Draft.Syntax as G
-import qualified Hasura.Server.Query as RQ
-
-import Hasura.GraphQL.Execute.Result
-import Hasura.GraphQL.Resolve.Context
-import Hasura.GraphQL.Resolve.Introspect
-import Hasura.GraphQL.Schema
-import Hasura.GraphQL.Validate.Context
-import Hasura.GraphQL.Validate.Field
-import Hasura.GraphQL.Validate.InputValue
-import Hasura.GraphQL.Validate.Types
-import Hasura.RQL.Types
-import Hasura.SQL.Types
-
-import qualified Hasura.GraphQL.Resolve.Mutation as RM
-import qualified Hasura.GraphQL.Resolve.Select as RS
-
-newtype GraphQLQuery
- = GraphQLQuery { unGraphQLQuery :: [G.ExecutableDefinition] }
- deriving (Show, Eq)
-
-instance J.FromJSON GraphQLQuery where
- parseJSON = J.withText "GraphQLQuery" $ \t ->
- case G.parseExecutableDoc t of
- Left _ -> fail "parsing the graphql query failed"
- Right q -> return $ GraphQLQuery $ G.getExecutableDefinitions q
-
-newtype OperationName
- = OperationName { _unOperationName :: G.Name }
- deriving (Show, Eq)
-
-instance J.FromJSON OperationName where
- parseJSON v = OperationName . G.Name <$> J.parseJSON v
-
-type VariableValues = Map.HashMap G.Variable J.Value
-
-data GraphQLRequest
- = GraphQLRequest
- { _grOperationName :: !(Maybe OperationName)
- , _grQuery :: !GraphQLQuery
- , _grVariables :: !(Maybe VariableValues)
- } deriving (Show, Eq)
-
-$(J.deriveFromJSON (J.aesonDrop 3 J.camelCase){J.omitNothingFields=True}
- ''GraphQLRequest
- )
-
-getTypedOp
- :: (MonadError QErr m)
- => Maybe OperationName
- -> [G.SelectionSet]
- -> [G.TypedOperationDefinition]
- -> m G.TypedOperationDefinition
-getTypedOp opNameM selSets opDefs =
- case (opNameM, selSets, opDefs) of
- (Just opName, [], _) -> do
- let n = _unOperationName opName
- opDefM = find (\opDef -> G._todName opDef == Just n) opDefs
- onNothing opDefM $ throwVE $
- "no such operation found in the document: " <> showName n
- (Just _, _, _) ->
- throwVE $ "operationName cannot be used when " <>
- "an anonymous operation exists in the document"
- (Nothing, [selSet], []) ->
- return $ G.TypedOperationDefinition
- G.OperationTypeQuery Nothing [] [] selSet
- (Nothing, [], [opDef]) ->
- return opDef
- (Nothing, _, _) ->
- throwVE $ "exactly one operation has to be present " <>
- "in the document when operationName is not specified"
-
--- For all the variables defined there will be a value in the final map
--- If no default, not in variables and nullable, then null value
-getAnnVarVals
- :: ( MonadReader r m, Has TypeMap r
- , MonadError QErr m
- )
- => [G.VariableDefinition]
- -> VariableValues
- -> m AnnVarVals
-getAnnVarVals varDefsL inpVals = do
-
- varDefs <- onLeft (mkMapWith G._vdVariable varDefsL) $ \dups ->
- throwVE $ "the following variables are defined more than once: " <>
- showVars dups
-
- let unexpectedVars = filter (not . (`Map.member` varDefs)) $ Map.keys inpVals
-
- unless (null unexpectedVars) $
- throwVE $ "unexpected variables in variableValues: " <>
- showVars unexpectedVars
-
- forM varDefs $ \(G.VariableDefinition var ty defM) -> do
- let baseTy = getBaseTy ty
- baseTyInfo <- getTyInfoVE baseTy
- -- check that the variable is defined on input types
- when (isObjTy baseTyInfo) $ throwVE $ objTyErrMsg baseTy
-
- let defM' = bool (defM <|> Just G.VCNull) defM $ G.isNotNull ty
- annDefM <- withPathK "defaultValue" $
- mapM (validateInputValue constValueParser ty) defM'
- let inpValM = Map.lookup var inpVals
- annInpValM <- withPathK "variableValues" $
- mapM (validateInputValue jsonParser ty) inpValM
- let varValM = annInpValM <|> annDefM
- onNothing varValM $ throwVE $ "expecting a value for non-null type: "
- <> G.showGT ty <> " in variableValues"
- where
- objTyErrMsg namedTy =
- "variables can only be defined on input types"
- <> "(enums, scalars, input objects), but "
- <> showNamedTy namedTy <> " is an object type"
-
- showVars :: (Functor f, Foldable f) => f G.Variable -> Text
- showVars = showNames . fmap G.unVariable
-
-validateFrag
- :: (MonadError QErr m, MonadReader r m, Has TypeMap r)
- => G.FragmentDefinition -> m FragDef
-validateFrag (G.FragmentDefinition n onTy dirs selSet) = do
- unless (null dirs) $ throwVE
- "unexpected directives at fragment definition"
- tyInfo <- getTyInfoVE onTy
- objTyInfo <- onNothing (getObjTyM tyInfo) $ throwVE
- "fragments can only be defined on object types"
- return $ FragDef n objTyInfo selSet
-
-{-# SCC validateGQ #-}
-validateGQ
- :: (MonadError QErr m, MonadReader GCtx m)
- => GraphQLRequest
- -> m SelSet
-validateGQ (GraphQLRequest opNameM q varValsM) = do
-
- -- get the operation that needs to be evaluated
- opDef <- getTypedOp opNameM selSets opDefs
-
- ctx <- ask
- -- get the operation root
- opRoot <- case G._todType opDef of
- G.OperationTypeQuery -> return $ _gQueryRoot ctx
- G.OperationTypeMutation -> onNothing (_gMutRoot ctx) $ throwVE
- "no mutations exist"
- _ -> throwVE "subscriptions are not supported"
-
- -- annotate the variables of this operation
- annVarVals <- getAnnVarVals (G._todVariableDefinitions opDef) $
- fromMaybe Map.empty varValsM
-
- -- annotate the fragments
- fragDefs <- onLeft (mkMapWith G._fdName fragDefsL) $ \dups ->
- throwVE $ "the following fragments are defined more than once: " <>
- showNames dups
- annFragDefs <- mapM validateFrag fragDefs
-
- -- build a validation ctx
- let valCtx = ValidationCtx (_gTypes ctx) annVarVals annFragDefs
-
- flip runReaderT valCtx $ denormSelSet [] opRoot $ G._todSelectionSet opDef
-
- where
- (selSets, opDefs, fragDefsL) = G.partitionExDefs $ unGraphQLQuery q
-
-{-# SCC buildTx #-}
-buildTx :: UserInfo -> GCtx -> Field -> Q.TxE QErr BL.ByteString
-buildTx userInfo gCtx fld = do
- opCxt <- getOpCtx $ _fName fld
- tx <- fmap fst $ runConvert (fldMap, orderByCtx) $ case opCxt of
-
- OCSelect tn permFilter hdrs ->
- validateHdrs hdrs >> RS.convertSelect tn permFilter fld
- OCInsert tn vn cols hdrs ->
- validateHdrs hdrs >> RM.convertInsert (tn, vn) cols fld
- OCUpdate tn permFilter hdrs ->
- validateHdrs hdrs >> RM.convertUpdate tn permFilter fld
- OCDelete tn permFilter hdrs ->
- validateHdrs hdrs >> RM.convertDelete tn permFilter fld
- tx
- where
- opCtxMap = _gOpCtxMap gCtx
- fldMap = _gFields gCtx
- orderByCtx = _gOrdByEnums gCtx
-
- getOpCtx f =
- onNothing (Map.lookup f opCtxMap) $ throw500 $
- "lookup failed: opctx: " <> showName f
-
- validateHdrs hdrs = do
- let receivedHdrs = map fst $ userHeaders userInfo
- forM_ hdrs $ \hdr ->
- unless (hdr `elem` map T.toLower receivedHdrs) $
- throw400 NotFound $ hdr <<> " header is expected but not found"
-
-{-# SCC resolveFld #-}
-resolveFld
- :: (MonadIO m, MonadError QErr m)
- => Q.PGPool -> Q.TxIsolation
- -> UserInfo -> GCtx
- -> Field -> m BL.ByteString
-resolveFld pool isoL userInfo gCtx fld =
- case _fName fld of
- "__type" -> J.encode <$> runReaderT (typeR fld) gCtx
- "__schema" -> J.encode <$> runReaderT (schemaR fld) gCtx
- "__typename" -> return $ J.encode J.Null
- _ -> runTx $ buildTx userInfo gCtx fld
- where
- runTx tx =
- Q.runTx pool (isoL, Nothing) $
- RQ.setHeadersTx userInfo >> tx
-
-runGQ
- :: (MonadIO m, MonadError QErr m)
- => Q.PGPool -> Q.TxIsolation
- -> UserInfo -> GCtxMap
- -> GraphQLRequest
- -> m BL.ByteString
-runGQ pool isoL userInfo gCtxMap req = do
- fields <- runReaderT (validateGQ req) gCtx
- -- putLText $ J.encodeToLazyText $ J.toJSON fields
- respFlds <- fmap V.fromList $ forM (toList fields) $ \fld -> do
- fldResp <- resolveFld pool isoL userInfo gCtx fld
- return (G.unName $ G.unAlias $ _fAlias fld, fldResp)
- return $ encodeGQResp $ GQSuccess $ mkJSONObj respFlds
- where
- gCtx = getGCtx (userRole userInfo) gCtxMap
diff --git a/server/src-lib/Hasura/GraphQL/Execute/Result.hs b/server/src-lib/Hasura/GraphQL/Execute/Result.hs
deleted file mode 100644
index fe5624e9b2776..0000000000000
--- a/server/src-lib/Hasura/GraphQL/Execute/Result.hs
+++ /dev/null
@@ -1,54 +0,0 @@
-{-# LANGUAGE NoImplicitPrelude #-}
-{-# LANGUAGE OverloadedStrings #-}
-
-module Hasura.GraphQL.Execute.Result
- ( encodeGQErr
- , encodeJSONObject
- , encodeGQResp
- , mkJSONObj
- , GQResp(..)
- ) where
-
-import Hasura.Prelude
-
-import qualified Data.Aeson as J
-import qualified Data.ByteString.Builder as BB
-import qualified Data.ByteString.Lazy as BL
-import qualified Data.Text.Encoding as TE
-import qualified Data.Vector as V
-
-import Hasura.RQL.Types
-
-encodeGQErr :: Text -> QErr -> J.Value
-encodeGQErr role qErr =
- J.object [ "errors" J..= [encodeQErr role qErr]]
-
-data GQResp
- = GQSuccess BL.ByteString
- | GQPreExecError [J.Value]
- | GQExecError [J.Value]
- deriving (Show, Eq)
-
-encodeJSONObject :: V.Vector (Text, BL.ByteString) -> BB.Builder
-encodeJSONObject xs
- | V.null xs = BB.char7 '{' <> BB.char7 '}'
- | otherwise = BB.char7 '{' <> builder' (V.unsafeHead xs) <>
- V.foldr go (BB.char7 '}') (V.unsafeTail xs)
- where
- go v b = BB.char7 ',' <> builder' v <> b
- -- builds "key":value from (key,value)
- builder' (t, v) =
- BB.char7 '"' <> TE.encodeUtf8Builder t <> BB.string7 "\":"
- <> BB.lazyByteString v
-
-encodeGQResp :: GQResp -> BL.ByteString
-encodeGQResp gqResp =
- buildBS $ case gqResp of
- GQSuccess r -> V.singleton ("data", r)
- GQPreExecError e -> V.singleton ("errors", J.encode e)
- GQExecError e -> V.fromList [("data", "null"), ("errors", J.encode e)]
- where
- buildBS = BB.toLazyByteString . encodeJSONObject
-
-mkJSONObj :: V.Vector (Text, BL.ByteString) -> BL.ByteString
-mkJSONObj = BB.toLazyByteString . encodeJSONObject
diff --git a/server/src-lib/Hasura/GraphQL/Resolve.hs b/server/src-lib/Hasura/GraphQL/Resolve.hs
new file mode 100644
index 0000000000000..f71b08fe56c57
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Resolve.hs
@@ -0,0 +1,89 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.GraphQL.Resolve
+ ( resolveSelSet
+ ) where
+
+import Hasura.Prelude
+
+import qualified Data.Aeson as J
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.HashMap.Strict as Map
+import qualified Database.PG.Query as Q
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.GraphQL.Resolve.Context
+import Hasura.GraphQL.Resolve.Introspect
+import Hasura.GraphQL.Schema
+import Hasura.GraphQL.Transport.HTTP.Protocol
+import Hasura.GraphQL.Validate.Field
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+import qualified Hasura.GraphQL.Resolve.Mutation as RM
+import qualified Hasura.GraphQL.Resolve.Select as RS
+
+-- {-# SCC buildTx #-}
+buildTx :: UserInfo -> GCtx -> Field -> Q.TxE QErr BL.ByteString
+buildTx userInfo gCtx fld = do
+ opCxt <- getOpCtx $ _fName fld
+ join $ fmap fst $ runConvert (fldMap, orderByCtx) $ case opCxt of
+
+ OCSelect tn permFilter hdrs ->
+ validateHdrs hdrs >> RS.convertSelect tn permFilter fld
+ -- RS.convertSelect tn permFilter fld
+ OCInsert tn vn cols hdrs ->
+ validateHdrs hdrs >> RM.convertInsert (tn, vn) cols fld
+ -- RM.convertInsert (tn, vn) cols fld
+ OCUpdate tn permFilter hdrs ->
+ validateHdrs hdrs >> RM.convertUpdate tn permFilter fld
+ -- RM.convertUpdate tn permFilter fld
+ OCDelete tn permFilter hdrs ->
+ validateHdrs hdrs >> RM.convertDelete tn permFilter fld
+ -- RM.convertDelete tn permFilter fld
+ where
+ opCtxMap = _gOpCtxMap gCtx
+ fldMap = _gFields gCtx
+ orderByCtx = _gOrdByEnums gCtx
+
+ getOpCtx f =
+ onNothing (Map.lookup f opCtxMap) $ throw500 $
+ "lookup failed: opctx: " <> showName f
+
+ validateHdrs hdrs = do
+ let receivedHdrs = userHeaders userInfo
+ forM_ hdrs $ \hdr ->
+ unless (Map.member hdr receivedHdrs) $
+ throw400 NotFound $ hdr <<> " header is expected but not found"
+
+-- {-# SCC resolveFld #-}
+resolveFld
+ :: UserInfo -> GCtx
+ -> G.OperationType
+ -> Field
+ -> Q.TxE QErr BL.ByteString
+resolveFld userInfo gCtx opTy fld =
+ case _fName fld of
+ "__type" -> J.encode <$> runReaderT (typeR fld) gCtx
+ "__schema" -> J.encode <$> runReaderT (schemaR fld) gCtx
+ "__typename" -> return $ J.encode $ mkRootTypeName opTy
+ _ -> buildTx userInfo gCtx fld
+ where
+ mkRootTypeName :: G.OperationType -> Text
+ mkRootTypeName = \case
+ G.OperationTypeQuery -> "query_root"
+ G.OperationTypeMutation -> "mutation_root"
+ G.OperationTypeSubscription -> "subscription_root"
+
+resolveSelSet
+ :: UserInfo -> GCtx
+ -> G.OperationType
+ -> SelSet
+ -> Q.TxE QErr BL.ByteString
+resolveSelSet userInfo gCtx opTy fields =
+ fmap mkJSONObj $ forM (toList fields) $ \fld -> do
+ fldResp <- resolveFld userInfo gCtx opTy fld
+ return (G.unName $ G.unAlias $ _fAlias fld, fldResp)
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs b/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs
index c2f9feefa52a0..1cae7ef246797 100644
--- a/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs
@@ -280,6 +280,7 @@ schemaR fld =
sortBy (comparing getNamedTy) $ Map.elems tyMap
"queryType" -> J.toJSON <$> namedTypeR (G.NamedType "query_root") subFld
"mutationType" -> typeR' "mutation_root" subFld
+ "subscriptionType" -> typeR' "subscription_root" subFld
"directives" -> J.toJSON <$> mapM (directiveR subFld)
(sortBy (comparing _diName) defaultDirectives)
_ -> return J.Null
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/LiveQuery.hs b/server/src-lib/Hasura/GraphQL/Resolve/LiveQuery.hs
new file mode 100644
index 0000000000000..a0c9d17cea42a
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Resolve/LiveQuery.hs
@@ -0,0 +1,170 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE FlexibleContexts #-}
+
+module Hasura.GraphQL.Resolve.LiveQuery
+ ( LiveQuery(..)
+ , LiveQueryMap
+ , newLiveQueryMap
+ , addLiveQuery
+ , removeLiveQuery
+ ) where
+
+import qualified Control.Concurrent.Async as A
+import qualified Control.Concurrent.STM as STM
+import qualified Data.ByteString.Lazy as BL
+import qualified ListT
+import qualified STMContainers.Map as STMMap
+
+import Control.Concurrent (threadDelay)
+
+import Hasura.GraphQL.Resolve.Context (RespTx)
+import Hasura.GraphQL.Transport.HTTP.Protocol
+import Hasura.Prelude
+import Hasura.RQL.Types
+
+data LiveQuery
+ = LiveQuery
+ { _lqUser :: !UserInfo
+ , _lqRequest :: !GraphQLRequest
+ } deriving (Show, Eq, Generic)
+
+instance Hashable LiveQuery
+
+type OnChange k = GQResp -> IO ()
+
+data LQHandler k
+ = LQHandler
+ -- the tx to be executed
+ { _lqhRespTx :: !RespTx
+ -- previous result
+ , _lqhPrevRes :: !(STM.TVar (Maybe GQResp))
+ -- the operations that are already subscribed;
+ -- we run these only when the response changes
+ , _lqhCurOps :: !(STMMap.Map k (OnChange k))
+ -- operations added since the last poll; we run these
+ -- regardless, then merge them into the current operations
+ , _lqhNewOps :: !(STMMap.Map k (OnChange k))
+ }
+
+type ThreadTM = STM.TMVar (A.Async ())
+type LiveQueryMap k = STMMap.Map LiveQuery (LQHandler k, ThreadTM)
+
+newLiveQueryMap :: STM.STM (LiveQueryMap k)
+newLiveQueryMap = STMMap.new
+
+type TxRunner = RespTx -> IO (Either QErr BL.ByteString)
+
+removeLiveQuery
+ :: (Eq k, Hashable k)
+ => LiveQueryMap k
+ -- the query and the associated operation
+ -> LiveQuery
+ -> k
+ -> IO ()
+removeLiveQuery lqMap liveQ k = do
+
+ -- clean the handler's state
+ threadRefM <- STM.atomically $ do
+ lqHandlerM <- STMMap.lookup liveQ lqMap
+ maybe (return Nothing) cleanLQHandler lqHandlerM
+
+ -- cancel the polling thread
+ onJust threadRefM A.cancel
+
+ where
+ cleanLQHandler (handler, threadRef) = do
+ let curOps = _lqhCurOps handler
+ newOps = _lqhNewOps handler
+ STMMap.delete k curOps
+ STMMap.delete k newOps
+ cancelPollThread <- (&&)
+ <$> STMMap.null curOps
+ <*> STMMap.null newOps
+ -- if this happens to be the last operation, take the
+ -- ref for the polling thread to cancel it
+ if cancelPollThread then do
+ STMMap.delete liveQ lqMap
+ Just <$> STM.takeTMVar threadRef
+ else return Nothing
+
+onJust :: (Monad m) => Maybe a -> (a -> m ()) -> m ()
+onJust m action = maybe (return ()) action m
+
+addLiveQuery
+ :: (Eq k, Hashable k)
+ => TxRunner
+ -> LiveQueryMap k
+ -- the query
+ -> LiveQuery
+ -- the transaction associated with this query
+ -> RespTx
+ -- a unique operation id
+ -> k
+ -- the action to be executed when result changes
+ -> OnChange k
+ -> IO ()
+addLiveQuery txRunner lqMap liveQ respTx k onResultAction = do
+
+ -- a handler is returned only when it is newly created
+ handlerM <- STM.atomically $ do
+ lqHandlerM <- STMMap.lookup liveQ lqMap
+ maybe newHandler addToExistingHandler lqHandlerM
+
+ -- we can then attach a polling thread if it is new
+ -- the livequery can only be cancelled after putTMVar
+ onJust handlerM $ \(handler, pollerThreadTM) -> do
+ threadRef <- A.async $ forever $ do
+ pollQuery txRunner handler
+ threadDelay $ 1 * 1000 * 1000
+ STM.atomically $ STM.putTMVar pollerThreadTM threadRef
+
+ where
+
+ addToExistingHandler (handler, _) = do
+ STMMap.insert onResultAction k $ _lqhNewOps handler
+ return Nothing
+
+ newHandler = do
+ handler <- LQHandler
+ <$> return respTx
+ <*> STM.newTVar Nothing
+ <*> STMMap.new
+ <*> STMMap.new
+ STMMap.insert onResultAction k $ _lqhNewOps handler
+ asyncRefTM <- STM.newEmptyTMVar
+ STMMap.insert (handler, asyncRefTM) liveQ lqMap
+ return $ Just (handler, asyncRefTM)
+
+pollQuery
+ :: (Eq k, Hashable k)
+ => TxRunner
+ -> LQHandler k
+ -> IO ()
+pollQuery runTx (LQHandler respTx respTV curOpsTV newOpsTV) = do
+
+ res <- runTx respTx
+
+ let resp = case res of
+ Left e -> GQExecError [encodeGQErr False e]
+ Right bs -> GQSuccess bs
+
+ -- extract the current and new operations
+ (curOps, newOps) <- STM.atomically $ do
+ curOpsL <- ListT.toList $ STMMap.stream curOpsTV
+ newOpsL <- ListT.toList $ STMMap.stream newOpsTV
+ forM_ newOpsL $ \(k, action) -> STMMap.insert action k curOpsTV
+ STMMap.deleteAll newOpsTV
+ return (curOpsL, newOpsL)
+
+ runOperations resp newOps
+
+ -- write to the current websockets if needed
+ prevRespM <- STM.readTVarIO respTV
+ when (isExecError resp || Just resp /= prevRespM) $ do
+ runOperations resp curOps
+ STM.atomically $ STM.writeTVar respTV $ Just resp
+
+ where
+ runOperation resp action = action resp
+ runOperations resp =
+ void . A.mapConcurrently (runOperation resp . snd)
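+
+-- A rough usage sketch, assuming a transport such as the WebSocket handler
+-- below keys each subscriber by its (connection id, operation id) pair;
+-- 'sendToClient' is a hypothetical callback:
+--
+-- >  addLiveQuery runTx lqMap (LiveQuery userInfo req) respTx (wsId, opId) $
+-- >    \resp -> sendToClient opId resp
+-- >  ...
+-- >  removeLiveQuery lqMap (LiveQuery userInfo req) (wsId, opId)
+--
+-- The first subscriber of a query spawns a poller that re-runs respTx roughly
+-- every second; identical queries attach to the existing handler, and the
+-- poller is cancelled when the last subscriber is removed.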
diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs
index 993dd17f8aa29..a50c2e4c1feec 100644
--- a/server/src-lib/Hasura/GraphQL/Schema.hs
+++ b/server/src-lib/Hasura/GraphQL/Schema.hs
@@ -39,7 +39,6 @@ import qualified Hasura.SQL.DML as S
defaultTypes :: [TypeInfo]
defaultTypes = $(fromSchemaDocQ defaultSchema)
--- defaultTypes = undefined
type OpCtxMap = Map.HashMap G.Name OpCtx
@@ -61,6 +60,7 @@ data GCtx
, _gOrdByEnums :: !OrdByResolveCtx
, _gQueryRoot :: !ObjTyInfo
, _gMutRoot :: !(Maybe ObjTyInfo)
+ , _gSubRoot :: !(Maybe ObjTyInfo)
, _gOpCtxMap :: !OpCtxMap
} deriving (Show, Eq)
@@ -818,9 +818,14 @@ mkGCtx (TyAgg tyInfos fldInfos ordByEnums) (RootFlds flds) =
scalarTys = map (TIScalar . mkScalarTyInfo) colTys
compTys = map (TIInpObj . mkCompExpInp) colTys
allTys = Map.union tyInfos $ mkTyInfoMap $
- catMaybes [Just $ TIObj queryRoot, TIObj <$> mutRootM] <>
+ catMaybes [ Just $ TIObj queryRoot
+ , TIObj <$> mutRootM
+ , TIObj <$> subRootM
+ ] <>
scalarTys <> compTys <> defaultTypes
- in GCtx allTys fldInfos ordByEnums queryRoot mutRootM $ Map.map fst flds
+ -- for now, the subscription root is the same as the query root
+ in GCtx allTys fldInfos ordByEnums queryRoot mutRootM (Just queryRoot) $
+ Map.map fst flds
where
mkMutRoot =
@@ -829,6 +834,12 @@ mkGCtx (TyAgg tyInfos fldInfos ordByEnums) (RootFlds flds) =
mutRootM = bool (Just $ mkMutRoot mFlds) Nothing $ null mFlds
+ mkSubRoot =
+ mkObjTyInfo (Just "subscription root") (G.NamedType "subscription_root") .
+ mapFromL _fiName
+
+ subRootM = bool (Just $ mkSubRoot qFlds) Nothing $ null qFlds
+
(qFlds, mFlds) = partitionEithers $ map snd $ Map.elems flds
schemaFld = ObjFldInfo Nothing "__schema" Map.empty $ G.toGT $
diff --git a/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs b/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs
new file mode 100644
index 0000000000000..0c8bd6055fdb5
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs
@@ -0,0 +1,39 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.GraphQL.Transport.HTTP
+ ( runGQ
+ ) where
+
+import Hasura.Prelude
+
+import qualified Data.ByteString.Lazy as BL
+import qualified Database.PG.Query as Q
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.GraphQL.Schema
+import Hasura.GraphQL.Transport.HTTP.Protocol
+import Hasura.RQL.Types
+
+import qualified Hasura.GraphQL.Resolve as R
+import qualified Hasura.GraphQL.Validate as VQ
+import qualified Hasura.Server.Query as RQ
+
+runGQ
+ :: (MonadIO m, MonadError QErr m)
+ => Q.PGPool -> Q.TxIsolation
+ -> UserInfo -> GCtxMap
+ -> GraphQLRequest
+ -> m BL.ByteString
+runGQ pool isoL userInfo gCtxMap req = do
+ (opTy, fields) <- runReaderT (VQ.validateGQ req) gCtx
+ when (opTy == G.OperationTypeSubscription) $ throw400 UnexpectedPayload
+ "subscriptions are not supported over HTTP, use websockets instead"
+ resp <- runTx $ R.resolveSelSet userInfo gCtx opTy fields
+ return $ encodeGQResp $ GQSuccess resp
+ where
+ gCtx = getGCtx (userRole userInfo) gCtxMap
+ runTx tx =
+ Q.runTx pool (isoL, Nothing) $
+ RQ.setHeadersTx userInfo >> tx
diff --git a/server/src-lib/Hasura/GraphQL/Transport/HTTP/Protocol.hs b/server/src-lib/Hasura/GraphQL/Transport/HTTP/Protocol.hs
new file mode 100644
index 0000000000000..c7df6a2dc8820
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Transport/HTTP/Protocol.hs
@@ -0,0 +1,107 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.GraphQL.Transport.HTTP.Protocol
+ ( GraphQLRequest(..)
+ , GraphQLQuery(..)
+ , OperationName(..)
+ , VariableValues
+ , encodeGQErr
+ , encodeJSONObject
+ , encodeGQResp
+ , mkJSONObj
+ , GQResp(..)
+ , isExecError
+ ) where
+
+import Hasura.Prelude
+
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
+import qualified Data.ByteString.Builder as BB
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.HashMap.Strict as Map
+import qualified Data.Text.Encoding as TE
+import qualified Data.Vector as V
+import qualified Language.GraphQL.Draft.Parser as G
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.RQL.Types
+
+newtype GraphQLQuery
+ = GraphQLQuery { unGraphQLQuery :: [G.ExecutableDefinition] }
+ deriving (Show, Eq, Hashable)
+
+instance J.FromJSON GraphQLQuery where
+ parseJSON = J.withText "GraphQLQuery" $ \t ->
+ case G.parseExecutableDoc t of
+ Left _ -> fail "parsing the graphql query failed"
+ Right q -> return $ GraphQLQuery $ G.getExecutableDefinitions q
+
+instance J.ToJSON GraphQLQuery where
+ -- TODO: add a pretty printer to graphql-parser
+ toJSON _ = J.String "toJSON not implemented for GraphQLQuery"
+
+newtype OperationName
+ = OperationName { _unOperationName :: G.Name }
+ deriving (Show, Eq, Hashable, J.ToJSON)
+
+instance J.FromJSON OperationName where
+ parseJSON v = OperationName . G.Name <$> J.parseJSON v
+
+type VariableValues = Map.HashMap G.Variable J.Value
+
+data GraphQLRequest
+ = GraphQLRequest
+ { _grOperationName :: !(Maybe OperationName)
+ , _grQuery :: !GraphQLQuery
+ , _grVariables :: !(Maybe VariableValues)
+ } deriving (Show, Eq, Generic)
+
+$(J.deriveJSON (J.aesonDrop 3 J.camelCase){J.omitNothingFields=True}
+ ''GraphQLRequest
+ )
+
+instance Hashable GraphQLRequest
+
+encodeGQErr :: Bool -> QErr -> J.Value
+encodeGQErr includeInternal qErr =
+ J.object [ "errors" J..= [encodeQErr includeInternal qErr]]
+
+data GQResp
+ = GQSuccess BL.ByteString
+ | GQPreExecError [J.Value]
+ | GQExecError [J.Value]
+ deriving (Show, Eq)
+
+isExecError :: GQResp -> Bool
+isExecError = \case
+ GQExecError _ -> True
+ _ -> False
+
+encodeJSONObject :: V.Vector (Text, BL.ByteString) -> BB.Builder
+encodeJSONObject xs
+ | V.null xs = BB.char7 '{' <> BB.char7 '}'
+ | otherwise = BB.char7 '{' <> builder' (V.unsafeHead xs) <>
+ V.foldr go (BB.char7 '}') (V.unsafeTail xs)
+ where
+ go v b = BB.char7 ',' <> builder' v <> b
+ -- builds "key":value from (key,value)
+ builder' (t, v) =
+ BB.char7 '"' <> TE.encodeUtf8Builder t <> BB.string7 "\":"
+ <> BB.lazyByteString v
+
+encodeGQResp :: GQResp -> BL.ByteString
+encodeGQResp gqResp =
+ mkJSONObj $ case gqResp of
+ GQSuccess r -> [("data", r)]
+ GQPreExecError e -> [("errors", J.encode e)]
+ GQExecError e -> [("data", "null"), ("errors", J.encode e)]
+
+mkJSONObj :: [(Text, BL.ByteString)] -> BL.ByteString
+mkJSONObj = BB.toLazyByteString . encodeJSONObject . V.fromList
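+
+-- For illustration, the encoders above produce (results shown as JSON):
+--
+-- >  encodeGQResp (GQSuccess "{\"x\":1}")          ==>  {"data":{"x":1}}
+-- >  encodeGQResp (GQExecError [J.String "boom"])  ==>  {"data":null,"errors":["boom"]}
+--
+-- The GQSuccess payload is spliced in verbatim, so it must already be valid
+-- JSON.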
diff --git a/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs b/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs
new file mode 100644
index 0000000000000..492eb60023880
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs
@@ -0,0 +1,307 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RankNTypes #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.GraphQL.Transport.WebSocket
+ ( createWSServerApp
+ , createWSServerEnv
+ ) where
+
+import qualified Control.Concurrent.Async as A
+import qualified Control.Concurrent.STM as STM
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.CaseInsensitive as CI
+import qualified Data.HashMap.Strict as Map
+import qualified Data.TByteString as TBS
+import qualified Data.Text as T
+import qualified Data.Text.Encoding as TE
+import qualified Language.GraphQL.Draft.Syntax as G
+import qualified ListT
+import qualified Network.HTTP.Client as H
+import qualified Network.HTTP.Types.Status as H
+import qualified Network.WebSockets as WS
+import qualified STMContainers.Map as STMMap
+
+import Control.Concurrent (threadDelay)
+import qualified Data.IORef as IORef
+
+import Hasura.GraphQL.Resolve (resolveSelSet)
+import Hasura.GraphQL.Resolve.Context (RespTx)
+import qualified Hasura.GraphQL.Resolve.LiveQuery as LQ
+import Hasura.GraphQL.Schema (GCtxMap, getGCtx)
+import Hasura.GraphQL.Transport.HTTP.Protocol
+import Hasura.GraphQL.Transport.WebSocket.Protocol
+import qualified Hasura.GraphQL.Transport.WebSocket.Server as WS
+import Hasura.GraphQL.Validate (validateGQ)
+import qualified Hasura.Logging as L
+import Hasura.Prelude
+import Hasura.RQL.Types
+import Hasura.Server.Auth (AuthMode,
+ getUserInfo)
+
+-- uniquely identifies an operation
+type GOperationId = (WS.WSId, OperationId)
+
+type TxRunner = RespTx -> IO (Either QErr BL.ByteString)
+
+type OperationMap
+ = STMMap.Map OperationId LQ.LiveQuery
+
+data WSConnData
+ = WSConnData
+ -- the role and headers are set only on the connection_init message
+ { _wscUser :: !(IORef.IORef (Maybe UserInfo))
+ -- we only care about subscriptions,
+ -- the other operations (query/mutations)
+ -- are not tracked here
+ , _wscOpMap :: !OperationMap
+ }
+
+type LiveQueryMap = LQ.LiveQueryMap GOperationId
+type WSServer = WS.WSServer WSConnData
+
+type WSConn = WS.WSConn WSConnData
+
+sendMsg :: (MonadIO m) => WSConn -> ServerMsg -> m ()
+sendMsg wsConn =
+ liftIO . WS.sendMsg wsConn . encodeServerMsg
+
+data SubsDetail
+ = SDStarted
+ | SDStopped
+ deriving (Show, Eq)
+$(J.deriveToJSON
+ J.defaultOptions { J.constructorTagModifier = J.snakeCase . drop 2
+ , J.sumEncoding = J.TaggedObject "type" "detail"
+ }
+ ''SubsDetail)
+
+data OpDetail
+ = ODCompleted
+ | ODError !QErr
+ deriving (Show, Eq)
+$(J.deriveToJSON
+ J.defaultOptions { J.constructorTagModifier = J.snakeCase . drop 2
+ , J.sumEncoding = J.TaggedObject "type" "detail"
+ }
+ ''OpDetail)
+
+data WSEvent
+ = EAccepted
+ | ERejected !QErr
+ | EProtocolError !TBS.TByteString !ConnErrMsg
+ | EOperation !OperationId !OpDetail
+ | ESubscription !OperationId !SubsDetail
+ | EClosed
+ deriving (Show, Eq)
+$(J.deriveToJSON
+ J.defaultOptions { J.constructorTagModifier = J.snakeCase . drop 1
+ , J.sumEncoding = J.TaggedObject "type" "detail"
+ }
+ ''WSEvent)
+
+data WSLog
+ = WSLog
+ { _wslWebsocketId :: !WS.WSId
+ , _wslEvent :: !WSEvent
+ } deriving (Show, Eq)
+$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''WSLog)
+
+instance L.ToEngineLog WSLog where
+ toEngineLog wsLog =
+ (L.LevelInfo, "ws-handler", J.toJSON wsLog)
+
+data WSServerEnv
+ = WSServerEnv
+ { _wseLogger :: !L.Logger
+ , _wseServer :: !WSServer
+ , _wseRunTx :: !TxRunner
+ , _wseLiveQMap :: !LiveQueryMap
+ , _wseGCtxMap :: !(IORef.IORef (SchemaCache, GCtxMap))
+ , _wseHManager :: !H.Manager
+ }
+
+onConn :: L.Logger -> WS.OnConnH WSConnData
+onConn logger wsId requestHead = do
+ res <- runExceptT checkPath
+ either reject accept res
+ where
+
+ keepAliveAction wsConn = forever $ do
+ sendMsg wsConn SMConnKeepAlive
+ threadDelay $ 5 * 1000 * 1000
+
+ accept _ = do
+ logger $ WSLog wsId EAccepted
+ connData <- WSConnData <$> IORef.newIORef Nothing <*> STMMap.newIO
+ let acceptRequest = WS.defaultAcceptRequest
+ { WS.acceptSubprotocol = Just "graphql-ws"}
+ return $ Right (connData, acceptRequest, Just keepAliveAction)
+
+ reject qErr = do
+ logger $ WSLog wsId $ ERejected qErr
+ return $ Left $ WS.RejectRequest
+ (H.statusCode $ qeStatus qErr)
+ (H.statusMessage $ qeStatus qErr) []
+ (BL.toStrict $ J.encode $ encodeQErr False qErr)
+
+ checkPath =
+ when (WS.requestPath requestHead /= "/v1alpha1/graphql") $
+ throw404 "only /v1alpha1/graphql is supported on websockets"
+
+onStart :: WSServerEnv -> WSConn -> StartMsg -> IO ()
+onStart serverEnv wsConn msg@(StartMsg opId q) = catchAndSend $ do
+
+ opM <- liftIO $ STM.atomically $ STMMap.lookup opId opMap
+
+ when (isJust opM) $ withExceptT preExecErr $ loggingQErr $
+ throw400 UnexpectedPayload $
+ "an operation already exists with this id: " <> unOperationId opId
+
+ userInfoM <- liftIO $ IORef.readIORef userInfoR
+ userInfo <- case userInfoM of
+ Just userInfo -> return userInfo
+ Nothing -> do
+ let err = "start received before the connection is initialised"
+ liftIO $ logger $ WSLog wsId $
+ -- TODO: we are encoding the start msg back into a bytestring
+ -- should we be throwing protocol error here?
+ EProtocolError (TBS.fromLBS $ J.encode msg) err
+ throwError $ SMConnErr err
+
+ -- validate and build tx
+ gCtxMap <- fmap snd $ liftIO $ IORef.readIORef gCtxMapRef
+ let gCtx = getGCtx (userRole userInfo) gCtxMap
+ (opTy, fields) <- withExceptT preExecErr $ loggingQErr $
+ runReaderT (validateGQ q) gCtx
+ let qTx = resolveSelSet userInfo gCtx opTy fields
+
+ case opTy of
+ G.OperationTypeSubscription -> do
+ let lq = LQ.LiveQuery userInfo q
+ liftIO $ STM.atomically $ STMMap.insert lq opId opMap
+ liftIO $ LQ.addLiveQuery runTx lqMap lq
+ qTx (wsId, opId) liveQOnChange
+ liftIO $ logger $ WSLog wsId $ ESubscription opId SDStarted
+
+ _ -> withExceptT postExecErr $ loggingQErr $ do
+ resp <- ExceptT $ runTx qTx
+ sendMsg wsConn $ SMData $ DataMsg opId $ GQSuccess resp
+ sendMsg wsConn $ SMComplete $ CompletionMsg opId
+ liftIO $ logger $ WSLog wsId $ EOperation opId ODCompleted
+
+ where
+ (WSServerEnv logger _ runTx lqMap gCtxMapRef _) = serverEnv
+ wsId = WS.getWSId wsConn
+ (WSConnData userInfoR opMap) = WS.getData wsConn
+
+ -- on change, send message on the websocket
+ liveQOnChange resp = WS.sendMsg wsConn $ encodeServerMsg $ SMData $
+ DataMsg opId resp
+
+ loggingQErr m = catchError m $ \qErr -> do
+ liftIO $ logger $ WSLog wsId $ EOperation opId $ ODError qErr
+ throwError qErr
+
+ preExecErr qErr = SMErr $ ErrorMsg opId $ encodeQErr False qErr
+ postExecErr qErr = SMData $ DataMsg opId $ GQExecError
+ [encodeQErr False qErr]
+
+ catchAndSend :: ExceptT ServerMsg IO () -> IO ()
+ catchAndSend m = do
+ res <- runExceptT m
+ either (sendMsg wsConn) return res
+
+onMessage
+ :: AuthMode
+ -> WSServerEnv
+ -> WSConn -> BL.ByteString -> IO ()
+onMessage authMode serverEnv wsConn msgRaw =
+ case J.eitherDecode msgRaw of
+ Left e -> do
+ let err = ConnErrMsg $ "parsing ClientMessage failed: " <> T.pack e
+ liftIO $ logger $ WSLog (WS.getWSId wsConn) $
+ EProtocolError (TBS.fromLBS msgRaw) err
+ sendMsg wsConn $ SMConnErr err
+
+ Right msg -> case msg of
+ CMConnInit params -> onConnInit (_wseHManager serverEnv)
+ wsConn authMode params
+ CMStart startMsg -> onStart serverEnv wsConn startMsg
+ CMStop stopMsg -> onStop serverEnv wsConn stopMsg
+ CMConnTerm -> WS.closeConn wsConn "GQL_CONNECTION_TERMINATE received"
+ where
+ logger = _wseLogger serverEnv
+
+onStop :: WSServerEnv -> WSConn -> StopMsg -> IO ()
+onStop serverEnv wsConn (StopMsg opId) = do
+ -- probably wrap the whole thing in a single tx?
+ opM <- liftIO $ STM.atomically $ STMMap.lookup opId opMap
+ case opM of
+ Just liveQ -> do
+ liftIO $ logger $ WSLog wsId $ ESubscription opId SDStopped
+ LQ.removeLiveQuery lqMap liveQ (wsId, opId)
+ Nothing -> return ()
+ STM.atomically $ STMMap.delete opId opMap
+ where
+ logger = _wseLogger serverEnv
+ lqMap = _wseLiveQMap serverEnv
+ wsId = WS.getWSId wsConn
+ opMap = _wscOpMap $ WS.getData wsConn
+
+onConnInit
+ :: (MonadIO m)
+ => H.Manager -> WSConn -> AuthMode -> ConnParams -> m ()
+onConnInit manager wsConn authMode connParams = do
+ res <- runExceptT $ getUserInfo manager headers authMode
+ case res of
+ Left e ->
+ liftIO $ WS.closeConn wsConn $
+ BL.fromStrict $ TE.encodeUtf8 $ qeError e
+ Right userInfo -> do
+ liftIO $ IORef.writeIORef (_wscUser $ WS.getData wsConn) $ Just userInfo
+ sendMsg wsConn SMConnAck
+ -- TODO: send it periodically? Why doesn't apollo's protocol use
+ -- ping/pong frames of websocket spec?
+ sendMsg wsConn SMConnKeepAlive
+ where
+ headers = [ (CI.mk $ TE.encodeUtf8 h, TE.encodeUtf8 v)
+ | (h, v) <- maybe [] Map.toList $ _cpHeaders connParams
+ ]
+
+onClose
+ :: L.Logger
+ -> LiveQueryMap
+ -> WS.ConnectionException
+ -> WSConn
+ -> IO ()
+onClose logger lqMap _ wsConn = do
+ logger $ WSLog wsId EClosed
+ operations <- STM.atomically $ ListT.toList $ STMMap.stream opMap
+ void $ A.forConcurrently operations $ \(opId, liveQ) ->
+ LQ.removeLiveQuery lqMap liveQ (wsId, opId)
+ where
+ wsId = WS.getWSId wsConn
+ opMap = _wscOpMap $ WS.getData wsConn
+
+createWSServerEnv
+ :: L.Logger
+ -> H.Manager -> IORef.IORef (SchemaCache, GCtxMap)
+ -> TxRunner -> IO WSServerEnv
+createWSServerEnv logger httpManager gCtxMapRef runTx = do
+ (wsServer, lqMap) <-
+ STM.atomically $ (,) <$> WS.createWSServer logger <*> LQ.newLiveQueryMap
+ return $ WSServerEnv logger wsServer runTx lqMap gCtxMapRef httpManager
+
+createWSServerApp :: AuthMode -> WSServerEnv -> WS.ServerApp
+createWSServerApp authMode serverEnv =
+ WS.createServerApp (_wseServer serverEnv) handlers
+ where
+ handlers =
+ WS.WSHandlers
+ (onConn $ _wseLogger serverEnv)
+ (onMessage authMode serverEnv)
+ (onClose (_wseLogger serverEnv) $ _wseLiveQMap serverEnv)
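+
+-- A rough sketch of a subscription session over the "graphql-ws" subprotocol
+-- accepted above (frames shown as JSON, "..." elided):
+--
+-- >  client: {"type":"connection_init","payload":{"headers":{...}}}
+-- >  server: {"type":"connection_ack"}, then periodic {"type":"ka"}
+-- >  client: {"type":"start","id":"1","payload":{"query":"subscription {...}"}}
+-- >  server: {"type":"data","id":"1","payload":{"data":{...}}} on every change
+-- >  client: {"type":"stop","id":"1"} or {"type":"connection_terminate"}
+--
+-- Queries and mutations sent over the same socket are answered with a single
+-- "data" frame followed by a "complete" frame.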
diff --git a/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Protocol.hs b/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Protocol.hs
new file mode 100644
index 0000000000000..0274d65162e7e
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Protocol.hs
@@ -0,0 +1,154 @@
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.GraphQL.Transport.WebSocket.Protocol
+ ( OperationId(..)
+ , ConnParams(..)
+ , StartMsg(..)
+ , StopMsg(..)
+ , ClientMsg(..)
+ , ServerMsg(..)
+ , encodeServerMsg
+ , DataMsg(..)
+ , ErrorMsg(..)
+ , ConnErrMsg(..)
+ , CompletionMsg(..)
+ ) where
+
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.HashMap.Strict as Map
+
+import Hasura.GraphQL.Transport.HTTP.Protocol
+import Hasura.Prelude
+
+newtype OperationId
+ = OperationId { unOperationId :: Text }
+ deriving (Show, Eq, J.ToJSON, J.FromJSON, Hashable)
+
+data StartMsg
+ = StartMsg
+ { _smId :: !OperationId
+ , _smPayload :: !GraphQLRequest
+ } deriving (Show, Eq)
+$(J.deriveJSON (J.aesonDrop 3 J.snakeCase) ''StartMsg)
+
+data StopMsg
+ = StopMsg
+ { _stId :: OperationId
+ } deriving (Show, Eq)
+$(J.deriveJSON (J.aesonDrop 3 J.snakeCase) ''StopMsg)
+
+data ClientMsg
+ = CMConnInit !ConnParams
+ | CMStart !StartMsg
+ | CMStop !StopMsg
+ | CMConnTerm
+ deriving (Show, Eq)
+
+data ConnParams
+ = ConnParams
+ { _cpHeaders :: Maybe (Map.HashMap Text Text)
+ } deriving (Show, Eq)
+$(J.deriveJSON (J.aesonDrop 3 J.snakeCase) ''ConnParams)
+
+instance J.FromJSON ClientMsg where
+ parseJSON = J.withObject "ClientMessage" $ \obj -> do
+ t <- obj J..: "type"
+ case t of
+ "connection_init" -> CMConnInit <$> obj J..: "payload"
+ "start" -> CMStart <$> J.parseJSON (J.Object obj)
+ "stop" -> CMStop <$> J.parseJSON (J.Object obj)
+ "connection_terminate" -> return CMConnTerm
+ _ -> fail $ "unexpected type for ClientMessage: " <> t
+
+-- server to client messages
+
+data DataMsg
+ = DataMsg
+ { _dmId :: !OperationId
+ , _dmPayload :: !GQResp
+ } deriving (Show, Eq)
+
+data ErrorMsg
+ = ErrorMsg
+ { _emId :: !OperationId
+ , _emPayload :: !J.Value
+ } deriving (Show, Eq)
+
+newtype CompletionMsg
+ = CompletionMsg { unCompletionMsg :: OperationId }
+ deriving (Show, Eq)
+
+newtype ConnErrMsg
+ = ConnErrMsg { unConnErrMsg :: Text }
+ deriving (Show, Eq, J.ToJSON, J.FromJSON, IsString)
+
+data ServerMsg
+ = SMConnAck
+ | SMConnKeepAlive
+ | SMConnErr !ConnErrMsg
+ | SMData !DataMsg
+ | SMErr !ErrorMsg
+ | SMComplete !CompletionMsg
+ deriving (Show, Eq)
+
+data ServerMsgType
+ = SMT_GQL_CONNECTION_ACK
+ | SMT_GQL_CONNECTION_KEEP_ALIVE
+ | SMT_GQL_CONNECTION_ERROR
+ | SMT_GQL_DATA
+ | SMT_GQL_ERROR
+ | SMT_GQL_COMPLETE
+ deriving (Eq)
+
+instance Show ServerMsgType where
+ show = \case
+ SMT_GQL_CONNECTION_ACK -> "connection_ack"
+ SMT_GQL_CONNECTION_KEEP_ALIVE -> "ka"
+ SMT_GQL_CONNECTION_ERROR -> "connection_error"
+ SMT_GQL_DATA -> "data"
+ SMT_GQL_ERROR -> "error"
+ SMT_GQL_COMPLETE -> "complete"
+
+instance J.ToJSON ServerMsgType where
+ toJSON = J.toJSON . show
+
+encodeServerMsg :: ServerMsg -> BL.ByteString
+encodeServerMsg msg =
+ mkJSONObj $ case msg of
+
+ SMConnAck ->
+ [encTy SMT_GQL_CONNECTION_ACK]
+
+ SMConnKeepAlive ->
+ [encTy SMT_GQL_CONNECTION_KEEP_ALIVE]
+
+ SMConnErr connErr ->
+ [ encTy SMT_GQL_CONNECTION_ERROR
+ , ("payload", J.encode connErr)
+ ]
+
+ SMData (DataMsg opId payload) ->
+ [ encTy SMT_GQL_DATA
+ , ("id", J.encode opId)
+ , ("payload", encodeGQResp payload)
+ ]
+
+ SMErr (ErrorMsg opId payload) ->
+ [ encTy SMT_GQL_ERROR
+ , ("id", J.encode opId)
+ , ("payload", J.encode payload)
+ ]
+
+ SMComplete compMsg ->
+ [ encTy SMT_GQL_COMPLETE
+ , ("id", J.encode $ unCompletionMsg compMsg)
+ ]
+
+ where
+ encTy ty = ("type", J.encode ty)
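+
+-- For illustration, 'encodeServerMsg' yields frames such as:
+--
+-- >  encodeServerMsg SMConnAck
+-- >    ==>  {"type":"connection_ack"}
+-- >  encodeServerMsg (SMComplete (CompletionMsg (OperationId "1")))
+-- >    ==>  {"type":"complete","id":"1"}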
diff --git a/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Server.hs b/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Server.hs
new file mode 100644
index 0000000000000..79f9a77903719
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Server.hs
@@ -0,0 +1,190 @@
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RankNTypes #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.GraphQL.Transport.WebSocket.Server
+ ( WSId(..)
+
+ , WSConn
+ , getData
+ , getWSId
+ , closeConn
+ , sendMsg
+
+ , OnConnH
+ , OnCloseH
+ , OnMessageH
+ , WSHandlers(..)
+
+ , WSServer
+ , createWSServer
+ , closeAll
+ , createServerApp
+ ) where
+
+import qualified Control.Concurrent.Async as A
+import qualified Control.Concurrent.STM as STM
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.TByteString as TBS
+import qualified Data.UUID as UUID
+import qualified Data.UUID.V4 as UUID
+import qualified ListT
+import qualified Network.WebSockets as WS
+import qualified STMContainers.Map as STMMap
+
+import Control.Exception (try)
+import qualified Hasura.Logging as L
+import Hasura.Prelude
+
+newtype WSId
+ = WSId { unWSId :: UUID.UUID }
+ deriving (Show, Eq, Hashable)
+
+instance J.ToJSON WSId where
+ toJSON (WSId uuid) =
+ J.toJSON $ UUID.toText uuid
+
+data WSEvent
+ = EConnectionRequest
+ | EAccepted
+ | ERejected
+ | EMessageReceived !TBS.TByteString
+ | EMessageSent !TBS.TByteString
+ | ECloseReceived
+ | ECloseSent !TBS.TByteString
+ | EClosed
+ deriving (Show, Eq)
+$(J.deriveToJSON
+ J.defaultOptions { J.constructorTagModifier = J.snakeCase . drop 1
+ , J.sumEncoding = J.TaggedObject "type" "detail"
+ }
+ ''WSEvent)
+
+data WSLog
+ = WSLog
+ { _wslWebsocketId :: !WSId
+ , _wslEvent :: !WSEvent
+ } deriving (Show, Eq)
+$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''WSLog)
+
+instance L.ToEngineLog WSLog where
+ toEngineLog wsLog =
+ (L.LevelDebug, "ws-server", J.toJSON wsLog)
+
+data WSConn a
+ = WSConn
+ { _wcConnId :: !WSId
+ , _wcLogger :: !L.Logger
+ , _wcConnRaw :: !WS.Connection
+ , _wcSendQ :: !(STM.TQueue BL.ByteString)
+ , _wcExtraData :: !a
+ }
+
+getData :: WSConn a -> a
+getData = _wcExtraData
+
+getWSId :: WSConn a -> WSId
+getWSId = _wcConnId
+
+closeConn :: WSConn a -> BL.ByteString -> IO ()
+closeConn wsConn bs = do
+ _wcLogger wsConn $ WSLog (_wcConnId wsConn) $ ECloseSent $ TBS.fromLBS bs
+ WS.sendClose (_wcConnRaw wsConn) bs
+
+-- writes to a queue instead of the raw connection
+-- so that sendMsg doesn't block
+sendMsg :: WSConn a -> BL.ByteString -> IO ()
+sendMsg wsConn msg =
+ STM.atomically $ STM.writeTQueue (_wcSendQ wsConn) msg
+
+type ConnMap a = STMMap.Map WSId (WSConn a)
+
+data WSServer a
+ = WSServer
+ { _wssLogger :: L.Logger
+ , _wssConnMap :: ConnMap a
+ }
+
+createWSServer :: L.Logger -> STM.STM (WSServer a)
+createWSServer logger = WSServer logger <$> STMMap.new
+
+closeAll :: WSServer a -> BL.ByteString -> IO ()
+closeAll (WSServer writeLog connMap) msg = do
+ writeLog $ L.debugT "closing all connections"
+ conns <- STM.atomically $ do
+ conns <- ListT.toList $ STMMap.stream connMap
+ STMMap.deleteAll connMap
+ return conns
+ void $ A.mapConcurrently (flip closeConn msg . snd) conns
+
+type AcceptWith a = (a, WS.AcceptRequest, Maybe (WSConn a -> IO ()))
+type OnConnH a = WSId -> WS.RequestHead ->
+ IO (Either WS.RejectRequest (AcceptWith a))
+type OnCloseH a = WS.ConnectionException -> WSConn a -> IO ()
+type OnMessageH a = WSConn a -> BL.ByteString -> IO ()
+
+data WSHandlers a
+ = WSHandlers
+ { _hOnConn :: OnConnH a
+ , _hOnMessage :: OnMessageH a
+ , _hOnClose :: OnCloseH a
+ }
+
+createServerApp
+ :: WSServer a
+ -- user provided handlers
+ -> WSHandlers a
+ -- aka WS.ServerApp
+ -> WS.PendingConnection -> IO ()
+createServerApp (WSServer writeLog connMap) wsHandlers pendingConn = do
+ wsId <- WSId <$> UUID.nextRandom
+ writeLog $ WSLog wsId EConnectionRequest
+ let reqHead = WS.pendingRequest pendingConn
+ onConnRes <- _hOnConn wsHandlers wsId reqHead
+ either (onReject wsId) (onAccept wsId) onConnRes
+
+ where
+ onReject wsId rejectRequest = do
+ WS.rejectRequestWith pendingConn rejectRequest
+ writeLog $ WSLog wsId ERejected
+
+ onAccept wsId (a, acceptWithParams, keepAliveM) = do
+ conn <- WS.acceptRequestWith pendingConn acceptWithParams
+ writeLog $ WSLog wsId EAccepted
+
+ sendQ <- STM.newTQueueIO
+ let wsConn = WSConn wsId writeLog conn sendQ a
+ STM.atomically $ STMMap.insert wsConn wsId connMap
+
+ rcvRef <- A.async $ forever $ do
+ msg <- WS.receiveData conn
+ writeLog $ WSLog wsId $ EMessageReceived $ TBS.fromLBS msg
+ _hOnMessage wsHandlers wsConn msg
+
+ sendRef <- A.async $ forever $ do
+ msg <- STM.atomically $ STM.readTQueue sendQ
+ WS.sendTextData conn msg
+ writeLog $ WSLog wsId $ EMessageSent $ TBS.fromLBS msg
+
+ keepAliveRefM <- forM keepAliveM $ \action -> A.async $ action wsConn
+
+ -- terminates on WS.ConnectionException
+ let waitOnRefs = maybeToList keepAliveRefM <> [rcvRef, sendRef]
+ res <- try $ A.waitAnyCancel waitOnRefs
+
+ case res of
+ Left e -> do
+ writeLog $ WSLog (_wcConnId wsConn) ECloseReceived
+ onConnClose e wsConn
+ -- this never happens, as the receive and send threads never finish on their own
+ Right _ -> return ()
+
+ onConnClose e wsConn = do
+ STM.atomically $ STMMap.delete (_wcConnId wsConn) connMap
+ _hOnClose wsHandlers e wsConn
+ writeLog $ WSLog (_wcConnId wsConn) EClosed
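+
+-- A minimal wiring sketch, assuming wai-websockets (added to the cabal file
+-- above) is used to mount this next to the HTTP application; 'wsServerApp'
+-- and 'httpApp' are hypothetical names:
+--
+-- >  import qualified Network.Wai.Handler.WebSockets as WaiWS
+-- >  import qualified Network.WebSockets as WS
+-- >
+-- >  waiApp = WaiWS.websocketsOr WS.defaultConnectionOptions wsServerApp httpApp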
diff --git a/server/src-lib/Hasura/GraphQL/Validate.hs b/server/src-lib/Hasura/GraphQL/Validate.hs
new file mode 100644
index 0000000000000..d9bea05c44dd4
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Validate.hs
@@ -0,0 +1,141 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.GraphQL.Validate
+ ( validateGQ
+ , GraphQLRequest
+ ) where
+
+import Data.Has
+import Hasura.Prelude
+
+import qualified Data.HashMap.Strict as Map
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.GraphQL.Schema
+import Hasura.GraphQL.Transport.HTTP.Protocol
+import Hasura.GraphQL.Validate.Context
+import Hasura.GraphQL.Validate.Field
+import Hasura.GraphQL.Validate.InputValue
+import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.Types
+
+getTypedOp
+ :: (MonadError QErr m)
+ => Maybe OperationName
+ -> [G.SelectionSet]
+ -> [G.TypedOperationDefinition]
+ -> m G.TypedOperationDefinition
+getTypedOp opNameM selSets opDefs =
+ case (opNameM, selSets, opDefs) of
+ (Just opName, [], _) -> do
+ let n = _unOperationName opName
+ opDefM = find (\opDef -> G._todName opDef == Just n) opDefs
+ onNothing opDefM $ throwVE $
+ "no such operation found in the document: " <> showName n
+ (Just _, _, _) ->
+ throwVE $ "operationName cannot be used when " <>
+ "an anonymous operation exists in the document"
+ (Nothing, [selSet], []) ->
+ return $ G.TypedOperationDefinition
+ G.OperationTypeQuery Nothing [] [] selSet
+ (Nothing, [], [opDef]) ->
+ return opDef
+ (Nothing, _, _) ->
+ throwVE $ "exactly one operation has to be present " <>
+ "in the document when operationName is not specified"
+
+-- For every variable that is defined, there will be a value in the final map.
+-- If a variable has no default value, is absent from variableValues, and its
+-- type is nullable, its value is null.
+getAnnVarVals
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m
+ )
+ => [G.VariableDefinition]
+ -> VariableValues
+ -> m AnnVarVals
+getAnnVarVals varDefsL inpVals = do
+
+ varDefs <- onLeft (mkMapWith G._vdVariable varDefsL) $ \dups ->
+ throwVE $ "the following variables are defined more than once: " <>
+ showVars dups
+
+ let unexpectedVars = filter (not . (`Map.member` varDefs)) $ Map.keys inpVals
+
+ unless (null unexpectedVars) $
+ throwVE $ "unexpected variables in variableValues: " <>
+ showVars unexpectedVars
+
+ forM varDefs $ \(G.VariableDefinition var ty defM) -> do
+ let baseTy = getBaseTy ty
+ baseTyInfo <- getTyInfoVE baseTy
+ -- check that the variable is defined on input types
+ when (isObjTy baseTyInfo) $ throwVE $ objTyErrMsg baseTy
+
+ let defM' = bool (defM <|> Just G.VCNull) defM $ G.isNotNull ty
+ annDefM <- withPathK "defaultValue" $
+ mapM (validateInputValue constValueParser ty) defM'
+ let inpValM = Map.lookup var inpVals
+ annInpValM <- withPathK "variableValues" $
+ mapM (validateInputValue jsonParser ty) inpValM
+ let varValM = annInpValM <|> annDefM
+ onNothing varValM $ throwVE $ "expecting a value for non-null type: "
+ <> G.showGT ty <> " in variableValues"
+ where
+ objTyErrMsg namedTy =
+ "variables can only be defined on input types "
+ <> "(enums, scalars, input objects), but "
+ <> showNamedTy namedTy <> " is an object type"
+
+ showVars :: (Functor f, Foldable f) => f G.Variable -> Text
+ showVars = showNames . fmap G.unVariable
+
+validateFrag
+ :: (MonadError QErr m, MonadReader r m, Has TypeMap r)
+ => G.FragmentDefinition -> m FragDef
+validateFrag (G.FragmentDefinition n onTy dirs selSet) = do
+ unless (null dirs) $ throwVE
+ "unexpected directives at fragment definition"
+ tyInfo <- getTyInfoVE onTy
+ objTyInfo <- onNothing (getObjTyM tyInfo) $ throwVE
+ "fragments can only be defined on object types"
+ return $ FragDef n objTyInfo selSet
+
+-- {-# SCC validateGQ #-}
+validateGQ
+ :: (MonadError QErr m, MonadReader GCtx m)
+ => GraphQLRequest
+ -> m (G.OperationType, SelSet)
+validateGQ (GraphQLRequest opNameM q varValsM) = do
+
+ -- get the operation that needs to be evaluated
+ opDef <- getTypedOp opNameM selSets opDefs
+
+ ctx <- ask
+ -- get the operation root
+ opRoot <- case G._todType opDef of
+ G.OperationTypeQuery -> return $ _gQueryRoot ctx
+ G.OperationTypeMutation ->
+ onNothing (_gMutRoot ctx) $ throwVE "no mutations exist"
+ G.OperationTypeSubscription ->
+ onNothing (_gSubRoot ctx) $ throwVE "no subscriptions exist"
+
+ -- annotate the variables of this operation
+ annVarVals <- getAnnVarVals (G._todVariableDefinitions opDef) $
+ fromMaybe Map.empty varValsM
+
+ -- annotate the fragments
+ fragDefs <- onLeft (mkMapWith G._fdName fragDefsL) $ \dups ->
+ throwVE $ "the following fragments are defined more than once: " <>
+ showNames dups
+ annFragDefs <- mapM validateFrag fragDefs
+
+ -- build a validation ctx
+ let valCtx = ValidationCtx (_gTypes ctx) annVarVals annFragDefs
+
+ selSet <- flip runReaderT valCtx $ denormSelSet [] opRoot $
+ G._todSelectionSet opDef
+ return (G._todType opDef, selSet)
+ where
+ (selSets, opDefs, fragDefsL) = G.partitionExDefs $ unGraphQLQuery q
diff --git a/server/src-lib/Hasura/Logging.hs b/server/src-lib/Hasura/Logging.hs
new file mode 100644
index 0000000000000..376db192b0c5e
--- /dev/null
+++ b/server/src-lib/Hasura/Logging.hs
@@ -0,0 +1,146 @@
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RankNTypes #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.Logging
+ ( LoggerSettings(..)
+ , defaultLoggerSettings
+ , EngineLog(..)
+ , EngineLogType
+ , ToEngineLog(..)
+ , debugT
+ , debugBS
+ , debugLBS
+ , Logger
+ , LogLevel(..)
+ , mkLogger
+ , LoggerCtx
+ , mkLoggerCtx
+ , cleanLoggerCtx
+ ) where
+
+import Hasura.Prelude
+
+import qualified Control.AutoUpdate as Auto
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
+import qualified Data.ByteString as B
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.TByteString as TBS
+import qualified Data.Text as T
+import qualified Data.Time.Clock as Time
+import qualified Data.Time.Format as Format
+import qualified Data.Time.LocalTime as Time
+import qualified System.Log.FastLogger as FL
+
+newtype FormattedTime
+ = FormattedTime { _unFormattedTime :: Text }
+ deriving (Show, Eq, J.ToJSON)
+
+newtype EngineLogType
+ = EngineLogType { _unEngineLogType :: Text }
+ deriving (Show, Eq, J.ToJSON, J.FromJSON, IsString)
+
+data LogLevel
+ = LevelDebug
+ | LevelInfo
+ | LevelWarn
+ | LevelError
+ | LevelOther Text
+ deriving (Show, Eq, Ord)
+
+instance J.ToJSON LogLevel where
+ toJSON = J.toJSON . \case
+ LevelDebug -> "debug"
+ LevelInfo -> "info"
+ LevelWarn -> "warn"
+ LevelError -> "error"
+ LevelOther t -> t
+
+data EngineLog
+ = EngineLog
+ { _elTimestamp :: !FormattedTime
+ , _elLevel :: !LogLevel
+ , _elType :: !EngineLogType
+ , _elDetail :: !J.Value
+ } deriving (Show, Eq)
+$(J.deriveToJSON (J.aesonDrop 3 J.snakeCase) ''EngineLog)
+
+
+newtype UnstructuredLog
+ = UnstructuredLog { _unUnstructuredLog :: TBS.TByteString }
+ deriving (Show, Eq)
+
+debugT :: Text -> UnstructuredLog
+debugT = UnstructuredLog . TBS.fromText
+
+debugBS :: B.ByteString -> UnstructuredLog
+debugBS = UnstructuredLog . TBS.fromBS
+
+debugLBS :: BL.ByteString -> UnstructuredLog
+debugLBS = UnstructuredLog . TBS.fromLBS
+
+instance ToEngineLog UnstructuredLog where
+ toEngineLog (UnstructuredLog t) =
+ (LevelDebug, "unstructured", J.toJSON t)
+
+class ToEngineLog a where
+ toEngineLog :: a -> (LogLevel, EngineLogType, J.Value)
+
+data LoggerCtx
+ = LoggerCtx
+ { _lcLoggerSet :: !FL.LoggerSet
+ , _lcLogLevel :: !LogLevel
+ , _lcTimeGetter :: !(IO FormattedTime)
+ }
+
+data LoggerSettings
+ = LoggerSettings
+ -- whether the current time should be cached (refreshed once a second)
+ { _lsCachedTimestamp :: !Bool
+ , _lsTimeZone :: !(Maybe Time.TimeZone)
+ , _lsLevel :: !LogLevel
+ } deriving (Show, Eq)
+
+defaultLoggerSettings :: LoggerSettings
+defaultLoggerSettings =
+ LoggerSettings True Nothing LevelInfo
+
+getFormattedTime :: Maybe Time.TimeZone -> IO FormattedTime
+getFormattedTime tzM = do
+ tz <- maybe Time.getCurrentTimeZone return tzM
+ t <- Time.getCurrentTime
+ let zt = Time.utcToZonedTime tz t
+ return $ FormattedTime $ T.pack $ formatTime zt
+ where
+ formatTime = Format.formatTime Format.defaultTimeLocale format
+ format = "%FT%T%z"
+ -- format = Format.iso8601DateFormat (Just "%H:%M:%S")
+
+mkLoggerCtx :: LoggerSettings -> IO LoggerCtx
+mkLoggerCtx (LoggerSettings cacheTime tzM logLevel) = do
+ loggerSet <- FL.newStdoutLoggerSet FL.defaultBufSize
+ timeGetter <- bool (return $ getFormattedTime tzM) cachedTimeGetter cacheTime
+ return $ LoggerCtx loggerSet logLevel timeGetter
+ where
+ cachedTimeGetter =
+ Auto.mkAutoUpdate Auto.defaultUpdateSettings {
+ Auto.updateAction = getFormattedTime tzM
+ }
+
+cleanLoggerCtx :: LoggerCtx -> IO ()
+cleanLoggerCtx =
+ FL.rmLoggerSet . _lcLoggerSet
+
+type Logger = forall a. (ToEngineLog a) => a -> IO ()
+
+mkLogger :: LoggerCtx -> Logger
+mkLogger (LoggerCtx loggerSet serverLogLevel timeGetter) l = do
+ localTime <- timeGetter
+ let (logLevel, logTy, logDet) = toEngineLog l
+ when (logLevel >= serverLogLevel) $
+ FL.pushLogStrLn loggerSet $ FL.toLogStr $
+ J.encode $ EngineLog localTime logLevel logTy logDet
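A minimal usage sketch for the logging API introduced above (StartupLog, its port field, and the log messages are made up for illustration):

{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson ((.=))
import qualified Data.Aeson as J

import Hasura.Logging

-- Hypothetical structured log type; anything with a ToEngineLog instance
-- can be pushed through the logger.
newtype StartupLog = StartupLog { _slPort :: Int }

instance ToEngineLog StartupLog where
  toEngineLog (StartupLog port) =
    (LevelInfo, "startup", J.object ["port" .= port])

main :: IO ()
main = do
  loggerCtx <- mkLoggerCtx defaultLoggerSettings
  -- structured, JSON-encoded log line on stdout
  mkLogger loggerCtx $ StartupLog 8080
  -- unstructured debug line; suppressed here, since the default level is LevelInfo
  mkLogger loggerCtx $ debugT "server initialised"
  cleanLoggerCtx loggerCtx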
diff --git a/server/src-lib/Hasura/Prelude.hs b/server/src-lib/Hasura/Prelude.hs
index 641e98a655d87..94b0782abb2eb 100644
--- a/server/src-lib/Hasura/Prelude.hs
+++ b/server/src-lib/Hasura/Prelude.hs
@@ -12,12 +12,14 @@ import Data.Bool as M (bool)
import Data.Either as M (lefts, partitionEithers, rights)
import Data.Foldable as M (toList)
import Data.Hashable as M (Hashable)
-import Data.List as M (foldl', group, sortBy, find)
+import Data.List as M (find, foldl', group, sortBy)
import Data.Maybe as M (catMaybes, fromMaybe, isJust,
listToMaybe, mapMaybe,
maybeToList)
import Data.Ord as M (comparing)
import Data.Semigroup as M (Semigroup (..))
+import Data.String as M (IsString)
import Data.Text as M (Text)
+import GHC.Generics as M (Generic)
import Prelude as M hiding (fail, init, lookup)
import Text.Read as M (readEither, readMaybe)
diff --git a/server/src-lib/Hasura/RQL/DML/Internal.hs b/server/src-lib/Hasura/RQL/DML/Internal.hs
index fe0b60f32098f..83af333b6cbca 100644
--- a/server/src-lib/Hasura/RQL/DML/Internal.hs
+++ b/server/src-lib/Hasura/RQL/DML/Internal.hs
@@ -242,7 +242,7 @@ mkColExtrAl alM (c, pct) =
-- validate headers
validateHeaders :: (P1C m) => [T.Text] -> m ()
validateHeaders depHeaders = do
- headers <- (map fst) . userHeaders <$> askUserInfo
+ headers <- M.keys . userHeaders <$> askUserInfo
forM_ depHeaders $ \hdr ->
unless (hdr `elem` map T.toLower headers) $
throw400 NotFound $ hdr <<> " header is expected but not found"
diff --git a/server/src-lib/Hasura/RQL/Types/Error.hs b/server/src-lib/Hasura/RQL/Types/Error.hs
index 8e0e2564077e4..33db692041d04 100644
--- a/server/src-lib/Hasura/RQL/Types/Error.hs
+++ b/server/src-lib/Hasura/RQL/Types/Error.hs
@@ -6,7 +6,7 @@ module Hasura.RQL.Types.Error
( Code(..)
, QErr(..)
, encodeQErr
- , nonAdminQErrEnc
+ , noInternalQErrEnc
, err400
, err404
, err401
@@ -121,17 +121,18 @@ instance ToJSON QErr where
, "internal" .= ie
]
-nonAdminQErrEnc :: QErr -> Value
-nonAdminQErrEnc (QErr jPath _ msg code _) =
+noInternalQErrEnc :: QErr -> Value
+noInternalQErrEnc (QErr jPath _ msg code _) =
object
[ "path" .= encodeJSONPath jPath
, "error" .= msg
, "code" .= show code
]
-encodeQErr :: T.Text -> QErr -> Value
-encodeQErr "admin" = toJSON
-encodeQErr _ = nonAdminQErrEnc
+-- whether internal error details should be included in the response
+encodeQErr :: Bool -> QErr -> Value
+encodeQErr True = toJSON
+encodeQErr _ = noInternalQErrEnc
encodeJSONPath :: JSONPath -> String
encodeJSONPath = format "$"
@@ -171,8 +172,8 @@ type QErrM m = (MonadError QErr m)
throw400 :: (QErrM m) => Code -> T.Text -> m a
throw400 c t = throwError $ err400 c t
-throw404 :: (QErrM m) => Code -> T.Text -> m a
-throw404 c t = throwError $ err404 c t
+throw404 :: (QErrM m) => T.Text -> m a
+throw404 t = throwError $ err404 NotFound t
throw401 :: (QErrM m) => T.Text -> m a
throw401 t = throwError $ err401 AccessDenied t
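A sketch of the new call-site convention for encodeQErr: the caller passes whether the requester may see internal error detail, rather than matching on the literal role string "admin" (compare mkSpockAction in Server/App.hs further down). It assumes QErr, encodeQErr, UserInfo, userRole and adminRole are re-exported from Hasura.RQL.Types:

import Data.Aeson (Value)

import Hasura.RQL.Types

-- Only admin requesters get the "internal" field in the error response.
encodeErrForUser :: UserInfo -> QErr -> Value
encodeErrForUser userInfo = encodeQErr (userRole userInfo == adminRole)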
diff --git a/server/src-lib/Hasura/RQL/Types/Permission.hs b/server/src-lib/Hasura/RQL/Types/Permission.hs
index a262c93fe6517..ca313b0b79cbb 100644
--- a/server/src-lib/Hasura/RQL/Types/Permission.hs
+++ b/server/src-lib/Hasura/RQL/Types/Permission.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveLift #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
@@ -14,8 +15,8 @@ module Hasura.RQL.Types.Permission
, PermId(..)
) where
-import Hasura.SQL.Types
import Hasura.Prelude
+import Hasura.SQL.Types
import qualified Database.PG.Query as Q
@@ -25,6 +26,7 @@ import Data.Word
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
+import qualified Data.HashMap.Strict as Map
import qualified Data.Text as T
import qualified PostgreSQL.Binary.Decoding as PD
@@ -45,11 +47,13 @@ newtype UserId = UserId { getUserId :: Word64 }
data UserInfo
= UserInfo
{ userRole :: !RoleName
- , userHeaders :: ![(T.Text, T.Text)]
- } deriving (Show, Eq)
+ , userHeaders :: !(Map.HashMap T.Text T.Text)
+ } deriving (Show, Eq, Generic)
+
+instance Hashable UserInfo
adminUserInfo :: UserInfo
-adminUserInfo = UserInfo adminRole [("X-Hasura-User-Id", "0")]
+adminUserInfo = UserInfo adminRole Map.empty
data PermType
= PTInsert
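The Generic/Hashable derivation above lets UserInfo serve as a hash-map key, presumably so per-user state can be shared (e.g. in the live-query layer). A contrived sketch, assuming UserInfo and adminUserInfo are exported from this module:

import qualified Data.HashMap.Strict as Map

import Hasura.RQL.Types.Permission (UserInfo, adminUserInfo)

-- Contrived: count requests per distinct (role, headers) combination.
countRequest :: UserInfo -> Map.HashMap UserInfo Int -> Map.HashMap UserInfo Int
countRequest u = Map.insertWith (+) u 1

exampleCounts :: Map.HashMap UserInfo Int
exampleCounts = countRequest adminUserInfo Map.empty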
diff --git a/server/src-lib/Hasura/SQL/Types.hs b/server/src-lib/Hasura/SQL/Types.hs
index 83b31d93cef76..c2850c650cb5c 100644
--- a/server/src-lib/Hasura/SQL/Types.hs
+++ b/server/src-lib/Hasura/SQL/Types.hs
@@ -12,7 +12,6 @@ import Hasura.Prelude
import Data.Aeson
import Data.Aeson.Encoding (text)
-import GHC.Generics
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
diff --git a/server/src-lib/Hasura/Server/App.hs b/server/src-lib/Hasura/Server/App.hs
index a5331ee97253f..bc0eff007f24d 100644
--- a/server/src-lib/Hasura/Server/App.hs
+++ b/server/src-lib/Hasura/Server/App.hs
@@ -2,64 +2,57 @@
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
module Hasura.Server.App where
import Control.Concurrent.MVar
-import Control.Exception (try)
-import Control.Lens hiding ((.=))
-import Data.Char (isSpace)
import Data.IORef
-import Crypto.Hash (Digest, SHA1, hash)
-import Data.Aeson hiding (json)
-import qualified Data.ByteString.Lazy as BL
-import Data.CaseInsensitive (CI (..), original)
-import qualified Data.HashMap.Strict as M
-import qualified Data.String.Conversions as CS
-import qualified Data.Text as T
-import qualified Data.Text.Lazy as TL
-import qualified Data.Text.Lazy.Encoding as TLE
-import Data.Time.Clock (UTCTime,
- getCurrentTime)
-import qualified Network.Connection as NC
-import qualified Network.HTTP.Client as H
-import qualified Network.HTTP.Client.TLS as HT
-import Network.Wai (strictRequestBody)
-import qualified Network.Wreq as Wq
-import qualified Network.Wreq.Types as WqT
-import qualified Text.Mustache as M
-import qualified Text.Mustache.Compile as M
+import Data.Aeson hiding (json)
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.HashMap.Strict as M
+import qualified Data.Text as T
+import Data.Time.Clock (UTCTime,
+ getCurrentTime)
+import Network.Wai (requestHeaders,
+ strictRequestBody)
+import qualified Text.Mustache as M
+import qualified Text.Mustache.Compile as M
import Web.Spock.Core
-import qualified Network.HTTP.Types as N
-import qualified Network.Wai.Internal as WI
-import qualified Network.Wai.Middleware.Static as MS
+import qualified Network.HTTP.Client as HTTP
+import qualified Network.HTTP.Client.TLS as HTTP
+import qualified Network.Wai.Middleware.Static as MS
-import qualified Data.Text.Encoding.Error as TE
-import qualified Database.PG.Query as Q
-import qualified Hasura.GraphQL.Execute as GE
-import qualified Hasura.GraphQL.Execute.Result as GE
-import qualified Hasura.GraphQL.Schema as GS
+import qualified Database.PG.Query as Q
+import qualified Hasura.GraphQL.Schema as GS
+import qualified Hasura.GraphQL.Transport.HTTP as GH
+import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
+import qualified Hasura.GraphQL.Transport.WebSocket as WS
+import qualified Network.Wai as Wai
+import qualified Network.Wai.Handler.WebSockets as WS
+import qualified Network.WebSockets as WS
-import Hasura.Prelude hiding (get, put)
+import Hasura.Prelude hiding (get, put)
import Hasura.RQL.DDL.Schema.Table
import Hasura.RQL.DML.Explain
import Hasura.RQL.DML.QueryTemplate
import Hasura.RQL.Types
import Hasura.Server.Init
import Hasura.Server.Logging
-import Hasura.Server.Middleware (corsMiddleware,
- mkDefaultCorsPolicy)
+import Hasura.Server.Middleware (corsMiddleware,
+ mkDefaultCorsPolicy)
import Hasura.Server.Query
import Hasura.Server.Utils
import Hasura.Server.Version
import Hasura.SQL.Types
-type RavenLogger = ServerLogger (BL.ByteString, Either QErr BL.ByteString)
+import qualified Hasura.Logging as L
+import Hasura.Server.Auth (AuthMode, getUserInfo)
consoleTmplt :: M.Template
consoleTmplt = $(M.embedSingleTemplate "src-rsr/console.html")
@@ -73,50 +66,27 @@ mkConsoleHTML =
errMsg = "Fatal Error : console template rendering failed"
++ show errs
-ravenLogGen :: LogDetailG (BL.ByteString, Either QErr BL.ByteString)
-ravenLogGen _ (reqBody, res) =
-
- (status, toJSON <$> logDetail, Just qh, Just size)
- where
- status = either qeStatus (const N.status200) res
- logDetail = either (Just . qErrToLogDetail) (const Nothing) res
- reqBodyTxt = TL.filter (not . isSpace) $ decodeLBS reqBody
- qErrToLogDetail qErr =
- LogDetail reqBodyTxt $ toJSON qErr
- size = BL.length $ either encode id res
- qh = T.pack . show $ sha1 reqBody
- sha1 :: BL.ByteString -> Digest SHA1
- sha1 = hash . BL.toStrict
-
-decodeLBS :: BL.ByteString -> TL.Text
-decodeLBS = TLE.decodeUtf8With TE.lenientDecode
-
-data AuthMode
- = AMNoAuth
- | AMAccessKey !T.Text
- | AMAccessKeyAndHook !T.Text !T.Text
- deriving (Show, Eq)
-
data ServerCtx
= ServerCtx
- { scIsolation :: Q.TxIsolation
- , scPGPool :: Q.PGPool
- , scLogger :: RavenLogger
- , scCacheRef :: IORef (SchemaCache, GS.GCtxMap)
- , scCacheLock :: MVar ()
- , scServerMode :: AuthMode
+ { scIsolation :: Q.TxIsolation
+ , scPGPool :: Q.PGPool
+ , scLogger :: L.Logger
+ , scCacheRef :: IORef (SchemaCache, GS.GCtxMap)
+ , scCacheLock :: MVar ()
+ , scAuthMode :: AuthMode
+ , scManager :: HTTP.Manager
}
data HandlerCtx
= HandlerCtx
{ hcServerCtx :: ServerCtx
, hcReqBody :: BL.ByteString
- , hcHeaders :: [(T.Text, T.Text)]
+ , hcUser :: UserInfo
}
type Handler = ExceptT QErr (ReaderT HandlerCtx IO)
-{-# SCC parseBody #-}
+-- {-# SCC parseBody #-}
parseBody :: (FromJSON a) => Handler a
parseBody = do
reqBody <- hcReqBody <$> ask
@@ -129,107 +99,19 @@ filterHeaders hdrs = flip filter hdrs $ \(h, _) ->
isXHasuraTxt h && (T.toLower h /= userRoleHeader)
&& (T.toLower h /= accessKeyHeader)
-parseUserInfo :: Handler UserInfo
-parseUserInfo = do
- headers <- hcHeaders <$> ask
- let mUserRoleTuple = flip find headers $ \hdr ->
- userRoleHeader == T.toLower (fst hdr)
- mUserRoleV = snd <$> mUserRoleTuple
- userRoleV = fromMaybe "admin" mUserRoleV
- return $ UserInfo (RoleName userRoleV) $ filterHeaders headers
-
onlyAdmin :: Handler ()
onlyAdmin = do
- (UserInfo uRole _) <- parseUserInfo
+ uRole <- asks (userRole . hcUser)
when (uRole /= adminRole) $
throw400 AccessDenied "You have to be an admin to access this endpoint"
buildQCtx :: Handler QCtx
buildQCtx = do
scRef <- scCacheRef . hcServerCtx <$> ask
- userInfo <- parseUserInfo
+ userInfo <- asks hcUser
cache <- liftIO $ readIORef scRef
return $ QCtx userInfo $ fst cache
-httpToQErr :: H.HttpException -> QErr
-httpToQErr e = case e of
- H.InvalidUrlException _ _ -> err500 Unexpected "Invalid Webhook Url"
- H.HttpExceptionRequest _ H.ConnectionTimeout -> err500 Unexpected
- "Webhook : Connection timeout"
- H.HttpExceptionRequest _ H.ResponseTimeout -> err500 Unexpected
- "Webhook : Response timeout"
- _ -> err500 Unexpected "HTTP Exception from Webhook"
-
-fromWebHook
- :: (MonadIO m)
- => T.Text
- -> [N.Header]
- -> ExceptT QErr m [(T.Text, T.Text)]
-fromWebHook urlT reqHeaders = do
- manager <- liftIO $
- H.newManager $ HT.mkManagerSettings tlsSimple Nothing
- let options = Wq.defaults
- { WqT.headers = filteredHeaders
- , WqT.checkResponse = Just (\_ _ -> return ())
- , WqT.manager = Right manager
- }
- respWithExcept <- liftIO $ try $ Wq.getWith options $ T.unpack urlT
- resp <- either (throwError . httpToQErr) return respWithExcept
- let status = resp ^. Wq.responseStatus
- validateStatus status
- webHookResp <- decodeBS $ resp ^. Wq.responseBody
- return $ M.toList webHookResp
-
- where
- filteredHeaders = flip filter reqHeaders $ \(n, _) ->
- n /= "Content-Length" && n /= "User-Agent" && n /= "Host"
- && n /= "Origin" && n /= "Referer"
- tlsSimple = NC.TLSSettingsSimple True False False
-
- validateStatus statusCode
- | statusCode == N.status200 = return ()
- | statusCode == N.status401 = throw401
- "Authentication hook unauthorized this request"
- | otherwise = throw500
- "Invalid response from authorization hook"
-
- decodeBS bs = case eitherDecode bs of
- Left e -> throw500 $
- "Invalid response from authorization hook; " <> T.pack e
- Right a -> return a
-
-fetchHeaders
- :: (MonadIO m)
- => WI.Request
- -> Maybe T.Text
- -> AuthMode
- -> ExceptT QErr m [(T.Text, T.Text)]
-fetchHeaders req mReqAccessKey authMode =
- case authMode of
- AMNoAuth -> return headers
-
- AMAccessKey accKey -> do
- reqAccessKey <- maybe accessKeyAuthErr return mReqAccessKey
- validateKeyAndReturnHeaders accKey reqAccessKey
-
- AMAccessKeyAndHook accKey hook ->
- maybe (fromWebHook hook rawHeaders)
- (validateKeyAndReturnHeaders accKey)
- mReqAccessKey
- where
- rawHeaders = WI.requestHeaders req
- headers = headersTxt rawHeaders
-
- validateKeyAndReturnHeaders key reqKey = do
- when (reqKey /= key) accessKeyAuthErr
- return headers
-
- accessKeyAuthErr = throw401 "access keys don't match or not found"
-
- headersTxt hdrsRaw =
- flip map hdrsRaw $ \(hdrName, hdrVal) ->
- (CS.cs $ original hdrName, CS.cs hdrVal)
-
logResult
:: (MonadIO m)
=> ServerCtx
@@ -239,7 +121,7 @@ logResult
logResult sc res qTime = do
req <- request
reqBody <- liftIO $ strictRequestBody req
- liftIO $ logger req (reqBody, res) qTime
+ liftIO $ logger $ mkAccessLog req (reqBody, res) qTime
where
logger = scLogger sc
@@ -248,21 +130,21 @@ logError sc qErr = logResult sc (Left qErr) Nothing
mkSpockAction
:: (MonadIO m)
- => (T.Text -> QErr -> Value)
+ => (Bool -> QErr -> Value)
-> ServerCtx
-> Handler BL.ByteString
-> ActionT m ()
mkSpockAction qErrEncoder serverCtx handler = do
req <- request
reqBody <- liftIO $ strictRequestBody req
- role <- fromMaybe "admin" <$> header userRoleHeader
+ let headers = requestHeaders req
+ authMode = scAuthMode serverCtx
+ manager = scManager serverCtx
- accKeyHeader <- header accessKeyHeader
- headersRes <- runExceptT $
- fetchHeaders req accKeyHeader $ scServerMode serverCtx
- headers <- either (logAndThrow role) return headersRes
+ userInfoE <- liftIO $ runExceptT $ getUserInfo manager headers authMode
+ userInfo <- either (logAndThrow False) return userInfoE
- let handlerState = HandlerCtx serverCtx reqBody headers
+ let handlerState = HandlerCtx serverCtx reqBody userInfo
t1 <- liftIO getCurrentTime -- for measuring response time purposes
result <- liftIO $ runReaderT (runExceptT handler) handlerState
@@ -270,22 +152,22 @@ mkSpockAction qErrEncoder serverCtx handler = do
-- log result
logResult serverCtx result $ Just (t1, t2)
- either (qErrToResp role) resToResp result
+ either (qErrToResp $ userRole userInfo == adminRole) resToResp result
+
where
-- encode error response
- qErrToResp role qErr = do
+ qErrToResp includeInternal qErr = do
setStatus $ qeStatus qErr
- json $ qErrEncoder role qErr
+ json $ qErrEncoder includeInternal qErr
- logAndThrow role qErr = do
+ logAndThrow includeInternal qErr = do
logError serverCtx qErr
- qErrToResp role qErr
+ qErrToResp includeInternal qErr
resToResp resp = do
uncurry setHeader jsonHeader
lazyBytes resp
-
withLock :: (MonadIO m, MonadError e m)
=> MVar () -> m a -> m a
withLock lk action = do
@@ -298,21 +180,21 @@ withLock lk action = do
acquireLock = liftIO $ takeMVar lk
releaseLock = liftIO $ putMVar lk ()
-v1ExplainHandler :: RQLExplain -> Handler BL.ByteString
-v1ExplainHandler expQuery = dbAction
- where
- dbAction = do
- onlyAdmin
- scRef <- scCacheRef . hcServerCtx <$> ask
- schemaCache <- liftIO $ readIORef scRef
- pool <- scPGPool . hcServerCtx <$> ask
- isoL <- scIsolation . hcServerCtx <$> ask
- runExplainQuery pool isoL userInfo (fst schemaCache) selectQ
-
- selectQ = rqleQuery expQuery
- role = rqleRole expQuery
- headers = M.toList $ rqleHeaders expQuery
- userInfo = UserInfo role headers
+-- v1ExplainHandler :: RQLExplain -> Handler BL.ByteString
+-- v1ExplainHandler expQuery = dbAction
+-- where
+-- dbAction = do
+-- onlyAdmin
+-- scRef <- scCacheRef . hcServerCtx <$> ask
+-- schemaCache <- liftIO $ readIORef scRef
+-- pool <- scPGPool . hcServerCtx <$> ask
+-- isoL <- scIsolation . hcServerCtx <$> ask
+-- runExplainQuery pool isoL userInfo (fst schemaCache) selectQ
+
+-- selectQ = rqleQuery expQuery
+-- role = rqleRole expQuery
+-- headers = M.toList $ rqleHeaders expQuery
+-- userInfo = UserInfo role headers
v1QueryHandler :: RQLQuery -> Handler BL.ByteString
v1QueryHandler query = do
@@ -322,7 +204,7 @@ v1QueryHandler query = do
where
-- Hit postgres
dbAction = do
- userInfo <- parseUserInfo
+ userInfo <- asks hcUser
scRef <- scCacheRef . hcServerCtx <$> ask
schemaCache <- liftIO $ readIORef scRef
pool <- scPGPool . hcServerCtx <$> ask
@@ -337,14 +219,14 @@ v1QueryHandler query = do
liftIO $ writeIORef scRef (newSc, newGCtxMap)
return resp
-v1Alpha1GQHandler :: GE.GraphQLRequest -> Handler BL.ByteString
+v1Alpha1GQHandler :: GH.GraphQLRequest -> Handler BL.ByteString
v1Alpha1GQHandler query = do
- userInfo <- parseUserInfo
+ userInfo <- asks hcUser
scRef <- scCacheRef . hcServerCtx <$> ask
cache <- liftIO $ readIORef scRef
pool <- scPGPool . hcServerCtx <$> ask
isoL <- scIsolation . hcServerCtx <$> ask
- GE.runGQ pool isoL userInfo (snd cache) query
+ GH.runGQ pool isoL userInfo (snd cache) query
-- v1Alpha1GQSchemaHandler :: Handler BL.ByteString
-- v1Alpha1GQSchemaHandler = do
@@ -377,31 +259,46 @@ legacyQueryHandler :: TableName -> T.Text -> Handler BL.ByteString
legacyQueryHandler tn queryType =
case M.lookup queryType queryParsers of
Just queryParser -> getQueryParser queryParser qt >>= v1QueryHandler
- Nothing -> throw404 NotFound "No such resource exists"
+ Nothing -> throw404 "No such resource exists"
where
qt = QualifiedTable publicSchema tn
-app
+mkWaiApp
:: Q.TxIsolation
-> Maybe String
- -> RavenLogger
+ -> L.LoggerCtx
-> Q.PGPool
-> AuthMode
-> CorsConfig
-> Bool
- -> SpockT IO ()
-app isoLevel mRootDir logger pool mode corsCfg enableConsole = do
- cacheRef <- lift $ do
+ -> IO Wai.Application
+mkWaiApp isoLevel mRootDir loggerCtx pool mode corsCfg enableConsole = do
+ cacheRef <- do
pgResp <- liftIO $ runExceptT $ Q.runTx pool (Q.Serializable, Nothing) $ do
Q.catchE defaultTxErrorHandler initStateTx
sc <- buildSchemaCache
(,) sc <$> GS.mkGCtxMap (scTables sc)
either initErrExit return pgResp >>= newIORef
- cacheLock <- lift $ newMVar ()
+ httpManager <- HTTP.newManager HTTP.tlsManagerSettings
+
+ cacheLock <- newMVar ()
+
+ let serverCtx =
+ ServerCtx isoLevel pool (L.mkLogger loggerCtx) cacheRef
+ cacheLock mode httpManager
+
+ spockApp <- spockAsApp $ spockT id $
+ httpApp mRootDir corsCfg serverCtx enableConsole
+
+ let runTx tx = runExceptT $ Q.runTx pool (isoLevel, Nothing) tx
- let serverCtx = ServerCtx isoLevel pool logger cacheRef cacheLock mode
+ wsServerEnv <- WS.createWSServerEnv (scLogger serverCtx) httpManager cacheRef runTx
+ let wsServerApp = WS.createWSServerApp mode wsServerEnv
+ return $ WS.websocketsOr WS.defaultConnectionOptions wsServerApp spockApp
+httpApp :: Maybe String -> CorsConfig -> ServerCtx -> Bool -> SpockT IO ()
+httpApp mRootDir corsCfg serverCtx enableConsole = do
liftIO $ putStrLn "HasuraDB is now waiting for connections"
-- cors middleware
@@ -416,20 +313,20 @@ app isoLevel mRootDir logger pool mode corsCfg enableConsole = do
get "v1/version" getVersion
- get ("v1/template" /> var) $ tmpltGetOrDeleteH serverCtx
- post ("v1/template" /> var) $ tmpltPutOrPostH serverCtx
- put ("v1/template" /> var) $ tmpltPutOrPostH serverCtx
- delete ("v1/template" /> var) $ tmpltGetOrDeleteH serverCtx
+ get ("v1/template" /> var) tmpltGetOrDeleteH
+ post ("v1/template" /> var) tmpltPutOrPostH
+ put ("v1/template" /> var) tmpltPutOrPostH
+ delete ("v1/template" /> var) tmpltGetOrDeleteH
post "v1/query" $ mkSpockAction encodeQErr serverCtx $ do
query <- parseBody
v1QueryHandler query
- post "v1/query/explain" $ mkSpockAction encodeQErr serverCtx $ do
- expQuery <- parseBody
- v1ExplainHandler expQuery
+ -- post "v1/query/explain" $ mkSpockAction encodeQErr serverCtx $ do
+ -- expQuery <- parseBody
+ -- v1ExplainHandler expQuery
- post "v1alpha1/graphql" $ mkSpockAction GE.encodeGQErr serverCtx $ do
+ post "v1alpha1/graphql" $ mkSpockAction GH.encodeGQErr serverCtx $ do
query <- parseBody
v1Alpha1GQHandler query
@@ -437,7 +334,8 @@ app isoLevel mRootDir logger pool mode corsCfg enableConsole = do
-- mkSpockAction encodeQErr serverCtx v1Alpha1GQSchemaHandler
post ("api/1/table" /> var /> var) $ \tableName queryType ->
- mkSpockAction encodeQErr serverCtx $ legacyQueryHandler (TableName tableName) queryType
+ mkSpockAction encodeQErr serverCtx $
+ legacyQueryHandler (TableName tableName) queryType
hookAny GET $ \_ -> do
let qErr = err404 NotFound "resource does not exist"
@@ -446,11 +344,11 @@ app isoLevel mRootDir logger pool mode corsCfg enableConsole = do
lazyBytes $ encode qErr
where
- tmpltGetOrDeleteH serverCtx tmpltName = do
+ tmpltGetOrDeleteH tmpltName = do
tmpltArgs <- tmpltArgsFromQueryParams
mkSpockAction encodeQErr serverCtx $ mkQTemplateAction tmpltName tmpltArgs
- tmpltPutOrPostH serverCtx tmpltName = do
+ tmpltPutOrPostH tmpltName = do
tmpltArgs <- tmpltArgsFromQueryParams
mkSpockAction encodeQErr serverCtx $ do
bodyTmpltArgs <- parseBody
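A minimal sketch of serving the new mkWaiApp with Warp, mirroring the wiring in src-exec/Main.hs and the test harness below; the port and CORS values are made up, and CorsConfigG is assumed to come from Hasura.Server.Init as in test/Main.hs:

{-# LANGUAGE OverloadedStrings #-}

import qualified Database.PG.Query as Q
import qualified Network.Wai.Handler.Warp as Warp

import Hasura.Logging (defaultLoggerSettings, mkLoggerCtx)
import Hasura.Server.App (mkWaiApp)
import Hasura.Server.Auth (AuthMode (..))
import Hasura.Server.Init

serve :: Q.PGPool -> IO ()
serve pool = do
  loggerCtx <- mkLoggerCtx defaultLoggerSettings
  -- isolation, console root dir, logger, pool, auth mode, CORS config, console flag
  app <- mkWaiApp Q.Serializable Nothing loggerCtx pool AMNoAuth
           (CorsConfigG "*" True) True
  -- the returned Application already has the websocket handler spliced in via
  -- websocketsOr, so a plain Warp.run serves both HTTP and websocket clients
  Warp.run 8080 app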
diff --git a/server/src-lib/Hasura/Server/Auth.hs b/server/src-lib/Hasura/Server/Auth.hs
new file mode 100644
index 0000000000000..f6568e4899b28
--- /dev/null
+++ b/server/src-lib/Hasura/Server/Auth.hs
@@ -0,0 +1,135 @@
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MultiParamTypeClasses #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+
+module Hasura.Server.Auth
+ ( getUserInfo
+ , AuthMode(..)
+ ) where
+
+import Control.Exception (try)
+import Control.Lens
+import Data.Aeson
+import Data.CaseInsensitive (CI (..), original)
+
+import qualified Data.ByteString as B
+import qualified Data.HashMap.Strict as M
+import qualified Data.Text as T
+import qualified Data.Text.Encoding as TE
+import qualified Data.Text.Encoding.Error as TE
+import qualified Network.HTTP.Client as H
+import qualified Network.HTTP.Types as N
+import qualified Network.Wreq as Wreq
+
+import Hasura.Prelude
+import Hasura.RQL.Types
+
+bsToTxt :: B.ByteString -> T.Text
+bsToTxt = TE.decodeUtf8With TE.lenientDecode
+
+data AuthMode
+ = AMNoAuth
+ | AMAccessKey !T.Text
+ | AMAccessKeyAndHook !T.Text !T.Text
+ deriving (Show, Eq)
+
+httpToQErr :: H.HttpException -> QErr
+httpToQErr e = case e of
+ H.InvalidUrlException _ _ -> err500 Unexpected "Invalid Webhook Url"
+ H.HttpExceptionRequest _ H.ConnectionTimeout -> err500 Unexpected
+ "Webhook : Connection timeout"
+ H.HttpExceptionRequest _ H.ResponseTimeout -> err500 Unexpected
+ "Webhook : Response timeout"
+ _ -> err500 Unexpected "HTTP Exception from Webhook"
+
+userRoleHeader :: T.Text
+userRoleHeader = "x-hasura-role"
+
+userInfoFromWebhook
+ :: (MonadIO m, MonadError QErr m)
+ => H.Manager
+ -> T.Text
+ -> [N.Header]
+ -> m UserInfo
+userInfoFromWebhook manager urlT reqHeaders = do
+ let options =
+ Wreq.defaults
+ & Wreq.headers .~ filteredHeaders
+ & Wreq.checkResponse ?~ (\_ _ -> return ())
+ & Wreq.manager .~ Right manager
+
+ res <- liftIO $ try $ Wreq.getWith options $ T.unpack urlT
+ resp <- either (throwError . httpToQErr) return res
+ let status = resp ^. Wreq.responseStatus
+
+ validateStatus status
+ rawHeaders <- decodeResp $ resp ^. Wreq.responseBody
+
+ let headers = M.fromList [(T.toLower k, v) | (k, v) <- M.toList rawHeaders]
+
+ case M.lookup userRoleHeader headers of
+ Nothing -> throw500 "missing x-hasura-role key in webhook response: "
+ Just v -> return $ UserInfo (RoleName v) headers
+
+ where
+ filteredHeaders = flip filter reqHeaders $ \(n, _) ->
+ n /= "Content-Length" && n /= "User-Agent" && n /= "Host"
+ && n /= "Origin" && n /= "Referer"
+
+ validateStatus statusCode
+ | statusCode == N.status200 = return ()
+ | statusCode == N.status401 =
+ throw401 "Authentication hook unauthorized this request"
+ | otherwise =
+ throw500 "Invalid response from authorization hook"
+
+ decodeResp bs = case eitherDecode bs of
+ Left e -> throw500 $
+ "Invalid response from authorization hook: " <> T.pack e
+ Right a -> return a
+
+accessKeyHeader :: T.Text
+accessKeyHeader = "x-hasura-access-key"
+
+getUserInfo
+ :: (MonadIO m, MonadError QErr m)
+ => H.Manager
+ -> [N.Header]
+ -> AuthMode
+ -> m UserInfo
+getUserInfo manager rawHeaders = \case
+
+ AMNoAuth -> return userInfoFromHeaders
+
+ AMAccessKey accKey ->
+ case getHeader accessKeyHeader of
+ Just givenAccKey -> userInfoWhenAccessKey accKey givenAccKey
+ Nothing -> throw401 "x-hasura-access-key required, but not found"
+
+ AMAccessKeyAndHook accKey hook ->
+ maybe
+ (userInfoFromWebhook manager hook rawHeaders)
+ (userInfoWhenAccessKey accKey) $
+ getHeader accessKeyHeader
+
+ where
+
+ headers =
+ M.fromList $ filter (T.isPrefixOf "x-hasura-" . fst) $
+ flip map rawHeaders $
+ \(hdrName, hdrVal) ->
+ (T.toLower $ bsToTxt $ original hdrName, bsToTxt hdrVal)
+
+ getHeader h = M.lookup h headers
+
+ userInfoFromHeaders =
+ case M.lookup "x-hasura-role" headers of
+ Just v -> UserInfo (RoleName v) headers
+ Nothing -> UserInfo adminRole M.empty
+
+ userInfoWhenAccessKey key reqKey = do
+ when (reqKey /= key) $ throw401 "invalid x-hasura-access-key"
+ return userInfoFromHeaders
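For reference, a sketch of how a caller resolves the requester from raw WAI headers (this mirrors what mkSpockAction in Server/App.hs now does); the function name is illustrative and QErr and UserInfo are assumed to be re-exported from Hasura.RQL.Types:

import Control.Monad.Except (runExceptT)
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as N

import Hasura.RQL.Types (QErr, UserInfo)
import Hasura.Server.Auth (AuthMode (..), getUserInfo)

-- AMNoAuth                  : trust the x-hasura-* headers as given
--                             (role defaults to admin when x-hasura-role is absent)
-- AMAccessKey key           : x-hasura-access-key must be present and equal to key
-- AMAccessKeyAndHook key h  : use the access key if supplied, else ask the webhook h
resolveUser :: HTTP.Manager -> [N.Header] -> AuthMode -> IO (Either QErr UserInfo)
resolveUser manager headers authMode =
  runExceptT $ getUserInfo manager headers authMode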
diff --git a/server/src-lib/Hasura/Server/Logging.hs b/server/src-lib/Hasura/Server/Logging.hs
index 3b24a24b9a0a4..1556536dd5cd9 100644
--- a/server/src-lib/Hasura/Server/Logging.hs
+++ b/server/src-lib/Hasura/Server/Logging.hs
@@ -5,30 +5,26 @@
-- This is taken from wai-logger and customised for our use
module Hasura.Server.Logging
- ( ServerLogger
- , withStdoutLogger
- , ServerLog(..)
- , LogDetail(..)
- , LogDetailG
- , getRequestHeader
- ) where
-
-import Control.Exception (bracket)
+ ( mkAccessLog
+ , getRequestHeader
+ ) where
+
+import Crypto.Hash (Digest, SHA1, hash)
import Data.Aeson
import Data.Bits (shift, (.&.))
import Data.ByteString.Char8 (ByteString)
+import qualified Data.ByteString.Lazy as BL
import Data.Int (Int64)
import Data.List (find)
+import qualified Data.TByteString as TBS
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
-import qualified Data.Text.Lazy as TL
import Data.Time.Clock
import Data.Word (Word32)
import Network.Socket (SockAddr (..))
import Network.Wai (Request (..))
import System.ByteOrder (ByteOrder (..), byteOrder)
-import System.Log.FastLogger
import Text.Printf (printf)
import qualified Data.ByteString.Char8 as BS
@@ -36,51 +32,51 @@ import qualified Data.CaseInsensitive as CI
import qualified Data.HashMap.Strict as M
import qualified Network.HTTP.Types as N
+import qualified Hasura.Logging as L
import Hasura.Prelude
+import Hasura.RQL.Types.Error
import Hasura.Server.Utils
-
-data ServerLog
- = ServerLog
- { slStatus :: !N.Status
- , slMethod :: !T.Text
- , slSource :: !T.Text
- , slPath :: !T.Text
- , slTimestamp :: !T.Text
- , slHttpVersion :: !N.HttpVersion
- , slDetail :: !(Maybe Value)
- , slRequestId :: !(Maybe T.Text)
- -- , slHasuraId :: !(Maybe T.Text)
- , slHasuraRole :: !(Maybe T.Text)
- , slHasuraMetadata :: !(Maybe Value)
- , slQueryHash :: !(Maybe T.Text)
- , slResponseSize :: !(Maybe Int64)
- , slResponseTime :: !(Maybe T.Text)
+data AccessLog
+ = AccessLog
+ { alStatus :: !N.Status
+ , alMethod :: !T.Text
+ , alSource :: !T.Text
+ , alPath :: !T.Text
+ , alHttpVersion :: !N.HttpVersion
+ , alDetail :: !(Maybe Value)
+ , alRequestId :: !(Maybe T.Text)
+ , alHasuraRole :: !(Maybe T.Text)
+ , alHasuraMetadata :: !(Maybe Value)
+ , alQueryHash :: !(Maybe T.Text)
+ , alResponseSize :: !(Maybe Int64)
+ , alResponseTime :: !(Maybe T.Text)
} deriving (Show, Eq)
-instance ToJSON ServerLog where
- toJSON (ServerLog st met src path ts hv det reqId hRole hMd qh rs rt) =
+instance L.ToEngineLog AccessLog where
+ toEngineLog accessLog =
+ (L.LevelInfo, "http-log", toJSON accessLog)
+
+instance ToJSON AccessLog where
+ toJSON (AccessLog st met src path hv det reqId hRole hMd qh rs rt) =
object [ "status" .= N.statusCode st
, "method" .= met
, "ip" .= src
, "url" .= path
- , "timestamp" .= ts
, "http_version" .= show hv
, "detail" .= det
, "request_id" .= reqId
- -- , "hasura_id" .= hId
, "hasura_role" .= hRole
, "hasura_metadata" .= hMd
, "query_hash" .= qh
, "response_size" .= rs
- -- , "response_time" .= rt
, "query_execution_time" .= rt
]
data LogDetail
= LogDetail
- { ldQuery :: !TL.Text
- , ldError :: !Value
+ { _ldQuery :: !TBS.TByteString
+ , _ldError :: !Value
} deriving (Show, Eq)
instance ToJSON LogDetail where
@@ -90,52 +86,64 @@ instance ToJSON LogDetail where
]
-- type ServerLogger = Request -> BL.ByteString -> Either QErr BL.ByteString -> IO ()
-type ServerLogger r = Request -> r -> Maybe (UTCTime, UTCTime) -> IO ()
-
-type LogDetailG r = Request -> r -> (N.Status, Maybe Value, Maybe T.Text, Maybe Int64)
-
-withStdoutLogger :: LogDetailG r -> (ServerLogger r -> IO a) -> IO a
-withStdoutLogger detailF appf =
- bracket setup teardown $ \(rlogger, _) -> appf rlogger
+-- type ServerLogger r = Request -> r -> Maybe (UTCTime, UTCTime) -> IO ()
+
+-- type LogDetailG r = Request -> r -> (N.Status, Maybe Value, Maybe T.Text, Maybe Int64)
+
+-- withStdoutLogger :: LogDetailG r -> (ServerLogger r -> IO a) -> IO a
+-- withStdoutLogger detailF appf =
+-- bracket setup teardown $ \(rlogger, _) -> appf rlogger
+-- where
+-- setup = do
+-- getter <- newTimeCache "%FT%T%z"
+-- lgrset <- newStdoutLoggerSet defaultBufSize
+-- let logger req env timeT = do
+-- zdata <- getter
+-- let serverLog = mkAccessLog detailF zdata req env timeT
+-- pushLogStrLn lgrset $ toLogStr $ encode serverLog
+-- when (isJust $ slDetail serverLog) $ flushLogStr lgrset
+-- remover = rmLoggerSet lgrset
+-- return (logger, remover)
+-- teardown (_, remover) = void remover
+
+ravenLogGen
+ :: (BL.ByteString, Either QErr BL.ByteString)
+ -> (N.Status, Maybe Value, Maybe T.Text, Maybe Int64)
+ravenLogGen (reqBody, res) =
+ (status, toJSON <$> logDetail, Just qh, Just size)
where
- setup = do
- getter <- newTimeCache "%FT%T%z"
- lgrset <- newStdoutLoggerSet defaultBufSize
- let logger req env timeT = do
- zdata <- getter
- let serverLog = mkServerLog detailF zdata req env timeT
- pushLogStrLn lgrset $ toLogStr $ encode serverLog
- when (isJust $ slDetail serverLog) $ flushLogStr lgrset
- remover = rmLoggerSet lgrset
- return (logger, remover)
- teardown (_, remover) = void remover
-
-mkServerLog
- :: LogDetailG r
- -> FormattedTime
- -> Request
- -> r
+ status = either qeStatus (const N.status200) res
+ logDetail = either (Just . qErrToLogDetail) (const Nothing) res
+ reqBodyTxt = TBS.fromLBS reqBody
+ qErrToLogDetail qErr =
+ LogDetail reqBodyTxt $ toJSON qErr
+ size = BL.length $ either encode id res
+ qh = T.pack . show $ sha1 reqBody
+ sha1 :: BL.ByteString -> Digest SHA1
+ sha1 = hash . BL.toStrict
+
+mkAccessLog
+ :: Request
+ -> (BL.ByteString, Either QErr BL.ByteString)
-> Maybe (UTCTime, UTCTime)
- -> ServerLog
-mkServerLog detailF tmstr req r mTimeT =
- ServerLog
- { slStatus = status
- , slMethod = decodeBS $ requestMethod req
- , slSource = decodeBS $ getSourceFromFallback req
- , slPath = decodeBS $ rawPathInfo req
- , slTimestamp = decodeBS tmstr
- , slHttpVersion = httpVersion req
- , slDetail = mDetail
- , slRequestId = decodeBS <$> getRequestId req
- -- , slHasuraId = decodeBS <$> getHasuraId req
- , slHasuraRole = decodeBS <$> getHasuraRole req
- , slHasuraMetadata = getHasuraMetadata req
- , slResponseSize = size
- , slResponseTime = T.pack . show <$> diffTime
- , slQueryHash = queryHash
+ -> AccessLog
+mkAccessLog req r mTimeT =
+ AccessLog
+ { alStatus = status
+ , alMethod = decodeBS $ requestMethod req
+ , alSource = decodeBS $ getSourceFromFallback req
+ , alPath = decodeBS $ rawPathInfo req
+ , alHttpVersion = httpVersion req
+ , alDetail = mDetail
+ , alRequestId = decodeBS <$> getRequestId req
+ , alHasuraRole = decodeBS <$> getHasuraRole req
+ , alHasuraMetadata = getHasuraMetadata req
+ , alResponseSize = size
+ , alResponseTime = T.pack . show <$> diffTime
+ , alQueryHash = queryHash
}
where
- (status, mDetail, queryHash, size) = detailF req r
+ (status, mDetail, queryHash, size) = ravenLogGen r
diffTime = case mTimeT of
Nothing -> Nothing
Just (t1, t2) -> Just $ diffUTCTime t2 t1
@@ -175,9 +183,9 @@ newtype HasuraMetadata
= HasuraMetadata { unHM :: M.HashMap T.Text T.Text } deriving (Show)
instance ToJSON HasuraMetadata where
- toJSON hash = toJSON $ M.fromList $ map (\(k,v) -> (format k, v)) hdrs
+ toJSON h = toJSON $ M.fromList $ map (\(k,v) -> (format k, v)) hdrs
where
- hdrs = M.toList $ unHM hash
+ hdrs = M.toList $ unHM h
format = T.map underscorify . T.drop 2
underscorify '-' = '_'
underscorify c = c
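A sketch of how an HTTP result ends up in the structured log: mkAccessLog builds the AccessLog value and any Hasura.Logging.Logger can push it, thanks to the ToEngineLog instance above (this is essentially what logResult in Server/App.hs does); the function name here is illustrative.

{-# LANGUAGE RankNTypes #-}

import qualified Data.ByteString.Lazy as BL
import Data.Time.Clock (UTCTime)
import qualified Network.Wai as Wai

import qualified Hasura.Logging as L
import Hasura.RQL.Types.Error (QErr)
import Hasura.Server.Logging (mkAccessLog)

logHttpResult
  :: L.Logger
  -> Wai.Request
  -> BL.ByteString               -- request body
  -> Either QErr BL.ByteString   -- handler result
  -> Maybe (UTCTime, UTCTime)    -- request start/end times, if measured
  -> IO ()
logHttpResult logger req reqBody res timing =
  logger $ mkAccessLog req (reqBody, res) timing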
diff --git a/server/src-lib/Hasura/Server/Middleware.hs b/server/src-lib/Hasura/Server/Middleware.hs
index 025642965b3a1..252803b3a22ce 100644
--- a/server/src-lib/Hasura/Server/Middleware.hs
+++ b/server/src-lib/Hasura/Server/Middleware.hs
@@ -14,7 +14,6 @@ import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Network.HTTP.Types as H
-
data CorsPolicy
= CorsPolicy
{ cpDomain :: !T.Text
@@ -22,7 +21,6 @@ data CorsPolicy
, cpMaxAge :: !Int
} deriving (Show, Eq)
-
mkDefaultCorsPolicy :: T.Text -> CorsPolicy
mkDefaultCorsPolicy domain =
CorsPolicy
@@ -31,7 +29,6 @@ mkDefaultCorsPolicy domain =
, cpMaxAge = 1728000
}
-
corsMiddleware :: CorsPolicy -> Middleware
corsMiddleware policy app req sendResp =
maybe (app req sendResp) handleCors $ getRequestHeader "Origin" req
diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs
index bc1262cc68a00..667a3faf6fd07 100644
--- a/server/src-lib/Hasura/Server/Query.hs
+++ b/server/src-lib/Hasura/Server/Query.hs
@@ -12,6 +12,7 @@ import Language.Haskell.TH.Syntax (Lift)
import qualified Data.ByteString.Builder as BB
import qualified Data.ByteString.Lazy as BL
+import qualified Data.HashMap.Strict as Map
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Data.Vector as V
@@ -242,7 +243,7 @@ setHeadersTx :: UserInfo -> Q.TxE QErr ()
setHeadersTx userInfo =
forM_ hdrs $ \h -> Q.unitQE defaultTxErrorHandler (mkQ h) () False
where
- hdrs = userHeaders userInfo
+ hdrs = Map.toList $ userHeaders userInfo
mkQ (h, v) = Q.fromBuilder $ BB.string7 $
T.unpack $
"SET LOCAL hasura." <> dropAndSnakeCase h <> " = " <> pgFmtLit v
diff --git a/server/stack.yaml b/server/stack.yaml
index a5fad0c9edb09..1dec4df0ca515 100644
--- a/server/stack.yaml
+++ b/server/stack.yaml
@@ -18,7 +18,7 @@ extra-deps:
- git: https://github.com/hasura/pg-client-hs.git
commit: 77995388cab656f9180b851f33f3d603cf1017c7
- git: https://github.com/hasura/graphql-parser-hs.git
- commit: 0974c1a2cddb0b5aca0941abae4ad0a9b2e4a051
+ commit: eae59812ec537b3756c3ddb5f59a7cc59508869b
# Override default flag values for local packages and extra-deps
flags: {}
diff --git a/server/test/Main.hs b/server/test/Main.hs
index 6d4c901fc0503..653c9fc0c7da0 100644
--- a/server/test/Main.hs
+++ b/server/test/Main.hs
@@ -1,4 +1,5 @@
{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RankNTypes #-}
module Main where
@@ -11,22 +12,22 @@ import System.Exit (exitFailure)
import Test.Hspec.Core.Runner
import Test.Hspec.Formatters
import Test.Hspec.Wai
-import Web.Spock.Core (spockAsApp, spockT)
import qualified Data.Aeson as J
import qualified Data.ByteString.Lazy.Char8 as BLC
+import qualified Database.PG.Query as Q
+import qualified Hasura.Logging as L
import Hasura.Prelude
-import Hasura.Server.App (AuthMode (..), RavenLogger, app,
- ravenLogGen)
-import Hasura.Server.Init
-import Hasura.Server.Logging (withStdoutLogger)
-import Ops (initCatalogSafe)
-import Spec (mkSpecs)
+import Hasura.Server.App (mkWaiApp)
+import Hasura.Server.Auth (AuthMode (..))
+
-import qualified Database.PG.Query as Q
import qualified Database.PG.Query as PGQ
+import Hasura.Server.Init
+import Ops (initCatalogSafe)
+import Spec (mkSpecs)
data ConnectionParams = ConnectionParams RawConnInfo Q.ConnParams
@@ -40,13 +41,14 @@ resetStateTx = do
Q.unitQE PGQ.PGExecErrTx "DROP SCHEMA public CASCADE" () False
Q.unitQE PGQ.PGExecErrTx "CREATE SCHEMA public" () False
-ravenApp :: RavenLogger -> PGQ.PGPool -> IO Application
-ravenApp rlogger pool = do
+ravenApp :: L.LoggerCtx -> PGQ.PGPool -> IO Application
+ravenApp loggerCtx pool = do
let corsCfg = CorsConfigG "*" True -- cors is disabled
- spockAsApp $ spockT id $ app Q.Serializable Nothing rlogger pool AMNoAuth corsCfg True -- no access key and no webhook
+ -- spockAsApp $ spockT id $ app Q.Serializable Nothing rlogger pool AMNoAuth corsCfg True -- no access key and no webhook
+ mkWaiApp Q.Serializable Nothing loggerCtx pool AMNoAuth corsCfg True -- no access key and no webhook
main :: IO ()
-main = withStdoutLogger ravenLogGen $ \rlogger -> do
+main = do
-- parse CLI flags for connection params
ConnectionParams rci cp <- parseArgs
-- form the postgres connection info
@@ -59,9 +61,10 @@ main = withStdoutLogger ravenLogGen $ \rlogger -> do
-- initialise state for graphql-engine in the database
liftIO $ initialise pool
specs <- mkSpecs
+ loggerCtx <- L.mkLoggerCtx L.defaultLoggerSettings
-- run the tests
withArgs [] $ hspecWith defaultConfig {configFormatter = Just progress} $
- with (ravenApp rlogger pool) specs
+ with (ravenApp loggerCtx pool) specs
where
initialise :: Q.PGPool -> IO ()