Merged
1 change: 1 addition & 0 deletions .circleci/test-server.sh
@@ -124,6 +124,7 @@ mkdir -p "$OUTPUT_FOLDER"
export EVENT_WEBHOOK_HEADER="MyEnvValue"
export HGE_URL="http://localhost:8080"
export WEBHOOK_FROM_ENV="http://127.0.0.1:5592"
export HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES=true

PID=""
WH_PID=""
@@ -139,9 +139,13 @@ For ``serve`` sub-command these are the flags and ENV variables available:
- ``HASURA_GRAPHQL_TX_ISOLATION``
- transaction isolation. read-committed / repeatable-read / serializable (default: read-committed)

* - ``--stringify-numeric-types``
- ``HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES``
- Stringify certain Postgres numeric types, specifically ``bigint``, ``numeric``, ``decimal`` and ``double precision`` as they don't fit into the ``IEEE-754`` spec for JSON encoding-decoding. (default: false)

* - ``--enabled-apis <APIS>``
- ``HASURA_GRAPHQL_ENABLED_APIS``
- Comma separated list of APIs (metadata & graphql) to be enabled. (default: ``metadata,graphql``)

.. note::
When the equivalent flags for environment variables are used, the flags will take precedence.
1. When the equivalent flags for environment variables are used, the flags will take precedence.
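The rationale in the new flag description can be made concrete with a few lines of Haskell. This is a minimal sketch, not part of the diff: JSON numbers are commonly decoded as IEEE-754 doubles, which cannot represent every ``bigint`` exactly, hence the option to return such columns as strings.

main :: IO ()
main = do
  let big      = 2 ^ (53 :: Int) + 1 :: Integer  -- 9007199254740993, a perfectly valid bigint
      asDouble = fromInteger big :: Double       -- rounded to the nearest representable double
  print big                         -- 9007199254740993
  print (round asDouble :: Integer) -- 9007199254740992, the low bit is lost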
7 changes: 4 additions & 3 deletions server/src-exec/Main.hs
@@ -72,6 +72,7 @@ parseHGECommand =
<*> parseCorsConfig
<*> parseEnableConsole
<*> parseEnableTelemetry
<*> parseStringifyNum
<*> parseEnabledAPIs

parseArgs :: IO HGEOptions
@@ -103,7 +104,7 @@
let logger = mkLogger loggerCtx
case hgeCmd of
HCServe so@(ServeOptions port host cp isoL mAdminSecret mAuthHook mJwtSecret
mUnAuthRole corsCfg enableConsole enableTelemetry enabledAPIs) -> do
mUnAuthRole corsCfg enableConsole enableTelemetry strfyNum enabledAPIs) -> do
-- log serve options
unLogger logger $ serveOptsToLog so
hloggerCtx <- mkLoggerCtx $ defaultLoggerSettings False
@@ -125,7 +126,7 @@

pool <- Q.initPGPool ci cp
(app, cacheRef) <- mkWaiApp isoL loggerCtx pool httpManager
am corsCfg enableConsole enableTelemetry enabledAPIs
strfyNum am corsCfg enableConsole enableTelemetry enabledAPIs

let warpSettings = Warp.setPort port $ Warp.setHost host Warp.defaultSettings

@@ -178,7 +179,7 @@ main = do
runAsAdmin ci httpManager m = do
pool <- getMinimalPool ci
res <- runExceptT $ peelRun emptySchemaCache adminUserInfo
httpManager pool Q.Serializable m
httpManager False pool Q.Serializable m
return $ fmap fst res

procConnInfo rci =
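A hedged sketch of the parsing side wired in above, assuming an optparse-applicative style switch. The engine's real parseStringifyNum also honours the HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES environment variable and sits inside a much larger ServeOptions parser; this simplified stand-alone version only shows the shape.

import Options.Applicative

data ServeOpts = ServeOpts
  { soPort     :: Int
  , soStrfyNum :: Bool  -- the value threaded into mkWaiApp above as strfyNum
  }

parseStringifyNum :: Parser Bool
parseStringifyNum = switch
  (  long "stringify-numeric-types"
  <> help "Stringify bigint, numeric, decimal and double precision in responses" )

parseServeOpts :: Parser ServeOpts
parseServeOpts = ServeOpts
  <$> option auto (long "server-port" <> value 8080 <> help "Port to serve on")
  <*> parseStringifyNum

main :: IO ()
main = do
  opts <- execParser $ info (parseServeOpts <**> helper) fullDesc
  print (soStrfyNum opts)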
48 changes: 42 additions & 6 deletions server/src-exec/Migrate.hs
@@ -22,7 +22,13 @@ curCatalogVer :: T.Text
curCatalogVer = "10"

migrateMetadata
:: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m)
:: ( MonadTx m
, HasHttpManager m
, CacheRWM m
, UserInfoM m
, MonadIO m
, HasSQLGenCtx m
)
=> Bool -> RQLQuery -> m ()
migrateMetadata buildSC rqlQuery = do
-- Build schema cache from 'hdb_catalog' only if current
@@ -108,7 +114,13 @@ from08To1 = liftTx $ Q.catchE defaultTxErrorHandler $ do
|] () False

from1To2
:: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m)
:: ( MonadTx m
, HasHttpManager m
, HasSQLGenCtx m
, CacheRWM m
, UserInfoM m
, MonadIO m
)
=> m ()
from1To2 = do
-- Migrate database
@@ -130,7 +142,13 @@ from2To3 = liftTx $ Q.catchE defaultTxErrorHandler $ do

-- custom resolver
from4To5
:: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m)
:: ( MonadTx m
, HasHttpManager m
, HasSQLGenCtx m
, CacheRWM m
, UserInfoM m
, MonadIO m
)
=> m ()
from4To5 = do
Q.Discard () <- liftTx $ Q.multiQE defaultTxErrorHandler
@@ -183,7 +201,13 @@ from6To7 = liftTx $ do
return ()

from7To8
:: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m)
:: ( MonadTx m
, HasHttpManager m
, HasSQLGenCtx m
, CacheRWM m
, UserInfoM m
, MonadIO m
)
=> m ()
from7To8 = do
-- Migrate database
@@ -200,7 +224,13 @@

-- alter hdb_version table and track it (telemetry changes)
from8To9
:: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m)
:: ( MonadTx m
, HasHttpManager m
, HasSQLGenCtx m
, CacheRWM m
, UserInfoM m
, MonadIO m
)
=> m ()
from8To9 = do
Q.Discard () <- liftTx $ Q.multiQE defaultTxErrorHandler
@@ -222,7 +252,13 @@ from9To10 = liftTx $ do
return ()

migrateCatalog
:: (MonadTx m, CacheRWM m, MonadIO m, UserInfoM m, HasHttpManager m)
:: ( MonadTx m
, CacheRWM m
, MonadIO m
, UserInfoM m
, HasHttpManager m
, HasSQLGenCtx m
)
=> UTCTime -> m String
migrateCatalog migrationTime = do
preVer <- getCatalogVersion
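Every migration step above now carries a HasSQLGenCtx constraint. A hedged sketch of that constraint-class pattern; only the class name comes from the diff, the method name and instance here are assumptions for illustration.

{-# LANGUAGE FlexibleInstances #-}
import Control.Monad.Reader

newtype SQLGenCtx = SQLGenCtx { stringifyNum :: Bool }

-- Any monad in the stack that can hand out the SQL-generation settings.
class Monad m => HasSQLGenCtx m where
  askSQLGenCtx :: m SQLGenCtx

instance Monad m => HasSQLGenCtx (ReaderT SQLGenCtx m) where
  askSQLGenCtx = ask

-- A migration step can consult the setting without taking a new explicit argument.
someMigrationStep :: HasSQLGenCtx m => m Bool
someMigrationStep = stringifyNum <$> askSQLGenCtx

main :: IO ()
main = runReaderT someMigrationStep (SQLGenCtx True) >>= print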
12 changes: 9 additions & 3 deletions server/src-exec/Ops.hs
@@ -23,7 +23,9 @@ import qualified Database.PG.Query as Q
import qualified Database.PG.Query.Connection as Q

initCatalogSafe
:: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m)
:: ( QErrM m, UserInfoM m, CacheRWM m, MonadTx m
, MonadIO m, HasHttpManager m, HasSQLGenCtx m
)
=> UTCTime -> m String
initCatalogSafe initTime = do
hdbCatalogExists <- liftTx $ Q.catchE defaultTxErrorHandler $
@@ -56,7 +58,9 @@ initCatalogSafe initTime = do
|] (Identity sn) False

initCatalogStrict
:: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m)
:: ( QErrM m, UserInfoM m, CacheRWM m, MonadTx m
, MonadIO m, HasHttpManager m, HasSQLGenCtx m
)
=> Bool -> UTCTime -> m String
initCatalogStrict createSchema initTime = do
liftTx $ Q.catchE defaultTxErrorHandler $
@@ -127,7 +131,9 @@ cleanCatalog = liftTx $ Q.catchE defaultTxErrorHandler $ do
Q.unitQ "DROP SCHEMA hdb_catalog CASCADE" () False

execQuery
:: (MonadTx m, CacheRWM m, MonadIO m, UserInfoM m, HasHttpManager m)
:: ( MonadTx m, CacheRWM m, MonadIO m
, UserInfoM m, HasHttpManager m, HasSQLGenCtx m
)
=> BL.ByteString -> m BL.ByteString
execQuery queryBs = do
query <- case A.decode queryBs of
20 changes: 12 additions & 8 deletions server/src-lib/Hasura/GraphQL/Explain.hs
@@ -3,6 +3,8 @@ module Hasura.GraphQL.Explain
, GQLExplain
) where

import Data.Has (getter)

import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
@@ -49,25 +51,25 @@ data FieldPlan
$(J.deriveJSON (J.aesonDrop 3 J.camelCase) ''FieldPlan)

type Explain =
(ReaderT (FieldMap, OrdByCtx) (Except QErr))
(ReaderT (FieldMap, OrdByCtx, SQLGenCtx) (Except QErr))

runExplain
:: (MonadError QErr m)
=> (FieldMap, OrdByCtx) -> Explain a -> m a
=> (FieldMap, OrdByCtx, SQLGenCtx) -> Explain a -> m a
runExplain ctx m =
either throwError return $ runExcept $ runReaderT m ctx

explainField
:: (MonadTx m)
=> UserInfo -> GCtx -> Field -> m FieldPlan
explainField userInfo gCtx fld =
=> UserInfo -> GCtx -> SQLGenCtx -> Field -> m FieldPlan
explainField userInfo gCtx sqlGenCtx fld =
case fName of
"__type" -> return $ FieldPlan fName Nothing Nothing
"__schema" -> return $ FieldPlan fName Nothing Nothing
"__typename" -> return $ FieldPlan fName Nothing Nothing
_ -> do
opCxt <- getOpCtx fName
builderSQL <- runExplain (fldMap, orderByCtx) $
builderSQL <- runExplain (fldMap, orderByCtx, sqlGenCtx) $
case opCxt of
OCSelect (SelOpCtx tn hdrs permFilter permLimit) -> do
validateHdrs hdrs
@@ -108,9 +110,10 @@ explainField userInfo gCtx fld =
validateHdrs hdrs
(tabArgs, eSel, frmItem) <-
RS.fromFuncQueryField txtConverter fn argSeq isAgg fld
strfyNum <- stringifyNum <$> asks getter
return $ toSQL $
RS.mkFuncSelectWith fn tn
(RS.TablePerm permFilter permLimit) tabArgs eSel frmItem
(RS.TablePerm permFilter permLimit) tabArgs strfyNum eSel frmItem

validateHdrs hdrs = do
let receivedHdrs = userVars userInfo
@@ -123,9 +126,10 @@ explainGQLQuery
=> Q.PGPool
-> Q.TxIsolation
-> SchemaCache
-> SQLGenCtx
-> GQLExplain
-> m BL.ByteString
explainGQLQuery pool iso sc (GQLExplain query userVarsRaw)= do
explainGQLQuery pool iso sc sqlGenCtx (GQLExplain query userVarsRaw)= do
(gCtx, _) <- flip runStateT sc $ getGCtx (userRole userInfo) gCtxMap
queryParts <- runReaderT (GV.getQueryParts query) gCtx
let topLevelNodes = TH.getTopLevelNodes (GV.qpOpDef queryParts)
@@ -136,7 +140,7 @@ explainGQLQuery pool iso sc (GQLExplain query userVarsRaw)= do
(opTy, selSet) <- runReaderT (GV.validateGQ queryParts) gCtx
unless (opTy == G.OperationTypeQuery) $
throw400 InvalidParams "only queries can be explained"
let tx = mapM (explainField userInfo gCtx) (toList selSet)
let tx = mapM (explainField userInfo gCtx sqlGenCtx) (toList selSet)
plans <- liftIO (runExceptT $ runTx tx) >>= liftEither
return $ J.encode plans
where
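The stringifyNum <$> asks getter call above leans on Data.Has to pick the SQLGenCtx component out of the reader tuple by type. A hedged sketch with simplified stand-ins for FieldMap and OrdByCtx:

import Control.Monad.Reader
import Data.Has (Has, getter)

newtype SQLGenCtx = SQLGenCtx { stringifyNum :: Bool }

-- `asks getter` selects SQLGenCtx from the environment by type,
-- so the tuple can grow without touching existing call sites.
wantsStringify :: (MonadReader r m, Has SQLGenCtx r) => m Bool
wantsStringify = stringifyNum <$> asks getter

main :: IO ()
main = do
  let env = ((), "ordByCtx stand-in", SQLGenCtx True)  -- ~ (FieldMap, OrdByCtx, SQLGenCtx)
  print =<< runReaderT wantsStringify env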
22 changes: 13 additions & 9 deletions server/src-lib/Hasura/GraphQL/Resolve.hs
@@ -24,10 +24,14 @@ import qualified Hasura.GraphQL.Resolve.Mutation as RM
import qualified Hasura.GraphQL.Resolve.Select as RS

-- {-# SCC buildTx #-}
buildTx :: UserInfo -> GCtx -> Field -> Q.TxE QErr BL.ByteString
buildTx userInfo gCtx fld = do
buildTx :: UserInfo -> GCtx -> SQLGenCtx -> Field -> Q.TxE QErr BL.ByteString
buildTx userInfo gCtx sqlCtx fld = do
opCxt <- getOpCtx $ _fName fld
join $ fmap fst $ runConvert (fldMap, orderByCtx, insCtxMap) $ case opCxt of
join $ fmap fst $ runConvert ( fldMap
, orderByCtx
, insCtxMap
, sqlCtx
) $ case opCxt of

OCSelect ctx ->
validateHdrs (_socHeaders ctx) >> RS.convertSelect ctx fld
@@ -72,16 +76,16 @@ buildTx userInfo gCtx fld = do
-- {-# SCC resolveFld #-}
resolveFld
:: (MonadTx m)
=> UserInfo -> GCtx
=> UserInfo -> GCtx -> SQLGenCtx
-> G.OperationType
-> Field
-> m BL.ByteString
resolveFld userInfo gCtx opTy fld =
resolveFld userInfo gCtx sqlGenCtx opTy fld =
case _fName fld of
"__type" -> J.encode <$> runReaderT (typeR fld) gCtx
"__schema" -> J.encode <$> runReaderT (schemaR fld) gCtx
"__typename" -> return $ J.encode $ mkRootTypeName opTy
_ -> liftTx $ buildTx userInfo gCtx fld
_ -> liftTx $ buildTx userInfo gCtx sqlGenCtx fld
where
mkRootTypeName :: G.OperationType -> Text
mkRootTypeName = \case
@@ -91,11 +95,11 @@ resolveFld userInfo gCtx opTy fld =

resolveSelSet
:: (MonadTx m)
=> UserInfo -> GCtx
=> UserInfo -> GCtx -> SQLGenCtx
-> G.OperationType
-> SelSet
-> m BL.ByteString
resolveSelSet userInfo gCtx opTy fields =
resolveSelSet userInfo gCtx sqlGenCtx opTy fields =
fmap mkJSONObj $ forM (toList fields) $ \fld -> do
fldResp <- resolveFld userInfo gCtx opTy fld
fldResp <- resolveFld userInfo gCtx sqlGenCtx opTy fld
return (G.unName $ G.unAlias $ _fAlias fld, fldResp)
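Resolve.hs mostly forwards the new context: resolveSelSet passes sqlGenCtx to resolveFld, which passes it to buildTx, and only the SQL generation underneath actually reads it. A deliberately simplified, hedged sketch of that threading shape (the pure functions and pretend SQL are illustrative only, not the engine's behaviour):

newtype SQLGenCtx = SQLGenCtx { stringifyNum :: Bool }

-- The leaf that generates SQL is the only place the flag is inspected.
buildFieldSQL :: SQLGenCtx -> String -> String
buildFieldSQL ctx fld
  | stringifyNum ctx = "SELECT " ++ fld ++ "  -- numeric columns rendered as strings"
  | otherwise        = "SELECT " ++ fld

-- Intermediate resolvers just pass the context along unchanged.
resolveField :: SQLGenCtx -> String -> String
resolveField = buildFieldSQL

resolveSelSet :: SQLGenCtx -> [String] -> [String]
resolveSelSet ctx = map (resolveField ctx)

main :: IO ()
main = mapM_ putStrLn $ resolveSelSet (SQLGenCtx True) ["author", "article"]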
11 changes: 9 additions & 2 deletions server/src-lib/Hasura/GraphQL/Resolve/Context.hs
@@ -125,7 +125,12 @@ withArgM args arg f = prependArgsInPath $ nameAsPath arg $
type PrepArgs = Seq.Seq Q.PrepArg

type Convert =
StateT PrepArgs (ReaderT (FieldMap, OrdByCtx, InsCtxMap) (Except QErr))
StateT PrepArgs (ReaderT ( FieldMap
, OrdByCtx
, InsCtxMap
, SQLGenCtx
) (Except QErr)
)

prepare
:: (MonadState PrepArgs m) => PrepFn m
@@ -136,7 +141,9 @@ prepare (colTy, colVal) = do

runConvert
:: (MonadError QErr m)
=> (FieldMap, OrdByCtx, InsCtxMap) -> Convert a -> m (a, PrepArgs)
=> (FieldMap, OrdByCtx, InsCtxMap, SQLGenCtx)
-> Convert a
-> m (a, PrepArgs)
runConvert ctx m =
either throwError return $
runExcept $ runReaderT (runStateT m Seq.empty) ctx
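A hedged sketch of the Convert-style stack defined above: StateT accumulates prepared arguments over a ReaderT environment (now a 4-tuple that includes SQLGenCtx), and runConvert unwinds it in exactly the order shown in the diff. The types here are simplified stand-ins.

import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.State
import qualified Data.Sequence as Seq

type Env      = (String, Bool)   -- stand-in for (FieldMap, OrdByCtx, InsCtxMap, SQLGenCtx)
type PrepArgs = Seq.Seq Int      -- stand-in for Seq.Seq Q.PrepArg
type Convert  = StateT PrepArgs (ReaderT Env (Except String))

-- Collect a prepared argument, in the spirit of `prepare` above.
prepareArg :: Int -> Convert ()
prepareArg v = modify (Seq.|> v)

-- Mirror of runConvert: peel StateT, then ReaderT, then Except.
runConvertSketch :: Env -> Convert a -> Either String (a, PrepArgs)
runConvertSketch env m = runExcept $ runReaderT (runStateT m Seq.empty) env

main :: IO ()
main = print $ runConvertSketch ("env", True) $ do
  prepareArg 1
  prepareArg 2
  asks snd  -- e.g. read the stringify setting from the environment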