From 6aab2e482c13ea42d63c93ab802ec9ee18f3d6a8 Mon Sep 17 00:00:00 2001 From: rakeshkky Date: Mon, 25 Feb 2019 12:53:29 +0530 Subject: [PATCH 1/5] add config option to enable stringify numerics which exceed binary64, fix #1523 --- .circleci/test-server.sh | 1 + server/src-exec/Main.hs | 7 ++- server/src-exec/Ops.hs | 36 +++++++++--- server/src-lib/Hasura/GraphQL/Explain.hs | 20 ++++--- server/src-lib/Hasura/GraphQL/Resolve.hs | 23 +++++--- .../src-lib/Hasura/GraphQL/Resolve/Context.hs | 12 +++- .../src-lib/Hasura/GraphQL/Resolve/Insert.hs | 55 +++++++++++-------- .../Hasura/GraphQL/Resolve/Mutation.hs | 7 ++- .../src-lib/Hasura/GraphQL/Resolve/Select.hs | 38 +++++++++---- .../src-lib/Hasura/GraphQL/Transport/HTTP.hs | 12 ++-- .../Hasura/GraphQL/Transport/WebSocket.hs | 11 ++-- server/src-lib/Hasura/RQL/DDL/Metadata.hs | 12 ++-- .../src-lib/Hasura/RQL/DDL/QueryTemplate.hs | 6 +- server/src-lib/Hasura/RQL/DDL/Schema/Table.hs | 26 +++++---- server/src-lib/Hasura/RQL/DDL/Subscribe.hs | 33 ++++++----- server/src-lib/Hasura/RQL/DML/Count.hs | 4 +- server/src-lib/Hasura/RQL/DML/Delete.hs | 21 +++---- server/src-lib/Hasura/RQL/DML/Insert.hs | 25 +++++---- server/src-lib/Hasura/RQL/DML/Internal.hs | 11 ++-- .../src-lib/Hasura/RQL/DML/QueryTemplate.hs | 34 +++++++----- server/src-lib/Hasura/RQL/DML/Returning.hs | 12 ++-- server/src-lib/Hasura/RQL/DML/Select.hs | 26 +++++---- .../src-lib/Hasura/RQL/DML/Select/Internal.hs | 51 +++++++++-------- server/src-lib/Hasura/RQL/DML/Select/Types.hs | 9 +-- server/src-lib/Hasura/RQL/DML/Update.hs | 21 +++---- server/src-lib/Hasura/RQL/Types.hs | 23 +++++++- server/src-lib/Hasura/SQL/Types.hs | 8 +++ server/src-lib/Hasura/Server/App.hs | 37 ++++++++----- server/src-lib/Hasura/Server/Init.hs | 20 ++++++- server/src-lib/Hasura/Server/Query.hs | 28 ++++++---- 30 files changed, 389 insertions(+), 240 deletions(-) diff --git a/.circleci/test-server.sh b/.circleci/test-server.sh index b74a60c27074d..9d1fdf840c2e2 100755 --- a/.circleci/test-server.sh +++ b/.circleci/test-server.sh @@ -113,6 +113,7 @@ mkdir -p "$OUTPUT_FOLDER" export EVENT_WEBHOOK_HEADER="MyEnvValue" export HGE_URL="http://localhost:8080" export WEBHOOK_FROM_ENV="http://127.0.0.1:5592" +export HASURA_GRAPHQL_STRINGIFY_NUMERIC=true PID="" WH_PID="" diff --git a/server/src-exec/Main.hs b/server/src-exec/Main.hs index b834b9bf83933..35ad9d5d1ed56 100644 --- a/server/src-exec/Main.hs +++ b/server/src-exec/Main.hs @@ -73,6 +73,7 @@ parseHGECommand = <*> parseCorsConfig <*> parseEnableConsole <*> parseEnableTelemetry + <*> parseStringifyNum parseArgs :: IO HGEOptions parseArgs = do @@ -103,7 +104,7 @@ main = do let logger = mkLogger loggerCtx case hgeCmd of HCServe so@(ServeOptions port host cp isoL mAccessKey mAuthHook mJwtSecret - mUnAuthRole corsCfg enableConsole enableTelemetry) -> do + mUnAuthRole corsCfg enableConsole enableTelemetry strfyNum) -> do -- log serve options unLogger logger $ serveOptsToLog so hloggerCtx <- mkLoggerCtx $ defaultLoggerSettings False @@ -125,7 +126,7 @@ main = do pool <- Q.initPGPool ci cp (app, cacheRef) <- mkWaiApp isoL loggerCtx pool httpManager - am corsCfg enableConsole enableTelemetry + strfyNum am corsCfg enableConsole enableTelemetry let warpSettings = Warp.setPort port $ Warp.setHost host Warp.defaultSettings @@ -179,7 +180,7 @@ main = do runAsAdmin ci httpManager m = do pool <- getMinimalPool ci res <- runExceptT $ peelRun emptySchemaCache adminUserInfo - httpManager pool Q.Serializable m + httpManager False pool Q.Serializable m return $ fmap fst res procConnInfo 
rci = diff --git a/server/src-exec/Ops.hs b/server/src-exec/Ops.hs index 273d5901a38ed..5377820a3157b 100644 --- a/server/src-exec/Ops.hs +++ b/server/src-exec/Ops.hs @@ -26,7 +26,9 @@ curCatalogVer :: T.Text curCatalogVer = "9" initCatalogSafe - :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m) + :: ( QErrM m, UserInfoM m, CacheRWM m, MonadTx m + , MonadIO m, HasHttpManager m, HasSQLGenCtx m + ) => UTCTime -> m String initCatalogSafe initTime = do hdbCatalogExists <- liftTx $ Q.catchE defaultTxErrorHandler $ @@ -59,7 +61,9 @@ initCatalogSafe initTime = do |] (Identity sn) False initCatalogStrict - :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m) + :: ( QErrM m, UserInfoM m, CacheRWM m, MonadTx m + , MonadIO m, HasHttpManager m, HasSQLGenCtx m + ) => Bool -> UTCTime -> m String initCatalogStrict createSchema initTime = do liftTx $ Q.catchE defaultTxErrorHandler $ @@ -117,7 +121,9 @@ initCatalogStrict createSchema initTime = do migrateMetadata - :: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m) + :: ( MonadTx m, HasHttpManager m, CacheRWM m + , UserInfoM m, MonadIO m, HasSQLGenCtx m + ) => RQLQuery -> m () migrateMetadata rqlQuery = do -- build schema cache @@ -216,7 +222,9 @@ from08To1 = liftTx $ Q.catchE defaultTxErrorHandler $ do |] () False from1To2 - :: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m) + :: ( MonadTx m, HasHttpManager m, CacheRWM m + , UserInfoM m, MonadIO m, HasSQLGenCtx m + ) => m () from1To2 = do -- migrate database @@ -238,7 +246,9 @@ from2To3 = liftTx $ Q.catchE defaultTxErrorHandler $ do -- custom resolver from4To5 - :: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m) + :: ( MonadTx m, HasHttpManager m, CacheRWM m + , UserInfoM m, MonadIO m, HasSQLGenCtx m + ) => m () from4To5 = do Q.Discard () <- liftTx $ Q.multiQE defaultTxErrorHandler @@ -291,7 +301,9 @@ from6To7 = liftTx $ do return () from7To8 - :: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m) + :: ( MonadTx m, HasHttpManager m, CacheRWM m + , UserInfoM m, MonadIO m, HasSQLGenCtx m + ) => m () from7To8 = do -- migrate database @@ -306,7 +318,9 @@ from7To8 = do -- alter hdb_version table and track it (telemetry changes) from8To9 - :: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m) + :: ( MonadTx m, HasHttpManager m, CacheRWM m + , UserInfoM m, MonadIO m, HasSQLGenCtx m + ) => m () from8To9 = do Q.Discard () <- liftTx $ Q.multiQE defaultTxErrorHandler @@ -320,7 +334,9 @@ from8To9 = do migrateCatalog - :: (MonadTx m, CacheRWM m, MonadIO m, UserInfoM m, HasHttpManager m) + :: ( MonadTx m, CacheRWM m, MonadIO m + , UserInfoM m, HasHttpManager m, HasSQLGenCtx m + ) => UTCTime -> m String migrateCatalog migrationTime = do preVer <- getCatalogVersion @@ -389,7 +405,9 @@ migrateCatalog migrationTime = do |] (curCatalogVer, migrationTime) False execQuery - :: (MonadTx m, CacheRWM m, MonadIO m, UserInfoM m, HasHttpManager m) + :: ( MonadTx m, CacheRWM m, MonadIO m + , UserInfoM m, HasHttpManager m, HasSQLGenCtx m + ) => BL.ByteString -> m BL.ByteString execQuery queryBs = do query <- case A.decode queryBs of diff --git a/server/src-lib/Hasura/GraphQL/Explain.hs b/server/src-lib/Hasura/GraphQL/Explain.hs index d122766fbb33d..9571afdbbb746 100644 --- a/server/src-lib/Hasura/GraphQL/Explain.hs +++ b/server/src-lib/Hasura/GraphQL/Explain.hs @@ -3,6 +3,8 @@ module Hasura.GraphQL.Explain , GQLExplain ) where +import Data.Has (getter) + import qualified Data.Aeson as J import 
qualified Data.Aeson.Casing as J import qualified Data.Aeson.TH as J @@ -48,25 +50,25 @@ data FieldPlan $(J.deriveJSON (J.aesonDrop 3 J.camelCase) ''FieldPlan) type Explain = - (ReaderT (FieldMap, OrdByCtx, FuncArgCtx) (Except QErr)) + (ReaderT (FieldMap, OrdByCtx, FuncArgCtx, SQLGenCtx) (Except QErr)) runExplain :: (MonadError QErr m) - => (FieldMap, OrdByCtx, FuncArgCtx) -> Explain a -> m a + => (FieldMap, OrdByCtx, FuncArgCtx, SQLGenCtx) -> Explain a -> m a runExplain ctx m = either throwError return $ runExcept $ runReaderT m ctx explainField :: (MonadTx m) - => UserInfo -> GCtx -> Field -> m FieldPlan -explainField userInfo gCtx fld = + => UserInfo -> GCtx -> SQLGenCtx -> Field -> m FieldPlan +explainField userInfo gCtx sqlGenCtx fld = case fName of "__type" -> return $ FieldPlan fName Nothing Nothing "__schema" -> return $ FieldPlan fName Nothing Nothing "__typename" -> return $ FieldPlan fName Nothing Nothing _ -> do opCxt <- getOpCtx fName - builderSQL <- runExplain (fldMap, orderByCtx, funcArgCtx) $ + builderSQL <- runExplain (fldMap, orderByCtx, funcArgCtx, sqlGenCtx) $ case opCxt of OCSelect tn permFilter permLimit hdrs -> do validateHdrs hdrs @@ -108,9 +110,10 @@ explainField userInfo gCtx fld = validateHdrs hdrs (tabArgs, eSel, frmItem) <- RS.fromFuncQueryField txtConverter fn isAgg fld + strfyNum <- stringifyNum <$> asks getter return $ toSQL $ RS.mkFuncSelectWith fn tn - (RS.TablePerm permFilter permLimit) tabArgs eSel frmItem + (RS.TablePerm permFilter permLimit) tabArgs strfyNum eSel frmItem validateHdrs hdrs = do let receivedHdrs = userVars userInfo @@ -123,9 +126,10 @@ explainGQLQuery => Q.PGPool -> Q.TxIsolation -> SchemaCache + -> SQLGenCtx -> GQLExplain -> m BL.ByteString -explainGQLQuery pool iso sc (GQLExplain query userVarsRaw)= do +explainGQLQuery pool iso sc sqlGenCtx (GQLExplain query userVarsRaw)= do (gCtx, _) <- flip runStateT sc $ getGCtx (userRole userInfo) gCtxMap queryParts <- runReaderT (GV.getQueryParts query) gCtx let topLevelNodes = TH.getTopLevelNodes (GV.qpOpDef queryParts) @@ -136,7 +140,7 @@ explainGQLQuery pool iso sc (GQLExplain query userVarsRaw)= do (opTy, selSet) <- runReaderT (GV.validateGQ queryParts) gCtx unless (opTy == G.OperationTypeQuery) $ throw400 InvalidParams "only queries can be explained" - let tx = mapM (explainField userInfo gCtx) (toList selSet) + let tx = mapM (explainField userInfo gCtx sqlGenCtx) (toList selSet) plans <- liftIO (runExceptT $ runTx tx) >>= liftEither return $ J.encode plans where diff --git a/server/src-lib/Hasura/GraphQL/Resolve.hs b/server/src-lib/Hasura/GraphQL/Resolve.hs index 700e661efa8fa..b41ea62e29641 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve.hs @@ -24,10 +24,15 @@ import qualified Hasura.GraphQL.Resolve.Mutation as RM import qualified Hasura.GraphQL.Resolve.Select as RS -- {-# SCC buildTx #-} -buildTx :: UserInfo -> GCtx -> Field -> Q.TxE QErr BL.ByteString -buildTx userInfo gCtx fld = do +buildTx :: UserInfo -> GCtx -> SQLGenCtx -> Field -> Q.TxE QErr BL.ByteString +buildTx userInfo gCtx sqlCtx fld = do opCxt <- getOpCtx $ _fName fld - join $ fmap fst $ runConvert (fldMap, orderByCtx, insCtxMap, funcArgCtx) $ case opCxt of + join $ fmap fst $ runConvert ( fldMap + , orderByCtx + , insCtxMap + , funcArgCtx + , sqlCtx + ) $ case opCxt of OCSelect tn permFilter permLimit hdrs -> validateHdrs hdrs >> RS.convertSelect tn permFilter permLimit fld @@ -73,16 +78,16 @@ buildTx userInfo gCtx fld = do -- {-# SCC resolveFld #-} resolveFld :: (MonadTx m) - 
=> UserInfo -> GCtx + => UserInfo -> GCtx -> SQLGenCtx -> G.OperationType -> Field -> m BL.ByteString -resolveFld userInfo gCtx opTy fld = +resolveFld userInfo gCtx sqlGenCtx opTy fld = case _fName fld of "__type" -> J.encode <$> runReaderT (typeR fld) gCtx "__schema" -> J.encode <$> runReaderT (schemaR fld) gCtx "__typename" -> return $ J.encode $ mkRootTypeName opTy - _ -> liftTx $ buildTx userInfo gCtx fld + _ -> liftTx $ buildTx userInfo gCtx sqlGenCtx fld where mkRootTypeName :: G.OperationType -> Text mkRootTypeName = \case @@ -92,11 +97,11 @@ resolveFld userInfo gCtx opTy fld = resolveSelSet :: (MonadTx m) - => UserInfo -> GCtx + => UserInfo -> GCtx -> SQLGenCtx -> G.OperationType -> SelSet -> m BL.ByteString -resolveSelSet userInfo gCtx opTy fields = +resolveSelSet userInfo gCtx sqlGenCtx opTy fields = fmap mkJSONObj $ forM (toList fields) $ \fld -> do - fldResp <- resolveFld userInfo gCtx opTy fld + fldResp <- resolveFld userInfo gCtx sqlGenCtx opTy fld return (G.unName $ G.unAlias $ _fAlias fld, fldResp) diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Context.hs b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs index a77b5461ee459..4d3f541f85407 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Context.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs @@ -124,7 +124,13 @@ withArgM args arg f = prependArgsInPath $ nameAsPath arg $ type PrepArgs = Seq.Seq Q.PrepArg type Convert = - StateT PrepArgs (ReaderT (FieldMap, OrdByCtx, InsCtxMap, FuncArgCtx) (Except QErr)) + StateT PrepArgs (ReaderT ( FieldMap + , OrdByCtx + , InsCtxMap + , FuncArgCtx + , SQLGenCtx + ) (Except QErr) + ) prepare :: (MonadState PrepArgs m) => PrepFn m @@ -135,7 +141,9 @@ prepare (colTy, colVal) = do runConvert :: (MonadError QErr m) - => (FieldMap, OrdByCtx, InsCtxMap, FuncArgCtx) -> Convert a -> m (a, PrepArgs) + => (FieldMap, OrdByCtx, InsCtxMap, FuncArgCtx, SQLGenCtx) + -> Convert a + -> m (a, PrepArgs) runConvert ctx m = either throwError return $ runExcept $ runReaderT (runStateT m Seq.empty) ctx diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs index 808f8c46908e6..3cb68e6dd4d13 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs @@ -222,26 +222,28 @@ mkSelQ tn allColInfos pgColsWithVal = do (\(_, v) ci -> (ci, v)) execWithExp - :: QualifiedTable + :: Bool + -> QualifiedTable -> InsWithExp -> RR.MutFlds -> Q.TxE QErr RespBody -execWithExp tn (InsWithExp withExp ccM args) flds = do +execWithExp strfyNum tn (InsWithExp withExp ccM args) flds = do RI.setConflictCtx ccM runIdentity . 
Q.getRow <$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder sqlBuilder) (toList args) True where - sqlBuilder = toSQL $ RR.mkSelWith tn withExp flds True + sqlBuilder = toSQL $ RR.mkSelWith tn withExp flds True strfyNum insertAndRetCols - :: QualifiedTable + :: Bool + -> QualifiedTable -> InsWithExp -> T.Text -> [PGColInfo] -> Q.TxE QErr [PGColWithValue] -insertAndRetCols tn withExp errMsg retCols = do - resBS <- execWithExp tn withExp [("response", RR.MRet annSelFlds)] +insertAndRetCols strfyNum tn withExp errMsg retCols = do + resBS <- execWithExp strfyNum tn withExp [("response", RR.MRet annSelFlds)] insResp <- decodeFromBS resBS resObj <- onNothing (_irResponse insResp) $ throwVE errMsg forM retCols $ \(PGColInfo col colty _) -> do @@ -282,15 +284,16 @@ validateInsert insCols objRels addCols = do -- | insert an object relationship and return affected rows -- | and parent dependent columns insertObjRel - :: RoleName + :: Bool + -> RoleName -> ObjRelIns -> Q.TxE QErr (Int, [PGColWithValue]) -insertObjRel role objRelIns = +insertObjRel strfyNum role objRelIns = withPathK relNameTxt $ do - (aRows, withExp) <- insertObj role tn singleObjIns [] + (aRows, withExp) <- insertObj strfyNum role tn singleObjIns [] let errMsg = "cannot proceed to insert object relation " <> relName <<> " since insert to table " <> tn <<> " affects zero rows" - retColsWithVals <- insertAndRetCols tn withExp errMsg $ + retColsWithVals <- insertAndRetCols strfyNum tn withExp errMsg $ getColInfos rCols allCols let c = mergeListsWith mapCols retColsWithVals (\(_, rCol) (col, _) -> rCol == col) @@ -307,17 +310,18 @@ insertObjRel role objRelIns = -- | insert an array relationship and return affected rows insertArrRel - :: RoleName + :: Bool + -> RoleName -> [PGColWithValue] -> ArrRelIns -> Q.TxE QErr Int -insertArrRel role resCols arrRelIns = +insertArrRel strfyNum role resCols arrRelIns = withPathK relNameTxt $ do let addCols = mergeListsWith resCols colMapping (\(col, _) (lCol, _) -> col == lCol) (\(_, colVal) (_, rCol) -> (rCol, colVal)) - resBS <- insertMultipleObjects role tn multiObjIns addCols mutFlds "data" + resBS <- insertMultipleObjects strfyNum role tn multiObjIns addCols mutFlds "data" resObj <- decodeFromBS resBS onNothing (Map.lookup ("affected_rows" :: T.Text) resObj) $ throw500 "affected_rows not returned in array rel insert" @@ -330,17 +334,18 @@ insertArrRel role resCols arrRelIns = -- | insert an object with object and array relationships insertObj - :: RoleName + :: Bool + -> RoleName -> QualifiedTable -> SingleObjIns -> [PGColWithValue] -- ^ additional fields -> Q.TxE QErr (Int, InsWithExp) -insertObj role tn singleObjIns addCols = do +insertObj strfyNum role tn singleObjIns addCols = do -- validate insert validateInsert (map _1 cols) (map _riRelInfo objRels) $ map fst addCols -- insert all object relations and fetch this insert dependent column values - objInsRes <- forM objRels $ insertObjRel role + objInsRes <- forM objRels $ insertObjRel strfyNum role -- prepare final insert columns let objInsAffRows = sum $ map fst objInsRes @@ -369,9 +374,9 @@ insertObj role tn singleObjIns addCols = do withArrRels preAffRows insQ arrDepColsWithType = do arrDepColsWithVal <- - insertAndRetCols tn insQ cannotInsArrRelErr arrDepColsWithType + insertAndRetCols strfyNum tn insQ cannotInsArrRelErr arrDepColsWithType - arrInsARows <- forM arrRels $ insertArrRel role arrDepColsWithVal + arrInsARows <- forM arrRels $ insertArrRel strfyNum role arrDepColsWithVal let totalAffRows = preAffRows + sum arrInsARows @@ -385,14 
+390,15 @@ insertObj role tn singleObjIns addCols = do -- | insert multiple Objects in postgres insertMultipleObjects - :: RoleName + :: Bool + -> RoleName -> QualifiedTable -> MultiObjIns -> [PGColWithValue] -- ^ additional fields -> RR.MutFlds -> T.Text -- ^ error path -> Q.TxE QErr RespBody -insertMultipleObjects role tn multiObjIns addCols mutFlds errP = +insertMultipleObjects strfyNum role tn multiObjIns addCols mutFlds errP = bool withoutRelsInsert withRelsInsert anyRelsToInsert where AnnIns insObjs onConflictM vn tableColInfos defVals = multiObjIns @@ -420,18 +426,18 @@ insertMultipleObjects role tn multiObjIns addCols mutFlds errP = let insQP1 = RI.InsertQueryP1 tn vn tableCols sqlRows onConflictM mutFlds p1 = (insQP1, prepArgs) - bool (RI.nonAdminInsert p1) (RI.insertP2 p1) $ isAdmin role + bool (RI.nonAdminInsert strfyNum p1) (RI.insertP2 strfyNum p1) $ isAdmin role -- insert each object with relations withRelsInsert = withErrPath $ do insResps <- indexedForM singleObjInserts $ \objIns -> - insertObj role tn objIns addCols + insertObj strfyNum role tn objIns addCols let affRows = sum $ map fst insResps withExps = map snd insResps retFlds = mapMaybe getRet mutFlds rawResps <- forM withExps - $ \withExp -> execWithExp tn withExp retFlds + $ \withExp -> execWithExp strfyNum tn withExp retFlds respVals :: [J.Object] <- mapM decodeFromBS rawResps respTups <- forM mutFlds $ \(t, mutFld) -> do jsonVal <- case mutFld of @@ -466,7 +472,8 @@ convertInsert role tn fld = prefixErrPath fld $ do annInsObjs <- forM annObjs $ mkAnnInsObj relInfoMap conflictClauseM <- forM onConflictM $ parseOnConflict tn updPerm let multiObjIns = AnnIns annInsObjs conflictClauseM vn tableCols defValMap - return $ prefixErrPath fld $ insertMultipleObjects role tn + strfyNum <- stringifyNum <$> asks getter + return $ prefixErrPath fld $ insertMultipleObjects strfyNum role tn multiObjIns [] mutFlds "objects" arguments = _fArguments fld diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs b/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs index fd15b368a8467..a3b3f344de7ce 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs @@ -6,6 +6,7 @@ module Hasura.GraphQL.Resolve.Mutation ) where import Control.Arrow (second) +import Data.Has (getter) import Hasura.Prelude import qualified Data.Aeson as J @@ -125,8 +126,9 @@ convertUpdate tn filterExp fld = do unless (any isJust updExpsM) $ throwVE $ "atleast any one of _set, _inc, _append, _prepend, _delete_key, _delete_elem and " <> " _delete_at_path operator is expected" + strfyNum <- stringifyNum <$> asks getter let p1 = RU.UpdateQueryP1 tn setItems (filterExp, whereExp) mutFlds - whenNonEmptyItems = return $ RU.updateQueryToTx (p1, prepArgs) + whenNonEmptyItems = return $ RU.updateQueryToTx strfyNum (p1, prepArgs) whenEmptyItems = buildEmptyMutResp mutFlds -- if there are not set items then do not perform -- update and return empty mutation response @@ -144,7 +146,8 @@ convertDelete tn filterExp fld = do mutFlds <- convertMutResp (_fType fld) $ _fSelSet fld args <- get let p1 = RD.DeleteQueryP1 tn (filterExp, whereExp) mutFlds - return $ RD.deleteQueryToTx (p1, args) + strfyNum <- stringifyNum <$> asks getter + return $ RD.deleteQueryToTx strfyNum (p1, args) -- | build mutation response for empty objects buildEmptyMutResp :: Monad m => RR.MutFlds -> m RespTx diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Select.hs b/server/src-lib/Hasura/GraphQL/Resolve/Select.hs index 
6a7ec2f1b0b0a..1f8d724b4eb4e 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Select.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Select.hs @@ -44,7 +44,9 @@ withSelSet selSet f = return (G.unName $ G.unAlias $ _fAlias fld, res) fromSelSet - :: (MonadError QErr m, MonadReader r m, Has FieldMap r, Has OrdByCtx r) + :: ( MonadError QErr m, MonadReader r m, Has FieldMap r + , Has OrdByCtx r, Has SQLGenCtx r + ) => PrepFn m -> G.NamedType -> SelSet -> m RS.AnnFlds fromSelSet f fldTy flds = forM (toList flds) $ \fld -> do @@ -71,7 +73,9 @@ fromSelSet f fldTy flds = ArrRel -> RS.FArr $ RS.ASSimple annRel fromAggSelSet - :: (MonadError QErr m, MonadReader r m, Has FieldMap r, Has OrdByCtx r) + :: ( MonadError QErr m, MonadReader r m, Has FieldMap r + , Has OrdByCtx r, Has SQLGenCtx r + ) => PrepFn m -> G.NamedType -> SelSet -> m RS.TableAggFlds fromAggSelSet fn fldTy selSet = fmap toFields $ withSelSet selSet $ \f -> do @@ -87,7 +91,9 @@ fieldAsPath :: (MonadError QErr m) => Field -> m a -> m a fieldAsPath = nameAsPath . _fName parseTableArgs - :: (MonadError QErr m, MonadReader r m, Has FieldMap r, Has OrdByCtx r) + :: ( MonadError QErr m, MonadReader r m + , Has FieldMap r, Has OrdByCtx r + ) => PrepFn m -> ArgsMap -> m RS.TableArgs parseTableArgs f args = do whereExpM <- withArgM args "where" $ parseBoolExp f @@ -115,7 +121,9 @@ parseTableArgs f args = do "\"distinct_on\" columns must match initial \"order_by\" columns" fromField - :: (MonadError QErr m, MonadReader r m, Has FieldMap r, Has OrdByCtx r) + :: ( MonadError QErr m, MonadReader r m, Has FieldMap r + , Has OrdByCtx r, Has SQLGenCtx r + ) => PrepFn m -> QualifiedTable -> AnnBoolExpSQL -> Maybe Int -> Field -> m RS.AnnSel fromField f tn permFilter permLimitM fld = fieldAsPath fld $ do @@ -123,7 +131,8 @@ fromField f tn permFilter permLimitM fld = fieldAsPath fld $ do annFlds <- fromSelSet f (_fType fld) $ _fSelSet fld let tabFrom = RS.TableFrom tn Nothing tabPerm = RS.TablePerm permFilter permLimitM - return $ RS.AnnSelG annFlds tabFrom tabPerm tableArgs + strfyNum <- stringifyNum <$> asks getter + return $ RS.AnnSelG annFlds tabFrom tabPerm tableArgs strfyNum where args = _fArguments fld @@ -229,7 +238,7 @@ parseLimit v = do noIntErr = throwVE "expecting Integer value for \"limit\"" fromFieldByPKey - :: (MonadError QErr m, MonadReader r m, Has FieldMap r, Has OrdByCtx r) + :: (MonadError QErr m, MonadReader r m, Has FieldMap r, Has OrdByCtx r, Has SQLGenCtx r) => ((PGColType, PGColValue) -> m S.SQLExp) -> QualifiedTable -> AnnBoolExpSQL -> Field -> m RS.AnnSel fromFieldByPKey f tn permFilter fld = fieldAsPath fld $ do @@ -237,8 +246,9 @@ fromFieldByPKey f tn permFilter fld = fieldAsPath fld $ do annFlds <- fromSelSet f (_fType fld) $ _fSelSet fld let tabFrom = RS.TableFrom tn Nothing tabPerm = RS.TablePerm permFilter Nothing - return $ RS.AnnSelG annFlds tabFrom tabPerm $ - RS.noTableArgs { RS._taWhere = Just boolExp} + tabArgs = RS.noTableArgs { RS._taWhere = Just boolExp} + strfyNum <- stringifyNum <$> asks getter + return $ RS.AnnSelG annFlds tabFrom tabPerm tabArgs strfyNum convertSelect :: QualifiedTable -> AnnBoolExpSQL -> Maybe Int -> Field -> Convert RespTx @@ -310,7 +320,9 @@ convertAggFld ty selSet = fmap toFields $ throw500 $ "unexpected field in _aggregate node: " <> t fromAggField - :: (MonadError QErr m, MonadReader r m, Has FieldMap r, Has OrdByCtx r) + :: ( MonadError QErr m, MonadReader r m, Has FieldMap r + , Has OrdByCtx r, Has SQLGenCtx r + ) => PrepFn m -> QualifiedTable -> AnnBoolExpSQL -> Maybe Int -> Field 
-> m RS.AnnAggSel fromAggField f tn permFilter permLimit fld = fieldAsPath fld $ do @@ -318,7 +330,8 @@ fromAggField f tn permFilter permLimit fld = fieldAsPath fld $ do aggSelFlds <- fromAggSelSet f (_fType fld) (_fSelSet fld) let tabFrom = RS.TableFrom tn Nothing tabPerm = RS.TablePerm permFilter permLimit - return $ RS.AnnSelG aggSelFlds tabFrom tabPerm tableArgs + strfyNum <- stringifyNum <$> asks getter + return $ RS.AnnSelG aggSelFlds tabFrom tabPerm tableArgs strfyNum where args = _fArguments fld @@ -332,7 +345,7 @@ convertAggSelect qt permFilter permLimit fld = do fromFuncQueryField ::( MonadError QErr m, MonadReader r m, Has FieldMap r - , Has OrdByCtx r, Has FuncArgCtx r + , Has OrdByCtx r, Has FuncArgCtx r, Has SQLGenCtx r ) => PrepFn m -> QualifiedFunction -> Bool -> Field -> m (RS.TableArgs, Either RS.TableAggFlds RS.AnnFlds, S.FromItem) @@ -373,4 +386,5 @@ convertFuncQuery qt qf permFilter permLimit isAgg fld = do fromFuncQueryField prepare qf isAgg fld let tabPerm = RS.TablePerm permFilter permLimit prepArgs <- get - return $ RS.funcQueryTx frmItem qf qt tabPerm tableArgs (sel, prepArgs) + strfyNum <- stringifyNum <$> asks getter + return $ RS.funcQueryTx frmItem qf qt tabPerm tableArgs strfyNum (sel, prepArgs) diff --git a/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs b/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs index 28c4076e270e0..0b2076dccb520 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs +++ b/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs @@ -37,13 +37,14 @@ runGQ :: (MonadIO m, MonadError QErr m) => Q.PGPool -> Q.TxIsolation -> UserInfo + -> SQLGenCtx -> SchemaCache -> HTTP.Manager -> [N.Header] -> GraphQLRequest -> BL.ByteString -- this can be removed when we have a pretty-printer -> m BL.ByteString -runGQ pool isoL userInfo sc manager reqHdrs req rawReq = do +runGQ pool isoL userInfo sqlGenCtx sc manager reqHdrs req rawReq = do (gCtx, _) <- flip runStateT sc $ getGCtx (userRole userInfo) gCtxRoleMap queryParts <- flip runReaderT gCtx $ VQ.getQueryParts req @@ -57,11 +58,11 @@ runGQ pool isoL userInfo sc manager reqHdrs req rawReq = do assertSameLocationNodes typeLocs case typeLocs of - [] -> runHasuraGQ pool isoL userInfo sc queryParts + [] -> runHasuraGQ pool isoL userInfo sqlGenCtx sc queryParts (typeLoc:_) -> case typeLoc of VT.HasuraType -> - runHasuraGQ pool isoL userInfo sc queryParts + runHasuraGQ pool isoL userInfo sqlGenCtx sc queryParts VT.RemoteType _ rsi -> runRemoteGQ manager userInfo reqHdrs rawReq rsi opDef where @@ -102,15 +103,16 @@ runHasuraGQ :: (MonadIO m, MonadError QErr m) => Q.PGPool -> Q.TxIsolation -> UserInfo + -> SQLGenCtx -> SchemaCache -> VQ.QueryParts -> m BL.ByteString -runHasuraGQ pool isoL userInfo sc queryParts = do +runHasuraGQ pool isoL userInfo sqlGenCtx sc queryParts = do (gCtx, _) <- flip runStateT sc $ getGCtx (userRole userInfo) gCtxMap (opTy, fields) <- runReaderT (VQ.validateGQ queryParts) gCtx when (opTy == G.OperationTypeSubscription) $ throw400 UnexpectedPayload "subscriptions are not supported over HTTP, use websockets instead" - let tx = R.resolveSelSet userInfo gCtx opTy fields + let tx = R.resolveSelSet userInfo gCtx sqlGenCtx opTy fields resp <- liftIO (runExceptT $ runTx tx) >>= liftEither return $ encodeGQResp $ GQSuccess resp where diff --git a/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs b/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs index 4d3ecb750acfb..db7ea5e912454 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs +++ 
b/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs @@ -120,6 +120,7 @@ data WSServerEnv , _wseLiveQMap :: !LiveQueryMap , _wseGCtxMap :: !(IORef.IORef SchemaCache) , _wseHManager :: !H.Manager + , _wseSQLCtx :: !SQLGenCtx } onConn :: L.Logger -> WS.OnConnH WSConnData @@ -204,7 +205,7 @@ onStart serverEnv wsConn (StartMsg opId q) msgRaw = catchAndIgnore $ do runHasuraQ userInfo gCtx queryParts = do (opTy, fields) <- either (withComplete . preExecErr) return $ runReaderT (validateGQ queryParts) gCtx - let qTx = withUserInfo userInfo $ resolveSelSet userInfo gCtx opTy fields + let qTx = withUserInfo userInfo $ resolveSelSet userInfo gCtx sqlGenCtx opTy fields case opTy of G.OperationTypeSubscription -> do let lq = LQ.LiveQuery userInfo q @@ -218,7 +219,7 @@ onStart serverEnv wsConn (StartMsg opId q) msgRaw = catchAndIgnore $ do either postExecErr sendSuccResp resp sendCompleted - WSServerEnv logger _ runTx lqMap gCtxMapRef httpMgr = serverEnv + WSServerEnv logger _ runTx lqMap gCtxMapRef httpMgr sqlGenCtx = serverEnv wsId = WS.getWSId wsConn WSConnData userInfoR opMap = WS.getData wsConn @@ -351,12 +352,12 @@ onClose logger lqMap _ wsConn = do createWSServerEnv :: L.Logger - -> H.Manager -> IORef.IORef SchemaCache + -> H.Manager -> SQLGenCtx -> IORef.IORef SchemaCache -> TxRunner -> IO WSServerEnv -createWSServerEnv logger httpManager cacheRef runTx = do +createWSServerEnv logger httpManager sqlGenCtx cacheRef runTx = do (wsServer, lqMap) <- STM.atomically $ (,) <$> WS.createWSServer logger <*> LQ.newLiveQueryMap - return $ WSServerEnv logger wsServer runTx lqMap cacheRef httpManager + return $ WSServerEnv logger wsServer runTx lqMap cacheRef httpManager sqlGenCtx createWSServerApp :: AuthMode -> WSServerEnv -> WS.ServerApp createWSServerApp authMode serverEnv = diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata.hs b/server/src-lib/Hasura/RQL/DDL/Metadata.hs index 8957519dec10f..de557f0190b09 100644 --- a/server/src-lib/Hasura/RQL/DDL/Metadata.hs +++ b/server/src-lib/Hasura/RQL/DDL/Metadata.hs @@ -32,7 +32,6 @@ import qualified Data.Text as T import Hasura.GraphQL.Utils import Hasura.Prelude -import Hasura.RQL.DDL.Utils import Hasura.RQL.Types import Hasura.SQL.Types @@ -124,7 +123,8 @@ clearMetadata = Q.catchE defaultTxErrorHandler $ do runClearMetadata :: ( QErrM m, UserInfoM m, CacheRWM m, MonadTx m - , MonadIO m, HasHttpManager m) + , MonadIO m, HasHttpManager m, HasSQLGenCtx m + ) => ClearMetadata -> m RespBody runClearMetadata _ = do adminOnly @@ -200,6 +200,7 @@ applyQP2 , MonadTx m , MonadIO m , HasHttpManager m + , HasSQLGenCtx m ) => ReplaceMetadata -> m RespBody @@ -267,7 +268,9 @@ applyQP2 (ReplaceMetadata tables templates mFunctions mSchemas) = do DP.addPermP2 (tiName tabInfo) permDef permInfo runReplaceMetadata - :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m) + :: ( QErrM m, UserInfoM m, CacheRWM m, MonadTx m + , MonadIO m, HasHttpManager m, HasSQLGenCtx m + ) => ReplaceMetadata -> m RespBody runReplaceMetadata q = do applyQP1 q @@ -413,7 +416,8 @@ $(deriveToJSON defaultOptions ''ReloadMetadata) runReloadMetadata :: ( QErrM m, UserInfoM m, CacheRWM m - , MonadTx m, MonadIO m, HasHttpManager m) + , MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m + ) => ReloadMetadata -> m RespBody runReloadMetadata _ = do adminOnly diff --git a/server/src-lib/Hasura/RQL/DDL/QueryTemplate.hs b/server/src-lib/Hasura/RQL/DDL/QueryTemplate.hs index f9179f74b7197..9f483f9def0e4 100644 --- a/server/src-lib/Hasura/RQL/DDL/QueryTemplate.hs +++ 
b/server/src-lib/Hasura/RQL/DDL/QueryTemplate.hs @@ -95,7 +95,7 @@ data QueryTP1 deriving (Show, Eq) validateTQuery - :: (QErrM m, UserInfoM m, CacheRM m) + :: (QErrM m, UserInfoM m, CacheRM m, HasSQLGenCtx m) => QueryT -> m QueryTP1 validateTQuery qt = withPathK "args" $ case qt of @@ -122,7 +122,7 @@ collectDeps qt = case qt of QTP1Bulk qp1 -> concatMap collectDeps qp1 createQueryTemplateP1 - :: (UserInfoM m, QErrM m, CacheRM m) + :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m) => CreateQueryTemplate -> m (WithDeps QueryTemplateInfo) createQueryTemplateP1 (CreateQueryTemplate qtn qt _) = do @@ -156,7 +156,7 @@ createQueryTemplateP2 cqt (qti, deps) = do return successMsg runCreateQueryTemplate - :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m) + :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, HasSQLGenCtx m) => CreateQueryTemplate -> m RespBody runCreateQueryTemplate q = createQueryTemplateP1 q >>= createQueryTemplateP2 q diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs index e0e6535abb24a..dd97d6777314e 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs @@ -274,7 +274,7 @@ runUntrackTableQ q = do unTrackExistingTableOrViewP2 q buildSchemaCache - :: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m) + :: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) => m () buildSchemaCache = do -- clean hdb_views @@ -282,6 +282,7 @@ buildSchemaCache = do -- reset the current schemacache writeSchemaCache emptySchemaCache hMgr <- askHttpManager + strfyNum <- stringifyNum <$> askSQLGenCtx tables <- liftTx $ Q.catchE defaultTxErrorHandler fetchTables forM_ tables $ \(sn, tn, isSystemDefined) -> modifyErr (\e -> "table " <> tn <<> "; " <> e) $ @@ -304,16 +305,16 @@ buildSchemaCache = do forM_ permissions $ \(sn, tn, rn, pt, Q.AltJ pDef) -> modifyErr (\e -> "table " <> tn <<> "; role " <> rn <<> "; " <> e) $ case pt of - PTInsert -> permHelper sn tn rn pDef PAInsert - PTSelect -> permHelper sn tn rn pDef PASelect - PTUpdate -> permHelper sn tn rn pDef PAUpdate - PTDelete -> permHelper sn tn rn pDef PADelete + PTInsert -> permHelper strfyNum sn tn rn pDef PAInsert + PTSelect -> permHelper strfyNum sn tn rn pDef PASelect + PTUpdate -> permHelper strfyNum sn tn rn pDef PAUpdate + PTDelete -> permHelper strfyNum sn tn rn pDef PADelete -- Fetch all the query templates qtemplates <- liftTx $ Q.catchE defaultTxErrorHandler fetchQTemplates forM_ qtemplates $ \(qtn, Q.AltJ qtDefVal) -> do qtDef <- decodeValue qtDefVal - qCtx <- mkAdminQCtx <$> askSchemaCache + qCtx <- mkAdminQCtx strfyNum <$> askSchemaCache (qti, deps) <- liftP1WithQCtx qCtx $ createQueryTemplateP1 $ CreateQueryTemplate qtn qtDef Nothing addQTemplateToCache qti deps @@ -325,7 +326,7 @@ buildSchemaCache = do let qt = QualifiedObject sn tn subTableP2Setup qt trid etc allCols <- getCols . 
tiFieldInfoMap <$> askTabInfo qt - liftTx $ mkTriggerQ trid trn qt allCols (etcDefinition etc) + liftTx $ mkTriggerQ trid trn qt allCols strfyNum (etcDefinition etc) functions <- liftTx $ Q.catchE defaultTxErrorHandler fetchFunctions forM_ functions $ \(sn, fn) -> @@ -346,8 +347,8 @@ buildSchemaCache = do writeSchemaCache postMergeSc { scDefaultRemoteGCtx = defGCtx } where - permHelper sn tn rn pDef pa = do - qCtx <- mkAdminQCtx <$> askSchemaCache + permHelper strfyNum sn tn rn pDef pa = do + qCtx <- mkAdminQCtx strfyNum <$> askSchemaCache perm <- decodeValue pDef let qt = QualifiedObject sn tn permDef = PermDef rn perm Nothing @@ -417,7 +418,7 @@ execRawSQL = in e {qeInternal = Just $ toJSON txe} execWithMDCheck - :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m) + :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) => RunSQL -> m RunSQLRes execWithMDCheck (RunSQL t cascade _) = do @@ -478,7 +479,8 @@ execWithMDCheck (RunSQL t cascade _) = do forM_ (M.toList $ tiEventTriggerInfoMap ti) $ \(trn, eti) -> do let opsDef = etiOpsDef eti trid = etiId eti - liftTx $ mkTriggerQ trid trn tn cols opsDef + strfyNum <- stringifyNum <$> askSQLGenCtx + liftTx $ mkTriggerQ trid trn tn cols strfyNum opsDef -- refresh the gCtxMap in schema cache refreshGCtxMapInSchema @@ -492,7 +494,7 @@ isAltrDropReplace = either throwErr return . matchRegex regex False regex = "alter|drop|replace" runRunSQL - :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m) + :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) => RunSQL -> m RespBody runRunSQL q@(RunSQL t _ mChkMDCnstcy) = do adminOnly diff --git a/server/src-lib/Hasura/RQL/DDL/Subscribe.hs b/server/src-lib/Hasura/RQL/DDL/Subscribe.hs index b77ae1d6d164c..cbf76aafb8e36 100644 --- a/server/src-lib/Hasura/RQL/DDL/Subscribe.hs +++ b/server/src-lib/Hasura/RQL/DDL/Subscribe.hs @@ -59,9 +59,10 @@ getTriggerSql -> TriggerName -> QualifiedTable -> [PGColInfo] + -> Bool -> Maybe SubscribeOpSpec -> Maybe T.Text -getTriggerSql op trid trn qt allCols spec = +getTriggerSql op trid trn qt allCols strfyNum spec = let globalCtx = HashMap.fromList [ (T.pack "ID", trid) , (T.pack "NAME", trn) @@ -101,7 +102,7 @@ getTriggerSql op trid trn qt allCols spec = applyRowToJson e = S.SEFnApp "row_to_json" [e] Nothing applyRow e = S.SEFnApp "row" [e] Nothing toExtr = flip S.Extractor Nothing - mkQId opVar colInfo = toJSONableExp (pgiType colInfo) $ + mkQId opVar colInfo = toJSONableExp strfyNum (pgiType colInfo) $ S.SEQIden $ S.QIden (opToQual opVar) $ toIden $ pgiName colInfo opToQual = S.QualVar . 
opToTxt @@ -121,12 +122,13 @@ mkTriggerQ -> TriggerName -> QualifiedTable -> [PGColInfo] + -> Bool -> TriggerOpsDef -> Q.TxE QErr () -mkTriggerQ trid trn qt allCols (TriggerOpsDef insert update delete) = do - let msql = getTriggerSql INSERT trid trn qt allCols insert - <> getTriggerSql UPDATE trid trn qt allCols update - <> getTriggerSql DELETE trid trn qt allCols delete +mkTriggerQ trid trn qt allCols strfyNum (TriggerOpsDef insert update delete) = do + let msql = getTriggerSql INSERT trid trn qt allCols strfyNum insert + <> getTriggerSql UPDATE trid trn qt allCols strfyNum update + <> getTriggerSql DELETE trid trn qt allCols strfyNum delete case msql of Just sql -> Q.multiQE defaultTxErrorHandler (Q.fromText sql) Nothing -> throw500 "no trigger sql generated" @@ -139,9 +141,10 @@ delTriggerQ trn = mapM_ (\op -> Q.unitQE addEventTriggerToCatalog :: QualifiedTable -> [PGColInfo] + -> Bool -> EventTriggerConf -> Q.TxE QErr TriggerId -addEventTriggerToCatalog qt allCols etc = do +addEventTriggerToCatalog qt allCols strfyNum etc = do ids <- map runIdentity <$> Q.listQE defaultTxErrorHandler [Q.sql| INSERT into hdb_catalog.event_triggers @@ -151,7 +154,7 @@ addEventTriggerToCatalog qt allCols etc = do |] (name, sn, tn, Q.AltJ $ toJSON etc) True trid <- getTrid ids - mkTriggerQ trid name qt allCols opsdef + mkTriggerQ trid name qt allCols strfyNum opsdef return trid where QualifiedObject sn tn = qt @@ -171,9 +174,10 @@ delEventTriggerFromCatalog trn = do updateEventTriggerToCatalog :: QualifiedTable -> [PGColInfo] + -> Bool -> EventTriggerConf -> Q.TxE QErr TriggerId -updateEventTriggerToCatalog qt allCols etc = do +updateEventTriggerToCatalog qt allCols strfyNum etc = do ids <- map runIdentity <$> Q.listQE defaultTxErrorHandler [Q.sql| UPDATE hdb_catalog.event_triggers @@ -184,7 +188,7 @@ updateEventTriggerToCatalog qt allCols etc = do |] (Q.AltJ $ toJSON etc, name) True trid <- getTrid ids delTriggerQ name - mkTriggerQ trid name qt allCols opsdef + mkTriggerQ trid name qt allCols strfyNum opsdef return trid where EventTriggerConf name opsdef _ _ _ _ = etc @@ -283,20 +287,21 @@ getTrigDefDeps qt (TriggerOpsDef mIns mUpd mDel) = SubCArray pgcols -> pgcols subTableP2 - :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m) + :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasSQLGenCtx m) => QualifiedTable -> Bool -> EventTriggerConf -> m () subTableP2 qt replace etc = do allCols <- getCols . tiFieldInfoMap <$> askTabInfo qt + strfyNum <- stringifyNum <$> askSQLGenCtx trid <- if replace then do delEventTriggerFromCache qt (etcName etc) - liftTx $ updateEventTriggerToCatalog qt allCols etc + liftTx $ updateEventTriggerToCatalog qt allCols strfyNum etc else - liftTx $ addEventTriggerToCatalog qt allCols etc + liftTx $ addEventTriggerToCatalog qt allCols strfyNum etc subTableP2Setup qt trid etc runCreateEventTriggerQuery - :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m) + :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasSQLGenCtx m) => CreateEventTriggerQuery -> m RespBody runCreateEventTriggerQuery q = do (qt, replace, etc) <- subTableP1 q diff --git a/server/src-lib/Hasura/RQL/DML/Count.hs b/server/src-lib/Hasura/RQL/DML/Count.hs index d5ecd5dfeaea1..119accc3e169d 100644 --- a/server/src-lib/Hasura/RQL/DML/Count.hs +++ b/server/src-lib/Hasura/RQL/DML/Count.hs @@ -106,7 +106,7 @@ validateCountQWith prepValBuilder (CountQuery qt mDistCols mWhere) = do "Relationships can't be used in \"distinct\"." 
validateCountQ - :: (QErrM m, UserInfoM m, CacheRM m) + :: (QErrM m, UserInfoM m, CacheRM m, HasSQLGenCtx m) => CountQuery -> m (CountQueryP1, DS.Seq Q.PrepArg) validateCountQ = liftDMLP1 . validateCountQWith binRHSBuilder @@ -124,7 +124,7 @@ countQToTx (u, p) = do BB.byteString "{\"count\":" <> BB.intDec c <> BB.char7 '}' runCount - :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m) + :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, HasSQLGenCtx m) => CountQuery -> m RespBody runCount q = validateCountQ q >>= countQToTx diff --git a/server/src-lib/Hasura/RQL/DML/Delete.hs b/server/src-lib/Hasura/RQL/DML/Delete.hs index 382919b806877..d6f6fced30069 100644 --- a/server/src-lib/Hasura/RQL/DML/Delete.hs +++ b/server/src-lib/Hasura/RQL/DML/Delete.hs @@ -30,9 +30,9 @@ data DeleteQueryP1 } deriving (Show, Eq) mkSQLDelete - :: DeleteQueryP1 -> S.SelectWith -mkSQLDelete (DeleteQueryP1 tn (fltr, wc) mutFlds) = - mkSelWith tn (S.CTEDelete delete) mutFlds False + :: Bool -> DeleteQueryP1 -> S.SelectWith +mkSQLDelete strfyNum (DeleteQueryP1 tn (fltr, wc) mutFlds) = + mkSelWith tn (S.CTEDelete delete) mutFlds False strfyNum where delete = S.SQLDelete tn Nothing tableFltr $ Just S.returningStar tableFltr = Just $ S.WhereFrag $ @@ -90,20 +90,21 @@ validateDeleteQWith prepValBuilder (DeleteQuery tableName rqlBE mRetCols) = do <> "without \"select\" permission on the table" validateDeleteQ - :: (QErrM m, UserInfoM m, CacheRM m) + :: (QErrM m, UserInfoM m, CacheRM m, HasSQLGenCtx m) => DeleteQuery -> m (DeleteQueryP1, DS.Seq Q.PrepArg) validateDeleteQ = liftDMLP1 . validateDeleteQWith binRHSBuilder -deleteQueryToTx :: (DeleteQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody -deleteQueryToTx (u, p) = +deleteQueryToTx :: Bool -> (DeleteQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody +deleteQueryToTx strfyNum (u, p) = runIdentity . Q.getRow <$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder deleteSQL) (toList p) True where - deleteSQL = toSQL $ mkSQLDelete u + deleteSQL = toSQL $ mkSQLDelete strfyNum u runDelete - :: (QErrM m, UserInfoM m, CacheRM m, MonadTx m) + :: (QErrM m, UserInfoM m, CacheRM m, MonadTx m, HasSQLGenCtx m) => DeleteQuery -> m RespBody -runDelete q = - validateDeleteQ q >>= liftTx . deleteQueryToTx +runDelete q = do + strfyNum <- stringifyNum <$> askSQLGenCtx + validateDeleteQ q >>= liftTx . deleteQueryToTx strfyNum diff --git a/server/src-lib/Hasura/RQL/DML/Insert.hs b/server/src-lib/Hasura/RQL/DML/Insert.hs index 951103535ba2e..b5bca89194cd0 100644 --- a/server/src-lib/Hasura/RQL/DML/Insert.hs +++ b/server/src-lib/Hasura/RQL/DML/Insert.hs @@ -40,9 +40,9 @@ data InsertQueryP1 , iqp1MutFlds :: !MutFlds } deriving (Show, Eq) -mkSQLInsert :: InsertQueryP1 -> S.SelectWith -mkSQLInsert (InsertQueryP1 tn vn cols vals c mutFlds) = - mkSelWith tn (S.CTEInsert insert) mutFlds False +mkSQLInsert :: Bool -> InsertQueryP1 -> S.SelectWith +mkSQLInsert strfyNum (InsertQueryP1 tn vn cols vals c mutFlds) = + mkSelWith tn (S.CTEInsert insert) mutFlds False strfyNum where insert = S.SQLInsert vn cols vals (toSQLConflict <$> c) $ Just S.returningStar @@ -227,30 +227,30 @@ decodeInsObjs v = do return objs convInsQ - :: (QErrM m, UserInfoM m, CacheRM m) + :: (QErrM m, UserInfoM m, CacheRM m, HasSQLGenCtx m) => InsertQuery -> m (InsertQueryP1, DS.Seq Q.PrepArg) convInsQ = liftDMLP1 . convInsertQuery (withPathK "objects" . 
decodeInsObjs) binRHSBuilder -insertP2 :: (InsertQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody -insertP2 (u, p) = +insertP2 :: Bool -> (InsertQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody +insertP2 strfyNum (u, p) = runIdentity . Q.getRow <$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder insertSQL) (toList p) True where - insertSQL = toSQL $ mkSQLInsert u + insertSQL = toSQL $ mkSQLInsert strfyNum u data ConflictCtx = CCUpdate !ConstraintName ![PGCol] !S.BoolExp | CCDoNothing !(Maybe ConstraintName) deriving (Show, Eq) -nonAdminInsert :: (InsertQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody -nonAdminInsert (insQueryP1, args) = do +nonAdminInsert :: Bool -> (InsertQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody +nonAdminInsert strfyNum (insQueryP1, args) = do conflictCtxM <- mapM extractConflictCtx conflictClauseP1 setConflictCtx conflictCtxM - insertP2 (withoutConflictClause, args) + insertP2 strfyNum (withoutConflictClause, args) where withoutConflictClause = insQueryP1{iqp1Conflict=Nothing} conflictClauseP1 = iqp1Conflict insQueryP1 @@ -287,10 +287,11 @@ setConflictCtx conflictCtxM = do <> " " <> toSQLTxt (S.WhereFrag filtr) runInsert - :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m) + :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, HasSQLGenCtx m) => InsertQuery -> m RespBody runInsert q = do res <- convInsQ q role <- userRole <$> askUserInfo - liftTx $ bool (nonAdminInsert res) (insertP2 res) $ isAdmin role + strfyNum <- stringifyNum <$> askSQLGenCtx + liftTx $ bool (nonAdminInsert strfyNum res) (insertP2 strfyNum res) $ isAdmin role diff --git a/server/src-lib/Hasura/RQL/DML/Internal.hs b/server/src-lib/Hasura/RQL/DML/Internal.hs index 604280be59584..5645acbdc3cde 100644 --- a/server/src-lib/Hasura/RQL/DML/Internal.hs +++ b/server/src-lib/Hasura/RQL/DML/Internal.hs @@ -27,7 +27,7 @@ newtype DMLP1 a ) liftDMLP1 - :: (QErrM m, UserInfoM m, CacheRM m) + :: (QErrM m, UserInfoM m, CacheRM m, HasSQLGenCtx m) => DMLP1 a -> m (a, DS.Seq Q.PrepArg) liftDMLP1 = liftP1 . flip runStateT DS.empty . 
unDMLP1 @@ -38,6 +38,9 @@ instance CacheRM DMLP1 where instance UserInfoM DMLP1 where askUserInfo = DMLP1 $ lift askUserInfo +instance HasSQLGenCtx DMLP1 where + askSQLGenCtx = DMLP1 $ lift askSQLGenCtx + mkAdminRolePermInfo :: TableInfo -> RolePermInfo mkAdminRolePermInfo ti = RolePermInfo (Just i) (Just s) (Just u) (Just d) @@ -220,8 +223,8 @@ dmlTxErrorHandler p2Res = Just (code, msg) -> err400 code msg where err = simplifyError p2Res -toJSONableExp :: PGColType -> S.SQLExp -> S.SQLExp -toJSONableExp colTy expn +toJSONableExp :: Bool -> PGColType -> S.SQLExp -> S.SQLExp +toJSONableExp strfyNum colTy expn | colTy == PGGeometry || colTy == PGGeography = S.SEFnApp "ST_AsGeoJSON" [ expn @@ -229,7 +232,7 @@ toJSONableExp colTy expn , S.SEUnsafe "4" -- to print out crs ] Nothing `S.SETyAnn` S.jsonType - | colTy == PGBigInt || colTy == PGBigSerial = + | isBigNum colTy && strfyNum = expn `S.SETyAnn` S.textType | otherwise = expn diff --git a/server/src-lib/Hasura/RQL/DML/QueryTemplate.hs b/server/src-lib/Hasura/RQL/DML/QueryTemplate.hs index 9411e22375de7..3ba5667159bf9 100644 --- a/server/src-lib/Hasura/RQL/DML/QueryTemplate.hs +++ b/server/src-lib/Hasura/RQL/DML/QueryTemplate.hs @@ -92,7 +92,7 @@ mkSelQWithArgs (DMLQuery tn (SelectG c w o lim offset)) args = do return $ DMLQuery tn $ SelectG c w o intLim intOffset convQT - :: (UserInfoM m, QErrM m, CacheRM m) + :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m) => TemplateArgs -> QueryT -> m QueryTProc @@ -114,26 +114,32 @@ convQT args qt = case qt of f = buildPrepArg args execQueryTemplateP1 - :: (UserInfoM m, QErrM m, CacheRM m) + :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m) => ExecQueryTemplate -> m QueryTProc execQueryTemplateP1 (ExecQueryTemplate qtn args) = do (QueryTemplateInfo _ qt) <- askQTemplateInfo qtn convQT args qt -execQueryTP2 :: (QErrM m, CacheRM m, MonadTx m) => QueryTProc -> m RespBody -execQueryTP2 qtProc = case qtProc of - QTPInsert qp -> liftTx $ R.insertP2 qp - QTPSelect qp -> liftTx $ R.selectP2 False qp - QTPUpdate qp -> liftTx $ R.updateQueryToTx qp - QTPDelete qp -> liftTx $ R.deleteQueryToTx qp - QTPCount qp -> RC.countQToTx qp - QTPBulk qps -> do - respList <- mapM execQueryTP2 qps - let bsVector = V.fromList respList - return $ BB.toLazyByteString $ encodeJSONVector BB.lazyByteString bsVector +execQueryTP2 + :: (QErrM m, CacheRM m, MonadTx m, HasSQLGenCtx m) + => QueryTProc -> m RespBody +execQueryTP2 qtProc = do + strfyNum <- stringifyNum <$> askSQLGenCtx + case qtProc of + QTPInsert qp -> liftTx $ R.insertP2 strfyNum qp + QTPSelect qp -> liftTx $ R.selectP2 False qp + QTPUpdate qp -> liftTx $ R.updateQueryToTx strfyNum qp + QTPDelete qp -> liftTx $ R.deleteQueryToTx strfyNum qp + QTPCount qp -> RC.countQToTx qp + QTPBulk qps -> do + respList <- mapM execQueryTP2 qps + let bsVector = V.fromList respList + return $ BB.toLazyByteString $ encodeJSONVector BB.lazyByteString bsVector runExecQueryTemplate - :: (QErrM m, UserInfoM m, CacheRM m, MonadTx m) + :: ( QErrM m, UserInfoM m, CacheRM m + , MonadTx m, HasSQLGenCtx m + ) => ExecQueryTemplate -> m RespBody runExecQueryTemplate q = execQueryTemplateP1 q >>= execQueryTP2 diff --git a/server/src-lib/Hasura/RQL/DML/Returning.hs b/server/src-lib/Hasura/RQL/DML/Returning.hs index f5658c83d5ff2..de345acd38aac 100644 --- a/server/src-lib/Hasura/RQL/DML/Returning.hs +++ b/server/src-lib/Hasura/RQL/DML/Returning.hs @@ -44,8 +44,8 @@ qualTableToAliasIden :: QualifiedTable -> Iden qualTableToAliasIden qt = Iden $ snakeCaseTable qt <> "__mutation_result_alias" 
-mkMutFldExp :: QualifiedTable -> Bool -> MutFld -> S.SQLExp -mkMutFldExp qt singleObj = \case +mkMutFldExp :: QualifiedTable -> Bool -> Bool -> MutFld -> S.SQLExp +mkMutFldExp qt singleObj strfyNum = \case MCount -> S.SESelect $ S.mkSelect { S.selExtr = [S.Extractor S.countStar Nothing] @@ -57,13 +57,13 @@ mkMutFldExp qt singleObj = \case let tabFrom = TableFrom qt $ Just $ qualTableToAliasIden qt tabPerm = TablePerm annBoolExpTrue Nothing in S.SESelect $ mkSQLSelect singleObj $ - AnnSelG selFlds tabFrom tabPerm noTableArgs + AnnSelG selFlds tabFrom tabPerm noTableArgs strfyNum where frmItem = S.FIIden $ qualTableToAliasIden qt mkSelWith - :: QualifiedTable -> S.CTE -> MutFlds -> Bool -> S.SelectWith -mkSelWith qt cte mutFlds singleObj = + :: QualifiedTable -> S.CTE -> MutFlds -> Bool -> Bool -> S.SelectWith +mkSelWith qt cte mutFlds singleObj strfyNum = S.SelectWith [(alias, cte)] sel where alias = S.Alias $ qualTableToAliasIden qt @@ -73,7 +73,7 @@ mkSelWith qt cte mutFlds singleObj = jsonBuildObjArgs = flip concatMap mutFlds $ - \(k, mutFld) -> [S.SELit k, mkMutFldExp qt singleObj mutFld] + \(k, mutFld) -> [S.SELit k, mkMutFldExp qt singleObj strfyNum mutFld] encodeJSONVector :: (a -> BB.Builder) -> V.Vector a -> BB.Builder encodeJSONVector builder xs diff --git a/server/src-lib/Hasura/RQL/DML/Select.hs b/server/src-lib/Hasura/RQL/DML/Select.hs index 930eb69654ab1..e41ec2ea32422 100644 --- a/server/src-lib/Hasura/RQL/DML/Select.hs +++ b/server/src-lib/Hasura/RQL/DML/Select.hs @@ -140,7 +140,7 @@ convOrderByElem (flds, spi) = \case convOrderByElem (relFim, relSpi) rest convSelectQ - :: (UserInfoM m, QErrM m, CacheRM m) + :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m) => FieldInfoMap -- Table information of current table -> SelPermInfo -- Additional select permission info -> SelectQExt -- Given Select Query @@ -178,9 +178,11 @@ convSelectQ fieldInfoMap selPermInfo selQ prepValBuilder = do let tabFrom = TableFrom (spiTable selPermInfo) Nothing tabPerm = TablePerm (spiFilter selPermInfo) mPermLimit - return $ AnnSelG annFlds tabFrom tabPerm $ - TableArgs wClause annOrdByM mQueryLimit - (S.intToSQLExp <$> mQueryOffset) Nothing + tabArgs = TableArgs wClause annOrdByM mQueryLimit + (S.intToSQLExp <$> mQueryOffset) Nothing + + strfyNum <- stringifyNum <$> askSQLGenCtx + return $ AnnSelG annFlds tabFrom tabPerm tabArgs strfyNum where mQueryOffset = sqOffset selQ @@ -200,7 +202,7 @@ convExtSimple fieldInfoMap selPermInfo pgCol = do relWhenPGErr = "relationships have to be expanded" convExtRel - :: (UserInfoM m, QErrM m, CacheRM m) + :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m) => FieldInfoMap -> RelName -> Maybe RelName @@ -248,7 +250,7 @@ partAnnFlds flds = getSelectDeps :: AnnSel -> [SchemaDependency] -getSelectDeps (AnnSelG flds tabFrm _ tableArgs) = +getSelectDeps (AnnSelG flds tabFrm _ tableArgs _) = mkParentDep tn : fromMaybe [] whereDeps <> colDeps @@ -276,7 +278,7 @@ getSelectDeps (AnnSelG flds tabFrm _ tableArgs) = getAnnSel (ASAgg _) = Nothing convSelectQuery - :: (UserInfoM m, QErrM m, CacheRM m) + :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m) => (PGColType -> Value -> m S.SQLExp) -> SelectQuery -> m AnnSel @@ -289,15 +291,15 @@ convSelectQuery prepArgBuilder (DMLQuery qt selQ) = do funcQueryTx :: S.FromItem -> QualifiedFunction -> QualifiedTable - -> TablePerm -> TableArgs + -> TablePerm -> TableArgs -> Bool -> (Either TableAggFlds AnnFlds, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody -funcQueryTx frmItem fn tn tabPerm tabArgs (eSelFlds, p) = +funcQueryTx frmItem fn 
tn tabPerm tabArgs strfyNum (eSelFlds, p) = runIdentity . Q.getRow <$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder sqlBuilder) (toList p) True where sqlBuilder = toSQL $ - mkFuncSelectWith fn tn tabPerm tabArgs eSelFlds frmItem + mkFuncSelectWith fn tn tabPerm tabArgs strfyNum eSelFlds frmItem selectAggP2 :: (AnnAggSel, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody selectAggP2 (sel, p) = @@ -315,7 +317,7 @@ selectP2 asSingleObject (sel, p) = selectSQL = toSQL $ mkSQLSelect asSingleObject sel phaseOne - :: (QErrM m, UserInfoM m, CacheRM m) + :: (QErrM m, UserInfoM m, CacheRM m, HasSQLGenCtx m) => SelectQuery -> m (AnnSel, DS.Seq Q.PrepArg) phaseOne = liftDMLP1 . convSelectQuery binRHSBuilder @@ -325,7 +327,7 @@ phaseTwo = liftTx . selectP2 False runSelect - :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m) + :: (QErrM m, UserInfoM m, CacheRWM m, HasSQLGenCtx m, MonadTx m) => SelectQuery -> m RespBody runSelect q = phaseOne q >>= phaseTwo diff --git a/server/src-lib/Hasura/RQL/DML/Select/Internal.hs b/server/src-lib/Hasura/RQL/DML/Select/Internal.hs index b33a723bd4f42..28a44d53fbc98 100644 --- a/server/src-lib/Hasura/RQL/DML/Select/Internal.hs +++ b/server/src-lib/Hasura/RQL/DML/Select/Internal.hs @@ -123,9 +123,9 @@ ordByFldName = FieldName "order_by" -- json_build_object is slower than row_to_json hence it is only -- used when needed buildJsonObject - :: Iden -> FieldName -> ArrRelCtx + :: Iden -> FieldName -> ArrRelCtx -> Bool -> [(FieldName, AnnFld)] -> (S.Alias, S.SQLExp) -buildJsonObject pfx parAls arrRelCtx flds = +buildJsonObject pfx parAls arrRelCtx strfyNum flds = if any ( (> 63) . T.length . getFieldNameTxt . fst ) flds then withJsonBuildObj parAls jsonBuildObjExps else withRowToJSON parAls rowToJsonExtrs @@ -142,7 +142,7 @@ buildJsonObject pfx parAls arrRelCtx flds = toSQLFld :: (FieldName -> S.SQLExp -> f) -> (FieldName, AnnFld) -> f toSQLFld f (fldAls, fld) = f fldAls $ case fld of - FCol col -> toJSONableExp (pgiType col) $ + FCol col -> toJSONableExp strfyNum (pgiType col) $ S.mkQIdenExp (mkBaseTableAls pfx) $ pgiName col FExp e -> S.SELit e FObj objSel -> @@ -191,6 +191,7 @@ processAnnOrderByItem :: Iden -> FieldName -> ArrRelCtx + -> Bool -> AnnOrderByItem -- the extractors which will select the needed columns -> ( (S.Alias, S.SQLExp) @@ -199,13 +200,15 @@ processAnnOrderByItem -- extra nodes for order by , OrderByNode ) -processAnnOrderByItem pfx parAls arrRelCtx (OrderByItemG obTyM annObCol obNullsM) = +processAnnOrderByItem pfx parAls arrRelCtx strfyNum obItemG = ( (obColAls, obColExp) , sqlOrdByItem , relNodeM ) where - ((obColAls, obColExp), relNodeM) = processAnnOrderByCol pfx parAls arrRelCtx annObCol + OrderByItemG obTyM annObCol obNullsM = obItemG + ((obColAls, obColExp), relNodeM) = + processAnnOrderByCol pfx parAls arrRelCtx strfyNum annObCol sqlOrdByItem = S.OrderByItem (S.SEIden $ toIden obColAls) @@ -215,13 +218,14 @@ processAnnOrderByCol :: Iden -> FieldName -> ArrRelCtx + -> Bool -> AnnObCol -- the extractors which will select the needed columns -> ( (S.Alias, S.SQLExp) -- extra nodes for order by , OrderByNode ) -processAnnOrderByCol pfx parAls arrRelCtx = \case +processAnnOrderByCol pfx parAls arrRelCtx strfyNum = \case AOCPG colInfo -> let qualCol = S.mkQIdenExp (mkBaseTableAls pfx) (toIden $ pgiName colInfo) @@ -233,7 +237,7 @@ processAnnOrderByCol pfx parAls arrRelCtx = \case AOCObj (RelInfo rn _ colMapping relTab _) relFltr rest -> let relPfx = mkObjRelTableAls pfx rn ((nesAls, nesCol), ordByNode) = - processAnnOrderByCol relPfx ordByFldName emptyArrRelCtx 
rest + processAnnOrderByCol relPfx ordByFldName emptyArrRelCtx strfyNum rest (objNodeM, arrNodeM) = case ordByNode of OBNNothing -> (Nothing, Nothing) OBNObjNode name node -> (Just (name, node), Nothing) @@ -259,7 +263,7 @@ processAnnOrderByCol pfx parAls arrRelCtx = \case tabPerm = TablePerm relFltr Nothing (extr, arrFlds) = mkAggObExtrAndFlds annAggOb selFld = TAFAgg arrFlds - bn = mkBaseNode arrPfx fldName selFld tabFrom tabPerm noTableArgs + bn = mkBaseNode arrPfx fldName selFld tabFrom tabPerm noTableArgs strfyNum aggNode = ArrNode [extr] colMapping $ mergeBaseNodes bn $ mkEmptyBaseNode arrPfx tabFrom obAls = arrPfx <> Iden "." <> toIden fldName @@ -309,7 +313,7 @@ aggSelToArrNode pfx als aggSel = ArrNode [extr] colMapping mergedBN where AnnRelG _ colMapping annSel = aggSel - AnnSelG aggFlds tabFrm tabPerm tabArgs = annSel + AnnSelG aggFlds tabFrm tabPerm tabArgs strfyNum = annSel fldAls = S.Alias $ toIden als extr = flip S.Extractor (Just fldAls) $ S.applyJsonBuildObj $ @@ -322,7 +326,7 @@ aggSelToArrNode pfx als aggSel = mergedBN = foldr mergeBaseNodes emptyBN allBNs mkAggBaseNode (fn, selFld) = - mkBaseNode pfx fn selFld tabFrm tabPerm tabArgs + mkBaseNode pfx fn selFld tabFrm tabPerm tabArgs strfyNum selFldToExtr (FieldName t, fld) = (:) (S.SELit t) $ pure $ case fld of TAFAgg flds -> aggFldToExp flds @@ -381,6 +385,7 @@ fetchOrdByAggRels orderByM = fromMaybe [] relNamesM mkOrdByItems :: Iden -> FieldName -> Maybe (NE.NonEmpty AnnOrderByItem) + -> Bool -> ArrRelCtx -- extractors -> ( [(S.Alias, S.SQLExp)] @@ -391,10 +396,10 @@ mkOrdByItems -- final order by expression , Maybe S.OrderByExp ) -mkOrdByItems pfx fldAls orderByM arrRelCtx = +mkOrdByItems pfx fldAls orderByM strfyNum arrRelCtx = (obExtrs, ordByObjsMap, ordByArrsMap, ordByExpM) where - procAnnOrdBy' = processAnnOrderByItem pfx fldAls arrRelCtx + procAnnOrdBy' = processAnnOrderByItem pfx fldAls arrRelCtx strfyNum procOrdByM = unzip3 . map procAnnOrdBy' . 
toList <$> orderByM @@ -415,8 +420,8 @@ mkOrdByItems pfx fldAls orderByM arrRelCtx = mkBaseNode :: Iden -> FieldName -> TableAggFld -> TableFrom - -> TablePerm -> TableArgs -> BaseNode -mkBaseNode pfx fldAls annSelFlds tableFrom tablePerm tableArgs = + -> TablePerm -> TableArgs -> Bool -> BaseNode +mkBaseNode pfx fldAls annSelFlds tableFrom tablePerm tableArgs strfyNum = BaseNode pfx distExprM fromItem finalWhere ordByExpM finalLimit offsetM allExtrs allObjsWithOb allArrsWithOb where @@ -429,7 +434,7 @@ mkBaseNode pfx fldAls annSelFlds tableFrom tablePerm tableArgs = TAFNodes flds -> let arrFlds = mapMaybe getAnnArr flds arrRelCtx = mkArrRelCtx arrFlds - selExtr = buildJsonObject pfx fldAls arrRelCtx flds + selExtr = buildJsonObject pfx fldAls arrRelCtx strfyNum flds -- all object relationships objNodes = HM.fromListWith mergeObjNodes $ map mkObjItem (mapMaybe getAnnObj flds) @@ -487,7 +492,7 @@ mkBaseNode pfx fldAls annSelFlds tableFrom tablePerm tableArgs = mkArrRelCtx arrSels = ArrRelCtx arrSels aggOrdByRelNames - mkOrdByItems' = mkOrdByItems pfx fldAls orderByM + mkOrdByItems' = mkOrdByItems pfx fldAls orderByM strfyNum distItemsM = processDistinctOnCol pfx <$> distM distExprM = fst <$> distItemsM @@ -517,9 +522,9 @@ mkBaseNode pfx fldAls annSelFlds tableFrom tablePerm tableArgs = annSelToBaseNode :: Iden -> FieldName -> AnnSel -> BaseNode annSelToBaseNode pfx fldAls annSel = - mkBaseNode pfx fldAls (TAFNodes selFlds) tabFrm tabPerm tabArgs + mkBaseNode pfx fldAls (TAFNodes selFlds) tabFrm tabPerm tabArgs strfyNum where - AnnSelG selFlds tabFrm tabPerm tabArgs = annSel + AnnSelG selFlds tabFrm tabPerm tabArgs strfyNum = annSel mkObjNode :: Iden -> (FieldName, ObjSel) -> ObjNode mkObjNode pfx (fldName, AnnRelG _ rMapn rAnnSel) = @@ -612,9 +617,9 @@ mkSQLSelect isSingleObject annSel = mkFuncSelectWith :: QualifiedFunction -> QualifiedTable - -> TablePerm -> TableArgs -> Either TableAggFlds AnnFlds - -> S.FromItem -> S.SelectWith -mkFuncSelectWith qf tn tabPerm tabArgs eSelFlds frmItem = selWith + -> TablePerm -> TableArgs -> Bool + -> Either TableAggFlds AnnFlds -> S.FromItem -> S.SelectWith +mkFuncSelectWith qf tn tabPerm tabArgs strfyNum eSelFlds frmItem = selWith where -- SELECT * FROM function_name(args) funcSel = S.mkSelect { S.selFrom = Just $ S.FromExp [frmItem] @@ -623,9 +628,9 @@ mkFuncSelectWith qf tn tabPerm tabArgs eSelFlds frmItem = selWith mainSel = case eSelFlds of Left aggFlds -> mkAggSelect $ - AnnSelG aggFlds tabFrom tabPerm tabArgs + AnnSelG aggFlds tabFrom tabPerm tabArgs strfyNum Right annFlds -> mkSQLSelect False $ - AnnSelG annFlds tabFrom tabPerm tabArgs + AnnSelG annFlds tabFrom tabPerm tabArgs strfyNum tabFrom = TableFrom tn $ Just $ toIden funcAls diff --git a/server/src-lib/Hasura/RQL/DML/Select/Types.hs b/server/src-lib/Hasura/RQL/DML/Select/Types.hs index 8ded97bf40f79..5d15c8a7b6292 100644 --- a/server/src-lib/Hasura/RQL/DML/Select/Types.hs +++ b/server/src-lib/Hasura/RQL/DML/Select/Types.hs @@ -140,10 +140,11 @@ data TablePerm data AnnSelG a = AnnSelG - { _asnFields :: !a - , _asnFrom :: !TableFrom - , _asnPerm :: !TablePerm - , _asnArgs :: !TableArgs + { _asnFields :: !a + , _asnFrom :: !TableFrom + , _asnPerm :: !TablePerm + , _asnArgs :: !TableArgs + , _asnStrfyNum :: !Bool } deriving (Show, Eq) type AnnSel = AnnSelG AnnFlds diff --git a/server/src-lib/Hasura/RQL/DML/Update.hs b/server/src-lib/Hasura/RQL/DML/Update.hs index b1961bdbaf47c..69fc3c576d175 100644 --- a/server/src-lib/Hasura/RQL/DML/Update.hs +++ b/server/src-lib/Hasura/RQL/DML/Update.hs @@ 
-33,9 +33,9 @@ data UpdateQueryP1 } deriving (Show, Eq) mkSQLUpdate - :: UpdateQueryP1 -> S.SelectWith -mkSQLUpdate (UpdateQueryP1 tn setExps (permFltr, wc) mutFlds) = - mkSelWith tn (S.CTEUpdate update) mutFlds False + :: Bool -> UpdateQueryP1 -> S.SelectWith +mkSQLUpdate strfyNum (UpdateQueryP1 tn setExps (permFltr, wc) mutFlds) = + mkSelWith tn (S.CTEUpdate update) mutFlds False strfyNum where update = S.SQLUpdate tn setExp Nothing tableFltr $ Just S.returningStar setExp = S.SetExp $ map S.SetExpItem setExps @@ -171,20 +171,21 @@ validateUpdateQueryWith f uq = do <> "without \"select\" permission on the table" validateUpdateQuery - :: (QErrM m, UserInfoM m, CacheRM m) + :: (QErrM m, UserInfoM m, CacheRM m, HasSQLGenCtx m) => UpdateQuery -> m (UpdateQueryP1, DS.Seq Q.PrepArg) validateUpdateQuery = liftDMLP1 . validateUpdateQueryWith binRHSBuilder -updateQueryToTx :: (UpdateQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody -updateQueryToTx (u, p) = +updateQueryToTx :: Bool -> (UpdateQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody +updateQueryToTx strfyNum (u, p) = runIdentity . Q.getRow <$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder updateSQL) (toList p) True where - updateSQL = toSQL $ mkSQLUpdate u + updateSQL = toSQL $ mkSQLUpdate strfyNum u runUpdate - :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m) + :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, HasSQLGenCtx m) => UpdateQuery -> m RespBody -runUpdate q = - validateUpdateQuery q >>= liftTx . updateQueryToTx +runUpdate q = do + strfyNum <- stringifyNum <$> askSQLGenCtx + validateUpdateQuery q >>= liftTx . updateQueryToTx strfyNum diff --git a/server/src-lib/Hasura/RQL/Types.hs b/server/src-lib/Hasura/RQL/Types.hs index e01af551135a1..a8d488363b998 100644 --- a/server/src-lib/Hasura/RQL/Types.hs +++ b/server/src-lib/Hasura/RQL/Types.hs @@ -17,6 +17,9 @@ module Hasura.RQL.Types , HasHttpManager (..) , HasGCtxMap (..) + , SQLGenCtx(..) + , HasSQLGenCtx(..) + , QCtx(..) , HasQCtx(..) 
, mkAdminQCtx @@ -79,6 +82,7 @@ data QCtx = QCtx { qcUserInfo :: !UserInfo , qcSchemaCache :: !SchemaCache + , qcSQLCtx :: !SQLGenCtx } deriving (Show, Eq) class HasQCtx a where @@ -87,8 +91,8 @@ class HasQCtx a where instance HasQCtx QCtx where getQCtx = id -mkAdminQCtx :: SchemaCache -> QCtx -mkAdminQCtx = QCtx adminUserInfo +mkAdminQCtx :: Bool -> SchemaCache -> QCtx +mkAdminQCtx b sc = QCtx adminUserInfo sc $ SQLGenCtx b class (Monad m) => UserInfoM m where askUserInfo :: m UserInfo @@ -137,12 +141,23 @@ instance UserInfoM P1 where instance CacheRM P1 where askSchemaCache = qcSchemaCache <$> ask +instance HasSQLGenCtx P1 where + askSQLGenCtx = qcSQLCtx <$> ask + class (Monad m) => HasHttpManager m where askHttpManager :: m HTTP.Manager class (Monad m) => HasGCtxMap m where askGCtxMap :: m GC.GCtxMap +newtype SQLGenCtx + = SQLGenCtx + { stringifyNum :: Bool + } deriving (Show, Eq) + +class (Monad m) => HasSQLGenCtx m where + askSQLGenCtx :: m SQLGenCtx + class (MonadError QErr m) => MonadTx m where liftTx :: Q.TxE QErr a -> m a @@ -236,11 +251,13 @@ liftP1 :: ( QErrM m , UserInfoM m , CacheRM m + , HasSQLGenCtx m ) => P1 a -> m a liftP1 m = do ui <- askUserInfo sc <- askSchemaCache - let qCtx = QCtx ui sc + sqlCtx <- askSQLGenCtx + let qCtx = QCtx ui sc sqlCtx liftP1WithQCtx qCtx m liftP1WithQCtx diff --git a/server/src-lib/Hasura/SQL/Types.hs b/server/src-lib/Hasura/SQL/Types.hs index e171cb5afdf47..e9af40eadb259 100644 --- a/server/src-lib/Hasura/SQL/Types.hs +++ b/server/src-lib/Hasura/SQL/Types.hs @@ -396,3 +396,11 @@ isComparableType PGGeography = False isComparableType PGBoolean = False isComparableType (PGUnknown _) = False isComparableType _ = True + +isBigNum :: PGColType -> Bool +isBigNum = \case + PGBigInt -> True + PGBigSerial -> True + PGNumeric -> True + PGDouble -> True + _ -> False diff --git a/server/src-lib/Hasura/Server/App.hs b/server/src-lib/Hasura/Server/App.hs index bdc17ee636c25..d46a96de7605d 100644 --- a/server/src-lib/Hasura/Server/App.hs +++ b/server/src-lib/Hasura/Server/App.hs @@ -89,13 +89,14 @@ mkConsoleHTML path authMode enableTelemetry = data ServerCtx = ServerCtx - { scIsolation :: Q.TxIsolation - , scPGPool :: Q.PGPool - , scLogger :: L.Logger - , scCacheRef :: IORef SchemaCache - , scCacheLock :: MVar () - , scAuthMode :: AuthMode - , scManager :: HTTP.Manager + { scIsolation :: Q.TxIsolation + , scPGPool :: Q.PGPool + , scLogger :: L.Logger + , scCacheRef :: IORef SchemaCache + , scCacheLock :: MVar () + , scAuthMode :: AuthMode + , scManager :: HTTP.Manager + , scStringifyNum :: Bool } data HandlerCtx @@ -127,7 +128,8 @@ buildQCtx = do scRef <- scCacheRef . hcServerCtx <$> ask userInfo <- asks hcUser cache <- liftIO $ readIORef scRef - return $ QCtx userInfo cache + strfyNum <- scStringifyNum . hcServerCtx <$> ask + return $ QCtx userInfo cache $ SQLGenCtx strfyNum logResult :: (MonadIO m) @@ -212,9 +214,10 @@ v1QueryHandler query = do scRef <- scCacheRef . hcServerCtx <$> ask schemaCache <- liftIO $ readIORef scRef httpMgr <- scManager . hcServerCtx <$> ask + strfyNum <- scStringifyNum . hcServerCtx <$> ask pool <- scPGPool . hcServerCtx <$> ask isoL <- scIsolation . hcServerCtx <$> ask - runQuery pool isoL userInfo schemaCache httpMgr query + runQuery pool isoL userInfo schemaCache httpMgr strfyNum query -- Also update the schema cache dbActionReload = do @@ -240,7 +243,8 @@ v1Alpha1GQHandler query = do sc <- liftIO $ readIORef scRef pool <- scPGPool . hcServerCtx <$> ask isoL <- scIsolation . 
hcServerCtx <$> ask - GH.runGQ pool isoL userInfo sc manager reqHeaders query reqBody + strfyNum <- scStringifyNum . hcServerCtx <$> ask + GH.runGQ pool isoL userInfo (SQLGenCtx strfyNum) sc manager reqHeaders query reqBody gqlExplainHandler :: GE.GQLExplain -> Handler BL.ByteString gqlExplainHandler query = do @@ -249,7 +253,8 @@ gqlExplainHandler query = do sc <- liftIO $ readIORef scRef pool <- scPGPool . hcServerCtx <$> ask isoL <- scIsolation . hcServerCtx <$> ask - GE.explainGQLQuery pool isoL sc query + strfyNum <- scStringifyNum . hcServerCtx <$> ask + GE.explainGQLQuery pool isoL sc (SQLGenCtx strfyNum) query newtype QueryParser = QueryParser { getQueryParser :: QualifiedTable -> Handler RQLQuery } @@ -285,29 +290,31 @@ mkWaiApp -> L.LoggerCtx -> Q.PGPool -> HTTP.Manager + -> Bool -> AuthMode -> CorsConfig -> Bool -> Bool -> IO (Wai.Application, IORef SchemaCache) -mkWaiApp isoLevel loggerCtx pool httpManager mode corsCfg enableConsole enableTelemetry = do +mkWaiApp isoLevel loggerCtx pool httpManager strfyNum mode corsCfg enableConsole enableTelemetry = do cacheRef <- do pgResp <- runExceptT $ peelRun emptySchemaCache adminUserInfo - httpManager pool Q.Serializable buildSchemaCache + httpManager strfyNum pool Q.Serializable buildSchemaCache either initErrExit return pgResp >>= newIORef . snd cacheLock <- newMVar () let serverCtx = ServerCtx isoLevel pool (L.mkLogger loggerCtx) cacheRef - cacheLock mode httpManager + cacheLock mode httpManager strfyNum spockApp <- spockAsApp $ spockT id $ httpApp corsCfg serverCtx enableConsole enableTelemetry let runTx tx = runExceptT $ runLazyTx pool isoLevel tx + sqlGenCtx = SQLGenCtx strfyNum - wsServerEnv <- WS.createWSServerEnv (scLogger serverCtx) httpManager cacheRef runTx + wsServerEnv <- WS.createWSServerEnv (scLogger serverCtx) httpManager sqlGenCtx cacheRef runTx let wsServerApp = WS.createWSServerApp mode wsServerEnv return (WS.websocketsOr WS.defaultConnectionOptions wsServerApp spockApp, cacheRef) diff --git a/server/src-lib/Hasura/Server/Init.hs b/server/src-lib/Hasura/Server/Init.hs index 19a3b6a596ff2..1771e850a13c6 100644 --- a/server/src-lib/Hasura/Server/Init.hs +++ b/server/src-lib/Hasura/Server/Init.hs @@ -44,6 +44,7 @@ data RawServeOptions , rsoCorsConfig :: !RawCorsConfig , rsoEnableConsole :: !Bool , rsoEnableTelemetry :: !(Maybe Bool) + , rsoStringifyNum :: !Bool } deriving (Show, Eq) data CorsConfigG a @@ -68,6 +69,7 @@ data ServeOptions , soCorsConfig :: !CorsConfig , soEnableConsole :: !Bool , soEnableTelemetry :: !Bool + , soStringifyNum :: !Bool } deriving (Show, Eq) data RawConnInfo = @@ -222,9 +224,10 @@ mkServeOptions rso = do fst enableConsoleEnv enableTelemetry <- fromMaybe True <$> withEnv (rsoEnableTelemetry rso) (fst enableTelemetryEnv) + strfyNum <- withEnvBool (rsoStringifyNum rso) $ fst stringifyNumEnv return $ ServeOptions port host connParams txIso accKey authHook jwtSecret - unAuthRole corsCfg enableConsole enableTelemetry + unAuthRole corsCfg enableConsole enableTelemetry strfyNum where mkConnParams (RawConnParams s c i p) = do stripes <- fromMaybe 1 <$> withEnv s (fst pgStripesEnv) @@ -326,7 +329,7 @@ serveCmdFooter = [ servePortEnv, serveHostEnv, pgStripesEnv, pgConnsEnv, pgTimeoutEnv , txIsoEnv, accessKeyEnv, authHookEnv , authHookModeEnv , jwtSecretEnv , unAuthRoleEnv, corsDomainEnv , enableConsoleEnv - , enableTelemetryEnv + , enableTelemetryEnv, stringifyNumEnv ] eventEnvs = @@ -429,6 +432,12 @@ enableTelemetryEnv = , "Enable anonymous telemetry (default: true)" ) +stringifyNumEnv :: (String, 
String) +stringifyNumEnv = + ( "HASURA_GRAPHQL_STRINGIFY_NUMERIC" + , "Stringify numeric types" + ) + parseRawConnInfo :: Parser RawConnInfo parseRawConnInfo = RawConnInfo <$> host <*> port <*> user <*> password @@ -629,6 +638,12 @@ parseEnableTelemetry = optional $ help (snd enableTelemetryEnv) ) +parseStringifyNum :: Parser Bool +parseStringifyNum = + switch ( long "stringify-numeric" <> + help (snd stringifyNumEnv) + ) + -- Init logging related connInfoToLog :: Q.ConnInfo -> StartupLog connInfoToLog (Q.ConnInfo host port user _ db _) = @@ -654,6 +669,7 @@ serveOptsToLog so = , "enable_console" J..= soEnableConsole so , "enable_telemetry" J..= soEnableTelemetry so , "use_prepared_statements" J..= (Q.cpAllowPrepare . soConnParams) so + , "stringify_numeric" J..= soStringifyNum so ] mkGenericStrLog :: T.Text -> String -> StartupLog diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 45e2cb0691e60..3443c9e4267ef 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -27,6 +27,7 @@ import Hasura.RQL.DML.Returning (encodeJSONVector) import Hasura.RQL.DML.Select import Hasura.RQL.DML.Update import Hasura.RQL.Types +import Hasura.Server.Utils import qualified Database.PG.Query as Q @@ -92,11 +93,11 @@ $(deriveJSON ''RQLQuery) newtype Run a - = Run {unRun :: StateT SchemaCache (ReaderT (UserInfo, HTTP.Manager) (LazyTx QErr)) a} + = Run {unRun :: StateT SchemaCache (ReaderT (UserInfo, HTTP.Manager, SQLGenCtx) (LazyTx QErr)) a} deriving ( Functor, Applicative, Monad , MonadError QErr , MonadState SchemaCache - , MonadReader (UserInfo, HTTP.Manager) + , MonadReader (UserInfo, HTTP.Manager, SQLGenCtx) , CacheRM , CacheRWM , MonadTx @@ -104,30 +105,35 @@ newtype Run a ) instance UserInfoM Run where - askUserInfo = asks fst + askUserInfo = asks _1 instance HasHttpManager Run where - askHttpManager = asks snd + askHttpManager = asks _2 + +instance HasSQLGenCtx Run where + askSQLGenCtx = asks _3 peelRun :: SchemaCache -> UserInfo -> HTTP.Manager + -> Bool -> Q.PGPool -> Q.TxIsolation -> Run a -> ExceptT QErr IO (a, SchemaCache) -peelRun sc userInfo httMgr pgPool txIso (Run m) = +peelRun sc userInfo httMgr strfyNum pgPool txIso (Run m) = runLazyTx pgPool txIso $ withUserInfo userInfo lazyTx where - lazyTx = runReaderT (runStateT m sc) (userInfo, httMgr) + sqlGenCtx = SQLGenCtx strfyNum + lazyTx = runReaderT (runStateT m sc) (userInfo, httMgr, sqlGenCtx) runQuery :: (MonadIO m, MonadError QErr m) - => Q.PGPool -> Q.TxIsolation - -> UserInfo -> SchemaCache -> HTTP.Manager + => Q.PGPool -> Q.TxIsolation -> UserInfo + -> SchemaCache -> HTTP.Manager -> Bool -> RQLQuery -> m (BL.ByteString, SchemaCache) -runQuery pool isoL userInfo sc hMgr query = do +runQuery pool isoL userInfo sc hMgr strfyNum query = do res <- liftIO $ runExceptT $ - peelRun sc userInfo hMgr pool isoL $ runQueryM query + peelRun sc userInfo hMgr strfyNum pool isoL $ runQueryM query liftEither res queryNeedsReload :: RQLQuery -> Bool @@ -185,7 +191,7 @@ queryNeedsReload qi = case qi of runQueryM :: ( QErrM m, CacheRWM m, UserInfoM m, MonadTx m - , MonadIO m, HasHttpManager m + , MonadIO m, HasHttpManager m, HasSQLGenCtx m ) => RQLQuery -> m RespBody From 01f37076f6ba6559bc380747e68d2a319450ad41 Mon Sep 17 00:00:00 2001 From: rakeshkky Date: Mon, 25 Feb 2019 13:04:04 +0530 Subject: [PATCH 2/5] update tests, all tests assumes strigify numeric config is true --- .../insert/basic/insert_various_postgres_types.yaml | 6 +++--- 
.../graphql_query/basic/select_query_test_types.yaml | 6 +++--- .../tests-py/queries/v1/update/basic/product_mul_price.yaml | 2 +- .../queries/v1/update/basic/product_set_default_price.yaml | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/server/tests-py/queries/graphql_mutation/insert/basic/insert_various_postgres_types.yaml b/server/tests-py/queries/graphql_mutation/insert/basic/insert_various_postgres_types.yaml index 6fa428d3f215b..cf16a00d82f6b 100644 --- a/server/tests-py/queries/graphql_mutation/insert/basic/insert_various_postgres_types.yaml +++ b/server/tests-py/queries/graphql_mutation/insert/basic/insert_various_postgres_types.yaml @@ -8,10 +8,10 @@ - c1_smallint: 32767 c2_integer: 2147483647 c3_bigint: "9223372036854775807" - c4_decimal: 123.45 - c5_numeric: 1.234 + c4_decimal: "123.45" + c5_numeric: "1.234" c6_real: 0.00390625 - c7_double_precision: 16.0001220703125 + c7_double_precision: "16.0001220703125" c8_smallserial: 1 c9_serial: 1 c10_bigserial: "1" diff --git a/server/tests-py/queries/graphql_query/basic/select_query_test_types.yaml b/server/tests-py/queries/graphql_query/basic/select_query_test_types.yaml index 41d763c9eb807..6052a2c03fc57 100644 --- a/server/tests-py/queries/graphql_query/basic/select_query_test_types.yaml +++ b/server/tests-py/queries/graphql_query/basic/select_query_test_types.yaml @@ -7,10 +7,10 @@ response: - c1_smallint: 32767 c2_integer: 2147483647 c3_bigint: '9223372036854775807' - c4_decimal: 123.45 - c5_numeric: 1.234 + c4_decimal: '123.45' + c5_numeric: '1.234' c6_real: 0.00390625 - c7_double_precision: 16.0001220703125 + c7_double_precision: '16.0001220703125' c8_smallserial: 1 c9_serial: 1 c10_bigserial: '1' diff --git a/server/tests-py/queries/v1/update/basic/product_mul_price.yaml b/server/tests-py/queries/v1/update/basic/product_mul_price.yaml index 79996264d26fd..f4b1b01ed2832 100644 --- a/server/tests-py/queries/v1/update/basic/product_mul_price.yaml +++ b/server/tests-py/queries/v1/update/basic/product_mul_price.yaml @@ -32,7 +32,7 @@ response: returning: - product_id: 2 name: Product 2 - price: 16.5 + price: '16.5000' query: type: update args: diff --git a/server/tests-py/queries/v1/update/basic/product_set_default_price.yaml b/server/tests-py/queries/v1/update/basic/product_set_default_price.yaml index 13a470220df24..0bcf2e91cfbb8 100644 --- a/server/tests-py/queries/v1/update/basic/product_set_default_price.yaml +++ b/server/tests-py/queries/v1/update/basic/product_set_default_price.yaml @@ -6,7 +6,7 @@ response: returning: - product_id: 1 name: Product 1 - price: 10 + price: '10' query: type: update args: From 3ca9e66ad21a390580fcda6d314262470fdc7010 Mon Sep 17 00:00:00 2001 From: rakeshkky Date: Mon, 25 Feb 2019 15:49:12 +0530 Subject: [PATCH 3/5] updated docs --- .../manual/deployment/graphql-engine-flags/reference.rst | 9 ++++++++- server/src-lib/Hasura/Server/Init.hs | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst b/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst index 413c7b75451f7..e5c1c1844914a 100644 --- a/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst +++ b/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst @@ -139,6 +139,13 @@ For ``serve`` sub-command these are the flags and ENV variables available: - ``HASURA_GRAPHQL_TX_ISOLATION`` - transaction isolation. 
read-committed / repeatable-read / serializable (default: read-commited) + * - ``--stringify-numeric`` + - ``HASURA_GRAPHQL_STRINGIFY_NUMERIC`` + - Stringify numeric types (default: false) + .. note:: - When the equivalent flags for environment variables are used, the flags will take precedence. + 1. When the equivalent flags for environment variables are used, the flags will take precedence. + 2. If ``--stringify-numeric`` flag or ``HASURA_GRAPHQL_STRINGIFY_NUMERIC`` is set to ``true`` then + GraphQL Engine sends numeric data types which do not fit into `IEEE 754 `_ + ``binary64`` (double precision) range as ``String``. diff --git a/server/src-lib/Hasura/Server/Init.hs b/server/src-lib/Hasura/Server/Init.hs index 33b7b7f82e6c2..dbc4b30cb88c6 100644 --- a/server/src-lib/Hasura/Server/Init.hs +++ b/server/src-lib/Hasura/Server/Init.hs @@ -449,7 +449,7 @@ enableTelemetryEnv = stringifyNumEnv :: (String, String) stringifyNumEnv = ( "HASURA_GRAPHQL_STRINGIFY_NUMERIC" - , "Stringify numeric types" + , "Stringify numeric types (default: false)" ) parseRawConnInfo :: Parser RawConnInfo From 274165223901826286eba6f036c1c622cd365034 Mon Sep 17 00:00:00 2001 From: rakeshkky Date: Fri, 1 Mar 2019 13:46:04 +0530 Subject: [PATCH 4/5] change flag from 'stringify-numeric' to 'stringify-numeric-types' --- .circleci/test-server.sh | 2 +- .../manual/deployment/graphql-engine-flags/reference.rst | 4 ++-- server/src-lib/Hasura/Server/Init.hs | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.circleci/test-server.sh b/.circleci/test-server.sh index bdb824c802b60..bef9c07c0777b 100755 --- a/.circleci/test-server.sh +++ b/.circleci/test-server.sh @@ -124,7 +124,7 @@ mkdir -p "$OUTPUT_FOLDER" export EVENT_WEBHOOK_HEADER="MyEnvValue" export HGE_URL="http://localhost:8080" export WEBHOOK_FROM_ENV="http://127.0.0.1:5592" -export HASURA_GRAPHQL_STRINGIFY_NUMERIC=true +export HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES=true PID="" WH_PID="" diff --git a/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst b/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst index 1b441be124cca..3afebee876f84 100644 --- a/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst +++ b/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst @@ -139,8 +139,8 @@ For ``serve`` sub-command these are the flags and ENV variables available: - ``HASURA_GRAPHQL_TX_ISOLATION`` - transaction isolation. 
read-committed / repeatable-read / serializable (default: read-commited) - * - ``--stringify-numeric`` - - ``HASURA_GRAPHQL_STRINGIFY_NUMERIC`` + * - ``--stringify-numeric-types`` + - ``HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES`` - Stringify numeric types (default: false) * - ``--enabled-apis `` diff --git a/server/src-lib/Hasura/Server/Init.hs b/server/src-lib/Hasura/Server/Init.hs index 39d9fe594a8c0..a44ed25930dc7 100644 --- a/server/src-lib/Hasura/Server/Init.hs +++ b/server/src-lib/Hasura/Server/Init.hs @@ -462,7 +462,7 @@ enableTelemetryEnv = stringifyNumEnv :: (String, String) stringifyNumEnv = - ( "HASURA_GRAPHQL_STRINGIFY_NUMERIC" + ( "HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES" , "Stringify numeric types (default: false)" ) @@ -691,7 +691,7 @@ parseEnableTelemetry = optional $ parseStringifyNum :: Parser Bool parseStringifyNum = - switch ( long "stringify-numeric" <> + switch ( long "stringify-numeric-types" <> help (snd stringifyNumEnv) ) @@ -726,7 +726,7 @@ serveOptsToLog so = , "enable_console" J..= soEnableConsole so , "enable_telemetry" J..= soEnableTelemetry so , "use_prepared_statements" J..= (Q.cpAllowPrepare . soConnParams) so - , "stringify_numeric" J..= soStringifyNum so + , "stringify_numeric_types" J..= soStringifyNum so ] mkGenericStrLog :: T.Text -> String -> StartupLog From ce04aae1c5331e80b3bbce37e491fb870b776901 Mon Sep 17 00:00:00 2001 From: Shahidh K Muhammed Date: Fri, 1 Mar 2019 17:13:39 +0530 Subject: [PATCH 5/5] Update reference.rst --- .../manual/deployment/graphql-engine-flags/reference.rst | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst b/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst index 3afebee876f84..c3341bd670041 100644 --- a/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst +++ b/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst @@ -141,7 +141,7 @@ For ``serve`` sub-command these are the flags and ENV variables available: * - ``--stringify-numeric-types`` - ``HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES`` - - Stringify numeric types (default: false) + - Stringify certain Postgres numeric types, specifically ``bigint``, ``numeric``, ``decimal`` and ``double precision`` as they don't fit into the ``IEEE-754`` spec for JSON encoding-decoding. (default: false) * - ``--enabled-apis `` - ``HASURA_GRAPHQL_ENABLED_APIS`` @@ -149,6 +149,3 @@ For ``serve`` sub-command these are the flags and ENV variables available: .. note:: 1. When the equivalent flags for environment variables are used, the flags will take precedence. - 2. If ``--stringify-numeric`` flag or ``HASURA_GRAPHQL_STRINGIFY_NUMERIC`` is set to ``true`` then - GraphQL Engine sends numeric data types which do not fit into `IEEE 754 `_ - ``binary64`` (double precision) range as ``String``.
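
As a rough illustration of what the ``--stringify-numeric-types`` option described in the final reference.rst text amounts to at the JSON layer — and of why the updated test fixtures in PATCH 2/5 expect values like c4_decimal to come back as the string "123.45" rather than a bare number — here is a standalone Haskell sketch. PGColType and renderNumeric below are stand-ins invented for this sketch only; isBigNum mirrors the predicate added to Hasura.SQL.Types in PATCH 1/5, and in the engine itself the flag actually travels through SQLGenCtx / HasSQLGenCtx into toJSONableExp, as the hunks above show.

{-# LANGUAGE LambdaCase #-}

-- Standalone sketch: when stringification is enabled, numeric types that can
-- exceed IEEE 754 binary64 precision are emitted as JSON strings; everything
-- else stays a plain JSON number.

data PGColType          -- trimmed stand-in for the engine's column-type enum
  = PGSmallInt | PGInteger | PGBigInt | PGBigSerial
  | PGNumeric | PGDouble | PGReal
  deriving (Show, Eq)

-- mirrors the isBigNum predicate added in PATCH 1/5
isBigNum :: PGColType -> Bool
isBigNum = \case
  PGBigInt    -> True
  PGBigSerial -> True
  PGNumeric   -> True
  PGDouble    -> True
  _           -> False

-- hypothetical helper (not part of the engine): render a raw numeric literal
-- for the JSON response, quoting it only when the flag is on and the column
-- type is one of the "big" numeric types
renderNumeric :: Bool -> PGColType -> String -> String
renderNumeric stringifyNum colTy rawVal
  | stringifyNum && isBigNum colTy = show rawVal   -- quoted: "9223372036854775807"
  | otherwise                      = rawVal        -- bare:   9223372036854775807

main :: IO ()
main = do
  putStrLn $ renderNumeric True  PGBigInt "9223372036854775807"  -- quoted string
  putStrLn $ renderNumeric False PGBigInt "9223372036854775807"  -- plain number

Per the docs hunk above, the behaviour is off by default and is switched on either with the ``--stringify-numeric-types`` flag or the ``HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES`` environment variable.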