Hello,
I recently upgraded to Hasura 1.0.0-beta.3 and I've noticed that CPU usage in the Docker container generally increases over time, even when no requests are being made to the Hasura server.
Here are the logs produced by Hasura (mostly showing that nothing is happening):
{"timestamp":"2019-07-17T21:51:27.827+0000","level":"info","type":"startup","detail":{"kind":"server_configuration","info":{"live_query_options":{"fallback_options":{"refetch_delay":1000},"multiplexed_options":{"batch_size":100,"refetch_delay":1000}},"transaction_isolation":"ISOLATION LEVEL READ COMMITTED","enabled_log_types":["http-log","websocket-log","startup","webhook-log","query-log"],"server_host":"HostAny","enable_allowlist":false,"log_level":"debug","auth_hook_mode":null,"use_prepared_statements":true,"unauth_role":null,"stringify_numeric_types":false,"enabled_apis":["metadata","graphql"],"enable_telemetry":false,"enable_console":true,"auth_hook":null,"jwt_secret":null,"cors_config":{"allowed_origins":"*","disabled":false,"ws_read_cookie":null},"console_assets_dir":null,"admin_secret_set":true,"port":8080}}}
{"timestamp":"2019-07-17T21:51:27.827+0000","level":"info","type":"startup","detail":{"kind":"postgres_connection","info":{"database":"postgres","retries":1,"user":"postgres","host":"postgres","port":5432}}}
{"internal":"could not connect to server: Connection refused\n\tIs the server running on host \"postgres\" (172.129.0.3) and accepting\n\tTCP/IP connections on port 5432?\n","path":"$","error":"connection error","code":"postgres-error"}
{"timestamp":"2019-07-17T21:51:29.951+0000","level":"info","type":"startup","detail":{"kind":"server_configuration","info":{"live_query_options":{"fallback_options":{"refetch_delay":1000},"multiplexed_options":{"batch_size":100,"refetch_delay":1000}},"transaction_isolation":"ISOLATION LEVEL READ COMMITTED","enabled_log_types":["http-log","websocket-log","startup","webhook-log","query-log"],"server_host":"HostAny","enable_allowlist":false,"log_level":"debug","auth_hook_mode":null,"use_prepared_statements":true,"unauth_role":null,"stringify_numeric_types":false,"enabled_apis":["metadata","graphql"],"enable_telemetry":false,"enable_console":true,"auth_hook":null,"jwt_secret":null,"cors_config":{"allowed_origins":"*","disabled":false,"ws_read_cookie":null},"console_assets_dir":null,"admin_secret_set":true,"port":8080}}}
{"timestamp":"2019-07-17T21:51:29.951+0000","level":"info","type":"startup","detail":{"kind":"postgres_connection","info":{"database":"postgres","retries":1,"user":"postgres","host":"postgres","port":5432}}}
{"internal":"could not connect to server: Connection refused\n\tIs the server running on host \"postgres\" (172.129.0.3) and accepting\n\tTCP/IP connections on port 5432?\n","path":"$","error":"connection error","code":"postgres-error"}
{"timestamp":"2019-07-17T21:51:31.891+0000","level":"info","type":"startup","detail":{"kind":"server_configuration","info":{"live_query_options":{"fallback_options":{"refetch_delay":1000},"multiplexed_options":{"batch_size":100,"refetch_delay":1000}},"transaction_isolation":"ISOLATION LEVEL READ COMMITTED","enabled_log_types":["http-log","websocket-log","startup","webhook-log","query-log"],"server_host":"HostAny","enable_allowlist":false,"log_level":"debug","auth_hook_mode":null,"use_prepared_statements":true,"unauth_role":null,"stringify_numeric_types":false,"enabled_apis":["metadata","graphql"],"enable_telemetry":false,"enable_console":true,"auth_hook":null,"jwt_secret":null,"cors_config":{"allowed_origins":"*","disabled":false,"ws_read_cookie":null},"console_assets_dir":null,"admin_secret_set":true,"port":8080}}}
{"timestamp":"2019-07-17T21:51:31.891+0000","level":"info","type":"startup","detail":{"kind":"postgres_connection","info":{"database":"postgres","retries":1,"user":"postgres","host":"postgres","port":5432}}}
{"timestamp":"2019-07-17T21:51:33.992+0000","level":"info","type":"startup","detail":{"kind":"db_init","info":"successfully initialised"}}
{"timestamp":"2019-07-17T21:51:33.992+0000","level":"info","type":"startup","detail":{"kind":"db_migrate","info":"already at the latest version. current version: \"17\""}}
{"timestamp":"2019-07-17T21:51:33.992+0000","level":"info","type":"startup","detail":{"kind":"schema-sync","info":{"thread_id":"ThreadId 86","instance_id":"5e7dd705-9331-474a-bda4-986d78801ffa","message":"listener thread started"}}}
{"timestamp":"2019-07-17T21:51:33.992+0000","level":"info","type":"startup","detail":{"kind":"schema-sync","info":{"thread_id":"ThreadId 87","instance_id":"5e7dd705-9331-474a-bda4-986d78801ffa","message":"processor thread started"}}}
{"timestamp":"2019-07-17T21:51:33.992+0000","level":"info","type":"startup","detail":{"kind":"event_triggers","info":"preparing data"}}
{"timestamp":"2019-07-17T21:51:33.992+0000","level":"info","type":"startup","detail":{"kind":"event_triggers","info":"starting workers"}}
{"timestamp":"2019-07-17T21:51:33.992+0000","level":"info","type":"startup","detail":{"kind":"server","info":{"time_taken":2.216395209,"message":"starting API server"}}}
Here is a screenshot of a Grafana dashboard showing the CPU usage over 6 hours:
Here is a minimal docker-compose file that recreates the test environment and the monitoring tools.
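For reference, a minimal sketch of such a compose file, assuming the Postgres database and hasura/graphql-engine:v1.0.0-beta.3 implied by the logs, plus a cAdvisor/Prometheus/Grafana stack for the CPU graph; image tags, credentials, and the referenced prometheus.yml are illustrative placeholders, not the exact original setup:

```yaml
# Sketch of a docker-compose setup for reproducing the test environment.
# Values below (passwords, admin secret, image tags) are placeholders.
version: "3.6"
services:
  postgres:
    image: postgres:11
    restart: always
    environment:
      POSTGRES_PASSWORD: postgrespassword
    volumes:
      - db_data:/var/lib/postgresql/data

  graphql-engine:
    image: hasura/graphql-engine:v1.0.0-beta.3
    ports:
      - "8080:8080"
    depends_on:
      - postgres
    restart: always
    environment:
      HASURA_GRAPHQL_DATABASE_URL: postgres://postgres:postgrespassword@postgres:5432/postgres
      HASURA_GRAPHQL_ENABLE_CONSOLE: "true"
      HASURA_GRAPHQL_ADMIN_SECRET: myadminsecret
      # Matches the log_level shown in the startup logs above.
      HASURA_GRAPHQL_LOG_LEVEL: debug

  # cAdvisor exports per-container CPU metrics for Prometheus to scrape.
  cadvisor:
    image: gcr.io/cadvisor/cadvisor:latest
    ports:
      - "8081:8080"
    volumes:
      - /:/rootfs:ro
      - /var/run:/var/run:ro
      - /sys:/sys:ro
      - /var/lib/docker/:/var/lib/docker:ro

  prometheus:
    image: prom/prometheus:latest
    ports:
      - "9090:9090"
    volumes:
      # Assumes a prometheus.yml that scrapes the cadvisor:8080 target.
      - ./prometheus.yml:/etc/prometheus/prometheus.yml:ro

  grafana:
    image: grafana/grafana:latest
    ports:
      - "3000:3000"
    depends_on:
      - prometheus

volumes:
  db_data:
```

With a stack like this, `docker-compose up -d` brings everything up, and the graphql-engine container's CPU can be graphed in Grafana from cAdvisor's `container_cpu_usage_seconds_total` metric, which is how a dashboard like the one above can be produced.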