+const CONNECTION_IDLE_TIMEOUT = 60 * 1000 // ms
+const TCP_KEEPALIVE_TIMEOUT = 2 * 60 * 1000 // ms
+const TCP_IDLE_TIMEOUT = 2 * 60 * 1000 + 1 * 10 * 1000 // ms
+const DEFAULT_QUEUE_LIMIT = 300
+const DEFAULT_CONNECTION_LIMIT = 45
+const DEFAULT_CONNECT_TIMEOUT = 2 * 1000 // ms
+const DEFAULT_CACHE_TTL = 5 * 60 // s
+const DEFAULT_CACHE_CHECKPERIOD = 1 * 60 // s
+const DEFAULT_INSECURE_AUTH = true
+
 const debug = require('debug')('mysql2-cache')
 const mysql = require('mysql2')
-const crypto = require('crypto')
+const crypto = require('node:crypto')
 const NodeCache = require('node-cache')
-const queryCache = new NodeCache({ stdTTL: 5 * 60, checkperiod: 1 * 60 })
+const queryCache = new NodeCache({ stdTTL: DEFAULT_CACHE_TTL, checkperiod: DEFAULT_CACHE_CHECKPERIOD })
 
 const { Console } = require('console')
 const { Transform } = require('stream')
 const ts = new Transform({ transform (chunk, enc, cb) { cb(null, chunk) } })
-const logger = new Console({ stdout: ts, stderr: ts, colorMode: true })
+const logger = new Console({ stdout: ts, stderr: ts, colorMode: true, inspectOptions: { depth: Infinity, breakLength: Infinity, compact: true } })
 function getTable (data) {
   logger.table(data)
   return (ts.read() || '').toString()
 }
 
 debug('init')
+debug.inspectOpts = { depth: Infinity, breakLength: Infinity, compact: true }
 
 module.exports = mysql
 
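+// connect() creates a promise-based mysql2 pool with query caching, per-query debug logging and TCP keepalive/idle-timeout workarounds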
 module.exports.connect = (config = {}) => {
-  config.connectionLimit = config.connectionLimit || 15
-  config.queueLimit = config.queueLimit || 100
+  // queueLimit shouldn't be 0, as that leads to a long queue of lost queries
+  // on zombie sockets instead of an error being thrown
+  // https://github.com/sidorares/node-mysql2/blob/master/lib/pool_config.js
+  config.queueLimit = config.queueLimit || DEFAULT_QUEUE_LIMIT
+  // MySQL's default max_connections is 151
+  config.connectionLimit = config.connectionLimit || DEFAULT_CONNECTION_LIMIT
+  // should be less than TCP_KEEPALIVE_TIMEOUT
+  config.idleTimeout = config.idleTimeout || CONNECTION_IDLE_TIMEOUT
+  config.connectTimeout = config.connectTimeout || DEFAULT_CONNECT_TIMEOUT
+  config.insecureAuth = config.insecureAuth || DEFAULT_INSECURE_AUTH
 
   const pool = mysql.createPool(config).promise()
   let qid = 0
 
   pool.q = async (sql, params = [], cache = false, ttl = undefined) => {
     qid++
-    const id = qid
+    const log = debug.extend(qid)
+    log(sql, params)
+    // https://medium.com/@chris_72272/what-is-the-fastest-node-js-hashing-algorithm-c15c1a0e164e
     const hash = crypto.createHash('sha1').update(sql + JSON.stringify(params)).digest('base64')
-    const log = debug.extend(id)
-    log('%s %j', sql, params)
-
     if (cache && queryCache.has(hash)) {
-      log('cache hit %s %j %j ', hash, queryCache.getStats(), queryCache.keys())
+      log('Cache hit', hash, queryCache.getStats() /* , queryCache.keys() */)
       return queryCache.get(hash)
     } else if (cache) {
-      log('cache missed %j %j ', queryCache.getStats(), queryCache.keys())
+      log('Cache miss', queryCache.getStats() /* , queryCache.keys() */)
     }
     const [rows, fields] = await pool.query(sql, params).catch(error => {
-      console.error('[MYSQL] query_error %s %j ', sql, params, error)
+      console.error('[MYSQL] query', sql, params, error)
       if (error.message === 'Queue limit reached.') {
         // @todo Graceful server and mysql connections exit
         console.error('[MYSQL] POOL_ENQUEUELIMIT EXIT')
@@ -47,9 +65,7 @@ module.exports.connect = (config = {}) => {
       throw error
     })
     const result = Array.isArray(rows) && rows.length ? rows : false
-    if (debug.enabled) {
-      log(getTable(rows))
-    }
+    log(getTable(rows))
     if (cache) {
       queryCache.set(hash, result, ttl)
     }
@@ -61,5 +77,120 @@ module.exports.connect = (config = {}) => {
     return Array.isArray(rows) && rows.length ? rows[0] : false
   }
 
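+  // Pool introspection: counts of all, free and queued connections (reads mysql2's private pool internals)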
+  pool.stat = () => {
+    return {
+      ALL: pool.pool._allConnections.toArray().length,
+      // USE: pool.pool._allConnections.toArray().length - pool.pool._freeConnections.toArray().length,
+      FRE: pool.pool._freeConnections.toArray().length,
+      QUE: pool.pool._connectionQueue.toArray().length
+    }
+  }
+
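+  // INSERT helper: writes a single row object into the given table and returns the driver result (or false)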
+  pool.insert = async (table, row) => {
+    qid++
+    const log = debug.extend(qid)
+    log('INSERT INTO', table /* , row, rows, fields */)
+    const [rows, fields] = await pool.query('INSERT INTO ?? SET ?', [table, row])
+      .catch(error => {
+        console.error('[MYSQL] insert', table, row, error)
+        throw error
+      })
+    log(getTable(row))
+    log(getTable(rows))
+    return rows || false
+  }
+
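+  // UPDATE helper: the WHERE clause is built from the where object, each value run through pool.escape()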
+  pool.update = async (table, row, where) => {
+    qid++
+    const log = debug.extend(qid)
+    log('UPDATE', table /* , [row, where], rows, fields */)
+    const _where = Object.keys(where).map(key => key + '=' + pool.escape(where[key])).join(' AND ')
+    const [rows, fields] = await pool.query(`UPDATE ?? SET ? WHERE ${_where}`, [table, row])
+      .catch(error => {
+        console.error('[MYSQL] update', table, [row, where], error)
+        throw error
+      })
+    log(getTable(row))
+    log(getTable(where))
+    log(getTable(rows))
+    return rows || false
+  }
+
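+  // Pool lifecycle logging plus socket-level workarounds for zombie connections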
+  pool.on('acquire', (connection) => {
+    debug('Connection #%s acquired', connection.threadId, pool.stat())
+  })
+  pool.on('connection', (connection) => {
+    debug('Connected #%s to %s:%s', connection.threadId, connection.config.host, connection.config.port, pool.stat())
+    /**
+     * tcp_keepalive and ESTABLISHED zombie sockets bug
+     * https://blog.cloudflare.com/when-tcp-sockets-refuse-to-die/
+     * https://github.com/mysqljs/mysql/issues/835
+     *
+     * tcp_keepalive is off in Node by default
+     * https://nodejs.org/dist/latest-v20.x/docs/api/net.html#net_socket_setkeepalive_enable_initialdelay
+     *
+     * _socket.setKeepAlive(true, 1000 * 60 * 2); // ms
+     * https://github.com/mysqljs/mysql/issues/1939#issuecomment-365715668
+     *
+     * TCP_TIMEOUT = TCP_KEEPIDLE + TCP_KEEPINTVL * TCP_KEEPCNT
+     * 130 = 120 + 1 * 10
+     */
+    connection.stream.setKeepAlive(true, TCP_KEEPALIVE_TIMEOUT)
+
+    /**
+     * _socket.setTimeout is an alternative:
+     * https://github.com/nodejs/node/issues/4560#issuecomment-302008479
+     *
+     * Set socket idle timeout in milliseconds
+     * https://nodejs.org/api/net.html#socketsettimeouttimeout-callback
+     * _socket.setTimeout(1000 * 60 * 15); // ms
+     *
+     * Wait for the 'timeout' event (Node emits it when the idle timeout elapses)
+     * socket.on('timeout', function () {
+     *   socket.destroy();
+     * });
+     *
+     * The recently added idleTimeout option is also passed to mysql.createPool(),
+     * but both are used since there is no guarantee that either one alone works around the bug
+     */
+    connection.stream.setTimeout(TCP_IDLE_TIMEOUT)
+    connection.stream.on('timeout', () => {
+      connection.stream.destroy()
+      connection.destroy()
+      debug('Connection #%s socket timeout', connection.threadId, pool.stat())
+    })
+
+    /**
+     * No events emitted on connection close => listen on the socket
+     * https://github.com/sidorares/node-mysql2/blob/68cc3358121a88f955c0adab95a2d5f3d2b4ecb4/lib/connection.js#L770
+     */
+    connection.stream.on('error', (error) => {
+      debug('Connection #%s socket error', connection.threadId, pool.stat(), error)
+    })
+    connection.stream.on('close', (hadError) => {
+      debug('Connection #%s socket closed%s', connection.threadId, hadError ? ' on error' : '', pool.stat())
+    })
+    connection.on('error', (error) => {
+      console.error('[MYSQL] Connection error', error) // e.g. 'ER_BAD_DB_ERROR'
+    })
+  })
+
+  pool.on('enqueue', (connection) => {
+    debug('Connection queued', pool.stat())
+  })
+  pool.on('release', (connection) => {
+    debug('Connection #%d released', connection.threadId, pool.stat())
+  })
+  pool.on('error', (...args) => {
+    console.error('[MYSQL]', ...args)
+  })
+
   return pool
 }
+
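+// Process-wide last-resort handlers: log unhandled promise rejections and uncaught exceptions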
+process.on('unhandledRejection', (reason, p) => {
+  console.error('Unhandled Rejection at:', p, 'reason:', reason)
+})
+process.on('uncaughtException', (error, origin) => {
+  console.error(`Caught exception: ${error}\n` + `Exception origin: ${origin}`, error.stack)
+})
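
A minimal usage sketch of the pool helpers added above, assuming the module is exposed as mysql2-cache; the host, credentials and table names are hypothetical placeholders:

const db = require('mysql2-cache').connect({ host: 'localhost', user: 'app', password: 'secret', database: 'app' })

;(async () => {
  // q(sql, params, cache, ttl): returns the rows array or false, cached for 60 s here
  const rows = await db.q('SELECT * FROM users WHERE id = ?', [1], true, 60)
  await db.insert('users', { name: 'Alice' })
  await db.update('users', { name: 'Bob' }, { id: 1 })
  console.log(db.stat()) // { ALL, FRE, QUE } connection counters
})()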