2024-01-23 21:21:46 +07:00
"use strict" ;
const { version } = require ( "./package.json" ) ;
2023-10-30 03:14:50 +07:00
const WebSocket = require ( "ws" ) ;
2024-02-18 11:25:33 +07:00
const querystring = require ( "querystring" ) ;
2024-02-18 12:56:00 +07:00
const { validateEvent , nip19 , matchFilters , mergeFilters , getFilterLimit } = require ( "nostr-tools" ) ;
2023-11-18 16:49:20 +07:00
const auth = require ( "./auth.js" ) ;
const nip42 = require ( "./nip42.js" ) ;
2023-11-16 21:53:58 +07:00
// Load bouncer configuration, either from $BOSTR_CONFIG_PATH or ./config.
let { relays, approved_publishers, log_about_relays, authorized_keys, private_keys, reconnect_time, wait_eose, pause_on_limit, max_eose_score, broadcast_ratelimit, upstream_ratelimit_expiration, max_client_subs } = require(process.env.BOSTR_CONFIG_PATH || "./config");

// Environment variable takes precedence over the config file value.
log_about_relays = process.env.LOG_ABOUT_RELAYS || log_about_relays;

// Normalize bech32 "npub..." entries into hex pubkeys; hex entries pass through.
const npubToHex = key => key.startsWith("npub") ? nip19.decode(key).data : key;
authorized_keys = authorized_keys?.map(npubToHex);
approved_publishers = approved_publishers?.map(npubToHex);

// CL MaxEoseScore: Set <max_eose_score> as 0 if configured relays is under of the expected number from <max_eose_score>
if (relays.length < max_eose_score) max_eose_score = 0;
2023-10-30 03:14:50 +07:00
// CL - User socket
2024-01-23 22:49:43 +07:00
module . exports = ( ws , req , onClose ) => {
2024-02-18 11:25:33 +07:00
let query = querystring . parse ( req . url . slice ( 2 ) ) ;
2023-11-16 21:53:58 +07:00
let authKey = null ;
let authorized = true ;
2024-02-18 19:37:59 +07:00
let sessStarted = false ;
2024-01-04 22:01:08 +07:00
let lastEvent = Date . now ( ) ;
2024-02-17 15:33:15 +07:00
ws . ip = req . headers [ "x-forwarded-for" ] ? . split ( "," ) [ 0 ] || req . socket . address ( ) ? . address ;
2024-02-16 22:55:49 +07:00
ws . relays = new Set ( ) ; // Set() of connected relays.
2023-11-21 18:42:04 +07:00
ws . subs = new Map ( ) ; // contains filter submitted by clients. per subID
ws . pause _subs = new Set ( ) ; // pause subscriptions from receiving events after reached over <filter.limit> until all relays send EOSE. per subID
2023-11-18 16:49:20 +07:00
ws . events = new Map ( ) ; // only to prevent the retransmit of the same event. per subID
ws . my _events = new Set ( ) ; // for event retransmitting.
ws . pendingEOSE = new Map ( ) ; // each contain subID
2023-12-24 21:16:44 +07:00
ws . reconnectTimeout = new Set ( ) ; // relays timeout() before reconnection. Only use after client disconnected.
2024-01-29 22:03:39 +07:00
ws . subalias = new Map ( ) ;
ws . fakesubalias = new Map ( ) ;
2024-02-18 11:41:32 +07:00
ws . mergedFilters = new Map ( ) ;
2024-01-23 21:12:06 +07:00
ws . pubkey = null ;
2024-02-18 11:25:33 +07:00
ws . rejectKinds = query . reject ? . split ( "," ) . map ( _ => parseInt ( _ ) ) ;
ws . acceptKinds = query . accept ? . split ( "," ) . map ( _ => parseInt ( _ ) ) ;
2024-02-23 13:11:00 +07:00
ws . forcedLimit = parseInt ( query . limit ) ;
2024-02-24 12:23:55 +07:00
ws . accurateMode = parseInt ( query . accurate ) ;
2024-02-25 00:23:09 +07:00
ws . saveMode = parseInt ( query . save ) ;
2023-10-31 14:00:16 +07:00
2023-11-16 21:53:58 +07:00
if ( authorized _keys ? . length ) {
authKey = Date . now ( ) + Math . random ( ) . toString ( 36 ) ;
authorized = false ;
ws . send ( JSON . stringify ( [ "AUTH" , authKey ] ) ) ;
2023-11-20 21:21:22 +07:00
} else if ( private _keys !== { } ) {
// If there is no whitelist, Then we ask to client what is their public key.
// We will enable NIP-42 function for this session if user pubkey was available & valid in <private_keys>.
// There is no need to limit this session. We only ask who is this user.
// If it was the users listed at <private_keys> in config.js, Then the user could use NIP-42 protected relays.
authKey = Date . now ( ) + Math . random ( ) . toString ( 36 ) ;
ws . send ( JSON . stringify ( [ "AUTH" , authKey ] ) ) ;
2023-11-16 21:53:58 +07:00
}
2024-02-17 15:33:15 +07:00
console . log ( process . pid , ` ->- ${ ws . ip } connected [ ${ req . headers [ "user-agent" ] || "" } ] ` ) ;
2023-10-30 03:14:50 +07:00
ws . on ( "message" , data => {
try {
data = JSON . parse ( data ) ;
} catch {
return ws . send (
JSON . stringify ( [ "NOTICE" , "error: bad JSON." ] )
)
}
2024-01-23 21:12:06 +07:00
2023-10-30 03:14:50 +07:00
switch ( data [ 0 ] ) {
case "EVENT" :
2023-11-17 14:36:13 +07:00
if ( ! authorized ) return ;
2024-02-18 12:56:00 +07:00
if ( ! validateEvent ( data [ 1 ] ) ) return ws . send ( JSON . stringify ( [ "NOTICE" , "error: invalid event" ] ) ) ;
2023-11-16 21:53:58 +07:00
if ( data [ 1 ] . kind == 22242 ) return ws . send ( JSON . stringify ( [ "OK" , data [ 1 ] ? . id , false , "rejected: kind 22242" ] ) ) ;
2024-01-03 16:18:39 +07:00
2024-01-01 11:16:53 +07:00
if (
approved _publishers ? . length &&
! approved _publishers ? . includes ( data [ 1 ] . pubkey )
) return ws . send ( JSON . stringify ( [ "OK" , data [ 1 ] ? . id , false , "rejected: unauthorized" ] ) ) ;
2024-01-04 22:01:08 +07:00
if ( broadcast _ratelimit && ( broadcast _ratelimit > ( Date . now ( ) - lastEvent ) ) ) {
lastEvent = Date . now ( ) ;
return ws . send ( JSON . stringify ( [ "OK" , data [ 1 ] ? . id , false , "rate-limited: request too fast." ] ) ) ;
}
2024-02-18 19:55:51 +07:00
lastEvent = Date . now ( ) ;
ws . my _events . add ( data [ 1 ] ) ;
2024-02-18 19:37:59 +07:00
if ( ! ws . relays . size && ! sessStarted ) {
2024-02-18 19:45:19 +07:00
console . log ( process . pid , ` >>> ` , ` ${ ws . ip } executed ${ data [ 0 ] } command for the first. Initializing session ` ) ;
2024-02-18 19:37:59 +07:00
newsess ( ws ) ;
sessStarted = true ;
}
2024-02-17 15:33:15 +07:00
bc ( data , ws ) ;
2023-10-30 03:14:50 +07:00
ws . send ( JSON . stringify ( [ "OK" , data [ 1 ] ? . id , true , "" ] ) ) ;
break ;
2024-01-29 22:03:39 +07:00
case "REQ" : {
2023-11-17 14:36:13 +07:00
if ( ! authorized ) return ;
2023-10-30 03:14:50 +07:00
if ( data . length < 3 ) return ws . send ( JSON . stringify ( [ "NOTICE" , "error: bad request." ] ) ) ;
2024-01-04 13:09:06 +07:00
if ( typeof ( data [ 1 ] ) !== "string" ) return ws . send ( JSON . stringify ( [ "NOTICE" , "error: expected subID a string. but got the otherwise." ] ) ) ;
if ( typeof ( data [ 2 ] ) !== "object" ) return ws . send ( JSON . stringify ( [ "CLOSED" , data [ 1 ] , "error: expected filter to be obj, instead gives the otherwise." ] ) ) ;
2024-01-23 21:12:06 +07:00
if ( ( max _client _subs !== - 1 ) && ( ws . subs . size > max _client _subs ) ) return ws . send ( JSON . stringify ( [ "CLOSED" , data [ 1 ] , "rate-limited: too many subscriptions." ] ) ) ;
2024-01-29 21:36:20 +07:00
if ( ws . subs . has ( data [ 1 ] ) ) return ws . send ( JSON . stringify ( [ "CLOSED" , data [ 1 ] , "duplicate: subscription already opened" ] ) ) ;
2024-01-29 22:03:39 +07:00
const origID = data [ 1 ] ;
const faked = Date . now ( ) + Math . random ( ) . toString ( 36 ) ;
2024-02-18 11:25:33 +07:00
let filters = data . slice ( 2 ) ;
2024-02-18 11:41:32 +07:00
let filter = mergeFilters ( ... filters ) ;
2024-02-18 11:25:33 +07:00
for ( const fn in filters ) {
2024-02-18 12:45:26 +07:00
if ( ! Array . isArray ( filters [ fn ] . kinds ) ) {
filters [ fn ] . kinds = ws . acceptKinds ;
continue ;
} else {
filters [ fn ] . kinds = filters [ fn ] . kinds ? . filter ( kind => {
if ( ws . rejectKinds && ws . rejectKinds . includes ( kind ) ) return false ;
2024-02-18 13:08:16 +07:00
if ( ws . acceptKinds && ! ws . acceptKinds . includes ( kind ) ) return false ;
2024-02-18 12:45:26 +07:00
return true ;
} ) ;
}
2024-02-23 13:11:00 +07:00
if ( filters [ fn ] . limit > ws . forcedLimit )
filters [ fn ] . limit = ws . forcedLimit ;
2024-02-18 11:25:33 +07:00
}
2024-02-18 19:37:59 +07:00
if ( ! ws . relays . size && ! sessStarted ) {
2024-02-18 19:45:19 +07:00
console . log ( process . pid , ` >>> ` , ` ${ ws . ip } executed ${ data [ 0 ] } command for the first. Initializing session ` ) ;
2024-02-18 19:37:59 +07:00
newsess ( ws ) ;
sessStarted = true ;
}
2024-02-18 11:25:33 +07:00
ws . subs . set ( origID , filters ) ;
2024-01-29 22:03:39 +07:00
ws . events . set ( origID , new Set ( ) ) ;
ws . pause _subs . delete ( origID ) ;
ws . subalias . set ( faked , origID ) ;
ws . fakesubalias . set ( origID , faked ) ;
2024-02-18 11:41:32 +07:00
ws . mergedFilters . set ( origID , filter ) ;
2024-01-29 22:03:39 +07:00
data [ 1 ] = faked ;
2024-02-17 15:33:15 +07:00
bc ( data , ws ) ;
2024-02-18 11:41:32 +07:00
if ( filter . limit < 1 ) return ws . send ( JSON . stringify ( [ "EOSE" , origID ] ) ) ;
2024-01-29 22:03:39 +07:00
ws . pendingEOSE . set ( origID , 0 ) ;
2023-10-30 03:14:50 +07:00
break ;
2024-01-29 22:03:39 +07:00
}
2023-10-30 03:14:50 +07:00
case "CLOSE" :
2023-11-17 14:36:13 +07:00
if ( ! authorized ) return ;
2023-11-02 22:06:12 +07:00
if ( typeof ( data [ 1 ] ) !== "string" ) return ws . send ( JSON . stringify ( [ "NOTICE" , "error: bad request." ] ) ) ;
2024-01-31 20:46:43 +07:00
if ( ! ws . fakesubalias . has ( data [ 1 ] ) ) return ws . send ( JSON . stringify ( [ "CLOSED" , data [ 1 ] , "error: this sub is not opened." ] ) ) ;
2024-01-29 22:03:39 +07:00
const origID = data [ 1 ] ;
const faked = ws . fakesubalias . get ( origID ) ;
ws . subs . delete ( origID ) ;
ws . events . delete ( origID ) ;
ws . pendingEOSE . delete ( origID ) ;
ws . pause _subs . delete ( origID ) ;
2024-01-29 22:08:16 +07:00
ws . fakesubalias . delete ( origID ) ;
ws . subalias . delete ( faked ) ;
2024-02-18 11:41:32 +07:00
ws . mergedFilters . delete ( origID ) ;
2024-01-29 22:03:39 +07:00
data [ 1 ] = faked ;
2024-02-17 15:33:15 +07:00
bc ( data , ws ) ;
2024-01-29 22:03:39 +07:00
ws . send ( JSON . stringify ( [ "CLOSED" , origID , "" ] ) ) ;
2023-10-30 03:14:50 +07:00
break ;
2023-11-16 21:53:58 +07:00
case "AUTH" :
2023-11-20 21:21:22 +07:00
if ( auth ( authKey , data [ 1 ] , ws , req ) ) {
2023-11-16 21:53:58 +07:00
ws . pubkey = data [ 1 ] . pubkey ;
2024-02-17 15:33:15 +07:00
console . log ( process . pid , "---" , ws . ip , "successfully authorized as" , ws . pubkey , private _keys [ ws . pubkey ] ? "(admin)" : "(user)" ) ;
2023-11-20 21:21:22 +07:00
if ( authorized ) return ;
authorized = true ;
2024-01-04 22:01:08 +07:00
lastEvent = Date . now ( ) ;
2023-11-16 21:53:58 +07:00
}
break ;
2023-10-30 03:14:50 +07:00
default :
2024-02-25 00:35:24 +07:00
ws . send ( JSON . stringify ( [ "NOTICE" , ` error: unrecognized command: ${ data [ 0 ] } ` ] ) ) ;
2023-10-30 03:14:50 +07:00
break ;
}
} ) ;
ws . on ( 'error' , console . error ) ;
ws . on ( 'close' , _ => {
2024-01-23 22:49:43 +07:00
onClose ( ) ;
2024-01-27 22:57:41 +07:00
2024-02-17 15:33:15 +07:00
console . log ( process . pid , "---" , ` ${ ws . ip } disconnected ` ) ;
2023-10-30 03:14:50 +07:00
2024-01-23 21:21:46 +07:00
for ( const i of ws . reconnectTimeout ) {
2023-12-25 23:12:26 +09:00
clearTimeout ( i ) ;
2023-12-24 21:16:44 +07:00
// Let the garbage collector do the thing. No need to add ws.reconnectTimeout.delete(i);
}
2024-01-03 14:10:01 +07:00
2024-02-17 15:33:15 +07:00
for ( const sock of ws . relays ) {
2024-02-17 17:40:23 +07:00
sock . terminate ( ) ;
2024-02-17 15:33:15 +07:00
}
2023-10-30 03:14:50 +07:00
} ) ;
}
2023-12-19 22:56:48 +07:00
// WS - New session: open a bouncer connection to every configured relay
// on behalf of this client.
function newsess(ws) {
  for (const addr of relays) {
    newConn(addr, ws);
  }
}
2023-10-30 03:14:50 +07:00
// WS - Broadcast message to every existing sockets
//
// Serializes <msg> once (previously re-stringified per relay socket) and
// sends it to every OPEN upstream relay that is not currently rate-limited.
function bc(msg, ws) {
  const payload = JSON.stringify(msg);
  for (const sock of ws.relays) {
    if (sock.readyState !== 1) continue; // only WebSocket.OPEN sockets.

    // skip the ratelimit after <config.upstream_ratelimit_expiration>
    if ((upstream_ratelimit_expiration) > (Date.now() - sock.ratelimit)) continue;
    sock.send(payload);
  }
}
// WS - Sessions
//
// Opens one upstream relay connection for <client> and wires its whole
// lifecycle: replays the client's events and subscriptions on open,
// translates/filters incoming messages back to the client's subIDs, and
// reconnects with linear backoff (<reconn_t>) after a close.
function newConn(addr, client, reconn_t = 0) {
  if (client.readyState !== 1) return; // client already gone; nothing to do.

  const relay = new WebSocket(addr, {
    headers: {
      "User-Agent": `Bostr ${version}; The nostr relay bouncer; https://github.com/Yonle/bostr`
    },
    noDelay: true,
    allowSynchronousEvents: true
  });

  relay.ratelimit = 0; // timestamp of the last "rate-limited" notice from this relay.

  relay.on('open', _ => {
    if (client.readyState !== 1) return relay.terminate();
    reconn_t = 0; // a successful connect resets the reconnection backoff.
    if (log_about_relays) console.log(process.pid, "---", client.ip, `${relay.url} is connected`);

    // Replay everything the client already submitted within this session.
    for (const i of client.my_events) {
      relay.send(JSON.stringify(["EVENT", i]));
    }

    for (const i of client.subs) {
      relay.send(JSON.stringify(["REQ", client.fakesubalias.get(i[0]), ...i[1]]));
    }
  });

  relay.on('message', data => {
    if (client.readyState !== 1) return relay.terminate();
    try {
      data = JSON.parse(data);
    } catch (error) {
      return; // best-effort: silently drop malformed upstream payloads.
    }

    switch (data[0]) {
      case "EVENT": {
        if (data.length < 3 || typeof(data[1]) !== "string" || typeof(data[2]) !== "object") return;
        if (!client.subalias.has(data[1])) return;
        data[1] = client.subalias.get(data[1]); // translate back to the client's subID.
        if (client.pause_subs.has(data[1])) return;

        // BUGFIX: was `data[2]?.id` — rejectKinds holds integer kinds, so
        // comparing against the event id (a hex string) never matched and made
        // the ?reject= query parameter a no-op for relayed events.
        if (client.rejectKinds && client.rejectKinds.includes(data[2]?.kind)) return;

        const filters = client.subs.get(data[1]);
        if (!matchFilters(filters, data[2])) return;

        const filter = client.mergedFilters.get(data[1]);
        // Bouncer-side NIP-50: drop events whose content does not contain the search term.
        const NotInSearchQuery = "search" in filter && !data[2]?.content?.toLowerCase().includes(filter.search.toLowerCase());
        if (NotInSearchQuery) return;

        if (client.events.get(data[1]).has(data[2]?.id)) return; // No need to transmit once it has been transmitted before.

        client.events.get(data[1]).add(data[2]?.id);
        client.send(JSON.stringify(data));

        // Now count for REQ limit requested by client.
        // If it's at the limit, Send EOSE to client and delete pendingEOSE of subID
        // Skip if EOSE has been omitted
        if (!client.pendingEOSE.has(data[1])) return;
        const limit = getFilterLimit(filter);
        if (limit === Infinity) return;
        if (client.events.get(data[1]).size >= limit) {
          // Once reached to <filter.limit>, send EOSE to client.
          client.send(JSON.stringify(["EOSE", data[1]]));

          if (!client.accurateMode && (client.saveMode || pause_on_limit)) {
            client.pause_subs.add(data[1]);
          } else {
            client.pendingEOSE.delete(data[1]);
          }
        }
        break;
      }

      case "EOSE":
        if (!client.subalias.has(data[1])) return;
        data[1] = client.subalias.get(data[1]);
        if (!client.pendingEOSE.has(data[1])) return;
        client.pendingEOSE.set(data[1], client.pendingEOSE.get(data[1]) + 1);

        if (log_about_relays) console.log(process.pid, "---", client.ip, `got EOSE from ${relay.url} for ${data[1]}. There are ${client.pendingEOSE.get(data[1])} EOSE received out of ${client.relays.size} connected relays.`);

        // Keep waiting until enough relays have EOSE'd (either the configured
        // <max_eose_score> or every currently connected relay).
        if (wait_eose && ((client.pendingEOSE.get(data[1]) < max_eose_score) || (client.pendingEOSE.get(data[1]) < client.relays.size))) return;
        client.pendingEOSE.delete(data[1]);

        if (client.pause_subs.has(data[1])) {
          // EOSE was already sent when the limit was hit; just unpause.
          client.pause_subs.delete(data[1]);
        } else {
          client.send(JSON.stringify(data));
        }
        break;

      case "AUTH":
        // NIP-42 challenge from the upstream; answer only for pubkeys we hold keys for.
        if (!private_keys || typeof(data[1]) !== "string" || !client.pubkey) return;
        nip42(relay, client.pubkey, private_keys[client.pubkey], data[1]);
        break;

      case "NOTICE":
      case "CLOSED":
        if (typeof(data[1]) !== "string") return;
        if (data[1].startsWith("rate-limited")) relay.ratelimit = Date.now();
        break;

      case "OK":
        if (typeof(data[2]) !== "string") return;
        if (data[2].startsWith("rate-limited")) relay.ratelimit = Date.now();
        break;
    }
  });

  relay.on('error', _ => {
    if (log_about_relays) console.error(process.pid, "-!-", client.ip, relay.url, _.toString())
  });

  relay.on('close', _ => {
    if (client.readyState !== 1) return; // client gone; don't reconnect.
    client.relays.delete(relay); // Remove this socket session from <client.relays> list
    if (log_about_relays) console.log(process.pid, "-!-", client.ip, "Disconnected from", relay.url);

    reconn_t += reconnect_time || 5000
    const reconnectTimeout = setTimeout(_ => {
      newConn(addr, client, reconn_t);
      client?.reconnectTimeout.delete(reconnectTimeout);
    }, reconn_t); // As a bouncer server, We need to reconnect.
    client?.reconnectTimeout.add(reconnectTimeout);
  });

  relay.on('unexpected-response', (req, res) => {
    if (client.readyState !== 1) return;
    client.relays.delete(relay);
    // Server errors are treated as transient: reuse the reconnect path.
    if (res.statusCode >= 500) return relay.emit("close", null);
    // Anything else (4xx etc.) permanently drops this relay for new sessions.
    relays = relays.filter(_ => !relay.url.startsWith(_));
    console.log(process.pid, "-!-", `${relay.url} give status code ${res.statusCode}. Not (re)connect with new session again.`);
  });

  client.relays.add(relay); // Add this socket session to <client.relays>
}