2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "bfx-report",
"version": "4.6.0",
"version": "4.7.0",
"description": "Reporting tool",
"main": "worker.js",
"license": "Apache-2.0",
20 changes: 20 additions & 0 deletions workers/loc.api/abstract.ws.event.emitter/index.js
@@ -35,6 +35,26 @@ class AbstractWSEventEmitter {
action
)
}

emitMaintenanceTurnedOn (
handler = () => {},
action = 'emitMaintenanceTurnedOn'
) {
return this.emit(
handler,
action
)
}

emitMaintenanceTurnedOff (
handler = () => {},
action = 'emitMaintenanceTurnedOff'
) {
return this.emit(
handler,
action
)
}
}

decorateInjectable(AbstractWSEventEmitter)
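The two new emitters reuse the generic emit hook, so a concrete subclass only has to supply the transport. A minimal sketch of such a subclass, assuming a hypothetical wsServer with a broadcast method (the transport wiring is not part of this diff):

'use strict'

// Path assumed relative to workers/loc.api
const AbstractWSEventEmitter = require('./abstract.ws.event.emitter')

// Hypothetical concrete emitter: only `emit` has to be implemented,
// the inherited maintenance helpers call it with their action name
class WSEventEmitter extends AbstractWSEventEmitter {
  constructor (wsServer) {
    super()

    this.wsServer = wsServer
  }

  async emit (handler = () => {}, action) {
    const data = await handler()

    // `broadcast` is an assumed transport method for illustration
    this.wsServer.broadcast({ action, data })
  }
}

// wsEventEmitter.emitMaintenanceTurnedOn()  -> clients receive { action: 'emitMaintenanceTurnedOn', data: undefined }
// wsEventEmitter.emitMaintenanceTurnedOff() -> clients receive { action: 'emitMaintenanceTurnedOff', data: undefined }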
42 changes: 42 additions & 0 deletions workers/loc.api/generate-csv/csv-writer/helpers/index.js
@@ -0,0 +1,42 @@
'use strict'

const { pipeline } = require('stream/promises')
const { stringify } = require('csv')

const streamWriterToOne = async (
rStream,
wStream,
writeFn,
opts
) => {
const { end = true } = opts ?? {}
const promise = pipeline(rStream, wStream, { end })

writeFn(rStream)
rStream.end()

await promise
}

const streamWriter = async (wStream, csvStreamDataMap) => {
for (const [i, csvStreamData] of csvStreamDataMap.entries()) {
const isLast = (i + 1) === csvStreamDataMap.length
const {
columnParams,
writeFn
} = csvStreamData

const stringifier = stringify(columnParams)
await streamWriterToOne(
stringifier,
wStream,
writeFn,
{ end: isLast }
)
}
}

module.exports = {
streamWriterToOne,
streamWriter
}
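A minimal usage sketch of the new helper (file name and records are made up): each entry gets its own csv stringifier piped into the shared writable stream, and only the last pipeline is allowed to close it, so several CSV sections can land in one file.

'use strict'

const fs = require('fs')
// Path assumed relative to a consumer next to the helpers folder
const { streamWriter } = require('./helpers')

const writeTwoSections = async () => {
  const wStream = fs.createWriteStream('/tmp/example-report.csv')

  await streamWriter(
    wStream,
    [
      {
        // First section: a single message row, wStream is kept open (end: false)
        columnParams: { columns: ['mess'] },
        writeFn: (stream) => stream.write({ mess: 'DEMO DATA' })
      },
      {
        // Last section: header + rows, closes wStream when done (end: true)
        columnParams: { header: true, columns: ['id', 'amount'] },
        writeFn: (stream) => {
          stream.write({ id: 1, amount: 0.1 })
          stream.write({ id: 2, amount: -0.2 })
        }
      }
    ]
  )
}

writeTwoSections().catch(console.error)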
@@ -1,13 +1,10 @@
'use strict'

const { pipeline } = require('stream')
const { stringify } = require('csv')

const {
write
} = require('../../queue/write-data-to-stream/helpers')

const nope = () => {}
const { streamWriter } = require('./helpers')

module.exports = (
rService,
@@ -28,50 +25,53 @@ module.exports = (
queue.emit('progress', 0)

if (typeof jobData === 'string') {
const stringifier = stringify(
{ columns: ['mess'] }
await streamWriter(
wStream,
[{
columnParams: { columns: ['mess'] },
writeFn: (stream) => write([{ mess: jobData }], stream)
}]
)

pipeline(stringifier, wStream, nope)
write([{ mess: jobData }], stringifier)
queue.emit('progress', 100)
stringifier.end()

return
}

wStream.setMaxListeners(50)

const headerStringifier = stringify(
{ columns: ['empty', 'buy', 'empty', 'empty', 'sell', 'empty', 'empty', 'cumulative', 'empty'] }
)
const resStringifier = stringify({
header: true,
columns: columnsCsv
})

pipeline(headerStringifier, wStream, nope)
pipeline(resStringifier, wStream, nope)

const { res } = await getDataFromApi({
getData: rService[name].bind(rService),
args,
callerName: 'CSV_WRITER'
})

write(
[{ empty: '', buy: 'Buy', sell: 'Sell', cumulative: 'Cumulative' }],
headerStringifier
)
write(
res,
resStringifier,
formatSettings,
params
wStream.setMaxListeners(50)

await streamWriter(
wStream,
[
{
columnParams: {
columns: ['empty', 'buy', 'empty', 'empty', 'sell', 'empty', 'empty', 'cumulative', 'empty']
},
writeFn: (stream) => write(
[{ empty: '', buy: 'Buy', sell: 'Sell', cumulative: 'Cumulative' }],
stream
)
},
{
columnParams: {
header: true,
columns: columnsCsv
},
writeFn: (stream) => write(
res,
stream,
formatSettings,
params
)
}
]
)

queue.emit('progress', 100)

headerStringifier.end()
resStringifier.end()
}
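The practical effect of replacing the callback-style pipelines and manual end() calls with the awaited streamWriter: 'progress' 100 is only emitted once the CSV data has actually been flushed through wStream. A condensed sketch of that ordering guarantee (simplified, not the real writer; `queue` stands in for the job queue emitter from the surrounding code):

'use strict'

const { pipeline } = require('stream/promises')
const { stringify } = require('csv')

// Simplified sketch: stream completion is awaited before any "done" signal fires
const writeSection = async (records, wStream, queue) => {
  const stringifier = stringify({ header: true, columns: Object.keys(records[0] ?? {}) })
  const done = pipeline(stringifier, wStream)

  for (const record of records) {
    stringifier.write(record)
  }

  stringifier.end()
  await done

  // Only now is it safe to report completion
  queue.emit('progress', 100)
}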
2 changes: 1 addition & 1 deletion workers/loc.api/generate-csv/csv.job.data.js
@@ -1038,7 +1038,7 @@ class CsvJobData {
uId,
uInfo
) {
checkParams(args, 'paramsSchemaForWeightedAveragesReportApiCsv')
checkParams(args, 'paramsSchemaForWeightedAveragesReportApiCsv', ['symbol'])

const {
userId,
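The only change here is that 'symbol' becomes a required request param for the weighted averages CSV. A hypothetical approximation of what the third argument enforces (checkParams internals are not shown in this diff, so the names below are illustrative only):

'use strict'

// Illustrative only: treat the list as params.* fields that must be present
const requireParamsFields = (args, requiredFields = []) => {
  const params = args?.params ?? {}

  for (const field of requiredFields) {
    if (params[field] === undefined || params[field] === null) {
      throw new Error(`Required param is missing: ${field}`)
    }
  }
}

// requireParamsFields({ params: {} }, ['symbol'])                    // throws
// requireParamsFields({ params: { symbol: 'tBTCUSD' } }, ['symbol']) // passes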
8 changes: 7 additions & 1 deletion workers/loc.api/helpers/api-errors-testers.js
@@ -75,6 +75,11 @@ const isForbiddenError = (err) => {
return /forbidden/i.test(_getErrorString(err))
}

// https://docs.bitfinex.com/docs/rest-general
const isMaintenanceError = (err) => {
return /maintenance/i.test(_getErrorString(err))
}

const isENetError = (err) => (
isENetUnreachError(err) ||
isEConnResetError(err) ||
@@ -107,5 +112,6 @@ module.exports = {
isEProtoError,
isTempUnavailableError,
isENetError,
isForbiddenError
isForbiddenError,
isMaintenanceError
}
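A quick sanity check of the new tester, assuming _getErrorString ends up including the error message (the sample messages below are invented):

'use strict'

const { isMaintenanceError } = require('./api-errors-testers')

// Any error whose text mentions maintenance (case-insensitive) matches
console.log(isMaintenanceError(new Error('ERR_API: platform is under maintenance'))) // true
console.log(isMaintenanceError(new Error('ERR_RATE_LIMIT: ratelimit'))) // false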
4 changes: 3 additions & 1 deletion workers/loc.api/helpers/index.js
@@ -37,7 +37,8 @@ const {
isENotFoundError,
isESocketTimeoutError,
isENetError,
isForbiddenError
isForbiddenError,
isMaintenanceError
} = require('./api-errors-testers')
const {
accountCache,
@@ -82,6 +83,7 @@ module.exports = {
isESocketTimeoutError,
isENetError,
isForbiddenError,
isMaintenanceError,
accountCache,
parseFields,
parseLoginsExtraDataFields,
27 changes: 22 additions & 5 deletions workers/loc.api/helpers/utils.js
@@ -1,22 +1,39 @@
'use strict'

const { transform } = require('lodash')
const LRU = require('lru')

const accountCache = new LRU({ maxAge: 900000, max: 1 })

const parseFields = (res, opts) => {
const { executed, rate } = opts

return transform(res, (result, obj) => {
if (
!Array.isArray(res) ||
res.length === 0
) {
return res
}

return res.reduce((accum, curr) => {
if (
!curr ||
typeof curr !== 'object'
) {
accum.push(curr)

return accum
}

if (executed) {
obj.amountExecuted = obj.amountOrig - obj.amount
curr.amountExecuted = curr.amountOrig - curr.amount
}
if (rate) {
obj.rate = obj.rate || 'Flash Return Rate'
curr.rate = curr.rate ?? 'Flash Return Rate'
}

result.push(obj)
accum.push(curr)

return accum
}, [])
}

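A worked example of the rewritten parseFields (values are made up): non-object entries now pass through untouched, amountExecuted is derived per row, and switching from || to ?? means a legitimate rate of 0 is no longer replaced by the 'Flash Return Rate' fallback.

'use strict'

// Path assumed relative to workers/loc.api/helpers
const { parseFields } = require('./utils')

const res = [
  { amountOrig: 10, amount: 4, rate: null },
  { amountOrig: 2, amount: 2, rate: 0 },
  null
]

const parsed = parseFields(res, { executed: true, rate: true })

// [
//   { amountOrig: 10, amount: 4, rate: 'Flash Return Rate', amountExecuted: 6 },
//   { amountOrig: 2, amount: 2, rate: 0, amountExecuted: 0 },
//   null
// ]
console.log(parsed)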
45 changes: 32 additions & 13 deletions workers/loc.api/responder/index.js
@@ -8,7 +8,8 @@ const {
isNonceSmallError,
isUserIsNotMerchantError,
isSymbolInvalidError,
isForbiddenError
isForbiddenError,
isMaintenanceError
} = require('../helpers')

const {
@@ -184,7 +185,6 @@ const _getErrorMetadata = (args, err) => {
const _logError = (loggerArgs, err) => {
const {
logger,
wsEventEmitter,
args,
name,
isInternalRequest
@@ -198,8 +198,33 @@
error
} = _getErrorMetadata(args, err)

_emitEventByWs(loggerArgs, error)

if (
code !== 500 ||
shouldNotBeLoggedToStdErrorStream
) {
logger.debug(_prepareErrorData(error, name))

return
}

logger.error(_prepareErrorData(error, name))
}

const _emitEventByWs = (emitterArgs, error) => {
const {
logger,
wsEventEmitter,
args,
name
} = emitterArgs ?? {}

if (!(wsEventEmitter instanceof AbstractWSEventEmitter)) {
return
}

if (
wsEventEmitter instanceof AbstractWSEventEmitter &&
args?.auth?.authToken &&
(
error instanceof AuthError ||
@@ -217,17 +242,11 @@
logger.error(_prepareErrorData(err, name))
})
}

if (
code !== 500 ||
shouldNotBeLoggedToStdErrorStream
) {
logger.debug(_prepareErrorData(error, name))

return
if (isMaintenanceError(error)) {
wsEventEmitter.emitMaintenanceTurnedOn().then(() => {}, (err) => {
logger.error(_prepareErrorData(err, name))
})
}

logger.error(_prepareErrorData(error, name))
}

/*
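To summarise the reshuffled error path: _logError now delegates the WS side effects to _emitEventByWs before choosing the log level, and a maintenance-flavoured error additionally pushes emitMaintenanceTurnedOn to the client. A condensed, hypothetical version of that flow (not the real responder code; only the pieces visible in this diff are modelled):

'use strict'

// Condensed sketch; `deps` bundles the module's collaborators for illustration
const handleRequestError = async (deps, error, code) => {
  const { logger, wsEventEmitter, isMaintenanceError } = deps

  // WS notification happens first, independently of how the error is logged
  if (isMaintenanceError(error)) {
    try {
      await wsEventEmitter.emitMaintenanceTurnedOn()
    } catch (emitErr) {
      logger.error(emitErr)
    }
  }

  if (code !== 500) {
    logger.debug(error)

    return
  }

  logger.error(error)
}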