From c1302300ed4c211fceda16c4e36ebc043ce24471 Mon Sep 17 00:00:00 2001
From: williamlardier
Date: Wed, 11 Oct 2023 16:10:25 +0200
Subject: [PATCH] S3UTILS-148: rework logs logic and lint files

---
 CountItems/CountManager.js  |  5 ++++
 CountItems/CountWorker.js   |  2 +-
 CountItems/masterProcess.js |  5 +++-
 CountItems/workerProcess.js |  5 +++-
 utils/S3UtilsMongoClient.js | 49 ++++++++++++++++++++++++++-----------
 5 files changed, 49 insertions(+), 17 deletions(-)

diff --git a/CountItems/CountManager.js b/CountItems/CountManager.js
index 356ccfdb..7fa19a04 100644
--- a/CountItems/CountManager.js
+++ b/CountItems/CountManager.js
@@ -36,6 +36,11 @@ class CountManager {
             }
             const id = this.workerList.shift();
             return this.workers[id].count(bucketInfo, (err, res) => {
+                this.log.info(`processing the bucket "${bucketInfo._name}"`, {
+                    method: 'CountManager::_setupQueue',
+                    bucket: bucketInfo.name,
+                    workInQueue: this.q.length(),
+                });
                 if (err) {
                     return done(err);
                 }
diff --git a/CountItems/CountWorker.js b/CountItems/CountWorker.js
index 6b76ff90..41fc9ec0 100644
--- a/CountItems/CountWorker.js
+++ b/CountItems/CountWorker.js
@@ -31,7 +31,7 @@ class CountWorker {
         }
         const bucketInfo = BucketInfo.fromObj(bucketInfoObj);
         const bucketName = bucketInfo.getName();
-        this.log.debug(`${process.pid} handling ${bucketName}`);
+        this.log.info(`${process.pid} handling ${bucketName}`);
         return async.waterfall([
             next => this.client._getIsTransient(bucketInfo, this.log, next),
             (isTransient, next) => this.client.getObjectMDStats(bucketName, bucketInfo, isTransient, this.log, next),
diff --git a/CountItems/masterProcess.js b/CountItems/masterProcess.js
index 6fb5704d..35888508 100644
--- a/CountItems/masterProcess.js
+++ b/CountItems/masterProcess.js
@@ -7,8 +7,11 @@ const CountManager = require('./CountManager');
 const createMongoParams = require('../utils/createMongoParams');
 const createWorkers = require('./utils/createWorkers');
 
+const logLevel = Number.parseInt(process.env.DEBUG, 10) === 1
+    ? 'debug' : 'info';
+
 const loggerConfig = {
-    level: 'debug',
+    level: logLevel,
     dump: 'error',
 };
 
diff --git a/CountItems/workerProcess.js b/CountItems/workerProcess.js
index e48a9f7c..090c48b6 100644
--- a/CountItems/workerProcess.js
+++ b/CountItems/workerProcess.js
@@ -5,8 +5,11 @@ const S3UtilsMongoClient = require('../utils/S3UtilsMongoClient');
 const CountWorker = require('./CountWorker');
 const createMongoParams = require('../utils/createMongoParams');
 
+const logLevel = Number.parseInt(process.env.DEBUG, 10) === 1
+    ? 'debug' : 'info';
+
 const loggerConfig = {
-    level: 'info',
+    level: logLevel,
     dump: 'error',
 };
 
diff --git a/utils/S3UtilsMongoClient.js b/utils/S3UtilsMongoClient.js
index 57495f39..9d006e0c 100644
--- a/utils/S3UtilsMongoClient.js
+++ b/utils/S3UtilsMongoClient.js
@@ -52,9 +52,18 @@ class S3UtilsMongoClient extends MongoClientInterface {
 
             const usersBucketCreationDatesMap = usersBucketCreationDatesArray
                 .reduce((map, obj) => ({ ...map, [obj._id]: obj.value.creationDate }), {});
-
+            const startCursorDate = new Date();
+            let processed = 0;
             await cursor.forEach(
                 res => {
+                    // Periodically display information about the cursor
+                    // if more than 30s elapsed
+                    if (Date.now() - startCursorDate > 30000) {
+                        log.info('Processing cursor', {
+                            method: 'getObjectMDStats',
+                            bucketName,
+                        });
+                    }
                     const { data, error } = this._processEntryData(
                         bucketName,
                         bucketInfo,
@@ -150,12 +159,14 @@ class S3UtilsMongoClient extends MongoClientInterface {
                             collRes.account[account].locations[location].deleteMarkerCount += res.value.isDeleteMarker ? 1 : 0;
                         });
                     });
+                    processed++;
                 },
                 err => {
                     if (err) {
                         log.error('Error when processing mongo entries', {
                             method: 'getObjectMDStats',
-                            error: err,
+                            errDetails: { ...err },
+                            errorString: err.toString(),
                         });
                         return callback(err);
                     }
@@ -178,7 +189,8 @@ class S3UtilsMongoClient extends MongoClientInterface {
         } catch (err) {
             log.error('An error occurred', {
                 method: 'getObjectMDStats',
-                error: err,
+                errDetails: { ...err },
+                errorString: err.toString(),
             });
             return callback(err);
         }
@@ -406,8 +418,7 @@ class S3UtilsMongoClient extends MongoClientInterface {
                 upsert: false,
             });
             if (!updateResult.ok) {
-                log.error(
-                    'updateBucketCapacityInfo: failed to update bucket CapacityInfo', {
+                log.error('updateBucketCapacityInfo: failed to update bucket CapacityInfo', {
                     bucketName,
                     capacityInfo,
                 });
@@ -417,6 +428,8 @@ class S3UtilsMongoClient extends MongoClientInterface {
         } catch (err) {
             log.error('updateBucketCapacityInfo: error putting bucket CapacityInfo', {
                 error: err.message,
+                errDetails: { ...err },
+                errorString: err.toString(),
                 bucketName,
                 capacityInfo,
             });
@@ -455,6 +468,7 @@ class S3UtilsMongoClient extends MongoClientInterface {
                 ...S3UtilsMongoClient.convertNumberToLong(metrics),
             }))),
         ];
+        log.info('updateStorageConsumptionMetrics: updating storage metrics');
 
         // Drop the temporary collection if it exists
         try {
@@ -468,12 +482,14 @@ class S3UtilsMongoClient extends MongoClientInterface {
             await tempCollection.insertMany(updatedStorageMetricsList, { ordered: false });
             await async.retry(
                 3,
-                async () => tempCollection.rename(INFOSTORE, { dropTarget: true })
+                async () => tempCollection.rename(INFOSTORE, { dropTarget: true }),
             );
             return cb();
         } catch (err) {
             log.error('updateStorageConsumptionMetrics: error updating storage metrics', {
-                error: err.message,
+                error: err,
+                errDetails: { ...err },
+                errorString: err.toString(),
             });
             return cb(errors.InternalError);
         }
@@ -484,7 +500,7 @@ class S3UtilsMongoClient extends MongoClientInterface {
             const i = this.getCollection(INFOSTORE);
             const doc = await async.retry(
                 3,
-                async () => i.findOne({ _id: entityName })
+                async () => i.findOne({ _id: entityName }),
             );
             if (!doc) {
                 return cb(errors.NoSuchEntity);
@@ -492,7 +508,9 @@ class S3UtilsMongoClient extends MongoClientInterface {
             return cb(null, doc);
         } catch (err) {
             log.error('readStorageConsumptionMetrics: error reading metrics', {
-                error: err.message,
+                error: err,
+                errDetails: { ...err },
+                errorString: err.toString(),
             });
             return cb(errors.InternalError);
         }
@@ -507,7 +525,7 @@ class S3UtilsMongoClient extends MongoClientInterface {
         try {
             const bucketInfos = [];
             const collInfos = await this.db.listCollections().toArray();
-            await async.eachLimit(collInfos, 10, async (value) => {
+            await async.eachLimit(collInfos, 10, async value => {
                 if (this._isSpecialCollection(value.name)) {
                     // skip
                     return;
@@ -529,7 +547,8 @@ class S3UtilsMongoClient extends MongoClientInterface {
                 } else {
                     log.error('failed to get bucket attributes', {
                         bucketName,
-                        error: err,
+                        errDetails: { ...err },
+                        errorString: err.toString(),
                     });
                     throw errors.InternalError;
                 }
@@ -542,7 +561,8 @@ class S3UtilsMongoClient extends MongoClientInterface {
         } catch (err) {
             log.error('could not get list of collections', {
                 method: '_getBucketInfos',
-                error: err,
+                errDetails: { ...err },
+                errorString: err.toString(),
             });
             return cb(err);
         }
@@ -570,11 +590,12 @@ class S3UtilsMongoClient extends MongoClientInterface {
                 log.error('failed to read bucket entry from __usersbucket', {
                     bucketName,
                     ownerId,
-                    error: err,
+                    errDetails: { ...err },
+                    errorString: err.toString(),
                 });
                 return cb(err);
             }
-    }
+    }
 }
 
 module.exports = S3UtilsMongoClient;
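
Note on the logging pattern above (not part of the patch): the hunks replace `error: err` with the pair `errDetails: { ...err }` and `errorString: err.toString()` because `message` and `stack` are non-enumerable on Error objects, so an object spread only copies custom enumerable fields while toString() keeps the human-readable description. A minimal standalone sketch, with a hypothetical `err` and `code` field:

// Sketch only: illustrates the errDetails/errorString pattern used in the patch.
const err = new Error('connection reset');
err.code = 'ECONNRESET'; // hypothetical enumerable field attached by a caller

console.log({ ...err });     // { code: 'ECONNRESET' } -- no message, no stack
console.log(err.toString()); // 'Error: connection reset'

// Combined payload, mirroring the log calls in the patch:
console.log({
    errDetails: { ...err },
    errorString: err.toString(),
});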