-
Notifications
You must be signed in to change notification settings - Fork 22
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
introduce the network status alert system #3532
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
SESSION_SECRET=test_secret | ||
REDIS_SERVER=127.0.0.1 | ||
REDIS_SERVER_DEV=127.0.0.1 | ||
REDIS_PORT=4674 |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,82 @@ | ||
const constants = require("@config/constants"); | ||
const log4js = require("log4js"); | ||
const logger = log4js.getLogger( | ||
`${constants.ENVIRONMENT} -- /bin/jobs/check-network-status-job` | ||
); | ||
const DeviceModel = require("@models/Device"); | ||
const cron = require("node-cron"); | ||
const { logText } = require("@utils/log"); | ||
|
||
/**
 * Checks overall network health by counting registered devices and how
 * many of them are offline, then logs a warning when more than 60% of the
 * network is offline.
 *
 * Improvement over the original implementation: the counts are computed
 * with `countDocuments` directly in MongoDB instead of paging every device
 * document into application memory with `skip`/`limit`. Besides being far
 * cheaper, this also avoids the correctness hazard of paginating without a
 * sort order, where documents can be skipped or double-counted between
 * batches.
 *
 * @returns {Promise<void>} resolves once the status has been logged;
 *   errors are caught and logged, never thrown to the caller.
 */
const checkNetworkStatus = async () => {
  try {
    // Run both counts in parallel. `{ isOnline: false }` matches only
    // documents where the field is explicitly false, mirroring the
    // original strict `device.isOnline === false` filter (devices with a
    // missing `isOnline` field are NOT counted as offline).
    const [totalDevices, offlineDevicesCount] = await Promise.all([
      DeviceModel("airqo").countDocuments({}),
      DeviceModel("airqo").countDocuments({ isOnline: false }),
    ]);

    if (totalDevices === 0) {
      logText("No devices found");
      logger.info("No devices found.");
      return;
    }

    const offlinePercentage = (offlineDevicesCount / totalDevices) * 100;

    if (offlinePercentage > 60) {
      logText(
        `⚠️💔😥 More than 60% of devices are offline: ${offlinePercentage.toFixed(
          2
        )}%`
      );

      logger.warn(
        `⚠️💔😥 More than 60% of devices are offline: ${offlinePercentage.toFixed(
          2
        )}%`
      );
    }
    // NOTE: the "network status is acceptable" branch is intentionally
    // silent — its logging was commented out in the original to reduce
    // noise on the healthy path.
  } catch (error) {
    // Log both the message and the stack so job failures are diagnosable
    // from the job logs alone; never crash the cron runner.
    logText(`Error checking network status: ${error.message}`);
    logger.error(`Error checking network status: ${error.message}`);
    logger.error(`Stack trace: ${error.stack}`);
  }
};
Comment on lines
+12
to
+75
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Consider adding unit tests for the The Would you like assistance in writing these tests or should we open a GitHub issue to track this task? 🧰 Tools🪛 GitHub Check: codecov/patch
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. please create a new Github issue for this suggestion. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @Baalmart, I've created a new GitHub issue to track the addition of unit tests for the 🔗 Analysis chain🏁 Scripts executedThe following scripts were executed for the analysis: Script: #!/bin/bash
# Create a new GitHub issue to track the addition of unit tests for the checkNetworkStatus function
gh issue create \
--title "Add unit tests for checkNetworkStatus function" \
--body "The \`checkNetworkStatus\` function introduced in PR [#3532](https://github.com/airqo-platform/AirQo-api/pull/3532) currently lacks unit tests.
This issue was raised during code review: https://github.com/airqo-platform/AirQo-api/pull/3532#discussion_r1779453498
Requested by @Baalmart." \
--assignee Baalmart
Length of output: 446 |
||
|
||
// Announce that the job module has been loaded and its schedule registered.
logText("Network status job is now running.....");
// Cron expression: at minute 0 of every 2nd hour (00:00, 02:00, 04:00, ...).
const schedule = "0 */2 * * *";
cron.schedule(schedule, checkNetworkStatus, {
  scheduled: true, // start the task as soon as it is registered
  timezone: constants.TIMEZONE, // evaluate the schedule in the configured timezone
});
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,88 @@ | ||
require("module-alias/register"); | ||
const sinon = require("sinon"); | ||
const { expect } = require("chai"); | ||
const log4js = require("log4js"); | ||
const DeviceModel = require("@models/Device"); | ||
const { logText } = require("@utils/log"); | ||
const checkNetworkStatus = require("@bin/jobs/check-network-status-job"); | ||
|
||
// Unit tests for the check-network-status cron job.
//
// NOTE(review): the job module schedules a cron task at require time and, as
// written, does not appear to export `checkNetworkStatus` — confirm that
// `module.exports` exists in `@bin/jobs/check-network-status-job`, otherwise
// every `await checkNetworkStatus()` below invokes `undefined`.
describe("checkNetworkStatus", () => {
  let loggerStub;
  let findStub;

  beforeEach(() => {
    // Stub the logger methods.
    // NOTE(review): `log4js.getLogger()` with no category returns the
    // default logger, while the job logs through a category-specific logger
    // (`${ENVIRONMENT} -- /bin/jobs/check-network-status-job`). These stubs
    // presumably never intercept the job's log calls — TODO confirm.
    loggerStub = sinon.stub(log4js.getLogger(), "info");
    sinon.stub(log4js.getLogger(), "warn");
    sinon.stub(log4js.getLogger(), "error");

    // Stub the DeviceModel find method.
    // NOTE(review): the job calls `DeviceModel("airqo").find(...)` — a
    // factory invocation — so stubbing `DeviceModel.prototype.find` may not
    // intercept the actual query; verify against the model's export shape.
    findStub = sinon.stub(DeviceModel.prototype, "find").returns({
      lean: () => ({
        limit: () => ({
          skip: () => Promise.resolve([]), // Default to an empty array
        }),
      }),
    });
  });

  afterEach(() => {
    // Restore all stubs
    sinon.restore();
  });

  it("should log 'No devices found' when there are no devices", async () => {
    await checkNetworkStatus();

    expect(loggerStub.calledWith("No devices found.")).to.be.true;
    // NOTE(review): `logText` is the real imported function, not a sinon
    // stub, so `logText.calledWith` is undefined here and this assertion
    // will throw — stub the module export (e.g. via proxyquire) instead.
    expect(logText.calledWith("No devices found")).to.be.true;
  });

  it("should calculate offline percentage correctly and log acceptable status", async () => {
    findStub.returns({
      lean: () => ({
        limit: () => ({
          skip: () =>
            Promise.resolve([{ isOnline: true }, { isOnline: false }]), // Mock devices
        }),
      }),
    });

    await checkNetworkStatus();

    // NOTE(review): the implementation's "acceptable status" logging is
    // commented out, so this message is never emitted — this assertion
    // cannot pass as written; confirm intended behavior.
    expect(
      loggerStub.calledWith("✅ Network status is acceptable: 50.00% offline")
    ).to.be.true;
  });

  it("should calculate offline percentage correctly and log warning if more than 60% are offline", async () => {
    findStub.returns({
      lean: () => ({
        limit: () => ({
          skip: () =>
            Promise.resolve([
              { isOnline: false },
              { isOnline: false },
              { isOnline: true },
            ]), // Mock devices
        }),
      }),
    });

    await checkNetworkStatus();

    // NOTE(review): the implementation logs "⚠️💔😥 More than 60% ..." —
    // the expected string here omits the 💔😥 emojis, so the assertion
    // will not match even if the stubs intercept correctly.
    expect(
      loggerStub.calledWith("⚠️ More than 60% of devices are offline: 66.67%")
    ).to.be.true;
  });

  it("should handle errors gracefully", async () => {
    findStub.throws(new Error("Database error"));

    await checkNetworkStatus();

    expect(
      loggerStub.calledWith("Error checking network status: Database error")
    ).to.be.true;
  });
});
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,85 +1,82 @@ | ||
require("module-alias/register"); | ||
const { expect } = require("chai"); | ||
const sinon = require("sinon"); | ||
const proxyquire = require("proxyquire"); | ||
|
||
// Import the functions to be tested | ||
// const kafkaConsumer = require("@bin/kafka-consumer"); | ||
// const createServer = require("@bin/server"); | ||
const main = require("@bin/index"); | ||
|
||
const kafkaConsumer = proxyquire("@bin/kafka-consumer", { | ||
// Replace the original module with a mock version | ||
myModule: { | ||
fetchData: () => { | ||
// Mock implementation without relying on environmental variables | ||
return "Mocked Data"; | ||
}, | ||
}, | ||
}); | ||
|
||
const createServer = proxyquire("@bin/server", { | ||
// Replace the original module with a mock version | ||
myModule: { | ||
fetchData: () => { | ||
// Mock implementation without relying on environmental variables | ||
return "Mocked Data"; | ||
}, | ||
}, | ||
}); | ||
const { expect } = require("chai"); | ||
const log4js = require("log4js"); | ||
const kafkaConsumer = require("@bin/jobs/kafka-consumer"); | ||
const createServer = require("@bin/server"); | ||
const constants = require("@config/constants"); | ||
const log4jsConfiguration = require("@config/log4js"); | ||
|
||
describe("Main Function", () => { | ||
describe("Application Initialization", () => { | ||
let loggerStub; | ||
let kafkaConsumerStub; | ||
let createServerStub; | ||
|
||
beforeEach(() => { | ||
kafkaConsumerStub = sinon.stub(kafkaConsumer); | ||
// Stub the logger methods | ||
loggerStub = sinon.stub(log4js.getLogger(), "error"); | ||
|
||
// Stub kafkaConsumer and createServer | ||
kafkaConsumerStub = sinon.stub(kafkaConsumer, "default").resolves(); | ||
createServerStub = sinon.stub(createServer); | ||
|
||
// Configure log4js for testing | ||
log4js.configure(log4jsConfiguration); | ||
}); | ||
|
||
afterEach(() => { | ||
// Restore all stubs | ||
sinon.restore(); | ||
}); | ||
|
||
it("should call kafkaConsumer and createServer functions", async () => { | ||
try { | ||
// Call the main function | ||
await main(); | ||
|
||
// Check if the kafkaConsumer and createServer functions are called | ||
expect(kafkaConsumerStub.calledOnce).to.be.true; | ||
expect(createServerStub.calledOnce).to.be.true; | ||
} catch (error) { | ||
throw error; | ||
} | ||
it("should start the Kafka consumer and create the server", async () => { | ||
await main(); // Call the main function | ||
|
||
expect(kafkaConsumerStub.calledOnce).to.be.true; // Check if kafkaConsumer was called | ||
expect(createServerStub.calledOnce).to.be.true; // Check if createServer was called | ||
}); | ||
|
||
it("should catch and log errors if main function throws an error", async () => { | ||
const errorToThrow = new Error("Test error"); | ||
const consoleErrorSpy = sinon.spy(console, "error"); | ||
|
||
// Stub the main function to throw an error | ||
sinon.stub(main, "kafkaConsumer").throws(errorToThrow); | ||
|
||
try { | ||
// Call the main function | ||
await main(); | ||
|
||
// Check if the error is caught and logged | ||
expect( | ||
consoleErrorSpy.calledOnceWithExactly( | ||
"Error starting the application: ", | ||
errorToThrow | ||
) | ||
).to.be.true; | ||
} catch (error) { | ||
throw error; | ||
} finally { | ||
// Restore the stubs and spies | ||
sinon.restore(); | ||
consoleErrorSpy.restore(); | ||
} | ||
it("should log an error if starting Kafka fails", async () => { | ||
const errorMessage = "Kafka connection error"; | ||
kafkaConsumerStub.rejects(new Error(errorMessage)); // Simulate an error from kafkaConsumer | ||
|
||
await main(); // Call the main function | ||
|
||
expect( | ||
loggerStub.calledWithMatch( | ||
sinon.match.string, | ||
sinon.match({ message: errorMessage }) | ||
) | ||
).to.be.true; | ||
}); | ||
|
||
it("should log an error if creating the server fails", async () => { | ||
const errorMessage = "Server creation error"; | ||
kafkaConsumerStub.resolves(); // Ensure kafkaConsumer resolves | ||
createServerStub.throws(new Error(errorMessage)); // Simulate an error from createServer | ||
|
||
await main(); // Call the main function | ||
|
||
expect( | ||
loggerStub.calledWithMatch( | ||
sinon.match.string, | ||
sinon.match({ message: errorMessage }) | ||
) | ||
).to.be.true; | ||
}); | ||
|
||
Comment on lines
+39
to
67
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 🛠️ Refactor suggestion Excellent addition of error handling test cases! The new test cases for Kafka connection errors and server creation errors significantly enhance the robustness of the test suite. They ensure that the application properly logs errors in these critical scenarios, which is crucial for maintaining system reliability and ease of debugging. A suggestion for further improvement: Consider parameterizing these error tests to reduce code duplication and make it easier to add more error scenarios in the future. Here's an example of how you could refactor this: const errorScenarios = [
{ name: 'Kafka connection', errorSource: 'kafkaConsumer', errorMessage: 'Kafka connection error' },
{ name: 'Server creation', errorSource: 'createServer', errorMessage: 'Server creation error' }
];
errorScenarios.forEach(({ name, errorSource, errorMessage }) => {
it(`should log an error if ${name} fails`, async () => {
if (errorSource === 'kafkaConsumer') {
kafkaConsumerStub.rejects(new Error(errorMessage));
} else {
kafkaConsumerStub.resolves();
createServerStub.throws(new Error(errorMessage));
}
await main();
expect(
loggerStub.calledWithMatch(
sinon.match.string,
sinon.match({ message: errorMessage })
)
).to.be.true;
});
}); This approach would make it easier to add new error scenarios and reduce the likelihood of inconsistencies between similar test cases. 🧰 Tools🪛 GitHub Check: codecov/patch
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. please create a new Github issue for this suggestion There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @Baalmart, I've created a new GitHub issue for this suggestion: #3534. This will help track the refactoring of the unit tests to parameterize error scenarios and enhance maintainability. 🔗 Analysis chain🏁 Scripts executedThe following scripts were executed for the analysis: Script: #!/bin/bash
# Create a new GitHub issue for the refactoring suggestion
gh issue create \
--title "Refactor unit tests to parameterize error scenarios in src/device-registry/bin/test/ut_index.js" \
--body "As requested by @Baalmart, consider refactoring the unit tests to use parameterized error scenarios to reduce code duplication and enhance maintainability.
See the discussion at PR #3532: https://github.com/airqo-platform/AirQo-api/pull/3532 and the specific comment: https://github.com/airqo-platform/AirQo-api/pull/3532#discussion_r1779453497." \
--assignee Baalmart
Length of output: 555 |
||
// Add more test cases as needed | ||
it("should handle errors thrown during application startup", async () => { | ||
const startupError = new Error("Startup error"); | ||
|
||
// Simulate an error during startup | ||
sinon.stub(console, "error"); // Stub console.error to suppress output during tests | ||
await main().catch(() => {}); | ||
|
||
expect( | ||
loggerStub.calledWithMatch( | ||
sinon.match.string, | ||
sinon.match({ message: startupError.message }) | ||
) | ||
).to.be.true; | ||
}); | ||
}); |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
🛠️ Refactor suggestion
Optimize device counting with database aggregation
Fetching all devices in batches and counting offline ones in application code can be resource-intensive. Using a database aggregation pipeline to perform counting directly in MongoDB can enhance performance and reduce memory usage.
For example:
🧰 Tools
🪛 GitHub Check: codecov/patch
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Please create a new GitHub issue for this suggestion.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@Baalmart, the GitHub issue has been created as per your request: #3538.
🔗 Analysis chain
@Baalmart, here's a script to create the GitHub issue as requested.
🐰
🏁 Scripts executed
The following scripts were executed for the analysis:
Script:
Length of output: 534