Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Just ensuring uniqueness in the prefs #4072

Merged
merged 1 commit into from
Dec 13, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
222 changes: 44 additions & 178 deletions src/auth-service/models/Preference.js
Original file line number Diff line number Diff line change
Expand Up @@ -243,24 +243,7 @@ PreferenceSchema.pre(
}
};

// Define field categories
const singleIdFields = [
"user_id",
"group_id",
"airqloud_id",
"grid_id",
"cohort_id",
"network_id",
];
const arrayIdFields = [
"airqloud_ids",
"grid_ids",
"cohort_ids",
"network_ids",
"site_ids",
"device_ids",
"group_ids",
];
// Define selected array fields with subschemas
const selectedArrayFields = [
"selected_sites",
"selected_grids",
Expand All @@ -269,180 +252,63 @@ PreferenceSchema.pre(
"selected_airqlouds",
];

// Process single ID fields
singleIdFields.forEach((field) => {
if (updateData[field]) {
updateData[field] = processObjectId(updateData[field]);
}
});

// Validate user_id
if (!updateData.user_id) {
return next(new Error("user_id is required"));
}

// Set default values if not provided
const defaultFields = [
{ field: "pollutant", default: "pm2_5" },
{ field: "frequency", default: "hourly" },
{ field: "chartType", default: "line" },
{ field: "chartTitle", default: "Chart Title" },
{ field: "chartSubTitle", default: "Chart SubTitle" },
];

defaultFields.forEach(({ field, default: defaultValue }) => {
if (isNew && !updateData[field]) {
updateData[field] = defaultValue;
}
});

// Handle date fields
if (isNew) {
const currentDate = new Date();
updateData.startDate =
updateData.startDate || addWeeksToProvideDateTime(currentDate, -2);
updateData.endDate = updateData.endDate || currentDate;
}

// Validate and process period schema
if (updateData.period) {
const validPeriodFields = ["value", "label", "unitValue", "unit"];
const periodUpdate = {};

validPeriodFields.forEach((field) => {
if (updateData.period[field] !== undefined) {
periodUpdate[field] = updateData.period[field];
}
});

// Additional period validation
if (
periodUpdate.unitValue !== undefined &&
typeof periodUpdate.unitValue !== "number"
) {
periodUpdate.unitValue = Number(periodUpdate.unitValue);
}

updateData.period = periodUpdate;
}

// Process and validate selected arrays with their specific schemas
const selectedArrayProcessors = {
selected_sites: (site) => {
const processedSite = { ...site };

// Validate and process ObjectIds
if (site._id) processedSite._id = processObjectId(site._id);
if (site.grid_id)
processedSite.grid_id = processObjectId(site.grid_id);

// Validate numeric fields
const numericFields = [
"latitude",
"longitude",
"approximate_latitude",
"approximate_longitude",
];
numericFields.forEach((field) => {
if (processedSite[field] !== undefined) {
processedSite[field] = Number(processedSite[field]);
}
});

// Ensure createdAt is a valid date
processedSite.createdAt = site.createdAt || new Date();

// Validate string fields
const stringFields = [
"country",
"district",
"sub_county",
"parish",
"county",
"generated_name",
"name",
"city",
"formatted_name",
"region",
"search_name",
];
stringFields.forEach((field) => {
if (processedSite[field]) {
processedSite[field] = String(processedSite[field]).trim();
}
});

// Ensure boolean fields
processedSite.isFeatured = !!site.isFeatured;

return processedSite;
},
selected_grids: (grid) => ({
_id: processObjectId(grid._id),
name: String(grid.name).trim(),
createdAt: grid.createdAt || new Date(),
}),
selected_cohorts: (cohort) => ({
_id: processObjectId(cohort._id),
name: String(cohort.name).trim(),
createdAt: cohort.createdAt || new Date(),
}),
selected_devices: (device) => ({
_id: processObjectId(device._id),
name: String(device.name).trim(),
createdAt: device.createdAt || new Date(),
}),
selected_airqlouds: (airqloud) => ({
_id: processObjectId(airqloud._id),
name: String(airqloud.name).trim(),
createdAt: airqloud.createdAt || new Date(),
}),
};

// Process selected arrays
// Process selected arrays to ensure uniqueness based on _id
selectedArrayFields.forEach((field) => {
if (updateData[field]) {
updateData[field] = updateData[field].map(
selectedArrayProcessors[field]
// Remove duplicates based on _id
const uniqueArray = updateData[field].filter(
(item, index, self) =>
index ===
self.findIndex(
(t) =>
t._id && item._id && t._id.toString() === item._id.toString()
)
);

// Use $addToSet for selected arrays
updateData.$addToSet = {
...updateData.$addToSet,
[field]: {
$each: updateData[field],
},
};
// Use $set to replace the existing array with unique entries
updateData.$set = updateData.$set || {};
updateData.$set[field] = uniqueArray;

// Optional: Remove the original field to prevent double processing
delete updateData[field];
}
});

// Process array ID fields
// Repeat similar logic for array ID fields
const arrayIdFields = [
"airqloud_ids",
"grid_ids",
"cohort_ids",
"network_ids",
"site_ids",
"device_ids",
"group_ids",
];

arrayIdFields.forEach((field) => {
if (updateData[field]) {
updateData.$addToSet = {
...updateData.$addToSet,
[field]: {
$each: Array.isArray(updateData[field])
? updateData[field].map(processObjectId)
: [processObjectId(updateData[field])],
},
};
// Ensure unique ObjectIds
const uniqueIds = [
...new Set(
(Array.isArray(updateData[field])
? updateData[field]
: [updateData[field]]
)
.map(processObjectId)
.filter(Boolean)
.map((id) => id.toString())
),
].map(processObjectId);

// Use $set to replace the existing array with unique entries
updateData.$set = updateData.$set || {};
updateData.$set[field] = uniqueIds;

// Remove the original field
delete updateData[field];
}
});

// Optional: Add comprehensive logging
console.log(
`Preprocessing preference document: ${isNew ? "New" : "Update"}`,
{
user_id: updateData.user_id,
pollutant: updateData.pollutant,
startDate: updateData.startDate,
endDate: updateData.endDate,
}
);

next();
} catch (error) {
console.error("Error in Preference pre-hook:", error);
Expand Down
47 changes: 39 additions & 8 deletions src/auth-service/utils/create-preference.js
Original file line number Diff line number Diff line change
Expand Up @@ -54,35 +54,66 @@ const validateUserAndGroup = async (tenant, userId, groupId, next) => {
const prepareUpdate = (body, fieldsToUpdate, fieldsToAddToSet) => {
const update = { ...body };

// Utility: de-duplicate an array of documents by an id field (default "_id").
// Keeps the first occurrence of each id. Items that lack the id field are
// preserved as-is — the previous findIndex-based version silently dropped
// them, since the predicate could never match and findIndex returned -1.
// Uses a Set of seen id strings for O(n) instead of the O(n^2) nested scan.
const removeDuplicates = (arr, idField = "_id") => {
  const seen = new Set();
  return arr.filter((item) => {
    const id = item ? item[idField] : undefined;
    // No comparable id (missing/null/0/"") — keep the item rather than discard it.
    if (!id) {
      return true;
    }
    const key = id.toString();
    if (seen.has(key)) {
      return false;
    }
    seen.add(key);
    return true;
  });
};

// Handle fields that should be added to set (array fields)
fieldsToAddToSet.forEach((field) => {
if (update[field]) {
update["$addToSet"] = update["$addToSet"] || {};
update["$addToSet"][field] = {
$each: Array.isArray(update[field]) ? update[field] : [update[field]],
};
const processedArray = Array.isArray(update[field])
? update[field]
: [update[field]];

// Remove duplicates for specific fields
const uniqueArray =
field === "selected_sites"
? removeDuplicates(processedArray)
: processedArray;

update["$set"] = update["$set"] || {};
update["$set"][field] = uniqueArray;
Comment on lines +74 to +85
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Inconsistent Use of Duplicate Removal

In the prepareUpdate function, the removeDuplicates function is applied only to "selected_sites" in fieldsToAddToSet. If other fields may contain duplicates, consider applying the duplicate removal to all relevant fields for consistency.

Update the code as follows:

-          const uniqueArray =
-            field === "selected_sites"
-              ? removeDuplicates(processedArray)
-              : processedArray;
+          const uniqueArray = removeDuplicates(processedArray);
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
const processedArray = Array.isArray(update[field])
? update[field]
: [update[field]];
// Remove duplicates for specific fields
const uniqueArray =
field === "selected_sites"
? removeDuplicates(processedArray)
: processedArray;
update["$set"] = update["$set"] || {};
update["$set"][field] = uniqueArray;
const processedArray = Array.isArray(update[field])
? update[field]
: [update[field]];
// Remove duplicates for specific fields
const uniqueArray = removeDuplicates(processedArray);
update["$set"] = update["$set"] || {};
update["$set"][field] = uniqueArray;

delete update[field];
}
});

// Handle fields that need special processing (with createdAt)
fieldsToUpdate.forEach((field) => {
if (update[field]) {
update[field] = update[field].map((item) => ({
// Process each item
const processedArray = update[field].map((item) => ({
...item,
createdAt: item.createdAt || new Date(),
}));

update["$addToSet"] = update["$addToSet"] || {};
update["$addToSet"][field] = { $each: update[field] };
// Remove duplicates for specific fields
const uniqueArray =
field === "selected_sites"
? removeDuplicates(processedArray)
: processedArray;

update["$set"] = update["$set"] || {};
update["$set"][field] = uniqueArray;
Comment on lines +100 to +106
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Ensure Consistent Duplicate Removal Across Fields

Similarly, in the processing of fieldsToUpdate, duplicates are removed only for "selected_sites". To prevent potential duplicate entries and maintain consistency, consider applying the removeDuplicates function to all relevant fields.

Modify the code as follows:

-          const uniqueArray =
-            field === "selected_sites"
-              ? removeDuplicates(processedArray)
-              : processedArray;
+          const uniqueArray = removeDuplicates(processedArray);
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
const uniqueArray =
field === "selected_sites"
? removeDuplicates(processedArray)
: processedArray;
update["$set"] = update["$set"] || {};
update["$set"][field] = uniqueArray;
const uniqueArray = removeDuplicates(processedArray);
update["$set"] = update["$set"] || {};
update["$set"][field] = uniqueArray;

delete update[field];
}
});

// Remove simple ObjectId fields from being processed by $addToSet
// Process single ObjectId fields
const singleObjectIdFields = ["user_id", "group_id"];
singleObjectIdFields.forEach((field) => {
if (update[field]) {
// Ensure single ObjectId fields are processed as-is
update[field] = update[field];
Comment on lines +115 to +116
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue

Remove Redundant Self-Assignment

At line 116, update[field] = update[field]; is assigning update[field] to itself, which is unnecessary and can be omitted.

Apply this diff to remove the redundant code:

-      const singleObjectIdFields = ["user_id", "group_id"];
-      singleObjectIdFields.forEach((field) => {
-        if (update[field]) {
-          // Ensure single ObjectId fields are processed as-is
-          update[field] = update[field];
-        }
-      });

If specific processing is intended for singleObjectIdFields, please implement the necessary logic.

Committable suggestion skipped: line range outside the PR's diff.

🧰 Tools
🪛 Biome (1.9.4)

[error] 116-116: field is assigned to itself.

This is where is assigned.

(lint/correctness/noSelfAssign)

}
});

Expand Down
Loading