Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor logging functions to use %s formatting instead of f-strings #3822

Merged
6 changes: 4 additions & 2 deletions bugbug/models/browsername.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,10 +70,12 @@ def get_labels(self):
classes[issue["number"]] = 0

logger.info(
f"{sum(1 for label in classes.values() if label == 1)} issues belong to Firefox"
"%d issues belong to Firefox",
sum(1 for label in classes.values() if label == 1),
)
logger.info(
f"{sum(1 for label in classes.values() if label == 0)} issues do not belong to Firefox"
"%d issues do not belong to Firefox",
sum(1 for label in classes.values() if label == 0),
)

return classes, [0, 1]
Expand Down
8 changes: 7 additions & 1 deletion bugbug/models/bugtype.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,7 +158,13 @@ def get_labels(self) -> tuple[dict[int, np.ndarray], list[str]]:

for type_ in TYPE_LIST:
logger.info(
f"{sum(1 for target in classes.values() if target[TYPE_LIST.index(type_)] == 1)} {type_} bugs"
"%d %s bugs",
sum(
1
for target in classes.values()
if target[TYPE_LIST.index(type_)] == 1
),
type_,
)

return classes, TYPE_LIST
Expand Down
6 changes: 4 additions & 2 deletions bugbug/models/needsdiagnosis.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,10 +81,12 @@ def get_labels(self):
classes[issue["number"]] = 1

logger.info(
f"{sum(1 for label in classes.values() if label == 1)} issues have not been moved to needsdiagnosis"
"%d issues have not been moved to needsdiagnosis",
sum(1 for label in classes.values() if label == 1),
)
logger.info(
f"{sum(1 for label in classes.values() if label == 0)} issues have been moved to needsdiagnosis"
"%d issues have been moved to needsdiagnosis",
sum(1 for label in classes.values() if label == 0),
)

return classes, [0, 1]
Expand Down
17 changes: 11 additions & 6 deletions scripts/bug_retriever.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,8 @@ def retrieve_bugs(self, limit: int = None) -> None:

if last_modified is not None:
logger.info(
f"Retrieving IDs of bugs modified since the last run on {last_modified}"
"Retrieving IDs of bugs modified since the last run on %s",
last_modified,
)
changed_ids = set(
bugzilla.get_ids(
Expand All @@ -52,7 +53,7 @@ def retrieve_bugs(self, limit: int = None) -> None:
if "{}::{}".format(bug["product"], bug["component"]) not in all_components
)
logger.info(
f"{len(deleted_component_ids)} bugs belonging to deleted components"
"%d bugs belonging to deleted components", len(deleted_component_ids)
)
changed_ids |= deleted_component_ids

Expand Down Expand Up @@ -107,7 +108,8 @@ def retrieve_bugs(self, limit: int = None) -> None:
if limit:
regression_related_ids = regression_related_ids[-limit:]
logger.info(
f"{len(regression_related_ids)} bugs which caused regressions fixed by commits."
"%d bugs which caused regressions fixed by commits.",
len(regression_related_ids),
)

# Get IDs of bugs linked to intermittent failures.
Expand Down Expand Up @@ -152,7 +154,8 @@ def retrieve_bugs(self, limit: int = None) -> None:
)
)
logger.info(
f"{len(regression_related_ids)} bugs which caused regressions fixed by commits."
"%d bugs which caused regressions fixed by commits.",
len(regression_related_ids),
)
if limit:
regression_related_ids = regression_related_ids[-limit:]
Expand All @@ -175,7 +178,8 @@ def retrieve_bugs(self, limit: int = None) -> None:
break

logger.info(
f"Re-downloading {len(inconsistent_bug_ids)} bugs, as they were inconsistent"
"Re-downloading %d bugs, as they were inconsistent",
len(inconsistent_bug_ids),
)
bugzilla.delete_bugs(lambda bug: bug["id"] in inconsistent_bug_ids)
bugzilla.download_bugs(inconsistent_bug_ids)
Expand All @@ -186,7 +190,8 @@ def retrieve_bugs(self, limit: int = None) -> None:
}
bugzilla.delete_bugs(lambda bug: bug["id"] in missing_history_bug_ids)
logger.info(
f"Deleted {len(missing_history_bug_ids)} bugs as we couldn't retrieve their history"
"Deleted %d bugs as we couldn't retrieve their history",
len(missing_history_bug_ids),
)

zstd_compress(bugzilla.BUGS_DB)
Expand Down
4 changes: 2 additions & 2 deletions scripts/commit_classifier.py
Original file line number Diff line number Diff line change
Expand Up @@ -294,7 +294,7 @@ def apply_phab(self, hg, phabricator_deployment, diff_id):
# Stop as soon as a base revision is available
if self.has_revision(hg, patch.base_revision):
logger.info(
f"Stopping at diff {patch.id} and revision {patch.base_revision}"
"Stopping at diff %s and revision %s", patch.id, patch.base_revision
)
break

Expand Down Expand Up @@ -383,7 +383,7 @@ def load_user(phid):
message = replace_reviewers(message, reviewers)

logger.info(
f"Applying {patch.phid} from revision {revision['id']}: {message}"
                "Applying %s from revision %s: %s", patch.phid, revision["id"], message
StarKBhaviN marked this conversation as resolved.
Show resolved Hide resolved
)

hg.import_(
Expand Down
3 changes: 2 additions & 1 deletion scripts/github_issue_retriever.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,8 @@ def retrieve_issues(self) -> None:

if last_modified:
logger.info(
f"Retrieving issues modified or created since the last run on {last_modified.isoformat()}"
"Retrieving issues modified or created since the last run on %s",
last_modified.isoformat(),
)
data = self.github.fetch_issues_updated_since_timestamp(
last_modified.isoformat()
Expand Down
2 changes: 1 addition & 1 deletion scripts/maintenance_effectiveness_indicator.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def main() -> None:
)

for factor, value in result.items():
logger.info(f"{factor}: {round(value, 2) if value != math.inf else value}")
        logger.info("%s: %s", factor, round(value, 2) if value != math.inf else value)


if __name__ == "__main__":
Expand Down
29 changes: 19 additions & 10 deletions scripts/regressor_finder.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,9 @@ def find_bug_fixing_commits(self) -> None:
commit_map[commit["bug_id"]].append(commit["node"])

logger.info(
f"{sum(len(commit_list) for commit_list in commit_map.values())} commits found, {len(commit_map)} bugs linked to commits"
"%d commits found, %d bugs linked to commits",
sum(len(commit_list) for commit_list in commit_map.values()),
len(commit_map),
)
assert len(commit_map) > 0

Expand All @@ -236,7 +238,9 @@ def get_relevant_bugs() -> Iterator[dict]:

bug_count = sum(1 for bug in get_relevant_bugs())
logger.info(
f"{bug_count} bugs in total, {len(commit_map) - bug_count} bugs linked to commits missing"
"%d bugs in total, %d bugs linked to commits missing",
bug_count,
len(commit_map) - bug_count,
)

known_defect_labels, _ = defect_model.get_labels()
Expand Down Expand Up @@ -340,7 +344,7 @@ def mercurial_to_git(revs):
for bug_introducing_commit in prev_bug_introducing_commits
)
logger.info(
f"Already classified {len(prev_bug_introducing_commits)} commits..."
            "Already classified %d commits...", len(prev_bug_introducing_commits)
StarKBhaviN marked this conversation as resolved.
Show resolved Hide resolved
)

hashes_to_ignore = set(commit["rev"] for commit in commits_to_ignore)
Expand All @@ -363,7 +367,8 @@ def mercurial_to_git(revs):
]

logger.info(
f"{len(bug_fixing_commits)} commits left to analyze after skipping already analyzed ones"
"%d commits left to analyze after skipping already analyzed ones",
len(bug_fixing_commits),
)

bug_fixing_commits = [
Expand All @@ -372,7 +377,8 @@ def mercurial_to_git(revs):
if bug_fixing_commit["rev"] not in hashes_to_ignore
]
logger.info(
f"{len(bug_fixing_commits)} commits left to analyze after skipping the ones in the ignore list"
"%d commits left to analyze after skipping the ones in the ignore list",
len(bug_fixing_commits),
)

if tokenized:
Expand All @@ -382,7 +388,8 @@ def mercurial_to_git(revs):
if bug_fixing_commit["rev"] in self.mercurial_to_tokenized_git
]
logger.info(
f"{len(bug_fixing_commits)} commits left to analyze after skipping the ones with no git hash"
"%d commits left to analyze after skipping the ones with no git hash",
len(bug_fixing_commits),
)

git_init_lock = threading.Lock()
Expand Down Expand Up @@ -476,7 +483,9 @@ def compress_and_upload():

workers = os.cpu_count() + 1
logger.info(
f"Analyzing {len(bug_fixing_commits)} commits using {workers} workers..."
            "Analyzing %d commits using %d workers...",
            len(bug_fixing_commits),
            workers,
)

with concurrent.futures.ThreadPoolExecutor(
Expand All @@ -500,7 +509,7 @@ def results():
exc = future.exception()
if exc is not None:
logger.info(
f"Exception {exc} while analyzing {futures[future]}"
"Exception %s while analyzing %s", exc, futures[future]
)
for f in futures:
f.cancel()
Expand Down Expand Up @@ -608,11 +617,11 @@ def evaluate(bug_introducing_commits):
misassigned_regressors += 1

logger.info(
f"Perfectly found {perfect_regressors} regressors out of {all_regressors}"
"Perfectly found %d regressors out of %d", perfect_regressors, all_regressors
)
logger.info("Found %d regressors out of %d", found_regressors, all_regressors)
logger.info(
f"Misassigned {misassigned_regressors} regressors out of {all_regressors}"
"Misassigned %d regressors out of %d", misassigned_regressors, all_regressors
)


Expand Down
2 changes: 1 addition & 1 deletion scripts/shadow_scheduler_stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def go(months: int) -> None:
scheduler_stat["id"] for scheduler_stat in db.read(SHADOW_SCHEDULER_STATS_DB)
}
logger.info(
f"Already gathered statistics for {len(known_scheduler_stats)} pushes..."
"Already gathered statistics for %d pushes...", len(known_scheduler_stats)
)

to_date = datetime.utcnow() - relativedelta(days=3)
Expand Down
2 changes: 1 addition & 1 deletion scripts/test_scheduling_history_retriever.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,7 +158,7 @@ def retrieve_from_cache(push):
next_from_date = to_date

logger.info(
f"Retrieving pushes from {from_date} to {next_from_date}..."
"Retrieving pushes from %s to %s...", from_date, next_from_date
)

pushes = mozci.push.make_push_objects(
Expand Down