enable most ruff lints

Mic92 committed Jan 5, 2025
1 parent e6919f6 commit 0148f28
Showing 24 changed files with 192 additions and 174 deletions.
2 changes: 1 addition & 1 deletion nixpkgs_merge_bot/__init__.py
@@ -55,7 +55,7 @@ def parse_args() -> Settings:
parser.add_argument(
"--database-folder",
type=str,
default="/tmp",
default=".",
help="Path where the nixpkgs-merge-bot database will be stored. Default to /tmp",
)
parser.add_argument(
Empty file.
52 changes: 27 additions & 25 deletions nixpkgs_merge_bot/commands/merge.py
@@ -1,15 +1,19 @@
import logging
from dataclasses import dataclass

from ..database import Database
from ..github.GitHubClient import GithubClient, GithubClientError, get_github_client
from ..github.Issue import IssueComment
from ..github.PullRequest import PullRequest
from ..merging_strategies.committer_pr import CommitterPR
from ..merging_strategies.maintainer_update import MaintainerUpdate
from ..settings import Settings
from ..webhook.http_response import HttpResponse
from ..webhook.utils.issue_response import issue_response
from nixpkgs_merge_bot.database import Database
from nixpkgs_merge_bot.github.github_client import (
GithubClient,
GithubClientError,
get_github_client,
)
from nixpkgs_merge_bot.github.issue import IssueComment
from nixpkgs_merge_bot.github.pull_request import PullRequest
from nixpkgs_merge_bot.merging_strategies.committer_pr import CommitterPR
from nixpkgs_merge_bot.merging_strategies.maintainer_update import MaintainerUpdate
from nixpkgs_merge_bot.settings import Settings
from nixpkgs_merge_bot.webhook.http_response import HttpResponse
from nixpkgs_merge_bot.webhook.utils.issue_response import issue_response

log = logging.getLogger(__name__)

@@ -51,19 +55,18 @@ def process_pull_request_status(
if check_run["status"] == "in_progress" or check_run["status"] == "queued":
log.debug(f"{pull_request.number}: Check run is in progress or queued")
check_run_result.pending = True
else:
# if the state is not success or skipped we will decline the merge. The state can be
# Can be one of: success, failure, neutral, cancelled, timed_out, action_required, stale, null, skipped, startup_failure
if not (
check_run["conclusion"] == "success"
or check_run["conclusion"] == "skipped"
or check_run["conclusion"] == "neutral"
):
check_run_result.success = False
check_run_result.failed = True
message = f"Check suite {check_run['app']['name']} has the state: {check_run['conclusion']}"
check_run_result.messages.append(message)
log.info(f"{pull_request.number}: {message}")
# if the state is not success or skipped we will decline the merge. The state can be
# Can be one of: success, failure, neutral, cancelled, timed_out, action_required, stale, null, skipped, startup_failure
elif not (
check_run["conclusion"] == "success"
or check_run["conclusion"] == "skipped"
or check_run["conclusion"] == "neutral"
):
check_run_result.success = False
check_run_result.failed = True
message = f"Check suite {check_run['app']['name']} has the state: {check_run['conclusion']}"
check_run_result.messages.append(message)
log.info(f"{pull_request.number}: {message}")

return check_run_result

@@ -116,7 +119,6 @@ def merge_command(issue_comment: IssueComment, settings: Settings) -> HttpRespon
client.create_issue_reaction(
issue_comment.repo_owner,
issue_comment.repo_name,
issue_comment.issue_number,
issue_comment.comment_id,
"rocket",
issue_comment.comment_type,
Expand All @@ -128,7 +130,7 @@ def merge_command(issue_comment: IssueComment, settings: Settings) -> HttpRespon
db = Database(settings)
db.add(
pull_request.head_sha,
f"{str(issue_comment.issue_number)};{issue_comment.commenter_id};{issue_comment.commenter_login};{issue_comment.comment_id}",
f"{issue_comment.issue_number!s};{issue_comment.commenter_id};{issue_comment.commenter_login};{issue_comment.comment_id}",
)
msg = "One or more checks are still pending, I will retry this after they complete. Darwin checks can be ignored."
log.info(f"{issue_comment.issue_number}: {msg}")
Expand All @@ -139,7 +141,7 @@ def merge_command(issue_comment: IssueComment, settings: Settings) -> HttpRespon
msg,
)
return issue_response("merge-postponed")
elif check_suite_result.success:
if check_suite_result.success:
try:
commenter_info = client.get_user_info(
issue_comment.commenter_login
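The final hunk above replaces `elif check_suite_result.success:` with a plain `if`: once the previous branch ends in `return issue_response("merge-postponed")`, the `elif` keyword is redundant. That matches ruff's RET505 (superfluous-else-return) rule; the earlier collapse of the nested `else:`/`if` around the check-run conclusion into an `elif` chain is a related cleanup. A minimal sketch of the pattern, using a made-up function that is not part of the bot:

```python
def classify(n: int) -> str:
    # Flagged by RET505: the elif/else is unnecessary because the branch
    # above it already returns.
    if n < 0:
        return "negative"
    elif n == 0:
        return "zero"
    else:
        return "positive"


def classify_flat(n: int) -> str:
    # Equivalent logic after the fix: each branch simply returns.
    if n < 0:
        return "negative"
    if n == 0:
        return "zero"
    return "positive"
```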
3 changes: 1 addition & 2 deletions nixpkgs_merge_bot/custom_logger.py
@@ -62,8 +62,7 @@ def get_caller() -> str:
if caller_frame is None:
return "unknown"
frame_info = inspect.getframeinfo(caller_frame)
ret = f"{frame_info.filename}:{frame_info.lineno}::{frame_info.function}"
return ret
return f"{frame_info.filename}:{frame_info.lineno}::{frame_info.function}"


def setup_logging(level: Any, root_log_name: str = __name__.split(".")[0]) -> None:
22 changes: 8 additions & 14 deletions nixpkgs_merge_bot/database.py
@@ -5,29 +5,23 @@

class Database:
def __init__(self, settings: Settings) -> None:
self.datbase_store_path = Path(settings.database_path)
self.datbase_store_path.mkdir(parents=True, exist_ok=True)
self.db_store_path = Path(settings.database_path)
self.db_store_path.mkdir(parents=True, exist_ok=True)

def add(self, key: str, value: str) -> None:
path = self.datbase_store_path / key
path = self.db_store_path / key
path.mkdir(parents=True, exist_ok=True)
(path / value).touch()

def delete(self, key: str, value: str) -> None:
if value is None:
path = self.datbase_store_path / key
if path.exists:
path.rmdir()
else:
path = self.datbase_store_path / key / value
if path.exists():
path.unlink()
path = self.db_store_path / key / value
if path.exists():
path.unlink()

def get(self, key: str) -> list[str]:
path = self.datbase_store_path / key
path = self.db_store_path / key
values: list[str] = []
if not path.exists():
return values
for child in path.iterdir():
values.append(child.name)
values.extend(child.name for child in path.iterdir())
return values
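Two details in the database.py hunk are worth calling out. The rewritten get() swaps an append loop for values.extend(...) over a generator, the shape ruff's PERF401 rule nudges toward, and the deleted branch in delete() tested `if path.exists:` without calling the method, a check that is always truthy. A small self-contained illustration; the temporary directory and file names are made up:

```python
from pathlib import Path
from tempfile import TemporaryDirectory

with TemporaryDirectory() as tmp:
    path = Path(tmp)
    (path / "a").touch()
    (path / "b").touch()

    # Before: build the list with an explicit append loop.
    names: list[str] = []
    for child in path.iterdir():
        names.append(child.name)

    # After: extend with a generator, as the new get() does (PERF401 style).
    values: list[str] = []
    values.extend(child.name for child in path.iterdir())
    assert sorted(names) == sorted(values) == ["a", "b"]

    # Pitfall from the removed delete() branch: a bound method is always
    # truthy, so `if path.exists:` never actually checked anything.
    assert path.exists      # truthy whether or not the path exists
    assert path.exists()    # the real check
```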
Empty file.
64 changes: 30 additions & 34 deletions nixpkgs_merge_bot/github/GitHubClient.py → nixpkgs_merge_bot/github/github_client.py
100644 → 100755
@@ -2,18 +2,20 @@

import argparse
import base64
import contextlib
import http.client
import json
import logging
import os
import shutil
import subprocess
import time
import urllib.parse
import urllib.request
from pathlib import Path
from typing import Any

from ..settings import Settings
from nixpkgs_merge_bot.settings import Settings

log = logging.getLogger(__name__)
STAGING = os.environ.get("STAGING", None)
Expand All @@ -40,8 +42,7 @@ def build_jwt_payload(app_id: int) -> dict[str, Any]:
jwt_exp_delta = 600
now = int(time.time())
iat = now - jwt_iat_drift
jwt_payload = {"iat": iat, "exp": iat + jwt_exp_delta, "iss": str(app_id)}
return jwt_payload
return {"iat": iat, "exp": iat + jwt_exp_delta, "iss": str(app_id)}


class HttpResponse:
Expand All @@ -52,8 +53,8 @@ def json(self) -> Any:
return json.load(self.raw)

def save(self, path: str) -> None:
with open(path, "wb") as f:
f.write(self.raw.read())
with Path(path).open("wb") as f:
shutil.copyfileobj(self.raw, f)

def headers(self) -> http.client.HTTPMessage:
return self.raw.headers
@@ -83,8 +84,10 @@ def _request(
path: str,
method: str,
data: dict[str, Any] | None = None,
headers: dict[str, str] = {},
headers: dict[str, str] | None = None,
) -> HttpResponse:
if headers is None:
headers = {}
url = urllib.parse.urljoin("https://api.github.com/", path)
headers = headers.copy()
headers = {
Expand All @@ -100,16 +103,15 @@ def _request(
if data:
body = json.dumps(data).encode("ascii")

req = urllib.request.Request(url, headers=headers, method=method, data=body)
assert url.startswith("https://"), f"Invalid URL: {url}"
req = urllib.request.Request(url, headers=headers, method=method, data=body) # noqa: S310
try:
resp = urllib.request.urlopen(req)
resp = urllib.request.urlopen(req) # noqa: S310

except urllib.request.HTTPError as e:
resp_body = ""
try:
with contextlib.suppress(Exception):
resp_body = e.fp.read().decode("utf-8", "replace")
except Exception:
pass
raise GithubClientError(e.code, e.reason, url, resp_body) from e
return HttpResponse(resp)
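Swapping the try/except Exception: pass around the error-body read for contextlib.suppress(Exception) is the rewrite ruff's SIM105 rule suggests: the behaviour is the same, but the intent to ignore the failure is stated in one line. A tiny sketch of the two forms; the byte string and decode step are only for illustration:

```python
import contextlib

raw = b"backend error: \xff"

# Before: swallow a best-effort decode failure with an empty except block.
detail = ""
try:
    detail = raw.decode("utf-8")
except Exception:
    pass

# After: contextlib.suppress states the same intent explicitly (SIM105).
detail2 = ""
with contextlib.suppress(Exception):
    detail2 = raw.decode("utf-8")

assert detail == detail2 == ""
```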

@@ -180,7 +182,7 @@ def get_request_file_content(
def get_issue(self, owner: str, repo: str, issue_number: int) -> HttpResponse:
return self.get(f"/repos/{owner}/{repo}/issues/{issue_number}")

def get_team_members(self, owner: str, team_slug: str):
def get_team_members(self, owner: str, team_slug: str) -> list[dict[str, Any]]:
per_page = 100
current_page = 1
result = []
Expand All @@ -197,14 +199,12 @@ def get_team_members(self, owner: str, team_slug: str):
def create_issue_comment(
self, owner: str, repo: str, issue_number: int, body: str
) -> HttpResponse | None:
global STAGING
if STAGING:
log.debug("Staging running")
return None
else:
return self.post(
f"/repos/{owner}/{repo}/issues/{issue_number}/comments", {"body": body}
)
return self.post(
f"/repos/{owner}/{repo}/issues/{issue_number}/comments", {"body": body}
)

def get_user_info(self, username: str) -> HttpResponse:
return self.get(f"/users/{username}")
Expand All @@ -213,31 +213,26 @@ def create_issue_reaction(
self,
owner: str,
repo: str,
issue_number: int,
comment_id: int,
reaction: str,
issue_type: str = "issue_comment",
) -> HttpResponse | None:
global STAGING
if STAGING:
log.debug("Staging, not creating reaction")
return None
else:
if issue_type == "review":
return self.post(
f"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions",
{"content": reaction},
)
else:
return self.post(
f"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions",
{"content": reaction},
)
if issue_type == "review":
return self.post(
f"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions",
{"content": reaction},
)
return self.post(
f"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions",
{"content": reaction},
)

def merge_pull_request(
self, owner: str, repo: str, pr_number: int, sha: str, commenter
self, owner: str, repo: str, pr_number: int, sha: str, commenter: dict[str, Any]
) -> HttpResponse | None:
global STAGING
if STAGING:
log.debug(f"pull request {pr_number}: Staging, not merging")
return None
@@ -281,7 +276,8 @@ def request_access_token(app_login: str, app_id: int, app_private_key: Path) ->
log.error(
f"Installation not found for {app_login} and {app_id}, this is case sensitive!"
)
raise ValueError("Access token URL not found")
msg = "Access token URL not found"
raise ValueError(msg)

resp = client.create_installation_access_token(installation_id)
return resp.json()["token"]
Expand All @@ -291,7 +287,7 @@ def request_access_token(app_login: str, app_id: int, app_private_key: Path) ->


def get_github_client(settings: Settings) -> GithubClient:
global CACHED_CLIENT
global CACHED_CLIENT # noqa: PLW0603
if CACHED_CLIENT and CACHED_CLIENT.token_age + 300 > time.time():
return CACHED_CLIENT
token = request_access_token(
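The _request signature change above, from `headers: dict[str, str] = {}` to `headers: dict[str, str] | None = None` plus an explicit `if headers is None:` guard, is the standard fix for ruff's B006 mutable-argument-default rule: a `{}` default is created once at definition time and shared by every call. A minimal sketch of the failure mode, using a hypothetical helper rather than the client code:

```python
def add_header_buggy(name: str, value: str, headers: dict[str, str] = {}) -> dict[str, str]:  # noqa: B006
    # The default dict is a single shared object, so state leaks across calls.
    headers[name] = value
    return headers


def add_header_fixed(name: str, value: str, headers: dict[str, str] | None = None) -> dict[str, str]:
    # A None sentinel gives every call its own fresh dict.
    if headers is None:
        headers = {}
    headers[name] = value
    return headers


first = add_header_buggy("Accept", "application/json")
second = add_header_buggy("Authorization", "token example")
# Both calls returned the same shared dict, now holding both headers.
assert first is second
assert second == {"Accept": "application/json", "Authorization": "token example"}

assert add_header_fixed("X-One", "1") == {"X-One": "1"}
assert add_header_fixed("X-Two", "2") == {"X-Two": "2"}  # no leak from the previous call
```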
nixpkgs_merge_bot/github/Issue.py → nixpkgs_merge_bot/github/issue.py
@@ -32,10 +32,7 @@ def from_issue_comment_json(body: dict[str, Any]) -> "IssueComment":
is_pull_request = False

try:
if body["action"] is not None:
action = body["action"]
else:
action = "created"
action = body["action"] if body["action"] is not None else "created"
except KeyError:
action = "created"

Expand All @@ -58,7 +55,7 @@ def from_issue_comment_json(body: dict[str, Any]) -> "IssueComment":
except KeyError as e:
log.debug(e)
log.debug(body)
raise e
raise

@staticmethod
def from_review_comment_json(body: dict[str, Any]) -> "IssueComment":
Expand All @@ -80,7 +77,7 @@ def from_review_comment_json(body: dict[str, Any]) -> "IssueComment":
except KeyError as e:
log.debug(e)
log.debug(body)
raise e
raise

@staticmethod
def from_review_json(body: dict[str, Any]) -> "IssueComment":
Expand All @@ -102,7 +99,7 @@ def from_review_json(body: dict[str, Any]) -> "IssueComment":
except KeyError as e:
log.debug(e)
log.debug(body)
raise e
raise

def __str__(self) -> str:
return f"{self.issue_number}: Pull Request: {self.title} by {self.commenter_login}"
nixpkgs_merge_bot/github/PullRequest.py → nixpkgs_merge_bot/github/pull_request.py
@@ -36,7 +36,7 @@ def from_json(body: dict[str, Any]) -> "PullRequest":
except KeyError as e:
log.debug(e)
log.debug(body)
raise e
raise

def __str__(self) -> str:
return f"Pull Request: {self.title} by {self.user_login} with head sha {self.head_sha} "
Empty file.
13 changes: 5 additions & 8 deletions nixpkgs_merge_bot/merging_strategies/committer_pr.py
@@ -1,9 +1,10 @@
import logging
from pathlib import Path

from ..github.Issue import IssueComment
from ..github.PullRequest import PullRequest
from ..nix.nix_utils import get_package_maintainers, is_maintainer
from nixpkgs_merge_bot.github.issue import IssueComment
from nixpkgs_merge_bot.github.pull_request import PullRequest
from nixpkgs_merge_bot.nix.nix_utils import get_package_maintainers, is_maintainer

from .merging_strategy import MergingStrategyTemplate

log = logging.getLogger(__name__)
Expand All @@ -13,11 +14,7 @@ class CommitterPR(MergingStrategyTemplate):
def run(
self, pull_request: PullRequest, issue_comment: IssueComment
) -> tuple[bool, list[str]]:
# Analyze the pull request here
# This is just a placeholder implementation
result, decline_reasons = self.run_technical_limits_check(
pull_request, issue_comment.commenter_id
)
result, decline_reasons = self.run_technical_limits_check(pull_request)
if not result:
return result, decline_reasons
