style: update ruff #416

Closed
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -10,7 +10,7 @@ repos:
- id: detect-aws-credentials
args: [ --allow-missing-credentials ]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.0 # ruff version
rev: v0.8.4 # ruff version
hooks:
- id: ruff-format
- id: ruff
38 changes: 32 additions & 6 deletions pyproject.toml
@@ -45,7 +45,7 @@ dynamic = ["version"]

[project.optional-dependencies]
tests = ["pytest", "pytest-cov", "mock", "pytest-asyncio", "deepdiff"]
dev = ["pre-commit>=3.7.1", "ruff==0.5.0"]
dev = ["pre-commit>=3.7.1", "ruff==0.8.4"]
notebooks = ["ipykernel", "jupyterlab"]
docs = [
"sphinx==6.1.3",
@@ -116,10 +116,14 @@ select = [
"RSE", # https://docs.astral.sh/ruff/rules/#flake8-raise-rse
"RET", # https://docs.astral.sh/ruff/rules/#flake8-return-ret
"SLF", # https://docs.astral.sh/ruff/rules/#flake8-self-slf
"SLOT", # https://docs.astral.sh/ruff/rules/#flake8-slots-slot
"SIM", # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim
"ARG", # https://docs.astral.sh/ruff/rules/#flake8-unused-arguments-arg
"PTH", # https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
"PGH", # https://docs.astral.sh/ruff/rules/#pygrep-hooks-pgh
"PLC", # https://docs.astral.sh/ruff/rules/#convention-c
"PLE", # https://docs.astral.sh/ruff/rules/#error-e_1
"TRY", # https://docs.astral.sh/ruff/rules/#tryceratops-try
"PERF", # https://docs.astral.sh/ruff/rules/#perflint-perf
"FURB", # https://docs.astral.sh/ruff/rules/#refurb-furb
"RUF", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf
@@ -138,13 +142,14 @@ fixable = [
"PT",
"RSE",
"SIM",
"PLC",
"PLE",
"TRY",
"PERF",
"FURB",
"RUF"
]
# ANN003 - missing-type-kwargs
# ANN101 - missing-type-self
# ANN102 - missing-type-cls
# D203 - one-blank-line-before-class
# D205 - blank-line-after-summary
# D206 - indent-with-spaces*
@@ -158,19 +163,22 @@ fixable = [
# E501 - line-too-long*
# W191 - tab-indentation*
# S321 - suspicious-ftp-lib-usage
# PLC0206 - dict-index-missing-items
# *ignored for compatibility with formatter
ignore = [
"ANN003", "ANN101", "ANN102",
"ANN003",
"D203", "D205", "D206", "D213", "D300", "D400", "D415",
"E111", "E114", "E117", "E501",
"W191",
"S321",
"PLC0206",
]

[tool.ruff.lint.per-file-ignores]
# ANN001 - missing-type-function-argument
# ANN2 - missing-return-type
# ANN102 - missing-type-cls
# D100 - undocumented-public-module
# D102 - undocumented-public-method
# S101 - assert
# B011 - assert-false
# D104 - undocumented-public-package
@@ -179,5 +187,23 @@ ignore = [
# ARG001 - unused-function-argument
# SLF001 - private-member-access
# N815 - mixed-case-variable-in-class-scope
"tests/*" = ["ANN001", "ANN2", "ANN102", "S101", "B011", "D100", "D104", "INP001", "SLF001", "ARG001"]
"tests/*" = [
"ANN001",
"ANN2",
"D100",
"D102",
"S101",
"B011",
"D100",
"D104",
"INP001",
"SLF001",
"ARG001"
]
"src/metakb/schemas/*" = ["ANN102", "N815"]

[tool.ruff.lint.flake8-annotations]
mypy-init-return = true

[tool.ruff.format]
docstring-code-format = true
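
For context on the new ``[tool.ruff.format]`` table: ``docstring-code-format = true`` makes the ruff formatter also format code examples embedded in docstrings, which is why the doctests in normalizers.py and query.py further down in this diff get re-wrapped. A minimal sketch of the effect, using a hypothetical function that is not from this repo:

import doctest


def add(a: int, b: int) -> int:
    """Add two integers.

    With docstring-code-format enabled, ruff format rewrites a doctest
    written as ``>>> add( 1,2 )`` into the normalized form below:

    >>> add(1, 2)
    3
    """
    return a + b


if __name__ == "__main__":
    doctest.testmod()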
8 changes: 4 additions & 4 deletions src/metakb/cli.py
@@ -171,13 +171,13 @@ def update_normalizers(
f"prohibited. Unset the environment variable "
f"{NORMALIZER_AWS_ENV_VARS[name]} to proceed."
)
_logger.error(msg)
_logger.exception(msg)
click.echo(msg)
success = False
continue
except (Exception, SystemExit) as e:
_logger.error(
"Encountered error while updating %s database: %s", name.value, e
except (Exception, SystemExit):
_logger.exception(
"Encountered error while updating %s database", name.value
)
click.echo(f"Failed to update {name.value} normalizer.")
success = False
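The cli.py hunk above shows the pattern repeated throughout this PR: ruff's TRY400 (error-instead-of-exception) flags ``logging.error`` calls inside ``except`` blocks, because ``logger.exception`` logs at ERROR level and appends the active traceback automatically, so interpolating the exception object by hand is redundant. A minimal self-contained sketch with hypothetical names, not from this repo:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def parse_count(raw: str) -> int:
    try:
        return int(raw)
    except ValueError:
        # ``logger.exception`` records the message at ERROR level and
        # appends the traceback of the active exception on its own, so
        # the old ``logger.error("...: %s", e)`` form is redundant.
        logger.exception("Could not parse count from %r", raw)
        return 0


parse_count("not-a-number")
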
6 changes: 3 additions & 3 deletions src/metakb/database.py
@@ -25,11 +25,11 @@ def _get_secret() -> str:

try:
get_secret_value_response = client.get_secret_value(SecretId=secret_name)
except ClientError as e:
except ClientError:
# For a list of exceptions thrown, see
# https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html
logger.error(e)
raise e
logger.exception("Boto client error while acquiring secrets")
raise
else:
return get_secret_value_response["SecretString"]

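database.py pairs that logging fix with the second half of the pattern: ``raise e`` becomes a bare ``raise`` (ruff TRY201, verbose-raise), since a bare ``raise`` re-raises the active exception with its original traceback and the name binding is no longer needed. A sketch assuming a stand-in exception class rather than the real botocore one:

import logging

logger = logging.getLogger(__name__)


class ClientError(Exception):
    """Stand-in for botocore.exceptions.ClientError."""


def get_secret() -> str:
    try:
        raise ClientError("access denied")
    except ClientError:
        logger.exception("Boto client error while acquiring secrets")
        # Bare ``raise`` re-raises the active exception unchanged;
        # ``except ClientError as e: raise e`` is flagged as TRY201.
        raise
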
4 changes: 2 additions & 2 deletions src/metakb/harvesters/base.py
@@ -75,7 +75,7 @@ def save_harvested_data_to_file(
try:
with (harvested_filepath).open("w+") as f:
json.dump(harvested_data.model_dump(), f, indent=2)
except Exception as e:
logger.error("Error creating %s harvester JSON: %s", src_name, e)
except Exception:
logger.exception("Error creating %s harvester JSON", src_name)
return False
return True
2 changes: 1 addition & 1 deletion src/metakb/load_data.py
@@ -489,7 +489,7 @@ def add_transformed_data(driver: Driver, data: dict) -> None:
for method in data.get("methods", []):
session.execute_write(_add_method, method, ids_in_stmts)

for obj_type in {"genes", "conditions"}:
for obj_type in ("genes", "conditions"):
for obj in data.get(obj_type, []):
session.execute_write(_add_gene_or_disease, obj, ids_in_stmts)

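The one-line load_data.py change swaps a set literal for a tuple in the ``for`` target: ruff's PLC0208 (iteration-over-set) prefers a sequence here, since a tuple literal is cheaper to build and iterates in a guaranteed order, while the set adds hashing overhead for no benefit. A minimal sketch with hypothetical data, not from this repo:

data = {"genes": ["BRAF"], "conditions": ["melanoma"], "methods": []}

# Iterating over a tuple rather than a set literal (PLC0208): same
# membership, deterministic order, no hashing at construction time.
for obj_type in ("genes", "conditions"):
    for obj in data.get(obj_type, []):
        print(obj_type, obj)
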
80 changes: 45 additions & 35 deletions src/metakb/normalizers.py
@@ -33,12 +33,12 @@
from variation.query import QueryHandler as VariationQueryHandler

__all__ = [
"ViccNormalizers",
"NORMALIZER_AWS_ENV_VARS",
"IllegalUpdateError",
"NormalizerName",
"ViccNormalizers",
"check_normalizers",
"IllegalUpdateError",
"update_normalizer",
"NORMALIZER_AWS_ENV_VARS",
]

_logger = logging.getLogger(__name__)
@@ -90,7 +90,9 @@ def __init__(self, db_url: str | None = None) -> None:
Note that gene concept lookups within the Variation Normalizer are resolved
using the Gene Normalizer instance, rather than creating a second sub-instance.

>>> id(norm.gene_query_handler) == id(norm.variation_normalizer.gnomad_vcf_to_protein_handler.gene_normalizer)
>>> id(norm.gene_query_handler) == id(
... norm.variation_normalizer.gnomad_vcf_to_protein_handler.gene_normalizer
... )
True

:param db_url: optional definition of shared normalizer database. Because the
@@ -127,14 +129,16 @@ async def normalize_variation(
)
if variation_norm_resp and variation_norm_resp.variation:
return variation_norm_resp.variation
except TokenRetrievalError as e:
_logger.error(e)
raise e
except Exception as e:
_logger.error(
"Variation Normalizer raised an exception using query %s: %s",
except TokenRetrievalError:
_logger.exception(
"Variation Normalizer encountered boto token retrieval error for query %s",
query,
)
raise
except Exception:
_logger.exception(
"Variation Normalizer raised an exception using query %s",
query,
e,
)
return None

@@ -173,14 +177,16 @@ def normalize_gene(

try:
gene_norm_resp = self.gene_query_handler.normalize(query_str)
except TokenRetrievalError as e:
_logger.error(e)
raise e
except Exception as e:
_logger.error(
"Gene Normalizer raised an exception using query %s: %s",
except TokenRetrievalError:
_logger.exception(
"Gene Normalizer encountered boto token retrieval error fetching query %s",
query_str,
)
raise
except Exception:
_logger.exception(
"Gene Normalizer raised an exception fetching query %s",
query_str,
e,
)
else:
if gene_norm_resp.match_type > highest_match:
@@ -223,14 +229,16 @@ def normalize_disease(

try:
disease_norm_resp = self.disease_query_handler.normalize(query)
except TokenRetrievalError as e:
_logger.error(e)
raise e
except Exception as e:
_logger.error(
"Disease Normalizer raised an exception using query %s: %s",
except TokenRetrievalError:
_logger.exception(
"Disease Normalizer encountered boto retrieval error while fetching term %s",
query,
)
raise
except Exception:
_logger.exception(
"Disease Normalizer raised an exception using query %s",
query,
e,
)
else:
if disease_norm_resp.match_type > highest_match:
@@ -273,14 +281,16 @@ def normalize_therapy(

try:
therapy_norm_resp = self.therapy_query_handler.normalize(query)
except TokenRetrievalError as e:
_logger.error(e)
raise e
except Exception as e:
_logger.error(
"Therapy Normalizer raised an exception using query %s: %s",
except TokenRetrievalError:
_logger.exception(
"Failed to retrieve from boto while fetching therapy query %s",
query,
)
raise
except Exception:
_logger.exception(
"Therapy Normalizer raised an exception using query %s",
query,
e,
)
else:
if therapy_norm_resp.match_type > highest_match:
@@ -424,9 +434,9 @@ def check_normalizers(
"Tables for %s normalizer appear to be unpopulated.", name.value
)
success = False
except Exception as e:
_logger.error(
"Encountered exception while checking %s normalizer: %s", name.value, e
except Exception:
_logger.exception(
"Encountered exception while checking %s normalizer", name.value
)
success = False
return success
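One more recurring autofix in normalizers.py worth calling out: the reshuffled ``__all__`` at the top of the file is RUF022 (unsorted-dunder-all), which sorts entries isort-style (uppercase constants, then CamelCase classes, then lowercase functions), matching the new order shown in the diff. A toy before/after with hypothetical names:

# Before: flagged by RUF022 (unsorted-dunder-all).
__all__ = ["zeta_helper", "AlphaClass", "BETA_CONSTANT"]

# After ``ruff check --fix``: isort-style order, uppercase first.
__all__ = ["BETA_CONSTANT", "AlphaClass", "zeta_helper"]
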
11 changes: 4 additions & 7 deletions src/metakb/query.py
@@ -116,7 +116,7 @@ def __init__(
>>> from metakb.normalizers import ViccNormalizers
>>> qh = QueryHandler(
... get_driver("bolt://localhost:7687", ("neo4j", "password")),
... ViccNormalizers("http://localhost:8000")
... ViccNormalizers("http://localhost:8000"),
... )

``default_page_limit`` sets the default max number of statements to include in
@@ -129,10 +129,7 @@

This value is overruled by an explicit ``limit`` parameter:

>>> response = await limited_qh.batch_search_statements(
... ["BRAF V600E"],
... limit=2
... )
>>> response = await limited_qh.batch_search_statements(["BRAF V600E"], limit=2)
>>> print(len(response.statement_ids))
2

@@ -491,8 +488,8 @@ def _get_nested_stmts(self, statement_nodes: list[Node]) -> list[dict]:
if s_id not in added_stmts:
try:
nested_stmt = self._get_nested_stmt(s)
except ValidationError as e:
logger.error("%s: %s", s_id, e)
except ValidationError:
logger.exception("Validation error while constructing %s", s_id)
else:
if nested_stmt:
nested_stmts.append(nested_stmt)