Skip to content

Commit

Permalink
chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.9.2 (#2087)
Browse files Browse the repository at this point in the history

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: anders-albert <[email protected]>
  • Loading branch information
renovate[bot] and doctrino authored Jan 25, 2025
1 parent 94421b4 commit 9afcbcf
Show file tree
Hide file tree
Showing 9 changed files with 19 additions and 20 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
---
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.6
rev: v0.9.2
hooks:
- id: ruff
args:
Expand Down
3 changes: 1 addition & 2 deletions cognite/client/data_classes/data_modeling/instances.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,8 +233,7 @@ def load(
view_tuple = tuple(view_id_str.split("/", 1))
if len(view_tuple) != 2:
warnings.warn(
f"Unknown type of view id: {view_id_str}, expected format <external_id>/<version>. "
"Skipping...",
f"Unknown type of view id: {view_id_str}, expected format <external_id>/<version>. Skipping...",
stacklevel=2,
)
continue
Expand Down
2 changes: 1 addition & 1 deletion cognite/client/data_classes/sequences.py
Original file line number Diff line number Diff line change
Expand Up @@ -663,7 +663,7 @@ def __init__(
col_length = len(columns)
if wrong_length := [r for r in rows if len(r.values) != col_length]:
raise ValueError(
f"Rows { [r.row_number for r in wrong_length] } have wrong number of values, expected {col_length}"
f"Rows {[r.row_number for r in wrong_length]} have wrong number of values, expected {col_length}"
)
self.rows = rows
self.columns: SequenceColumnList = columns
Expand Down
2 changes: 1 addition & 1 deletion cognite/client/data_classes/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -292,7 +292,7 @@ def dump(self, camel_case: bool = True) -> dict[str, Any]:
if self.run_time:
simulation["runTime" if camel_case else "run_time"] = self.run_time
if self.inputs:
simulation["inputs" if camel_case else "inputs"] = [item.dump(camel_case) for item in self.inputs]
simulation["inputs"] = [item.dump(camel_case) for item in self.inputs]

return {"simulation": simulation}

Expand Down
2 changes: 1 addition & 1 deletion cognite/client/utils/_text.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,4 +89,4 @@ def shorten(obj: Any, width: int = 20, placeholder: str = "...") -> str:
s = obj if isinstance(obj, str) else repr(obj)
if len(s) <= width:
return s
return f"{s[:width-n]}{placeholder}"
return f"{s[: width - n]}{placeholder}"
2 changes: 1 addition & 1 deletion scripts/custom_checks/docstr_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def parse_example_section(lines, ex_idx) -> tuple[bool, set[int]]:
def fix_single_file(path: Path) -> str | None:
was_fixed = []
dotted = path_to_importable(path)
full_text = path.read_text()
full_text = path.read_text(encoding="utf-8")
module = importlib.import_module(dotted)
for cls_name, cls in get_valid_members(module, dotted):
for method_name, ex_idx, docstr, lines in get_info_on_valid_methods(cls, full_text):
Expand Down
12 changes: 6 additions & 6 deletions tests/tests_integration/test_api/test_datapoint_subscriptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -248,14 +248,14 @@ def test_iterate_data_subscription_initial_call(
)
batch = next(subscription_changes)

assert (
len(batch.subscription_changes.added) > 0
), "The subscription used for testing datapoint subscriptions must have at least one time series"
assert len(batch.subscription_changes.added) > 0, (
"The subscription used for testing datapoint subscriptions must have at least one time series"
)

batch = next(subscription_changes)
assert (
len(batch.subscription_changes.added) == 0
), "There should be no more timeseries in the subsequent batches"
assert len(batch.subscription_changes.added) == 0, (
"There should be no more timeseries in the subsequent batches"
)

def test_iterate_data_subscription_changed_time_series(
self, cognite_client: CogniteClient, time_series_external_ids: list[str]
Expand Down
2 changes: 1 addition & 1 deletion tests/tests_integration/test_api/test_diagrams.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
)
from cognite.client.data_classes.data_modeling import NodeApply, NodeId, NodeOrEdgeData, Space, SpaceApply, ViewId

PNID_FILE_EXTERNAL_ID = "mypnid.pdf" ""
PNID_FILE_EXTERNAL_ID = "mypnid.pdf"
DIAGRAM_SPACE = "diagram_space"

CDM_SPACE = "cdf_cdm"
Expand Down
12 changes: 6 additions & 6 deletions tests/tests_unit/test_data_classes/test_data_models/test_ids.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,9 +59,9 @@ def test_load(
identifier = _load_identifier(ids, id_type)

assert identifier.as_dicts() == expected_dict
assert (
identifier.is_singleton() == expected_is_singleton
), f"Expected {expected_is_singleton} but got {identifier.is_singleton()}"
assert identifier.is_singleton() == expected_is_singleton, (
f"Expected {expected_is_singleton} but got {identifier.is_singleton()}"
)

@pytest.mark.parametrize(
"ids, expected_dict, expected_is_singleton",
Expand All @@ -76,6 +76,6 @@ def test_load_space_identifier(
identifier = _load_space_identifier(ids)

assert identifier.as_dicts() == expected_dict
assert (
identifier.is_singleton() == expected_is_singleton
), f"Expected {expected_is_singleton} but got {identifier.is_singleton()}"
assert identifier.is_singleton() == expected_is_singleton, (
f"Expected {expected_is_singleton} but got {identifier.is_singleton()}"
)

0 comments on commit 9afcbcf

Please sign in to comment.