Skip to content

Commit

Permalink
MAINT: update ruff (config) + add mypy (#46)
Browse files Browse the repository at this point in the history
  • Loading branch information
theroggy authored Dec 20, 2024
1 parent 184d88b commit 0b33c36
Show file tree
Hide file tree
Showing 13 changed files with 64 additions and 100 deletions.
11 changes: 9 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,14 @@ ci:

repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.1.13"
rev: "v0.8.2"
hooks:
# Format the code
- id: ruff-format
- id: ruff
# Lint the code
- id: ruff
# args: [ --fix ]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: "v1.8.0"
hooks:
- id: mypy
9 changes: 6 additions & 3 deletions benchmarks_IO/benchmarks_pyogrio.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def _get_version() -> str:

class set_env_variables(object):
def __init__(self, env_variables_to_set: dict):
self.env_variables_backup = {}
self.env_variables_backup: dict[str, str] = {}
self.env_variables_to_set = env_variables_to_set

def __enter__(self):
Expand Down Expand Up @@ -63,7 +63,10 @@ def __exit__(self, type, value, traceback):
def write_dataframe(tmp_dir: Path) -> List[RunResult]:
# Init
results = []
input_path, _, _ = testdata.TestFile.AGRIPRC_2018.get_file(tmp_dir)
(
input_path,
_,
) = testdata.TestFile.AGRIPRC_2018.get_file(tmp_dir)

# Go!
# Read input files
Expand All @@ -84,7 +87,7 @@ def write_dataframe(tmp_dir: Path) -> List[RunResult]:
sqlite_pragma_combinations_tmp = []
for lengths in range(0, len(sqlite_possible_pragmas) + 1):
for subset in itertools.combinations(sqlite_possible_pragmas, lengths):
sqlite_pragma_combinations_tmp.append(subset)
sqlite_pragma_combinations_tmp.append(list(subset))

# Now additionally add some different values for the cache_size pragma
sqlite_caches_sizes = {
Expand Down
4 changes: 2 additions & 2 deletions benchmarks_vector_ops/benchmarks_dask_geopandas.py
Original file line number Diff line number Diff line change
Expand Up @@ -247,8 +247,8 @@ def dissolve_groupby(tmp_dir: Path) -> RunResult:

def join_by_location_intersects(tmp_dir: Path) -> RunResult:
# Init
input1_path = testdata.TestFile.AGRIPRC_2018.get_file(tmp_dir)
input2_path = testdata.TestFile.AGRIPRC_2019.get_file(tmp_dir)
input1_path, _ = testdata.TestFile.AGRIPRC_2018.get_file(tmp_dir)
input2_path, _ = testdata.TestFile.AGRIPRC_2019.get_file(tmp_dir)

### Go! ###
# Read input files
Expand Down
Empty file.
6 changes: 3 additions & 3 deletions benchmarks_zonalstats/benchmarks_exactextract.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
import rasterio

from benchmarker import RunResult
from benchmarks_zonalstats import _common as common
from benchmarks_zonalstats import _common
import testdata

logger = logging.getLogger(__name__)
Expand All @@ -33,7 +33,7 @@ def zonalstats_1band(tmp_dir: Path) -> List[RunResult]:
raster_path, _ = testdata.TestFile.S2_NDVI_2020.get_file(tmp_dir)

# Prepare a sample of the parcels, otherwise too slow
nb_poly = common.nb_polygons_for_test
nb_poly = _common.nb_polygons_for_test
vector_gdf = gpd.read_file(vector_path, rows=slice(0, nb_poly))
vector_tmp_path = tmp_dir / "vector_input.gpkg"
vector_gdf.to_file(vector_tmp_path)
Expand Down Expand Up @@ -97,7 +97,7 @@ def zonalstats_3bands(tmp_dir: Path) -> List[RunResult]:
raster_path, _ = testdata.TestFile.S2_NDVI_2020.get_file(tmp_dir)

# Prepare a sample of the parcels, otherwise too slow
nb_poly = common.nb_polygons_for_test
nb_poly = _common.nb_polygons_for_test
vector_gdf = gpd.read_file(vector_path, rows=slice(0, nb_poly))
vector_tmp_path = tmp_dir / "vector_input.gpkg"
vector_gdf.to_file(vector_tmp_path)
Expand Down
4 changes: 2 additions & 2 deletions benchmarks_zonalstats/benchmarks_geowombat.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
import geowombat as gw

from benchmarker import RunResult
from benchmarks_zonalstats import _common as common
from benchmarks_zonalstats import _common
import testdata

logger = logging.getLogger(__name__)
Expand All @@ -32,7 +32,7 @@ def zonalstats_1band(tmp_dir: Path) -> List[RunResult]:
raster_path, _ = testdata.TestFile.S2_NDVI_2020.get_file(tmp_dir)

# Prepare a sample of the parcels, otherwise too slow
nb_poly = common.nb_polygons_for_test
nb_poly = _common.nb_polygons_for_test
vector_gdf = gpd.read_file(vector_path, rows=slice(0, nb_poly))
vector_tmp_path = tmp_dir / "vector_input.gpkg"
vector_gdf.to_file(vector_tmp_path)
Expand Down
4 changes: 2 additions & 2 deletions benchmarks_zonalstats/benchmarks_pygeoprocessing.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
import pygeoprocessing.geoprocessing

from benchmarker import RunResult
from benchmarks_zonalstats import _common as common
from benchmarks_zonalstats import _common
import testdata

logger = logging.getLogger(__name__)
Expand All @@ -32,7 +32,7 @@ def zonalstats_1band(tmp_dir: Path) -> List[RunResult]:
raster_path, _ = testdata.TestFile.S2_NDVI_2020.get_file(tmp_dir)

# Prepare a sample of the parcels, otherwise too slow
nb_poly = common.nb_polygons_for_test
nb_poly = _common.nb_polygons_for_test
vector_gdf = gpd.read_file(vector_path, rows=slice(0, nb_poly))
vector_tmp_path = tmp_dir / "vector_input.gpkg"
vector_gdf.to_file(vector_tmp_path)
Expand Down
5 changes: 3 additions & 2 deletions benchmarks_zonalstats/benchmarks_pyjeo.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""
Module to benchmark zonalstats.
"""

import os
from datetime import datetime
import logging
Expand All @@ -12,7 +13,7 @@
import pyjeo as pj

from benchmarker import RunResult
from benchmarks_zonalstats import _common as common
from benchmarks_zonalstats import _common
import testdata

logger = logging.getLogger(__name__)
Expand All @@ -37,7 +38,7 @@ def zonalstats_1band(tmp_dir: Path) -> List[RunResult]:
raster_path, _ = testdata.TestFile.S2_NDVI_2020.get_file(tmp_dir)

# Prepare a sample of the parcels, otherwise too slow
nb_poly = common.nb_polygons_for_test
nb_poly = _common.nb_polygons_for_test
vector_gdf = gpd.read_file(vector_path, rows=slice(0, nb_poly))
vector_tmp_path = tmp_dir / "vector_input.gpkg"
vector_gdf.to_file(vector_tmp_path)
Expand Down
4 changes: 2 additions & 2 deletions benchmarks_zonalstats/benchmarks_pyqgis.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
import qgis.analysis # type: ignore

from benchmarker import RunResult
from benchmarks_zonalstats import _common as common
from benchmarks_zonalstats import _common
import testdata

logger = logging.getLogger(__name__)
Expand All @@ -34,7 +34,7 @@ def zonalstats_1band(tmp_dir: Path) -> List[RunResult]:
raster_path, _ = testdata.TestFile.S2_NDVI_2020.get_file(tmp_dir)

# Prepare a sample of the parcels, otherwise too slow
nb_poly = common.nb_polygons_for_test
nb_poly = _common.nb_polygons_for_test
vector_gdf = gpd.read_file(vector_path, rows=slice(0, nb_poly))
vector_tmp_path = tmp_dir / "vector_input.gpkg"
vector_gdf.to_file(vector_tmp_path)
Expand Down
4 changes: 2 additions & 2 deletions benchmarks_zonalstats/benchmarks_rasterstats.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
import rasterstats

from benchmarker import RunResult
from benchmarks_zonalstats import _common as common
from benchmarks_zonalstats import _common
import testdata

logger = logging.getLogger(__name__)
Expand All @@ -32,7 +32,7 @@ def zonalstats_1band(tmp_dir: Path) -> List[RunResult]:
raster_path, _ = testdata.TestFile.S2_NDVI_2020.get_file(tmp_dir)

# Prepare a sample of the parcels, otherwise too slow
nb_poly = common.nb_polygons_for_test
nb_poly = _common.nb_polygons_for_test
vector_gdf = gpd.read_file(vector_path, rows=slice(0, nb_poly))
vector_tmp_path = tmp_dir / "vector_input.gpkg"
vector_gdf.to_file(vector_tmp_path)
Expand Down
99 changes: 30 additions & 69 deletions project.toml
Original file line number Diff line number Diff line change
@@ -1,77 +1,60 @@

[tool.ruff]
line-length = 88
target-version = "py39"
extend-exclude = ["docs/*", "local_ignore/*"]

[tool.ruff.lint]
select = [
# pyflakes
"F",
# pycodestyle
"E",
"W",
# flake8-2020
"YTT",
# pyupgrade
"UP",
# flake8-bugbear
"B",
# flake8-quotes
"Q",
# flake8-debugger
"T10",
# flake8-gettext
"INT",
# flake8-simplify
# "SIM",
# pylint
"PLC",
"PLE",
"PLR",
"PLW",
# misc lints
"PIE",
# flake8-pyi
"PYI",
# tidy imports
"TID",
# implicit string concatenation
"ISC",
# type-checking imports
"TCH",
# comprehensions
"C4",
# pygrep-hooks
"PGH",
# Ruff-specific rules
"RUF",
# isort
"I",
# pydocstyle
"D",
]
target-version = "py38"
ignore = [ # space before : (needed for how black formats slicing)
# "E203", # not yet implemented

ignore = [
### Intentionally disabled
# module level import not at top of file
"E402",
# do not assign a lambda expression, use a def
"E731",
# line break before binary operator
# "W503", # not yet implemented
# line break after binary operator
# "W504", # not yet implemented
# controversial
# mutable-argument-default
"B006",
# controversial
# unused-loop-control-variable
"B007",
# controversial
"B008",
# setattr is used to side-step mypy
# get-attr-with-constant
"B009",
# getattr is used to side-step mypy
"B010",
# tests use assert False
"B011",
# tests use comparisons but not their returned value
"B015",
# false positives
"B019",
# Loop control variable overrides iterable it iterates
"B020",
# Function definition does not bind loop variable
"B023",
# Functions defined inside a loop must not use variables redefined in the loop
# "B301", # not yet implemented
# Only works with python >=3.10
"B905",
# dict literals
"C408",
# Too many arguments to function call
"PLR0913",
# Too many returns
Expand All @@ -80,42 +63,20 @@ ignore = [ # space before : (needed for how black formats slicing)
"PLR0912",
# Too many statements
"PLR0915",
# Magic number
"PLR2004",
# Redefined loop name
"PLW2901",
# Global statements are discouraged
"PLW0603",
# Docstrings should not be included in stubs
"PYI021",
# No builtin `eval()` allowed
"PGH001",
# compare-to-empty-string
"PLC1901",
# Use typing_extensions.TypeAlias for type aliases
# "PYI026", # not yet implemented
# Use "collections.abc.*" instead of "typing.*" (PEP 585 syntax)
# "PYI027", # not yet implemented
# while int | float can be shortened to float, the former is more explicit
# "PYI041", # not yet implemented

# Additional checks that don't pass yet
# Useless statement
"B018",
# Within an except clause, raise exceptions with ...
"B904",
# Magic number
"PLR2004",
# Consider `elif` instead of `else` then `if` to remove indentation level
"PLR5501",
# ambiguous-unicode-character-string
"RUF001",
# ambiguous-unicode-character-docstring
"RUF002",
# ambiguous-unicode-character-comment
"RUF003",
# collection-literal-concatenation
"RUF005",
# pairwise-over-zipped (>=PY310 only)
"RUF007",
# explicit-f-string-type-conversion
"RUF010",
]

[tool.ruff.lint.per-file-ignores]
"tests/*" = ["D"]

[tool.ruff.lint.pydocstyle]
convention = "google"
7 changes: 0 additions & 7 deletions reporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

import ast
import math
import os
from pathlib import Path
import shutil
import tempfile
Expand Down Expand Up @@ -119,7 +118,6 @@ def save_chart(
yscale: Optional[Literal["linear", "log", "symlog", "logit"]] = None,
y_value_formatter: Optional[str] = None,
print_labels_on_points: bool = False,
open_output_file: bool = False,
size: Tuple[float, float] = (8, 4),
plot_kind: Literal[
"line",
Expand Down Expand Up @@ -151,7 +149,6 @@ def save_chart(
- {0:.2f} for a float with two decimals.
Defaults to None.
print_labels_on_points (bool, optional): _description_. Defaults to False.
open_output_file (bool, optional): _description_. Defaults to False.
size (Tuple[float, float], optional): _description_. Defaults to (8, 4).
plot_kind (str, optional): _description_. Defaults to "line".
gridlines (str, optional): where to draw grid lines:
Expand Down Expand Up @@ -268,10 +265,6 @@ def save_chart(

plt.close(fig)

# Open if wanted
if open_output_file is True:
os.startfile(output_path)


if __name__ == "__main__":
# results_dir = Path(__file__).resolve().parent / "results_vector_ops"
Expand Down
Loading

0 comments on commit 0b33c36

Please sign in to comment.