refactor: clean up, standardize _exceptions.py
dangotbanned committed Jan 17, 2025
1 parent 5975a8b commit 7fd1f4d
Showing 3 changed files with 45 additions and 28 deletions.
2 changes: 1 addition & 1 deletion altair/datasets/_cache.py
@@ -197,7 +197,7 @@ def __getitem__(self, key: _Dataset, /) -> Metadata:
     def url(self, name: _Dataset, /) -> str:
         if meta := self.get(name, None):
             if meta["suffix"] == ".parquet" and not find_spec("vegafusion"):
-                raise AltairDatasetsError.url_parquet(meta)
+                raise AltairDatasetsError.from_url(meta)
             return meta["url"]
 
         if name in get_args(Dataset):
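The practical effect of this change: asking for the URL of a `.parquet` dataset without `vegafusion` installed now raises an `AltairDatasetsError` built by `from_url(meta)` instead of the old `url_parquet(meta)`. A minimal sketch of code that would hit this path, assuming a parquet-backed dataset name (the name below is hypothetical):

    from altair.datasets import url
    from altair.datasets._exceptions import AltairDatasetsError

    try:
        # Hypothetical parquet-backed dataset; any ".parquet" entry takes this branch.
        print(url("flights_3m"))
    except AltairDatasetsError as exc:
        # Without `vegafusion`, the message built by from_url() explains the
        # requirement and links to https://github.com/vega/vega/issues/3961
        print(exc)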
63 changes: 40 additions & 23 deletions altair/datasets/_exceptions.py
@@ -10,14 +10,20 @@
 
 
 class AltairDatasetsError(Exception):
-    # TODO: Rename, try to reduce verbosity of message, link to vegafusion?
     @classmethod
-    def url_parquet(cls, meta: Metadata, /) -> AltairDatasetsError:
-        name = meta["file_name"]
-        msg = (
-            f"Currently unable to load {name!r} via url, as '.parquet' datasets require `vegafusion`.\n"
-            "See upstream issue for details: https://github.com/vega/vega/issues/3961"
-        )
+    def from_url(cls, meta: Metadata, /) -> AltairDatasetsError:
+        if meta["suffix"] == ".parquet":
+            msg = (
+                f"{_failed_url(meta)}"
+                f"{meta['suffix']!r} datasets require `vegafusion`.\n"
+                "See upstream issue for details: https://github.com/vega/vega/issues/3961"
+            )
+        else:
+            msg = (
+                f"{cls.from_url.__qualname__}() called for "
+                f"unimplemented extension: {meta['suffix']}\n\n{meta!r}"
+            )
+            raise NotImplementedError(msg)
         return cls(msg)
 
     @classmethod
@@ -43,30 +49,41 @@ def module_not_found(
         return ModuleNotFoundError(msg, name=missing)
 
 
-# TODO: Give more direct help (e.g. url("7zip"))
-def image(meta: Metadata):
-    name = meta["file_name"]
-    ext = meta["suffix"]
-    msg = (
-        f"Unable to load {name!r} as tabular data.\n"
-        f"{ext!r} datasets are only compatible with `url(...)` or `Loader.url(...)`."
-    )
+def image(meta: Metadata, /) -> AltairDatasetsError:
+    msg = f"{_failed_tabular(meta)}\n{_suggest_url(meta)}"
     return AltairDatasetsError(msg)
 
 
-# TODO: Pass in `meta`
-def geospatial(backend_name: str) -> NotImplementedError:
-    msg = _suggest_supported(
+def geospatial(meta: Metadata, backend_name: str) -> NotImplementedError:
+    msg = (
+        f"{_failed_tabular(meta)}"
         f"Geospatial data is not supported natively by {backend_name!r}."
+        f"{_suggest_url(meta, 'polars')}"
     )
     return NotImplementedError(msg)
 
 
-# TODO: Pass in `meta`
-def non_tabular_json(backend_name: str) -> NotImplementedError:
-    msg = _suggest_supported(f"Non-tabular json is not supported {backend_name!r}.")
+def non_tabular_json(meta: Metadata, backend_name: str) -> NotImplementedError:
+    msg = (
+        f"{_failed_tabular(meta)}"
+        f"Non-tabular json is not supported natively by {backend_name!r}."
+        f"{_suggest_url(meta, 'polars')}"
+    )
     return NotImplementedError(msg)
 
 
-def _suggest_supported(msg: str) -> str:
-    return f"{msg}\nTry installing `polars` or using `Loader.url(...)` instead."
+def _failed_url(meta: Metadata, /) -> str:
+    return f"Unable to load {meta['file_name']!r} via url.\n"
+
+
+def _failed_tabular(meta: Metadata, /) -> str:
+    return f"Unable to load {meta['file_name']!r} as tabular data.\n"
+
+
+def _suggest_url(meta: Metadata, install_other: str | None = None) -> str:
+    other = f" installing `{install_other}` or" if install_other else ""
+    return (
+        f"\n\nInstead, try{other}:\n\n"
+        "    from altair.datasets import url\n"
+        f"    url({meta['dataset_name']!r})"
+    )
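Together, `_failed_url`, `_failed_tabular`, and `_suggest_url` give every message the same shape: what failed, why, and a copy-pasteable fallback. A rough sketch of the message the new `geospatial()` helper composes, using a hypothetical minimal `Metadata`-like mapping (the real TypedDict carries more keys; only `file_name` and `dataset_name` are read here):

    from altair.datasets._exceptions import geospatial

    # Illustrative values only; not a full Metadata record.
    meta = {"file_name": "us_10m.json", "dataset_name": "us_10m"}
    err = geospatial(meta, "pandas")  # returns a NotImplementedError, ready to raise

    # str(err) should read roughly:
    #   Unable to load 'us_10m.json' as tabular data.
    #   Geospatial data is not supported natively by 'pandas'.
    #
    #   Instead, try installing `polars` or:
    #
    #       from altair.datasets import url
    #       url('us_10m')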
8 changes: 4 additions & 4 deletions altair/datasets/_readers.py
@@ -174,7 +174,7 @@ def url(
         frame = self.query(**_extract_constraints(name, suffix))
         meta = next(_iter_metadata(frame))
         if meta["suffix"] == ".parquet" and not is_available("vegafusion"):
-            raise _ds_exc.AltairDatasetsError.url_parquet(meta)
+            raise _ds_exc.AltairDatasetsError.from_url(meta)
         url = meta["url"]
         if isinstance(url, str):
             return url
@@ -263,7 +263,7 @@ def _schema_kwds(self, meta: Metadata, /) -> dict[str, Any]:
     def _maybe_fn(self, meta: Metadata, /) -> Callable[..., pd.DataFrame]:
         fn = super()._maybe_fn(meta)
         if meta["is_spatial"]:
-            raise _ds_exc.geospatial(self._name)
+            raise _ds_exc.geospatial(meta, self._name)
         return fn
 
 
@@ -383,9 +383,9 @@ def _maybe_fn(self, meta: Metadata, /) -> Callable[..., pa.Table]:
             if meta["is_tabular"]:
                 return self._read_json_tabular
             elif meta["is_spatial"]:
-                raise _ds_exc.geospatial(self._name)
+                raise _ds_exc.geospatial(meta, self._name)
             else:
-                raise _ds_exc.non_tabular_json(self._name)
+                raise _ds_exc.non_tabular_json(meta, self._name)
         else:
             return fn
