Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Try windows test workflow #440

Merged
merged 7 commits into from
Dec 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions .github/workflows/testing-windows.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# This workflow will install Python dependencies and run tests in a Windows environment.
# This is intended to catch any file-system specific issues, and so runs less
# frequently than other test suites.

name: Windows unit test

# Trigger on pushes to main and on pull requests targeting main.
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:

    # Windows runner: exercises backslash path separators, drive letters,
    # and other file-system behavior not covered by Linux CI.
    runs-on: windows-latest
    strategy:
      matrix:
        # Single Python version to keep this suite cheap; extend the list
        # to cover more versions if Windows-specific issues appear.
        python-version: ['3.10']

    steps:
    - uses: actions/checkout@v4
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: ${{ matrix.python-version }}
    # Editable install with the [dev] extras pulls in the test dependencies.
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install -e .[dev]
    - name: Run unit tests with pytest
      run: |
        python -m pytest tests
4 changes: 0 additions & 4 deletions benchmarks/benchmarks.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import hats.pixel_math.healpix_shim as hp
from hats.catalog import Catalog, PartitionInfo, TableProperties
from hats.catalog.association_catalog.partition_join_info import PartitionJoinInfo
from hats.io.paths import pixel_catalog_files
from hats.pixel_math import HealpixPixel
from hats.pixel_tree import PixelAlignment, align_trees
from hats.pixel_tree.pixel_tree import PixelTree
Expand Down Expand Up @@ -66,9 +65,6 @@ def time_inner_pixel_alignment(self):
def time_outer_pixel_alignment(self):
    """Benchmark an 'outer' alignment of the two pre-built pixel trees.

    Relies on ``self.pixel_tree_1`` / ``self.pixel_tree_2`` being prepared
    by the suite's setup — presumably in a ``setup`` method outside this view.
    """
    align_trees(self.pixel_tree_1, self.pixel_tree_2, alignment_type="outer")

def time_paths_creation(self):
    """Benchmark bulk generation of pixel catalog file paths.

    Uses a dummy base directory ("foo/"); only the path-string assembly
    cost is being measured. Assumes ``self.pixel_list`` is prepared by
    the suite's setup — TODO confirm (setup not visible here).
    """
    pixel_catalog_files("foo/", self.pixel_list)


class MetadataSuite:
"""Suite that generates catalog files and benchmarks the operations on them."""
Expand Down
49 changes: 1 addition & 48 deletions src/hats/io/paths.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import re
from pathlib import Path
from typing import Dict, List
from typing import Dict
from urllib.parse import urlencode

from fsspec.implementations.http import HTTPFileSystem
Expand Down Expand Up @@ -90,53 +90,6 @@ def get_healpix_from_path(path: str) -> HealpixPixel:
return HealpixPixel(int(order), int(pixel))


def pixel_catalog_files(
    catalog_base_dir: str | Path | UPath | None,
    pixels: List[HealpixPixel],
    query_params: Dict | None = None,
) -> List[str]:
    """Create a list of path *pointers* for pixel catalog files. This will not create the directory
    or files.

    The catalog file names will take the HiPS standard form of::

        <catalog_base_dir>/Norder=<pixel_order>/Dir=<directory number>/Npix=<pixel_number>.parquet

    Where the directory number is calculated using integer division as::

        (pixel_number/10000)*10000

    Args:
        catalog_base_dir (UPath): base directory of the catalog (includes catalog name)
        pixels (List[HealpixPixel]): the healpix pixels to create pointers to
        query_params (dict): Params to append to URL; only honored for HTTP filesystems.
            Ex: {'cols': ['ra', 'dec'], 'fltrs': ['r>=10', 'g<18']}

    Returns (List[str]):
        A list of paths to the pixels, in the same order as the input pixel list.
    """
    catalog_base_dir = get_upath(catalog_base_dir) / DATASET_DIR
    fs = catalog_base_dir.fs
    # Assemble paths via plain string concatenation (not UPath joins): the
    # function returns strings, and this avoids per-pixel UPath construction
    # overhead on large pixel lists. NOTE: the previous return annotation of
    # List[UPath] was wrong — these are str, as the docstring already stated.
    base_path = str(catalog_base_dir)
    if not base_path.endswith(fs.sep):
        base_path += fs.sep

    # Query parameters only make sense for HTTP-backed catalogs; they are
    # silently ignored for all other filesystems.
    url_params = ""
    if isinstance(fs, HTTPFileSystem) and query_params:
        url_params = dict_to_query_urlparams(query_params)

    return [
        base_path
        + fs.sep.join(
            [
                f"{PARTITION_ORDER}={pixel.order}",
                f"{PARTITION_DIR}={pixel.dir}",
                f"{PARTITION_PIXEL}={pixel.pixel}.parquet" + url_params,
            ]
        )
        for pixel in pixels
    ]


def dict_to_query_urlparams(query_params: Dict | None = None) -> str:
"""Converts a dictionary to a url query parameter string

Expand Down
4 changes: 2 additions & 2 deletions tests/hats/io/file_io/test_file_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,10 +103,10 @@ def test_load_csv_to_pandas_generator_encoding(tmp_path):


def test_write_df_to_csv(tmp_path):
    """Round-trip a random integer dataframe through CSV and compare.

    Both frames are cast with astype(int) so the comparison uses the same
    integer dtype on every platform — presumably to avoid int32/int64
    mismatches on Windows (verify against the CI failure this addressed).
    """
    frame = pd.DataFrame(np.random.randint(0, 100, size=(100, 4)), columns=list("ABCD")).astype(int)
    out_path = tmp_path / "test.csv"
    write_dataframe_to_csv(frame, out_path, index=False)
    round_tripped = pd.read_csv(out_path).astype(int)
    pd.testing.assert_frame_equal(round_tripped, frame)


Expand Down
28 changes: 9 additions & 19 deletions tests/hats/io/test_paths.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
"""Test pixel path creation"""

import os

import pytest

from hats.io import paths
Expand All @@ -8,14 +10,14 @@

def test_pixel_directory():
    """Simple case with sensical inputs"""
    # Build the expected path with os.path.join so the test passes on both
    # POSIX and Windows separators.
    expected = os.path.join(os.sep, "foo", "dataset", "Norder=0", "Dir=0")
    result = paths.pixel_directory("/foo", 0, 5)
    # expected is already a str, so the redundant str(expected) wrapper is
    # dropped for consistency with the sibling tests; result may be a path
    # object, so it is converted.
    assert str(result) == expected


def test_pixel_directory_number():
    """Simple case with sensical inputs"""
    # Platform-independent expected path, built from components.
    expected = os.path.join(os.sep, "foo", "dataset", "Norder=0", "Dir=0")
    result = paths.pixel_directory("/foo", pixel_order=0, pixel_number=5, directory_number=0)
    assert str(result) == expected

Expand All @@ -37,7 +39,7 @@ def test_pixel_directory_nonint():

def test_pixel_catalog_file():
    """Simple case with sensical inputs"""
    result = paths.pixel_catalog_file("/foo", HealpixPixel(0, 5))
    # Expected path assembled per-component so separators match the platform.
    expected = os.path.join(os.sep, "foo", "dataset", "Norder=0", "Dir=0", "Npix=5.parquet")
    assert str(result) == expected

Expand All @@ -57,21 +59,6 @@ def test_pixel_catalog_file_nonint():
paths.pixel_catalog_file("/foo", "zero", "five")


def test_pixel_catalog_files():
    """Paths are generated in the same order as the input pixel list."""
    pixel_list = [HealpixPixel(0, 5), HealpixPixel(1, 16)]
    result = paths.pixel_catalog_files("/foo/", pixel_list)
    assert result == [
        "/foo/dataset/Norder=0/Dir=0/Npix=5.parquet",
        "/foo/dataset/Norder=1/Dir=0/Npix=16.parquet",
    ]


def test_pixel_catalog_files_w_query_params():
    """Query params are URL-encoded and appended to each pixel file path."""
    query_params = {"columns": ["ID", "RA", "DEC", "r_auto"], "filters": ["r_auto<13"]}
    result = paths.pixel_catalog_files("https://foo", [HealpixPixel(0, 5)], query_params=query_params)
    expected = [
        "https://foo/dataset/Norder=0/Dir=0/Npix=5.parquet?columns=ID%2CRA%2CDEC%2Cr_auto&filters=r_auto%3C13"
    ]
    assert result == expected


def test_dict_to_query_urlparams():
expected = "?columns=ID%2CRA%2CDEC%2Cr_auto&filters=r_auto%3C13"
query_params = {"columns": ["ID", "RA", "DEC", "r_auto"], "filters": ["r_auto<13"]}
Expand Down Expand Up @@ -106,6 +93,9 @@ def test_get_healpix_from_path():
result = paths.get_healpix_from_path("/foo/dataset/Norder=5/Dir=0/Npix=34.parquet")
assert result == expected

result = paths.get_healpix_from_path("C:\\foo\\dataset\\Norder=5\\Dir=0\\Npix=34.parquet")
assert result == expected

result = paths.get_healpix_from_path("Norder=5/Dir=0/Npix=34.pq")
assert result == expected

Expand Down
Loading