Skip to content

Commit

Permalink
Parallelize the test suite and fix a test pollution bug (#1338)
Browse files Browse the repository at this point in the history
  • Loading branch information
hoxbro authored May 30, 2024
1 parent 6d84b96 commit d6cbacf
Show file tree
Hide file tree
Showing 11 changed files with 72 additions and 15 deletions.
9 changes: 7 additions & 2 deletions datashader/data_libraries/cudf.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,14 @@
from __future__ import annotations
from contextlib import suppress
from datashader.data_libraries.pandas import default
from datashader.core import bypixel
import cudf


@bypixel.pipeline.register(cudf.DataFrame)
def cudf_pipeline(df, schema, canvas, glyph, summary, *, antialias=False):
return default(glyph, df, schema, canvas, summary, antialias=antialias, cuda=True)


with suppress(ImportError):
import cudf

cudf_pipeline = bypixel.pipeline.register(cudf.DataFrame)(cudf_pipeline)
9 changes: 7 additions & 2 deletions datashader/data_libraries/dask_cudf.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,14 @@
from __future__ import annotations
from contextlib import suppress
from datashader.data_libraries.dask import dask_pipeline
from datashader.core import bypixel
import dask_cudf


@bypixel.pipeline.register(dask_cudf.DataFrame)
def dask_cudf_pipeline(df, schema, canvas, glyph, summary, *, antialias=False):
return dask_pipeline(df, schema, canvas, glyph, summary, antialias=antialias, cuda=True)


with suppress(ImportError):
import dask_cudf

dask_cudf_pipeline = bypixel.pipeline.register(dask_cudf.DataFrame)(dask_cudf_pipeline)
4 changes: 2 additions & 2 deletions datashader/data_libraries/dask_xarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,8 +173,8 @@ def dask_raster(glyph, xr_ds, schema, canvas, summary, *, antialias=False, cuda=
src_y0, src_y1 = glyph._compute_bounds_from_1d_centers(
xr_ds, y_name, maybe_expand=False, orient=False
)
xbinsize = float(xr_ds[x_name][1] - xr_ds[x_name][0])
ybinsize = float(xr_ds[y_name][1] - xr_ds[y_name][0])
xbinsize = abs(float(xr_ds[x_name][1] - xr_ds[x_name][0]))
ybinsize = abs(float(xr_ds[y_name][1] - xr_ds[y_name][0]))

# Compute scale/translate
out_h, out_w = shape
Expand Down
2 changes: 1 addition & 1 deletion datashader/datashape/coretypes.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,7 +254,7 @@ def to_numpy_dtype(self):
return np.dtype('datetime64[us]')


_units = set(['ns', 'us', 'ms', 's', 'm', 'h', 'D', 'W', 'M', 'Y'])
_units = ('ns', 'us', 'ms', 's', 'm', 'h', 'D', 'W', 'M', 'Y')


_unit_aliases = {
Expand Down
3 changes: 3 additions & 0 deletions datashader/tests/benchmarks/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
import pytest

pytestmark = pytest.mark.benchmark
34 changes: 34 additions & 0 deletions datashader/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
CUSTOM_MARKS = ("benchmark",)


def pytest_addoption(parser):
for marker in CUSTOM_MARKS:
parser.addoption(
f"--{marker}",
action="store_true",
default=False,
help=f"Run {marker} related tests",
)


def pytest_configure(config):
for marker in CUSTOM_MARKS:
config.addinivalue_line("markers", f"{marker}: {marker} test marker")


def pytest_collection_modifyitems(config, items):
skipped, selected = [], []
markers = [m for m in CUSTOM_MARKS if config.getoption(f"--{m}")]
empty = not markers
for item in items:
if empty and any(m in item.keywords for m in CUSTOM_MARKS):
skipped.append(item)
elif empty:
selected.append(item)
elif not empty and any(m in item.keywords for m in markers):
selected.append(item)
else:
skipped.append(item)

config.hook.pytest_deselected(items=skipped)
items[:] = selected
7 changes: 5 additions & 2 deletions datashader/tests/test_transfer_functions.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import annotations

import os
from io import BytesIO

import numpy as np
Expand All @@ -13,6 +14,8 @@
coords = dict([('x_axis', [3, 4, 5]), ('y_axis', [0, 1, 2])])
dims = ['y_axis', 'x_axis']

test_gpu = bool(int(os.getenv("DATASHADER_TEST_GPU", 0)))

# CPU
def build_agg(array_module=np):
a = array_module.arange(10, 19, dtype='u4').reshape((3, 3))
Expand Down Expand Up @@ -42,12 +45,12 @@ def create_dask_array_np(*args, **kwargs):
return da.from_array(np.array(*args, **kwargs))


try:
if test_gpu:
import cupy
aggs = [build_agg(np), build_agg(cupy), build_agg_dask()]
arrays = [np.array, cupy.array, create_dask_array_np]
array_modules = [np, cupy]
except ImportError:
else:
cupy = None
aggs = [build_agg(np), build_agg_dask()]
arrays = [np.array, create_dask_array_np]
Expand Down
4 changes: 3 additions & 1 deletion examples/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@

import platform

collect_ignore_glob = []
collect_ignore_glob = [
"tiling.ipynb",
]

if find_spec("geopandas") is None:
collect_ignore_glob += [
Expand Down
2 changes: 0 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,6 @@ norecursedirs = 'doc .git dist build _build .ipynb_checkpoints'
minversion = "7"
xfail_strict = true
log_cli_level = "INFO"
# skipping any notebooks that require extra deps
nbsmoke_skip_run = ".*tiling.ipynb$\n.*streaming-aggregation.ipynb$\n.*8_Geography.ipynb$"
filterwarnings = [
"ignore:Passing a (SingleBlockManager|BlockManager) to (Series|GeoSeries|DataFrame|GeoDataFrame) is deprecated:DeprecationWarning", # https://github.com/holoviz/spatialpandas/issues/137
"ignore:Accessing the underlying geometries through the `.data`:DeprecationWarning:dask_geopandas.core", # https://github.com/geopandas/dask-geopandas/issues/264
Expand Down
6 changes: 6 additions & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,9 @@
'nbformat',
'nbsmoke[verify] >0.5',
'netcdf4',
'nbval',
'psutil',
'pytest-xdist',
# 'pyarrow',
'pytest <8', # Fails lint with IPynbFile is deprecated
'pytest-benchmark',
Expand All @@ -91,11 +94,14 @@
'nbconvert',
'nbformat',
'nbsmoke[verify] >0.5',
'nbval',
'netcdf4',
'pyarrow',
'pytest <8', # Fails lint with IPynbFile is deprecated
'pytest-benchmark',
'pytest-cov',
'psutil',
'pytest-xdist',
'rasterio',
'rioxarray',
'scikit-image',
Expand Down
7 changes: 4 additions & 3 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@ commands = flake8
[_unit]
description = Run unit tests
deps = .[tests]
commands = pytest datashader --cov=./datashader --cov-append
commands = pytest datashader -n logical --dist loadgroup --cov=./datashader --cov-append --benchmark-skip
pytest datashader --benchmark --cov=./datashader --cov-append

[_unit_deploy]
description = Run unit tests without coverage
Expand All @@ -27,12 +28,12 @@ commands = pytest datashader
[_unit_nojit]
description = Run select unit tests with numba jit disabled
deps = .[tests]
commands = pytest datashader -k "not benchmarks and not test_tiles" --cov=./datashader --cov-append
commands = pytest datashader -k "not test_tiles" -n logical --dist loadgroup --cov=./datashader --cov-append --benchmark-skip

[_examples]
description = Test that default examples run
deps = .[examples, tests]
commands = pytest --nbsmoke-run -k ".ipynb"
commands = pytest -n logical --dist loadscope --nbval-lax examples --benchmark-skip
# could add more, to test types of example other than nbs

[_examples_extra]
Expand Down

0 comments on commit d6cbacf

Please sign in to comment.