
Add pre-commit (#1295)
* Basic pre-commit

* Add codespell

* Add ruff

* Add pre-commit to CI
ianthomas23 authored Oct 23, 2023
1 parent 81ce9b8 commit d338d98
Showing 54 changed files with 736 additions and 420 deletions.
8 changes: 8 additions & 0 deletions .github/workflows/test.yaml
@@ -15,8 +15,15 @@ concurrency:
cancel-in-progress: true

jobs:
pre_commit:
name: Run pre-commit
runs-on: 'ubuntu-latest'
steps:
- uses: holoviz-dev/holoviz_tasks/[email protected]

test_suite:
name: Tests on ${{ matrix.os }} with Python ${{ matrix.python-version }}
needs: [pre_commit]
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
@@ -72,6 +79,7 @@ jobs:
test_pip:
name: Pip tests on ${{ matrix.os }} with Python ${{ matrix.python-version }}
needs: [pre_commit]
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
28 changes: 28 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,28 @@
exclude: (\.(js|svg)$)

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: check-builtin-literals
- id: check-case-conflict
- id: check-docstring-first
- id: check-executables-have-shebangs
- id: check-toml
- id: detect-private-key
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.0.292
hooks:
- id: ruff
files: datashader/
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
hooks:
- id: codespell
additional_dependencies:
- tomli

ci:
autofix_prs: false
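The top-level exclude is a Python regular expression that pre-commit applies to each candidate file path (re.search semantics), so JavaScript and SVG files are skipped by every hook. A quick illustrative check of what the pattern matches (file names are made up, not part of the commit):

import re

exclude = re.compile(r"(\.(js|svg)$)")
assert exclude.search("datashader/static/app.js")
assert exclude.search("doc/logo.svg")
assert exclude.search("datashader/core.py") is None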
1 change: 0 additions & 1 deletion benchmarks/benchmarks/__init__.py
@@ -1 +0,0 @@

10 changes: 7 additions & 3 deletions datashader/__init__.py
@@ -3,7 +3,8 @@
from packaging.version import Version

import param
__version__ = str(param.version.Version(fpath=__file__, archive_commit="$Format:%h$",reponame="datashader"))
__version__ = str(param.version.Version(fpath=__file__, archive_commit="$Format:%h$",
reponame="datashader"))

from .core import Canvas # noqa (API import)
from .reductions import * # noqa (API import)
@@ -26,8 +27,11 @@
fetch_data = partial(_fetch,'datashader')
examples = partial(_examples,'datashader')
except ImportError:
def _missing_cmd(*args,**kw): return("install pyct to enable this command (e.g. `conda install pyct or `pip install pyct[cmd]`)")
def _missing_cmd(*args,**kw):
return("install pyct to enable this command (e.g. `conda install pyct or "
"`pip install pyct[cmd]`)")
_copy = _fetch = _examples = _missing_cmd
def err(): raise ValueError(_missing_cmd())
def err():
raise ValueError(_missing_cmd())
fetch_data = copy_examples = examples = err
del partial, _examples, _copy, _fetch
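The __init__.py hunks above only rewrap datashader's optional-command fallback: pyct-backed helpers are used when pyct is importable, and stubs that raise a helpful error otherwise. A minimal standalone sketch of that same pattern (import names assumed for illustration, not copied from the full file):

from functools import partial

try:
    from pyct.cmd import fetch_data as _fetch  # assumed pyct import path
    fetch_data = partial(_fetch, 'datashader')
except ImportError:
    def _missing_cmd(*args, **kw):
        return "install pyct to enable this command"

    def fetch_data(*args, **kw):
        raise ValueError(_missing_cmd())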
3 changes: 2 additions & 1 deletion datashader/antialias.py
@@ -23,7 +23,8 @@ class AntialiasStage2(NamedTuple):


if TYPE_CHECKING:
UnzippedAntialiasStage2 = tuple[tuple[AntialiasCombination], tuple[float], tuple[bool], tuple[bool]]
UnzippedAntialiasStage2 = \
tuple[tuple[AntialiasCombination], tuple[float], tuple[bool], tuple[bool]]


def two_stage_agg(antialias_stage_2: UnzippedAntialiasStage2 | None):
43 changes: 27 additions & 16 deletions datashader/bundling.py
@@ -113,7 +113,8 @@ def resample_edge(segments, min_segment_length, max_segment_length, ndims):
def resample_edges(edge_segments, min_segment_length, max_segment_length, ndims):
replaced_edges = []
for segments in edge_segments:
replaced_edges.append(resample_edge(segments, min_segment_length, max_segment_length, ndims))
replaced_edges.append(resample_edge(segments, min_segment_length, max_segment_length,
ndims))
return replaced_edges


@@ -122,8 +123,8 @@ def smooth_segment(segments, tension, idx, idy):
seg_length = len(segments) - 2
for i in range(1, seg_length):
previous, current, next_point = segments[i - 1], segments[i], segments[i + 1]
current[idx] = ((1 - tension) * current[idx]) + (tension * (previous[idx] + next_point[idx]) / 2)
current[idy] = ((1 - tension) * current[idy]) + (tension * (previous[idy] + next_point[idy]) / 2)
current[idx] = ((1-tension)*current[idx]) + (tension*(previous[idx] + next_point[idx]) / 2)
current[idy] = ((1-tension)*current[idy]) + (tension*(previous[idy] + next_point[idy]) / 2)


def smooth(edge_segments, tension, idx, idy):
@@ -142,25 +143,29 @@ def advect_segments(segments, vert, horiz, accuracy, idx, idy):
segments[i][idy] = max(0, min(segments[i][idy], 1))


def advect_and_resample(vert, horiz, segments, iterations, accuracy, min_segment_length, max_segment_length, segment_class):
def advect_and_resample(vert, horiz, segments, iterations, accuracy, min_segment_length,
max_segment_length, segment_class):
for it in range(iterations):
advect_segments(segments, vert, horiz, accuracy, segment_class.idx, segment_class.idy)
if it % 2 == 0:
segments = resample_edge(segments, min_segment_length, max_segment_length, segment_class.ndims)
segments = resample_edge(segments, min_segment_length, max_segment_length,
segment_class.ndims)
return segments


@delayed
def advect_resample_all(gradients, edge_segments, iterations, accuracy, min_segment_length, max_segment_length, segment_class):
def advect_resample_all(gradients, edge_segments, iterations, accuracy, min_segment_length,
max_segment_length, segment_class):
vert, horiz = gradients
return [advect_and_resample(vert, horiz, edges, iterations, accuracy, min_segment_length, max_segment_length, segment_class)
return [advect_and_resample(vert, horiz, edges, iterations, accuracy, min_segment_length,
max_segment_length, segment_class)
for edges in edge_segments]


def batches(l, n):
"""Yield successive n-sized batches from l."""
for i in range(0, len(l), n):
yield l[i:i + n]
def batches(seq, n):
"""Yield successive n-sized batches from seq."""
for i in range(0, len(seq), n):
yield seq[i:i + n]
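# Illustrative usage, not part of the commit: the renamed helper yields
# n-sized chunks, with a shorter final chunk when len(seq) % n != 0.
#     list(batches([1, 2, 3, 4, 5], 2))  ->  [[1, 2], [3, 4], [5]]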


@delayed
@@ -252,7 +257,8 @@ def get_merged_columns(params)
@staticmethod
@ngjit
def create_segment(edge):
return np.array([[edge[0], edge[1], edge[2], edge[5]], [edge[0], edge[3], edge[4], edge[5]]])
return np.array([[edge[0], edge[1], edge[2], edge[5]],
[edge[0], edge[3], edge[4], edge[5]]])

@staticmethod
@ngjit
@@ -475,7 +481,8 @@ def __call__(self, nodes, edges, **params):

# This gets the edges split into lots of small segments
# Doing this inside a delayed function lowers the transmission overhead
edge_segments = [resample_edges(batch, p.min_segment_length, p.max_segment_length, segment_class.ndims) for batch in edge_batches]
edge_segments = [resample_edges(batch, p.min_segment_length, p.max_segment_length,
segment_class.ndims) for batch in edge_batches]

for i in range(p.iterations):
# Each step, the size of the 'blur' shrinks
@@ -486,18 +493,22 @@ def __call__(self, nodes, edges, **params):
break

# Draw the density maps and combine them
images = [draw_to_surface(segment, bandwidth, p.accuracy, segment_class.accumulate) for segment in edge_segments]
images = [draw_to_surface(segment, bandwidth, p.accuracy, segment_class.accumulate)
for segment in edge_segments]
overall_image = sum(images)

gradients = get_gradients(overall_image)

# Move edges along the gradients and resample when necessary
# This could include smoothing to adjust the amount a graph can change
edge_segments = [advect_resample_all(gradients, segment, p.advect_iterations, p.accuracy, p.min_segment_length, p.max_segment_length, segment_class)
edge_segments = [advect_resample_all(gradients, segment, p.advect_iterations,
p.accuracy, p.min_segment_length,
p.max_segment_length, segment_class)
for segment in edge_segments]

# Do a final resample to a smaller size for nicer rendering
edge_segments = [resample_edges(segment, p.min_segment_length, p.max_segment_length, segment_class.ndims) for segment in edge_segments]
edge_segments = [resample_edges(segment, p.min_segment_length, p.max_segment_length,
segment_class.ndims) for segment in edge_segments]

# Finally things can be sent for computation
edge_segments = compute(*edge_segments)
10 changes: 5 additions & 5 deletions datashader/colors.py
@@ -128,19 +128,19 @@ def rgb(x):
# Example palettes

# Copied from bokeh.palettes.Greys9
Greys9 = ["#000000", "#252525", "#525252", "#737373", "#969696", "#bdbdbd", "#d9d9d9", "#f0f0f0", "#ffffff"]
Greys9 = ["#000000", "#252525", "#525252", "#737373", "#969696", "#bdbdbd", "#d9d9d9", "#f0f0f0", "#ffffff"] # noqa: E501

# Adapted from matplotlib.cm.hot to be more uniform at the high end
Hot = ["black", "maroon", "darkred", "red", "orangered", "darkorange", "orange", "gold", "yellow", "white"]
Hot = ["black", "maroon", "darkred", "red", "orangered", "darkorange", "orange", "gold", "yellow", "white"] # noqa: E501

# pseudo terrestrial elevation ramp
Elevation = ["aqua", "sandybrown", "limegreen", "green", "green", "darkgreen", "saddlebrown", "gray", "white"]
Elevation = ["aqua", "sandybrown", "limegreen", "green", "green", "darkgreen", "saddlebrown", "gray", "white"] # noqa: E501

# Qualitative color maps, for use in colorizing categories
# Originally from Cynthia Brewer (http://colorbrewer2.org), via Bokeh
Set1 = ['#e41a1c', '#377eb8', '#4daf4a', '#984ea3', '#ff7f00', '#ffff33', '#a65628', '#f781bf', '#999999']
Set1 = ['#e41a1c', '#377eb8', '#4daf4a', '#984ea3', '#ff7f00', '#ffff33', '#a65628', '#f781bf', '#999999'] # noqa: E501
Set2 = ['#66c2a5', '#fc8d62', '#8da0cb', '#e78ac3', '#a6d854', '#ffd92f', '#e5c494', '#b3b3b3']
Set3 = ['#8dd3c7', '#ffffb3', '#bebada', '#fb8072', '#80b1d3', '#fdb462', '#b3de69', '#fccde5', '#d9d9d9', '#bc80bd', '#ccebc5', '#ffed6f']
Set3 = ['#8dd3c7', '#ffffb3', '#bebada', '#fb8072', '#80b1d3', '#fdb462', '#b3de69', '#fccde5', '#d9d9d9', '#bc80bd', '#ccebc5', '#ffed6f'] # noqa: E501

# Sets 1, 2, and 3 combined, minus indistinguishable colors
Sets1to3 = Set1+Set2[0:3]+Set2[4:7]+Set3[1:2]+Set3[3:4]+Set3[5:6]+Set3[7:9]+Set3[10:]
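Counting the slices shows what the combination keeps, assuming the palette definitions above: 9 colors from Set1, 3 + 3 from Set2, and 1 + 1 + 1 + 2 + 2 from Set3, 22 in total. An illustrative check (not part of the commit):

assert len(Set1 + Set2[0:3] + Set2[4:7] + Set3[1:2] + Set3[3:4]
           + Set3[5:6] + Set3[7:9] + Set3[10:]) == 22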
21 changes: 14 additions & 7 deletions datashader/compiler.py
@@ -117,7 +117,8 @@ def compile_components(agg, schema, glyph, *, antialias=False, cuda=False, parti
else:
array_module = np
antialias_stage_2 = antialias_stage_2(array_module)
antialias_stage_2_funcs = make_antialias_stage_2_functions(antialias_stage_2, bases, cuda, partitioned)
antialias_stage_2_funcs = make_antialias_stage_2_functions(antialias_stage_2, bases, cuda,
partitioned)
else:
self_intersect = False
antialias_stage_2 = False
@@ -145,7 +146,8 @@ def compile_components(agg, schema, glyph, *, antialias=False, cuda=False, parti

column_names = [c.column for c in cols if c.column != SpecialColumn.RowIndex]

return create, info, append, combine, finalize, antialias_stage_2, antialias_stage_2_funcs, column_names
return create, info, append, combine, finalize, antialias_stage_2, antialias_stage_2_funcs, \
column_names


def _get_antialias_stage_2_combine_func(combination: AntialiasCombination, zero: float,
@@ -220,7 +222,8 @@ def make_antialias_stage_2_functions(antialias_stage_2, bases, cuda, partitioned
" a[1][:] = a[0][:]",
" else:",
]
for i, (func, is_where, next_is_where) in enumerate(zip(funcs, base_is_where, next_base_is_where)):
for i, (func, is_where, next_is_where) in enumerate(zip(funcs, base_is_where,
next_base_is_where)):
if is_where:
where_reduction = bases[i]
if isinstance(where_reduction, by):
Expand All @@ -230,12 +233,14 @@ def make_antialias_stage_2_functions(antialias_stage_2, bases, cuda, partitioned
name = next(names) # Unique name
namespace[name] = combine

lines.append(f" {name}(aggs_and_copies[{i}][::-1], aggs_and_copies[{i-1}][::-1])")
lines.append(
f" {name}(aggs_and_copies[{i}][::-1], aggs_and_copies[{i-1}][::-1])")
elif next_is_where:
# This is dealt with as part of the following base which is a where reduction.
pass
else:
lines.append(f" {func.__name__}(aggs_and_copies[{i}][1], aggs_and_copies[{i}][0])")
lines.append(
f" {func.__name__}(aggs_and_copies[{i}][1], aggs_and_copies[{i}][0])")
code = "\n".join(lines)
logger.debug(code)
exec(code, namespace)
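make_antialias_stage_2_functions builds Python source as a list of lines and exec's it to produce a combine function specialized to the reductions in use. A minimal standalone sketch of that codegen pattern (hypothetical generated function, not datashader's actual code):

namespace = {}
code = "\n".join([
    "def combine(accum, other):",
    "    for i in range(len(accum)):",
    "        accum[i] += other[i]",
])
exec(code, namespace)  # defines combine() inside namespace
combine = namespace["combine"]

a = [1, 2, 3]
combine(a, [10, 20, 30])
assert a == [11, 22, 33]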
@@ -477,7 +482,8 @@ def make_combine(bases, dshapes, temps, combine_temps, antialias, cuda, partitio
# it from explicit combine calls.
base_is_where = [b.is_where() for b in bases]
next_base_is_where = base_is_where[1:] + [False]
calls = [(None if n else b._build_combine(d, antialias, cuda, partitioned), [arg_lk[i] for i in (b,) + t + ct])
calls = [(None if n else b._build_combine(d, antialias, cuda, partitioned),
[arg_lk[i] for i in (b,) + t + ct])
for (b, d, t, ct, n) in zip(bases, dshapes, temps, combine_temps, next_base_is_where)]

def combine(base_tuples):
@@ -539,6 +545,7 @@ def make_antialias_stage_2(reds, bases):
break

def antialias_stage_2(array_module) -> UnzippedAntialiasStage2:
return tuple(zip(*concat(b._antialias_stage_2(self_intersect, array_module) for b in bases)))
return tuple(zip(*concat(b._antialias_stage_2(self_intersect, array_module)
for b in bases)))

return self_intersect, antialias_stage_2
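The tuple(zip(*...)) idiom in antialias_stage_2 transposes one 4-tuple per base reduction into the four parallel tuples named UnzippedAntialiasStage2 in datashader/antialias.py above. A toy illustration with made-up values (not datashader's real combinations):

rows = [("SUM", 0.0, True, False),
        ("MAX", float("nan"), False, True)]
tuple(zip(*rows))
# -> (('SUM', 'MAX'), (0.0, nan), (True, False), (False, True))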
7 changes: 4 additions & 3 deletions datashader/composite.py
@@ -148,8 +148,10 @@ def arr_operator(f):

@arr_operator
def source_arr(src, dst):
if src: return src
else: return dst
if src:
return src
else:
return dst

@arr_operator
def add_arr(src, dst):
@@ -162,4 +164,3 @@ def max_arr(src, dst):
@arr_operator
def min_arr(src, dst):
return min([src, dst])
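arr_operator itself is outside this hunk; the functions it decorates are scalar src/dst combiners that get lifted to operate element-wise on whole arrays. A rough sketch of such a lift, assuming NumPy rather than datashader's actual implementation:

import numpy as np

def arr_operator(f):
    # Lift a scalar (src, dst) combiner to element-wise array form.
    return np.vectorize(f)

@arr_operator
def source_arr(src, dst):
    if src:
        return src
    else:
        return dst

source_arr(np.array([0, 2, 0]), np.array([5, 6, 7]))  # -> array([5, 2, 7])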

(Diff truncated: the remaining 45 of 54 changed files are not shown.)
