Add more benchmarks #387

Merged
merged 28 commits into main from benchmarking-codspeed on Jul 18, 2024

Changes from all commits
28 commits
914ba63
Try slower plot again to see how it does now with performance
SolarDrew May 17, 2024
68db214
Don't need both fixtures in this test any more
SolarDrew May 20, 2024
98aee6b
Don't need this old stuff either
SolarDrew May 20, 2024
9bdefbe
Use more points for pixel_to_world benchmark
SolarDrew May 20, 2024
86671cb
Add some more benchmarks
SolarDrew Jun 6, 2024
adee6da
Need units on parameters for updating vct some of the time
SolarDrew Jun 6, 2024
38b341b
Benchmarks are taking too long again
SolarDrew Jun 6, 2024
78eb098
Nope too slow
SolarDrew Jun 6, 2024
19d80e5
Benchmark dataset slicing
SolarDrew Jun 6, 2024
7b252d3
Add changelog
SolarDrew Jun 7, 2024
4bd56ff
Merge branch 'main' of github.com:DKISTDC/dkist into benchmarking-cod…
SolarDrew Jul 17, 2024
0ec14e0
Try slower plot again to see how it does now with performance
SolarDrew May 17, 2024
6b64729
Don't need both fixtures in this test any more
SolarDrew May 20, 2024
f0247d7
Don't need this old stuff either
SolarDrew May 20, 2024
f0c6f9e
Use more points for pixel_to_world benchmark
SolarDrew May 20, 2024
b6bf0e9
Add some more benchmarks
SolarDrew Jun 6, 2024
81fd084
Need units on parameters for updating vct some of the time
SolarDrew Jun 6, 2024
cea4931
Benchmarks are taking too long again
SolarDrew Jun 6, 2024
18b59a6
Nope too slow
SolarDrew Jun 6, 2024
f6ae660
Benchmark dataset slicing
SolarDrew Jun 6, 2024
05f27fe
Add changelog
SolarDrew Jun 7, 2024
29aa7c8
Update dkist/wcs/models.py
SolarDrew Jul 17, 2024
c4d55af
Merge branch 'benchmarking-codspeed' of github.com:SolarDrew/dkist in…
SolarDrew Jul 17, 2024
e37a5e5
Add tests to hit update_celestial_transform and make sure the units are
SolarDrew Jul 17, 2024
c4d0ffc
Correct test comparison value
SolarDrew Jul 18, 2024
c5ee358
Merge branch 'main' into benchmarking-codspeed
Cadair Jul 18, 2024
a0406e6
Merge branch 'main' of github.com:DKISTDC/dkist into benchmarking-cod…
SolarDrew Jul 18, 2024
275bd42
Merge branch 'benchmarking-codspeed' of github.com:SolarDrew/dkist in…
SolarDrew Jul 18, 2024
1 change: 1 addition & 0 deletions changelog/387.trivial.rst
@@ -0,0 +1 @@
Add some more benchmarks to track performance of more parts of the user tools.
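
Note: the new benchmarks all follow the same pattern as the existing ones in this file: mark the test with `@pytest.mark.benchmark` and hand a callable to the `benchmark` fixture (provided here by pytest-codspeed, going by the branch name). A minimal sketch of that pattern, not taken from the diff; the test name and the numpy workload are purely illustrative:

```python
import numpy as np
import pytest


@pytest.mark.benchmark
def test_sum_benchmark(benchmark):
    # The benchmark fixture calls the given function repeatedly and records
    # timings; extra positional/keyword arguments are forwarded to it.
    data = np.arange(1_000_000).reshape(100, 100, 100)
    benchmark(np.sum, data, axis=0)
```
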
76 changes: 72 additions & 4 deletions dkist/tests/test_benchmarks.py
@@ -1,8 +1,14 @@
import matplotlib.pyplot as plt
import numpy as np
import pytest
from numpy.random import default_rng

import astropy.units as u
from astropy.modeling.models import Tabular1D

from dkist import load_dataset
from dkist.wcs.models import (Ravel, generate_celestial_transform,
update_celestial_transform_parameters)


@pytest.mark.benchmark
@@ -11,11 +17,8 @@


@pytest.mark.benchmark
def test_pixel_to_world(benchmark, visp_dataset_no_headers, large_visp_dataset):
def test_pixel_to_world(benchmark, visp_dataset_no_headers):
ds = visp_dataset_no_headers
# pxcoords2 = []
# for size in ds2.wcs.pixel_shape:
# pxcoords2.append(np.arange(size))

pxcoords = np.mgrid[:ds.wcs.pixel_shape[0]:50,
:ds.wcs.pixel_shape[1]:50,
@@ -35,3 +38,68 @@
ds.plot(plot_axes=axes)
plt.savefig("tmpplot")
plt.close()


@pytest.mark.benchmark
def test_generate_celestial(benchmark):
benchmark(generate_celestial_transform,
crpix=[0, 0] * u.pix,
crval=[0, 0] * u.arcsec,
cdelt=[1, 1] * u.arcsec/u.pix,
pc=np.identity(2) * u.pix,
)


@pytest.mark.benchmark
def test_update_celestial(benchmark):
trsfm = generate_celestial_transform(
crpix=[0, 0] * u.pix,
crval=[0, 0] * u.arcsec,
cdelt=[1, 1] * u.arcsec/u.pix,
pc=np.identity(2) * u.pix)

benchmark(update_celestial_transform_parameters,
trsfm,
[1, 1] * u.pix,
[0.5, 0.5] * u.arcsec/u.pix,
np.identity(2) * u.pix,
[1, 1] * u.arcsec,
180 * u.deg,
)


@pytest.mark.benchmark
def test_raveled_tab1d_model(benchmark):
ndim = 3
rng = default_rng()
array_shape = rng.integers(1, 21, ndim)
array_bounds = array_shape - 1
ravel = Ravel(array_shape)
nelem = np.prod(array_shape)
units = u.pix
values = np.arange(nelem) * units
lut_values = values
tabular = Tabular1D(
values,
lut_values,
bounds_error=False,
fill_value=np.nan,
method="linear",
)
raveled_tab = ravel | tabular
# adding the new axis onto array_bounds makes broadcasting work below
array_bounds = array_bounds[:, np.newaxis]
# use 5 as an arbitrary number of inputs
random_number_shape = len(array_shape), 5
random_numbers = rng.random(random_number_shape)
raw_inputs = random_numbers * array_bounds
inputs = tuple(raw_inputs * units)

benchmark(raveled_tab, *inputs)


@pytest.mark.benchmark
def test_slice_dataset(benchmark, large_visp_dataset):
@benchmark
def slice_dataset(dataset=large_visp_dataset, idx = np.s_[:2, 10:15, 0]):
sliced = dataset[idx]
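
Two invocation styles of the `benchmark` fixture appear in this file: the call form (`benchmark(func, *args)`) used by the transform benchmarks, and the decorator form used by `test_slice_dataset`, where the decorated body runs and is timed immediately. The slice itself is pre-built with `np.s_`; a small illustrative check of what that index object is (not part of the diff):

```python
import numpy as np

# np.s_ builds the same index object that would appear inside square brackets,
# so the benchmarked body can apply a pre-built slice tuple to the dataset.
idx = np.s_[:2, 10:15, 0]
assert idx == (slice(None, 2), slice(10, 15), 0)
```
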
2 changes: 1 addition & 1 deletion dkist/wcs/models.py
@@ -130,7 +130,7 @@ def update_celestial_transform_parameters(
-crpix[0],
-crpix[1],
pc,
transform[2].translation.value,
transform[2].translation.quantity if hasattr(pc, "unit") else transform[2].translation.value,
cdelt[0],
cdelt[1],
crval[0],
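
The one-line change above feeds the existing translation back into the transform as a unit-aware value whenever `pc` carries units, so unitful and unitless parameters are not mixed. A minimal sketch of the `.value` / `.quantity` distinction on an astropy model parameter; `Shift` here is only a stand-in, not the model touched by the diff:

```python
import numpy as np
import astropy.units as u
from astropy.modeling import models

# Every astropy model parameter exposes a plain-float view and a Quantity view.
shift = models.Shift(2 * u.pix)
print(shift.offset.value)     # 2.0       (float, units dropped)
print(shift.offset.quantity)  # 2.0 pix   (Quantity, units kept)

# The diff keys the choice off whether pc has units, mirroring:
pc = np.identity(2) * u.pix
translation = shift.offset.quantity if hasattr(pc, "unit") else shift.offset.value
```
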
40 changes: 39 additions & 1 deletion dkist/wcs/tests/test_models.py
@@ -12,7 +12,7 @@

from dkist.wcs.models import (AsymmetricMapping, Ravel, Unravel, VaryingCelestialTransform,
VaryingCelestialTransform2D, VaryingCelestialTransform3D,
generate_celestial_transform,
generate_celestial_transform, update_celestial_transform_parameters,
varying_celestial_transform_from_tables)


@@ -52,6 +52,44 @@ def test_generate_celestial_unitless():
assert u.allclose(shift1.offset, 0)


def test_update_celestial():
trsfm = generate_celestial_transform(
crpix=[0, 0] * u.pix,
crval=[0, 0] * u.arcsec,
cdelt=[1, 1] * u.arcsec/u.pix,
pc=np.identity(2) * u.pix)

update_celestial_transform_parameters(
trsfm,
[1, 1] * u.pix,
[0.5, 0.5] * u.arcsec/u.pix,
np.identity(2) * u.pix,
[1, 1] * u.arcsec,
180 * u.deg)

# Copout and only test that one parameter is correct
shift1 = trsfm.left.left.left.left.right
assert u.allclose(shift1.offset.quantity, -1 * u.pix)

def test_update_celestial_unitless():
trsfm = generate_celestial_transform(
crpix=[0, 0],
crval=[0, 0],
cdelt=[1, 1],
pc=np.identity(2))

update_celestial_transform_parameters(
trsfm,
[1, 1],
[0.5, 0.5],
np.identity(2),
[1, 1],
180)

shift1 = trsfm.left.left.left.left.right
assert u.allclose(shift1.offset.value, -1)


def test_varying_transform_no_lon_pole_unit():
varying_matrix_lt = [rotation_matrix(a)[:2, :2] for a in np.linspace(0, 90, 10)] * u.pix
# Without a lon_pole passed, the transform was originally setting
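
The new tests assert on `trsfm.left.left.left.left.right` because chaining models with `|` builds a left-associative binary tree of CompoundModel nodes, and the `Shift` that carries `-crpix` ends up at that leaf. A small illustrative sketch of that traversal; the three-model chain below is made up, not the transform under test:

```python
import astropy.units as u
from astropy.modeling import models

# '|' is left-associative, so this builds ((Shift | Scale) | Shift).
compound = models.Shift(-1 * u.pix) | models.Scale(2) | models.Shift(3 * u.pix)

print(compound.left)       # the inner CompoundModel (Shift | Scale)
print(compound.left.left)  # the first Shift
assert u.allclose(compound.left.left.offset.quantity, -1 * u.pix)
```
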