diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d996605..24cf16c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: toxdeps: tox-pypi-filter posargs: -n auto envs: | - - linux: py311 + - linux: py312 secrets: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} @@ -42,7 +42,7 @@ jobs: posargs: -n auto envs: | - windows: py310 - - macos: py39 + - macos: py311 secrets: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} @@ -50,7 +50,7 @@ jobs: needs: [core] uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@main with: - default_python: "3.9" + default_python: "3.12" submodules: false pytest: false toxdeps: tox-pypi-filter @@ -69,13 +69,12 @@ jobs: needs: [docs] uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@main with: - default_python: "3.9" submodules: false coverage: codecov toxdeps: tox-pypi-filter posargs: -n auto --dist loadgroup envs: | - - linux: py39-online + - linux: py312-online secrets: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} @@ -95,7 +94,7 @@ jobs: needs: [test] uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish_pure_python.yml@main with: - python-version: "3.10" + python-version: "3.12" test_extras: "all,tests" test_command: 'pytest -p no:warnings --doctest-rst -m "not mpl_image_compare" --pyargs sunpy' submodules: false diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4244c13..4152678 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,12 +1,11 @@ -exclude: ".*(.fits|.fts|.fit|.txt|.csv)$" repos: - repo: https://github.com/myint/docformatter rev: v1.7.5 hooks: - id: docformatter - args: [--in-place, --pre-summary-newline, --make-summary-multi] + args: ["--in-place", "--pre-summary-newline", "--make-summary-multi"] - repo: https://github.com/myint/autoflake - rev: v2.2.1 + rev: v2.3.1 hooks: - id: autoflake args: @@ -17,29 +16,31 @@ repos: ] exclude: ".*(.fits|.fts|.fit|.txt|tca.*|extern.*|.rst|.md|docs/conf.py)$" - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.1.8" + rev: "v0.4.4" hooks: - id: ruff args: ["--fix", "--unsafe-fixes"] - - repo: https://github.com/psf/black - rev: 23.12.0 - hooks: - - id: black - - repo: https://github.com/PyCQA/isort - rev: 5.13.2 - hooks: - - id: isort - - repo: https://github.com/pre-commit/mirrors-prettier - rev: v3.1.0 - hooks: - - id: prettier + - id: ruff-format - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: check-ast - id: check-case-conflict - id: trailing-whitespace + exclude: ".*(.fits|.fts|.fit|.txt|.csv)$" - id: mixed-line-ending + exclude: ".*(.fits|.fts|.fit|.txt|.csv)$" - id: end-of-file-fixer + exclude: ".*(.fits|.fts|.fit|.txt|.csv)$" - id: check-yaml - id: debug-statements + - repo: https://github.com/codespell-project/codespell + rev: v2.2.6 + hooks: + - id: codespell + additional_dependencies: + - tomli + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v4.0.0-alpha.8 + hooks: + - id: prettier diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 27e2586..8b749dd 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -234,8 +234,8 @@ Features - Added a function (``aiapy.calibrate.normalize_exposure``) to normalize an image by its exposure time. (`#78 `__) - :func:`aiapy.calibrate.degradation` can now accept `~astropy.time.Time` objects with length greater than 1. This makes it easier to compute the channel degradation over long intervals. 
(`#80 `__) -- Citation information for `aiapy` is now available from `aiapy.__citation__`. (`#82 `__) -- The pointing table can now be passsed in as a keyword argument to :func:`aiapy.calibrate.update_pointing`. +- Citation information for `aiapy` is now available from ``aiapy.__citation__``. (`#82 `__) +- The pointing table can now be passed in as a keyword argument to :func:`aiapy.calibrate.update_pointing`. Added a :func:`aiapy.calibrate.util.get_pointing_table` to retrieve the 3-hour pointing table from JSOC over a given time interval. (`#84 `__) Bug Fixes diff --git a/aiapy/__init__.py b/aiapy/__init__.py index 86e5839..7fb005a 100644 --- a/aiapy/__init__.py +++ b/aiapy/__init__.py @@ -1,5 +1,5 @@ -from pathlib import Path from itertools import compress +from pathlib import Path from .version import version as __version__ diff --git a/aiapy/calibrate/__init__.py b/aiapy/calibrate/__init__.py index ef35167..b6d071d 100644 --- a/aiapy/calibrate/__init__.py +++ b/aiapy/calibrate/__init__.py @@ -1,8 +1,9 @@ """ Subpackage for calibrating AIA imaging data. """ -from .meta import * # NOQA -from .prep import * # NOQA -from .spikes import * # NOQA -from .transform import * # NOQA -from .uncertainty import * # NOQA + +from .meta import * # NOQA: F403 +from .prep import * # NOQA: F403 +from .spikes import * # NOQA: F403 +from .transform import * # NOQA: F403 +from .uncertainty import * # NOQA: F403 diff --git a/aiapy/calibrate/meta.py b/aiapy/calibrate/meta.py index 67a3f05..25e0f96 100644 --- a/aiapy/calibrate/meta.py +++ b/aiapy/calibrate/meta.py @@ -1,6 +1,7 @@ """ Functions for updating/fixing header keywords. """ + import copy import warnings @@ -25,13 +26,20 @@ def fix_observer_location(smap): keywords using the heliocentric aries ecliptic keywords, ``HAEX_OBS, HAEY_OBS, HAEZ_OBS``. - .. note:: `~sunpy.map.sources.AIAMap` already accounts for the inaccurate - HGS keywords by using the HAE keywords to construct the - derived observer location. + .. note:: + + `~sunpy.map.sources.AIAMap` already accounts for the inaccurate + HGS keywords by using the HAE keywords to construct the + derived observer location. Parameters ---------- - smap : `~sunpy.map.source.sdo.AIAMap` + smap : `~sunpy.map.sources.AIAMap` + Input map. + + Returns + ------- + `~sunpy.map.sources.AIAMap` """ # Create observer coordinate from HAE coordinates coord = SkyCoord( @@ -48,48 +56,60 @@ def fix_observer_location(smap): new_meta["hglt_obs"] = coord.lat.to(u.degree).value new_meta["dsun_obs"] = coord.radius.to(u.m).value - return smap._new_instance(smap.data, new_meta, plot_settings=smap.plot_settings, mask=smap.mask) + return smap._new_instance(smap.data, new_meta, plot_settings=smap.plot_settings, mask=smap.mask) # NOQA: SLF001 def update_pointing(smap, *, pointing_table=None): """ - Update pointing information in the `smap` header. + Update the pointing information in the input map header. - This function updates the pointing information in `smap` by + This function updates the pointing information in ``smap`` by updating the ``CRPIX1, CRPIX2, CDELT1, CDELT2, CROTA2`` keywords - in the header using the information provided in `pointing_table`. - If `pointing_table` is not specified, the 3-hour pointing + in the header using the information provided in ``pointing_table``. + If ``pointing_table`` is not specified, the 3-hour pointing information is queried from the `JSOC `_. - .. note:: The method removes any ``PCi_j`` matrix keys in the header and - updates the ``CROTA2`` keyword. + .. 
note:: + + The method removes any ``PCi_j`` matrix keys in the header and + updates the ``CROTA2`` keyword. + + .. note:: + + If correcting pointing information for a large number of images, + it is strongly recommended to query the table once for the + appropriate interval and then pass this table in rather than + executing repeated queries. + + .. warning:: - .. note:: If correcting pointing information for a large number of images, - it is strongly recommended to query the table once for the - appropriate interval and then pass this table in rather than - executing repeated queries. + This function is only intended to be used for full-disk images + at the full resolution of 4096x4096 pixels. It will raise a + ``ValueError`` if the input map does not meet these criteria. Parameters ---------- - smap : `~sunpy.map.sources.sdo.AIAMap` + smap : `~sunpy.map.sources.AIAMap` + Input map. pointing_table : `~astropy.table.QTable`, optional Table of pointing information. If not specified, the table will be retrieved from JSOC. Returns ------- - `~sunpy.map.sources.sdo.AIAMap` + `~sunpy.map.sources.AIAMap` See Also -------- - aiapy.calibrate.util.get_pointing_table + `aiapy.calibrate.util.get_pointing_table` """ - # This function can only be applied to full-resolution, full-frame images if not contains_full_disk(smap): - raise ValueError("Input must be a full disk image.") + msg = "Input must be a full disk image." + raise ValueError(msg) shape_full_frame = (4096, 4096) - if not all(d == (s * u.pixel) for d, s in zip(smap.dimensions, shape_full_frame)): - raise ValueError(f"Input must be at the full resolution of {shape_full_frame}") + if not all(d == (s * u.pixel) for d, s in zip(smap.dimensions, shape_full_frame, strict=True)): + msg = f"Input must be at the full resolution of {shape_full_frame}" + raise ValueError(msg) if pointing_table is None: # Make range wide enough to get closest 3-hour pointing pointing_table = get_pointing_table(smap.date - 12 * u.h, smap.date + 12 * u.h) @@ -116,11 +136,12 @@ def update_pointing(smap, *, pointing_table=None): t_obs = astropy.time.Time(t_obs) t_obs_in_interval = np.logical_and(t_obs >= pointing_table["T_START"], t_obs < pointing_table["T_STOP"]) if not t_obs_in_interval.any(): - raise IndexError( + msg = ( f"No valid entries for {t_obs} in pointing table " f'with first T_START date of {pointing_table[0]["T_START"]} ' - f'and a last T_STOP date of {pointing_table[-1]["T_STOP"]}.', + f'and a last T_STOP date of {pointing_table[-1]["T_STOP"]}.' ) + raise IndexError(msg) i_nearest = np.where(t_obs_in_interval)[0][0] w_str = f"{smap.wavelength.to(u.angstrom).value:03.0f}" new_meta = copy.deepcopy(smap.meta) @@ -167,4 +188,4 @@ def update_pointing(smap, *, pointing_table=None): new_meta.pop("PC1_2") new_meta.pop("PC2_1") new_meta.pop("PC2_2") - return smap._new_instance(smap.data, new_meta, plot_settings=smap.plot_settings, mask=smap.mask) + return smap._new_instance(smap.data, new_meta, plot_settings=smap.plot_settings, mask=smap.mask) # NOQA: SLF001 diff --git a/aiapy/calibrate/prep.py b/aiapy/calibrate/prep.py index 4745cdd..2d130f5 100644 --- a/aiapy/calibrate/prep.py +++ b/aiapy/calibrate/prep.py @@ -1,6 +1,7 @@ """ Functions for calibrating AIA images. 
""" + import warnings import astropy.units as u @@ -20,13 +21,13 @@ @add_common_docstring(rotation_function_names=_rotation_function_names) def register(smap, *, missing=None, order=3, method="scipy"): """ - Processes a full-disk level 1 `~sunpy.map.sources.sdo.AIAMap` into a level - 1.5 `~sunpy.map.sources.sdo.AIAMap`. + Processes a full-disk level 1 `~sunpy.map.sources.AIAMap` into a level + 1.5 `~sunpy.map.sources.AIAMap`. Rotates, scales and translates the image so that solar North is aligned with the y axis, each pixel is 0.6 arcsec across, and the center of the Sun is at the center of the image. The actual transformation is done by - the `~sunpy.map.mapbase.GenericMap.rotate` method. + the `~sunpy.map.GenericMap.rotate` method. .. warning:: @@ -48,7 +49,7 @@ def register(smap, *, missing=None, order=3, method="scipy"): Parameters ---------- - smap : `~sunpy.map.sources.sdo.AIAMap` or `~sunpy.map.sources.sdo.HMIMap` + smap : `~sunpy.map.sources.AIAMap` or `~sunpy.map.sources.sdo.HMIMap` A `~sunpy.map.Map` containing a full-disk AIA image or HMI magnetogram missing : `float`, optional If there are missing values after the interpolation, they will be @@ -61,17 +62,19 @@ def register(smap, *, missing=None, order=3, method="scipy"): Returns ------- - `~sunpy.map.sources.sdo.AIAMap` or `~sunpy.map.sources.sdo.HMIMap`: - A level 1.5 copy of `~sunpy.map.sources.sdo.AIAMap` or + `~sunpy.map.sources.AIAMap` or `~sunpy.map.sources.sdo.HMIMap`: + A level 1.5 copy of `~sunpy.map.sources.AIAMap` or `~sunpy.map.sources.sdo.HMIMap`. """ # This implementation is taken directly from the `aiaprep` method in # sunpy.instr.aia.aiaprep under the terms of the BSD 2 Clause license. # See licenses/SUNPY.rst. - if not isinstance(smap, (AIAMap, HMIMap)): - raise ValueError("Input must be an AIAMap or HMIMap.") + if not isinstance(smap, AIAMap | HMIMap): + msg = "Input must be an AIAMap or HMIMap." + raise TypeError(msg) if not contains_full_disk(smap): - raise ValueError("Input must be a full disk image.") + msg = "Input must be a full disk image." + raise ValueError(msg) if smap.processing_level is None or smap.processing_level > 1: warnings.warn( "Image registration should only be applied to level 1 data", @@ -121,7 +124,7 @@ def correct_degradation(smap, *, correction_table=None, calibration_version=None Parameters ---------- - smap : `~sunpy.map.sources.sdo.AIAMap` + smap : `~sunpy.map.sources.AIAMap` Map to be corrected. correction_table : `~astropy.table.Table` or `str`, optional Table of correction parameters or path to correction table file. @@ -138,7 +141,7 @@ def correct_degradation(smap, *, correction_table=None, calibration_version=None Returns ------- - `~sunpy.map.sources.sdo.AIAMap` + `~sunpy.map.sources.AIAMap` See Also -------- @@ -167,20 +170,20 @@ def degradation( The correction factor to account for the time-varying degradation of the telescopes is given by a normalization to the calibration epoch - closest to `obstime` and an interpolation within that epoch to - `obstime`, + closest to ``obstime`` and an interpolation within that epoch to + ``obstime``, .. math:: \frac{A_{eff}(t_{e})}{A_{eff}(t_0)}(1 + p_1\delta t + p_2\delta t^2 + p_3\delta t^3) where :math:`A_{eff}(t_e)` is the effective area calculated at the - calibration epoch for `obstime`, :math:`A_{eff}(t_0)` is the effective + calibration epoch for ``obstime``, :math:`A_{eff}(t_0)` is the effective area at the first calibration epoch (i.e. 
at launch), :math:`p_1,p_2,p_3` are the interpolation coefficients for the - `obstime` epoch, and :math:`\delta t` is the difference between the - start time of the epoch and `obstime`. - All calibration terms are taken from the `aia.response` series in JSOC + ``obstime`` epoch, and :math:`\delta t` is the difference between the + start time of the epoch and ``obstime``. + All calibration terms are taken from the ``aia.response`` series in JSOC or read from the table input by the user. .. note:: This function is adapted directly from the @@ -206,8 +209,7 @@ def degradation( See Also -------- - degradation - aiapy.calibrate.get_correction_table + aiapy.calibrate.util.get_correction_table aiapy.response.Channel.wavelength_response aiapy.response.Channel.eve_correction """ diff --git a/aiapy/calibrate/spikes.py b/aiapy/calibrate/spikes.py index 1ee40f6..b63c07d 100644 --- a/aiapy/calibrate/spikes.py +++ b/aiapy/calibrate/spikes.py @@ -37,35 +37,37 @@ def respike(smap, *, spikes=None): .. note:: If the image series of interest is large, it is advised to obtain the spike data via JSOC externally and specify them - via the `spikes` keyword argument. To retrieve the coordinates + via the ``spikes`` keyword argument. To retrieve the coordinates of the positions of the spikes use the function `aiapy.calibrate.fetch_spikes`. Parameters ---------- - smap : `~sunpy.map.sources.sdo.AIAMap` + smap : `~sunpy.map.sources.AIAMap` Level 1 AIA image. This can be a cutout or a full-frame image. spikes : array-like, with shape ``(2, N)``, optional Tuple of pixel positions of the spikes in the coordinate system of - the level 1 AIA image in `smap` (first entry) and original intensity + the level 1 AIA image in ``smap`` (first entry) and original intensity values (second entry). This can be calculated using `fetch_spikes`. If not specified, the spike positions and intensities are automatically queried from the JSOC. Returns ------- - `~sunpy.map.sources.sdo.AIAMap` - A level 0.5 version of `smap` with the spike data re-inserted at the - appropriate pixels + `~sunpy.map.sources.AIAMap` + A level 0.5 version of ``smap`` with the spike data re-inserted at the + appropriate pixels. See Also -------- - fetch_spikes + `fetch_spikes` """ if not isinstance(smap, AIAMap): - raise ValueError("Input must be an AIAMap.") + msg = "Input must be an AIAMap." + raise TypeError(msg) if smap.meta["lvl_num"] != 1.0: - raise ValueError("Can only apply respike procedure to level 1 data") + msg = "Can only apply respike procedure to level 1 data" + raise ValueError(msg) # Approximate check to make sure the input map has not been interpolated # in any way. Note that the level 1 plate scales are not exactly 0.6 # ''/pixel, but should not differ by more than 0.1%. This is only a @@ -88,7 +90,8 @@ def respike(smap, *, spikes=None): # Or better yet, why can't the logic below just handle the case of # no spikes? if smap.meta["nspikes"] == 0: - raise ValueError("No spikes were present in the level 0 data.") + msg = "No spikes were present in the level 0 data." + raise ValueError(msg) if spikes is None: coords, values = fetch_spikes(smap, as_coords=False) else: @@ -102,7 +105,7 @@ def respike(smap, *, spikes=None): new_meta["lvl_num"] = 0.5 new_meta["comments"] = f"Respike applied; {values.shape[0]} hot pixels reinserted." 
new_meta["nspikes"] = 0 - return smap._new_instance( + return smap._new_instance( # NOQA: SLF001 new_data, new_meta, plot_settings=smap.plot_settings, @@ -131,7 +134,7 @@ def fetch_spikes(smap, *, as_coords=False): ------- `~astropy.coordinates.SkyCoord` or `~sunpy.map.mapbase.PixelPair` Locations of the removed spikes. By default, these are represented as - pixel coordinates. If `as_coords=True`, the locations are returned in + pixel coordinates. If ``as_coords=True``, the locations are returned in the projected coordinate system of the image. array-like Original intensity values of the spikes @@ -151,7 +154,7 @@ def fetch_spikes(smap, *, as_coords=False): # If this is a cutout, need to transform the full-frame pixel # coordinates into the cutout pixel coordinates and then only select # those in the FOV of the cutout - if not all(d == (s * u.pixel) for d, s in zip(smap.dimensions, shape_full_frame)): + if not all(d == (s * u.pixel) for d, s in zip(smap.dimensions, shape_full_frame, strict=True)): # Construct WCS for full frame wcs_full_frame = copy.deepcopy(smap.wcs) wcs_full_frame.wcs.crval = np.array([0.0, 0.0]) diff --git a/aiapy/calibrate/tests/test_prep.py b/aiapy/calibrate/tests/test_prep.py index 297eacb..6a13fb7 100644 --- a/aiapy/calibrate/tests/test_prep.py +++ b/aiapy/calibrate/tests/test_prep.py @@ -78,7 +78,7 @@ def test_register_unsupported_maps(aia_171_map, non_sdo_map): with pytest.raises(ValueError, match="Input must be a full disk image."): register(original_cutout) # A Map besides AIA or HMI - with pytest.raises(ValueError, match="Input must be an AIAMap"): + with pytest.raises(TypeError, match="Input must be an AIAMap"): register(non_sdo_map) @@ -95,7 +95,7 @@ def test_register_level_15(lvl_15_map): AiapyUserWarning, match="Image registration should only be applied to level 1 data", ): - register(lvl_15_map._new_instance(lvl_15_map.data, new_meta)) + register(lvl_15_map._new_instance(lvl_15_map.data, new_meta)) # NOQA: SLF001 @pytest.mark.parametrize( @@ -160,7 +160,6 @@ def test_correct_degradation(aia_171_map, correction_table, version): ), ], ) -@pytest.mark.filterwarnings("ignore:Multiple valid epochs for") def test_degradation(correction_table, version, time_correction_truth): # NOTE: this just tests an expected result from aiapy, not necessarily an # absolutely correct result. 
It was calculated for the above time and @@ -223,7 +222,6 @@ def test_degradation(correction_table, version, time_correction_truth): ), ], ) -@pytest.mark.filterwarnings("ignore:Multiple valid epochs for") def test_degradation_all_wavelengths(wavelength, result): obstime = astropy.time.Time("2015-01-01T00:00:00", scale="utc") time_correction = degradation( @@ -258,7 +256,7 @@ def test_degradation_time_array(): calibration_version=8, ) assert time_correction.shape == obstime.shape - for o, tc in zip(obstime, time_correction): + for o, tc in zip(obstime, time_correction, strict=True): assert tc == degradation(94 * u.angstrom, o, correction_table=correction_table, calibration_version=8) diff --git a/aiapy/calibrate/tests/test_spikes.py b/aiapy/calibrate/tests/test_spikes.py index d95205c..d8b9c0a 100644 --- a/aiapy/calibrate/tests/test_spikes.py +++ b/aiapy/calibrate/tests/test_spikes.py @@ -11,7 +11,6 @@ from aiapy.util import AiapyUserWarning -@pytest.mark.remote_data() @pytest.fixture() def despiked_map(): # Need an actual 4K-by-4K map to do the spike replacement @@ -20,13 +19,11 @@ def despiked_map(): ) -@pytest.mark.remote_data() @pytest.fixture() def respiked_map(despiked_map): return respike(despiked_map) -@pytest.mark.remote_data() @pytest.fixture() def spikes(despiked_map): return fetch_spikes(despiked_map) @@ -35,7 +32,7 @@ def spikes(despiked_map): @pytest.mark.remote_data() def test_respike(respiked_map, spikes): coords, values = spikes - for x, y, v in zip(coords.x.value, coords.y.value, values): + for x, y, v in zip(coords.x.value, coords.y.value, values, strict=True): assert v == respiked_map.data[int(y), int(x)] @@ -70,24 +67,27 @@ def test_cutout(respiked_map, despiked_map): @pytest.mark.remote_data() @pytest.mark.parametrize( - ("key", "value", "match"), + ("key", "value", "error", "match"), [ - ("lvl_num", 1.5, "Can only apply respike procedure to level 1 data"), - ("nspikes", 0, "No spikes were present in the level 0 data."), - ("instrume", "not AIA", "Input must be an AIAMap."), + ("lvl_num", 1.5, ValueError, "Can only apply respike procedure to level 1 data"), + ("nspikes", 0, ValueError, "No spikes were present in the level 0 data."), + ("instrume", "not AIA", TypeError, "Input must be an AIAMap."), ], ) -def test_exceptions(despiked_map, key, value, match): +def test_exceptions(despiked_map, key, value, error, match): new_meta = copy.deepcopy(despiked_map.meta) new_meta[key] = value - with pytest.raises(ValueError, match=match): + with pytest.raises(error, match=match): respike(sunpy.map.Map(despiked_map.data, new_meta)) @pytest.mark.remote_data() def test_resample_warning(despiked_map): despiked_map_resample = despiked_map.resample((512, 512) * u.pixel) - with pytest.warns(AiapyUserWarning): + with ( + pytest.warns(AiapyUserWarning, match="is significantly different from the expected level 1 plate scale"), + pytest.warns(ResourceWarning), + ): respike(despiked_map_resample) diff --git a/aiapy/calibrate/tests/test_util.py b/aiapy/calibrate/tests/test_util.py index 77ecd86..4538df4 100644 --- a/aiapy/calibrate/tests/test_util.py +++ b/aiapy/calibrate/tests/test_util.py @@ -134,5 +134,5 @@ def test_error_table(error_table): def test_invalid_error_table_input(): - with pytest.raises(ValueError, match="error_table must be a file path, an existing table, or None."): + with pytest.raises(TypeError, match="error_table must be a file path, an existing table, or None"): get_error_table(error_table=-1) diff --git a/aiapy/calibrate/transform.py b/aiapy/calibrate/transform.py 
index 4e44977..d14aafe 100644 --- a/aiapy/calibrate/transform.py +++ b/aiapy/calibrate/transform.py @@ -10,10 +10,10 @@ handles_image_nans=False, handles_nan_missing=True, ) -def _rotation_cupy(image, matrix, shift, order, missing, clip): +def _rotation_cupy(image, matrix, shift, order, missing, clip): # NOQA: ARG001 """ * Rotates using `cupyx.scipy.ndimage.affine_transform` from `cupy `__ - * Coverts from a numpy array to a cupy array and then back again. + * Converts from a numpy array to a cupy array and then back again. * The ``order`` parameter is the order of the spline interpolation, and ranges from 0 to 5. * The ``mode`` parameter for :func:`~cupyx.scipy.ndimage.affine_transform` is fixed to @@ -23,8 +23,9 @@ def _rotation_cupy(image, matrix, shift, order, missing, clip): import cupy import cupyx.scipy.ndimage except ImportError as e: + msg = "cupy or cupy-cuda* (pre-compiled for each cuda version) is required to use this rotation method." raise ImportError( - "cupy or cupy-cuda* (pre-compiled for each cuda version) is required to use this rotation method.", + msg, ) from e rotated_image = cupyx.scipy.ndimage.affine_transform( cupy.array(image).T, diff --git a/aiapy/calibrate/uncertainty.py b/aiapy/calibrate/uncertainty.py index 50d4f15..5f67d01 100644 --- a/aiapy/calibrate/uncertainty.py +++ b/aiapy/calibrate/uncertainty.py @@ -1,6 +1,7 @@ """ Estimate uncertainty on intensities. """ + import astropy.units as u import numpy as np @@ -118,7 +119,8 @@ def estimate_error( # Photometric calibration if include_eve and include_preflight: - raise ValueError("Cannot include both EVE and pre-flight correction.") + msg = "Cannot include both EVE and pre-flight correction." + raise ValueError(msg) calib = 0 if include_eve: calib = error_table["EVEERR"] diff --git a/aiapy/calibrate/util.py b/aiapy/calibrate/util.py index 9abd126..5d7ee9b 100644 --- a/aiapy/calibrate/util.py +++ b/aiapy/calibrate/util.py @@ -1,6 +1,7 @@ """ Utilities for computing intensity corrections. """ + import os import pathlib import warnings @@ -13,12 +14,12 @@ from astropy.table import QTable from astropy.time import Time from erfa.core import ErfaWarning +from sunpy import log from sunpy.data import manager from sunpy.net import attrs, jsoc from aiapy import _SSW_MIRRORS from aiapy.util.decorators import validate_channel -from aiapy.util.exceptions import AiapyUserWarning __all__ = ["get_correction_table", "get_pointing_table", "get_error_table"] @@ -48,11 +49,11 @@ def get_correction_table(*, correction_table=None): This function returns a table of parameters for estimating the time-dependent degradation of the instrument. By default, this table - is queried from `aia.response` series in - `JSOC `_. The correction table can also be read - from a file by passing a filepath to `correction_table`. These files are + is queried from ``aia.response`` series in + `JSOC `__. The correction table can also be read + from a file by passing a filepath to ``correction_table``. These files are typically included in the SDO tree of an SSW installation in - `$SSW/sdo/aia/response/` with filenames like `aia_V*_response_table.txt`. + ``$SSW/sdo/aia/response/`` with filenames like ``aia_V*_response_table.txt``. 
Parameters ---------- @@ -71,10 +72,11 @@ def get_correction_table(*, correction_table=None): if isinstance(correction_table, astropy.table.QTable): return correction_table if correction_table is not None: - if isinstance(correction_table, (str, pathlib.Path)): + if isinstance(correction_table, str | pathlib.Path): table = QTable(astropy.io.ascii.read(correction_table)) else: - raise ValueError("correction_table must be a file path, an existing table, or None.") + msg = "correction_table must be a file path, an existing table, or None." + raise ValueError(msg) else: # NOTE: the [!1=1!] disables the drms PrimeKey logic and enables # the query to find records that are ordinarily considered @@ -116,7 +118,7 @@ def get_correction_table(*, correction_table=None): def _select_epoch_from_correction_table(channel: u.angstrom, obstime, table, *, version=None): """ Return correction table with only the first epoch and the epoch in which - `obstime` falls and for only one given calibration version. + ``obstime`` falls and for only one given calibration version. Parameters ---------- @@ -142,15 +144,14 @@ def _select_epoch_from_correction_table(channel: u.angstrom, obstime, table, *, # Select the epoch for the given observation time obstime_in_epoch = np.logical_and(obstime >= table["T_START"], obstime < table["T_STOP"]) if not obstime_in_epoch.any(): - raise ValueError(f"No valid calibration epoch for {obstime}") + msg = f"No valid calibration epoch for {obstime}" + raise ValueError(msg) # NOTE: In some cases, there may be multiple entries for a single epoch. We want to # use the most up-to-date one. i_epoch = np.where(obstime_in_epoch)[0] if i_epoch.shape[0] > 1: - warnings.warn( + log.debug( f"Multiple valid epochs for {obstime}. Using the most recent one", - AiapyUserWarning, - stacklevel=3, ) # Create new table with only first and obstime epochs return QTable(table[[0, i_epoch[-1]]]) @@ -162,7 +163,7 @@ def get_pointing_table(start, end): This function queries `JSOC `__ for the 3-hourly master pointing table (MPT) in the interval defined by - `start` and `end`. + ``start`` and ``end``. The 3-hourly MPT entries are computed from limb fits of images with ``T_OBS`` between ``T_START`` and ``T_STOP``. 
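The correction-table notes above all point at the same usage pattern: query JSOC once with ``get_correction_table`` and pass the result into every subsequent call. A minimal sketch of that pattern, assuming only the signatures shown in this diff (the observation time is illustrative, borrowed from the test values elsewhere in this patch)::

    import astropy.time
    import astropy.units as u

    from aiapy.calibrate import degradation
    from aiapy.calibrate.util import get_correction_table

    # One JSOC query for the correction table, reused across channels and times.
    correction_table = get_correction_table()
    obstime = astropy.time.Time("2015-01-01T00:00:00", scale="utc")
    factor = degradation(171 * u.angstrom, obstime, correction_table=correction_table)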
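Likewise, the ``update_pointing`` note earlier in this diff strongly recommends querying the pointing table once when correcting many images. With ``get_pointing_table`` as documented here, that looks roughly like the following sketch (the file glob is hypothetical; the 12-hour padding mirrors the default lookup inside ``update_pointing``)::

    import astropy.units as u
    import sunpy.map

    from aiapy.calibrate import update_pointing
    from aiapy.calibrate.util import get_pointing_table

    maps = sunpy.map.Map("aia_lev1_*.fits")  # hypothetical full-disk level 1 files
    # One query to the 3-hour master pointing table covering the whole interval.
    pointing_table = get_pointing_table(maps[0].date - 12 * u.h, maps[-1].date + 12 * u.h)
    maps = [update_pointing(m, pointing_table=pointing_table) for m in maps]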
@@ -200,7 +201,8 @@ def get_pointing_table(start, end): # If there's no pointing information available between these times, # JSOC will raise a cryptic KeyError # (see https://github.com/LM-SAL/aiapy/issues/71) - raise RuntimeError(f"Could not find any pointing information between {start} and {end}") + msg = f"Could not find any pointing information between {start} and {end}" + raise RuntimeError(msg) table["T_START"] = Time(table["T_START"], scale="utc") table["T_STOP"] = Time(table["T_STOP"], scale="utc") for c in table.colnames: @@ -212,9 +214,8 @@ def get_pointing_table(start, end): table[c].unit = "degree" # Remove masking on columns with pointing parameters for c in table.colnames: - if any(n in c for n in ["X0", "Y0", "IMSCALE", "INSTROT"]): - if hasattr(table[c], "mask"): - table[c] = table[c].filled(np.nan) + if any(n in c for n in ["X0", "Y0", "IMSCALE", "INSTROT"]) and hasattr(table[c], "mask"): + table[c] = table[c].filled(np.nan) return table @@ -225,12 +226,13 @@ def get_error_table(error_table=None): os.environ["PARFIVE_DISABLE_RANGE"] = "1" error_table = fetch_error_table() os.environ.pop("PARFIVE_DISABLE_RANGE") - if isinstance(error_table, (str, pathlib.Path)): + if isinstance(error_table, str | pathlib.Path): table = astropy.io.ascii.read(error_table) elif isinstance(error_table, QTable): table = error_table else: - raise ValueError("error_table must be a file path, an existing table, or None.") + msg = f"error_table must be a file path, an existing table, or None, not {type(error_table)}" + raise TypeError(msg) table = QTable(table) table["DATE"] = Time(table["DATE"], scale="utc") table["T_START"] = Time(table["T_START"], scale="utc") diff --git a/aiapy/conftest.py b/aiapy/conftest.py index 5b31fc2..d95027f 100644 --- a/aiapy/conftest.py +++ b/aiapy/conftest.py @@ -4,15 +4,16 @@ import pytest import sunpy.data.test import sunpy.map +from sunpy import log ALL_CHANNELS = (94, 131, 171, 193, 211, 304, 335, 1600, 1700, 4500) * u.angstrom CHANNELS = (94, 131, 171, 193, 211, 304, 335) * u.angstrom # Force MPL to use non-gui backends for testing. with contextlib.suppress(ImportError): - import matplotlib + import matplotlib as mpl - matplotlib.use("Agg") + mpl.use("Agg") @pytest.fixture() @@ -37,3 +38,33 @@ def psf_94(channels): import aiapy.psf return aiapy.psf.psf(channels[0], use_preflightcore=True) + + +def idl_available(): + try: + import hissw + + hissw.Environment().run("") + return True # NOQA: TRY300 + except Exception as e: # NOQA: BLE001 + log.warning(e) + return False + + +@pytest.fixture(scope="session") +def idl_environment(): + if idl_available(): + import hissw + + return hissw.Environment( + ssw_packages=["sdo/aia"], + ssw_paths=["aia"], + ) + pytest.skip( + "A working IDL installation is not available. 
You will not be able to run portions of the test suite.", + ) + + +@pytest.fixture(scope="session") +def ssw_home(idl_environment): + return idl_environment.ssw_home if idl_available() else None diff --git a/aiapy/data/__init__.py b/aiapy/data/__init__.py index 3c316cf..e69de29 100644 --- a/aiapy/data/__init__.py +++ b/aiapy/data/__init__.py @@ -1,3 +0,0 @@ -from aiapy.data._sample import download_sample_data - -__all__ = ["download_sample_data"] diff --git a/aiapy/data/_sample.py b/aiapy/data/_sample.py index 5b1a300..5fae548 100644 --- a/aiapy/data/_sample.py +++ b/aiapy/data/_sample.py @@ -1,9 +1,10 @@ +import os from pathlib import Path from urllib.parse import urljoin from parfive import SessionConfig from sunpy import log -from sunpy.util.config import get_and_create_sample_dir +from sunpy.util.config import _is_writable_dir, get_and_create_sample_dir from sunpy.util.parfive_helpers import Downloader _BASE_URLS = ( @@ -45,54 +46,91 @@ def _download_sample_data(base_url, sample_files, overwrite): def _retry_sample_data(results, new_url_base): + # In case we have a broken file on disk, overwrite it. dl = Downloader(overwrite=True, progress=True, config=DOWNLOAD_CONFIG) for err in results.errors: file_name = err.url.split("/")[-1] log.debug(f"Failed to download {_SAMPLE_FILES[file_name]} from {err.url}: {err.exception}") + # Update the url to a mirror and requeue the file. new_url = urljoin(new_url_base, file_name) log.debug(f"Attempting redownload of {_SAMPLE_FILES[file_name]} using {new_url}") dl.enqueue_file(new_url, filename=err.filepath_partial) extra_results = dl.download() + # Make a new results object which contains all the successful downloads + # from the previous results object and this retry, and all the errors from + # this retry. new_results = results + extra_results - new_results._errors = extra_results._errors + new_results._errors = extra_results._errors # NOQA: SLF001 return new_results def _handle_final_errors(results): for err in results.errors: file_name = err.url.split("/")[-1] - log.debug(f"Failed to download {_SAMPLE_FILES[file_name]} from {err.url}: {err.exception}") - log.error(f"Failed to download {_SAMPLE_FILES[file_name]} from all mirrors," "the file will not be available.") + log.debug( + f"Failed to download {_SAMPLE_FILES[file_name]} from {err.url}: {err.exception}", + ) + log.error( + f"Failed to download {_SAMPLE_FILES[file_name]} from all mirrors," "the file will not be available.", + ) + + +def _get_sampledata_dir(): + # Workaround for tox only. This is not supported as a user option + sampledata_dir = os.environ.get("SUNPY_SAMPLEDIR", False) + if sampledata_dir: + sampledata_dir = Path(sampledata_dir).expanduser().resolve() + _is_writable_dir(sampledata_dir) + else: + # Creating the directory for sample files to be downloaded + sampledata_dir = Path(get_and_create_sample_dir()) + return sampledata_dir -def download_sample_data(*, overwrite=False): +def _get_sample_files(filename_list, *, no_download=False, force_download=False): """ - Download all sample data at once. This will overwrite any existing files. + Returns a list of disk locations corresponding to a list of filenames for + sample data, downloading the sample data files as necessary. Parameters ---------- - overwrite : `bool` - Overwrite existing sample data. + filename_list : `list` of `str` + List of filenames for sample data + no_download : `bool` + If ``True``, do not download any files, even if they are not present. + Default is ``False``. 
+ force_download : `bool` + If ``True``, download all files, and overwrite any existing ones. + Default is ``False``. + + Returns + ------- + `list` of `pathlib.Path` + List of disk locations corresponding to the list of filenames. An entry + will be ``None`` if ``no_download == True`` and the file is not present. + + Raises + ------ + RuntimeError + Raised if any of the files cannot be downloaded from any of the mirrors. """ - sampledata_dir = Path(get_and_create_sample_dir()).parent / Path("aiapy") - already_downloaded = [] - to_download = [] - for url_file_name in _SAMPLE_FILES.keys(): - fname = sampledata_dir / url_file_name - # We want to avoid calling download if we already have all the files. - if fname.exists() and not overwrite: - already_downloaded.append(fname) - else: - to_download.append((url_file_name, fname)) - if to_download: - results = _download_sample_data(_BASE_URLS[0], to_download, overwrite=overwrite) + sampledata_dir = _get_sampledata_dir() + fullpaths = [sampledata_dir / fn for fn in filename_list] + if no_download: + fullpaths = [fp if fp.exists() else None for fp in fullpaths] else: - return already_downloaded - if results.errors: - for next_url in _BASE_URLS[1:]: - results = _retry_sample_data(results, next_url) - if not results.errors: - break - else: - _handle_final_errors(results) - return results + already_downloaded + to_download = zip(filename_list, fullpaths, strict=True) + if not force_download: + to_download = [(fn, fp) for fn, fp in to_download if not fp.exists()] + if to_download: + results = _download_sample_data(_BASE_URLS[0], to_download, overwrite=force_download) + # Try the other mirrors for any download errors + if results.errors: + for next_url in _BASE_URLS[1:]: + results = _retry_sample_data(results, next_url) + if not results.errors: + break + else: + _handle_final_errors(results) + raise RuntimeError + return fullpaths diff --git a/aiapy/data/sample.py b/aiapy/data/sample.py index e504139..bce218f 100644 --- a/aiapy/data/sample.py +++ b/aiapy/data/sample.py @@ -1,33 +1,79 @@ """ -This module provides the following sample data files. These files are -downloaded when this module is imported for the first time. +This module provides the following sample data files. +When a sample shortname is accessed, the corresponding file is downloaded if needed. +All files can be downloaded by calling :func:`~aiapy.data.sample.download_all`. + +Summary variables +----------------- +.. list-table:: + :widths: auto + + * - ``file_dict`` + - Dictionary of all sample shortnames and, if downloaded, corresponding + file locations on disk (otherwise, ``None``) + * - ``file_list`` + - List of disk locations for sample data files that have been downloaded + +Sample shortnames +----------------- .. 
list-table:: :widths: auto :header-rows: 1 - * - Variable name + * - Sample shortname - Name of downloaded file """ -import sys -from pathlib import Path -from ._sample import _SAMPLE_FILES, download_sample_data +from ._sample import _SAMPLE_DATA, _get_sample_files + +# Add a table row to the module docstring for each sample file +for _keyname, _filename in sorted(_SAMPLE_DATA.items()): + __doc__ += f" * - ``{_keyname}``\n - {_filename}\n" # NOQA: A001 + + +# file_dict and file_list are not normal variables; see __getattr__() below +__all__ = [ # NOQA: PLE0604, F822 + "download_all", + "file_dict", + "file_list", + *sorted(_SAMPLE_DATA.keys()), +] + + +# See PEP 562 (https://peps.python.org/pep-0562/) for module-level __dir__() +def __dir__(): + return __all__ + -files = download_sample_data() -file_dict = {} -for f in files: - name = Path(f).name - _key = _SAMPLE_FILES.get(name, None) - if _key: - setattr(sys.modules[__name__], _key, str(f)) - file_dict.update({_key: f}) +# See PEP 562 (https://peps.python.org/pep-0562/) for module-level __getattr__() +def __getattr__(name): + if name in _SAMPLE_DATA: + return _get_sample_files([_SAMPLE_DATA[name]])[0] + if name == "file_dict": + return dict( + sorted( + zip( + _SAMPLE_DATA.keys(), + _get_sample_files(_SAMPLE_DATA.values(), no_download=True), + strict=False, + ) + ) + ) + if name == "file_list": + return [v for v in __getattr__("file_dict").values() if v] + msg = f"module '{__name__}' has no attribute '{name}'" + raise AttributeError(msg) -# Sort the entries in the dictionary -file_dict = dict(sorted(file_dict.items())) -file_list = file_dict.values() -for keyname, filename in file_dict.items(): - __doc__ += f" * - ``{keyname}``\n - {Path(filename).name}\n" +def download_all(*, force_download=False): + """ + Download all sample data at once that has not already been downloaded. -__all__ = [*list(_SAMPLE_FILES.values()), "file_dict", "file_list"] + Parameters + ---------- + force_download : `bool` + If ``True``, files are downloaded even if they already exist. Default is + ``False``. + """ + _get_sample_files(_SAMPLE_DATA.values(), force_download=force_download) diff --git a/aiapy/psf/__init__.py b/aiapy/psf/__init__.py index 7e28e8d..6adf45f 100644 --- a/aiapy/psf/__init__.py +++ b/aiapy/psf/__init__.py @@ -1,2 +1,2 @@ -from .deconvolve import * # NOQA -from .psf import * # NOQA +from .deconvolve import * # NOQA: F403 +from .psf import * # NOQA: F403 diff --git a/aiapy/psf/deconvolve.py b/aiapy/psf/deconvolve.py index c1abf07..c7e1838 100644 --- a/aiapy/psf/deconvolve.py +++ b/aiapy/psf/deconvolve.py @@ -1,6 +1,7 @@ """ Deconvolve an AIA image with the channel point spread function. 
""" + import copy import warnings @@ -78,6 +79,7 @@ def deconvolve(smap, *, psf=None, iterations=25, clip_negative=True, use_gpu=Tru if use_gpu and not HAS_CUPY: log.info("cupy not installed or working, falling back to CPU") if HAS_CUPY and use_gpu: + log.info("Using a GPU via cupy") img = cupy.array(img) psf = cupy.array(psf) # Center PSF at pixel (0,0) @@ -91,7 +93,7 @@ def deconvolve(smap, *, psf=None, iterations=25, clip_negative=True, use_gpu=Tru ratio = img / np.fft.irfft2(np.fft.rfft2(img_decon) * psf) img_decon = img_decon * np.fft.irfft2(np.fft.rfft2(ratio) * psf_conj) - return smap._new_instance( + return smap._new_instance( # NOQA: SLF001 cupy.asnumpy(img_decon) if (HAS_CUPY and use_gpu) else img_decon, copy.deepcopy(smap.meta), plot_settings=copy.deepcopy(smap.plot_settings), diff --git a/aiapy/psf/psf.py b/aiapy/psf/psf.py index c7e75d9..0936803 100644 --- a/aiapy/psf/psf.py +++ b/aiapy/psf/psf.py @@ -1,6 +1,7 @@ """ Calculate the point spread function (PSF) for the AIA telescopes. """ + import astropy.units as u import numpy as np from sunpy import log @@ -35,13 +36,13 @@ def filter_mesh_parameters(*, use_preflightcore=False): describing filter mesh properties of that channel (see Table 2 of [1]_): - * `angle_arm`: Angles of the four entrance filter arms - * `error_angle_arm`: Error in angle of the four entrance filter arms - * `spacing_e`: Distance between diffraction spikes from entrance filter - * `spacing_fp`: Distance between diffraction spikes from focal plane filter - * `mesh_pitch`: Pitch of the mesh - * `mesh_width`: Width of the mesh - * `width`: Width applied to the Gaussian such that *after* + * ``angle_arm``: Angles of the four entrance filter arms + * ``error_angle_arm``: Error in angle of the four entrance filter arms + * ``spacing_e``: Distance between diffraction spikes from entrance filter + * ``spacing_fp``: Distance between diffraction spikes from focal plane filter + * ``mesh_pitch``: Pitch of the mesh + * ``mesh_width``: Width of the mesh + * ``width``: Width applied to the Gaussian such that *after* convolution we have the proper width (:math:`4/3` at :math:`1/e` of max) @@ -96,8 +97,7 @@ def filter_mesh_parameters(*, use_preflightcore=False): * reference: 'AIA20101016_190905_0335.fits' """ return { - 94 - * u.angstrom: { + 94 * u.angstrom: { "angle_arm": [49.81, 40.16, -40.28, -49.92] * u.deg, "error_angle_arm": [0.02, 0.02, 0.02, 0.02] * u.deg, "spacing_e": 8.99 * u.pixel, @@ -107,8 +107,7 @@ def filter_mesh_parameters(*, use_preflightcore=False): "width": (0.951 if use_preflightcore else 4.5) * u.pixel, "CDELT": [0.600109, 0.600109] * u.arcsec, }, - 131 - * u.angstrom: { + 131 * u.angstrom: { "angle_arm": [50.27, 40.17, -39.70, -49.95] * u.deg, "error_angle_arm": [0.02, 0.02, 0.02, 0.02] * u.deg, "spacing_e": 12.37 * u.pixel, @@ -118,8 +117,7 @@ def filter_mesh_parameters(*, use_preflightcore=False): "width": (1.033 if use_preflightcore else 4.5) * u.pixel, "CDELT": [0.600698, 0.600698] * u.arcsec, }, - 171 - * u.angstrom: { + 171 * u.angstrom: { "angle_arm": [49.81, 39.57, -40.13, -50.38] * u.deg, "error_angle_arm": [0.02, 0.02, 0.02, 0.02] * u.deg, "spacing_e": 16.26 * u.pixel, @@ -129,8 +127,7 @@ def filter_mesh_parameters(*, use_preflightcore=False): "width": (0.962 if use_preflightcore else 4.5) * u.pixel, "CDELT": [0.599489, 0.599489] * u.arcsec, }, - 193 - * u.angstrom: { + 193 * u.angstrom: { "angle_arm": [49.82, 39.57, -40.12, -50.37] * u.deg, "error_angle_arm": [0.02, 0.02, 0.03, 0.04] * u.deg, "spacing_e": 18.39 * u.pixel, @@ -140,8 +137,7 
@@ def filter_mesh_parameters(*, use_preflightcore=False): "width": (1.512 if use_preflightcore else 4.5) * u.pixel, "CDELT": [0.600758, 0.600758] * u.arcsec, }, - 211 - * u.angstrom: { + 211 * u.angstrom: { "angle_arm": [49.78, 40.08, -40.34, -49.95] * u.deg, "error_angle_arm": [0.02, 0.02, 0.02, 0.02] * u.deg, "spacing_e": 19.97 * u.pixel, @@ -151,8 +147,7 @@ def filter_mesh_parameters(*, use_preflightcore=False): "width": (1.199 if use_preflightcore else 4.5) * u.pixel, "CDELT": [0.600758, 0.600758] * u.arcsec, }, - 304 - * u.angstrom: { + 304 * u.angstrom: { "angle_arm": [49.76, 40.18, -40.14, -49.90] * u.degree, "error_angle_arm": [0.02, 0.02, 0.02, 0.02] * u.deg, "spacing_e": 28.87 * u.pixel, @@ -162,8 +157,7 @@ def filter_mesh_parameters(*, use_preflightcore=False): "width": (1.247 if use_preflightcore else 4.5) * u.pixel, "CDELT": [0.600165, 0.600165] * u.arcsec, }, - 335 - * u.angstrom: { + 335 * u.angstrom: { "angle_arm": [50.40, 39.80, -39.64, -50.25] * u.degree, "error_angle_arm": [0.02, 0.02, 0.02, 0.02] * u.deg, "spacing_e": 31.83 * u.pixel, @@ -324,7 +318,7 @@ def _psf(meshinfo, angles, diffraction_orders, *, focal_plane=False, use_gpu=Tru if order == 0: continue intensity = np.sinc(order / mesh_ratio) ** 2 # I_0 - for dx, dy in zip(spacing_x.value, spacing_y.value): + for dx, dy in zip(spacing_x.value, spacing_y.value, strict=True): x_centered = x - (0.5 * Nx + dx * order + 0.5) y_centered = y - (0.5 * Ny + dy * order + 0.5) # NOTE: this step is the bottleneck and is VERY slow on a CPU diff --git a/aiapy/psf/tests/conftest.py b/aiapy/psf/tests/conftest.py index 4951334..7cdaf6a 100644 --- a/aiapy/psf/tests/conftest.py +++ b/aiapy/psf/tests/conftest.py @@ -1,6 +1,7 @@ """ Shared fixtures for PSF tests. """ + import pytest import aiapy.psf diff --git a/aiapy/psf/tests/test_deconvolve.py b/aiapy/psf/tests/test_deconvolve.py index eeefedc..f6650b1 100644 --- a/aiapy/psf/tests/test_deconvolve.py +++ b/aiapy/psf/tests/test_deconvolve.py @@ -8,12 +8,7 @@ def test_deconvolve(aia_171_map): - # Skip this test if cupy is not installed because it is too - # slow. This is mostly for the benefit of the CI. - try: - import cupy # NOQA - except ImportError: - pytest.skip("Cannot import cupy. Skipping deconvolution test with full PSF") + pytest.importorskip(modname="cupy", reason="Cannot import cupy. Skipping deconvolution test with full PSF") map_decon = aiapy.psf.deconvolve(aia_171_map) assert isinstance(map_decon, sunpy.map.GenericMap) assert map_decon.data.shape == aia_171_map.data.shape @@ -26,11 +21,11 @@ def test_deconvolve_specify_psf(aia_171_map, psf): def test_deconvolve_negative_pixels(aia_171_map, psf): - aia_171_map_neg = aia_171_map._new_instance( + aia_171_map_neg = aia_171_map._new_instance( # NOQA: SLF001 np.where(aia_171_map.data < 1, -1, aia_171_map.data), aia_171_map.meta, ) - with pytest.warns(AiapyUserWarning): + with pytest.warns(AiapyUserWarning, match="Image contains negative intensity values."): aiapy.psf.deconvolve( aia_171_map_neg, psf=psf, diff --git a/aiapy/response/__init__.py b/aiapy/response/__init__.py index fc2b897..c8da563 100644 --- a/aiapy/response/__init__.py +++ b/aiapy/response/__init__.py @@ -1,4 +1,5 @@ """ Subpackage for AIA response functions. 
""" -from .channel import * # NOQA + +from .channel import * # NOQA: F403 diff --git a/aiapy/response/channel.py b/aiapy/response/channel.py index f084a36..34fde44 100644 --- a/aiapy/response/channel.py +++ b/aiapy/response/channel.py @@ -1,6 +1,7 @@ """ Class for accessing response function data from each channel. """ + import collections from urllib.parse import urljoin @@ -57,7 +58,7 @@ class Channel: -------- >>> import astropy.units as u >>> from aiapy.response import Channel - >>> c = Channel(171*u.angstrom) # doctest: +REMOTE_DATA + >>> c = Channel(171 * u.angstrom) # doctest: +REMOTE_DATA >>> c.telescope_number # doctest: +REMOTE_DATA 3 >>> c.name # doctest: +REMOTE_DATA @@ -87,10 +88,7 @@ def _get_instrument_data(self, instrument_file): if isinstance(instrument_file, collections.OrderedDict): return instrument_file if instrument_file is None: - if self.is_fuv: - instrument_file = self._get_fuv_instrument_file() - else: - instrument_file = self._get_euv_instrument_file() + instrument_file = self._get_fuv_instrument_file() if self.is_fuv else self._get_euv_instrument_file() return read_genx(instrument_file) @manager.require("instrument_file_euv", *URL_HASH[VERSION_NUMBER]["euv"]) @@ -295,7 +293,7 @@ def eve_correction(self, obstime, **kwargs) -> u.dimensionless_unscaled: where :math:`A_{eff}(\lambda_n,t_0)` is the effective area at the nominal wavelength of the channel (:math:`\lambda_n`) at the first calibration epoch and :math:`A_{eff}(\lambda_E,t_e)` is the effective - area at the ``obstime`` calibration epoch interpolated to the effective + area at the ```obstime``` calibration epoch interpolated to the effective wavelength (:math:`\lambda_E`). .. note:: This function is adapted directly from the @@ -399,7 +397,7 @@ def wavelength_response( obstime : `~astropy.time.Time`, optional If specified, a time-dependent correction is applied to account for degradation. include_eve_correction : `bool`, optional - If true and `obstime` is not `None`, include correction to EVE calibration. + If true and ``obstime`` is not `None`, include correction to EVE calibration. The time-dependent correction is also included. 
include_crosstalk : `bool`, optional If true, include the effect of crosstalk between channels that share a telescope diff --git a/aiapy/response/tests/test_channel.py b/aiapy/response/tests/test_channel.py index f533c5b..5fa552f 100644 --- a/aiapy/response/tests/test_channel.py +++ b/aiapy/response/tests/test_channel.py @@ -14,7 +14,7 @@ # Mark all tests which use this fixture as online @pytest.fixture(params=[pytest.param(None, marks=pytest.mark.remote_data)]) -def channel(request, ssw_home): +def channel(request, ssw_home): # NOQA: ARG001 if ssw_home is not None: instrument_file = Path(ssw_home) / "sdo" / "aia" / "response" / f"aia_V{VERSION_NUMBER}_all_fullinst.genx" else: @@ -58,16 +58,16 @@ def required_keys(): def test_has_instrument_data(channel): assert hasattr(channel, "_instrument_data") - assert isinstance(channel._instrument_data, collections.OrderedDict) + assert isinstance(channel._instrument_data, collections.OrderedDict) # NOQA: SLF001 def test_has_channel_data(channel): assert hasattr(channel, "_data") - assert isinstance(channel._data, MetaDict) + assert isinstance(channel._data, MetaDict) # NOQA: SLF001 def test_channel_data_has_keys(channel, required_keys): - assert all(k in channel._data for k in required_keys) + assert all(k in channel._data for k in required_keys) # NOQA: SLF001 def test_has_wavelength(channel): @@ -226,7 +226,7 @@ def test_wavelength_response_time(channel, idl_environment, include_eve_correcti def test_fuv_channel(channel_wavelength, channel_properties, required_keys): # There are a few corner cases for the 1600, 1700, and 4500 channels channel = Channel(channel_wavelength) - assert all(k in channel._data for k in required_keys) + assert all(k in channel._data for k in required_keys) # NOQA: SLF001 for p in channel_properties: assert isinstance(getattr(channel, p), u.Quantity) assert channel.contamination == u.Quantity( diff --git a/aiapy/tests/test_idl.py b/aiapy/tests/test_idl.py index 0aa9acf..f1c636d 100644 --- a/aiapy/tests/test_idl.py +++ b/aiapy/tests/test_idl.py @@ -1,47 +1,17 @@ """ Contains all the the IDL specific tests for aiapy. """ + from pathlib import Path import astropy.units as u import numpy as np import pytest -from sunpy import log from aiapy.calibrate import estimate_error from aiapy.conftest import CHANNELS -def idl_available(): - try: - import hissw - - hissw.Environment().run("") - return True - except Exception as e: # NOQA - log.warning(e) - return False - - -@pytest.fixture(scope="session") -def idl_environment(): - if idl_available(): - import hissw - - return hissw.Environment( - ssw_packages=["sdo/aia"], - ssw_paths=["aia"], - ) - pytest.skip( - "A working IDL installation is not available. You will not be able to run portions of the test suite.", - ) - - -@pytest.fixture(scope="session") -def ssw_home(idl_environment): - return idl_environment.ssw_home if idl_available() else None - - @pytest.mark.parametrize( ("channel", "counts", "include_eve", "include_preflight", "include_chianti"), [[c, 10 * u.ct / u.pixel] + 3 * [False] for c in CHANNELS] @@ -88,15 +58,14 @@ def test_error_consistent(idl_environment, channel, counts, include_eve, include assert u.allclose(error, error_ssw, rtol=1e-4) -@pytest.fixture(scope="module") -@pytest.mark.parametrize("channel", CHANNELS) -def psf_idl(idl_environment, channels): +@pytest.fixture(params=CHANNELS) +def psf_idl(idl_environment, request): """ The point spread function as calculated by aia_calc_psf.pro. 
""" r = idl_environment.run( "psf = aia_calc_psf({{channel}},/use_preflightcore)", - args={"channel": f"{channels[0].value:.0f}"}, + args={"channel": f"{request.value:.0f}"}, save_vars=["psf"], verbose=False, ) diff --git a/aiapy/util/__init__.py b/aiapy/util/__init__.py index 536709b..218acab 100644 --- a/aiapy/util/__init__.py +++ b/aiapy/util/__init__.py @@ -1,5 +1,6 @@ """ Subpackage with miscellaneous utility functions. """ -from .exceptions import * # NOQA -from .util import * # NOQA + +from .exceptions import * # NOQA: F403 +from .util import * # NOQA: F403 diff --git a/aiapy/util/decorators.py b/aiapy/util/decorators.py index 8a09246..8c49364 100644 --- a/aiapy/util/decorators.py +++ b/aiapy/util/decorators.py @@ -1,5 +1,5 @@ -import inspect import functools +import inspect import astropy.units as u @@ -32,14 +32,16 @@ def validate_channel(argument, *, valid_channels="all"): def outer(function): sig = inspect.signature(function) if argument not in sig.parameters: - raise ValueError(f"Did not find {argument} in function signature ({sig}).") + msg = f"Did not find {argument} in function signature ({sig})." + raise ValueError(msg) @functools.wraps(function) def inner(*args, **kwargs): all_args = sig.bind(*args, **kwargs) channel = all_args.arguments[argument] if channel not in valid_channels: - raise ValueError(f'channel "{channel}" not in ' f"list of valid channels: {valid_channels}.") + msg = f'channel "{channel}" not in ' f"list of valid channels: {valid_channels}." + raise ValueError(msg) return function(*args, **kwargs) return inner diff --git a/aiapy/util/util.py b/aiapy/util/util.py index 304f8b2..547ad57 100644 --- a/aiapy/util/util.py +++ b/aiapy/util/util.py @@ -1,6 +1,7 @@ """ Miscellaneous utility functions. """ + import astropy.units as u import drms import numpy as np @@ -38,7 +39,8 @@ def sdo_location(time): key="T_OBS, HAEX_OBS, HAEY_OBS, HAEZ_OBS", ) if keys is None or len(keys) == 0: - raise ValueError("No DRMS records near this time") + msg = "No DRMS records near this time" + raise ValueError(msg) # Linear interpolation between the nearest records within the returned set times = Time(list(keys["T_OBS"]), scale="utc") x = np.interp(t.mjd, times.mjd, keys["HAEX_OBS"]) diff --git a/aiapy/version.py b/aiapy/version.py index b90b60f..5d3a922 100644 --- a/aiapy/version.py +++ b/aiapy/version.py @@ -1,12 +1,12 @@ # NOTE: First try _dev.scm_version if it exists and setuptools_scm is installed -# This file is not included in wheels/tarballs, so otherwise it will +# This file is not included in the wheels/tarballs, so otherwise it will # fall back on the generated _version module. 
try: try: from ._dev.scm_version import version except ImportError: from ._version import version -except Exception: # NOQA +except Exception: # NOQA: BLE001 import warnings warnings.warn( @@ -14,5 +14,10 @@ stacklevel=3, ) del warnings - version = "0.0.0" + +from packaging.version import parse as _parse + +_version = _parse(version) +major, minor, bugfix = [*_version.release, 0][:3] +release = not _version.is_devrelease diff --git a/changelog/313.breaking.rst b/changelog/313.breaking.rst index ba45935..fbd6815 100644 --- a/changelog/313.breaking.rst +++ b/changelog/313.breaking.rst @@ -1 +1 @@ -Increased the minimum version of Python to 3.9 +Increased the minimum version of Python to 3.10 diff --git a/changelog/318.breaking.rst b/changelog/318.breaking.rst new file mode 100644 index 0000000..3b9c21b --- /dev/null +++ b/changelog/318.breaking.rst @@ -0,0 +1 @@ +Downgraded the warning for multiple valid epochs (`aiapy.calibrate.util._select_epoch_from_correction_table`) to a logging debug message. diff --git a/changelog/318.doc.rst b/changelog/318.doc.rst new file mode 100644 index 0000000..5a1740f --- /dev/null +++ b/changelog/318.doc.rst @@ -0,0 +1 @@ +Transformed the documentation layout. diff --git a/changelog/322.doc.rst b/changelog/322.doc.rst index deddb56..d88750c 100644 --- a/changelog/322.doc.rst +++ b/changelog/322.doc.rst @@ -1 +1 @@ -Fixed incorrect IDL routine reference in the `aiapy.calibrate.uncertainty.estimate_error` documentation. +Fixed incorrect IDL routine reference in the `aiapy.calibrate.estimate_error` documentation. diff --git a/docs/conf.py b/docs/conf.py index a0484cc..9207a3c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,32 +1,45 @@ -# Configuration file for the Sphinx documentation builder. -# -- Project information ----------------------------------------------------- +""" +Configuration file for the Sphinx documentation builder. 
+""" + import os -import warnings -from pathlib import Path -from datetime import datetime -from astropy.utils.exceptions import AstropyDeprecationWarning -from packaging.version import Version -from sunpy.util.exceptions import SunpyDeprecationWarning, SunpyPendingDeprecationWarning +# This needs to be done before aiapy or sunpy is imported +os.environ["PARFIVE_HIDE_PROGRESS"] = "True" -from aiapy import __version__ +import datetime # NOQA: E402 +import warnings # NOQA: E402 +from pathlib import Path # NOQA: E402 -os.environ["JSOC_EMAIL"] = "jsoc@sunpy.org" -os.environ["HIDE_PARFIVE_PROGESS"] = "True" +from astropy.utils.exceptions import AstropyDeprecationWarning # NOQA: E402 +from matplotlib import MatplotlibDeprecationWarning # NOQA: E402 +from sunpy.util.exceptions import SunpyDeprecationWarning, SunpyPendingDeprecationWarning # NOQA: E402 +from sunpy_sphinx_theme import PNG_ICON # NOQA: E402 + +from aiapy import __version__ # NOQA: E402 +# -- Project information ------------------------------------------------------- +project = "aiapy" +author = "AIA Instrument Team" +copyright = f"{datetime.datetime.now(datetime.timezone.utc).year}, {author}" # NOQA: A001 release = __version__ -aiapy_version = Version(__version__) -is_release = not (aiapy_version.is_prerelease or aiapy_version.is_devrelease) -if is_release: - warnings.simplefilter("ignore") +is_development = ".dev" in __version__ + +# Need to make sure that our documentation does not raise any of these warnings.filterwarnings("error", category=SunpyDeprecationWarning) warnings.filterwarnings("error", category=SunpyPendingDeprecationWarning) +warnings.filterwarnings("error", category=MatplotlibDeprecationWarning) warnings.filterwarnings("error", category=AstropyDeprecationWarning) -# -- General configuration --------------------------------------------------- -project = "aiapy" -copyright = f"{datetime.now().year}, AIA Instrument Team" -author = "AIA Instrument Team" +linkcheck_ignore = [ + r"https://doi.org/\d+", + r"https://element.io/\d+", + r"https://github.com/\d+", + r"https://docs.sunpy.org/\d+", +] +linkcheck_anchors = False + +# -- General configuration ----------------------------------------------------- extensions = [ "matplotlib.sphinxext.plot_directive", "sphinx_automodapi.automodapi", @@ -42,92 +55,60 @@ "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "sunpy.util.sphinx.doctest", + "sunpy.util.sphinx.generate", "sphinxext.opengraph", "sphinx_design", "sphinx_copybutton", "hoverxref.extension", ] +automodapi_toctreedirnm = "generated/api" +html_extra_path = ["robots.txt"] exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] source_suffix = ".rst" master_doc = "index" default_role = "obj" -ogp_image = "https://gitlab.com/LMSAL_HUB/aia_hub/aiapy/-/raw/main/docs/_static/sdo.png" +napoleon_use_rtype = False +napoleon_google_docstring = False +napoleon_use_param = False +suppress_warnings = ["app.add_directive"] +nitpicky = True +# This is not used. See docs/nitpick-exceptions file for the actual listing. 
+nitpick_ignore = [] +with Path("nitpick-exceptions").open() as nitpick_exceptions: + for line in nitpick_exceptions: + if line.strip() == "" or line.startswith("#"): + continue + dtype, target = line.split(None, 1) + target = target.strip() + nitpick_ignore.append((dtype, target)) + +# -- Options for sphinxext-opengraph ------------------------------------------ +ogp_image = "https://raw.githubusercontent.com/sunpy/sunpy-logo/master/generated/sunpy_logo_word.png" ogp_use_first_image = True ogp_description_length = 160 ogp_custom_meta_tags = [ '', ] -# -- Options for intersphinx extension --------------------------------------- -intersphinx_mapping = { - "python": ( - "https://docs.python.org/3/", - (None, "http://data.astropy.org/intersphinx/python3.inv"), - ), - "numpy": ( - "https://docs.scipy.org/doc/numpy/", - (None, "http://data.astropy.org/intersphinx/numpy.inv"), - ), - "scipy": ( - "https://docs.scipy.org/doc/scipy/reference/", - (None, "http://data.astropy.org/intersphinx/scipy.inv"), - ), - "matplotlib": ( - "https://matplotlib.org/", - (None, "http://data.astropy.org/intersphinx/matplotlib.inv"), - ), - "astropy": ("http://docs.astropy.org/en/stable/", None), - "sunpy": ("https://docs.sunpy.org/en/stable/", None), - "skimage": ("https://scikit-image.org/docs/stable/", None), - "cupy": ("https://docs.cupy.dev/en/stable/", None), -} - -# -- Options for HTML output ------------------------------------------------- -html_theme = "sunpy" - -graphviz_output_format = "svg" -graphviz_dot_args = [ - "-Nfontsize=10", - "-Nfontname=Helvetica Neue, Helvetica, Arial, sans-serif", - "-Efontsize=10", - "-Efontname=Helvetica Neue, Helvetica, Arial, sans-serif", - "-Gfontsize=10", - "-Gfontname=Helvetica Neue, Helvetica, Arial, sans-serif", -] -# -- Sphinx-gallery ---------------------------------------------------------- -sphinx_gallery_conf = { - "backreferences_dir": Path("generated") / "modules", - "filename_pattern": "^((?!skip_).)*$", - "examples_dirs": Path("..") / "examples", - "gallery_dirs": Path("generated") / "gallery", - "matplotlib_animations": True, - "default_thumb_file": "_static/sdo.png", - "abort_on_example_error": False, - "plot_gallery": "True", - "remove_config_comments": True, - "doc_module": ("sunpy"), - "only_warn_on_example_error": True, -} +# -- Options for sphinx-copybutton --------------------------------------------- +# Python Repl + continuation, Bash, ipython and qtconsole + continuation, jupyter-console + continuation +copybutton_prompt_text = r">>> |\.\.\. 
|\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: " +copybutton_prompt_is_regexp = True # -- Options for hoverxref ----------------------------------------------------- -# adapted from sphinx-hoverxref conf.py if os.environ.get("READTHEDOCS"): - # Building on Read the Docs hoverxref_api_host = "https://readthedocs.org" - if os.environ.get("PROXIED_API_ENDPOINT"): # Use the proxied API endpoint # - A RTD thing to avoid a CSRF block when docs are using a # custom domain hoverxref_api_host = "/_" - hoverxref_tooltip_maxwidth = 600 # RTD main window is 696px hoverxref_auto_ref = True hoverxref_mathjax = True - # hoverxref has to be applied to these hoverxref_domains = ["py"] - hoverxref_role_types = { # roles with py domain "attr": "tooltip", @@ -139,7 +120,6 @@ "meth": "tooltip", "mod": "tooltip", "obj": "tooltip", - # # roles with std domain "confval": "tooltip", "hoverxref": "tooltip", @@ -147,7 +127,51 @@ "term": "tooltip", } -# -- Options for sphinx-copybutton --------------------------------------------- -# Python Repl + continuation, Bash, ipython and qtconsole + continuation, jupyter-console + continuation -copybutton_prompt_text = r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: " -copybutton_prompt_is_regexp = True +# -- Options for intersphinx extension ----------------------------------------- +intersphinx_mapping = { + "astropy": ("https://docs.astropy.org/en/stable/", None), + "cupy": ("https://docs.cupy.dev/en/stable/", None), + "drms": ("https://docs.sunpy.org/projects/drms/en/stable/", None), + "matplotlib": ("https://matplotlib.org/stable", None), + "numpy": ("https://numpy.org/doc/stable/", (None, "http://www.astropy.org/astropy-data/intersphinx/numpy.inv")), + "parfive": ("https://parfive.readthedocs.io/en/stable/", None), + "pyerfa": ("https://pyerfa.readthedocs.io/en/stable/", None), + "python": ("https://docs.python.org/3/", (None, "http://www.astropy.org/astropy-data/intersphinx/python3.inv")), + "reproject": ("https://reproject.readthedocs.io/en/stable/", None), + "scipy": ( + "https://docs.scipy.org/doc/scipy/reference/", + (None, "http://www.astropy.org/astropy-data/intersphinx/scipy.inv"), + ), + "skimage": ("https://scikit-image.org/docs/stable/", None), + "sunpy": ("https://docs.sunpy.org/en/stable/", None), +} + +# -- Options for HTML output --------------------------------------------------- +html_theme = "sunpy" +graphviz_output_format = "svg" +graphviz_dot_args = [ + "-Nfontsize=10", + "-Nfontname=Helvetica Neue, Helvetica, Arial, sans-serif", + "-Efontsize=10", + "-Efontname=Helvetica Neue, Helvetica, Arial, sans-serif", + "-Gfontsize=10", + "-Gfontname=Helvetica Neue, Helvetica, Arial, sans-serif", +] + +# -- Sphinx Gallery ------------------------------------------------------------ +# JSOC email os env +# see https://github.com/sunpy/sunpy/wiki/Home:-JSOC +os.environ["JSOC_EMAIL"] = "jsoc@sunpy.org" +sphinx_gallery_conf = { + "backreferences_dir": Path("generated") / "modules", + "filename_pattern": "^((?!skip_).)*$", + "examples_dirs": Path("..") / "examples", + "gallery_dirs": Path("generated") / "gallery", + "matplotlib_animations": True, + "default_thumb_file": PNG_ICON, + "abort_on_example_error": False, + "plot_gallery": "True", + "remove_config_comments": True, + "doc_module": ("aiapy"), + "only_warn_on_example_error": True, +} diff --git a/docs/develop.rst b/docs/develop.rst index 32e8fac..02feb4e 100644 --- a/docs/develop.rst +++ b/docs/develop.rst @@ -1,10 +1,10 @@ -.. _dev-guide: +.. 
_aiapy_dev-guide:
 
 ============
 Contributing
 ============
 
-Contributing to open source projects is a fantastic way to advance one's coding skills; it's trying to create something, making mistakes, and learning from those mistakes.
+Contributing to open source projects is a good way to advance one's coding skills; it's trying to create something, making mistakes, and learning from those mistakes.
 That's how we all improve, and we are happy to help others learn.
 
 Being an open source contributor doesn't just mean writing code, either.
@@ -14,126 +14,26 @@
 Some of these contributions may be the most valuable to the project as a whole,
 
 Issue Tracking
 --------------
 
-All bugs, feature requests, and other issues related to ``aiapy`` should be recorded using the GitLab issue tracker.
-You can find instructions for how to create an issue `here `__.
+All bugs, feature requests, and other issues related to ``aiapy`` should be recorded using the GitHub issue tracker.
+You can find instructions for how to create an issue `here `__.
 
 All conversation regarding these issues should take place on the issue tracker.
 When a merge request resolves an issue, the issue will be closed and the appropriate merge request will be referenced.
 Issues will not be closed without a reason given.
 
-Creating a fork
----------------
+Code
+----
 
 If you would like to contribute to ``aiapy``, you will first need to setup your development environment.
-First create a fork of the main ``aiapy`` repository under your GitLab username.
-You can find the instructions for how to do this `here `__.
-If you don't already have an account on GitLab, you'll need to create one.
-You can also sign into GitLab using your GitHub username.
-Next, clone your fork of ``aiapy`` to your local machine,
+We suggest reading through the `SunPy developer's guide`_ for a more detailed description of the development process, as we follow the same process for ``aiapy``.
 
-.. code:: shell
-
-   git clone https://gitlab.com//``aiapy``.git
-
-Now add the main ``aiapy`` repository as an upstream repository,
-
-.. code:: shell
-
-   git remote add upstream https://gitlab.com/LMSAL_HUB/aia_hub/``aiapy``.git
-
-You can now keep your fork up to date with main repository by running,
-
-.. code:: shell
-
-   git pull upstream main
-
-Installation
--------------
-
-If you're using the `Miniconda Python distribution `__ (recommended),
-create a new environment for ``aiapy`` development,
-
-.. code-block:: shell
-
-   conda create --name ``aiapy``-dev pip
-   conda activate ``aiapy``-dev
-
-If you're using an alternate python installation, you can also use `virtual environments `__.
-Next, install the needed dependencies,
-
-.. code-block:: shell
-
-   cd aiapy
-   pip install -e .[test,docs]
-
-This includes all of the dependencies for the package as well as ``pytest`` for running tests and ``sphinx`` for building the docs.
-
-To make sure everything is working alright, you can run the tests,
-
-.. code-block:: shell
-
-   pytest --remote-data=any
-
-See :ref:`tests` for more details regarding running the tests.
-
-Making a contribution
----------------------
-
-If you want to add a feature or bugfix to ``aiapy``, start by first making sure the main branch of your fork is up to date with the main branch of the main repository (see above, this will help to prevent potential file conflicts).
-Next, create a new branch and switch to it,
-
-.. code:: shell
-
-   git checkout -b my-new-feature
-
-After you've made your changes, commit and push them up to GitLab,
-
-.. 
code:: shell - - git add changed_file_1.py changed_file_2.py - git commit -m "short description of my change" - git push origin my-new-feature - -Once you see the changes in GitLab, create a merge request against the main ``aiapy`` repository. -You can find instructions for how to do this `here `__. -Others will likely have comments and suggestions regarding your proposed changes. -You can make these changes using the instructions listed above. - -At least one other ``aiapy`` developer must approve your changes before the code can be merged. -Additionally, all automated tests should pass and all conversations should be resolved. -Once these steps are complete, the code can be merged and you can delete your branch ``my-new-feature``. - -.. _tests: +This will hopefully make it easier for you to contribute to ``aiapy`` and other SunPy affiliated packages in the future. +If you encounter any problems, please don't hesitate to ask for help on any of our communication channels (found on the landing page of the documentation). Testing ------- -Before committing any changes, you should ensure that the all of the tests pass locally. -To run the tests, - -.. code:: shell - - pytest --remote-data=any - -This will generate report showing which tests passed and which failed (if any). -Dropping the ``--remote-data`` flag will skip tests that require a network connection. -``aiapy`` uses the `pytest `__ framework for discovering and running all of the tests. - -Additions to the codebase should be accompanied by appropriate tests such that the test coverage of the entire package does not decrease. -You can check the test coverage by running, - -.. code:: shell - - pytest --remote-data=any --cov aiapy - -Additionally, the test suite, including the documentation build and code style checks can be run with `tox `__. -See the `SunPy developer's guide`_ for more information on running the test suite with ``tox``. - -Tests should be added to the directory in the appropriate subpackage, e.g. for ``calibrate``, the tests should be placed in ``calibrate/tests``. -Your tests can be added to an existing file or placed in a new file following the naming convention ``test_*.py``. -This organization allows the tests to be automatically discovered by pytest. - There are several tests that require a working installation of `sswidl `__ in order to compare results from IDL and Python. This is managed via the `hissw `__ package. If you'd like to run these tests, you must first tell ``hissw`` where to find your IDL and SSW installations by placing the following lines in the file: ``$HOME/.hissw/hisswrc``, @@ -148,26 +48,11 @@ where ``ssw_home`` is the path to the top of the sswidl tree and ``idl_home`` is For more details, see the `hissw documentation `__. If a working installation is not available, these tests are automatically skipped. -Documentation -------------- - -All documentation is written in `reStructuredText `__ and rendered using `Sphinx `__. -Documentation strings are automatically pulled from all modules, functions and classes to create the API documentation. -You can build and test the documentation locally by running, - -.. code:: shell - - cd docs - make html - -This will run Sphinx on the restructured text files in order to create the HTML version of the documentation. -The built documentation, in HTML format, is in ``docs/_build/html``. - -Best practices --------------- +Standards +--------- All contributors to the ``aiapy`` codebase should follow the `SunPy developer's guide`_. 
This guide lays out a set of best practices for contributing, reviewing, testing, and documenting code. All contributions to ``aiapy`` must adhere to the `Python in Heliophysics Community Standards `__. -.. _`SunPy developer's guide`: https://docs.sunpy.org/en/latest/dev_guide/index.html +.. _`SunPy developer's guide`: https://docs.sunpy.org/en/latest/dev_guide/contents/newcomers.html diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 12dcec1..246f8cd 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -1,24 +1,11 @@ -Getting Started -=============== +.. _aiapy-getting-started-index: -The stable version of ``aiapy`` is available through both PyPI via ``pip``, +****************************** +Getting started with ``aiapy`` +****************************** -.. code-block:: shell +.. toctree:: + :maxdepth: 1 - pip install aiapy - -as well as through the `Miniconda distribution `__ via ``conda-forge``, - -.. code-block:: shell - - conda install -c conda-forge aiapy - -You can also install the development version from GitLab, - -.. code-block:: shell - - git clone https://gitlab.com/LMSAL_HUB/aia_hub/aiapy.git - cd aiapy - pip install -e ".[dev]" - -If you will be developing ``aiapy``, please see the :ref:`dev-guide`. + installation + preparing_data diff --git a/docs/index.rst b/docs/index.rst index 86e33a7..acc1c61 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,36 +1,75 @@ +.. _aia_docs_index: + +=================== aiapy documentation =================== -``aiapy`` is a Python package for analyzing data from the Atmospheric Imaging Assembly (AIA) instrument onboard the Solar Dynamics Observatory (SDO) spacecraft. +``aiapy`` is a community-developed, free and open-source Python library for analyzing data from the Atmospheric Imaging Assembly (AIA) onboard the Solar Dynamics Observatory (SDO). -``aiapy`` includes software for converting AIA images from level 1 to level 1.5, point spread function deconvolution, and computing the wavelength and temperature response functions for the EUV channels. +Capabilities include converting AIA images from level 1 to level 1.5, deconvolving images with the instrument point spread function, and computing the wavelength response functions for the EUV channels. .. grid:: 1 2 2 2 - :gutter: 3 + :gutter: 2 + + .. grid-item-card:: Getting Started + :link: aiapy-getting-started-index + :link-type: ref + :text-align: center + + :material-outlined:`accessibility_new;8em;sd-text-secondary` + + How to install aiapy and what its key features are. + + .. grid-item-card:: Example gallery + :link: generated/gallery + :text-align: center + + :material-outlined:`palette;8em;sd-text-secondary` + + Examples including plots on accomplishing common tasks using aiapy. + + .. grid-item-card:: Reference + :link: aia_api_reference + :link-type: ref + :text-align: center + + :material-outlined:`code;8em;sd-text-secondary` + + Technical description of the inputs, outputs, and behavior of each component of aiapy. + + .. grid-item-card:: Get Help + :text-align: center + + :material-outlined:`live_help;8em;sd-text-secondary` - .. grid-item-card:: - :class-card: card + .. button-link:: https://app.element.io/#/room/#sunpy:openastronomy.org + :shadow: + :expand: + :color: warning - Getting started - ^^^^^^^^^^^^^^^ + **Join the chat** - .. toctree:: - :maxdepth: 1 + .. 
button-link:: https://github.com/LM-SAL/aiapy/issues
+      :shadow:
+      :expand:
+      :color: warning
 
-      Other info
-      ^^^^^^^^^^
+      **Report an issue**
 
-      .. toctree::
-         :maxdepth: 1
+   .. button-link:: https://community.openastronomy.org/c/sunpy/5
+      :shadow:
+      :expand:
+      :color: warning
 
-         citation
-         changelog
-         develop
+      **Post on Discourse**
+
+.. toctree::
+   :maxdepth: 1
+   :hidden:
+
+   getting_started
+   generated/gallery/index
+   reference/index
+   citation
+   changelog
+   develop
diff --git a/docs/installation.rst b/docs/installation.rst
new file mode 100644
index 0000000..635b4d7
--- /dev/null
+++ b/docs/installation.rst
@@ -0,0 +1,8 @@
+.. _aiapy-installing:
+
+******************
+aiapy Installation
+******************
+
+The easiest way to install aiapy is to follow the instructions on :ref:`sunpy-tutorial-installing`.
+The only difference is that you will need to install aiapy instead of sunpy.
diff --git a/docs/nitpick-exceptions b/docs/nitpick-exceptions
new file mode 100644
index 0000000..13b8e11
--- /dev/null
+++ b/docs/nitpick-exceptions
@@ -0,0 +1,98 @@
+# Prevents sphinx nitpicky mode picking up on optional
+# (see https://github.com/sphinx-doc/sphinx/issues/6861)
+# Even if it was "fixed", still broken
+py:class optional
+# See https://github.com/numpy/numpy/issues/10039
+py:obj numpy.datetime64
+# There's no specific file or function classes to link to
+py:class (Unit('deg'), Unit('pix'))
+py:class (Unit('Mm'), None)
+py:class any type
+py:class array-like
+py:class file object
+py:class function
+py:class path-like
+py:class str-like
+py:class time-like
+py:class Unit('%')
+py:class Unit('Angstrom')
+py:class Unit('arcsec / pix')
+py:class Unit('arcsec')
+py:class Unit('deg')
+py:class Unit('pix')
+py:class Unit('s')
+py:class Unit('W / m2')
+py:class Unit('ct / pix')
+py:obj function
+py:obj iterable
+py:obj parfive
+py:mod parfive
+py:obj astropy.io.fits.hdu.base.ExtensionHDU
+
+# This comes from Map.wcs
+py:class prop
+py:class Maxwell
+
+# These come from astropy.coordinates.baseframe.represent_as
+py:class data
+py:class keyword only
+py:class keyword-only
+py:class string
+py:class subclass of BaseRepresentation
+
+# These come from astropy QTable
+py:class list of lists
+py:class list of list
+py:class numpy ndarray
+py:class numpy ndarray
+py:class Table
+py:class table-like object
+
+# numpy inherited docstrings
+py:obj a
+py:obj a.size == 1
+py:obj args
+py:obj dtype
+py:obj n
+py:obj ndarray
+
+# other classes and functions that cannot be linked to
+py:class astropy.table.column.BaseColumn
+py:class docutils.parsers.rst.Directive
+py:class numpy.core.records.recarray
+py:class numpy.ma.core.MaskedArray
+py:class numpy.ma.mvoid
+py:class numpy.void
+py:class pandas.DataFrame
+py:class xmlrpc.client.Error
+py:class xmlrpc.client.Fault
+py:class xmlrpclib.Error
+py:class xmlrpclib.Fault
+py:obj aiohttp.ClientResponse
+py:obj astropy.visualization.wcsaxes.coordinates_map.CoordinatesMap.grid
+py:obj data
+py:obj numpy.ma.core.MaskedArray
+py:obj pkg_resources.parse_version
+py:obj sunpy.extern.parse.parse
+
+# Pending on python docs links issue #11975
+py:class classmethod
+py:class list
+py:meth list.pop
+py:obj Artist
+py:obj AttributeError
+py:obj BboxBase
+py:obj int
+py:obj list.append
+py:obj list.append
+py:obj list.count
+py:obj list.extend
+py:obj list.index
+py:obj list.insert
+py:obj list.remove
+py:obj NotImplementedError
+py:obj NotImplementedError
+py:obj RendererBase
+py:obj RuntimeError
+py:obj text
+py:obj Text
diff --git a/docs/preparing_data.rst b/docs/preparing_data.rst
index 8b06648..b3125d5 100644
--- a/docs/preparing_data.rst
+++ b/docs/preparing_data.rst
@@ -1,16 +1,21 @@
-Preparing AIA data
-==================
+.. _aiapy-prepping-level-1:
 
-AIA data provided by the JSOC are level 1 data products.
-This means that the images still include the roll angle of the satellite and
-each channel may have a slightly different resolution.
+============================================
+Preparing AIA data from level 1 to level 1.5
+============================================
+
+AIA data products provided by the JSOC are level 1 data products.
+This means that the images still include the roll angle of the satellite and each channel may have a slightly different pixel scale.
 Typically, before performing any sort of data analysis on AIA images, you will want to promote your AIA data from level 1 to level 1.5.
-This promotion involves updating the pointing keywords, removing the roll angle, scaling the image to a resolution of 0.6 arcseconds per pixel, and translating the image such that the center of the Sun is located in the center of the image.
+This is important if you want to compare images from different channels or create Differential Emission Measure (DEM) maps.
+
+The promotion to level 1.5 involves updating the pointing keywords, removing the roll angle, scaling the image to a resolution of 0.6 arcseconds per pixel, and translating the image such that the center of the Sun is located in the center of the image.
 
-In IDL, this is done with the `aia_prep.pro` procedure in SSWIDL as described in the `SDO Analysis Guide `__.
+In IDL, this is done with the ``aia_prep.pro`` procedure in SSWIDL as described in the `SDO Analysis Guide `__.
 The following example, :ref:`sphx_glr_generated_gallery_prepping_level_1_data.py` demonstrates how to achieve this in Python with ``aiapy``.
 
-Additional data processing steps (e.g. a PSF deconvolution) should be done in the following order:
+There are also additional processing steps that can be applied to the level 1 AIA images.
+If you want to apply any additional data processing steps (e.g., PSF deconvolution), they should be done in the following order:
 
 1. Pointing correction (`aiapy.calibrate.update_pointing`)
 2. Image respiking (`aiapy.calibrate.respike`)
@@ -19,13 +24,13 @@
 5. Degradation correction (`aiapy.calibrate.correct_degradation`)
 6. Exposure normalization
 
-A few notes on this:
+.. note::
 
-* Level 1.5, in its typical usage, only includes steps 1 and 4.
-  Unless stated otherwise, a science publication mentioning level 1.5 AIA data does not include steps 2, 3, 5 and 6.
-* The PSF functions are defined on the level 1 pixel grid so PSF deconvolution MUST be done on the level 1 data products (i.e. before image registration).
-  This is described in the PSF gallery example :ref:`sphx_glr_generated_gallery_skip_psf_deconvolution.py`.
-* The pointing update should be done prior to image registration as the updated keywords, namely ``CRPIX1`` and ``CRPIX2``, are used in the image registration step.
-  More details can be found in this gallery example :ref:`sphx_glr_generated_gallery_update_header_keywords.py`.
-* The exposure time normalization and degradation correction (`aiapy.calibrate.correct_degradation`) operations are just scalar multiplication and are thus linear such that their ordering is inconsequential.
-* Exposure time normalization can be performed by simply dividing a map by the exposure time property, ``my_map / my_map.exposure_time``. + * Level 1.5, in its typical usage, only includes steps 1 and 4. + Unless stated otherwise, any science publication mentioning level 1.5 AIA data does not include steps 2, 3, 5 and 6. + * The PSF functions are defined on the level 1 pixel grid so PSF deconvolution **MUST** be done on the level 1 data products (i.e., before image registration). + This is described in the PSF gallery example :ref:`sphx_glr_generated_gallery_skip_psf_deconvolution.py`. + * The pointing update should be done prior to image registration as the updated keywords, namely ``CRPIX1`` and ``CRPIX2``, are used in the image registration step. + More details can be found in this gallery example :ref:`sphx_glr_generated_gallery_update_header_keywords.py`. + * The exposure time normalization and degradation correction (`aiapy.calibrate.correct_degradation`) operations are just scalar multiplication and are thus linear such that their ordering is inconsequential. + * Exposure time normalization can be performed by simply dividing a map by the exposure time property, ``my_map / my_map.exposure_time``. diff --git a/docs/code_ref/aiapy.rst b/docs/reference/aiapy.rst similarity index 100% rename from docs/code_ref/aiapy.rst rename to docs/reference/aiapy.rst diff --git a/docs/code_ref/calibrate.rst b/docs/reference/calibrate.rst similarity index 100% rename from docs/code_ref/calibrate.rst rename to docs/reference/calibrate.rst diff --git a/docs/code_ref/index.rst b/docs/reference/index.rst similarity index 83% rename from docs/code_ref/index.rst rename to docs/reference/index.rst index e41a898..f1fb0af 100644 --- a/docs/code_ref/index.rst +++ b/docs/reference/index.rst @@ -1,3 +1,5 @@ +.. _aia_api_reference: + ============= API reference ============= diff --git a/docs/code_ref/psf.rst b/docs/reference/psf.rst similarity index 100% rename from docs/code_ref/psf.rst rename to docs/reference/psf.rst diff --git a/docs/code_ref/response.rst b/docs/reference/response.rst similarity index 100% rename from docs/code_ref/response.rst rename to docs/reference/response.rst diff --git a/docs/code_ref/util.rst b/docs/reference/util.rst similarity index 100% rename from docs/code_ref/util.rst rename to docs/reference/util.rst diff --git a/docs/robots.txt b/docs/robots.txt new file mode 100644 index 0000000..18d83dc --- /dev/null +++ b/docs/robots.txt @@ -0,0 +1,6 @@ +User-agent: * +Allow: /*/latest/ +Allow: /en/latest/ # Fallback for bots that don't understand wildcards +Allow: /*/stable/ +Allow: /en/stable/ # Fallback for bots that don't understand wildcards +Disallow: / \ No newline at end of file diff --git a/examples/calculate_response_function.py b/examples/calculate_response_function.py index f69da19..94711de 100644 --- a/examples/calculate_response_function.py +++ b/examples/calculate_response_function.py @@ -8,13 +8,14 @@ well as explore the different properties of the telescope channels. """ + import astropy.time import astropy.units as u import matplotlib.pyplot as plt from aiapy.response import Channel -################################################## +############################################################################### # Since AIA uses narrow-band filters, other wavelengths (outside of the nominal # wavelength attributed to each filter) contribute to the image data. 
# Computing these response functions allow us to see which other wavelengths @@ -28,9 +29,10 @@ # this as the most recent instrument data file will # need to be downloaded from a remote server. Subsequent # calls will know that the data has been downloaded. -c = Channel(335 * u.angstrom) -################################################## +aia_335_channel = Channel(335 * u.angstrom) + +############################################################################### # From `Boerner et al. (2012) `_, # the wavelength response function is given by, # @@ -55,8 +57,8 @@ # Reflectance fig = plt.figure() ax = fig.add_subplot(221) -ax.plot(c.wavelength, c.primary_reflectance, label=r"$R_P$") -ax.plot(c.wavelength, c.secondary_reflectance, label=r"$R_S$") +ax.plot(aia_335_channel.wavelength, aia_335_channel.primary_reflectance, label=r"$R_P$") +ax.plot(aia_335_channel.wavelength, aia_335_channel.secondary_reflectance, label=r"$R_S$") ax.set_ylabel(r"Reflectance") ax.set_xlim(50, 400) ax.set_xlabel(r"$\lambda$ [Å]") @@ -64,8 +66,8 @@ # Transmittance ax = fig.add_subplot(222) -ax.plot(c.wavelength, c.entrance_filter_efficiency, label=r"$T_E$") -ax.plot(c.wavelength, c.focal_plane_filter_efficiency, label=r"$T_F$") +ax.plot(aia_335_channel.wavelength, aia_335_channel.entrance_filter_efficiency, label=r"$T_E$") +ax.plot(aia_335_channel.wavelength, aia_335_channel.focal_plane_filter_efficiency, label=r"$T_F$") ax.set_ylabel(r"Transmittance") ax.set_xlim(50, 400) ax.set_xlabel(r"$\lambda$ [Å]") @@ -73,62 +75,66 @@ # Contamination ax = fig.add_subplot(223) -ax.plot(c.wavelength, c.contamination) +ax.plot(aia_335_channel.wavelength, aia_335_channel.contamination) ax.set_ylabel(r"Contamination, $D(\lambda)$") ax.set_xlim(50, 400) ax.set_xlabel(r"$\lambda$ [Å]") # Quantumn efficiency ax = fig.add_subplot(224) -ax.plot(c.wavelength, c.quantum_efficiency) +ax.plot(aia_335_channel.wavelength, aia_335_channel.quantum_efficiency) ax.set_ylabel(r"Quantum Efficiency, $Q(\lambda)$") ax.set_xlim(50, 800) ax.set_xlabel(r"$\lambda$ [Å]") + plt.tight_layout() -plt.show() -################################################## +############################################################################### # Additionally, `aiapy.response.Channel` provides a method for calculating # the wavelength response function using the equation above, -r = c.wavelength_response() -print(r) -################################################## +wavelength_response_335 = aia_335_channel.wavelength_response() +print(wavelength_response_335) + +############################################################################### # We can then plot the response as a function of # wavelength. + fig = plt.figure() + ax = fig.gca() -ax.plot(c.wavelength, r) -ax.set_xlim((c.channel + [-10, 10] * u.angstrom).value) +ax.plot(aia_335_channel.wavelength, wavelength_response_335) +ax.set_xlim((aia_335_channel.channel + [-10, 10] * u.angstrom).value) ax.set_ylim(0, 0.03) ax.set_xlabel(r"$\lambda$ [Å]") -ax.set_ylabel(f'$R(\\lambda)$ [{r.unit.to_string("latex")}]') -plt.show() +ax.set_ylabel(f'$R(\\lambda)$ [{wavelength_response_335.unit.to_string("latex")}]') -################################################## +############################################################################### # On telescopes 1, 3, and 4, both channels are always illuminated. # This can lead to "crosstalk" contamination in a channel from the channel with # which it shares a telescope. This impacts the 94 Å and 304 Å channels # as well as the 131 Å and 335 Å channels. 
This effect is included # by default in the wavelength response calculation. To exclude this # effect, -r_no_cross = c.wavelength_response(include_crosstalk=False) -################################################## +wavelength_response_335_no_cross = aia_335_channel.wavelength_response(include_crosstalk=False) + +############################################################################### # If we look at the response around 131 Å (the channel with which 335 Å shares # a telescope), we can see the effect that the channel crosstalk has on the # 335 Å response function. + fig = plt.figure() + ax = fig.gca() -ax.plot(c.wavelength, r, label="crosstalk") -ax.plot(c.wavelength, r_no_cross, label="no crosstalk") +ax.plot(aia_335_channel.wavelength, wavelength_response_335, label="crosstalk") +ax.plot(aia_335_channel.wavelength, wavelength_response_335_no_cross, label="no crosstalk") ax.set_xlim(50, 350) ax.set_xlabel(r"$\lambda$ [Å]") -ax.set_ylabel(f'$R(\\lambda)$ [{r.unit.to_string("latex")}]') +ax.set_ylabel(f'$R(\\lambda)$ [{wavelength_response_335.unit.to_string("latex")}]') ax.legend(loc=1, frameon=False) -plt.show() -################################################### +############################################################################### # We can also incorporate various corrections to the # response functions, including a time-dependent # degradation correction as well as a correction based @@ -136,21 +142,24 @@ # time-dependent correction. As an example, to apply the # two aforementioned corrections given the degradation as # of 1 January 2019, + obstime = astropy.time.Time("2019-01-01T00:00:00") -r_time = c.wavelength_response(obstime=obstime) -r_eve = c.wavelength_response(obstime=obstime, include_eve_correction=True) +wavelength_response_335_time = aia_335_channel.wavelength_response(obstime=obstime) +wavelength_response_335_eve = aia_335_channel.wavelength_response(obstime=obstime, include_eve_correction=True) -#################################################### +############################################################################### # We can then compare the two corrected response # functions to the uncorrected case. + fig = plt.figure() ax = fig.gca() -ax.plot(c.wavelength, r, label="uncorrected") -ax.plot(c.wavelength, r_time, label="degradation correction") -ax.plot(c.wavelength, r_eve, label="EVE correction") -ax.set_xlim((c.channel + [-20, 20] * u.angstrom).value) +ax.plot(aia_335_channel.wavelength, wavelength_response_335, label="uncorrected") +ax.plot(aia_335_channel.wavelength, wavelength_response_335_time, label="degradation correction") +ax.plot(aia_335_channel.wavelength, wavelength_response_335_eve, label="EVE correction") +ax.set_xlim((aia_335_channel.channel + [-20, 20] * u.angstrom).value) ax.set_ylim(0, 0.03) ax.set_xlabel(r"$\lambda$ [Å]") -ax.set_ylabel(f'$R(\\lambda)$ [{r.unit.to_string("latex")}]') +ax.set_ylabel(f'$R(\\lambda)$ [{wavelength_response_335.unit.to_string("latex")}]') ax.legend(loc=2, frameon=False) + plt.show() diff --git a/examples/skip_correct_degradation.py b/examples/correct_degradation.py similarity index 58% rename from examples/skip_correct_degradation.py rename to examples/correct_degradation.py index 2c57e0e..376c7f5 100644 --- a/examples/skip_correct_degradation.py +++ b/examples/correct_degradation.py @@ -3,42 +3,37 @@ Correcting for instrument degradation ===================================== -This example demonstrates the degradation of the filters on AIA over time. 
+This example demonstrates the degradation of the filters on AIA and how to correct it. """ import astropy.time import astropy.units as u import matplotlib.pyplot as plt -from astropy.visualization import quantity_support, time_support +from astropy.visualization import time_support from sunpy.net import Fido from sunpy.net import attrs as a from aiapy.calibrate import degradation -# These are needed to allow the use of quantities and astropy -# time objects in the plot. +# This lets you pass `astropy.time.Time` objects directly to matplotlib time_support(format="jyear") -quantity_support() -########################################################### -# The performance of the AIA telescope is unfortunately degrading over time, -# leading to the resulting images becoming increasingly dim. We -# can correct for this by modeling the degradation over time and -# then dividing the image intensity by this correction. -# -# First, let's fetch some metadata for the 335 Å channel of AIA between 2010 -# and 2018 at a cadence of 30 days. We choose the 335 Å channel because it has experienced +############################################################################### +# First, let's fetch the metadata for the 335 Å channel of AIA between 2021 +# and 2023 at a cadence of 7 days. We choose the 335 Å channel because it has experienced # significant degradation compared to the other EUV channels. + results = Fido.search( - a.Time("2010-06-01T00:00:00", "2021-06-01T00:00:00"), - a.Sample(30 * u.day), + a.Time("2021-01-01T00:00:00", "2023-01-01T00:00:00"), + a.Sample(7 * u.day), a.jsoc.Series.aia_lev1_euv_12s, a.jsoc.Wavelength(335 * u.angstrom), ) -########################################################### +############################################################################### # We only need the date and mean intensity columns from the # metadata that was returned. We select those and nothing else. + table = results["jsoc"].show("DATE__OBS", "DATAMEAN") table["DATAMEAN"].unit = u.ct table["DATE_OBS"] = astropy.time.Time(table["DATE__OBS"], scale="utc") @@ -46,25 +41,28 @@ print(table) -########################################################### +############################################################################### # Next, we pass the date column to the `aiapy.calibrate.correct_degradation` # function. This function calculates the time-dependent correction factor # based on the time and wavelength of the observation. # We then divide the mean intensity by the correction factor to get the corrected intensity. # For more details on how the correction factor is calculated, see the documentation for the # `aiapy.calibrate.degradation` function. + correction_factor = degradation(335 * u.angstrom, table["DATE_OBS"]) -# This correction can be applied to a sunpy Map as well. table["DATAMEAN_DEG"] = table["DATAMEAN"] / correction_factor -########################################################### +############################################################################### # To understand the effect of the degradation and the correction factor, we # plot the corrected and uncorrected mean intensity as a function of time. # Note that the uncorrected intensity decreases monotonically over time # while the corrected intensity recovers to pre-2011 values in 2020. 
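###############################################################################
# An illustrative aside (a sketch, not part of this patch): the same
# correction can be applied directly to a single `~sunpy.map.Map` with
# `aiapy.calibrate.correct_degradation`, which computes the degradation
# factor for the map's channel and observation time and divides it out:

import sunpy.map

import aiapy.data.sample as sample_data
from aiapy.calibrate import correct_degradation

aia_sample_map = sunpy.map.Map(sample_data.AIA_193_IMAGE)
# Without an explicit correction_table, the table is fetched remotely.
aia_sample_map_corrected = correct_degradation(aia_sample_map)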
-plt.plot(table["DATE_OBS"], table["DATAMEAN"], label="mean", marker="o") -plt.plot(table["DATE_OBS"], table["DATAMEAN_DEG"], label="mean, corrected", marker="o") -plt.title(f'{(335*u.angstrom).to_string(format="latex")} Channel Degradation') -plt.legend(frameon=False) + +fig = plt.figure() +ax = fig.add_subplot(111) +ax.plot(table["DATE_OBS"], table["DATAMEAN"], label="mean", marker="o") +ax.plot(table["DATE_OBS"], table["DATAMEAN_DEG"], label="corrected mean", marker="+") +ax.set_title(f'{(335*u.angstrom).to_string(format="latex")} Channel Degradation') +ax.legend(frameon=False) plt.show() diff --git a/examples/download_specific_data.py b/examples/download_specific_data.py index d1f7a77..1b397f4 100644 --- a/examples/download_specific_data.py +++ b/examples/download_specific_data.py @@ -7,6 +7,7 @@ We will be filtering the data we require by keywords and requesting short exposure images from a recent (at time of writing) flare. """ + import os import astropy.units as u @@ -19,7 +20,7 @@ from aiapy.calibrate import correct_degradation, register, update_pointing from aiapy.calibrate.util import get_correction_table, get_pointing_table -##################################################### +############################################################################### # Exporting data from the JSOC requires registering your # email first. Please replace the text after the ``=`` # with your email address once you have registered. @@ -27,7 +28,7 @@ jsoc_email = os.environ.get("JSOC_EMAIL") -##################################################### +############################################################################### # Our goal is to request data of a recent X-class flare. # The X-class flare occurred on the 2021/07/03 at 14:30:00 UTC. # We will focus on the 5 minutes before and after this time. @@ -45,7 +46,7 @@ print(query) -##################################################### +############################################################################### # Now we will download the data and "aia prep" the # data with every feature of `aiapy` and plot the # data sequence using `sunpy`. @@ -70,8 +71,8 @@ map_cropped = map_normalized.submap(bottom_left, top_right=top_right) level_15_maps.append(map_cropped) -##################################################### -# Finally, we create a sequence of maps and animate it +############################################################################### +# Finally, we create a sequence of maps and animate it: sequence = sunpy.map.Map(level_15_maps, sequence=True) diff --git a/examples/instrument_degradation.py b/examples/instrument_degradation.py index d7d5ea6..6a71739 100644 --- a/examples/instrument_degradation.py +++ b/examples/instrument_degradation.py @@ -1,7 +1,7 @@ """ -======================================== +====================================== Modeling channel degradation over time -======================================== +====================================== This example demonstrates how to model the degradation of the AIA channels as a function of time over the entire @@ -17,7 +17,10 @@ from aiapy.calibrate import degradation from aiapy.calibrate.util import get_correction_table -########################################################### +# This lets you pass `astropy.time.Time` objects directly to matplotlib +time_support(format="jyear") + +############################################################################### # The sensitivity of the AIA channels degrade over time. 
Possible causes include # the deposition of organic molecules from the telescope structure onto the # optical elements and the decrease in detector sensitivity following (E)UV @@ -37,37 +40,46 @@ # First, fetch this correction table. It is not strictly necessary to do this explicitly, # but will significantly speed up the calculation by only fetching the table # once. + correction_table = get_correction_table() -########################################################### +############################################################################### # We want to compute the degradation for each EUV channel. -channels = [94, 131, 171, 193, 211, 304, 335] * u.angstrom -########################################################### -# We can use the `~astropy.time` subpackage to create an array of times +aia_channels = [94, 131, 171, 193, 211, 304, 335] * u.angstrom + +############################################################################### +# We can use `~astropy.time.Time` to create an array of times # between now and the start of the mission with a cadence of one week. -time_0 = astropy.time.Time("2010-03-25T00:00:00", scale="utc") + +start_time = astropy.time.Time("2010-03-25T00:00:00", scale="utc") now = astropy.time.Time.now() -time = time_0 + np.arange(0, (now - time_0).to(u.day).value, 7) * u.day +time_range = start_time + np.arange(0, (now - start_time).to(u.day).value, 7) * u.day -########################################################### +############################################################################### # Finally, we can use the `aiapy.calibrate.degradation` function to # compute the degradation for a particular channel and observation time. # This is modeled as the ratio of the effective area measured at a particular # calibration epoch over the uncorrected effective area with a polynomial # interpolation to the exact time. -deg = {c: degradation(c, time, correction_table=correction_table) for c in channels} -########################################################### +degradations = { + channel: degradation(channel, time_range, correction_table=correction_table) for channel in aia_channels +} + +############################################################################### # Plotting the different degradation curves as a function of time, we can # easily visualize how the different channels have degraded over time. -time_support(format="jyear") # This lets you pass astropy.time.Time objects directly to matplotlib + fig = plt.figure() ax = fig.gca() -for c in channels: - ax.plot(time, deg[c], label=f"{c.value:.0f} Å") -ax.set_xlim(time[[0, -1]]) + +for channel in aia_channels: + ax.plot(time_range, degradations[channel], label=f"{channel:latex}") + +ax.set_xlim(time_range[[0, -1]]) ax.legend(frameon=False, ncol=4, bbox_to_anchor=(0.5, 1), loc="lower center") ax.set_xlabel("Time") ax.set_ylabel("Degradation") + plt.show() diff --git a/examples/prepping_level_1_data.py b/examples/prepping_level_1_data.py index 602f435..99fae71 100644 --- a/examples/prepping_level_1_data.py +++ b/examples/prepping_level_1_data.py @@ -7,23 +7,25 @@ rescale them to a common plate scale, and remove the roll angle. This process is often referred to as "aia_prep" and the resulting data are typically referred to as level 1.5 data. In this example, we will demonstrate how to do this with `aiapy`. -This corresponds to the `aia_prep.pro` procedure as described in the `SDO Analysis Guide `__. +This corresponds to the ``aia_prep.pro`` procedure as described in the `SDO Analysis Guide `__. 
""" +import matplotlib.pyplot as plt import sunpy.map import aiapy.data.sample as sample_data from aiapy.calibrate import register, update_pointing -########################################################### +############################################################################### # Performing multi-wavelength analysis on level 1 data can be problematic as # each of the AIA channels have slightly different spatial scales and roll # angles. Furthermore, the estimates of the pointing keywords (``CDELT1``, ``CDELT2``, ``CRPIX1``, -# ``CRPIX2``, ``CROTA2``) may have been improved due to limb fitting procedures. The -# `Joint Science Operations Center (JSOC) `_ stores +# ``CRPIX2``, ``CROTA2``) may have been improved due to limb fitting procedures after +# the level 1 file has been created. +# The `Joint Science Operations Center (JSOC) `_ stores # AIA image data and metadata separately; when users download AIA data, these # two data types are combined to produce a FITS file. While metadata are -# continuously updated at JSOC, previously downloaded FITS files will not +# continuously updated at the JSOC, previously downloaded FITS files will not # contain the most recent information. # # Thus, before performing any multi-wavelength analyses, level 1 data @@ -33,42 +35,51 @@ # # First, let's read a level 1 94 Å AIA image from the ``aiapy`` sample data into # a `~sunpy.map.Map` object. -m = sunpy.map.Map(sample_data.AIA_094_IMAGE) -########################################################### +aia_map = sunpy.map.Map(sample_data.AIA_094_IMAGE) + +############################################################################### # The first step in this process is to update the metadata of the map to the # most recent pointing using the `aiapy.calibrate.update_pointing` function. # This function queries the JSOC for the most recent pointing information, # updates the metadata, and returns a `sunpy.map.Map` with updated metadata. -m_updated_pointing = update_pointing(m) -########################################################### +aia_map_updated_pointing = update_pointing(aia_map) + +############################################################################### # If we take a look at the plate scale and rotation matrix of the map, we # find that the scale is slightly off from the expected value of :math:`0.6''` per # pixel and that the rotation matrix has off-diagonal entries. -print(m_updated_pointing.scale) -print(m_updated_pointing.rotation_matrix) -########################################################### +print(aia_map_updated_pointing.scale) +print(aia_map_updated_pointing.rotation_matrix) + +############################################################################### # We can use the `aiapy.calibrate.register` function to scale the image to # the :math:`0.6''` per pixel and derotate the image such that the y-axis is aligned # with solar North. -m_registered = register(m_updated_pointing) -########################################################### +aia_map_registered = register(aia_map_updated_pointing) + +############################################################################### # If we look again at the plate scale and rotation matrix, we # should find that the plate scale in each direction is :math:`0.6''` # per pixel and that the rotation matrix is diagonalized. -# The image in `m_registered` is now a level 1.5 data product. -print(m_registered.scale) -print(m_registered.rotation_matrix) +# The image in ``aia_map_registered`` is now a level 1.5 data product. 
+print(aia_map_registered.scale) +print(aia_map_registered.rotation_matrix) -########################################################### +############################################################################### # Finally, we can plot the exposure-normalized map. # Note that small negative pixel values are possible because # CCD images were taken with a pedestal set at ~100 DN. # This pedestal is then subtracted when the JSOC pipeline -# performs dark (+pedestal) subtraction and flatfielding +# performs dark (+ pedestal) subtraction and flat fielding # to generate level 1 files. -m_registered.peek(vmin=0) + +fig = plt.figure() +ax = fig.add_subplot(projection=aia_map_registered) +aia_map_registered.plot(axes=ax) + +plt.show() diff --git a/examples/replace_hot_pixels.py b/examples/replace_hot_pixels.py index c61ea9a..5721986 100644 --- a/examples/replace_hot_pixels.py +++ b/examples/replace_hot_pixels.py @@ -14,7 +14,7 @@ import aiapy.data.sample as sample_data from aiapy.calibrate import fetch_spikes, respike -#################################################### +############################################################################### # AIA level 1 images have been corrected for hot-pixels (commonly referred to # as "spikes") using an automated correction algorithm which detects them, # removes them, and replaces the "holes" left in the image via interpolation. @@ -23,7 +23,7 @@ # physically meaningful. In this example, we will demonstrate how to revert # this removal by putting back all the removed pixel values with the # `aiapy.calibrate.respike` in function. This corresponds to the -# `aia_respike.pro` IDL procedure as described in the +# ``aia_respike.pro`` IDL procedure as described in the # `SDO Analysis Guide `_. # # The header keywords ``LVL_NUM`` and ``NSPIKES`` describe the level number of the @@ -31,14 +31,15 @@ # (i.e. the "spikes"). The data containing the information of the pixel # position and the intensities of the removed hot pixels are available from the # `Joint Science Operations Center (JSOC) `_ as a -# separate segment of the `aia.lev1_euv_12s` and `aia.lev1_uv_24s` data series +# separate segment of the ``aia.lev1_euv_12s`` and ``aia.lev1_uv_24s`` data series -#################################################### +############################################################################### # First, let's read a level 1 193 Å AIA image from the aiapy sample data # into a `~sunpy.map.Map` object. -m = sunpy.map.Map(sample_data.AIA_193_IMAGE) -########################################################### +aia_map = sunpy.map.Map(sample_data.AIA_193_IMAGE) + +############################################################################### # The spike data are stored as separate data segments in JSOC # as a :math:`3\times N` arrays, where :math:`N` is the number of spikes # removed and the three dimensions correspond to the the 1-D pixel index @@ -52,58 +53,64 @@ # spikes to the 2D pixel full-disk pixel coordinate system given a # `~sunpy.map.Map` representing a level 1 AIA image. # -positions, values = fetch_spikes(m) -########################################################### +positions, values = fetch_spikes(aia_map) + +############################################################################### # Now we are ready to respike the level 1 AIA image. The # `aiapy.calibrate.respike` function performs the respike operation on the given # input image and returns a `~sunpy.map.Map` with the respiked image. 
This # operation also alters the metadata by updating the ``LVL_NUM``, ``NSPIKES``, -# and `COMMENTS` keywords. +# and ``COMMENTS`` keywords. # # Note that explicitly specifying the spike positions and values is optional. # If they are not given, they are automatically queried from the JSOC. -m_respiked = respike(m, spikes=(positions, values)) -########################################################### +aia_map_respiked = respike(aia_map, spikes=(positions, values)) + +############################################################################### # Now let's create a cutouts of the original level 1 and "re-spiked" (i.e. # level 0.5) images for a region with hot pixels. -top_right = SkyCoord(30 * u.arcsec, 420 * u.arcsec, frame=m.coordinate_frame) -bottom_left = SkyCoord(-120 * u.arcsec, 280 * u.arcsec, frame=m.coordinate_frame) -m_cutout = m.submap(bottom_left, top_right=top_right) -m_respiked_cutout = m_respiked.submap(bottom_left, top_right=top_right) -########################################################### +top_right = SkyCoord(30 * u.arcsec, 420 * u.arcsec, frame=aia_map.coordinate_frame) +bottom_left = SkyCoord(-120 * u.arcsec, 280 * u.arcsec, frame=aia_map.coordinate_frame) +aia_map_cutout = aia_map.submap(bottom_left, top_right=top_right) +aia_map_respiked_cutout = aia_map_respiked.submap(bottom_left, top_right=top_right) + +############################################################################### # Note that we can also retrieve the positions of the spikes # as `~astropy.coordinates.SkyCoord` objects in the projected coordinate -# system of the image using the `as_coords=True` keyword argument. This +# system of the image using the ``as_coords=True`` keyword argument. This # gives us only those spikes in the field of view of the cutout. -spike_coords, _ = fetch_spikes(m_cutout, as_coords=True) -########################################################### +spike_coords, _ = fetch_spikes(aia_map_cutout, as_coords=True) + +############################################################################### # Finally, let's plot the two cutouts for comparison and plot # the positions of the spikes in both images, denoted by white # circles. + fig = plt.figure() -ax = fig.add_subplot(121, projection=m_cutout) +ax = fig.add_subplot(121, projection=aia_map_cutout) ax.plot_coord(spike_coords, "o", color="white", fillstyle="none", markersize=15) -m_cutout.plot(axes=ax, title='Level 1 "de-spiked" data') +aia_map_cutout.plot(axes=ax, title='Level 1 "de-spiked" data') lon, lat = ax.coords lon.set_axislabel("HPC Longitude") lat.set_axislabel("HPC Latitude") -ax = fig.add_subplot(122, projection=m_respiked_cutout) +ax = fig.add_subplot(122, projection=aia_map_respiked_cutout) ax.plot_coord(spike_coords, "o", color="white", fillstyle="none", markersize=15) -m_respiked_cutout.plot(axes=ax, annotate=False) +aia_map_respiked_cutout.plot(axes=ax, annotate=False) ax.set_title('Level 0.5 "re-spiked" data') lon, lat = ax.coords lon.set_axislabel("HPC Longitude") lat.set_axislabel(" ") lat.set_ticklabel_visible(visible=False) + plt.show() -########################################################### +############################################################################### # Lastly, let's check the metadata in both the level 1 and resulting # 0.5 images to double check that the appropriate keywords have been updated. 
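# (Illustrative note, not part of this patch: since ``respike`` updates the
# ``LVL_NUM``, ``NSPIKES``, and ``COMMENTS`` keywords, all three keys printed
# below are expected to differ between the two cutouts.)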
for k in ["lvl_num", "nspikes", "comments"]:
-    print(f"Level 1: {k}: {m_cutout.meta.get(k)}")
-    print(f"Level 0.5: {k}: {m_respiked_cutout.meta.get(k)}")
+    print(f"Level 1: {k}: {aia_map_cutout.meta.get(k)}")
+    print(f"Level 0.5: {k}: {aia_map_respiked_cutout.meta.get(k)}")
diff --git a/examples/skip_psf_deconvolution.py b/examples/skip_psf_deconvolution.py
index 455b9f4..800b807 100644
--- a/examples/skip_psf_deconvolution.py
+++ b/examples/skip_psf_deconvolution.py
@@ -6,6 +6,7 @@
 This example demonstrates how to deconvolve an AIA image with the instrument
 point spread function (PSF).
 """
+
 import astropy.units as u
 import matplotlib.pyplot as plt
 import sunpy.map
@@ -15,13 +16,13 @@
 import aiapy.data.sample as sample_data
 import aiapy.psf

-#########################################
+###############################################################################
 # AIA images are subject to convolution with the instrument point-spread
 # function (PSF) due to effects introduced by the filter mesh of the telescope
 # and the CCD, among others. This has the effect of "blurring" the image.
-# The PSF diffraction pattern may also be particularly noticable during the
+# The PSF diffraction pattern may also be particularly noticeable during the
 # impulsive phase of a flare where the intensity enhancement is very localized.
 # To remove these artifacts, the PSF must be deconvolved from the image.
 #
 # First, we'll use a single level 1 image from the 171 Å channel from
 # 15 March 2019. Note that deconvolution should be performed on level 1 images
@@ -29,39 +30,46 @@
 # on the CCD grid. Once deconvolved, the image can be passed to
 # `aiapy.calibrate.register`
 # (see the :ref:`sphx_glr_generated_gallery_prepping_level_1_data.py` example).
-m = sunpy.map.Map(sample_data.AIA_171_IMAGE)
+
+aia_map = sunpy.map.Map(sample_data.AIA_171_IMAGE)
+
 fig = plt.figure()
-ax = fig.add_subplot(111, projection=m)
-m.plot(
+ax = fig.add_subplot(111, projection=aia_map)
+aia_map.plot(
     axes=ax,
 )

-#######################################
+###############################################################################
 # Next, we'll calculate the PSF using `aiapy.psf.psf` for the 171 Å channel.
 # The PSF model accounts for several different effects, including diffraction
 # from the mesh grating of the filters, charge spreading, and jitter. See
 # `Grigis et al (2012) `_
-# for more details. Currently, this only works for
-# :math:`4096\times4096` full frame images.
+# for more details. Currently, this only works for :math:`4096\times4096` full-frame images.
 #
-# Note that this will be significantly faster if you have a GPU and the `cupy`
+# Note that this will be significantly faster if you have an NVIDIA GPU and the `cupy`
 # package installed.
-psf = aiapy.psf.psf(m.wavelength)
-#############################################
+
+psf = aiapy.psf.psf(aia_map.wavelength)
+
+###############################################################################
 # We'll plot just a 500-by-500 pixel section centered on the center pixel. The
 # diffraction "arms" extending from the center pixel can often be seen in
 # flare observations due to the intense, small-scale brightening.
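+
+# As a quick sanity check first (our own aside, not part of the original
+# example): the PSF is applied as a convolution kernel, so its values should
+# be non-negative and are expected to sum to approximately one.
+print(f"PSF total: {float(psf.sum()):.4f}")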
+
 fov = 500
 lc_x, lc_y = psf.shape[0] // 2 - fov // 2, psf.shape[1] // 2 - fov // 2
-plt.imshow(
+fig = plt.figure()
+ax = fig.add_subplot(111)
+ax.imshow(
     psf[lc_x : lc_x + fov, lc_y : lc_y + fov],
     norm=ImageNormalize(vmin=1e-8, vmax=1e-3, stretch=LogStretch()),
+    origin="lower",
 )
-plt.colorbar()
-plt.show()
+ax.set_title("PSF")
+ax.set_xlabel("Pixels")
+ax.set_ylabel("Pixels")

-###############################################
+###############################################################################
 # Now that we've downloaded our image and computed the PSF, we can deconvolve
 # the image with the PSF using the
 # `Richardson-Lucy deconvolution algorithm `_.
@@ -70,41 +78,54 @@
 # wavelength, it is most efficient to only calculate the PSF once.
 #
 # As with `aiapy.psf.psf`, this will be much faster if you have
-# a GPU and `cupy` installed.
-m_deconvolved = aiapy.psf.deconvolve(m, psf=psf)
+# an NVIDIA GPU and `cupy` installed.
+
+aia_map_deconvolved = aiapy.psf.deconvolve(aia_map, psf=psf)

-################################################
+###############################################################################
 # Let's compare the convolved and deconvolved images.
-norm = ImageNormalize(vmin=0, vmax=1.5e4, stretch=AsinhStretch(0.01))
+
 fig = plt.figure()
-ax = fig.add_subplot(121, projection=m)
-m.plot(axes=ax, norm=norm)
-ax = fig.add_subplot(122, projection=m_deconvolved)
-m_deconvolved.plot(axes=ax, annotate=False, norm=norm)
+ax = fig.add_subplot(121, projection=aia_map)
+norm = ImageNormalize(vmin=0, vmax=1.5e4, stretch=AsinhStretch(0.01))
+aia_map.plot(axes=ax, norm=norm)
+ax.set_title("Normal")
+
+ax = fig.add_subplot(122, projection=aia_map_deconvolved)
+aia_map_deconvolved.plot(axes=ax, annotate=False, norm=norm)
+ax.set_title("Deconvolved")
 ax.coords[0].set_axislabel(" ")
 ax.coords[1].set_axislabel(" ")
 ax.coords[1].set_ticklabel_visible(visible=False)
-plt.show()
+fig.tight_layout()

-#################################################
+###############################################################################
 # The differences become a bit more obvious when we zoom in. Note that the
 # deconvolution has the effect of "deblurring" the image.
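+
+# Before zooming in, one more illustrative check (an assumption on our part,
+# not from the original example): Richardson-Lucy deconvolution should
+# approximately conserve the total flux in the image, so this ratio should be
+# close to one.
+print(f"Flux ratio: {aia_map_deconvolved.data.sum() / aia_map.data.sum():.4f}")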
+
 left_corner = 500 * u.arcsec, -600 * u.arcsec
 right_corner = 1000 * u.arcsec, -100 * u.arcsec
-fig = plt.figure()
-m_sub = m.submap(
-    bottom_left=SkyCoord(*left_corner, frame=m.coordinate_frame),
-    top_right=SkyCoord(*right_corner, frame=m.coordinate_frame),
+
+aia_map_sub = aia_map.submap(
+    bottom_left=SkyCoord(*left_corner, frame=aia_map.coordinate_frame),
+    top_right=SkyCoord(*right_corner, frame=aia_map.coordinate_frame),
 )
-ax = fig.add_subplot(121, projection=m_sub)
-m_sub.plot(axes=ax, norm=norm)
-m_deconvolved_sub = m_deconvolved.submap(
-    bottom_left=SkyCoord(*left_corner, frame=m_deconvolved.coordinate_frame),
-    top_right=SkyCoord(*right_corner, frame=m_deconvolved.coordinate_frame),
+aia_map_deconvolved_sub = aia_map_deconvolved.submap(
+    bottom_left=SkyCoord(*left_corner, frame=aia_map_deconvolved.coordinate_frame),
+    top_right=SkyCoord(*right_corner, frame=aia_map_deconvolved.coordinate_frame),
 )
-ax = fig.add_subplot(122, projection=m_deconvolved_sub)
-m_deconvolved_sub.plot(axes=ax, annotate=False, norm=norm)
+
+fig = plt.figure()
+
+ax = fig.add_subplot(121, projection=aia_map_sub)
+aia_map_sub.plot(axes=ax, norm=norm)
+ax.set_title("Normal")
+
+ax = fig.add_subplot(122, projection=aia_map_deconvolved_sub)
+aia_map_deconvolved_sub.plot(axes=ax, annotate=False, norm=norm)
+ax.set_title("Deconvolved")
 ax.coords[0].set_axislabel(" ")
 ax.coords[1].set_axislabel(" ")
 ax.coords[1].set_ticklabel_visible(visible=False)
+
 plt.show()
diff --git a/examples/update_header_keywords.py b/examples/update_header_keywords.py
index f3e0cef..cb44cff 100644
--- a/examples/update_header_keywords.py
+++ b/examples/update_header_keywords.py
@@ -9,18 +9,19 @@
 position.
 """

+import matplotlib.pyplot as plt
 import sunpy.map

 import aiapy.data.sample as sample_data
 from aiapy.calibrate import fix_observer_location, update_pointing

-###########################################################
+###############################################################################
 # An AIA FITS header contains various pieces of
 # `standard `_
 # metadata that are critical to the physical interpretation of the data.
 # These include the pointing of the spacecraft, necessary for connecting
 # positions on the pixel grid to physical locations on the Sun, as well as
-# the observer (i.e. satellite) location.
+# the observer (i.e., satellite) location.
 #
 # While this metadata is recorded in the FITS header, some values in
 # the headers exported by data providers (e.g.
@@ -32,62 +33,74 @@
 #
 # For this example, we will read a 171 Å image from the aiapy sample data
 # into a `~sunpy.map.Map` object.
-m = sunpy.map.Map(sample_data.AIA_171_IMAGE)
-###########################################################
+aia_map = sunpy.map.Map(sample_data.AIA_171_IMAGE)
+
+###############################################################################
 # To update the pointing keywords, we can pass our `~sunpy.map.Map` to the
 # `aiapy.calibrate.update_pointing` function. This function will query the
 # JSOC, using `~sunpy`, for the most recent pointing information, update
 # the metadata, and then return a new `~sunpy.map.Map` with this updated
 # metadata.
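+#
+# (Optional, and hypothetical in its specifics: if you already have a pointing
+# table, e.g. one retrieved with ``aiapy.calibrate.util.get_pointing_table``
+# for an interval spanning the observation, it can be passed in via the
+# ``pointing_table`` keyword argument, as in
+# ``update_pointing(aia_map, pointing_table=pointing_table)``, to avoid a
+# JSOC query for every map.)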
-m_updated_pointing = update_pointing(m)
-############################################################
-# If we inspect the reference pixel and rotation matrix of the original map
-print(m.reference_pixel)
-print(m.rotation_matrix)
+aia_map_updated_pointing = update_pointing(aia_map)
+
+###############################################################################
+# If we inspect the reference pixel and rotation matrix of the original map:
+
+print(aia_map.reference_pixel)
+print(aia_map.rotation_matrix)

-############################################################
-# and the map with the updated pointing information
-print(m_updated_pointing.reference_pixel)
-print(m_updated_pointing.rotation_matrix)
+###############################################################################
+# and the map with the updated pointing information:

-############################################################
-# we find that the relevant keywords, `CRPIX1`, `CRPIX2`, `CDELT1`, `CDELT2`,
-# and `CROTA2`, have been updated.
+print(aia_map_updated_pointing.reference_pixel)
+print(aia_map_updated_pointing.rotation_matrix)
+
+###############################################################################
+# We find that the relevant keywords, ``CRPIX1``, ``CRPIX2``, ``CDELT1``, ``CDELT2``,
+# and ``CROTA2``, have been updated.
 #
 # Similarly, the Heliographic Stonyhurst (HGS) coordinates of the observer
 # location in the header are inaccurate. If we check the HGS longitude keyword
 # in the header, we find that it is 0 degrees, which is not the HGS longitude
 # coordinate of SDO.
-print(m_updated_pointing.meta["hgln_obs"])
-print(m_updated_pointing.meta["hglt_obs"])
-############################################################
+
+print(aia_map_updated_pointing.meta["hgln_obs"])
+print(aia_map_updated_pointing.meta["hglt_obs"])
+
+###############################################################################
 # To update the HGS observer coordinates, we can use the
 # `aiapy.calibrate.fix_observer_location` function. This function reads the
 # correct observer location from Heliocentric Aries Ecliptic (HAE) coordinates
 # in the header, converts them to HGS, and replaces the inaccurate HGS
 # keywords.
-m_observer_fixed = fix_observer_location(m_updated_pointing)
-############################################################
+aia_map_observer_fixed = fix_observer_location(aia_map_updated_pointing)
+
+###############################################################################
 # Looking again at the HGS longitude and latitude keywords, we can see that
 # they have been updated.
-print(m_observer_fixed.meta["hgln_obs"])
-print(m_observer_fixed.meta["hglt_obs"])
+print(aia_map_observer_fixed.meta["hgln_obs"])
+print(aia_map_observer_fixed.meta["hglt_obs"])

-############################################################
-# Note that in `~sunpy.map.AIAMap`, the `~sunpy.map.Map.observer_coordinate`
+###############################################################################
+# Note that in `~sunpy.map.sources.AIAMap`, the `~sunpy.map.GenericMap.observer_coordinate`
 # attribute is already derived from the HAE coordinates such that it is not
 # strictly necessary to apply `aiapy.calibrate.fix_observer_location`.
For # example, the unfixed `~sunpy.map.Map` will still have an accurate derived # observer position -print(m_updated_pointing.observer_coordinate) -############################################################ +print(aia_map_updated_pointing.observer_coordinate) + +############################################################################### # However, we suggest that users apply this fix such that the information -# stored in `~sunpy.map.Map.meta` is accurate and consistent. +# stored in `~sunpy.map.GenericMap.meta` is accurate and consistent. # # Finally, plot the fixed map. -m_observer_fixed.peek() + +fig = plt.figure() +ax = fig.add_subplot(projection=aia_map_observer_fixed) +aia_map_observer_fixed.plot(axes=ax) + +plt.show() diff --git a/pyproject.toml b/pyproject.toml index 53e1ffb..c2e2ca4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ name = "aiapy" dynamic = ["version"] description = "Python library for AIA data analysis." readme = "README.rst" -requires-python = ">=3.9" +requires-python = ">=3.10" license = {file = "LICENSE.txt"} keywords = [ "solar physics", @@ -25,7 +25,7 @@ keywords = [ ] authors = [ {email = "freij@baeri.org"}, - {name = "AIA Instrument Team"} + {name = "AIA Instrument Team @ LMSAL"} ] classifiers = [ "Development Status :: 4 - Beta", @@ -35,7 +35,6 @@ classifiers = [ "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -54,13 +53,15 @@ changelog = "https://aiapy.readthedocs.io/en/stable/changelog.html" [project.optional-dependencies] all = ["aiapy"] cupy = [ - 'cupy', + "cupy", ] tests = [ "aiapy[all]", "hissw", "pytest", "pytest-astropy", + "pytest-cov", + "pytest-xdist", ] docs = [ "aiapy[all]", @@ -87,118 +88,12 @@ write_to = "aiapy/_version.py" [tool.setuptools.exclude-package-data] aiapy = ["aiapy._dev"] -[tool.pytest.ini_options] -testpaths = [ - "aiapy", - "docs", -] -norecursedirs = [ - ".tox", - "build", - '''docs[\/]_build''', - '''docs[\/]generated''', - "*.egg-info", - "examples", - '''aiapy[/\]_dev''', - ".jupyter", - ".history", -] -doctest_plus = "enabled" -doctest_optionflags = "NORMALIZE_WHITESPACE FLOAT_CMP ELLIPSIS" -addopts = "--doctest-rst --doctest-ignore-import-errors -p no:unraisableexception -p no:threadexception" -markers = [ - "remote_data: marks this test function as needing remote data.", -] -remote_data_strict = "True" -filterwarnings = [ - "error", - "always::pytest.PytestConfigWarning", - "ignore:.*deprecated and slated for removal in Python 3.13", - "ignore:numpy.ufunc size changed:RuntimeWarning", - "ignore:numpy.ndarray size changed:RuntimeWarning", - "ignore:.*unitfix.*", - "ignore:invalid value encountered in sqrt", -] - [tool.coverage.run] branch = true omit = [ "*test_idl.py", ] -[tool.black] -line-length = 120 -target-version = ['py39'] - -[tool.isort] -profile = "black" -line_length = 120 -length_sort = "False" -length_sort_sections = "stdlib" - -[tool.ruff] -# Allow unused variables when underscore-prefixed. 
-dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" -target-version = "py39" -line-length = 120 -exclude = [ - ".eggs", - ".git", - ".mypy_cache", - ".ruff_cache", - ".tox", - ".venv", - "__pypackages__", - "_build", - "build", - "dist", - "node_modules", - "venv", -] -select = [ - "E", - "F", - "W", - "UP", - "PT", - "RET", - "TID", - "PLE", - "NPY", - "RUF", - "PGH", - "PTH", - "BLE", - "FBT", - "B", - "A", - "COM", - "C4", - "T20", - "RSE", - "ERA", -] -ignore = ["E501"] -extend-ignore = [ - "PGH004", # NOQA IS THE BEST OF ALL TIME -] - -[tool.ruff.per-file-ignores] -"examples/*.py" = [ - "T201", # We need print in our examples -] -"docs/*.py" = [ - "INP001", # implicit-namespace-package. The examples are not a package. - "A001", # Variable `copyright` is shadowing a python builtin -] -"aiapy/data/sample.py" = [ -"A001", # Variable `__doc__` is shadowing a python builtin -"PLE0604", # Invalid object in `__all__`, must contain only strings -] - -[tool.ruff.pydocstyle] -convention = "numpy" - [tool.codespell] ignore-words-list = "emiss" diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..971523a --- /dev/null +++ b/pytest.ini @@ -0,0 +1,48 @@ +[pytest] +minversion = 7.0 +testpaths = + aiapy + docs +norecursedirs = + .tox + build + docs/_build + docs/generated + *.egg-info + examples + aiapy/_dev + .history +doctest_plus = enabled +doctest_optionflags = NORMALIZE_WHITESPACE FLOAT_CMP ELLIPSIS +addopts = --arraydiff --doctest-rst --doctest-ignore-import-errors -p no:unraisableexception -p no:threadexception +remote_data_strict = true +junit_family = xunit1 +filterwarnings = + error + # Do not fail on pytest config issues (i.e. missing plugins) but do show them + always::pytest.PytestConfigWarning + # A list of warnings to ignore follows. If you add to this list, you MUST + # add a comment or ideally a link to an issue that explains why the warning + # is being ignored + # https://github.com/pytest-dev/pytest-cov/issues/557 + # It was fixed and released but it does not seem to be fixed + ignore:The --rsyncdir command line argument and rsyncdirs config variable are deprecated.:DeprecationWarning + # Raised by sunpy.coordinates.transformations and will be removed in sunpy 6.1 + ignore:.*module is deprecated, as it was designed for internal use + # https://github.com/pandas-dev/pandas/issues/54466 + # Should stop when pandas 3.0.0 is released + ignore:(?s).*Pyarrow will become a required dependency of pandas:DeprecationWarning + # Zeep relies on deprecated cgi in Python 3.11 + # Needs a release of zeep 4.2.2 or higher + # https://github.com/mvantellingen/python-zeep/pull/1364 + ignore:'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning + # Can be removed when https://github.com/dateutil/dateutil/issues/1314 is resolved + # deprecated in Python 3.12, needs a release of dateutil 2.8.3 or higher + ignore:datetime.datetime.utcfromtimestamp():DeprecationWarning + # Raised by changing 'seconds' -> 's' + ignore::astropy.wcs.wcs.FITSFixedWarning + # SOURCE_UNCERTAINTY_DN = np.sqrt(SOURCE_DATA_DN) + ignore:invalid value encountered in sqrt:RuntimeWarning + # The following are raised by the py310-oldestdeps job + ignore:distutils Version classes are deprecated + ignore:ERFA function * diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 0000000..a3c7395 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,86 @@ +# Allow unused variables when underscore-prefixed. 
+lint.dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+target-version = "py310"
+line-length = 120
+exclude = [
+    ".git",
+    "__pycache__",
+    "build",
+    "tools/**",
+]
+lint.select = [
+    "A",
+    "ARG",
+    "ASYNC",
+    "B",
+    "BLE",
+    "C4",
+#    "C90",
+    "COM",
+#    "D",
+    "DTZ",
+    "E",
+    "EM",
+    "ERA",
+    "EXE",
+    "F",
+    "FBT",
+    "FLY",
+#    "FURB",
+    "G",
+    "I",
+    "ICN",
+    "INP",
+    "INT",
+    "ISC",
+    "LOG",
+#    "N",
+    "NPY",
+    "PERF",
+    "PGH",
+    "PIE",
+#    "PL",
+    "PLE",
+    "PT",
+    "PTH",
+    "PYI",
+    "Q",
+    "RET",
+    "RSE",
+    "RUF",
+#    "S",
+    "SIM",
+    "SLF",
+    "SLOT",
+    "T10",
+    "T20",
+    "TCH",
+    "TID",
+    "TRIO",
+    "TRY",
+    "UP",
+    "W",
+    "YTT",
+]
+lint.extend-ignore = [
+    "E501", # Line too long
+    "COM812", # May cause conflicts when used with the formatter
+    "ISC001", # May cause conflicts when used with the formatter
+]
+
+[lint.per-file-ignores]
+"examples/*.py" = [
+    "INP001", # examples is part of an implicit namespace package
+    "T201", # We need print in our examples
+]
+"docs/conf.py" = [
+    "INP001", # conf.py is part of an implicit namespace package
+]
+
+[lint.pydocstyle]
+convention = "numpy"
+
+[format]
+docstring-code-format = true
+indent-style = "space"
+quote-style = "double"
diff --git a/tox.ini b/tox.ini
index e8f545b..8a696dc 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,7 @@
 [tox]
 minversion = 4.0
 envlist =
-    py{39,310,311}{,-online,-devdeps,-rc}
+    py{310,311,312}{,-online,-devdeps,-rc}
     build_docs
     codestyle

@@ -13,7 +13,7 @@ allowlist_externals=
 setenv =
     MPLBACKEND = agg
     SUNPY_SAMPLEDIR = {env:SUNPY_SAMPLEDIR:{toxinidir}/.tox/sample_data/}
-    PYTEST_COMMAND = pytest -vvv -r as --pyargs aiapy --cov-report=xml --cov=aiapy {toxinidir}/docs
+    PYTEST_COMMAND = pytest -vvv -r as --pyargs aiapy --cov-report=xml --cov=aiapy -n auto --color=yes {toxinidir}/docs
     devdeps,build_docs,online: HOME = {envtmpdir}
     PARFIVE_HIDE_PROGRESS = True
     devdeps: PIP_EXTRA_INDEX_URL = https://pypi.anaconda.org/astropy/simple https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
@@ -23,18 +23,14 @@ description =
 deps =
     devdeps: git+https://github.com/astropy/astropy.git
     devdeps: git+https://github.com/sunpy/sunpy.git
-    online: pytest-rerunfailures
-    online: pytest-timeout
     rc: sunpy>=0.0.dev0
-    pytest-cov
-    pytest-xdist
 extras =
     all
     tests
 commands =
     pip freeze --all --no-input
     !online: {env:PYTEST_COMMAND} {posargs}
-    online: {env:PYTEST_COMMAND} --reruns 2 --reruns-delay 15 --timeout=30 --remote-data=any {posargs}
+    online: {env:PYTEST_COMMAND} --remote-data=any {posargs}

 [testenv:build_docs]
 changedir = docs
@@ -44,7 +40,7 @@ extras =
     docs
 commands =
     pip freeze --all --no-input
-    sphinx-build -j auto --color -W --keep-going -b html -d _build/.doctrees . _build/html {posargs}
+    sphinx-build --color -W --keep-going -b html -d _build/.doctrees . _build/html {posargs}
     python -c 'import pathlib; print("Documentation available under file://\{0\}".format(pathlib.Path(r"{toxinidir}") / "docs" / "_build" / "index.html"))'

[testenv:codestyle]