diff --git a/docs/reference/api_nuts_bolts.rst b/docs/reference/api_nuts_bolts.rst
index 3e376a7808..1c0116deec 100644
--- a/docs/reference/api_nuts_bolts.rst
+++ b/docs/reference/api_nuts_bolts.rst
@@ -23,6 +23,10 @@ Utilities API
    :no-inheritance-diagram:
    :no-inherited-members:
 
+.. automodapi:: jdaviz.configs.cubeviz.plugins.mixins
+   :no-inheritance-diagram:
+   :no-inherited-members:
+
 .. automodapi:: jdaviz.configs.imviz.wcs_utils
    :no-inheritance-diagram:
    :no-inherited-members:
diff --git a/jdaviz/configs/cubeviz/plugins/__init__.py b/jdaviz/configs/cubeviz/plugins/__init__.py
index 2f3d5b58d5..61390844ae 100644
--- a/jdaviz/configs/cubeviz/plugins/__init__.py
+++ b/jdaviz/configs/cubeviz/plugins/__init__.py
@@ -1,4 +1,5 @@
 from .tools import *  # noqa
+from .mixins import *  # noqa
 from .viewers import *  # noqa
 from .parsers import *  # noqa
 from .moment_maps.moment_maps import *  # noqa
diff --git a/jdaviz/configs/cubeviz/plugins/mixins.py b/jdaviz/configs/cubeviz/plugins/mixins.py
new file mode 100644
index 0000000000..a5be11bf59
--- /dev/null
+++ b/jdaviz/configs/cubeviz/plugins/mixins.py
@@ -0,0 +1,148 @@
+import numpy as np
+import astropy.units as u
+from functools import cached_property
+
+from jdaviz.core.marks import SliceIndicatorMarks
+
+__all__ = ['WithSliceIndicator', 'WithSliceSelection']
+
+
+class WithSliceIndicator:
+    @property
+    def slice_component_label(self):
+        return str(self.state.x_att)
+
+    @property
+    def slice_display_unit_name(self):
+        return 'spectral'
+
+    @cached_property
+    def slice_indicator(self):
+        # SliceIndicatorMarks does not yet exist
+        slice_indicator = SliceIndicatorMarks(self)
+        self.figure.marks = self.figure.marks + slice_indicator.marks
+        return slice_indicator
+
+    @property
+    def slice_values(self):
+        # NOTE: these are cached at the slice-plugin level
+        # Retrieve display units
+        slice_display_units = self.jdaviz_app._get_display_unit(
+            self.slice_display_unit_name
+        )
+
+        def _get_component(layer):
+            try:
+                # Retrieve layer data and units
+                data_comp = layer.layer.data.get_component(self.slice_component_label)
+            except (AttributeError, KeyError):
+                # layer either does not have get_component (because it's a subset)
+                # or slice_component_label is not a component in this layer;
+                # either way, return an empty array and skip this layer
+                return np.array([])
+
+            # Convert axis if display units are set and are different
+            data_units = getattr(data_comp, 'units', None)
+            if slice_display_units and data_units and slice_display_units != data_units:
+                data = np.asarray(data_comp.data, dtype=float) * u.Unit(data_units)
+                return data.to_value(slice_display_units,
+                                     equivalencies=u.spectral())
+            else:
+                return data_comp.data
+        try:
+            return np.asarray(np.unique(np.concatenate([_get_component(layer) for layer in self.layers])),  # noqa
+                              dtype=float)
+        except ValueError:
+            # NOTE: this will result in caching an empty list
+            return np.array([])
+
+    def _set_slice_indicator_value(self, value):
+        # this is a separate method so that viewers can override and map value if necessary
+        # NOTE: on first call, this will initialize the indicator itself
+        self.slice_indicator.value = value
+
+
+class WithSliceSelection:
+    @property
+    def slice_index(self):
+        # index in state.slices corresponding to the slice axis
+        return 2
+
+    @property
+    def slice_component_label(self):
+        slice_plg = self.jdaviz_helper.plugins.get('Slice', None)
+        if slice_plg is None:  # pragma: no cover
+            raise ValueError("slice plugin must be activated to access slice_component_label")
+        return slice_plg._obj.slice_indicator_viewers[0].slice_component_label
+
+    @property
+    def slice_display_unit_name(self):
+        return 'spectral'
+
+    @property
+    def slice_values(self):
+        # NOTE: these are cached at the slice-plugin level
+        # TODO: add support for multiple cubes (but then slice selection needs to be more complex)
+        # if slice_index is 0, then we want the equivalent of [:, 0, 0]
+        # if slice_index is 1, then we want the equivalent of [0, :, 0]
+        # if slice_index is 2, then we want the equivalent of [0, 0, :]
+        take_inds = [2, 1, 0]
+        take_inds.remove(self.slice_index)
+        converted_axis = np.array([])
+
+        for layer in self.layers:
+            world_comp_ids = layer.layer.data.world_component_ids
+            if self.slice_index >= len(world_comp_ids):
+                # Case where a 2D image is loaded in the image viewer
+                continue
+
+            # Retrieve display units
+            slice_display_units = self.jdaviz_app._get_display_unit(
+                self.slice_display_unit_name
+            )
+
+            try:
+                # Retrieve layer data and units using the slice index of the world component ids
+                data_comp = layer.layer.data.get_component(world_comp_ids[self.slice_index])
+            except (AttributeError, KeyError):
+                continue
+
+            data = np.asarray(data_comp.data.take(0, take_inds[0]).take(0, take_inds[1]),  # noqa
+                              dtype=float)
+
+            # Convert to display units if applicable
+            data_units = getattr(data_comp, 'units', None)
+            if slice_display_units and data_units and slice_display_units != data_units:
+                converted_axis = (data * u.Unit(data_units)).to_value(
+                    slice_display_units,
+                    equivalencies=u.spectral() + u.pixel_scale(1*u.pix)
+                )
+            else:
+                converted_axis = data
+
+        return converted_axis
+
+    @property
+    def slice(self):
+        return self.state.slices[self.slice_index]
+
+    @slice.setter
+    def slice(self, slice):
+        # NOTE: not intended for user-access - this should be controlled through the slice plugin
+        # in order to sync with all other viewers/slice indicators
+        slices = [0, 0, 0]
+        slices[self.slice_index] = slice
+        self.state.slices = tuple(slices)
+
+    @property
+    def slice_value(self):
+        return self.slice_values[self.slice]
+
+    @slice_value.setter
+    def slice_value(self, slice_value):
+        # NOTE: not intended for user-access - this should be controlled through the slice plugin
+        # in order to sync with all other viewers/slice indicators
+        # find the slice nearest slice_value
+        slice_values = self.slice_values
+        if not len(slice_values):
+            return
+        self.slice = np.argmin(abs(slice_values - slice_value))
diff --git a/jdaviz/configs/cubeviz/plugins/parsers.py b/jdaviz/configs/cubeviz/plugins/parsers.py
index a565ba36e1..cedf8191ba 100644
--- a/jdaviz/configs/cubeviz/plugins/parsers.py
+++ b/jdaviz/configs/cubeviz/plugins/parsers.py
@@ -73,7 +73,6 @@ def parse_data(app, file_obj, data_type=None, data_label=None,
             flux_viewer_reference_name=flux_viewer_reference_name,
             uncert_viewer_reference_name=uncert_viewer_reference_name
         )
-        app.get_tray_item_from_name("Spectral Extraction").disabled_msg = ""
     elif isinstance(file_obj, str):
         if file_obj.lower().endswith('.gif'):  # pragma: no cover
             _parse_gif(app, file_obj, data_label,
@@ -135,7 +134,6 @@ def parse_data(app, file_obj, data_type=None, data_label=None,
             flux_viewer_reference_name=flux_viewer_reference_name,
             uncert_viewer_reference_name=uncert_viewer_reference_name
         )
-        app.get_tray_item_from_name("Spectral Extraction").disabled_msg = ""
 
     # If the data types are custom data objects, use explicit parsers. Note
     # that this relies on the glue-astronomy machinery to turn the data object
@@ -152,13 +150,11 @@ def parse_data(app, file_obj, data_type=None, data_label=None,
             app, file_obj, data_label=data_label,
             spectrum_viewer_reference_name=spectrum_viewer_reference_name
         )
-        app.get_tray_item_from_name("Spectral Extraction").disabled_msg = ""
     elif isinstance(file_obj, np.ndarray) and file_obj.ndim == 3:
         _parse_ndarray(app, file_obj, data_label=data_label, data_type=data_type,
                        flux_viewer_reference_name=flux_viewer_reference_name,
                        uncert_viewer_reference_name=uncert_viewer_reference_name)
-        app.get_tray_item_from_name("Spectral Extraction").disabled_msg = ""
     else:
         raise NotImplementedError(f'Unsupported data format: {file_obj}')
diff --git a/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.py b/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.py
index 86590c4d03..809fe08e07 100644
--- a/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.py
+++ b/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.py
@@ -20,11 +20,12 @@
                                         ApertureSubsetSelectMixin,
                                         ApertureSubsetSelect, AddResults, AddResultsMixin,
-                                        skip_if_no_updates_since_last_active,
                                         skip_if_not_tray_instance,
+                                        skip_if_no_updates_since_last_active,
                                         with_spinner, with_temp_disable)
 from jdaviz.core.user_api import PluginUserApi
 from jdaviz.configs.cubeviz.plugins.parsers import _return_spectrum_with_correct_units
+from jdaviz.configs.cubeviz.plugins.viewers import WithSliceIndicator
 
 __all__ = ['SpectralExtraction']
@@ -74,6 +75,12 @@ class SpectralExtraction(PluginTemplateMixin, ApertureSubsetSelectMixin,
 
     active_step = Unicode().tag(sync=True)
 
+    resulting_product_name = Unicode("spectrum").tag(sync=True)
+    do_auto_extraction = True
+    # whether wavelength dependent options should be exposed to the user (in the UI)
+    wavelength_dependent_available = Bool(True).tag(sync=True)
+    bg_export_available = Bool(True).tag(sync=True)
+
     wavelength_dependent = Bool(False).tag(sync=True)
     reference_spectral_value = FloatHandleEmpty().tag(sync=True)
     slice_spectral_value = Float().tag(sync=True)
@@ -97,7 +104,7 @@ class SpectralExtraction(PluginTemplateMixin, ApertureSubsetSelectMixin,
     function_items = List().tag(sync=True)
     function_selected = Unicode('Sum').tag(sync=True)
     filename = Unicode().tag(sync=True)
-    extracted_spec_available = Bool(False).tag(sync=True)
+    extraction_available = Bool(False).tag(sync=True)
     overwrite_warn = Bool(False).tag(sync=True)
 
     aperture_method_items = List().tag(sync=True)
@@ -113,11 +120,6 @@ class SpectralExtraction(PluginTemplateMixin, ApertureSubsetSelectMixin,
     export_enabled = Bool(True).tag(sync=True)
 
     def __init__(self, *args, **kwargs):
-
-        self._default_spectrum_viewer_reference_name = kwargs.get(
-            "spectrum_viewer_reference_name", "spectrum-viewer"
-        )
-
         super().__init__(*args, **kwargs)
 
         self.extracted_spec = None
@@ -128,6 +130,7 @@ def __init__(self, *args, **kwargs):
         self.aperture._default_text = 'Entire Cube'
         self.aperture._manual_options = ['Entire Cube']
         self.aperture.items = [{"label": "Entire Cube"}]
+        self.aperture._subset_selected_changed_callback = self._update_extract
         # need to reinitialize choices since we overwrote items and some subsets may already
         # exist.
         self.aperture._initialize_choices()
@@ -140,7 +143,8 @@ def __init__(self, *args, **kwargs):
                                            'bg_scale_factor',
                                            dataset='dataset',
                                            multiselect=None,
-                                           default_text='None')
+                                           default_text='None',
+                                           subset_selected_changed_callback=self._update_extract)  # noqa
 
         self.bg_spec_add_results = AddResults(self, 'bg_spec_results_label',
                                               'bg_spec_results_label_default',
@@ -149,8 +153,8 @@ def __init__(self, *args, **kwargs):
                                               'bg_spec_results_label_overwrite',
                                               'bg_spec_add_to_viewer_items',
                                               'bg_spec_add_to_viewer_selected')
-        self.bg_spec_add_results.viewer.filters = ['is_spectrum_viewer']
-        self.bg_spec_results_label_default = 'background-spectrum'
+        self.bg_spec_add_results.viewer.filters = ['is_slice_indicator_viewer']
+        self.bg_spec_results_label_default = f'background-{self.resulting_product_name}'
 
         self.function = SelectPluginComponent(
             self,
@@ -166,27 +170,18 @@ def __init__(self, *args, **kwargs):
             manual_options=self.aperture_method_manual_options
         )
         self._set_default_results_label()
-        self.add_results.viewer.filters = ['is_spectrum_viewer']
+        self.add_results.viewer.filters = ['is_slice_indicator_viewer']
 
         self.session.hub.subscribe(self, SliceValueUpdatedMessage,
                                    handler=self._on_slice_changed)
 
+        self._update_disabled_msg()
+
         if self.app.state.settings.get('server_is_remote', False):
             # when the server is remote, saving the file in python would save on the server, not
             # on the user's machine, so export support in cubeviz should be disabled
             self.export_enabled = False
 
-        for data in self.app.data_collection:
-            if len(data.data.shape) == 3:
-                break
-        else:
-            # no cube-like data loaded. Once loaded, the parser will unset this
-            # TODO: change to an event listener on AddDataMessage
-            self.disabled_msg = (
-                "Spectral Extraction requires a single dataset to be loaded into Cubeviz, "
-                "please load data to enable this plugin."
-            )
-
     @property
     def user_api(self):
         expose = ['dataset', 'function', 'aperture',
@@ -198,6 +193,19 @@ def user_api(self):
 
         return PluginUserApi(self, expose=expose)
 
+    @observe('dataset_items')
+    def _update_disabled_msg(self, msg={}):
+        for data in self.app.data_collection:
+            if data.data.ndim == 3:
+                self.disabled_msg = ''
+                break
+        else:
+            # no cube-like data loaded. Once loaded, the parser will unset this
+            self.disabled_msg = (
+                f"{self.__class__.__name__} requires a 3d cube dataset to be loaded, "
+                "please load data to enable this plugin."
+            )
+
     @property
     def live_update_subscriptions(self):
         return {'data': ('dataset',), 'subset': ('aperture', 'background')}
@@ -209,11 +217,22 @@ def __call__(self, add_data=True):
     def slice_display_unit_name(self):
         return 'spectral'
 
+    @property
+    def spatial_axes(self):
+        # Collapse an e.g. 3D spectral cube to 1D spectrum, assuming that last axis
+        # is always wavelength. This may need adjustment after the following
+        # specutils PR is merged: https://github.com/astropy/specutils/pull/1033
+        return (0, 1)
+
+    @property
+    def slice_indicator_viewers(self):
+        return [v for v in self.app._viewer_store.values() if isinstance(v, WithSliceIndicator)]
+
     @observe('active_step', 'is_active')
     def _active_step_changed(self, *args):
-        self.aperture._set_mark_visiblities(self.active_step in ('', 'ap', 'ext'))
+        self.aperture._set_mark_visiblities(self.active_step in ('', 'ap', 'extract'))
         self.background._set_mark_visiblities(self.active_step == 'bg')
-        self.marks['bg_spec'].visible = self.active_step == 'bg'
+        self.marks['bg_extract'].visible = self.active_step == 'bg'
 
     @property
     def slice_plugin(self):
@@ -222,6 +241,8 @@ def slice_plugin(self):
     @observe('aperture_items')
     @skip_if_not_tray_instance()
     def _aperture_items_changed(self, msg):
+        if not self.do_auto_extraction:
+            return
         if not hasattr(self, 'aperture'):
             return
         for item in msg['new']:
@@ -234,11 +255,11 @@ def _aperture_items_changed(self, msg):
                                                  auto_update=True, add_data=True)
             except Exception:
                 msg = SnackbarMessage(
-                    f"Automatic spectrum extraction for {subset_lbl} failed",
+                    f"Automatic {self.resulting_product_name} extraction for {subset_lbl} failed",  # noqa
                     color='error', sender=self, timeout=10000)
             else:
                 msg = SnackbarMessage(
-                    f"Automatic spectrum extraction for {subset_lbl} successful",
+                    f"Automatic {self.resulting_product_name} extraction for {subset_lbl} successful",  # noqa
                     color='success', sender=self)
             self.app.hub.broadcast(msg)
@@ -246,12 +267,8 @@ def _extract_in_new_instance(self, dataset=None, function='Sum', subset_lbl=None
                                  auto_update=False, add_data=False):
         # create a new instance of the Spectral Extraction plugin (to not affect the instance in
         # the tray) and extract the entire cube with defaults.
-        if dataset is None:
-            if self._app._jdaviz_helper._loaded_flux_cube is None:
-                return
-            dataset = self._app._jdaviz_helper._loaded_flux_cube.label
         plg = self.new()
-        plg.dataset.selected = dataset
+        plg.dataset.selected = self.dataset.selected
         if subset_lbl is not None:
             plg.aperture.selected = subset_lbl
         plg.aperture_method.selected = 'Center'
@@ -300,19 +317,21 @@ def _update_aperture_method_on_function_change(self, *args):
         self.conflicting_aperture_and_function = False
 
     @property
-    def spectral_cube(self):
+    def cube(self):
         return self.dataset.selected_dc_item
 
     @property
     def uncert_cube(self):
-        if self.dataset.selected == self._app._jdaviz_helper._loaded_flux_cube.label:
+        if (hasattr(self._app._jdaviz_helper, '_loaded_flux_cube') and
+                hasattr(self.app._jdaviz_helper, '_loaded_uncert_cube') and
+                self.dataset.selected == self._app._jdaviz_helper._loaded_flux_cube.label):
             return self._app._jdaviz_helper._loaded_uncert_cube
         else:
             # TODO: allow selecting or associating an uncertainty cube?
             return None
 
     @property
-    def spectral_display_unit(self):
+    def slice_display_unit(self):
         return astropy.units.Unit(self.app._get_display_unit(self.slice_display_unit_name))
 
     @property
@@ -339,7 +358,8 @@ def aperture_weight_mask(self):
             self.aperture.get_mask(
                 self.dataset.selected_obj,
                 self.aperture_method_selected,
-                self.spectral_display_unit,
+                self.slice_display_unit,
+                self.spatial_axes,
                 self.reference_spectral_value if self.wavelength_dependent else None)
         )
@@ -354,7 +374,8 @@ def bg_weight_mask(self):
             self.background.get_mask(
                 self.dataset.selected_obj,
                 self.aperture_method_selected,
-                self.spectral_display_unit,
+                self.slice_display_unit,
+                self.spatial_axes,
                 self.reference_spectral_value if self.bg_wavelength_dependent else None)
         )
@@ -362,14 +383,14 @@ def bg_weight_mask(self):
     def aperture_area_along_spectral(self):
         # Weight mask summed along the spatial axes so that we get area of the aperture, in pixels,
         # as a function of wavelength.
-        # To convert to steradians, multiply by self.spectral_cube.meta.get('PIXAR_SR', 1.0)
-        return np.sum(self.aperture_weight_mask, axis=(0, 1))
+        # To convert to steradians, multiply by self.cube.meta.get('PIXAR_SR', 1.0)
+        return np.sum(self.aperture_weight_mask, axis=self.spatial_axes)
 
     @property
     def bg_area_along_spectral(self):
-        return np.sum(self.bg_weight_mask, axis=(0, 1))
+        return np.sum(self.bg_weight_mask, axis=self.spatial_axes)
 
-    def _extract_from_aperture(self, spectral_cube, uncert_cube, aperture,
+    def _extract_from_aperture(self, cube, uncert_cube, aperture,
                                weight_mask, wavelength_dependent,
                                selected_func, **kwargs):
         # This plugin collapses over the *spatial axes* (optionally over a spatial subset,
@@ -379,7 +400,7 @@ def _extract_from_aperture(self, spectral_cube, uncert_cube, aperture,
         if not isinstance(aperture, ApertureSubsetSelect):
             raise ValueError("aperture must be an ApertureSubsetSelect object")
         if aperture.selected != aperture.default_text:
-            nddata = spectral_cube.get_subset_object(
+            nddata = cube.get_subset_object(
                 subset_id=aperture.selected, cls=NDDataArray
             )
             if uncert_cube:
@@ -403,18 +424,19 @@ def _extract_from_aperture(self, spectral_cube, uncert_cube, aperture,
                 mask &= nddata.mask
         else:
-            nddata = spectral_cube.get_object(cls=NDDataArray)
+            nddata = cube.get_object(cls=NDDataArray)
             if uncert_cube:
                 uncertainties = uncert_cube.get_object(cls=StdDevUncertainty)
             else:
                 uncertainties = None
             flux = nddata.data << nddata.unit
             mask = nddata.mask
+
         # Use the spectral coordinate from the WCS:
-        if '_orig_spec' in spectral_cube.meta:
-            wcs = spectral_cube.meta['_orig_spec'].wcs.spectral
-        elif hasattr(spectral_cube.coords, 'spectral'):
-            wcs = spectral_cube.coords.spectral
+        if '_orig_spec' in cube.meta:
+            wcs = cube.meta['_orig_spec'].wcs.spectral
+        elif hasattr(cube.coords, 'spectral'):
+            wcs = cube.coords.spectral
         else:
             wcs = None
@@ -429,19 +451,15 @@ def _extract_from_aperture(self, spectral_cube, uncert_cube, aperture,
         # by default we want to propagate uncertainties:
         kwargs.setdefault("propagate_uncertainties", True)
 
-        # Collapse an e.g. 3D spectral cube to 1D spectrum, assuming that last axis
-        # is always wavelength. This may need adjustment after the following
-        # specutils PR is merged: https://github.com/astropy/specutils/pull/1033
-        spatial_axes = (0, 1)
         if selected_func == 'mean':
             # Use built-in sum function to collapse NDDataArray
-            collapsed_sum_for_mean = nddata_reshaped.sum(axis=spatial_axes, **kwargs)
+            collapsed_sum_for_mean = nddata_reshaped.sum(axis=self.spatial_axes, **kwargs)
             # But we still need the mean function for everything except flux
-            collapsed_as_mean = nddata_reshaped.mean(axis=spatial_axes, **kwargs)
+            collapsed_as_mean = nddata_reshaped.mean(axis=self.spatial_axes, **kwargs)
             # Then normalize the flux based on the fractional pixel array
             flux_for_mean = (collapsed_sum_for_mean.data /
-                             np.sum(weight_mask, axis=spatial_axes)) << nddata_reshaped.unit
+                             np.sum(weight_mask, axis=self.spatial_axes)) << nddata_reshaped.unit
             # Combine that information into a new NDDataArray
             collapsed_nddata = NDDataArray(flux_for_mean, mask=collapsed_as_mean.mask,
                                            uncertainty=collapsed_as_mean.uncertainty,
@@ -449,22 +467,28 @@ def _extract_from_aperture(self, spectral_cube, uncert_cube, aperture,
                                            meta=collapsed_as_mean.meta)
         elif selected_func == 'sum':
             collapsed_nddata = getattr(nddata_reshaped, selected_func)(
-                axis=spatial_axes, **kwargs
+                axis=self.spatial_axes, **kwargs
             )  # returns an NDDataArray
             # Remove per steradian denominator
             if astropy.units.sr in collapsed_nddata.unit.bases:
-                aperture_area = self.spectral_cube.meta.get('PIXAR_SR', 1.0) * u.sr
+                aperture_area = self.cube.meta.get('PIXAR_SR', 1.0) * u.sr
                 collapsed_nddata = collapsed_nddata.multiply(aperture_area,
                                                              propagate_uncertainties=True)
         else:
             collapsed_nddata = getattr(nddata_reshaped, selected_func)(
-                axis=spatial_axes, **kwargs
+                axis=self.spatial_axes, **kwargs
             )  # returns an NDDataArray
+
+        return self._return_extracted(cube, wcs, collapsed_nddata)
+
+    def _return_extracted(self, cube, wcs, collapsed_nddata):
         # Convert to Spectrum1D, with the spectral axis in correct units:
-        if hasattr(spectral_cube.coords, 'spectral_wcs'):
-            target_wave_unit = spectral_cube.coords.spectral_wcs.world_axis_units[0]
+        if hasattr(cube.coords, 'spectral_wcs'):
+            target_wave_unit = cube.coords.spectral_wcs.world_axis_units[0]
+        elif hasattr(cube.coords, 'spectral'):
+            target_wave_unit = cube.coords.spectral.world_axis_units[0]
         else:
-            target_wave_unit = spectral_cube.coords.spectral.world_axis_units[0]
+            target_wave_unit = None
 
         if target_wave_unit == '':
             target_wave_unit = 'pix'
@@ -481,6 +505,12 @@ def _extract_from_aperture(self, spectral_cube, uncert_cube, aperture,
         )
 
         return collapsed_spec
 
+    def _preview_x_from_extracted(self, extracted):
+        return extracted.spectral_axis.value
+
+    def _preview_y_from_extracted(self, extracted):
+        return extracted.flux.value
+
     @with_spinner()
     def extract(self, return_bg=False, add_data=True, **kwargs):
         """
@@ -503,7 +533,7 @@ def extract(self, return_bg=False, add_data=True, **kwargs):
             raise ValueError("aperture and background cannot be set to the same subset")
 
         selected_func = self.function_selected.lower()
-        spec = self._extract_from_aperture(self.spectral_cube, self.uncert_cube,
+        spec = self._extract_from_aperture(self.cube, self.uncert_cube,
                                            self.aperture, self.aperture_weight_mask,
                                            self.wavelength_dependent,
                                            selected_func, **kwargs)
@@ -513,11 +543,11 @@ def extract(self, return_bg=False, add_data=True, **kwargs):
             spec = spec - bg_spec
 
         # per https://jwst-docs.stsci.edu/jwst-near-infrared-camera/nircam-performance/nircam-absolute-flux-calibration-and-zeropoints  # noqa
-        pix_scale_factor = self.spectral_cube.meta.get('PIXAR_SR', 1.0)
+        pix_scale_factor = self.cube.meta.get('PIXAR_SR', 1.0)
         spec.meta['_pixel_scale_factor'] = pix_scale_factor
 
         # inform the user if scale factor keyword not in metadata
-        if 'PIXAR_SR' not in self.spectral_cube.meta:
+        if 'PIXAR_SR' not in self.cube.meta:
             snackbar_message = SnackbarMessage(
                 ("PIXAR_SR FITS header keyword not found when parsing spectral cube. "
                  "Flux/Surface Brightness will use default PIXAR_SR value of 1 sr/pix^2."),
@@ -527,7 +557,7 @@ def extract(self, return_bg=False, add_data=True, **kwargs):
 
         # stuff for exporting to file
         self.extracted_spec = spec
-        self.extracted_spec_available = True
+        self.extraction_available = True
         fname_label = self.dataset_selected.replace("[", "_").replace("]", "")
         self.filename = f"extracted_{selected_func}_{fname_label}.fits"
@@ -537,7 +567,7 @@ def extract(self, return_bg=False, add_data=True, **kwargs):
             self.add_results.add_results_from_plugin(spec)
 
             snackbar_message = SnackbarMessage(
-                "Spectrum extracted successfully.",
+                f"{self.resulting_product_name.title()} extracted successfully.",
                 color="success",
                 sender=self)
             self.hub.broadcast(snackbar_message)
@@ -566,7 +596,7 @@ def extract_bg_spectrum(self, add_data=False, **kwargs):
         # allow internal calls to override the behavior of the bg_spec_per_spaxel traitlet
         bg_spec_per_spaxel = kwargs.pop('bg_spec_per_spaxel', self.bg_spec_per_spaxel)
         if self.background.selected != self.background.default_text:
-            bg_spec = self._extract_from_aperture(self.spectral_cube, self.uncert_cube,
+            bg_spec = self._extract_from_aperture(self.cube, self.uncert_cube,
                                                   self.background, self.bg_weight_mask,
                                                   self.bg_wavelength_dependent,
                                                   self.function_selected.lower(), **kwargs)
@@ -610,7 +640,7 @@ def _save_extracted_spec_to_fits(self, overwrite=False, *args):
         if not self.export_enabled:
             # this should never be triggered since this is intended for UI-disabling and the
             # UI section is hidden, but would prevent any JS-hacking
-            raise ValueError("Writing out extracted spectrum to file is currently disabled")
+            raise ValueError(f"Writing out extracted {self.resulting_product_name} to file is currently disabled")  # noqa
 
         # Make sure file does not end up in weird places in standalone mode.
         path = os.path.dirname(self.filename)
@@ -637,28 +667,29 @@ def _save_extracted_spec_to_fits(self, overwrite=False, *args):
 
         # Let the user know where we saved the file.
         self.hub.broadcast(SnackbarMessage(
-            f"Extracted spectrum saved to {os.path.abspath(filename)}",
-            sender=self, color="success"))
+            f"Extracted {self.resulting_product_name} saved to {os.path.abspath(filename)}",
+            sender=self, color="success"))
 
     @observe('aperture_selected', 'function_selected')
     def _set_default_results_label(self, event={}):
         if not hasattr(self, 'aperture'):
             return
         if self.aperture.selected == self.aperture.default_text:
-            self.results_label_default = f"Spectrum ({self.function_selected.lower()})"
+            self.results_label_default = f"{self.resulting_product_name.title()} ({self.function_selected.lower()})"  # noqa
         else:
-            self.results_label_default = f"Spectrum ({self.aperture_selected}, {self.function_selected.lower()})"  # noqa
+            self.results_label_default = f"{self.resulting_product_name.title()} ({self.aperture_selected}, {self.function_selected.lower()})"  # noqa
 
     @cached_property
     def marks(self):
         if not self._tray_instance:
             return {}
-        sv = self.spectrum_viewer
-        marks = {'spec': PluginLine(sv, visible=self.is_active),
-                 'bg_spec': PluginLine(sv,
-                                       line_style='dotted',
-                                       visible=self.is_active and self.active_step == 'bg')}
-        sv.figure.marks = sv.figure.marks + [marks['spec'], marks['bg_spec']]
+        # TODO: iterate over self.slice_indicator_viewers and handle adding/removing viewers
+        sv = self.slice_indicator_viewers[0]
+        marks = {'extract': PluginLine(sv, visible=self.is_active),
+                 'bg_extract': PluginLine(sv,
+                                          line_style='dotted',
+                                          visible=self.is_active and self.active_step == 'bg')}
+        sv.figure.marks = sv.figure.marks + [marks['extract'], marks['bg_extract']]
         return marks
 
     def _clear_marks(self):
@@ -666,42 +697,49 @@ def _clear_marks(self):
             if mark.visible:
                 mark.visible = False
 
-    @observe('is_active', 'show_live_preview')
-    def _toggle_marks(self, event={}):
+    @observe('is_active', 'show_live_preview',
+             'dataset_selected', 'aperture_selected', 'bg_selected',
+             'wavelength_dependent', 'bg_wavelength_dependent', 'reference_spectral_value',
+             'function_selected',
+             'aperture_method_selected',
+             'previews_temp_disabled')
+    def _live_update_marks(self, event={}):
+        self._update_marks(event)
+
+    @skip_if_not_tray_instance()
+    def _update_marks(self, event={}):
         visible = self.show_live_preview and self.is_active
 
         if not visible:
             self._clear_marks()
-        elif event.get('name', '') in ('is_active', 'show_live_preview'):
-            # then the marks themselves need to be updated
-            self._live_update(event)
+            return
+
+        # ensure the correct visibility, always (whether or not there have been updates)
+        self.marks['bg_extract'].visible = self.active_step == 'bg' and self.bg_selected != self.background.default_text  # noqa
+        self.marks['extract'].visible = True
+
+        # _live_update will skip if no updates since last active
+        self._live_update_extract(event)
 
-    @observe('dataset_selected', 'aperture_selected', 'bg_selected',
-             'wavelength_dependent', 'bg_wavelength_dependent', 'reference_spectral_value',
-             'function_selected',
-             'aperture_method_selected',
-             'previews_temp_disabled')
     @skip_if_no_updates_since_last_active()
     @with_temp_disable(timeout=0.4)
-    def _live_update(self, event={}):
-        if not self._tray_instance:
-            return
-        if not self.show_live_preview or not self.is_active:
-            self._clear_marks()
-            return
+    def _live_update_extract(self, event={}):
+        self._update_extract()
 
+    @skip_if_not_tray_instance()
+    def _update_extract(self):
         try:
-            sp, bg_spec = self.extract(return_bg=True, add_data=False)
+            ext, bg_extract = self.extract(return_bg=True, add_data=False)
         except (ValueError, Exception):
             self._clear_marks()
-            return
+            return False
 
-        self.marks['spec'].update_xy(sp.spectral_axis.value, sp.flux.value)
-        self.marks['spec'].visible = True
+        self.marks['extract'].update_xy(self._preview_x_from_extracted(ext),
+                                        self._preview_y_from_extracted(ext))
 
-        if bg_spec is None:
-            self.marks['bg_spec'].clear()
-            self.marks['bg_spec'].visible = False
+        if bg_extract is None:
+            self.marks['bg_extract'].clear()
+            self.marks['bg_extract'].visible = False
         else:
-            self.marks['bg_spec'].update_xy(bg_spec.spectral_axis.value, bg_spec.flux.value)
-            self.marks['bg_spec'].visible = self.active_step == 'bg'
+            self.marks['bg_extract'].update_xy(self._preview_x_from_extracted(bg_extract),
+                                               self._preview_y_from_extracted(bg_extract))
diff --git a/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.vue b/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.vue
index 00a974d757..74629b7a65 100644
--- a/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.vue
+++ b/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.vue
@@ -1,6 +1,6 @@
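
Illustrative sketch, not part of the patch above: the class-level knobs introduced in SpectralExtraction (resulting_product_name, do_auto_extraction, wavelength_dependent_available, bg_export_available) appear to be intended as override points for a plugin that extracts something other than a spectrum. The RampExtraction class name below is hypothetical and only shows how such a subclass might look, assuming jdaviz is importable.

# Hypothetical subclass, for illustration only -- not part of this diff.
from traitlets import Bool, Unicode

from jdaviz.configs.cubeviz.plugins.spectral_extraction.spectral_extraction import (
    SpectralExtraction)


class RampExtraction(SpectralExtraction):
    # label used in default result labels and snackbar messages instead of "spectrum"
    resulting_product_name = Unicode("profile").tag(sync=True)
    # skip the automatic per-subset extraction wired up in _aperture_items_changed
    do_auto_extraction = False
    # hide the wavelength-dependent aperture options and background export in the UI
    wavelength_dependent_available = Bool(False).tag(sync=True)
    bg_export_available = Bool(False).tag(sync=True)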