diff --git a/doc/help/run_pypeit.rst b/doc/help/run_pypeit.rst index 33669bf891..60a98ddfc7 100644 --- a/doc/help/run_pypeit.rst +++ b/doc/help/run_pypeit.rst @@ -4,7 +4,7 @@ usage: run_pypeit [-h] [-v VERBOSITY] [-r REDUX_PATH] [-m] [-s] [-o] [-c] pypeit_file - ## PypeIt : The Python Spectroscopic Data Reduction Pipeline v1.16.1.dev84+g643dd5acc + ## PypeIt : The Python Spectroscopic Data Reduction Pipeline v1.16.1.dev97+g2ab5988a9 ## ## Available spectrographs include: ## bok_bc, gemini_flamingos1, gemini_flamingos2, gemini_gmos_north_e2v, diff --git a/doc/releases/1.16.1dev.rst b/doc/releases/1.16.1dev.rst index 33b59d4259..fe68fc7496 100644 --- a/doc/releases/1.16.1dev.rst +++ b/doc/releases/1.16.1dev.rst @@ -13,6 +13,8 @@ Installation Changes Dependency Changes ------------------ +- Support added for numpy>=2.0.0; numpy<=2.0.0 should still be supported + Functionality/Performance Improvements and Additions ---------------------------------------------------- diff --git a/presentations/py/users.py b/presentations/py/users.py new file mode 100644 index 0000000000..9f3affcddd --- /dev/null +++ b/presentations/py/users.py @@ -0,0 +1,80 @@ +from matplotlib import pyplot, rc, dates +import matplotlib.dates as mdates +import matplotlib as mpl +mpl.rcParams['font.family'] = 'stixgeneral' + +import numpy +from IPython import embed + +def set_fontsize(ax, fsz): + """ + Set the fontsize throughout an Axis + + Args: + ax (Matplotlib Axis): + fsz (float): Font size + + Returns: + + """ + for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] + + ax.get_xticklabels() + ax.get_yticklabels()): + item.set_fontsize(fsz) + +user_dates = ["2021-03-11", "2022-04-29", "2022-11-07", "2022-12-06", "2023-06-08", "2023-06-29", "2023-07-11", "2023-09-03", "2023-10-13", "2023-12-01", "2023-12-15", "2024-02-22", "2024-03-21", "2024-04-09", "2024-05-02", "2024-05-19", "2024-06-06", "2024-06-10"] +user_dates = numpy.array([numpy.datetime64(date) for date in user_dates]) +user_number = numpy.array([125, 293, 390, 394, 477, 487, 506, 518, 531, 544, 551, 568, 579, 588, 596, 603, 616, 620]) + +user_pred_dates = numpy.array([numpy.datetime64(date) + for date in ["2024-06-10", "2024-12-31", "2025-12-31", "2026-12-31", + "2027-12-31"]]) +user_pred_num = numpy.array([620, 620+0.5*160, 620+1.5*160, 620+2.5*160, 620+3.5*160]) + + +cite_dates = ["2020-12-31", "2021-12-31", "2022-12-31", "2023-12-31", "2024-06-10"] +cite_dates = numpy.array([numpy.datetime64(date) for date in cite_dates]) +cite_ref = numpy.cumsum([7, 24, 31, 56, 53]) +cite_all = numpy.cumsum([8, 25, 33, 68, 81]) + +cite_pred_dates = numpy.array([numpy.datetime64(date) + for date in ["2024-06-10", "2024-12-31", "2025-12-31", "2026-12-31", "2027-12-31"]]) +cite_pred_all = numpy.cumsum([8, 25, 33, 68, 81, numpy.sqrt(1.5)*81, 1.5*68, 1.5**2*68, 1.5**3*68]) +cite_pred_all = cite_pred_all[cite_all.size-1:] + +rc('font', size=14) + +w,h = pyplot.figaspect(1) +fig = pyplot.figure(figsize=(1.9*w,1.5*h)) + +ax = fig.add_axes([0.2, 0.2, 0.75, 0.75]) +ax.plot(user_dates, user_number, ls='-', color='k', label='Slack Users') +ax.plot(user_pred_dates, user_pred_num, ls=':', color='k') +ax.set_ylim([0,1400]) +ax.set_ylabel("Cumulative Usage Metric") +ax.set_xlabel("Date") +ax.xaxis.set_major_locator(dates.MonthLocator(bymonth=[1,7])) +fig.autofmt_xdate() + +#axt = ax.twinx() +#ax.plot(cite_dates, cite_ref, ls='--', color='0.5', label='All Cite') +ax.plot(cite_dates, cite_all, ls='-', color='C0', label='Citations') +ax.plot(cite_pred_dates, cite_pred_all, ls=':', color='C0') 
+#axt.set_ylim([0,250]) + +#ax.scatter(user_dates, user_number, marker='.', lw=0, s=200, color='k') +#fig.canvas.print_figure('pypeit_users.pdf', bbox_inches='tight') + +ax.legend() + +ax.xaxis.set_major_locator(mdates.YearLocator()) + +set_fontsize(ax, 16) + + +outfile = "pypeit_users.png" +pyplot.savefig(outfile, dpi=300) +print(f"Saved: {outfile}") + +#pyplot.show() +#fig.clear() +#pyplot.close(fig) diff --git a/pypeit/cache.py b/pypeit/cache.py index b761b19f09..52379cf6ad 100644 --- a/pypeit/cache.py +++ b/pypeit/cache.py @@ -63,13 +63,22 @@ def git_branch(): """ Return the name/hash of the currently checked out branch - + Returns: - :obj:`str`: Branch name or hash + :obj:`str`: Branch name or hash. Defaults to "develop" if PypeIt is not currently in a repository + or pygit2 is inot installed. + """ - if Repository is None: + if Repository is not None: + try: + repo = Repository(resources.files('pypeit')) + except Exception as e: + # PypeIt not in a git repo + repo = None + + if Repository is None or repo is None: return 'develop' if '.dev' in __version__ else __version__ - repo = Repository(resources.files('pypeit')) + return str(repo.head.target) if repo.head_is_detached else str(repo.head.shorthand) diff --git a/pypeit/core/findobj_skymask.py b/pypeit/core/findobj_skymask.py index 8d5ba51880..fe56f70bc4 100644 --- a/pypeit/core/findobj_skymask.py +++ b/pypeit/core/findobj_skymask.py @@ -66,57 +66,68 @@ def create_skymask(sobjs, thismask, slit_left, slit_righ, box_rad_pix=None, trim global sky subtraction (True means the pixel is usable for sky subtraction, False means it should be masked when subtracting sky). """ + # Number of objects nobj = len(sobjs) - ximg, _ = pixels.ximg_and_edgemask(slit_left, slit_righ, thismask, trim_edg=trim_edg) - # How many pixels wide is the slit at each Y? - xsize = slit_righ - slit_left - #nsamp = np.ceil(np.median(xsize)) # JFH Changed 07-07-19 - nsamp = np.ceil(xsize.max()) + if nobj == 0: + msgs.info('No objects were detected. The entire slit will be used for sky subtraction.') + return thismask[thismask] - # Objmask + # Compute the object mask skymask_objsnr = np.copy(thismask) - if nobj == 0: - msgs.info('No objects were detected. The entire slit will be used to determine the sky subtraction.') - else: - # Compute some inputs for the object mask - xtmp = (np.arange(nsamp) + 0.5)/nsamp - # threshold for object finding - for iobj in range(nobj): - # this will skip also sobjs with THRESHOLD=0, because are the same that have smash_snr=0. - if (sobjs[iobj].smash_snr != 0.) and (sobjs[iobj].smash_snr != None): - qobj = np.zeros_like(xtmp) - sep = np.abs(xtmp-sobjs[iobj].SPAT_FRACPOS) - sep_inc = sobjs[iobj].maskwidth/nsamp - close = sep <= sep_inc - # This is an analytical SNR profile with a Gaussian shape. - # JFH modified to use SNR here instead of smash peakflux. I believe that the 2.77 is supposed to be - # 2.355**2/2, i.e. 
the argument of a gaussian with sigma = FWHM/2.35 - qobj[close] = sobjs[iobj].smash_snr * \ - np.exp(np.fmax(-2.77*(sep[close]*nsamp)**2/sobjs[iobj].FWHM**2, -9.0)) - skymask_objsnr[thismask] &= np.interp(ximg[thismask], xtmp, qobj) < skymask_snr_thresh - # FWHM + # Create an image with pixel values equal to the fraction of the spatial + # position along the slit, ranging from 0 -> 1 + ximg, _ = pixels.ximg_and_edgemask(slit_left, slit_righ, thismask, trim_edg=trim_edg) + # Maximum spatial width rounded up + nsamp = np.ceil(np.amax(slit_righ - slit_left)) + # Fractional position within the maximum spatial width + xtmp = (np.arange(nsamp) + 0.5)/nsamp + # threshold for object finding + for iobj in range(nobj): + # this will skip also sobjs with THRESHOLD=0, because are the same that have smash_snr=0. + if sobjs[iobj].smash_snr is None or sobjs[iobj].smash_snr <= 0.: + continue + # Select pixels within the defined width of the object + sep = np.absolute(xtmp-sobjs[iobj].SPAT_FRACPOS) + sep_inc = sobjs[iobj].maskwidth/nsamp + close = sep <= sep_inc + # This is an analytical SNR profile with a Gaussian shape. + # JFH modified to use SNR here instead of smash peakflux. I believe that + # the 2.77 is supposed to be 2.355**2/2, i.e. the argument of a gaussian + # with sigma = FWHM/2.35 + qobj = np.zeros_like(xtmp) + qobj[close] = sobjs[iobj].smash_snr * \ + np.exp(np.fmax(-2.77*(sep[close]*nsamp)**2/sobjs[iobj].FWHM**2, -9.0)) + skymask_objsnr[thismask] &= np.interp(ximg[thismask], xtmp, qobj) < skymask_snr_thresh + + # Compute the FWHM mask skymask_fwhm = np.copy(thismask) - if nobj > 0: - nspec, nspat = thismask.shape - # spatial position everywhere along image - spat_img = np.outer(np.ones(nspec, dtype=int),np.arange(nspat, dtype=int)) - # Boxcar radius? - if box_rad_pix is not None: - msgs.info("Using boxcar radius for masking") - # Loop me - for iobj in range(nobj): - # Create a mask for the pixels that will contribute to the object - skymask_radius = box_rad_pix if box_rad_pix is not None else sobjs[iobj].FWHM - msgs.info(f"Masking around object {iobj+1} within a radius = {skymask_radius} pixels") - slit_img = np.outer(sobjs[iobj].TRACE_SPAT, np.ones(nspat)) # central trace replicated spatially - objmask_now = thismask & (spat_img > (slit_img - skymask_radius)) & (spat_img < (slit_img + skymask_radius)) - skymask_fwhm &= np.invert(objmask_now) - - # Check that we have not performed too much masking - if (np.sum(skymask_fwhm)/np.sum(thismask) < 0.10): - msgs.warn('More than 90% of usable area on this slit would be masked and not used by global sky subtraction. ' - 'Something is probably wrong with object finding for this slit. Not masking object for global sky subtraction.') - skymask_fwhm = np.copy(thismask) + nspec, nspat = thismask.shape + # spatial position everywhere along image +# spat_img = np.outer(np.ones(nspec, dtype=int),np.arange(nspat, dtype=int)) + spat_img = np.tile(np.arange(nspat, dtype=int), (nspec,1)) + # Boxcar radius? 
+ if box_rad_pix is not None: + msgs.info("Using boxcar radius for masking") + # Loop me + for iobj in range(nobj): + # Create a mask for the pixels that will contribute to the object + skymask_radius = box_rad_pix if box_rad_pix is not None else sobjs[iobj].FWHM + msgs.info(f"Masking around object {iobj+1} within a radius = {skymask_radius} pixels") +# slit_img = np.outer(sobjs[iobj].TRACE_SPAT, np.ones(nspat)) # central trace replicated spatially + slit_img = np.tile(sobjs[iobj].TRACE_SPAT, (nspat,1)).T + objmask_now = thismask \ + & (spat_img > slit_img - skymask_radius) \ + & (spat_img < slit_img + skymask_radius) + skymask_fwhm &= np.logical_not(objmask_now) + + # Check that we have not performed too much masking + # TODO: There is this hard-coded check here, and then there is a similar + # check in skysub.global_skysub. Do we need both? + if np.sum(skymask_fwhm)/np.sum(thismask) < 0.10: + msgs.warn('More than 90% of usable area on this slit would be masked and not used by ' + 'global sky subtraction. Something is probably wrong with object finding for ' + 'this slit. Not masking object for global sky subtraction.') + skymask_fwhm = np.copy(thismask) # Still have to make the skymask # # TODO -- Make sure this is right @@ -135,8 +146,10 @@ def create_skymask(sobjs, thismask, slit_left, slit_righ, box_rad_pix=None, trim # computation from objs_in_slit is not necessarily that reliable and when large amounts of masking are performed # on narrow slits/orders, we have problems. We should revisit this after object finding is refactored since # maybe then the fwhm estimates will be more robust. - if box_rad_pix is None and np.all([sobj.smash_snr is not None for sobj in sobjs]) \ - and np.all([sobj.smash_snr != 0. for sobj in sobjs]) and not np.all(skymask_objsnr == thismask): + if box_rad_pix is None \ + and np.all([sobj.smash_snr is not None for sobj in sobjs]) \ + and np.all([sobj.smash_snr != 0. for sobj in sobjs]) \ + and not np.all(skymask_objsnr == thismask): # TODO This is a kludge until we refactor this routine. Basically mask design objects that are not auto-ID # always have smash_snr undefined. If there is a hybrid situation of auto-ID and maskdesign, the logic # here does not really make sense. Soution would be to compute thershold and smash_snr for all objects. 
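For readers following the create_skymask refactor above, the short sketch below (not PypeIt code; the function name object_snr_profile and every numeric value are invented for illustration) shows the analytical Gaussian SNR profile that decides which pixels an object removes from the sky mask. The hard-coded 2.77 is 2.355**2/2 ~ 2.773, i.e. exp(-x**2/(2*sigma**2)) rewritten in terms of FWHM = 2.355*sigma, as the in-line comment says.

import numpy as np

def object_snr_profile(nsamp, spat_fracpos, maskwidth, fwhm, smash_snr):
    """Analytical Gaussian SNR profile across a slit sampled at nsamp spatial positions."""
    xtmp = (np.arange(nsamp) + 0.5) / nsamp        # fractional spatial position, 0 -> 1
    sep = np.abs(xtmp - spat_fracpos)              # separation from the object centroid
    close = sep <= maskwidth / nsamp               # only fill pixels within the mask width
    qobj = np.zeros_like(xtmp)
    # Exponent floored at -9 to avoid underflow, exactly as in the routine above
    qobj[close] = smash_snr * np.exp(np.fmax(-2.77 * (sep[close]*nsamp)**2 / fwhm**2, -9.0))
    return xtmp, qobj

# Invented numbers: a 200-sample slit with one object centered at 40% of the slit width
xtmp, qobj = object_snr_profile(nsamp=200, spat_fracpos=0.4, maskwidth=25., fwhm=4., smash_snr=50.)
skymask_snr_thresh = 1.0                           # assumed value for this sketch only
usable_for_sky = qobj < skymask_snr_thresh         # True where the slit can still be used for sky
print(f'{usable_for_sky.sum()}/{usable_for_sky.size} spatial samples remain usable for sky')

The np.tile replacements in the same hunk are drop-in equivalents of the old np.outer calls: np.tile(np.arange(nspat), (nspec, 1)) reproduces np.outer(np.ones(nspec), np.arange(nspat)), and np.tile(TRACE_SPAT, (nspat, 1)).T reproduces np.outer(TRACE_SPAT, np.ones(nspat)), without the intermediate multiplication by a vector of ones.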
diff --git a/pypeit/core/flux_calib.py b/pypeit/core/flux_calib.py index b94e1bdf4d..816f06be08 100644 --- a/pypeit/core/flux_calib.py +++ b/pypeit/core/flux_calib.py @@ -411,7 +411,7 @@ def get_standard_spectrum(star_type=None, star_mag=None, ra=None, dec=None): # Pull star spectral model from archive msgs.info("Getting archival standard spectrum") # Grab closest standard within a tolerance - std_dict = find_standard_file(ra, dec) + std_dict = find_standard_file(ra, dec,to_pkg='symlink') elif (star_mag is not None) and (star_type is not None): ## using vega spectrum diff --git a/pypeit/core/skysub.py b/pypeit/core/skysub.py index 91acc0f2bc..a5114d886c 100644 --- a/pypeit/core/skysub.py +++ b/pypeit/core/skysub.py @@ -136,11 +136,14 @@ def global_skysub(image, ivar, tilts, thismask, slit_left, slit_righ, inmask=Non msgs.error("Type of inmask should be bool and is of type: {:}".format(inmask.dtype)) # Sky pixels for fitting - gpm = thismask & (ivar > 0.0) & inmask & np.logical_not(edgmask) & np.isfinite(image) & np.isfinite(ivar) + gpm = thismask & (ivar > 0.0) & inmask & np.logical_not(edgmask) \ + & np.isfinite(image) & np.isfinite(ivar) bad_pixel_frac = np.sum(thismask & np.logical_not(gpm))/np.sum(thismask) if bad_pixel_frac > max_mask_frac: - msgs.warn('This slit/order has {:5.3f}% of the pixels masked, which exceeds the threshold of {:f}%. '.format(100.0*bad_pixel_frac, 100.0*max_mask_frac) - + msgs.newline() + 'There is likely a problem with this slit. Giving up on global sky-subtraction.') + msgs.warn(f'This slit/order has {100.0*bad_pixel_frac:.3f}% of the pixels masked, which ' + f'exceeds the threshold of {100.0*max_mask_frac:.3f}%.' + + msgs.newline() + 'There is likely a problem with this slit. Giving up on ' + 'global sky-subtraction.') return np.zeros(np.sum(thismask)) # Sub arrays diff --git a/pypeit/core/wavecal/autoid.py b/pypeit/core/wavecal/autoid.py index 4fcebb1add..09f195652f 100644 --- a/pypeit/core/wavecal/autoid.py +++ b/pypeit/core/wavecal/autoid.py @@ -3189,7 +3189,7 @@ def finalize_fit(self, detections): if self._outroot is not None: # Write IDs out_dict = dict(pix=use_tcent, IDs=self._all_patt_dict[str(slit)]['IDs']) - jdict = ltu.jsonify(out_dict) + jdict = utils.jsonify(out_dict) ltu.savejson(self._outroot + slittxt + '.json', jdict, easy_to_read=True, overwrite=True) msgs.info("Wrote: {:s}".format(self._outroot + slittxt + '.json')) diff --git a/pypeit/find_objects.py b/pypeit/find_objects.py index 120a800bcc..6ca9dbbeed 100644 --- a/pypeit/find_objects.py +++ b/pypeit/find_objects.py @@ -253,28 +253,29 @@ def create_skymask(self, sobjs_obj): subtraction. True = usable for sky subtraction, False = should be masked when sky subtracting. """ - # Masking options - boxcar_rad_pix = None - + # Instantiate the mask skymask = np.ones_like(self.sciImg.image, dtype=bool) - gdslits = np.where(np.invert(self.reduce_bpm))[0] - if sobjs_obj.nobj > 0: - for slit_idx in gdslits: - slit_spat = self.slits.spat_id[slit_idx] - qa_title ="Generating skymask for slit # {:d}".format(slit_spat) - msgs.info(qa_title) - thismask = self.slitmask == slit_spat - this_sobjs = sobjs_obj.SLITID == slit_spat - # Boxcar mask? 
- if self.par['reduce']['skysub']['mask_by_boxcar']: - boxcar_rad_pix = self.par['reduce']['extraction']['boxcar_radius'] / \ - self.get_platescale(slitord_id=self.slits.slitord_id[slit_idx]) - # Do it - skymask[thismask] = findobj_skymask.create_skymask(sobjs_obj[this_sobjs], thismask, - self.slits_left[:,slit_idx], - self.slits_right[:,slit_idx], - box_rad_pix=boxcar_rad_pix, - trim_edg=self.par['reduce']['findobj']['find_trim_edge']) + if sobjs_obj.nobj == 0: + # No objects found, so entire image contains sky + return skymask + + # Build the mask for each slit + boxcar_rad_pix = None + gdslits = np.where(np.logical_not(self.reduce_bpm))[0] + for slit_idx in gdslits: + slit_spat = self.slits.spat_id[slit_idx] + msgs.info(f'Generating skymask for slit # {slit_spat}') + thismask = self.slitmask == slit_spat + this_sobjs = sobjs_obj.SLITID == slit_spat + # Boxcar mask? + if self.par['reduce']['skysub']['mask_by_boxcar']: + boxcar_rad_pix = self.par['reduce']['extraction']['boxcar_radius'] / \ + self.get_platescale(slitord_id=self.slits.slitord_id[slit_idx]) + # Do it + skymask[thismask] = findobj_skymask.create_skymask( + sobjs_obj[this_sobjs], thismask, self.slits_left[:,slit_idx], + self.slits_right[:,slit_idx], box_rad_pix=boxcar_rad_pix, + trim_edg=self.par['reduce']['findobj']['find_trim_edge']) # Return return skymask @@ -592,6 +593,7 @@ def global_skysub(self, skymask=None, bkg_redux_sciimg=None, pos_mask=not self.bkg_redux and not objs_not_masked, max_mask_frac=self.par['reduce']['skysub']['max_mask_frac'], show_fit=show_fit) + # Mask if something went wrong if np.sum(global_sky[thismask]) == 0.: msgs.warn("Bad fit to sky. Rejecting slit: {:d}".format(slit_spat)) diff --git a/pypeit/images/buildimage.py b/pypeit/images/buildimage.py index b9ecfa9339..e156638728 100644 --- a/pypeit/images/buildimage.py +++ b/pypeit/images/buildimage.py @@ -157,8 +157,8 @@ def construct_file_name(cls, calib_key, calib_dir=None, basename=None): def buildimage_fromlist(spectrograph, det, frame_par, file_list, bias=None, bpm=None, dark=None, - scattlight=None, flatimages=None, maxiters=5, ignore_saturation=True, slits=None, - mosaic=None, calib_dir=None, setup=None, calib_id=None): + scattlight=None, flatimages=None, maxiters=5, ignore_saturation=True, + slits=None, mosaic=None, calib_dir=None, setup=None, calib_id=None): """ Perform basic image processing on a list of images and combine the results. 
@@ -249,8 +249,8 @@ def buildimage_fromlist(spectrograph, det, frame_par, file_list, bias=None, bpm= # Do it combineImage = combineimage.CombineImage(spectrograph, det, frame_par['process'], file_list) - pypeitImage = combineImage.run(bias=bias, bpm=bpm, dark=dark, flatimages=flatimages, scattlight=scattlight, - sigma_clip=frame_par['process']['clip'], + pypeitImage = combineImage.run(bias=bias, bpm=bpm, dark=dark, flatimages=flatimages, + scattlight=scattlight, sigma_clip=frame_par['process']['clip'], sigrej=frame_par['process']['comb_sigrej'], maxiters=maxiters, ignore_saturation=ignore_saturation, slits=slits, combine_method=frame_par['process']['combine'], diff --git a/pypeit/metadata.py b/pypeit/metadata.py index 3fc1d1b17d..e883c5f953 100644 --- a/pypeit/metadata.py +++ b/pypeit/metadata.py @@ -747,7 +747,7 @@ def unique_configurations(self, force=False, copy=False, rm_none=False): ignore_frames, ignore_indx = self.ignore_frames() # Find the indices of the frames not to ignore indx = np.arange(len(self.table)) - indx = indx[np.logical_not(np.in1d(indx, ignore_indx))] + indx = indx[np.logical_not(np.isin(indx, ignore_indx))] if len(indx) == 0: msgs.error('No frames to use to define configurations!') diff --git a/pypeit/par/parset.py b/pypeit/par/parset.py index a2f8202153..10b660f415 100644 --- a/pypeit/par/parset.py +++ b/pypeit/par/parset.py @@ -345,7 +345,7 @@ def _data_table_string(data_table, delimeter='print'): return '\n'.join(row_string)+'\n' @staticmethod - def _data_string(data, use_repr=True, verbatim=False): + def _data_string(data, use_repr=False, verbatim=False): """ Convert a single datum into a string diff --git a/pypeit/pypeit.py b/pypeit/pypeit.py index 863bd8e597..68bcd193ec 100644 --- a/pypeit/pypeit.py +++ b/pypeit/pypeit.py @@ -705,8 +705,8 @@ def calib_one(self, frames, det): msgs.info(f'Building/loading calibrations for detector {det}') # Instantiate Calibrations class - user_slits=slittrace.merge_user_slit(self.par['rdx']['slitspatnum'], - self.par['rdx']['maskIDs']) + user_slits = slittrace.merge_user_slit(self.par['rdx']['slitspatnum'], + self.par['rdx']['maskIDs']) caliBrate = calibrations.Calibrations.get_instance( self.fitstbl, self.par['calibrations'], self.spectrograph, self.calibrations_path, qadir=self.qa_path, @@ -760,7 +760,8 @@ def objfind_one(self, frames, det, bg_frames=None, std_outfile=None): # Is this a standard star? 
self.std_redux = self.objtype == 'standard' - frame_par = self.par['calibrations']['standardframe'] if self.std_redux else self.par['scienceframe'] + frame_par = self.par['calibrations']['standardframe'] \ + if self.std_redux else self.par['scienceframe'] # Get the standard trace if need be if self.std_redux is False and std_outfile is not None: diff --git a/pypeit/scripts/identify.py b/pypeit/scripts/identify.py index 610241dc81..8b944e43fa 100644 --- a/pypeit/scripts/identify.py +++ b/pypeit/scripts/identify.py @@ -67,7 +67,7 @@ def main(args): from pypeit import slittrace from pypeit.images.buildimage import ArcImage from pypeit.core.wavecal import autoid - from linetools.utils import jsonify + from pypeit.utils import jsonify chk_version = not args.try_old diff --git a/pypeit/scripts/show_2dspec.py b/pypeit/scripts/show_2dspec.py index 6c54be71f0..07d9b6ae32 100644 --- a/pypeit/scripts/show_2dspec.py +++ b/pypeit/scripts/show_2dspec.py @@ -248,6 +248,9 @@ def main(args): waveimg = spec2DObj.waveimg img_gpm = spec2DObj.select_flag(invert=True) + if not np.any(img_gpm): + msgs.warn('The full science image is masked!') + model_gpm = img_gpm.copy() if args.ignore_extract_mask: model_gpm |= spec2DObj.select_flag(flag='EXTRACT') @@ -306,6 +309,7 @@ def main(args): cut_min = mean - 1.0 * sigma cut_max = mean + 4.0 * sigma chname_sci = args.prefix+f'sciimg-{detname}' + # Clear all channels at the beginning viewer, ch_sci = display.show_image(sciimg, chname=chname_sci, waveimg=waveimg, clear=_clear, cuts=(cut_min, cut_max)) diff --git a/pypeit/utils.py b/pypeit/utils.py index fbc6550c15..86d61f5f0e 100644 --- a/pypeit/utils.py +++ b/pypeit/utils.py @@ -1442,7 +1442,7 @@ def yamlify(obj, debug=False): obj = bool(obj) # elif isinstance(obj, bytes): # obj = obj.decode('utf-8') - elif isinstance(obj, (np.string_, str)): + elif isinstance(obj, (np.str_, str)): obj = str(obj) # Worry about colons! if ':' in obj: @@ -1484,6 +1484,64 @@ def yamlify(obj, debug=False): print(type(obj)) return obj +def jsonify(obj, debug=False): + """ Recursively process an object so it can be serialised in json + format. Taken from linetools. + + WARNING - the input object may be modified if it's a dictionary or + list! + + Parameters + ---------- + obj : any object + debug : bool, optional + + Returns + ------- + obj - the same obj is json_friendly format (arrays turned to + lists, np.int64 converted to int, np.float64 to float, and so on). 
+ + """ + if isinstance(obj, np.float64): + obj = float(obj) + elif isinstance(obj, np.float32): + obj = float(obj) + elif isinstance(obj, np.int32): + obj = int(obj) + elif isinstance(obj, np.int64): + obj = int(obj) + elif isinstance(obj, np.int16): + obj = int(obj) + elif isinstance(obj, np.bool_): + obj = bool(obj) + elif isinstance(obj, np.str_): + obj = str(obj) + elif isinstance(obj, units.Quantity): + if obj.size == 1: + obj = dict(value=obj.value, unit=obj.unit.to_string()) + else: + obj = dict(value=obj.value.tolist(), unit=obj.unit.to_string()) + elif isinstance(obj, np.ndarray): # Must come after Quantity + obj = obj.tolist() + elif isinstance(obj, dict): + for key, value in obj.items(): + obj[key] = jsonify(value, debug=debug) + elif isinstance(obj, list): + for i,item in enumerate(obj): + obj[i] = jsonify(item, debug=debug) + elif isinstance(obj, tuple): + obj = list(obj) + for i,item in enumerate(obj): + obj[i] = jsonify(item, debug=debug) + obj = tuple(obj) + elif isinstance(obj, units.Unit): + obj = obj.name + elif obj is units.dimensionless_unscaled: + obj = 'dimensionless_unit' + + if debug: + print(type(obj)) + return obj def add_sub_dict(d, key): """ diff --git a/pypeit/wavecalib.py b/pypeit/wavecalib.py index db1b5b8aa0..d297afa160 100644 --- a/pypeit/wavecalib.py +++ b/pypeit/wavecalib.py @@ -10,7 +10,7 @@ import numpy as np from matplotlib import pyplot as plt -from linetools.utils import jsonify +from pypeit.utils import jsonify from astropy.table import Table from astropy.io import fits
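As a usage note for the jsonify helper that now lives in pypeit.utils (replacing the linetools.utils import in wavecalib.py and scripts/identify.py above), the sketch below shows the kinds of objects it is meant to clean up before JSON serialization; the payload contents are invented for illustration. This sits alongside the numpy>=2.0 renames elsewhere in this diff (np.string_ -> np.str_ in yamlify, np.in1d -> np.isin in metadata.py).

import json
import numpy as np
from astropy import units
from pypeit.utils import jsonify                      # new home of the helper added in this diff

# Invented payload mixing the types jsonify handles: numpy scalars and arrays,
# astropy Quantity objects, and nested dict/list/tuple containers
payload = {
    'pix': np.arange(5, dtype=np.int64),              # -> list of plain ints
    'snr': np.float32(12.5),                          # -> float
    'wave': np.linspace(4000., 5000., 3) * units.AA,  # -> {'value': [...], 'unit': 'Angstrom'}
    'ids': ('HeI', 'ArI'),                            # tuple contents are jsonified in place
}
clean = jsonify(payload)
print(json.dumps(clean, indent=2))                    # now serializable with the stdlib json module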