Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,22 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [main - 2.2.0] - 2025-11-27

Minor bugfixes and enhancements.

### Changed

- Nod & Shuffle
- Maintain user-defined coadd state when separating Nod & Shuffle components

- File management
  - `--quick-clean` option removes intermediate files as it goes

- Bugfixes
- Restore `--just-calib` handling
- Ensure wavelength keywords comply with standards

## [main - 2.1.0] - 2025-06-26

Minor enhancements for added flexibility.
Expand Down
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,8 @@ When multiple sky frames are associated with a science image, they are scaled by

`--just-calib`: Triggers the data reduction in the absence of on-sky data (both science and calibration). It only produces basic calibration files.

`--quick-clean`: Clean up intermediate files after the next step has been produced.

`--greedy-stds`: Treat observations vaguely near known standard stars as STANDARD frames even if IMAGETYP = 'OBJECT'. If this option is not set, only IMAGETYP = 'STANDARD' frames are used as standards.

`--extract`: Automatically locate sources in the output datacubes and extract spectra. Default parameters defined in JSON5 file (normally in pip's site-packages/pywifes directory):
Expand Down
1 change: 1 addition & 0 deletions docs/source/usage.rst
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ When multiple sky frames are associated with a science image, they are scaled by

- `--skip-done`: Skip already completed steps. This will check for the existence of the output files from each step, as well as whether the output files are newer than the input files -- if either part fails, the step will be executed.
- `--just-calib`: Triggers the data reduction in the absence of on-sky data (both science and calibration). It only produces basic calibration files.
- `--quick-clean`: Deletes the intermediate files after the next step has been generated.
- `--greedy-stds`: Treat observations vaguely near known standard stars as STANDARD frames even if IMAGETYP = 'OBJECT'. If this option is not set, only IMAGETYP = 'STANDARD' frames are used as standards.
- `--extract`: Automatically locate sources in the output datacubes and extract spectra. By default, uses the parameters defined in JSON5 file (normally in pip's site-packages/pywifes directory rather than the install directory):

Expand Down
25 changes: 22 additions & 3 deletions reduction_scripts/reduce_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@

def run_arm_indiv(temp_data_dir, obs_metadatas, arm, master_dir, output_master_dir,
output_dir, params_path, grism_key, just_calib, plot_dir,
from_master, extra_skip_steps, return_dict, skip_done):
from_master, extra_skip_steps, return_dict, skip_done, quick_clean):
# Reduces the data for an individual arm.

try:
Expand All @@ -42,6 +42,7 @@ def run_arm_indiv(temp_data_dir, obs_metadatas, arm, master_dir, output_master_d
gargs['arm'] = arm
gargs['master_dir'] = master_dir
gargs['from_master'] = from_master
gargs['just_calib'] = just_calib
gargs['output_master_dir'] = output_master_dir
gargs['output_dir'] = output_dir

Expand Down Expand Up @@ -207,6 +208,14 @@ def run_arm_indiv(temp_data_dir, obs_metadatas, arm, master_dir, output_master_d
**step_args,
)
if step_suffix is not None:
if quick_clean:
files_to_delete = glob.glob('{}/*.[ps]{}.fits'.format(gargs['out_dir_arm'], prev_suffix))
for file_path in files_to_delete:
try:
os.remove(file_path)
except OSError as e:
print(f"Error removing {file_path}: {e.strerror}")

prev_suffix = step_suffix

else:
Expand Down Expand Up @@ -375,6 +384,13 @@ def main():
help="Optional: Skip processing and only extract or extract-and-splice",
)

# Option to delete each intermediate step after the subsequent one has been produced
parser.add_argument(
"--quick-clean",
action="store_true",
help="Optional: Delete each intermediate step after the next one has been made",
)

args = parser.parse_args()

# Validate and process the user_data_dir
Expand Down Expand Up @@ -441,6 +457,9 @@ def main():
# Skip processing and only extract or extract-and-splice
no_processing = args.no_processing

# Delete each intermediate step after it has produced the next one
quick_clean = args.quick_clean

# Creates a directory for plot.
plot_dir = os.path.join(output_dir, "plots/")
os.makedirs(plot_dir, exist_ok=True)
Expand Down Expand Up @@ -501,7 +520,7 @@ def main():
if args.run_both:
# Try and reduce both arms at the same time
for arm in obs_metadatas.keys():
p = multiprocessing.Process(target=run_arm_indiv, args=(temp_data_dir, obs_metadatas, arm, master_dir, output_master_dir, output_dir, params_path, grism_key, just_calib, plot_dir, from_master, extra_skip_steps, return_dict, skip_done))
p = multiprocessing.Process(target=run_arm_indiv, args=(temp_data_dir, obs_metadatas, arm, master_dir, output_master_dir, output_dir, params_path, grism_key, just_calib, plot_dir, from_master, extra_skip_steps, return_dict, skip_done, quick_clean))
jobs.append(p)
p.start()

Expand All @@ -510,7 +529,7 @@ def main():
else:
# Otherwise reduce them sequentially
for arm in obs_metadatas.keys():
p = multiprocessing.Process(target=run_arm_indiv, args=(temp_data_dir, obs_metadatas, arm, master_dir, output_master_dir, output_dir, params_path, grism_key, just_calib, plot_dir, from_master, extra_skip_steps, return_dict, skip_done))
p = multiprocessing.Process(target=run_arm_indiv, args=(temp_data_dir, obs_metadatas, arm, master_dir, output_master_dir, output_dir, params_path, grism_key, just_calib, plot_dir, from_master, extra_skip_steps, return_dict, skip_done, quick_clean))
jobs.append(p)
p.start()

Expand Down
2 changes: 1 addition & 1 deletion src/pywifes/pywifes.py
Original file line number Diff line number Diff line change
Expand Up @@ -5443,7 +5443,7 @@ def generate_wifes_3dcube(inimg, outimg, halfframe=False, taros=False, nan_bad_p
if wave_ext:
ctype3 = "PIXEL"
else:
ctype3 = "WAVE"
ctype3 = "WAVE" if "PYWWVREF" in f[1].header and f[1].header["PYWWVREF"] == "VACUUM" else "AWAV"

# Coordinate transformations
# Telescope angle
Expand Down
27 changes: 15 additions & 12 deletions src/pywifes/recipes/overscan_sub.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,25 +74,28 @@ def _run_overscan_sub(metadata, gargs, prev_suffix, curr_suffix, poly_high_oscan
to avoid using the epoch-based values.
"""
full_obs_list = get_full_obs_list(metadata)
target_list = get_sci_obs_list(metadata)
std_list = get_std_obs_list(metadata)

# Check if any 1x2-binned standards need a different binning to match the science data
# Check if any 1x2-binned standards need a different binning to match the other data
match_binning = None
sci_list = get_sci_obs_list(metadata)
sci_binning = []
for fn in sci_list:
# If no science frames or only processing calibrations, check the calibrations instead
if len(target_list) == 0 or gargs['just_calib']:
target_list = [item for item in full_obs_list if item not in target_list and item not in std_list]
target_binning = []
for fn in target_list:
this_head = fits.getheader(os.path.join(gargs['data_dir'], "%s.fits" % fn))
sci_binning.append(this_head['CCDSUM'])
sci_binning = set(sci_binning)
if len(sci_binning) > 1:
raise ValueError(f"Must process different science binning modes separately! Found: {sci_binning}")
std_list = get_std_obs_list(metadata)
target_binning.append(this_head['CCDSUM'])
target_binning = set(target_binning)
if len(target_binning) > 1:
raise ValueError(f"Must process different binning modes separately! Found: {target_binning}")
std_binning = []
for fn in std_list:
this_head = fits.getheader(os.path.join(gargs['data_dir'], "%s.fits" % fn))
std_binning.append(this_head['CCDSUM'])
# Set desired binning if not all std_binning are contained in sci_binning (a 1-element set)
if not set(std_binning).issubset(sci_binning):
[match_binning] = sci_binning
# Set desired binning if not all std_binning are contained in target_binning (a 1-element set)
if not set(std_binning).issubset(target_binning):
[match_binning] = target_binning

first = True
if not poly_high_oscan:
Expand Down
12 changes: 10 additions & 2 deletions src/pywifes/recipes/sky_sub.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ def _run_sky_sub(metadata, gargs, prev_suffix, curr_suffix, separate_ns=False):

# Create a stable copy to enable additions without repetitions
this_metadata_sci = metadata["sci"].copy()

for obs in this_metadata_sci:
if len(obs["sky"]) > 0:
# Prepare the separate sky exposure, if defined.
Expand Down Expand Up @@ -78,6 +79,7 @@ def _run_sky_sub(metadata, gargs, prev_suffix, curr_suffix, separate_ns=False):
)
else:
# No separate sky frames defined.
new_sci_list = []
for fn in obs["sci"]:
in_fn = os.path.join(gargs['out_dir_arm'], "%s.p%s.fits" % (fn, prev_suffix))
out_fn = os.path.join(
Expand All @@ -103,8 +105,9 @@ def _run_sky_sub(metadata, gargs, prev_suffix, curr_suffix, separate_ns=False):
else:
print(f"Copying N&S sky image {os.path.basename(sky_fn)}")
pywifes.imcopy(sky_fn, out_sky_fn)
# Add 'sky' from N&S to science metadata
metadata['sci'].append({"sci": [os.path.basename(out_sky_fn).replace(f".p{curr_suffix}.fits", "")], "sky": []})
# Populate a list of science images from the sky frames
new_sci_list.append(os.path.basename(out_sky_fn).replace(f".p{curr_suffix}.fits", ""))

# Update headers
fh = fits.open(out_fn, mode="update")
fh_sky = fits.open(out_sky_fn, mode="update")
Expand Down Expand Up @@ -142,6 +145,11 @@ def _run_sky_sub(metadata, gargs, prev_suffix, curr_suffix, separate_ns=False):
continue
print(f"Copying science image {os.path.basename(in_fn)}")
pywifes.imcopy(in_fn, out_fn)

# If separate_ns, add list of 'sky' from N&S to science metadata
if len(new_sci_list) > 0:
metadata['sci'].append({"sci": new_sci_list, "sky": []})

# copy stdstar frames
std_obs_list = get_std_obs_list(metadata)
for fn in std_obs_list:
Expand Down
24 changes: 13 additions & 11 deletions src/pywifes/splice.py
Original file line number Diff line number Diff line change
Expand Up @@ -325,18 +325,19 @@ def splice_spectra(blue_spec_path, red_spec_path, output_path, get_dq=False,
hdulist[0].header["EXPTBLUE"] = (blueSpec.header["EXPTIME"],
"Exposure time in blue arm")
hdulist[0].header.remove("EXPTIME")
wave_ref = "WAVE" if "PYWWVREF" in hdulist[0].header and hdulist[0].header["PYWWVREF"] == "VACUUM" else "AWAV"
hdulist[0].header["CRPIX1"] = 1
hdulist[0].header["CRVAL1"] = wave_min
hdulist[0].header["CDELT1"] = wstep_out
hdulist[0].header["CTYPE1"] = "WAVE"
hdulist[0].header["CTYPE1"] = wave_ref
hdulist[0].header["CUNIT1"] = "Angstrom"

hdr_fluxvar = fits.Header()
hdr_fluxvar["EXTNAME"] = "VAR"
hdr_fluxvar["CRPIX1"] = 1
hdr_fluxvar["CRVAL1"] = wave_min
hdr_fluxvar["CDELT1"] = wstep_out
hdr_fluxvar["CTYPE1"] = "WAVE"
hdr_fluxvar["CTYPE1"] = wave_ref
hdr_fluxvar["CUNIT1"] = "Angstrom"
hdr_fluxvar["BUNIT"] = "(Flux)^2"

Expand All @@ -351,7 +352,7 @@ def splice_spectra(blue_spec_path, red_spec_path, output_path, get_dq=False,
hdr_dq["CRPIX1"] = 1
hdr_dq["CRVAL1"] = wave_min
hdr_dq["CDELT1"] = wstep_out
hdr_dq["CTYPE1"] = "WAVE"
hdr_dq["CTYPE1"] = wave_ref
hdr_dq["CUNIT1"] = "Angstrom"

hdu_dq = fits.ImageHDU(data=dq.astype("int16", casting="unsafe"),
Expand All @@ -365,7 +366,7 @@ def splice_spectra(blue_spec_path, red_spec_path, output_path, get_dq=False,
hdr_sky["CRPIX1"] = 1
hdr_sky["CRVAL1"] = wave_min
hdr_sky["CDELT1"] = wstep_out
hdr_sky["CTYPE1"] = "WAVE"
hdr_sky["CTYPE1"] = wave_ref
hdr_sky["CUNIT1"] = "Angstrom"

hdu_sky = fits.ImageHDU(data=sky.astype("float32", casting="same_kind"),
Expand All @@ -379,7 +380,7 @@ def splice_spectra(blue_spec_path, red_spec_path, output_path, get_dq=False,
hdr_tell["CRPIX1"] = 1
hdr_tell["CRVAL1"] = wave_min
hdr_tell["CDELT1"] = wstep_out
hdr_tell["CTYPE1"] = "WAVE"
hdr_tell["CTYPE1"] = wave_ref
hdr_tell["CUNIT1"] = "Angstrom"

tell_data = tell.astype("float32", casting="same_kind")
Expand All @@ -395,7 +396,7 @@ def splice_spectra(blue_spec_path, red_spec_path, output_path, get_dq=False,
hdr_ext["CRPIX1"] = 1
hdr_ext["CRVAL1"] = wave_min
hdr_ext["CDELT1"] = wstep_out
hdr_ext["CTYPE1"] = "WAVE"
hdr_ext["CTYPE1"] = wave_ref
hdr_ext["CUNIT1"] = "Angstrom"
hdr_ext['COMMENT'] = 'Flux extinction correction applied to reach airmass zero'
hdr_ext['COMMENT'] = 'NB: the correction _in magnitudes_ scales linearly with airmass'
Expand Down Expand Up @@ -695,18 +696,19 @@ def splice_cubes(blue_path, red_path, output_path, get_dq=False, wstep=None,
hdulist[0].header["EXPTBLUE"] = (blue_header["EXPTIME"],
"Exposure time in blue arm")
hdulist[0].header.remove("EXPTIME")
wave_ref = "WAVE" if "PYWWVREF" in hdulist[0].header and hdulist[0].header["PYWWVREF"] == "VACUUM" else "AWAV"
hdulist[0].header["CRPIX3"] = 1
hdulist[0].header["CRVAL3"] = wave_min
hdulist[0].header["CDELT3"] = wstep_out
hdulist[0].header["CTYPE3"] = "Wavelength"
hdulist[0].header["CTYPE3"] = wave_ref
hdulist[0].header["CUNIT3"] = "Angstrom"

hdr_fluxvar = fits.Header()
hdr_fluxvar["EXTNAME"] = "VAR"
hdr_fluxvar["CRPIX3"] = 1
hdr_fluxvar["CRVAL3"] = wave_min
hdr_fluxvar["CDELT3"] = wstep_out
hdr_fluxvar["CTYPE3"] = "Wavelength"
hdr_fluxvar["CTYPE3"] = wave_ref
hdr_fluxvar["CUNIT3"] = "Angstrom"
hdr_fluxvar["BUNIT"] = "(Flux)^2"

Expand All @@ -721,7 +723,7 @@ def splice_cubes(blue_path, red_path, output_path, get_dq=False, wstep=None,
hdr_dq["CRPIX3"] = 1
hdr_dq["CRVAL3"] = wave_min
hdr_dq["CDELT3"] = wstep_out
hdr_dq["CTYPE3"] = "Wavelength"
hdr_dq["CTYPE3"] = wave_ref
hdr_dq["CUNIT3"] = "Angstrom"

hdu_dq = fits.ImageHDU(data=dq.astype("int16", casting="unsafe"), header=hdr_dq)
Expand All @@ -734,7 +736,7 @@ def splice_cubes(blue_path, red_path, output_path, get_dq=False, wstep=None,
hdr_tell["CRPIX1"] = 1
hdr_tell["CRVAL1"] = wave_min
hdr_tell["CDELT1"] = wstep_out
hdr_tell["CTYPE1"] = "WAVE"
hdr_tell["CTYPE1"] = wave_ref
hdr_tell["CUNIT1"] = "Angstrom"

tell_data = tell.astype("float32", casting="same_kind")
Expand All @@ -750,7 +752,7 @@ def splice_cubes(blue_path, red_path, output_path, get_dq=False, wstep=None,
hdr_ext["CRPIX1"] = 1
hdr_ext["CRVAL1"] = wave_min
hdr_ext["CDELT1"] = wstep_out
hdr_ext["CTYPE1"] = "WAVE"
hdr_ext["CTYPE1"] = wave_ref
hdr_ext["CUNIT1"] = "Angstrom"
hdr_ext['COMMENT'] = 'Flux extinction correction applied to reach airmass zero'
hdr_ext['COMMENT'] = 'NB: the correction _in magnitudes_ scales linearly with airmass'
Expand Down
2 changes: 1 addition & 1 deletion src/pywifes/wifes_metadata.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import os

__version__ = '2.1.0'
__version__ = '2.2.0'

# If you set the environment variable 'PYWIFES_DIR' then it will be found
mdir = os.getenv('PYWIFES_DIR')
Expand Down