Minor changes to DC loaders for future surveys (#1122)
* Include all remote data in coverage run

This should reduce the number of tests skipped and resolve some of the
coverage inconsistencies seen in #1122.

* Minor changes to DC loaders for future surveys

This makes it easier to use a fallback header and to access the last
spectrum added from a wrapper loader. These changes support loading some
of the IFS surveys (such as SAMI) that Data Central hosts (a usage sketch
follows the dc_common.py diff below).

* Add SAMI loaders for testing updated DC loaders

This adds an initial version of the future SAMI loaders in order to test
the changes to the lower-level DC loaders.
aragilar committed Apr 30, 2024
1 parent a702a2c commit bf2355b
Showing 4 changed files with 237 additions and 4 deletions.
11 changes: 7 additions & 4 deletions specutils/io/default_loaders/dc_common.py
@@ -306,7 +306,12 @@ def add_single_spectra_to_map(
return None

if valid_wcs or not spec_wcs_info:
wcs = WCS(header)
try:
wcs = WCS(header)
except (ValueError, KeyError):
if fallback_header is None:
raise
wcs = WCS(fallback_header)
if drop_wcs_axes is not None:
if isinstance(drop_wcs_axes, Callable):
wcs = drop_wcs_axes(wcs)
@@ -352,9 +357,7 @@ def add_single_spectra_to_map(
spectrum.uncertainty = UNCERTAINTY_MAP[purpose](aligned_flux)
spectrum.meta["uncertainty_header"] = header

# We never actually want to return something, this just flags it to pylint
# that we know we're breaking out of the function when skip is selected
return None
return spectrum


def get_purpose(
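To illustrate the two changes above, here is a minimal sketch of the pattern a survey-specific wrapper loader could use. It is a sketch only: the extension layout and the "example_weight_map" key are hypothetical, and it assumes fallback_header is an existing keyword argument of add_single_spectra_to_map (its use in the hunk above suggests so, but the full signature is not shown here).

primary_header = hdulist[0].header

# A later extension may lack the keywords needed to build a WCS, so pass
# the (complete) primary header as the fallback used when WCS(header) raises.
spec = add_single_spectra_to_map(
    spectra_map,
    header=hdulist[1].header,
    fallback_header=primary_header,  # assumed keyword, see note above
    data=hdulist[1].data,
    index=None,
    all_standard_units=True,
    all_keywords=True,
    valid_wcs=True,
)

# The function now returns the spectrum it added (or None when the extension
# is skipped), so the wrapper can keep extending the last spectrum added.
if spec is not None:
    spec.meta["example_weight_map"] = hdulist[2].data
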
185 changes: 185 additions & 0 deletions specutils/io/default_loaders/sami.py
@@ -0,0 +1,185 @@
from astropy.nddata import VarianceUncertainty
from astropy.table import QTable
from specutils import SpectrumList
from specutils.io.default_loaders.dc_common import (
FITS_FILE_EXTS, add_single_spectra_to_map,
)
from specutils.io.parsing_utils import read_fileobj_or_hdulist
from specutils.io.registers import data_loader

# There appears to be nothing which says "this is a SAMI 1D spectrum", so guess
# it based on the headers that should be there
SAMI_1D_SPECTRA_HEADER_KEYWORDS = [
"BUNIT", "CATADEC", "CATARA", "CDELT1", "CRPIX1", "CRVAL1", "CTYPE1",
"CUNIT1", "DROPFACT", "ELLIP", "GRATID", "IFUPROBE", "KPC_SIZE", "NAME",
"N_SPAX", "POS_ANG", "PSFALPHA", "PSFBETA", "PSFFWHM", "RADESYS", "RADIUS",
"RO_GAIN", "RO_NOISE", "STDNAME", "WCSAXES", "Z_TONRY",
]


def identify_sami_cube(origin, *args, **kwargs):
"""
Identify if the current file is a SAMI cube file
"""
# TODO check this
with read_fileobj_or_hdulist(*args, **kwargs) as hdulist:
header = hdulist[0].header
data = hdulist[0].data
if "SAMI" in header.get("INSTRUME", "") and len(data.shape) == 3:
return True
return False


def identify_sami_1d_spec(origin, *args, **kwargs):
"""
Identify if the current file is a SAMI 1d spectra file of some kind
"""
# TODO check this
with read_fileobj_or_hdulist(*args, **kwargs) as hdulist:
header = hdulist[0].header
for key in SAMI_1D_SPECTRA_HEADER_KEYWORDS:
if key not in header:
return False
return True


@data_loader(
label="SAMI-cube", extensions=FITS_FILE_EXTS, dtype=SpectrumList,
identifier=identify_sami_cube, priority=10,
)
def sami_cube_loader(filename):
spectra_map = {
"sky": [],
"combined": [],
"unreduced": [],
"reduced": [],
}
primary_header = None

with read_fileobj_or_hdulist(filename) as hdulist:
for i, hdu in enumerate(hdulist):
if i == 0:
# This is the primary extension, and the one with the
# science data. The header is fairly complete.
primary_header = hdu.header
spec = add_single_spectra_to_map(
spectra_map,
header=primary_header,
data=hdu.data,
index=None,
all_standard_units=True,
all_keywords=True,
valid_wcs=True,
)

elif "VARIANCE" == hdu.header.get("EXTNAME"):
# This is the variance extension, and is missing wcs and
# units.
uncertainty = VarianceUncertainty(
hdu.data, unit=spec.flux.unit ** 2
)
spec.uncertainty = uncertainty

elif "WEIGHT" == hdu.header.get("EXTNAME"):
# This is the weight extension, and is missing wcs. The
# units are effectively "normalised" (from 0-1 it seems).
spec.meta["sami_cube_weight_map"] = hdu.data

elif "COVAR" == hdu.header.get("EXTNAME"):
# This is the spatial covariance extension. It's not clear
# as to how best to expose this, so skipping for now.
pass

elif "QC" == hdu.header.get("EXTNAME"):
# This is the QC extension, and is a binary table. This we
# add to the metadata.
spec.meta["sami_QC_table"] = QTable.read(hdu)

elif "DUST" == hdu.header.get("EXTNAME"):
# This is the dust extension, and is missing wcs and
# units. This should likely be represented as an array plus
# the metadata in the header.
spec.meta["sami_dust_vector_weights"] = hdu.data

elif "BIN_MASK" == hdu.header.get("EXTNAME"):
# This is the bin mask extension, where the value of each
# pixel indicates the bin to which it belongs. The bin mask
# is used to construct the binned fluxes and variances in
# the above two extensions from the default cubes.
# This is not the same as the aperture spectra mask with the
# same HDU name.
spec.meta["sami_bin_mask"] = hdu.data

else:
raise NotImplementedError(
"Extension is not handled: index {}; name {}".format(
i, hdu.header.get("EXTNAME")
)
)

spectra = SpectrumList(
spectra_map["combined"] +
spectra_map["reduced"] +
spectra_map["unreduced"] +
spectra_map["sky"]
)
return spectra


@data_loader(
label="SAMI-1d-spec", extensions=FITS_FILE_EXTS, dtype=SpectrumList,
identifier=identify_sami_1d_spec, priority=10,
)
def sami_1d_spec_loader(filename):
spectra_map = {
"sky": [],
"combined": [],
"unreduced": [],
"reduced": [],
}
primary_header = None

with read_fileobj_or_hdulist(filename) as hdulist:
for i, hdu in enumerate(hdulist):
if i == 0:
# This is the primary extension, and the one with the
# science data. The header is fairly complete.
primary_header = hdu.header
spec = add_single_spectra_to_map(
spectra_map,
header=primary_header,
data=hdu.data,
index=None,
all_standard_units=True,
all_keywords=True,
valid_wcs=True,
)

elif "VARIANCE" == hdu.header.get("EXTNAME"):
# This is the variance extension, and is missing wcs and
# units.
uncertainty = VarianceUncertainty(
hdu.data, unit=spec.flux.unit ** 2
)
spec.uncertainty = uncertainty

elif "BIN_MASK" == hdu.header.get("EXTNAME"):
# Contains the bin mask used to construct the aperture
# spectra. A 1 indicates a spaxel was included in the
# aperture, a 0 indicates a spaxel was not included.
spec.meta["sami_aperture_spectra_mask"] = hdu.data

else:
raise NotImplementedError(
"Extension is not handled: index {}; name {}".format(
i, hdu.header.get("EXTNAME")
)
)

spectra = SpectrumList(
spectra_map["combined"] +
spectra_map["reduced"] +
spectra_map["unreduced"] +
spectra_map["sky"]
)
return spectra
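
As a usage sketch, once one of the test files listed below has been downloaded, the loaders registered above can be exercised directly through SpectrumList.read (the file names here are taken from the test data and are assumed to be in the working directory):

from specutils import SpectrumList

# Explicit format, using the labels registered above ...
cube = SpectrumList.read("24433_A_cube_blue.fits.gz", format="SAMI-cube")
spec_1d = SpectrumList.read("24433_spectrum_2-arcsec_blue.fits", format="SAMI-1d-spec")

# ... or leave the format out and let identify_sami_cube /
# identify_sami_1d_spec pick the loader from the FITS headers.
guessed = SpectrumList.read("24433_A_cube_blue.fits.gz")
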
43 changes: 43 additions & 0 deletions specutils/tests/test_loaders.py
@@ -1740,3 +1740,46 @@ def test_jwst_niriss_c1d_v1_2_3(remote_data_path):
[assert_multi_equals(d.shape, r) for d,r in zip(data, [(56,), (107,)])]
[assert_multi_equals(d.unit, u.Jy) for d in data]
[assert_multi_equals(d.spectral_axis.unit, u.um) for d in data]


class TestSAMI:
@remote_access([
{'id': "10802828", 'filename':"24433_A_adaptive_blue.fits.gz"},
{'id': "10802828", 'filename':"24433_A_adaptive_red.fits.gz"},
{'id': "10802828", 'filename':"24433_A_annular_blue.fits.gz"},
{'id': "10802828", 'filename':"24433_A_annular_red.fits.gz"},
{'id': "10802828", 'filename':"24433_A_cube_blue.fits.gz"},
{'id': "10802828", 'filename':"24433_A_cube_red.fits.gz"},
{'id': "10802828", 'filename':"24433_adaptive_blue.fits.gz"},
{'id': "10802828", 'filename':"24433_adaptive_red.fits.gz"},
{'id': "10802828", 'filename':"24433_spectrum_1-4-arcsec_blue.fits"},
{'id': "10802828", 'filename':"24433_spectrum_1-4-arcsec_red.fits"},
{'id': "10802828", 'filename':"24433_spectrum_2-arcsec_blue.fits"},
{'id': "10802828", 'filename':"24433_spectrum_2-arcsec_red.fits"},
{'id': "10802828", 'filename':"24433_spectrum_3-arcsec_blue.fits"},
{'id': "10802828", 'filename':"24433_spectrum_3-arcsec_red.fits"},
{'id': "10802828", 'filename':"24433_spectrum_3-kpc_blue.fits"},
{'id': "10802828", 'filename':"24433_spectrum_3-kpc_red.fits"},
{'id': "10802828", 'filename':"24433_spectrum_4-arcsec_blue.fits"},
{'id': "10802828", 'filename':"24433_spectrum_4-arcsec_red.fits"},
{'id': "10802828", 'filename':"24433_spectrum_re_blue.fits"},
{'id': "10802828", 'filename':"24433_spectrum_re_red.fits"},
])
def test_sami_guess(self, remote_data_path):
spectra = SpectrumList.read(remote_data_path)
assert len(spectra) == 1

spec = spectra[0]
assert isinstance(spec.uncertainty, VarianceUncertainty)
assert spec.flux.unit == u.Unit('10**(-16) erg/s/cm**2/angstrom/pixel')

if len(spec.flux.shape) == 3:
# This is a cube
assert spec.flux.shape == (50, 50, 2048)
assert "sami_QC_table" in spec.meta
assert "sami_dust_vector_weights" in spec.meta

else:
# This is a 1D spectrum
assert spec.flux.shape == (2048,)
assert "sami_aperture_spectra_mask" in spec.meta
2 changes: 2 additions & 0 deletions tox.ini
@@ -32,6 +32,7 @@ description =
oldestdeps: with the oldest supported version of key dependencies
predeps: with any pre-release if available
cov: and test coverage
html: generate HTML report of coverage

# The following provides some specific pinnings for key packages
deps =
@@ -72,6 +73,7 @@ commands =
!cov: pytest --pyargs specutils '{toxinidir}/docs' {posargs}
cov: pytest --pyargs specutils '{toxinidir}/docs' --cov specutils --cov-config='{toxinidir}/setup.cfg' {posargs}
cov: coverage xml -o '{toxinidir}/coverage.xml'
html: coverage html -d .coverage_html

pip_pre =
predeps: true
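As a usage note, the HTML report would be produced by combining the new html factor with cov in the environment name, e.g. something like tox -e py311-test-cov-html (a hypothetical environment name; the factors actually available depend on the envlist), which writes the report to .coverage_html.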
