import numpy as np
import astropy.units as u
from astropy.io import fits
from spectral_cube import SpectralCube

def deblend_cube(region='OrionA', vmin=0., vmax=20.):
    # Load the fitted NH3 parameter maps (excitation temperature, centroid
    # velocity, velocity dispersion, column density) and the moment-0 map.
    tex = fits.getdata('{0}/parameterMaps/{0}_Tex_DR1_rebase3_flag.fits'.format(region))
    mom0 = fits.getdata('{0}/{0}_NH3_11_DR1_rebase3_mom0_QA_trim.fits'.format(region))
    vlsr = fits.getdata('{0}/parameterMaps/{0}_Vlsr_DR1_rebase3_flag.fits'.format(region))
    sigv = fits.getdata('{0}/parameterMaps/{0}_Sigma_DR1_rebase3_flag.fits'.format(region))
    nnh3 = fits.getdata('{0}/parameterMaps/{0}_N_NH3_DR1_rebase3_flag.fits'.format(region))
    cube = SpectralCube.read('{0}/{0}_NH3_11_DR1_rebase3_trim.fits'.format(region))
    cube = cube.with_spectral_unit(u.km / u.s, velocity_convention='radio')
    # Peak brightness of a Gaussian whose area matches the moment-0 map.
    tpeak = mom0 / (np.sqrt(2 * np.pi) * sigv)
    vlsr[vlsr == 0] = np.nan
    sigv[sigv == 0] = np.nan
    # Evaluate a single Gaussian component on every spectral plane.
    deblend = np.zeros(cube.shape)
    hdr = cube.wcs.to_header()
    spaxis = cube.spectral_axis.value
    for plane in np.arange(deblend.shape[0]):
        deblend[plane, :, :] = tpeak * np.exp(-(spaxis[plane] - vlsr)**2 / (2 * sigv**2))
    newcube = SpectralCube(deblend, cube.wcs, header=hdr)
    slab = newcube.spectral_slab(vmin * u.km / u.s, vmax * u.km / u.s)
    slab.write('{0}/{0}_singlecomp.fits'.format(region), overwrite=True)
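# A minimal usage sketch, assuming the directory layout deblend_cube expects
# ('<region>/parameterMaps/...' and the trimmed NH3 (1,1) cube) is in place;
# the region name here is just the function's default.
# deblend_cube(region='OrionA', vmin=0., vmax=20.)
# The single-component model cube is written to 'OrionA/OrionA_singlecomp.fits'.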
def test_compute_region_densities_and_weights(self):
    # Reference values for the random ratios; the average weights are unity.
    test_rand_ratio = [0.0101507, 0.01101003, 0.01151311, 0.01081756,
                       0.01281959, 0.01212378, 0.0128798, 0.01246496]
    test_ave_weight = np.ones_like(test_rand_ratio)
    cosmos_data = fits.getdata(
        'data/COSMOS_iband_2009_radecidstomp_regionzp_best.fits')
    cosmos_zp_cut = cosmos_data[np.logical_and(
        cosmos_data.zp_best > 0.3, cosmos_data.zp_best <= 0.5)]
    open_hdf5_data_file = h5py.File(self.dummy_args.input_pair_hdf5_file, 'r')
    hdf5_data_grp = open_hdf5_data_file['data']
    id_array, rand_ratio, weight_array, ave_weight = \
        pdf_maker_utils._compute_region_densities_and_weights(
            cosmos_zp_cut, hdf5_data_grp, self.dummy_args)
    for rand, ave, test_rand, test_ave in \
            zip(rand_ratio, ave_weight, test_rand_ratio, test_ave_weight):
        self.assertAlmostEqual(rand, test_rand)
        self.assertAlmostEqual(ave, test_ave)
    open_hdf5_data_file.close()
def getdark(expt):
    """
    Generate a dark for a given exposure time, or scale down from the
    longest dark available.

    Relies on the module-level ``cals_direc`` path and the ``df`` data
    frame of exposure times.

    Parameters
    ----------
    expt : exposure time (in the data frame: df['exp'])

    Returns
    -------
    dark : dark image for that exposure time (numpy array)
    """
    try:
        dark = pyfits.getdata(os.path.join(cals_direc, 'master_dark_{0}.fits'.format(expt)))
    except IOError:
        # No master dark for this exposure time: scale down the longest one.
        scaleto = np.max(df['exp'][df['exp'] != ''])
        dark = pyfits.getdata(os.path.join(cals_direc, 'master_dark_{0}.fits'.format(scaleto)))
        dark *= (expt / scaleto)
    return dark
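# Hypothetical usage, assuming the module-level cals_direc path and the df
# exposure table are set up as getdark expects:
dark = getdark(30.0)  # master dark for a 30 s exposure
# then subtract from a raw science frame, e.g. science_frame -= dark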
### CREATE MASTER BIAS #######################################
def fromfits(infilename, hdu=0, verbose=True):
    """
    Reads a FITS file and returns a 2D numpy array of the data.
    Use hdu to specify which HDU you want (default = primary = 0).
    """
    pixelarray, hdr = fits.getdata(infilename, hdu, header=True)
    pixelarray = np.asarray(pixelarray).transpose()
    pixelarrayshape = pixelarray.shape
    if verbose:
        print("FITS import shape : (%i, %i)" % (pixelarrayshape[0], pixelarrayshape[1]))
        print("FITS file BITPIX : %s" % (hdr["BITPIX"]))
        print("Internal array type :", pixelarray.dtype.name)
    return pixelarray, hdr
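# A short usage sketch; 'image.fits' is a placeholder file name.
img, hdr = fromfits('image.fits', hdu=0, verbose=True)
print(img.shape, hdr['BITPIX'])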
def _applymask(self, mask):
    """Apply a mask to the input data to provide a cleaner basis set.

    mask is > 1 for objects, 0 for sky, so that SExtractor segmentation
    maps can be used directly. The file is read with
    ``astropy.io.fits.getdata``, which first tries to read the primary
    extension, then the first extension if no data was found before.
    """
    logger.info('Applying Mask for SVD Calculation from %s', mask)
    self.maskfile = mask
    mask = fits.getdata(mask).astype(bool)
    nmasked = np.count_nonzero(mask)
    logger.info('Masking %d pixels (%d%%)', nmasked,
                nmasked / np.prod(mask.shape) * 100)
    # Masked spaxels are excluded from the SVD by setting them to NaN.
    self.cube[:, mask] = np.nan
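# A minimal sketch of preparing a mask in the format _applymask expects:
# a SExtractor segmentation map already has nonzero object pixels and zero
# sky pixels, so it only needs to be saved as a FITS image. File names
# here are placeholders.
from astropy.io import fits
seg = fits.getdata('segmentation.fits')  # e.g. CHECKIMAGE_TYPE=SEGMENTATION output
fits.writeto('svd_mask.fits', (seg > 0).astype('uint8'), overwrite=True)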
###########################################################################
##################################### Output Functions ####################
###########################################################################
def compute_bad_pixel_map(bpm_files, dtype=np.uint8):
    '''
    Compute a combined bad pixel map provided a list of files

    Parameters
    ----------
    bpm_files : list
        List of names for the bpm files
    dtype : data type
        Data type for the final bpm

    Returns
    -------
    bpm : array_like
        Combined bad pixel map
    '''
    # check that we have files
    if len(bpm_files) == 0:
        raise ValueError('No bad pixel map files provided')
    # get shape from the first map (use the last two axes in case of cubes)
    shape = fits.getdata(bpm_files[0]).shape
    # start with an empty bpm
    bpm = np.zeros((shape[-2], shape[-1]), dtype=np.uint8)
    # combine with a logical OR: a pixel is bad if it is bad in any map
    for f in bpm_files:
        data = fits.getdata(f)
        bpm = np.logical_or(bpm, data)
    bpm = bpm.astype(dtype)
    return bpm
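# Hypothetical usage: combine all bad-pixel maps in a calibration folder
# (the glob pattern is illustrative).
import glob
bpm = compute_bad_pixel_map(sorted(glob.glob('calib/*_bpm.fits')))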
def __init__(self, source, frames=None):
    # matplotlib Figure
    if isinstance(source, Figure):
        self.setbuf(source.canvas.buffer_rgba())
        self.shape = source.canvas.get_width_height()
    # numpy array with three color channels
    elif isinstance(source, np.ndarray) and source.ndim == 3 and source.shape[2] == 3:
        self.setarr(source)
        self.shape = self.darr.shape[0:2]
    # standard image file (Python 2 string check: str or unicode)
    elif isinstance(source, types.StringTypes) and \
            source.lower().endswith((".jpg", ".jpeg", ".png", ".tif", ".tiff")):
        self.setpil(Image.open(os.path.expanduser(source)))
        self.shape = self.dpil.size
    # FITS file
    elif isinstance(source, types.StringTypes) and source.lower().endswith(".fits"):
        try:
            data = pyfits.getdata(arch.openbinary(os.path.expanduser(source))).T
        except TypeError:
            raise ValueError("Something is wrong with the FITS file (the file may have been truncated)")
        # the above gives us an array with shape (nx, ny) or (nx, ny, nlambda)
        if data.ndim == 2:
            # replicate a single frame into the three color channels
            self.setarr(np.dstack((data, data, data)))
        elif data.ndim == 3:
            if frames is None:
                # default: map the last, middle, and first frame to R, G, B
                n = data.shape[2]
                frames = (n - 1, n // 2, 0)
            self.setarr(np.dstack((data[:, :, frames[0]], data[:, :, frames[1]], data[:, :, frames[2]])))
        else:
            raise ValueError("Data in FITS file has unsupported shape")
        self.shape = self.darr.shape[0:2]
    # unsupported type
    else:
        raise ValueError("Source argument has unsupported type")
# ---------- Basic attributes -------------------------------------
## This function returns the image's shape as a tuple (nx, ny).
# You can also directly access the \c shape property.
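# A usage sketch, assuming this constructor belongs to an RGB image class
# (called SomeImage here purely for illustration): build an image directly
# from a (ny, nx, 3) numpy array of channel values.
rgb = np.random.random((128, 128, 3))
im = SomeImage(rgb)
print(im.shape)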
def getdata(*args, **kwargs):
    raise ImportError("Could not import fitsio or astropy.io.fits. "
                      "Install fitsio or astropy.")
def __init__(self, fname, scaling):
    self.data, header = getdata(fname, header=True)
    self.data *= scaling
    self.crpix1 = header['CRPIX1']
    self.crpix2 = header['CRPIX2']
    self.lam_scal = header['LAM_SCAL']
    self.sign = header['LAM_NSGP']  # north = 1, south = -1
def read(filename):
    from astropy.io import fits
    return fits.getdata(filename)
def _externalzlevel(self, extSVD):
    """Remove the zero level from the extSVD file."""
    logger.info('Using external zlevel')
    self.zlsky = fits.getdata(extSVD, 0)
    self.stack -= self.zlsky[:, np.newaxis]
    self.run_zlevel = 'extSVD'
def zoom_fits(fitsfile, scalefactor, preserve_bad_pixels=True, **kwargs):
    """
    Zoom in on a FITS image by interpolating with scipy.ndimage.zoom,
    updating the WCS so the header stays consistent with the resampled data.

    :param fitsfile: the FITS file name
    :param scalefactor: the zoom factor along all axes
    :param preserve_bad_pixels: try to set NaN pixels to NaN in the zoomed
        image. Otherwise, bad pixels will be set to zero.
    :param kwargs: passed through to scipy.ndimage.zoom
    :return: a PrimaryHDU with the zoomed data and updated header
    """
    # Get the data array and the header of the FITS file
    arr = pyfits.getdata(fitsfile)
    h = pyfits.getheader(fitsfile)
    # Shift the reference pixel to match the resampled grid
    h['CRPIX1'] = (h['CRPIX1'] - 1) * scalefactor + scalefactor / 2. + 0.5
    h['CRPIX2'] = (h['CRPIX2'] - 1) * scalefactor + scalefactor / 2. + 0.5
    if 'CD1_1' in h:
        for ii in (1, 2):
            for jj in (1, 2):
                k = "CD%i_%i" % (ii, jj)
                if k in h:  # allow for CD1_1 but not CD1_2
                    h[k] = h[k] / scalefactor
    elif 'CDELT1' in h:
        h['CDELT1'] = h['CDELT1'] / scalefactor
        h['CDELT2'] = h['CDELT2'] / scalefactor
    # Zero out bad pixels before interpolating, then restore them as NaN
    bad_pixels = np.isnan(arr) | np.isinf(arr)
    arr[bad_pixels] = 0
    upscaled = scipy.ndimage.zoom(arr, scalefactor, **kwargs)
    if preserve_bad_pixels:
        # order=0 (nearest neighbour) keeps the upscaled mask boolean
        bp_up = scipy.ndimage.zoom(bad_pixels, scalefactor, mode='constant', cval=np.nan, order=0)
        upscaled[bp_up] = np.nan
    up_hdu = pyfits.PrimaryHDU(data=upscaled, header=h)
    return up_hdu
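# Hypothetical usage: 2x bilinear upsampling (order=1 is forwarded to
# scipy.ndimage.zoom); the file names are placeholders.
hdu = zoom_fits('input.fits', 2.0, order=1)
hdu.writeto('input_zoom2.fits', overwrite=True)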
# -----------------------------------------------------------------
def trim_image(f, overscan_poly_order=8):
    """
    Return a trimmed version of a raw ARCTIC image.

    The ARCTIC detector is structured in four quadrants which can be read
    out individually (Quad Mode) or as a whole (Lower Left Mode);
    trim_image identifies which readout mode was used and crops the image
    accordingly.

    Parameters
    ----------
    f : file name of a raw FITS image from ARCTIC
    overscan_poly_order : order of polynomial used to fit overscan

    Returns
    -------
    alldat : a list with [the image in a numpy array, the astropy header]
    """
    dat_raw, dat_head = pyfits.getdata(f, header=True)
    amp = dat_head['READAMPS']
    if amp == "Quad":
        # data sections: lower-left, upper-left, lower-right, upper-right
        quads = ['DSEC11', 'DSEC21', 'DSEC12', 'DSEC22']
        dat = [[], [], [], []]
        for i, quad in enumerate(quads):
            idx_string = dat_head[quad]
            idx = re.split('[: ,]', idx_string.rstrip(']').lstrip('['))
            dat[i] = dat_raw[int(idx[2])-1:int(idx[3]), int(idx[0])-1:int(idx[1])]
        sci_lo = np.concatenate((dat[2], dat[3]), axis=1)
        sci_up = np.concatenate((dat[0], dat[1]), axis=1)
        sci = np.concatenate((sci_up, sci_lo), axis=0)
    if amp == 'LL':
        # Crop to the data section, then fit and subtract the overscan
        idx_string = dat_head['DSEC11']
        idx = re.split('[: ,]', idx_string.rstrip(']').lstrip('['))
        sci = dat_raw[int(idx[2])-1:int(idx[3]), int(idx[0])-1:int(idx[1])].astype(np.float64)
        idx_over_string = dat_head['BSEC11']
        idx_over = re.split('[: ,]', idx_over_string.rstrip(']').lstrip('['))
        over = dat_raw[int(idx_over[2])-1:int(idx_over[3]), int(idx_over[0])-1:int(idx_over[1])]
        # Average along columns
        avg_overscan = np.mean(over, axis=1)
        # Index array, then fit
        row_idx = np.arange(len(avg_overscan))
        p = np.polyfit(row_idx, avg_overscan, deg=overscan_poly_order)
        # Evaluate the fit, transpose into a column, then subtract it
        fit_overscan = np.poly1d(p)(row_idx)
        fit_overscan_col = fit_overscan[:, np.newaxis]
        sci -= fit_overscan_col
    alldat = [sci, dat_head]
    return alldat
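# Hypothetical usage; the file name is a placeholder for a raw ARCTIC frame.
sci, hdr = trim_image('raw_arctic_frame.fits', overscan_poly_order=8)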
def remove_conflictive_keywords(path, file_list):
    """Removes problematic keywords

    The blue camera has a set of keywords whose comments contain non-ASCII
    characters, in particular the degree symbol. Those keywords are not
    needed in any stage of the data reduction, therefore they are removed.
    The data will be overwritten with the keywords removed. The user will
    need to have backups of raw data.

    Notes:
        This function solves a problem with old data; new data comes with
        compliant headers.

    Args:
        path (str): Path to the folder containing the files
        file_list (list): List of files to remove keywords
    """
    log_ccd.debug('Removing conflictive keywords in Blue Camera Headers')
    log_ccd.warning('Files will be overwritten')
    for blue_file in file_list:
        full_path = os.path.join(path, blue_file)
        log_ccd.debug('Processing file {:s}'.format(blue_file))
        try:
            data, header = fits.getdata(full_path,
                                        header=True,
                                        ignore_missing_end=True)
            keys_to_remove = ['PARAM0',
                              'PARAM61',
                              'PARAM62',
                              'PARAM63',
                              'NAXIS3']
            if data.ndim == 3:
                header['NAXIS'] = 2
                data = data[0]
                log_ccd.debug('Modified file to be 2D instead of 3D '
                              '(problematic)')
            for keyword in keys_to_remove:
                header.remove(keyword)
                log_ccd.debug('Removed conflictive keyword '
                              '{:s}'.format(keyword))
            log_ccd.debug('Updated headers')
            fits.writeto(full_path,
                         data,
                         header,
                         overwrite=True)  # 'clobber' in astropy < 3.0
        except KeyError as error:
            log_ccd.debug(error)
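# Hypothetical usage; the path and file names are placeholders.
remove_conflictive_keywords('/data/raw/blue', ['0001_zenith.fits', '0002_zenith.fits'])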
# spectroscopy specific functions