def cropManual(inputDir, outputDir, E1, E2, E3, E4):
    data = glob.glob(inputDir+'/*.fits')
    borders = []
    # - - - - - - CREATE THE SUBDIRECTORY TO STORE THE PNG IMAGES - - - - - -
    dir_png = outputDir+'/PNG_Images'
    if not os.path.isdir(dir_png):
        os.makedirs(dir_png)
    for i in xrange(0, len(data)):
        name = data[i].split('/')[-1].split('.')[0]
        image = fits.getdata(data[i])
        if isinstance(image, list):
            image = image[0]
        print "Crop: "+'/Img_0_'+str(i)+'.fits'
        fits.writeto(outputDir+'/Img_0_'+str(i)+'.fits', image, clobber=True)
        image = manualCrop(image, E1, E2, E3, E4)
        fits.writeto(outputDir+'/Img_1_'+str(i)+'.fits', image, clobber=True)
        print "Done."
    os.remove( flat_P )
    if (os.access(sm_flat_fits, os.F_OK)):
        os.remove( sm_flat_fits )
    hdu = pyfits.PrimaryHDU( flat_S )
    hdu.writeto( S_flat_fits )
    hdu = pyfits.PrimaryHDU( flat_Ss )
    hdu.writeto( S_flat_fits_simple )
    hdu = pyfits.PrimaryHDU( P )
    hdu.writeto( flat_P )
    hdu = pyfits.PrimaryHDU( sm_flat )
    hdu.writeto( sm_flat_fits )
# recover Flat spectra and P matrix
else:
    flat_S = pyfits.getdata( S_flat_fits )
    flat_Ss = pyfits.getdata( S_flat_fits_simple )
    P = pyfits.getdata( flat_P )
    sm_flat = pyfits.getdata( sm_flat_fits )
##################################################### Science images extraction ######################################################################
sm_flat, norms = GLOBALutils.FlatNormalize_single( sm_flat[:,1,:], mid=int(0.5*sm_flat.shape[2]),span=200)
new_list = []
new_list_obnames = []
for i in range(len(objects)):
    fsim = objects[i]
    obname = pucherosutils.search_name(fsim)
    if (object2do == 'all'):
        new_list.append(fsim)
        new_list_obnames.append( obname )
# Get the global min and max pixel
extents = []
for f in glob.glob(path.join('..', 'test', 'data', "*.fits.gz")):
    data = fits.getdata(f)
    extents.append( np.nanmin(data) )
    extents.append( np.nanmax(data) )
extents = np.array(extents)
minimum = extents.min()
maximum = extents.max()
extent = float(maximum - minimum)
extent = extent - 0.75 * extent
for f in glob.glob(path.join('..', 'test', 'data', "*.fits.gz")):
    data = fits.getdata(f)
    data = (data - minimum) / extent
    data = (255.0 * data)
    data = np.clip(data, 0, 255).astype('uint8')
    data = np.flipud(data)
    im = Image.fromarray(data)
    im.save(f.replace('fits.gz', 'png'))
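
# Net effect of the scaling above: each output pixel is
# 255 * (data - minimum) / (0.25 * (maximum - minimum)), clipped to [0, 255],
# so anything brighter than 25% of the global dynamic range saturates to white.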
    :type flag: int
    :param commission: whether the spectrum was taken during commissioning
    :type commission: bool
    :return: full file path (downloads in the background if not found locally); False if the file cannot be found on the server
    :rtype: str
    :History:
        | 2017-Nov-11 - Written - Henry Leung (University of Toronto)
        | 2018-Aug-31 - Updated - Henry Leung (University of Toronto)
    """
    dr = apogee_default_dr(dr=dr)
    if location is None:  # for DR16 and later, location is expected to be None because field is used
        global _ALLSTAR_TEMP
        if f'dr{dr}' not in _ALLSTAR_TEMP:
            _ALLSTAR_TEMP[f'dr{dr}'] = fits.getdata(allstar(dr=dr))
        if telescope is None:
            matched_idx = [np.nonzero(_ALLSTAR_TEMP[f'dr{dr}']['APOGEE_ID'] == apogee)[0]][0]
        else:
            matched_idx = [np.nonzero([(_ALLSTAR_TEMP[f'dr{dr}']['APOGEE_ID'] == apogee) &
                                       (_ALLSTAR_TEMP[f'dr{dr}']['TELESCOPE'] == telescope)])][0][1]
        if len(matched_idx) == 0:
            raise ValueError(f"No entry found in allstar DR{dr} met with your requirement!!")
        location = _ALLSTAR_TEMP[f'dr{dr}']['LOCATION_ID'][matched_idx][0]
        field = _ALLSTAR_TEMP[f'dr{dr}']['FIELD'][matched_idx][0]
        telescope = _ALLSTAR_TEMP[f'dr{dr}']['TELESCOPE'][matched_idx][0]
    if dr == 13:
        reduce_prefix = 'r6'
        str1 = f'https://data.sdss.org/sas/dr{dr}/apogee/spectro/redux/{reduce_prefix}/stars/apo25m/{location}/'
        if commission:
A modelfunc must be specified. Model functions should take an xarr and
a series of keyword arguments corresponding to the line parameters
(Tex, tau, xoff_v, and width (gaussian sigma, not FWHM))
"""
self.modelfunc = modelfunc
if self.modelfunc is None:
    raise ValueError("Must specify a spectral model function. See class help for form.")
if texgrid is None and taugrid is None:
    if path_to_texgrid == '' or path_to_taugrid == '':
        raise IOError("Must specify model grids to use.")
    else:
        self.taugrid = [pyfits.getdata(path_to_taugrid)]
        self.texgrid = [pyfits.getdata(path_to_texgrid)]
        hdr = pyfits.getheader(path_to_taugrid)
        self.yinds, self.xinds = np.indices(self.taugrid[0].shape[1:])
        self.densityarr = (self.xinds+hdr['CRPIX1']-1)*hdr['CD1_1']+hdr['CRVAL1']  # log density
        self.columnarr = (self.yinds+hdr['CRPIX2']-1)*hdr['CD2_2']+hdr['CRVAL2']  # log column
        self.minfreq = (4.8,)
        self.maxfreq = (5.0,)
elif len(taugrid) == len(texgrid) and hdr is not None:
    self.minfreq, self.maxfreq, self.texgrid = zip(*texgrid)
    self.minfreq, self.maxfreq, self.taugrid = zip(*taugrid)
    self.yinds, self.xinds = np.indices(self.taugrid[0].shape[1:])
    self.densityarr = (self.xinds+hdr['CRPIX1']-1)*hdr['CD1_1']+hdr['CRVAL1']  # log density
    self.columnarr = (self.yinds+hdr['CRPIX2']-1)*hdr['CD2_2']+hdr['CRVAL2']  # log column
else:
    raise Exception
# Convert X-units to frequency in GHz
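
# A minimal sketch of a model function with the signature the docstring above
# requires (name and Gaussian body are only placeholders; a real modelfunc
# would evaluate the actual radiative-transfer model on xarr):
def example_modelfunc(xarr, Tex=20.0, tau=1.0, xoff_v=0.0, width=1.0, **kwargs):
    return Tex * (1.0 - np.exp(-tau)) * np.exp(-(xarr - xoff_v)**2 / (2.0 * width**2))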
@allow_rasterization
def draw(self, renderer):
    for _c in self._collections:
        _c.draw(renderer)

def insert_rasterized_contour_plot(c, ax):
    collections = c.collections
    for _c in collections:
        _c.remove()
    cc = ListCollection(collections, rasterized=True)
    ax.add_artist(cc)
    return cc
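
# Usage sketch: rasterize a dense contourf plot so the saved PDF stays small.
# Assumes the ListCollection helper above is complete and a matplotlib version
# where ContourSet.collections is available; the demo data is arbitrary.
import numpy as np
import matplotlib.pyplot as plt
fig, ax = plt.subplots()
x = y = np.linspace(-3, 3, 200)
z = np.exp(-(x[None, :]**2 + y[:, None]**2))
cs = ax.contourf(x, y, z, 100)
insert_rasterized_contour_plot(cs, ax)  # swap the many contour patches for one rasterized artist
fig.savefig('contours.pdf', dpi=300)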
img, hdr = fits.getdata('pipeCenterB59-350.fits', header=True)
beam = 24.9
img = img + 31.697
filfind = fil_finder_2D(img, hdr, beam, glob_thresh=20,
                        distance=145.)
filfind.create_mask()  # size_thresh=400
filfind.medskel()
filfind.analyze_skeletons()
filfind.exec_rht()
filfind.find_widths(verbose=False)
r = 460. / 145.
conv = np.sqrt(r ** 2. - 1) * \
    (beam / np.sqrt(8 * np.log(2)) / (np.abs(hdr["CDELT2"]) * 3600.))
kernel = convolution.Gaussian2DKernel(conv)
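
# Assumed follow-up (a sketch; not part of the snippet above): convolve the map
# with this kernel to mimic observing the same cloud at the larger distance
# implied by r = 460./145. above.
img_degraded = convolution.convolve(img, kernel, boundary='extend')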
    if (os.access(R_flat_co_fits, os.F_OK)):
        os.remove( R_flat_co_fits )
    hdu = pyfits.PrimaryHDU( P_co_B )
    hdu.writeto( P_co_B_fits )
    hdu = pyfits.PrimaryHDU( P_co_R )
    hdu.writeto( P_co_R_fits )
    hdu = pyfits.PrimaryHDU( B_flat_co )
    hdu.writeto( B_flat_co_fits )
    hdu = pyfits.PrimaryHDU( R_flat_co )
    hdu.writeto( R_flat_co_fits )
else:
    print "\t\tExtracted flat comparison spectra found, loading..."
    P_co_B = pyfits.getdata( P_co_B_fits )
    P_co_R = pyfits.getdata( P_co_R_fits )
    B_flat_co = pyfits.getdata( B_flat_co_fits )
    R_flat_co = pyfits.getdata( R_flat_co_fits )

# Normalize flat field spectra.
B_flat_ob_n, Bnorms = GLOBALutils.FlatNormalize_single( B_flat_ob, mid=2048 )
R_flat_ob_n, Rnorms = GLOBALutils.FlatNormalize_single( R_flat_ob, mid=2048 )

print '\n\tExtraction of ThAr calibration frames:'
# Extract all ThAr files
for fsim in ThAr_ref:
    print "\t\tWorking on ThAr+Ne file ", fsim, "..."
    hthar = pyfits.open( fsim )
    dtharB = harpsutils.OverscanTrim( hthar[1].data ) - MasterBias[:,:,0]
    dtharR = harpsutils.OverscanTrim( hthar[2].data ) - MasterBias[:,:,1]
    print 'one'
def make_int_flux_image(self, emin=1e6, emax=1e20):
    """Make an integral flux image for an energy band"""
    for ii in range(len(components)):
        in_file = filename(self.tag, ii, 'cube')
        out_file = filename(self.tag, ii, 'image')
        logging.info('---> Processing {0}'.format(components[ii]))
        fluxes = fits.getdata(in_file)
        image = cube_integrate(fluxes, self.energy, emin, emax)
        fits.writeto(out_file, image, clobber=self.clobber)
def make_n2hp_fitter(path_to_radex='/Users/adam/work/n2hp/',
                     fileprefix='1-2_T=5to55_lvg'):
    """
    Create an n2hp fitter using RADEX data cubes. The following files must exist::

        path_to_radex+fileprefix+'_tex1.fits'
        path_to_radex+fileprefix+'_tau1.fits'
        path_to_radex+fileprefix+'_tex2.fits'
        path_to_radex+fileprefix+'_tau2.fits'

    e.g. `/Users/adam/work/n2hp/1-2_T=5to55_lvg_tau1.fits`
    """
    # create the n2hp Radex fitter
    # This step cannot be easily generalized: the user needs to read in their own grids
    texgrid1 = pyfits.getdata(path_to_radex+fileprefix+'_tex1.fits')
    taugrid1 = pyfits.getdata(path_to_radex+fileprefix+'_tau1.fits')
    texgrid2 = pyfits.getdata(path_to_radex+fileprefix+'_tex2.fits')
    taugrid2 = pyfits.getdata(path_to_radex+fileprefix+'_tau2.fits')
    hdr = pyfits.getheader(path_to_radex+fileprefix+'_tau2.fits')

    # this deserves a lot of explanation:
    # models.n2hp.n2hp_radex is the MODEL that we are going to fit
    # models.model.SpectralModel is a wrapper to deal with parinfo, multiple peaks,
    # and annotations
    # all of the parameters after the first are passed to the model function
    n2hp_radex_fitter = models.model.SpectralModel(
        models.n2hp.n2hp_radex, 4,
        parnames=['density', 'column', 'center', 'width'],
        parvalues=[4, 12, 0, 1],
        parlimited=[(True, True), (True, True), (False, False), (True, False)],
        parlimits=[(1, 8), (11, 16), (0, 0), (0, 0)],
    hdu = GLOBALutils.update_header(hdu, 'RESOL', RESI)
    hdu = GLOBALutils.update_header(hdu, 'PIPELINE', 'CERES')
    hdu = GLOBALutils.update_header(hdu, 'XC_MIN', XC_min)
    hdu = GLOBALutils.update_header(hdu, 'BJD_OUT', bjd_out)

    line_out = "%-15s %18.8f %9.4f %7.4f %9.3f %5.3f harps ceres %8d %6d %5.2f %5.2f %5.1f %4.2f %5.2f %6.1f %4d %s\n" % \
               (obname, bjd_out, RV, RVerr2, BS, BSerr, RESI, T_eff_epoch, logg_epoch, Z_epoch, vsini_epoch, XC_min, disp_epoch,
                TEXP, SNR_5130_R, ccf_pdf)
    f_res.write(line_out)

    if (os.access( dirout + fout, os.F_OK )):
        os.remove( dirout + fout )
    hdu.writeto( dirout + fout )
else:
    print "\t\tReading spectral file from", fout
    spec = pyfits.getdata( fout )

f_res.close()