Debug the product query and improve the plots for the paper

Thibault Barnouin
2023-06-02 16:34:44 +02:00
parent a3b2de0e4b
commit 181eb77ec4
8 changed files with 69 additions and 51 deletions

View File

@@ -28,12 +28,12 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data"):
algo="richardson" algo="richardson"
# Initial crop # Initial crop
display_crop = False display_crop = True
# Background estimation # Background estimation
error_sub_type = 'freedman-diaconis' #sqrt, sturges, rice, scott, freedman-diaconis (default) or shape (example (51,51)) error_sub_type = 'freedman-diaconis' #sqrt, sturges, rice, scott, freedman-diaconis (default) or shape (example (51,51))
subtract_error = 1.25 subtract_error = 1.00
display_error = False display_error = True
# Data binning # Data binning
rebin = True rebin = True
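
Note: the error_sub_type keywords appear to mirror NumPy's histogram bin-count estimators ('freedman-diaconis' is NumPy's 'fd' rule). A toy illustration of the estimator itself, not the pipeline's own call:

    import numpy as np

    # Hypothetical background sample; the pipeline bins real pixel values instead.
    background_sample = np.random.normal(loc=5e-3, scale=1e-3, size=10_000)
    edges = np.histogram_bin_edges(background_sample, bins='fd')   # 'fd' = freedman-diaconis
    counts, _ = np.histogram(background_sample, bins=edges)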
@@ -42,12 +42,14 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data"):
  rebin_operation = 'sum' #sum or average
  # Alignement
- align_center = 'image' #If None will align image to image center
+ align_center = 'center' #If None will not align the images
+ display_bkg = False
+ display_align = False
  display_data = False
  # Smoothing
- smoothing_function = 'gaussian' #gaussian_after, weighted_gaussian_after, gaussian, weighted_gaussian or combine
+ smoothing_function = 'combine' #gaussian_after, weighted_gaussian_after, gaussian, weighted_gaussian or combine
- smoothing_FWHM = None #If None, no smoothing is done
+ smoothing_FWHM = 0.20 #If None, no smoothing is done
  smoothing_scale = 'arcsec' #pixel or arcsec
  # Rotation
@@ -56,7 +58,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data"):
  # Final crop
  crop = False #Crop to desired ROI
- final_display = True #Whether to display all polarization map outputs
+ final_display = False #Whether to display all polarization map outputs
  # Polarization map output
  SNRp_cut = 3. #P measurments with SNR>3
@@ -77,15 +79,15 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data"):
      target = input("Target name:\n>")
  else:
      target, products = retrieve_products(target,proposal_id,output_dir=output_dir)
-     prod = products[0]
+     prod = products.pop()
-     for prods in products[1:]:
+     for prods in products:
          main(target=target,infiles=["/".join(pr) for pr in prods],output_dir=output_dir)
-     data_folder = prod[0,0]
+     data_folder = prod[0][0]
      try:
          plots_folder = data_folder.replace("data","plots")
      except:
          plots_folder = "."
-     infiles = prod[:,1]
+     infiles = [p[1] for p in prod]
  data_array, headers = proj_fits.get_obs_data(infiles, data_folder=data_folder, compute_flux=True)
  figname = "_".join([target,"FOC"])
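
Since retrieve_products now returns a plain list of per-observation groups (see its last hunk below), each group is unpacked with list indexing rather than 2-D array slicing. A minimal sketch with a hypothetical group of [folder, filename] pairs:

    # Hypothetical group as popped from the retrieve_products output
    prod = [["./data/NGC1068/5144", "obs1_c0f.fits"],
            ["./data/NGC1068/5144", "obs2_c0f.fits"]]

    data_folder = prod[0][0]          # folder shared by the whole group
    infiles = [p[1] for p in prod]    # bare filenames passed to get_obs_data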
@@ -98,6 +100,8 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data"):
      figtype = "full"
  else:
      figtype = "_".join(["".join([s[0] for s in smoothing_function.split("_")]),"".join(["{0:.2f}".format(smoothing_FWHM).replace(".",""),smoothing_scale])]) #additionnal informations
+ if align_center is None:
+     figtype += "_not_aligned"
  # Crop data to remove outside blank margins.
  data_array, error_array, headers = proj_red.crop_array(data_array, headers, step=5, null_val=0., inside=True, display=display_crop, savename=figname, plots_folder=plots_folder)
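
With the defaults set above (smoothing_function='combine', smoothing_FWHM=0.20, smoothing_scale='arcsec'), this naming scheme produces the suffix 'c_020arcsec', which appears to be the suffix of the new IC5063 filename updated further down. A quick check of the string construction:

    smoothing_function, smoothing_FWHM, smoothing_scale = 'combine', 0.20, 'arcsec'
    align_center = 'center'

    figtype = "_".join(["".join([s[0] for s in smoothing_function.split("_")]),
                        "".join(["{0:.2f}".format(smoothing_FWHM).replace(".", ""), smoothing_scale])])
    if align_center is None:
        figtype += "_not_aligned"
    print(figtype)   # -> c_020arcsec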
@@ -110,9 +114,15 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data"):
  background = None
  data_array, error_array, headers, background = proj_red.get_error(data_array, headers, error_array, sub_type=error_sub_type, subtract_error=subtract_error, display=display_error, savename="_".join([figname,"errors"]), plots_folder=plots_folder, return_background=True)
+ if display_bkg:
+     proj_plots.plot_obs(data_array, headers, vmin=data_array[data_array>0.].min()*headers[0]['photflam'], vmax=data_array[data_array>0.].max()*headers[0]['photflam'], savename="_".join([figname,"bkg"]), plots_folder=plots_folder)
  # Align and rescale images with oversampling.
  data_array, error_array, headers, data_mask = proj_red.align_data(data_array, headers, error_array=error_array, background=background, upsample_factor=10, ref_center=align_center, return_shifts=False)
+ if display_align:
+     proj_plots.plot_obs(data_array, headers, vmin=data_array[data_array>0.].min()*headers[0]['photflam'], vmax=data_array[data_array>0.].max()*headers[0]['photflam'], savename="_".join([figname,"center",str(align_center)]), plots_folder=plots_folder)
  # Rebin data to desired pixel size.
  if rebin:
      data_array, error_array, headers, Dxy, data_mask = proj_red.rebin_array(data_array, error_array, headers, pxsize=pxsize, scale=px_scale, operation=rebin_operation, data_mask=data_mask)
@@ -125,7 +135,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data"):
  #Plot array for checking output
  if display_data and px_scale.lower() not in ['full','integrate']:
-     proj_plots.plot_obs(data_array, headers, vmin=data_array[data_array>0.].min()*headers[0]['photflam'], vmax=data_array[data_array>0.].max()*headers[0]['photflam'], savename="_".join([figname,"center",align_center]), plots_folder=plots_folder)
+     proj_plots.plot_obs(data_array, headers, vmin=data_array[data_array>0.].min()*headers[0]['photflam'], vmax=data_array[data_array>0.].max()*headers[0]['photflam'], savename="_".join([figname,"rebin"]), plots_folder=plots_folder)
  background = np.array([np.array(bkg).reshape(1,1) for bkg in background])
  background_error = np.array([np.array(np.sqrt((bkg-background[np.array([h['filtnam1']==head['filtnam1'] for h in headers],dtype=bool)].mean())**2/np.sum([h['filtnam1']==head['filtnam1'] for h in headers]))).reshape(1,1) for bkg,head in zip(background,headers)])

View File

@@ -21,6 +21,9 @@ root_dir_plot_S = path_join(root_dir,'FOC_Reduction','plots','NGC1068','5144')
  filename_S = "NGC1068_FOC_b_10px.fits"
  plt.rcParams.update({'font.size': 15})
+ SNRi_cut = 30.
+ SNRp_cut = 3.
  data_K = {}
  data_S = {}
  for d,i in zip(['I','Q','U','P','PA','sI','sQ','sU','sP','sPA'],[0,1,2,5,8,(3,0,0),(3,1,1),(3,2,2),6,9]):
@@ -59,7 +62,7 @@ for d in [data_S, data_K]:
      d['SNRp'][d['sP']>0.] = d['P'][d['sP']>0.]/d['sP'][d['sP']>0.]
      d['SNRi'] = np.zeros(d['I'].shape)
      d['SNRi'][d['sI']>0.] = d['I'][d['sI']>0.]/d['sI'][d['sI']>0.]
-     d['mask'] = np.logical_and(d['SNRi']>30,d['SNRp']>5)
+     d['mask'] = np.logical_and(d['SNRi']>SNRi_cut,d['SNRp']>SNRp_cut)
  data_S['mask'], data_K['mask'] = np.logical_and(data_S['mask'],data_K['mask']), np.logical_and(data_S['mask'],data_K['mask'])
@@ -121,7 +124,7 @@ im0 = ax.imshow(data_S['I']*convert_flux,norm=LogNorm(data_S['I'][data_S['I']>0]
  quiv0 = ax.quiver(data_S['X'],data_S['Y'],data_S['xy_U'],data_S['xy_V'],units='xy',angles='uv',scale=0.5,scale_units='xy',pivot='mid',headwidth=0.,headlength=0.,headaxislength=0.,width=0.2,color='b',alpha=0.75, label="PA through this pipeline")
  quiv1 = ax.quiver(data_K['X'],data_K['Y'],data_K['xy_U'],data_K['xy_V'],units='xy',angles='uv',scale=0.5,scale_units='xy',pivot='mid',headwidth=0.,headlength=0.,headaxislength=0.,width=0.1,color='r',alpha=0.75, label="PA through Kishimoto's pipeline")
- ax.set_title(r"$SNR_P \geq 5 \; & \; SNR_I \geq 30$")
+ ax.set_title(r"$SNR_P \geq$ "+str(SNRp_cut)+r"$\; & \; SNR_I \geq $"+str(SNRi_cut))
  #ax.coords.grid(True, color='white', ls='dotted', alpha=0.5)
  ax.coords[0].set_axislabel('Right Ascension (J2000)')
  ax.coords[0].set_axislabel_position('b')

View File

@@ -56,6 +56,7 @@ def display_bkg(data, background, std_bkg, headers, histograms=None, binning=Non
  formatter = mdates.ConciseDateFormatter(locator)
  ax.xaxis.set_major_locator(locator)
  ax.xaxis.set_major_formatter(formatter)
+ ax.set_ylim(bottom=0.)
  ax.set_xlabel("Observation date and time")
  ax.set_ylabel(r"Flux [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]")
  plt.legend()

View File

@@ -79,8 +79,8 @@ def get_obs_data(infiles, data_folder="", compute_flux=False):
      print(np.unique(cdelt[np.logical_not(is_pol60)],axis=0).size)
      raise ValueError("Not all images have same pixel size")
  else:
-     for head in np.array(headers,dtype=object)[is_pol60]:
-         head['cdelt1'],head['cdelt2'] = np.unique(cdelt[np.logical_not(is_pol60)],axis=0)[0]
+     for i in np.arange(len(headers))[is_pol60]:
+         headers[i]['cdelt1'],headers[i]['cdelt2'] = np.unique(cdelt[np.logical_not(is_pol60)],axis=0)[0]
  if compute_flux:
      for i in range(len(infiles)):

View File

@@ -139,6 +139,7 @@ def plot_obs(data_array, headers, shape=None, vmin=None, vmax=None, rectangle=No
  if vmin is None or vmax is None:
      vmin, vmax = convert*data[data>0.].min()/10., convert*data[data>0.].max()
  #im = axe.imshow(convert*data, vmin=vmin, vmax=vmax, origin='lower', cmap='gray')
+ data[data*convert<vmin*10.] = vmin*10./convert
  im = axe.imshow(convert*data, norm=LogNorm(vmin,vmax), origin='lower', cmap='gray')
  if not(rectangle is None):
      x, y, width, height, angle, color = rectangle[i]
@@ -320,9 +321,9 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
  # If no display selected, show intensity map
  display='i'
  if mask.sum() > 0.:
-     vmin, vmax = 1/5.0*np.mean(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
+     vmin, vmax = 1./2.*np.median(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
  else:
-     vmin, vmax = 1/5.0*np.mean(np.sqrt(stk_cov.data[0,0][stkI.data > 0.])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
+     vmin, vmax = 1./2.*np.median(np.sqrt(stk_cov.data[0,0][stkI.data > 0.])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
  im = ax.imshow(stkI.data*convert_flux, norm=LogNorm(vmin,vmax), aspect='equal', cmap='inferno', alpha=1.)
  cbar = plt.colorbar(im, cax=cbar_ax, label=r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]")
  levelsI = np.linspace(vmax*0.01, vmax*0.99, 10)
@@ -334,9 +335,9 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
  display='pf'
  pf_mask = (stkI.data > 0.) * (pol.data > 0.)
  if mask.sum() > 0.:
-     vmin, vmax = 1.*np.mean(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
+     vmin, vmax = 1./2.*np.median(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
  else:
-     vmin, vmax = 1.*np.mean(np.sqrt(stk_cov.data[0,0][stkI.data > 0.])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
+     vmin, vmax = 1./2.*np.median(np.sqrt(stk_cov.data[0,0][stkI.data > 0.])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
  im = ax.imshow(stkI.data*convert_flux*pol.data, norm=LogNorm(vmin,vmax), aspect='equal', cmap='inferno', alpha=1.)
  cbar = plt.colorbar(im, cax=cbar_ax, label=r"$F_{\lambda} \cdot P$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]")
  levelsPf = np.linspace(vmax*0.01, vmax*0.99, 10)
@@ -1787,12 +1788,12 @@ class pol_map(object):
      self.display_selection = "total_flux"
  if self.display_selection.lower() in ['total_flux']:
      self.data = self.I*self.convert_flux
-     vmin, vmax = 1/5.0*np.median(self.data[self.data > 0.]), np.max(self.data[self.data > 0.])
+     vmin, vmax = 1./2.*np.median(self.data[self.data > 0.]), np.max(self.data[self.data > 0.])
      norm = LogNorm(vmin, vmax)
      label = r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]"
  elif self.display_selection.lower() in ['pol_flux']:
      self.data = self.I*self.convert_flux*self.P
-     vmin, vmax = 1/2.0*np.median(self.I[self.I > 0.]*self.convert_flux), np.max(self.I[self.I > 0.]*self.convert_flux)
+     vmin, vmax = 1./2.*np.median(self.I[self.I > 0.]*self.convert_flux), np.max(self.I[self.I > 0.]*self.convert_flux)
      norm = LogNorm(vmin, vmax)
      label = r"$F_{\lambda} \cdot P$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]"
  elif self.display_selection.lower() in ['pol_deg']:
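
All of these display ranges now anchor the lower bound of the LogNorm at half the median noise (or flux) level instead of a fraction of the mean, which is less sensitive to a handful of very bright or empty pixels. A toy sketch of the idea, with made-up data rather than the pipeline's Stokes maps:

    import numpy as np
    from matplotlib.colors import LogNorm

    flux = np.random.lognormal(mean=-40., sigma=1.5, size=(64, 64))  # toy image, all positive
    vmin = 1./2.*np.median(flux[flux > 0.])   # robust floor for the colour scale
    vmax = np.max(flux[flux > 0.])
    norm = LogNorm(vmin, vmax)                # passed to imshow for the intensity maps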

View File

@@ -91,7 +91,7 @@ def get_product_list(target=None, proposal_id=None):
      used_pol = np.zeros(3)
      for dataset in obs[obs['Proposal ID'] == pid]:
          used_pol[polfilt[dataset['Filters'][0]]] += 1
-     if np.all(used_pol < 1):
+     if np.any(used_pol < 1):
          obs.remove_rows(np.arange(len(obs))[obs['Proposal ID'] == pid])
  tab = unique(obs, ['Target name', 'Proposal ID'])
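
This condition is one of the query fixes: a FOC polarization set needs exposures in all three polarizers (POL0, POL60, POL120), so a proposal should be dropped as soon as any of the three counters is still zero, not only when all of them are. A small check of the two conditions, assuming used_pol holds the per-polarizer exposure counts:

    import numpy as np

    used_pol = np.array([2., 0., 2.])   # e.g. POL0 and POL120 observed, POL60 missing
    print(np.all(used_pol < 1))         # False: the old test kept this incomplete proposal
    print(np.any(used_pol < 1))         # True:  the new test removes it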
@@ -134,8 +134,8 @@ def get_product_list(target=None, proposal_id=None):
  for prod in products:
      prod['proposal_id'] = results['Proposal ID'][results['Dataset']==prod['productFilename'][:len(results['Dataset'][0])].upper()][0]
- #for prod in products:
- #    prod['target_name'] = observations['target_name'][observation['obsid']==prod['obsID']]
+ for prod in products:
+     prod['target_name'] = observations['target_name'][observations['obsid']==prod['obsID']][0]
  tab = unique(products, ['target_name', 'proposal_id'])
  if np.all(tab['target_name']==tab['target_name'][0]):
      target = tab['target_name'][0]
@@ -156,7 +156,7 @@ def retrieve_products(target=None, proposal_id=None, output_dir='./data'):
  filepaths = []
  #obs_dir = path_join(data_dir, obs['prodposal_id'])
  #if obs['target_name']!=target:
- obs_dir = path_join(path_join(output_dir, obs['target_name']), obs['proposal_id'])
+ obs_dir = path_join(path_join(output_dir, target), obs['proposal_id'])
  if not path_exists(obs_dir):
      system("mkdir -p {0:s} {1:s}".format(obs_dir,obs_dir.replace("data","plots")))
  for file in products['productFilename'][products['Obs'] == obs['Obs']]:
@@ -169,7 +169,7 @@ def retrieve_products(target=None, proposal_id=None, output_dir='./data'):
          filepaths.append([obs_dir,file])
      prodpaths.append(np.array(filepaths,dtype=str))
- return target, np.array(prodpaths)
+ return target, prodpaths
  if __name__ == "__main__":
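
One reason to return the plain list is that the per-observation groups can have different numbers of files, so they cannot be stacked into one regular array. A minimal illustration with hypothetical group sizes:

    import numpy as np

    group_a = np.array([["dir_a", "f1.fits"], ["dir_a", "f2.fits"]], dtype=str)  # 2 files
    group_b = np.array([["dir_b", "f3.fits"]], dtype=str)                        # 1 file
    prodpaths = [group_a, group_b]
    # np.array(prodpaths) would have to build a ragged array here, which recent
    # NumPy versions refuse unless dtype=object is given; the list keeps each
    # group intact for the caller in main().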

View File

@@ -290,7 +290,7 @@ def crop_array(data_array, headers, error_array=None, data_mask=None, step=5,
  crop_headers[i]['naxis1'], crop_headers[i]['naxis2'] = crop_array[i].shape
  if display:
-     plt.rcParams.update({'font.size': 20})
+     plt.rcParams.update({'font.size': 15})
      fig, ax = plt.subplots(figsize=(10,10))
      convert_flux = headers[0]['photflam']
      data = deepcopy(data_array[0]*convert_flux)
@@ -326,7 +326,7 @@ def crop_array(data_array, headers, error_array=None, data_mask=None, step=5,
  if not(savename is None):
      #fig.suptitle(savename+'_'+filt+'_crop_region')
-     fig.savefig(plots_folder+savename+'_'+filt+'_crop_region.png',
+     fig.savefig("/".join([plots_folder,savename+'_'+filt+'_crop_region.png']),
          bbox_inches='tight')
  plot_obs(data_array, headers, vmin=convert_flux*data_array[data_array>0.].mean()/5.,
      vmax=convert_flux*data_array[data_array>0.].max(), rectangle=[rectangle,]*len(headers),
@@ -730,11 +730,12 @@ def align_data(data_array, headers, error_array=None, background=None,
  data_array, ref_data, headers = full_array[:-1], full_array[-1], full_headers[:-1]
  error_array = err_array[:-1]
+ do_shift = True
  if ref_center is None:
      # Define the center of the reference image to be the center pixel
      #if None have been specified
      ref_center = (np.array(ref_data.shape)/2).astype(int)
+     do_shift = False
  elif ref_center.lower() in ['max', 'flux', 'maxflux', 'max_flux']:
      # Define the center of the reference image to be the pixel of max flux.
      ref_center = np.unravel_index(np.argmax(ref_data),ref_data.shape)
@@ -767,8 +768,10 @@ def align_data(data_array, headers, error_array=None, background=None,
  rescaled_error[i,res_shift[0]:res_shift[0]+shape[1],
                 res_shift[1]:res_shift[1]+shape[2]] = deepcopy(error_array[i])
  # Shift images to align
+ if do_shift:
- rescaled_image[i] = sc_shift(rescaled_image[i], shift, order=1, cval=0.)
- rescaled_error[i] = sc_shift(rescaled_error[i], shift, order=1, cval=background[i])
+     rescaled_image[i] = sc_shift(rescaled_image[i], shift, order=1, cval=0.)
+     rescaled_error[i] = sc_shift(rescaled_error[i], shift, order=1, cval=background[i])
  curr_mask = sc_shift(res_mask, shift, order=1, cval=False)
  mask_vertex = clean_ROI(curr_mask)
  rescaled_mask[i,mask_vertex[2]:mask_vertex[3],mask_vertex[0]:mask_vertex[1]] = True
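
Taken together, the new do_shift flag makes ref_center=None mean "embed the images on the common oversampled grid without re-registering them". A condensed, hypothetical sketch of that control flow (align_images is not the module's align_data; sc_shift is scipy.ndimage.shift, as imported there):

    import numpy as np
    from scipy.ndimage import shift as sc_shift

    def align_images(images, shifts, ref_center=None):
        # With ref_center=None the frames are kept as-is; otherwise each one
        # is re-registered by its measured shift (linear interpolation).
        do_shift = ref_center is not None
        aligned = []
        for image, shift in zip(images, shifts):
            aligned.append(sc_shift(image, shift, order=1, cval=0.) if do_shift else image)
        return np.array(aligned)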

View File

@@ -6,47 +6,47 @@ from copy import deepcopy
  from lib.plots import overplot_radio, overplot_pol, align_pol
  from matplotlib.colors import LogNorm
- Stokes_UV = fits.open("../data/IC5063_x3nl030/IC5063_FOC_c_020.fits")
+ Stokes_UV = fits.open("./data/IC5063/5918/IC5063_FOC_c_020arcsec.fits")
- Stokes_18GHz = fits.open("../data/IC5063_x3nl030/radio/IC5063_18GHz.fits")
+ Stokes_18GHz = fits.open("./data/IC5063/radio/IC5063_18GHz.fits")
- Stokes_24GHz = fits.open("../data/IC5063_x3nl030/radio/IC5063_24GHz.fits")
+ Stokes_24GHz = fits.open("./data/IC5063/radio/IC5063_24GHz.fits")
- Stokes_103GHz = fits.open("../data/IC5063_x3nl030/radio/IC5063_103GHz.fits")
+ Stokes_103GHz = fits.open("./data/IC5063/radio/IC5063_103GHz.fits")
- Stokes_229GHz = fits.open("../data/IC5063_x3nl030/radio/IC5063_229GHz.fits")
+ Stokes_229GHz = fits.open("./data/IC5063/radio/IC5063_229GHz.fits")
- Stokes_357GHz = fits.open("../data/IC5063_x3nl030/radio/IC5063_357GHz.fits")
+ Stokes_357GHz = fits.open("./data/IC5063/radio/IC5063_357GHz.fits")
- #Stokes_S2 = fits.open("../data/IC5063_x3nl030/POLARIZATION_COMPARISON/S2_rot_crop.fits")
+ #Stokes_S2 = fits.open("./data/IC5063/POLARIZATION_COMPARISON/S2_rot_crop.fits")
- Stokes_IR = fits.open("../data/IC5063_x3nl030/IR/u2e65g01t_c0f_rot.fits")
+ Stokes_IR = fits.open("./data/IC5063/IR/u2e65g01t_c0f_rot.fits")
  levelsMorganti = np.array([1.,2.,3.,8.,16.,32.,64.,128.])
  #levels18GHz = np.array([0.6, 1.5, 3, 6, 12, 24, 48, 96])/100.*Stokes_18GHz[0].data.max()
  levels18GHz = levelsMorganti*0.28*1e-3
  A = overplot_radio(Stokes_UV, Stokes_18GHz)
- A.plot(levels=levels18GHz, SNRp_cut=2.0, SNRi_cut=15.0, savename='../plots/IC5063_x3nl030/18GHz_overplot_forced.png')
+ A.plot(levels=levels18GHz, SNRp_cut=1.0, SNRi_cut=10.0, savename='./plots/IC5063/18GHz_overplot_forced.png')
  #levels24GHz = np.array([1.,1.5, 3, 6, 12, 24, 48, 96])/100.*Stokes_24GHz[0].data.max()
  levels24GHz = levelsMorganti*0.46*1e-3
  B = overplot_radio(Stokes_UV, Stokes_24GHz)
- B.plot(levels=levels24GHz, SNRp_cut=2.0, SNRi_cut=15.0, savename='../plots/IC5063_x3nl030/24GHz_overplot_forced.png')
+ B.plot(levels=levels24GHz, SNRp_cut=1.0, SNRi_cut=10.0, savename='./plots/IC5063/24GHz_overplot_forced.png')
  levels103GHz = np.linspace(1,99,11)/100.*np.max(deepcopy(Stokes_103GHz[0].data[Stokes_103GHz[0].data > 0.]))
  C = overplot_radio(Stokes_UV, Stokes_103GHz)
- C.plot(levels=levels103GHz, SNRp_cut=2.0, SNRi_cut=15.0, savename='../plots/IC5063_x3nl030/103GHz_overplot_forced.png')
+ C.plot(levels=levels103GHz, SNRp_cut=1.0, SNRi_cut=10.0, savename='./plots/IC5063/103GHz_overplot_forced.png')
  levels229GHz = np.linspace(1,99,11)/100.*np.max(deepcopy(Stokes_229GHz[0].data[Stokes_229GHz[0].data > 0.]))
  D = overplot_radio(Stokes_UV, Stokes_229GHz)
- D.plot(levels=levels229GHz, SNRp_cut=2.0, SNRi_cut=15.0, savename='../plots/IC5063_x3nl030/229GHz_overplot_forced.png')
+ D.plot(levels=levels229GHz, SNRp_cut=1.0, SNRi_cut=10.0, savename='./plots/IC5063/229GHz_overplot_forced.png')
  levels357GHz = np.linspace(1,99,11)/100.*np.max(deepcopy(Stokes_357GHz[0].data[Stokes_357GHz[0].data > 0.]))
  E = overplot_radio(Stokes_UV, Stokes_357GHz)
- E.plot(levels=levels357GHz, SNRp_cut=2.0, SNRi_cut=15.0, savename='../plots/IC5063_x3nl030/357GHz_overplot_forced.png')
+ E.plot(levels=levels357GHz, SNRp_cut=1.0, SNRi_cut=10.0, savename='./plots/IC5063/357GHz_overplot_forced.png')
  #F = overplot_pol(Stokes_UV, Stokes_S2)
- #F.plot(SNRp_cut=3.0, SNRi_cut=80.0, savename='../plots/IC5063_x3nl030/S2_overplot_forced.png', norm=LogNorm(vmin=5e-20,vmax=5e-18))
+ #F.plot(SNRp_cut=3.0, SNRi_cut=80.0, savename='./plots/IC5063/S2_overplot_forced.png', norm=LogNorm(vmin=5e-20,vmax=5e-18))
  G = overplot_pol(Stokes_UV, Stokes_IR, cmap='inferno')
- G.plot(SNRp_cut=1.0, SNRi_cut=10.0, savename='../plots/IC5063_x3nl030/IR_overplot_forced.png', norm=LogNorm(vmin=1e-17,vmax=5e-15), cmap='inferno_r')
+ G.plot(SNRp_cut=1.0, SNRi_cut=10.0, savename='./plots/IC5063/IR_overplot_forced.png', norm=LogNorm(vmin=1e-17,vmax=5e-15), cmap='inferno_r')
- #data_folder1 = "../data/M87/POS1/"
+ #data_folder1 = "./data/M87/POS1/"
- #plots_folder1 = "../plots/M87/POS1/"
+ #plots_folder1 = "./plots/M87/POS1/"
  #basename1 = "M87_020_log"
  #M87_1_95 = fits.open(data_folder1+"M87_POS1_1995_FOC_combine_FWHM020.fits")
  #M87_1_96 = fits.open(data_folder1+"M87_POS1_1996_FOC_combine_FWHM020.fits")
@@ -58,8 +58,8 @@ G.plot(SNRp_cut=1.0, SNRi_cut=10.0, savename='../plots/IC5063_x3nl030/IR_overplo
  #H.plot(SNRp_cut=5.0, SNRi_cut=50.0, savename=plots_folder1+'animated_loop/'+basename1, norm=LogNorm())
  #command("convert -delay 50 -loop 0 {0:s}animated_loop/{1:s}*.png {0:s}animated_loop/{1:s}.gif".format(plots_folder1, basename1))
- #data_folder3 = "../data/M87/POS3/"
+ #data_folder3 = "./data/M87/POS3/"
- #plots_folder3 = "../plots/M87/POS3/"
+ #plots_folder3 = "./plots/M87/POS3/"
  #basename3 = "M87_020_log"
  #M87_3_95 = fits.open(data_folder3+"M87_POS3_1995_FOC_combine_FWHM020.fits")
  #M87_3_96 = fits.open(data_folder3+"M87_POS3_1996_FOC_combine_FWHM020.fits")