Debug the observation product query filtering and improve plot scaling for the paper

This commit is contained in:
Thibault Barnouin
2023-06-02 16:34:44 +02:00
parent a3b2de0e4b
commit 181eb77ec4
8 changed files with 69 additions and 51 deletions

View File

@@ -56,6 +56,7 @@ def display_bkg(data, background, std_bkg, headers, histograms=None, binning=Non
formatter = mdates.ConciseDateFormatter(locator)
ax.xaxis.set_major_locator(locator)
ax.xaxis.set_major_formatter(formatter)
ax.set_ylim(bottom=0.)
ax.set_xlabel("Observation date and time")
ax.set_ylabel(r"Flux [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]")
plt.legend()

View File

@@ -79,8 +79,8 @@ def get_obs_data(infiles, data_folder="", compute_flux=False):
print(np.unique(cdelt[np.logical_not(is_pol60)],axis=0).size)
raise ValueError("Not all images have same pixel size")
else:
for head in np.array(headers,dtype=object)[is_pol60]:
head['cdelt1'],head['cdelt2'] = np.unique(cdelt[np.logical_not(is_pol60)],axis=0)[0]
for i in np.arange(len(headers))[is_pol60]:
headers[i]['cdelt1'],headers[i]['cdelt2'] = np.unique(cdelt[np.logical_not(is_pol60)],axis=0)[0]
if compute_flux:
for i in range(len(infiles)):

View File

@@ -139,6 +139,7 @@ def plot_obs(data_array, headers, shape=None, vmin=None, vmax=None, rectangle=No
if vmin is None or vmax is None:
vmin, vmax = convert*data[data>0.].min()/10., convert*data[data>0.].max()
#im = axe.imshow(convert*data, vmin=vmin, vmax=vmax, origin='lower', cmap='gray')
data[data*convert<vmin*10.] = vmin*10./convert
im = axe.imshow(convert*data, norm=LogNorm(vmin,vmax), origin='lower', cmap='gray')
if not(rectangle is None):
x, y, width, height, angle, color = rectangle[i]
@@ -320,9 +321,9 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
# If no display selected, show intensity map
display='i'
if mask.sum() > 0.:
vmin, vmax = 1/5.0*np.mean(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
vmin, vmax = 1./2.*np.median(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
else:
vmin, vmax = 1/5.0*np.mean(np.sqrt(stk_cov.data[0,0][stkI.data > 0.])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
vmin, vmax = 1./2.*np.median(np.sqrt(stk_cov.data[0,0][stkI.data > 0.])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
im = ax.imshow(stkI.data*convert_flux, norm=LogNorm(vmin,vmax), aspect='equal', cmap='inferno', alpha=1.)
cbar = plt.colorbar(im, cax=cbar_ax, label=r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]")
levelsI = np.linspace(vmax*0.01, vmax*0.99, 10)
@@ -334,9 +335,9 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
display='pf'
pf_mask = (stkI.data > 0.) * (pol.data > 0.)
if mask.sum() > 0.:
vmin, vmax = 1.*np.mean(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
vmin, vmax = 1./2.*np.median(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
else:
vmin, vmax = 1.*np.mean(np.sqrt(stk_cov.data[0,0][stkI.data > 0.])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
vmin, vmax = 1./2.*np.median(np.sqrt(stk_cov.data[0,0][stkI.data > 0.])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
im = ax.imshow(stkI.data*convert_flux*pol.data, norm=LogNorm(vmin,vmax), aspect='equal', cmap='inferno', alpha=1.)
cbar = plt.colorbar(im, cax=cbar_ax, label=r"$F_{\lambda} \cdot P$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]")
levelsPf = np.linspace(vmax*0.01, vmax*0.99, 10)
@@ -1787,12 +1788,12 @@ class pol_map(object):
self.display_selection = "total_flux"
if self.display_selection.lower() in ['total_flux']:
self.data = self.I*self.convert_flux
vmin, vmax = 1/5.0*np.median(self.data[self.data > 0.]), np.max(self.data[self.data > 0.])
vmin, vmax = 1./2.*np.median(self.data[self.data > 0.]), np.max(self.data[self.data > 0.])
norm = LogNorm(vmin, vmax)
label = r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]"
elif self.display_selection.lower() in ['pol_flux']:
self.data = self.I*self.convert_flux*self.P
vmin, vmax = 1/2.0*np.median(self.I[self.I > 0.]*self.convert_flux), np.max(self.I[self.I > 0.]*self.convert_flux)
vmin, vmax = 1./2.*np.median(self.I[self.I > 0.]*self.convert_flux), np.max(self.I[self.I > 0.]*self.convert_flux)
norm = LogNorm(vmin, vmax)
label = r"$F_{\lambda} \cdot P$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]"
elif self.display_selection.lower() in ['pol_deg']:

View File

@@ -91,7 +91,7 @@ def get_product_list(target=None, proposal_id=None):
used_pol = np.zeros(3)
for dataset in obs[obs['Proposal ID'] == pid]:
used_pol[polfilt[dataset['Filters'][0]]] += 1
if np.all(used_pol < 1):
if np.any(used_pol < 1):
obs.remove_rows(np.arange(len(obs))[obs['Proposal ID'] == pid])
tab = unique(obs, ['Target name', 'Proposal ID'])
@@ -134,8 +134,8 @@ def get_product_list(target=None, proposal_id=None):
for prod in products:
prod['proposal_id'] = results['Proposal ID'][results['Dataset']==prod['productFilename'][:len(results['Dataset'][0])].upper()][0]
#for prod in products:
# prod['target_name'] = observations['target_name'][observation['obsid']==prod['obsID']]
for prod in products:
prod['target_name'] = observations['target_name'][observations['obsid']==prod['obsID']][0]
tab = unique(products, ['target_name', 'proposal_id'])
if np.all(tab['target_name']==tab['target_name'][0]):
target = tab['target_name'][0]
@@ -156,7 +156,7 @@ def retrieve_products(target=None, proposal_id=None, output_dir='./data'):
filepaths = []
#obs_dir = path_join(data_dir, obs['prodposal_id'])
#if obs['target_name']!=target:
obs_dir = path_join(path_join(output_dir, obs['target_name']), obs['proposal_id'])
obs_dir = path_join(path_join(output_dir, target), obs['proposal_id'])
if not path_exists(obs_dir):
system("mkdir -p {0:s} {1:s}".format(obs_dir,obs_dir.replace("data","plots")))
for file in products['productFilename'][products['Obs'] == obs['Obs']]:
@@ -169,7 +169,7 @@ def retrieve_products(target=None, proposal_id=None, output_dir='./data'):
filepaths.append([obs_dir,file])
prodpaths.append(np.array(filepaths,dtype=str))
return target, np.array(prodpaths)
return target, prodpaths
if __name__ == "__main__":

View File

@@ -290,7 +290,7 @@ def crop_array(data_array, headers, error_array=None, data_mask=None, step=5,
crop_headers[i]['naxis1'], crop_headers[i]['naxis2'] = crop_array[i].shape
if display:
plt.rcParams.update({'font.size': 20})
plt.rcParams.update({'font.size': 15})
fig, ax = plt.subplots(figsize=(10,10))
convert_flux = headers[0]['photflam']
data = deepcopy(data_array[0]*convert_flux)
@@ -326,7 +326,7 @@ def crop_array(data_array, headers, error_array=None, data_mask=None, step=5,
if not(savename is None):
#fig.suptitle(savename+'_'+filt+'_crop_region')
fig.savefig(plots_folder+savename+'_'+filt+'_crop_region.png',
fig.savefig("/".join([plots_folder,savename+'_'+filt+'_crop_region.png']),
bbox_inches='tight')
plot_obs(data_array, headers, vmin=convert_flux*data_array[data_array>0.].mean()/5.,
vmax=convert_flux*data_array[data_array>0.].max(), rectangle=[rectangle,]*len(headers),
@@ -730,11 +730,12 @@ def align_data(data_array, headers, error_array=None, background=None,
data_array, ref_data, headers = full_array[:-1], full_array[-1], full_headers[:-1]
error_array = err_array[:-1]
do_shift = True
if ref_center is None:
# Define the center of the reference image to be the center pixel
#if None have been specified
ref_center = (np.array(ref_data.shape)/2).astype(int)
do_shift = False
elif ref_center.lower() in ['max', 'flux', 'maxflux', 'max_flux']:
# Define the center of the reference image to be the pixel of max flux.
ref_center = np.unravel_index(np.argmax(ref_data),ref_data.shape)
@@ -767,8 +768,10 @@ def align_data(data_array, headers, error_array=None, background=None,
rescaled_error[i,res_shift[0]:res_shift[0]+shape[1],
res_shift[1]:res_shift[1]+shape[2]] = deepcopy(error_array[i])
# Shift images to align
rescaled_image[i] = sc_shift(rescaled_image[i], shift, order=1, cval=0.)
rescaled_error[i] = sc_shift(rescaled_error[i], shift, order=1, cval=background[i])
if do_shift:
rescaled_image[i] = sc_shift(rescaled_image[i], shift, order=1, cval=0.)
rescaled_error[i] = sc_shift(rescaled_error[i], shift, order=1, cval=background[i])
curr_mask = sc_shift(res_mask, shift, order=1, cval=False)
mask_vertex = clean_ROI(curr_mask)
rescaled_mask[i,mask_vertex[2]:mask_vertex[3],mask_vertex[0]:mask_vertex[1]] = True