change naming convention and cleanup

Author: Tibeuleu
Date: 2023-02-09 15:28:21 +01:00
parent fc80bafc66
commit ce670f4a19
8 changed files with 31 additions and 21 deletions


@@ -142,7 +142,7 @@ def main():
align_center = 'image' #If None, images will be aligned to the image center
display_data = False
# Smoothing
smoothing_function = 'weighted_gaussian' #gaussian_after, weighted_gaussian_after, gaussian, weighted_gaussian or combine
smoothing_function = 'combine' #gaussian_after, weighted_gaussian_after, gaussian, weighted_gaussian or combine
smoothing_FWHM = 0.20 #If None, no smoothing is done
smoothing_scale = 'arcsec' #pixel or arcsec
# Rotation
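When smoothing_scale is 'arcsec', the FWHM above (0.20") has to be converted to a kernel width in pixels before smoothing. A minimal sketch of that conversion, assuming an FOC-like plate scale of about 0.014"/pixel (the helper name and scale value are illustrative, not taken from the pipeline):

import numpy as np

def fwhm_to_sigma_px(fwhm_arcsec, pixel_scale_arcsec=0.014):
    # Convert a FWHM in arcsec to a Gaussian standard deviation in pixels.
    # The plate scale is an assumed FOC-like value; the pipeline presumably
    # reads the actual scale from the FITS headers.
    fwhm_px = fwhm_arcsec / pixel_scale_arcsec
    return fwhm_px / (2.0 * np.sqrt(2.0 * np.log(2.0)))  # FWHM = 2*sqrt(2*ln2)*sigma

sigma_px = fwhm_to_sigma_px(0.20)  # smoothing_FWHM with smoothing_scale='arcsec'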
@@ -153,7 +153,7 @@ def main():
final_display = True
# Polarization map output
figname = 'NGC1068_FOC' #target/instrument name
figtype = '_wg_020' #additional information
figtype = '_c_020' #additional information
SNRp_cut = 5. #keep P measurements with SNR > 5
SNRi_cut = 50. #keep I measurements with SNR > 50, which implies an uncertainty in P of about 2.8%.
step_vec = 1 #plot all vectors in the array. if step_vec = 2, then every other vector will be plotted
@@ -202,15 +202,18 @@ def main():
# The FOC FWHM has been estimated at about 0.03" across the 1500-5000 Angstrom band, which is about 2 detector pixels wide
# see Jedrzejewski, R.; Nota, A.; Hack, W. J., A Comparison Between FOC and WFPC2
# Bibcode : 1995chst.conf...10J
I_stokes, Q_stokes, U_stokes, Stokes_cov = proj_red.compute_Stokes(data_array, error_array, data_mask, headers, FWHM=smoothing_FWHM, scale=smoothing_scale, smoothing=smoothing_function,transmitcorr=False)
I_stokes, Q_stokes, U_stokes, Stokes_cov = proj_red.compute_Stokes(data_array, error_array, data_mask, headers, FWHM=smoothing_FWHM, scale=smoothing_scale, smoothing=smoothing_function,transmitcorr=True)
I_bkg, Q_bkg, U_bkg, S_cov_bkg = proj_red.compute_Stokes(background, background_error, np.array(True).reshape(1,1), headers, FWHM=None, scale=smoothing_scale, smoothing=smoothing_function,transmitcorr=True)
## Step 3:
# Rotate images to have North up
if rotate_stokes:
I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers = proj_red.rotate_Stokes(I_stokes, Q_stokes, U_stokes, Stokes_cov, data_mask, headers, SNRi_cut=None)
I_bkg, Q_bkg, U_bkg, S_cov_bkg, _, _ = proj_red.rotate_Stokes(I_bkg, Q_bkg, U_bkg, S_cov_bkg, np.array(True).reshape(1,1), headers, SNRi_cut=None)
# Compute polarimetric parameters (polarization degree and angle).
P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P = proj_red.compute_pol(I_stokes, Q_stokes, U_stokes, Stokes_cov, headers)
P_bkg, debiased_P_bkg, s_P_bkg, s_P_P_bkg, PA_bkg, s_PA_bkg, s_PA_P_bkg = proj_red.compute_pol(I_bkg, Q_bkg, U_bkg, S_cov_bkg, headers)
## Step 4:
# Save image to FITS.
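compute_pol turns the Stokes maps into a polarization degree and angle with their uncertainties. As a reminder of the underlying relations only (the pipeline's own routine also propagates the full Stokes covariance), a rough element-wise sketch:

import numpy as np

def stokes_to_pol(I, Q, U, sigma_P=None):
    # Element-wise polarization degree and angle from Stokes maps.
    P = np.sqrt(Q**2 + U**2) / I
    PA = 0.5 * np.degrees(np.arctan2(U, Q))  # polarization angle in degrees
    if sigma_P is None:
        return P, PA
    # Simple quadratic debiasing using a P uncertainty map (Wardle & Kronberg style).
    debiased_P = np.sqrt(np.clip(P**2 - sigma_P**2, 0., None))
    return P, debiased_P, PA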
@@ -229,6 +232,10 @@ def main():
print("F_int({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(headers[0]['photplam'],*proj_plots.sci_not(Stokes_test[0].data[data_mask].sum()*headers[0]['photflam'],np.sqrt(Stokes_test[3].data[0,0][data_mask].sum())*headers[0]['photflam'],2,out=int)))
print("P_int = {0:.1f} ± {1:.1f} %".format(headers[0]['p_int']*100.,np.ceil(headers[0]['p_int_err']*1000.)/10.))
print("PA_int = {0:.1f} ± {1:.1f} °".format(headers[0]['pa_int'],np.ceil(headers[0]['pa_int_err']*10.)/10.))
# Background values
print("F_bkg({0:.0f} Angs) = ({1} ± {2})e{3} ergs.cm^-2.s^-1.Angs^-1".format(headers[0]['photplam'],*proj_plots.sci_not(I_bkg[0,0]*headers[0]['photflam'],np.sqrt(S_cov_bkg[0,0][0,0])*headers[0]['photflam'],2,out=int)))
print("P_bkg = {0:.1f} ± {1:.1f} %".format(debiased_P_bkg[0,0]*100.,np.ceil(s_P_bkg[0,0]*1000.)/10.))
print("PA_bkg = {0:.1f} ± {1:.1f} °".format(PA_bkg[0,0],np.ceil(s_PA_bkg[0,0]*10.)/10.))
# Plot polarization map (Background is either total Flux, Polarization degree or Polarization degree error).
if px_scale.lower() not in ['full','integrate'] and final_display:
proj_plots.polarization_map(deepcopy(Stokes_test), data_mask, SNRp_cut=SNRp_cut, SNRi_cut=SNRi_cut, step_vec=step_vec, savename=figname+figtype, plots_folder=plots_folder)
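The prints above deliberately round the quoted uncertainties up (np.ceil) so they are never understated. A small standalone illustration of that convention (the function name is illustrative):

import numpy as np

def pct_round_up(frac_err, decimals=1):
    # Express a fractional uncertainty as a percentage, rounded *up* to the
    # requested number of decimals -- the np.ceil(err*1000.)/10. pattern above.
    factor = 10.0**decimals
    return np.ceil(frac_err * 100.0 * factor) / factor

pct_round_up(0.0123)  # -> 1.3 (%), never rounded down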

[Two binary image files changed (239 KiB and 242 KiB before this commit); previews not shown.]


@@ -15,12 +15,13 @@ root_dir = path_join('/home/t.barnouin/Documents/Thesis/HST')
root_dir_K = path_join(root_dir,'Kishimoto','output')
root_dir_S = path_join(root_dir,'FOC_Reduction','output')
root_dir_data_S = path_join(root_dir,'FOC_Reduction','data','NGC1068_x274020')
filename_S = "NGC1068_FOC_bg_b_10px.fits"
data_K = {}
data_S = {}
for d,i in zip(['I','Q','U','P','PA','sI','sQ','sU','sP','sPA'],[0,1,2,5,8,(3,0,0),(3,1,1),(3,2,2),6,9]):
data_K[d] = np.loadtxt(path_join(root_dir_K,d+'.txt'))
with fits.open(path_join(root_dir_data_S,'NGC1068_FOC_bin10px.fits')) as f:
with fits.open(path_join(root_dir_data_S,filename_S)) as f:
if not type(i) is int:
data_S[d] = np.sqrt(f[i[0]].data[i[1],i[2]])
else:
@@ -110,7 +111,7 @@ print("This pipeline : average sI/I={0:.2f} ; sQ/Q={1:.2f} ; sU/U={2:.2f} ; sP/P
print("Kishimoto's pipeline : average sI/I={0:.2f} ; sQ/Q={1:.2f} ; sU/U={2:.2f} ; sP/P={3:.2f}".format(np.mean(data_K['sI'][data_S['mask']]/data_K['I'][data_S['mask']]),np.mean(data_K['sQ'][data_S['mask']]/data_K['Q'][data_S['mask']]),np.mean(data_K['sU'][data_S['mask']]/data_K['U'][data_S['mask']]),np.mean(data_K['sP'][data_S['mask']]/data_K['P'][data_S['mask']])))
for d,i in zip(['I','Q','U','P','PA','sI','sQ','sU','sP','sPA'],[0,1,2,5,8,(3,0,0),(3,1,1),(3,2,2),6,9]):
data_K[d] = np.loadtxt(path_join(root_dir_K,d+'.txt'))
with fits.open(path_join(root_dir_data_S,'NGC1068_FOC_bin10px.fits')) as f:
with fits.open(path_join(root_dir_data_S,filename_S)) as f:
if not type(i) is int:
data_S[d] = np.sqrt(f[i[0]].data[i[1],i[2]])
else:
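In this comparison script an int index selects a whole HDU of the reduced FITS file, while a tuple picks one element of the covariance cube and takes its square root to get a 1-sigma map. A minimal sketch of that access pattern, assuming the HDU layout implied by the zip above (anything beyond that is an assumption):

import numpy as np
from astropy.io import fits

def read_quantity(fits_path, index):
    # int index: whole HDU (e.g. 0 -> I, 5 -> P); tuple (hdu, i, j): square
    # root of element (i, j) of the covariance cube stored in that HDU.
    with fits.open(fits_path) as f:
        if isinstance(index, tuple):
            hdu, i, j = index
            return np.sqrt(f[hdu].data[i, j])
        return f[index].data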


@@ -105,10 +105,10 @@ def display_bkg(data, background, std_bkg, headers, histograms=None, binning=Non
if not(savename is None):
fig2.savefig(plots_folder+savename+'_'+filt+'_background_location.png', bbox_inches='tight')
if not(rectangle is None):
plot_obs(data, headers, vmin=data.min(), vmax=data.max(), rectangle=rectangle,
plot_obs(data, headers, vmin=data[data > 0.].min(), vmax=data[data > 0.].max(), rectangle=rectangle,
savename=savename+"_background_location",plots_folder=plots_folder)
elif not(rectangle is None):
plot_obs(data, headers, vmin=vmin, vmax=vmax, rectangle=rectangle)
plot_obs(data, headers, vmin=data[data > 0.].min(), vmax=data[data > 0.].max(), rectangle=rectangle)
plt.show()
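Using data[data > 0.].min() instead of data.min() matters because these images are displayed with a logarithmic norm and may contain zero or negative pixels. A self-contained illustration of the guard, independent of the pipeline's plotting helpers:

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm

data = np.random.poisson(2., (64, 64)).astype(float)  # toy image containing zeros
positive = data[data > 0.]
# LogNorm needs strictly positive limits, so derive them from the positive pixels only.
plt.imshow(data, norm=LogNorm(vmin=positive.min(), vmax=positive.max()),
           origin='lower', cmap='gray')
plt.colorbar()
plt.show()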


@@ -86,7 +86,7 @@ def sci_not(v,err,rnd=1,out=str):
return *output[1:],-power
def plot_obs(data_array, headers, shape=None, vmin=0., vmax=6., rectangle=None,
def plot_obs(data_array, headers, shape=None, vmin=None, vmax=None, rectangle=None,
savename=None, plots_folder=""):
"""
Plots raw observation imagery with some information on the instrument and
@@ -135,8 +135,10 @@ def plot_obs(data_array, headers, shape=None, vmin=0., vmax=6., rectangle=None,
exptime = headers[i]['exptime']
filt = headers[i]['filtnam1']
#plots
if vmin is None or vmax is None:
vmin, vmax = data[data>0.].min()/10., data[data>0.].max()
#im = ax.imshow(data, vmin=vmin, vmax=vmax, origin='lower', cmap='gray')
im = ax.imshow(data, norm=LogNorm(data[data>0.].min()/10.,data.max()), origin='lower', cmap='gray')
im = ax.imshow(data, norm=LogNorm(vmin,vmax), origin='lower', cmap='gray')
if not(rectangle is None):
x, y, width, height, angle, color = rectangle[i]
ax.add_patch(Rectangle((x, y), width, height, angle=angle,
@@ -314,7 +316,7 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
if display.lower() in ['intensity']:
# If no display selected, show intensity map
display='i'
vmin, vmax = 3.*np.mean(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
vmin, vmax = 1/10*np.median(stkI.data[stkI.data > 0.]*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
im = ax.imshow(stkI.data*convert_flux, norm=LogNorm(vmin,vmax), aspect='equal', cmap='inferno', alpha=1.)
cbar = plt.colorbar(im, cax=cbar_ax, label=r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]")
levelsI = np.linspace(vmax*0.01, vmax*0.99, 10)
@@ -325,7 +327,7 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
# Display polarisation flux
display='pf'
pf_mask = (stkI.data > 0.) * (pol.data > 0.)
vmin, vmax = 3.*np.mean(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
vmin, vmax = 1/10*np.median(stkI.data[stkI.data > 0.]*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
im = ax.imshow(stkI.data*convert_flux*pol.data, norm=LogNorm(vmin,vmax), aspect='equal', cmap='inferno', alpha=1.)
cbar = plt.colorbar(im, cax=cbar_ax, label=r"$F_{\lambda} \cdot P$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]")
levelsPf = np.linspace(vmax*0.01, vmax*0.99, 10)
@@ -380,7 +382,7 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
#ax.clabel(cont,inline=True,fontsize=6)
else:
# Defaults to intensity map
vmin, vmax = 3.*np.mean(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
vmin, vmax = 1/10*np.median(stkI.data[stkI.data > 0.]*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
#im = ax.imshow(stkI.data*convert_flux, vmin=vmin, vmax=vmax, aspect='equal', cmap='inferno', alpha=1.)
#cbar = plt.colorbar(im, cax=cbar_ax, label=r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA$]")
im = ax.imshow(stkI.data*convert_flux, norm=LogNorm(vmin,vmax), aspect='equal', cmap='inferno', alpha=1.)
@@ -1743,12 +1745,12 @@ class pol_map(object):
self.display_selection = "total_flux"
if self.display_selection.lower() in ['total_flux']:
self.data = self.I*self.convert_flux
vmin, vmax = .5*np.mean(np.sqrt(self.IQU_cov[0,0][self.IQU_cov[0,0] < 3.*self.I])*self.convert_flux), np.max(self.data[self.data > 0.])
vmin, vmax = 1/10.*np.median(self.data[self.data > 0.]), np.max(self.data[self.data > 0.])
norm = LogNorm(vmin, vmax)
label = r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]"
elif self.display_selection.lower() in ['pol_flux']:
self.data = self.I*self.convert_flux*self.P
vmin, vmax = .5*np.mean(np.sqrt(self.IQU_cov[0,0][self.IQU_cov[0,0] < 3.*self.I])*self.convert_flux), np.max(self.I[self.data > 0.]*self.convert_flux)
vmin, vmax = 1/10.*np.median(self.I[self.I > 0.]*self.convert_flux), np.max(self.I[self.I > 0.]*self.convert_flux)
norm = LogNorm(vmin, vmax)
label = r"$F_{\lambda} \cdot P$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]"
elif self.display_selection.lower() in ['pol_deg']:
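Several hunks above replace a noise-based lower display limit with one tenth of the median positive flux, which ties the display range to the flux distribution itself rather than to the mean noise level. A sketch of that convention with toy values (not the pipeline's objects):

import numpy as np
from matplotlib.colors import LogNorm

def log_display_limits(flux_map):
    # vmin: a tenth of the median positive flux; vmax: the maximum positive flux.
    positive = flux_map[flux_map > 0.]
    return 0.1 * np.median(positive), positive.max()

flux = np.random.lognormal(mean=-40., sigma=1., size=(128, 128))  # toy flux map
vmin, vmax = log_display_limits(flux)
norm = LogNorm(vmin, vmax)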


@@ -46,7 +46,7 @@ import matplotlib.dates as mdates
from matplotlib.patches import Rectangle
from matplotlib.colors import LogNorm
from scipy.ndimage import rotate as sc_rotate, shift as sc_shift
from scipy.signal import convolve2d
from scipy.signal import fftconvolve
from astropy.wcs import WCS
from astropy import log
log.setLevel('ERROR')
@@ -683,7 +683,7 @@ def align_data(data_array, headers, error_array=None, background=None,
raise ValueError("All images in data_array must have same shape as\
ref_data")
if (error_array is None) or (background is None):
_, error_array, headers, background = get_error(data_array, headers, sub_type=(10,10), return_background=True)
_, error_array, headers, background = get_error(data_array, headers, return_background=True)
# Crop out any null edges
#(ref_data must be cropped as well)
@@ -866,8 +866,8 @@ def smooth_data(data_array, error_array, data_mask, headers, FWHM=1.,
weights /= weights.sum()
kernel = gaussian2d(x, y, stdev)
kernel /= kernel.sum()
smoothed[i] = np.where(data_mask, convolve2d(image*weights,kernel,'same')/convolve2d(weights,kernel,'same'), image)
error[i] = np.where(data_mask, np.sqrt(convolve2d(image_error**2*weights**2,kernel**2,'same'))/convolve2d(weights,kernel,'same'), image_error)
smoothed[i] = np.where(data_mask, fftconvolve(image*weights,kernel,'same')/fftconvolve(weights,kernel,'same'), image)
error[i] = np.where(data_mask, np.sqrt(fftconvolve(image_error**2*weights**2,kernel**2,'same'))/fftconvolve(weights,kernel,'same'), image_error)
# Nan handling
error[i][np.logical_or(np.isnan(smoothed[i]*error[i]),1-data_mask)] = 0.
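scipy.signal.fftconvolve with mode='same' returns the same result as convolve2d up to floating-point error, but is much faster for the large Gaussian kernels used in the smoothing step. A standalone sketch of the weighted-smoothing pattern above, with toy arrays standing in for the pipeline's data:

import numpy as np
from scipy.signal import fftconvolve

rng = np.random.default_rng(0)
image = rng.normal(size=(256, 256))
weights = np.ones_like(image)  # e.g. inverse-variance weights in the real pipeline
x, y = np.meshgrid(np.arange(-10, 11), np.arange(-10, 11))
kernel = np.exp(-(x**2 + y**2) / (2. * 3.**2))  # Gaussian kernel, sigma = 3 px
kernel /= kernel.sum()

# Weighted smoothing: convolve data*weights and weights separately, then divide,
# so low-weight pixels do not bias their neighbours; mode='same' keeps the shape.
smoothed = (fftconvolve(image * weights, kernel, mode='same')
            / fftconvolve(weights, kernel, mode='same'))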
@@ -1007,7 +1007,7 @@ def polarizer_avg(data_array, error_array, data_mask, headers, FWHM=None,
list_head = headers60
elif header['filtnam1']=='POL120':
list_head = headers120
header['exptime'] = np.sum([head['exptime'] for head in list_head])#/len(list_head)
header['exptime'] = np.sum([head['exptime'] for head in list_head])
pol_headers = [headers0[0], headers60[0], headers120[0]]
# Get image shape


@@ -6,7 +6,7 @@ from copy import deepcopy
from lib.plots import overplot_radio, overplot_pol, align_pol
from matplotlib.colors import LogNorm
Stokes_UV = fits.open("../data/IC5063_x3nl030/IC5063_FOC_combine_FWHM020.fits")
Stokes_UV = fits.open("../data/IC5063_x3nl030/IC5063_FOC_c_020.fits")
Stokes_18GHz = fits.open("../data/IC5063_x3nl030/radio/IC5063.18GHz.fits")
Stokes_24GHz = fits.open("../data/IC5063_x3nl030/radio/IC5063.24GHz.fits")
Stokes_103GHz = fits.open("../data/IC5063_x3nl030/radio/I5063_103GHz.fits")
@@ -43,7 +43,7 @@ E.plot(levels=levels357GHz, SNRp_cut=2.0, SNRi_cut=15.0, savename='../plots/IC50
#F.plot(SNRp_cut=3.0, SNRi_cut=80.0, savename='../plots/IC5063_x3nl030/S2_overplot_forced.png', norm=LogNorm(vmin=5e-20,vmax=5e-18))
G = overplot_pol(Stokes_UV, Stokes_IR, cmap='inferno')
G.plot(SNRp_cut=2.0, SNRi_cut=15.0, savename='../plots/IC5063_x3nl030/IR_overplot_forced.png', norm=LogNorm(vmin=1e-17,vmax=5e-15), cmap='inferno_r')
G.plot(SNRp_cut=1.0, SNRi_cut=10.0, savename='../plots/IC5063_x3nl030/IR_overplot_forced.png', norm=LogNorm(vmin=1e-17,vmax=5e-15), cmap='inferno_r')
#data_folder1 = "../data/M87/POS1/"
#plots_folder1 = "../plots/M87/POS1/"
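The renamed FITS file follows the shorter '<target>_FOC_<smoothing abbreviation>_<FWHM>' convention set by figname and figtype in the reduction script. A hypothetical helper that would build such a suffix; the abbreviation map is only inferred from '_wg_020' and '_c_020' and is not part of the pipeline:

# Hypothetical naming helper, inferred from the '_wg_020' -> '_c_020' change.
SMOOTHING_ABBREV = {'gaussian': 'g', 'weighted_gaussian': 'wg', 'combine': 'c'}

def make_figtype(smoothing_function, smoothing_FWHM):
    # e.g. ('combine', 0.20) -> '_c_020', matching 'IC5063_FOC_c_020.fits'
    return "_{0}_{1:03d}".format(SMOOTHING_ABBREV[smoothing_function],
                                 int(round(smoothing_FWHM * 100)))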