Allow tuning of the background estimation (subtract_error may now be a float scaling factor)

Tibeuleu
2023-02-10 17:01:25 +01:00
parent 22758be11a
commit 001759556d
6 changed files with 57 additions and 35 deletions


@@ -158,6 +158,10 @@ def bkg_fit(data, error, mask, headers, subtract_error=True, display=False, save
2D boolean array delimiting the data to work on.
headers : header list
Headers associated with the images in data_array.
subtract_error : float or bool, optional
If float, factor by which the estimated background is multiplied.
If False, the background is not subtracted.
Defaults to True (factor = 1.).
display : boolean, optional
If True, data_array will be displayed with a rectangle around the
sub-image selected for background computation.
@@ -203,7 +207,7 @@ def bkg_fit(data, error, mask, headers, subtract_error=True, display=False, save
weights = 1/chi2**2
weights /= weights.sum()
bkg = np.sum(weights*coeff[:,1])
bkg = np.sum(weights*coeff[:,1])*subtract_error if subtract_error>0 else np.sum(weights*coeff[:,1])
error_bkg[i] *= bkg
@@ -221,7 +225,7 @@ def bkg_fit(data, error, mask, headers, subtract_error=True, display=False, save
n_error_array[i] = np.sqrt(n_error_array[i]**2 + error_bkg[i]**2 + err_wav**2 + err_psf**2 + err_flat**2)
#Subtract background
if subtract_error:
if subtract_error>0:
n_data_array[i][mask] = n_data_array[i][mask] - bkg
n_data_array[i][np.logical_and(mask,n_data_array[i] <= 0.01*bkg)] = 0.01*bkg
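The new subtract_error handling above reduces to the following standalone sketch; the helper name and arguments are illustrative, not taken from the repository:

    import numpy as np

    def scale_and_subtract(image, mask, bkg_level, subtract_error=True):
        # subtract_error may be a bool or a float factor:
        # True acts as a factor of 1., False (or 0) disables subtraction.
        bkg = bkg_level * subtract_error if subtract_error > 0 else bkg_level
        out = image.copy()
        if subtract_error > 0:
            out[mask] = out[mask] - bkg
            # Floor subtracted pixels at 1% of the background so they stay positive.
            out[np.logical_and(mask, out <= 0.01 * bkg)] = 0.01 * bkg
        return out, bkg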
@@ -250,6 +254,10 @@ def bkg_hist(data, error, mask, headers, sub_type=None, subtract_error=True, dis
If str, statistic rule to be used for the number of bins in counts/s.
If int, number of bins for the counts/s histogram.
Defaults to "Freedman-Diaconis".
subtract_error : float or bool, optional
If float, factor by which the estimated background is multiplied.
If False, the background is not subtracted.
Defaults to True (factor = 1.).
display : boolean, optional
If True, data_array will be displayed with a rectangle around the
sub-image selected for background computation.
@@ -314,7 +322,7 @@ def bkg_hist(data, error, mask, headers, sub_type=None, subtract_error=True, dis
p0 = [hist.max(), binning[-1][np.argmax(hist)], fwhm, 1e-3, 1e-3, 1e-3, 1e-3]
popt, pcov = curve_fit(gausspol, binning[-1], hist, p0=p0)
coeff.append(popt)
bkg = popt[1]
bkg = popt[1]*subtract_error if subtract_error>0 else popt[1]
error_bkg[i] *= bkg
@@ -332,9 +340,9 @@ def bkg_hist(data, error, mask, headers, sub_type=None, subtract_error=True, dis
n_error_array[i] = np.sqrt(n_error_array[i]**2 + error_bkg[i]**2 + err_wav**2 + err_psf**2 + err_flat**2)
#Subtract background
if subtract_error:
if subtract_error > 0:
n_data_array[i][mask] = n_data_array[i][mask] - bkg
n_data_array[i][np.logical_and(mask,n_data_array[i] <= 0.01*bkg)] = 0.01*bkg
n_data_array[i][np.logical_and(mask,n_data_array[i] < 0.)] = 0.
std_bkg[i] = image[np.abs(image-bkg)/bkg<1.].std()
background[i] = bkg
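A simplified version of the histogram estimate in bkg_hist: a plain Gaussian fitted to the counts/s histogram stands in for the repository's gausspol model, and the fitted mean plays the role of popt[1] above (function and argument names are illustrative):

    import numpy as np
    from scipy.optimize import curve_fit

    def gauss(x, a, mu, sigma):
        return a * np.exp(-0.5 * ((x - mu) / sigma) ** 2)

    def hist_background(image, mask, bins="fd", subtract_error=True):
        # Histogram of the masked counts/s and a Gaussian fit to its peak.
        hist, edges = np.histogram(image[mask], bins=bins)
        centers = 0.5 * (edges[:-1] + edges[1:])
        p0 = [hist.max(), centers[np.argmax(hist)], image[mask].std()]
        popt, _ = curve_fit(gauss, centers, hist, p0=p0)
        # Fitted mean = background level, optionally scaled as in the hunk above.
        return popt[1] * subtract_error if subtract_error > 0 else popt[1]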
@@ -363,6 +371,10 @@ def bkg_mini(data, error, mask, headers, sub_shape=(15,15), subtract_error=True,
sub_shape : tuple, optional
Shape of the sub-image to look for. Must be odd.
Defaults to 10% of input array.
subtract_error : float or bool, optional
If float, factor by which the estimated background is multiplied.
If False, the background is not subtracted.
Defaults to True (factor = 1.).
display : boolean, optional
If True, data_array will be displayed with a rectangle around the
sub-image selected for background computation.
@@ -419,7 +431,7 @@ def bkg_mini(data, error, mask, headers, sub_shape=(15,15), subtract_error=True,
# Compute error : root mean square of the background
sub_image = image[minima[0]:minima[0]+sub_shape[0],minima[1]:minima[1]+sub_shape[1]]
#bkg = np.std(sub_image) # Previously computed using standard deviation over the background
bkg = np.sqrt(np.sum(sub_image**2)/sub_image.size)
bkg = np.sqrt(np.sum(sub_image**2)/sub_image.size)*subtract_error if subtract_error>0 else np.sqrt(np.sum(sub_image**2)/sub_image.size)
error_bkg[i] *= bkg
# Quadratically add uncertainties in the "correction factors" (see Kishimoto 1999)
@@ -436,7 +448,7 @@ def bkg_mini(data, error, mask, headers, sub_shape=(15,15), subtract_error=True,
n_error_array[i] = np.sqrt(n_error_array[i]**2 + error_bkg[i]**2 + err_wav**2 + err_psf**2 + err_flat**2)
#Subtract background
if subtract_error:
if subtract_error>0.:
n_data_array[i][mask] = n_data_array[i][mask] - bkg
n_data_array[i][np.logical_and(mask,n_data_array[i] <= 0.01*bkg)] = 0.01*bkg
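The bkg_mini estimate amounts to the root mean square of the darkest sub-image, scaled by the same factor; a brute-force sketch under those assumptions (the window search in the repository may differ):

    import numpy as np

    def rms_background(image, sub_shape=(15, 15), subtract_error=True):
        # Find the sub_shape window with the lowest total counts...
        ny, nx = sub_shape
        best, corner = np.inf, (0, 0)
        for y in range(image.shape[0] - ny + 1):
            for x in range(image.shape[1] - nx + 1):
                total = image[y:y + ny, x:x + nx].sum()
                if total < best:
                    best, corner = total, (y, x)
        sub_image = image[corner[0]:corner[0] + ny, corner[1]:corner[1] + nx]
        # ...and take its root mean square as the background estimate.
        rms = np.sqrt(np.sum(sub_image ** 2) / sub_image.size)
        return rms * subtract_error if subtract_error > 0 else rms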


@@ -316,7 +316,7 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
if display.lower() in ['intensity']:
# If no display selected, show intensity map
display='i'
vmin, vmax = 1/10*np.median(stkI.data[stkI.data > 0.]*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
vmin, vmax = 3.*np.mean(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
im = ax.imshow(stkI.data*convert_flux, norm=LogNorm(vmin,vmax), aspect='equal', cmap='inferno', alpha=1.)
cbar = plt.colorbar(im, cax=cbar_ax, label=r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]")
levelsI = np.linspace(vmax*0.01, vmax*0.99, 10)
@@ -327,7 +327,7 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
# Display polarisation flux
display='pf'
pf_mask = (stkI.data > 0.) * (pol.data > 0.)
vmin, vmax = 1/10*np.median(stkI.data[stkI.data > 0.]*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
vmin, vmax = 3.*np.mean(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
im = ax.imshow(stkI.data*convert_flux*pol.data, norm=LogNorm(vmin,vmax), aspect='equal', cmap='inferno', alpha=1.)
cbar = plt.colorbar(im, cax=cbar_ax, label=r"$F_{\lambda} \cdot P$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]")
levelsPf = np.linspace(vmax*0.01, vmax*0.99, 10)
@@ -382,7 +382,7 @@ def polarization_map(Stokes, data_mask=None, rectangle=None, SNRp_cut=3., SNRi_c
#ax.clabel(cont,inline=True,fontsize=6)
else:
# Defaults to intensity map
vmin, vmax = 1/10*np.median(stkI.data[stkI.data > 0.]*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
vmin, vmax = 3.*np.mean(np.sqrt(stk_cov.data[0,0][mask])*convert_flux), np.max(stkI.data[stkI.data > 0.]*convert_flux)
#im = ax.imshow(stkI.data*convert_flux, vmin=vmin, vmax=vmax, aspect='equal', cmap='inferno', alpha=1.)
#cbar = plt.colorbar(im, cax=cbar_ax, label=r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA$]")
im = ax.imshow(stkI.data*convert_flux, norm=LogNorm(vmin,vmax), aspect='equal', cmap='inferno', alpha=1.)
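The three vmin changes above tie the color-scale floor to the noise level (three times the mean per-pixel Stokes I error) instead of a tenth of the median flux; as a sketch with illustrative argument names:

    import numpy as np
    from matplotlib.colors import LogNorm

    def intensity_norm(I_map, I_var, mask, convert_flux=1.0):
        # Floor at 3x the mean per-pixel error (sqrt of the I-I covariance),
        # ceiling at the brightest positive pixel.
        vmin = 3. * np.mean(np.sqrt(I_var[mask]) * convert_flux)
        vmax = np.max(I_map[I_map > 0.] * convert_flux)
        return LogNorm(vmin, vmax)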
@@ -1745,12 +1745,12 @@ class pol_map(object):
self.display_selection = "total_flux"
if self.display_selection.lower() in ['total_flux']:
self.data = self.I*self.convert_flux
vmin, vmax = 1/10.*np.median(self.data[self.data > 0.]), np.max(self.data[self.data > 0.])
vmin, vmax = 1/2.0*np.median(self.data[self.data > 0.]), np.max(self.data[self.data > 0.])
norm = LogNorm(vmin, vmax)
label = r"$F_{\lambda}$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]"
elif self.display_selection.lower() in ['pol_flux']:
self.data = self.I*self.convert_flux*self.P
vmin, vmax = 1/10.*np.median(self.I[self.I > 0.]*self.convert_flux), np.max(self.I[self.I > 0.]*self.convert_flux)
vmin, vmax = 1/2.0*np.median(self.I[self.I > 0.]*self.convert_flux), np.max(self.I[self.I > 0.]*self.convert_flux)
norm = LogNorm(vmin, vmax)
label = r"$F_{\lambda} \cdot P$ [$ergs \cdot cm^{-2} \cdot s^{-1} \cdot \AA^{-1}$]"
elif self.display_selection.lower() in ['pol_deg']:
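In the interactive pol_map viewer the floor is simply raised from a tenth to half of the median positive value; schematically:

    import numpy as np
    from matplotlib.colors import LogNorm

    def interactive_norm(data):
        # Floor at half the median positive value (previously a tenth).
        positive = data[data > 0.]
        return LogNorm(0.5 * np.median(positive), positive.max())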


@@ -327,8 +327,8 @@ def crop_array(data_array, headers, error_array=None, data_mask=None, step=5,
#fig.suptitle(savename+'_'+filt+'_crop_region')
fig.savefig(plots_folder+savename+'_'+filt+'_crop_region.png',
bbox_inches='tight')
plot_obs(data_array, headers, vmin=data_array.min(),
vmax=data_array.max(), rectangle=[rectangle,]*len(headers),
plot_obs(data_array, headers, vmin=data_array[data_array>0.].min(),
vmax=data_array[data_array>0.].max(), rectangle=[rectangle,]*len(headers),
savename=savename+'_crop_region',plots_folder=plots_folder)
plt.show()
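The crop_array change restricts vmin/vmax to strictly positive pixels, which keeps zeros and negative sky-subtracted values out of the display range; a minimal sketch of that selection:

    import numpy as np

    def positive_range(data_array):
        # Display range over strictly positive pixels only.
        positive = data_array[data_array > 0.]
        return positive.min(), positive.max()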
@@ -436,6 +436,10 @@ def get_error(data_array, headers, error_array=None, data_mask=None,
If int, number of bins for the counts/s histogram.
If tuple, shape of the sub-image to look for. Must be odd.
Defaults to None.
subtract_error : float or bool, optional
If float, factor by which the estimated background is multiplied.
If False, the background is not subtracted.
Defaults to True (factor = 1.).
display : boolean, optional
If True, data_array will be displayed with a rectangle around the
sub-image selected for background computation.