Add Stokes_cov_stat to FITS output and recompute P_debiased in plots

2025-08-08 11:45:11 +02:00
parent e639695618
commit f4effac343
4 changed files with 223 additions and 142 deletions
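In short: the statistical-only covariance matrix Stokes_cov_stat is now computed in compute_Stokes, carried through rotate_Stokes and compute_pol, written to the FITS output as its own extension, and used to debias the polarization degree. A minimal sketch of the debiasing step as it recurs throughout this diff (the function name is illustrative):

    import numpy as np

    def debias_P(P, sP_stat):
        # Subtract the statistical variance in quadrature, clipping at zero when
        # the noise term dominates, as in the diff's
        # `debiased_P = np.sqrt(P**2 - P_stat_err**2) if P**2 > P_stat_err**2 else 0.0`
        return np.sqrt(P**2 - sP_stat**2) if P**2 > sP_stat**2 else 0.0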

View File

@@ -41,7 +41,7 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
     # Background estimation
     error_sub_type = "scott"  # sqrt, sturges, rice, scott, freedman-diaconis (default) or shape (example (51, 51))
-    subtract_error = 0.50
+    subtract_error = 0.80
     display_bkg = False
     # Data binning
@@ -203,24 +203,38 @@ def main(target=None, proposal_id=None, infiles=None, output_dir="./data", crop=
     # The FWHM of FOC has been estimated at about 0.03" across the 1500-5000 Angstrom band, which is about 2 detector pixels wide
     # see Jedrzejewski, R.; Nota, A.; Hack, W. J., A Comparison Between FOC and WFPC2
     # Bibcode : 1995chst.conf...10J
-    Stokes, Stokes_cov, header_stokes, s_IQU_stat = proj_red.compute_Stokes(
+    Stokes, Stokes_cov, header_stokes, Stokes_cov_stat = proj_red.compute_Stokes(
         data_array, error_array, data_mask, headers, FWHM=smoothing_FWHM, scale=smoothing_scale, smoothing=smoothing_function, transmitcorr=transmitcorr
     )

     # Step 3:
     # Rotate images to have North up
     if rotate_North:
-        Stokes, Stokes_cov, data_mask, header_stokes, s_IQU_stat = proj_red.rotate_Stokes(
-            Stokes, Stokes_cov, data_mask, header_stokes, s_IQU_stat=s_IQU_stat, SNRi_cut=None
+        Stokes, Stokes_cov, data_mask, header_stokes, Stokes_cov_stat = proj_red.rotate_Stokes(
+            Stokes, Stokes_cov, data_mask, header_stokes, Stokes_cov_stat=Stokes_cov_stat, SNRi_cut=None
         )

     # Compute polarimetric parameters (polarization degree and angle).
-    P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P = proj_red.compute_pol(Stokes, Stokes_cov, header_stokes, s_IQU_stat=s_IQU_stat)
+    P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P = proj_red.compute_pol(Stokes, Stokes_cov, header_stokes, Stokes_cov_stat=Stokes_cov_stat)

     # Step 4:
     # Save image to FITS.
     figname = "_".join([figname, figtype]) if figtype != "" else figname
     Stokes_hdul = proj_fits.save_Stokes(
-        Stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P, header_stokes, data_mask, figname, data_folder=data_folder, return_hdul=True
+        Stokes,
+        Stokes_cov,
+        Stokes_cov_stat,
+        P,
+        debiased_P,
+        s_P,
+        s_P_P,
+        PA,
+        s_PA,
+        s_PA_P,
+        header_stokes,
+        data_mask,
+        figname,
+        data_folder=data_folder,
+        return_hdul=True,
     )
     outfiles.append("/".join([data_folder, Stokes_hdul[0].header["FILENAME"] + ".fits"]))

View File

@@ -105,7 +105,9 @@ def get_obs_data(infiles, data_folder="", compute_flux=False):
     return data_array, headers


-def save_Stokes(Stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P, header_stokes, data_mask, filename, data_folder="", return_hdul=False):
+def save_Stokes(
+    Stokes, Stokes_cov, Stokes_cov_stat, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P, header_stokes, data_mask, filename, data_folder="", return_hdul=False
+):
     """
     Save computed polarimetry parameters to a single FITS file,
     updating the header accordingly.
@@ -116,8 +118,9 @@ def save_Stokes(Stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P,
         Stokes parameters I, Q, U, V, polarization degree and its debiased
         value, its error propagated and assuming Poisson noise, polarization
         angle, its error propagated and assuming Poisson noise.
-    Stokes_cov : numpy.ndarray
-        Covariance matrix of the Stokes parameters I, Q, U.
+    Stokes_cov, Stokes_cov_stat : numpy.ndarray
+        Covariance matrix of the Stokes parameters I, Q, U, V, and the
+        statistical-only part of that covariance.
     headers : header list
         Reference header from which some keywords will be copied (CRVAL, CDELT,
         INSTRUME, PROPOSID, TARGNAME, ORIENTAT, EXPTOT).
@@ -135,14 +138,14 @@ def save_Stokes(Stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P,
     ----------
     Return:
     hdul : astropy.io.fits.hdu.hdulist.HDUList
-        HDUList containing the Stokes cube in the PrimaryHDU, then
-        P, s_P, PA, s_PA in this order. Headers have been updated to relevant
-        informations (WCS, orientation, data_type).
+        HDUList containing the Stokes cube in the PrimaryHDU, then Stokes_cov,
+        Stokes_cov_stat, P, s_P, PA, s_PA in this order. Headers have been
+        updated with the relevant information (WCS, orientation, data_type).
         Only returned if return_hdul is True.
     """
     # Create new WCS object given the modified images
     new_wcs = WCS(header_stokes).celestial.deepcopy()
-    header = remove_stokes_axis_from_header(header_stokes).copy()
+    header = remove_stokes_axis_from_header(header_stokes).copy() if header_stokes["NAXIS"] > 2 else header_stokes.copy()

     if data_mask.shape != (1, 1):
         vertex = clean_ROI(data_mask)
@@ -176,6 +179,7 @@ def save_Stokes(Stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P,
     if data_mask.shape != (1, 1):
         Stokes = Stokes[:, vertex[2] : vertex[3], vertex[0] : vertex[1]]
         Stokes_cov = Stokes_cov[:, :, vertex[2] : vertex[3], vertex[0] : vertex[1]]
+        Stokes_cov_stat = Stokes_cov_stat[:, :, vertex[2] : vertex[3], vertex[0] : vertex[1]]
         P = P[vertex[2] : vertex[3], vertex[0] : vertex[1]]
         debiased_P = debiased_P[vertex[2] : vertex[3], vertex[0] : vertex[1]]
         s_P = s_P[vertex[2] : vertex[3], vertex[0] : vertex[1]]
@@ -192,7 +196,7 @@ def save_Stokes(Stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P,
     # Add I_stokes as PrimaryHDU
     header["datatype"] = ("STOKES", "type of data stored in the HDU")
     Stokes[np.broadcast_to((1 - data_mask).astype(bool), Stokes.shape)] = 0.0
-    hdu_head = add_stokes_axis_to_header(header, 2)
+    hdu_head = add_stokes_axis_to_header(header, 0)
     primary_hdu = fits.PrimaryHDU(data=Stokes, header=hdu_head)
     primary_hdu.name = "STOKES"
     hdul.append(primary_hdu)
@@ -200,6 +204,7 @@ def save_Stokes(Stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P,
     # Add Stokes_cov, P, s_P, PA, s_PA to the HDUList
     for data, name in [
         [Stokes_cov, "STOKES_COV"],
+        [Stokes_cov_stat, "STOKES_COV_STAT"],
         [P, "Pol_deg"],
         [debiased_P, "Pol_deg_debiased"],
         [s_P, "Pol_deg_err"],
@@ -211,9 +216,9 @@ def save_Stokes(Stokes, Stokes_cov, P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P,
     ]:
         hdu_head = header.copy()
         hdu_head["datatype"] = name
-        if name == "STOKES_COV":
-            hdu_head = add_stokes_axis_to_header(hdu_head, 2)
-            hdu_head = add_stokes_axis_to_header(hdu_head, 3)
+        if name[:10] == "STOKES_COV":
+            hdu_head = add_stokes_axis_to_header(hdu_head, 0)
+            hdu_head = add_stokes_axis_to_header(hdu_head, 0)
             data[np.broadcast_to((1 - data_mask).astype(bool), data.shape)] = 0.0
         else:
             data[(1 - data_mask).astype(bool)] = 0.0
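For downstream use, a file written by the updated save_Stokes exposes the new extension by name. A minimal reading sketch, assuming astropy and a placeholder filename:

    from astropy.io import fits

    with fits.open("target_FOC.fits") as hdul:  # placeholder filename
        I_map = hdul["STOKES"].data[0]           # Stokes I plane
        cov = hdul["STOKES_COV"].data            # full covariance of I, Q, U, shape (3, 3, ny, nx)
        cov_stat = hdul["STOKES_COV_STAT"].data  # statistical-only (Poisson) part, same shape
        P_db = hdul["POL_DEG_DEBIASED"].data     # debiased polarization degree map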

View File

@@ -2113,7 +2113,7 @@ class crop_Stokes(crop_map):
         for dataset in self.hdul_crop:
             if dataset.header["datatype"] == "STOKES":
                 dataset.data = deepcopy(dataset.data[:, vertex[2] : vertex[3], vertex[0] : vertex[1]])
-            elif dataset.header["datatype"] == "STOKES_COV":
+            elif dataset.header["datatype"][:10] == "STOKES_COV":
                 dataset.data = deepcopy(dataset.data[:, :, vertex[2] : vertex[3], vertex[0] : vertex[1]])
             else:
                 dataset.data = deepcopy(dataset.data[vertex[2] : vertex[3], vertex[0] : vertex[1]])
@@ -2142,6 +2142,12 @@ class crop_Stokes(crop_map):
         IQ_diluted_err = np.sqrt(np.sum(self.hdul_crop["STOKES_COV"].data[0, 1][mask] ** 2))
         IU_diluted_err = np.sqrt(np.sum(self.hdul_crop["STOKES_COV"].data[0, 2][mask] ** 2))
         QU_diluted_err = np.sqrt(np.sum(self.hdul_crop["STOKES_COV"].data[1, 2][mask] ** 2))
+        I_diluted_stat_err = np.sqrt(np.sum(self.hdul_crop["STOKES_COV_STAT"].data[0, 0][mask]))
+        Q_diluted_stat_err = np.sqrt(np.sum(self.hdul_crop["STOKES_COV_STAT"].data[1, 1][mask]))
+        U_diluted_stat_err = np.sqrt(np.sum(self.hdul_crop["STOKES_COV_STAT"].data[2, 2][mask]))
+        IQ_diluted_stat_err = np.sqrt(np.sum(self.hdul_crop["STOKES_COV_STAT"].data[0, 1][mask] ** 2))
+        IU_diluted_stat_err = np.sqrt(np.sum(self.hdul_crop["STOKES_COV_STAT"].data[0, 2][mask] ** 2))
+        QU_diluted_stat_err = np.sqrt(np.sum(self.hdul_crop["STOKES_COV_STAT"].data[1, 2][mask] ** 2))

         P_diluted = np.sqrt(Q_diluted**2 + U_diluted**2) / I_diluted
         P_diluted_err = (1.0 / I_diluted) * np.sqrt(
@@ -2150,6 +2156,18 @@ class crop_Stokes(crop_map):
             - 2.0 * (Q_diluted / I_diluted) * IQ_diluted_err
             - 2.0 * (U_diluted / I_diluted) * IU_diluted_err
         )
+        P_diluted_stat_err = (
+            P_diluted
+            / I_diluted
+            * np.sqrt(
+                I_diluted_stat_err
+                - 2.0 / (I_diluted * P_diluted**2) * (Q_diluted * IQ_diluted_stat_err + U_diluted * IU_diluted_stat_err)
+                + 1.0
+                / (I_diluted**2 * P_diluted**4)
+                * (Q_diluted**2 * Q_diluted_stat_err + U_diluted**2 * U_diluted_stat_err + 2.0 * Q_diluted * U_diluted * QU_diluted_stat_err)
+            )
+        )
+        debiased_P_diluted = np.sqrt(P_diluted**2 - P_diluted_stat_err**2) if P_diluted**2 > P_diluted_stat_err**2 else 0.0
         PA_diluted = princ_angle((90.0 / np.pi) * np.arctan2(U_diluted, Q_diluted))
         PA_diluted_err = (90.0 / (np.pi * (Q_diluted**2 + U_diluted**2))) * np.sqrt(
@@ -2159,7 +2177,7 @@ class crop_Stokes(crop_map):
         for dataset in self.hdul_crop:
             if dataset.header["FILENAME"][-4:] != "crop":
                 dataset.header["FILENAME"] += "_crop"
-            dataset.header["P_int"] = (P_diluted, "Integrated polarization degree")
+            dataset.header["P_int"] = (debiased_P_diluted, "Integrated polarization degree")
             dataset.header["sP_int"] = (np.ceil(P_diluted_err * 1000.0) / 1000.0, "Integrated polarization degree error")
             dataset.header["PA_int"] = (PA_diluted, "Integrated polarization angle")
             dataset.header["sPA_int"] = (np.ceil(PA_diluted_err * 10.0) / 10.0, "Integrated polarization angle error")
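The statistical uncertainty and debiasing introduced above follow first-order propagation of the summed STOKES_COV_STAT entries; written out, with each sigma term standing for the corresponding *_stat_err sum in the code:

    \sigma_{P,\mathrm{stat}} = \frac{P}{I}\sqrt{\sigma_{II}
      - \frac{2}{I\,P^{2}}\left(Q\,\sigma_{IQ} + U\,\sigma_{IU}\right)
      + \frac{1}{I^{2}P^{4}}\left(Q^{2}\sigma_{QQ} + U^{2}\sigma_{UU} + 2\,Q\,U\,\sigma_{QU}\right)},
    \qquad
    P_{\mathrm{debiased}} =
    \begin{cases}
      \sqrt{P^{2} - \sigma_{P,\mathrm{stat}}^{2}} & P > \sigma_{P,\mathrm{stat}} \\
      0 & \text{otherwise}
    \end{cases}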
@@ -2492,7 +2510,7 @@ class pol_map(object):
         ax_vec_sc = self.fig.add_axes([0.260, 0.030, 0.090, 0.01])
         ax_snr_reset = self.fig.add_axes([0.060, 0.020, 0.05, 0.02])
         ax_snr_conf = self.fig.add_axes([0.115, 0.020, 0.05, 0.02])
-        SNRi_max = np.max(self.I[self.IQU_cov[0, 0] > 0.0] / np.sqrt(self.IQU_cov[0, 0][self.IQU_cov[0, 0] > 0.0]))
+        SNRi_max = np.max(self.I[self.Stokes_cov[0, 0] > 0.0] / np.sqrt(self.Stokes_cov[0, 0][self.Stokes_cov[0, 0] > 0.0]))
         SNRp_max = np.max(self.P[self.P_ERR > 0.0] / self.P_ERR[self.P_ERR > 0.0])
         s_I_cut = Slider(ax_I_cut, r"$SNR^{I}_{cut}$", 1.0, int(SNRi_max * 0.95), valstep=0.5, valinit=self.SNRi_cut)
         self.P_ERR_cut = Slider(
@@ -2988,9 +3006,13 @@ class pol_map(object):
         return self.U_ERR / np.where(self.I > 0, self.I, np.nan)

     @property
-    def IQU_cov(self):
+    def Stokes_cov(self):
         return self.Stokes["STOKES_COV"].data

+    @property
+    def Stokes_cov_stat(self):
+        return self.Stokes["STOKES_COV_STAT"].data
+
     @property
     def P(self):
         return self.Stokes["POL_DEG_DEBIASED"].data
@@ -3016,7 +3038,7 @@ class pol_map(object):
     @property
     def cut(self):
-        s_I = np.sqrt(self.IQU_cov[0, 0])
+        s_I = np.sqrt(self.Stokes_cov[0, 0])
         SNRp_mask, SNRi_mask = (np.zeros(self.P.shape).astype(bool), np.zeros(self.I.shape).astype(bool))
         SNRi_mask[s_I > 0.0] = self.I[s_I > 0.0] / s_I[s_I > 0.0] > self.SNRi
         if self.SNRp >= 1.0:
@@ -3159,7 +3181,7 @@ class pol_map(object):
             kwargs["alpha"] = 1.0 - 0.75 * (self.P < self.P_ERR)
             label = r"$\theta_{P}$ [°]"
         elif self.display_selection.lower() in ["snri"]:
-            s_I = np.sqrt(self.IQU_cov[0, 0])
+            s_I = np.sqrt(self.Stokes_cov[0, 0])
             SNRi = np.zeros(self.I.shape)
             SNRi[s_I > 0.0] = self.I[s_I > 0.0] / s_I[s_I > 0.0]
             self.data = SNRi
@@ -3354,33 +3376,33 @@ class pol_map(object):
         )
         fig.canvas.draw_idle()

-    def pol_int(self, fig=None, ax=None):
+    def pol_int(self, fig=None, ax=None, cut=False):
         str_conf = ""
         if self.region is None:
-            s_I = np.sqrt(self.IQU_cov[0, 0])
+            s_I = np.sqrt(self.Stokes_cov[0, 0])
             I_reg = self.I.sum()
             I_reg_err = np.sqrt(np.sum(s_I**2))
-            P_reg = self.Stokes[0].header["P_int"]
+            debiased_P_reg = self.Stokes[0].header["P_int"]
             P_reg_err = self.Stokes[0].header["sP_int"]
             PA_reg = self.Stokes[0].header["PA_int"]
             PA_reg_err = self.Stokes[0].header["sPA_int"]
-            s_I = np.sqrt(self.IQU_cov[0, 0])
-            s_Q = np.sqrt(self.IQU_cov[1, 1])
-            s_U = np.sqrt(self.IQU_cov[2, 2])
-            s_IQ = self.IQU_cov[0, 1]
-            s_IU = self.IQU_cov[0, 2]
-            s_QU = self.IQU_cov[1, 2]
-            I_cut = self.I[self.cut].sum()
-            Q_cut = self.Q[self.cut].sum()
-            U_cut = self.U[self.cut].sum()
-            I_cut_err = np.sqrt(np.sum(s_I[self.cut] ** 2))
-            Q_cut_err = np.sqrt(np.sum(s_Q[self.cut] ** 2))
-            U_cut_err = np.sqrt(np.sum(s_U[self.cut] ** 2))
-            IQ_cut_err = np.sqrt(np.sum(s_IQ[self.cut] ** 2))
-            IU_cut_err = np.sqrt(np.sum(s_IU[self.cut] ** 2))
-            QU_cut_err = np.sqrt(np.sum(s_QU[self.cut] ** 2))
+            if cut:
+                I_cut = self.I[self.cut].sum()
+                Q_cut = self.Q[self.cut].sum()
+                U_cut = self.U[self.cut].sum()
+                I_cut_err = np.sqrt(np.sum(self.Stokes_cov[0, 0][self.cut]))
+                Q_cut_err = np.sqrt(np.sum(self.Stokes_cov[1, 1][self.cut]))
+                U_cut_err = np.sqrt(np.sum(self.Stokes_cov[2, 2][self.cut]))
+                IQ_cut_err = np.sqrt(np.sum(self.Stokes_cov[0, 1][self.cut] ** 2))
+                IU_cut_err = np.sqrt(np.sum(self.Stokes_cov[0, 2][self.cut] ** 2))
+                QU_cut_err = np.sqrt(np.sum(self.Stokes_cov[1, 2][self.cut] ** 2))
+                I_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[0, 0][self.cut]))
+                Q_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[1, 1][self.cut]))
+                U_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[2, 2][self.cut]))
+                IQ_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[0, 1][self.cut] ** 2))
+                IU_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[0, 2][self.cut] ** 2))
+                QU_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[1, 2][self.cut] ** 2))

             with np.errstate(divide="ignore", invalid="ignore"):
                 P_cut = np.sqrt(Q_cut**2 + U_cut**2) / I_cut
@@ -3393,6 +3415,16 @@ class pol_map(object):
                 )
                 / I_cut
             )
+            P_cut_stat_err = (
+                P_cut
+                / I_cut
+                * np.sqrt(
+                    I_cut_stat_err
+                    - 2.0 / (I_cut * P_cut**2) * (Q_cut * IQ_cut_stat_err + U_cut * IU_cut_stat_err)
+                    + 1.0 / (I_cut**2 * P_cut**4) * (Q_cut**2 * Q_cut_stat_err + U_cut**2 * U_cut_stat_err + 2.0 * Q_cut * U_cut * QU_cut_stat_err)
+                )
+            )
+            debiased_P_cut = np.sqrt(P_cut**2 - P_cut_stat_err**2) if P_cut**2 > P_cut_stat_err**2 else 0.0
             PA_cut = princ_angle((90.0 / np.pi) * np.arctan2(U_cut, Q_cut))
             PA_cut_err = (90.0 / (np.pi * (Q_cut**2 + U_cut**2))) * np.sqrt(
@@ -3400,22 +3432,21 @@ class pol_map(object):
             )
         else:
-            s_I = np.sqrt(self.IQU_cov[0, 0])
-            s_Q = np.sqrt(self.IQU_cov[1, 1])
-            s_U = np.sqrt(self.IQU_cov[2, 2])
-            s_IQ = self.IQU_cov[0, 1]
-            s_IU = self.IQU_cov[0, 2]
-            s_QU = self.IQU_cov[1, 2]
             I_reg = self.I[self.region].sum()
             Q_reg = self.Q[self.region].sum()
             U_reg = self.U[self.region].sum()
-            I_reg_err = np.sqrt(np.sum(s_I[self.region] ** 2))
-            Q_reg_err = np.sqrt(np.sum(s_Q[self.region] ** 2))
-            U_reg_err = np.sqrt(np.sum(s_U[self.region] ** 2))
-            IQ_reg_err = np.sqrt(np.sum(s_IQ[self.region] ** 2))
-            IU_reg_err = np.sqrt(np.sum(s_IU[self.region] ** 2))
-            QU_reg_err = np.sqrt(np.sum(s_QU[self.region] ** 2))
+            I_reg_err = np.sqrt(np.sum(self.Stokes_cov[0, 0][self.region]))
+            Q_reg_err = np.sqrt(np.sum(self.Stokes_cov[1, 1][self.region]))
+            U_reg_err = np.sqrt(np.sum(self.Stokes_cov[2, 2][self.region]))
+            IQ_reg_err = np.sqrt(np.sum(self.Stokes_cov[0, 1][self.region] ** 2))
+            IU_reg_err = np.sqrt(np.sum(self.Stokes_cov[0, 2][self.region] ** 2))
+            QU_reg_err = np.sqrt(np.sum(self.Stokes_cov[1, 2][self.region] ** 2))
+            I_reg_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[0, 0][self.region]))
+            Q_reg_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[1, 1][self.region]))
+            U_reg_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[2, 2][self.region]))
+            IQ_reg_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[0, 1][self.region] ** 2))
+            IU_reg_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[0, 2][self.region] ** 2))
+            QU_reg_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[1, 2][self.region] ** 2))

             conf = PCconf(QN=Q_reg / I_reg, QN_ERR=Q_reg_err / I_reg, UN=U_reg / I_reg, UN_ERR=U_reg_err / I_reg)
             if 1.0 - conf > 1e-3:
@@ -3432,22 +3463,39 @@ class pol_map(object):
                 )
                 / I_reg
             )
+            P_reg_stat_err = (
+                P_reg
+                / I_reg
+                * np.sqrt(
+                    I_reg_stat_err
+                    - 2.0 / (I_reg * P_reg**2) * (Q_reg * IQ_reg_stat_err + U_reg * IU_reg_stat_err)
+                    + 1.0 / (I_reg**2 * P_reg**4) * (Q_reg**2 * Q_reg_stat_err + U_reg**2 * U_reg_stat_err + 2.0 * Q_reg * U_reg * QU_reg_stat_err)
+                )
+            )
+            debiased_P_reg = np.sqrt(P_reg**2 - P_reg_stat_err**2) if P_reg**2 > P_reg_stat_err**2 else 0.0
             PA_reg = princ_angle((90.0 / np.pi) * np.arctan2(U_reg, Q_reg))
             PA_reg_err = (90.0 / (np.pi * (Q_reg**2 + U_reg**2))) * np.sqrt(
                 U_reg**2 * Q_reg_err**2 + Q_reg**2 * U_reg_err**2 - 2.0 * Q_reg * U_reg * QU_reg_err
             )

-            new_cut = np.logical_and(self.region, self.cut)
-            I_cut = self.I[new_cut].sum()
-            Q_cut = self.Q[new_cut].sum()
-            U_cut = self.U[new_cut].sum()
-            I_cut_err = np.sqrt(np.sum(s_I[new_cut] ** 2))
-            Q_cut_err = np.sqrt(np.sum(s_Q[new_cut] ** 2))
-            U_cut_err = np.sqrt(np.sum(s_U[new_cut] ** 2))
-            IQ_cut_err = np.sqrt(np.sum(s_IQ[new_cut] ** 2))
-            IU_cut_err = np.sqrt(np.sum(s_IU[new_cut] ** 2))
-            QU_cut_err = np.sqrt(np.sum(s_QU[new_cut] ** 2))
+            if cut:
+                new_cut = np.logical_and(self.region, self.cut)
+                I_cut = self.I[new_cut].sum()
+                Q_cut = self.Q[new_cut].sum()
+                U_cut = self.U[new_cut].sum()
+                I_cut_err = np.sqrt(np.sum(self.Stokes_cov[0, 0][new_cut]))
+                Q_cut_err = np.sqrt(np.sum(self.Stokes_cov[1, 1][new_cut]))
+                U_cut_err = np.sqrt(np.sum(self.Stokes_cov[2, 2][new_cut]))
+                IQ_cut_err = np.sqrt(np.sum(self.Stokes_cov[0, 1][new_cut] ** 2))
+                IU_cut_err = np.sqrt(np.sum(self.Stokes_cov[0, 2][new_cut] ** 2))
+                QU_cut_err = np.sqrt(np.sum(self.Stokes_cov[1, 2][new_cut] ** 2))
+                I_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[0, 0][new_cut]))
+                Q_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[1, 1][new_cut]))
+                U_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[2, 2][new_cut]))
+                IQ_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[0, 1][new_cut] ** 2))
+                IU_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[0, 2][new_cut] ** 2))
+                QU_cut_stat_err = np.sqrt(np.sum(self.Stokes_cov_stat[1, 2][new_cut] ** 2))

             with np.errstate(divide="ignore", invalid="ignore"):
                 P_cut = np.sqrt(Q_cut**2 + U_cut**2) / I_cut
@@ -3460,6 +3508,16 @@ class pol_map(object):
                 )
                 / I_cut
             )
+            P_cut_stat_err = (
+                P_cut
+                / I_cut
+                * np.sqrt(
+                    I_cut_stat_err
+                    - 2.0 / (I_cut * P_cut**2) * (Q_cut * IQ_cut_stat_err + U_cut * IU_cut_stat_err)
+                    + 1.0 / (I_cut**2 * P_cut**4) * (Q_cut**2 * Q_cut_stat_err + U_cut**2 * U_cut_stat_err + 2.0 * Q_cut * U_cut * QU_cut_stat_err)
+                )
+            )
+            debiased_P_cut = np.sqrt(P_cut**2 - P_cut_stat_err**2) if P_cut**2 > P_cut_stat_err**2 else 0.0
             PA_cut = princ_angle((90.0 / np.pi) * np.arctan2(U_cut, Q_cut))
             PA_cut_err = (90.0 / (np.pi * (Q_cut**2 + U_cut**2))) * np.sqrt(
@@ -3480,22 +3538,23 @@ class pol_map(object):
                     self.pivot_wav, sci_not(I_reg * self.map_convert, I_reg_err * self.map_convert, 2)
                 )
                 + "\n"
-                + r"$P^{{int}}$ = {0:.1f} $\pm$ {1:.1f} %".format(P_reg * 100.0, np.ceil(P_reg_err * 1000.0) / 10.0)
+                + r"$P^{{int}}$ = {0:.1f} $\pm$ {1:.1f} %".format(debiased_P_reg * 100.0, np.ceil(P_reg_err * 1000.0) / 10.0)
                 + "\n"
                 + r"$\Psi^{{int}}$ = {0:.1f} $\pm$ {1:.1f} °".format(PA_reg, np.ceil(PA_reg_err * 10.0) / 10.0)
                 + str_conf
             )
             self.str_cut = ""
-            # self.str_cut = (
-            #     "\n"
-            #     + r"$F_{{\lambda}}^{{cut}}$({0:.0f} $\AA$) = {1} $ergs \cdot cm^{{-2}} \cdot s^{{-1}} \cdot \AA^{{-1}}$".format(
-            #         self.pivot_wav, sci_not(I_cut * self.map_convert, I_cut_err * self.map_convert, 2)
-            #     )
-            #     + "\n"
-            #     + r"$P^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} %".format(P_cut * 100.0, np.ceil(P_cut_err * 1000.0) / 10.0)
-            #     + "\n"
-            #     + r"$\Psi^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} °".format(PA_cut, np.ceil(PA_cut_err * 10.0) / 10.0)
-            # )
+            if cut:
+                self.str_cut = (
+                    "\n"
+                    + r"$F_{{\lambda}}^{{cut}}$({0:.0f} $\AA$) = {1} $ergs \cdot cm^{{-2}} \cdot s^{{-1}} \cdot \AA^{{-1}}$".format(
+                        self.pivot_wav, sci_not(I_cut * self.map_convert, I_cut_err * self.map_convert, 2)
+                    )
+                    + "\n"
+                    + r"$P^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} %".format(debiased_P_cut * 100.0, np.ceil(P_cut_err * 1000.0) / 10.0)
+                    + "\n"
+                    + r"$\Psi^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} °".format(PA_cut, np.ceil(PA_cut_err * 10.0) / 10.0)
+                )
             self.an_int = ax.annotate(
                 self.str_int + self.str_cut,
                 color="white",
@@ -3522,16 +3581,17 @@ class pol_map(object):
                 + str_conf
             )
             str_cut = ""
-            # str_cut = (
-            #     "\n"
-            #     + r"$F_{{\lambda}}^{{cut}}$({0:.0f} $\AA$) = {1} $ergs \cdot cm^{{-2}} \cdot s^{{-1}} \cdot \AA^{{-1}}$".format(
-            #         self.pivot_wav, sci_not(I_cut * self.map_convert, I_cut_err * self.map_convert, 2)
-            #     )
-            #     + "\n"
-            #     + r"$P^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} %".format(P_cut * 100.0, np.ceil(P_cut_err * 1000.0) / 10.0)
-            #     + "\n"
-            #     + r"$\Psi^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} °".format(PA_cut, np.ceil(PA_cut_err * 10.0) / 10.0)
-            # )
+            if cut:
+                str_cut = (
+                    "\n"
+                    + r"$F_{{\lambda}}^{{cut}}$({0:.0f} $\AA$) = {1} $ergs \cdot cm^{{-2}} \cdot s^{{-1}} \cdot \AA^{{-1}}$".format(
+                        self.pivot_wav, sci_not(I_cut * self.map_convert, I_cut_err * self.map_convert, 2)
+                    )
+                    + "\n"
+                    + r"$P^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} %".format(P_cut * 100.0, np.ceil(P_cut_err * 1000.0) / 10.0)
+                    + "\n"
+                    + r"$\Psi^{{cut}}$ = {0:.1f} $\pm$ {1:.1f} °".format(PA_cut, np.ceil(PA_cut_err * 10.0) / 10.0)
+                )
             ax.annotate(
                 str_int + str_cut,
                 color="white",
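The SNR cut that feeds these integrated "cut" quantities is the one built in the cut property above; a minimal standalone sketch of its intensity criterion, with illustrative names:

    import numpy as np

    def snri_mask(I, stokes_cov, snr_cut):
        # Mirror of pol_map.cut: a pixel passes when I / sqrt(Cov[0, 0])
        # exceeds the chosen threshold; pixels with no variance are excluded.
        s_I = np.sqrt(stokes_cov[0, 0])
        mask = np.zeros(I.shape, dtype=bool)
        valid = s_I > 0.0
        mask[valid] = I[valid] / s_I[valid] > snr_cut
        return mask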

View File

@@ -1301,12 +1301,12 @@ def compute_Stokes(data_array, error_array, data_mask, headers, FWHM=None, scale
     # Statistical error: Poisson noise is assumed
     sigma_flux = np.array([np.sqrt(flux / head["exptime"]) for flux, head in zip(pol_flux, pol_headers)])
-    s_IQU_stat = np.zeros(Stokes_cov.shape)
+    Stokes_cov_stat = np.zeros(Stokes_cov.shape)
     for i in range(3):
-        s_IQU_stat[i, i] = np.sum([coeff_stokes[i, k] ** 2 * sigma_flux[k] ** 2 for k in range(len(sigma_flux))], axis=0)
+        Stokes_cov_stat[i, i] = np.sum([coeff_stokes[i, k] ** 2 * sigma_flux[k] ** 2 for k in range(len(sigma_flux))], axis=0)
         for j in [k for k in range(3) if k > i]:
-            s_IQU_stat[i, j] = np.sum([coeff_stokes[i, k] * coeff_stokes[j, k] * sigma_flux[k] ** 2 for k in range(len(sigma_flux))], axis=0)
-            s_IQU_stat[j, i] = np.sum([coeff_stokes[i, k] * coeff_stokes[j, k] * sigma_flux[k] ** 2 for k in range(len(sigma_flux))], axis=0)
+            Stokes_cov_stat[i, j] = np.sum([coeff_stokes[i, k] * coeff_stokes[j, k] * sigma_flux[k] ** 2 for k in range(len(sigma_flux))], axis=0)
+            Stokes_cov_stat[j, i] = np.sum([coeff_stokes[i, k] * coeff_stokes[j, k] * sigma_flux[k] ** 2 for k in range(len(sigma_flux))], axis=0)

     # Compute the derivative of each Stokes parameter with respect to the polarizer orientation
     dIQU_dtheta = np.zeros(Stokes_cov.shape)
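Equivalently, the double loop that fills Stokes_cov_stat can be read as a single contraction over the demodulation coefficients; a standalone sketch with illustrative shapes:

    import numpy as np

    rng = np.random.default_rng(0)
    n_img, ny, nx = 4, 8, 8                         # illustrative: 4 polarizer images of 8x8 pixels
    coeff_stokes = rng.normal(size=(3, n_img))      # stands in for the real demodulation matrix
    sigma_flux = rng.uniform(size=(n_img, ny, nx))  # stands in for the Poisson noise per image

    # Same quantity as the loop above:
    # Stokes_cov_stat[i, j] = sum_k coeff_stokes[i, k] * coeff_stokes[j, k] * sigma_flux[k] ** 2
    Stokes_cov_stat = np.einsum("ik,jk,kyx->ijyx", coeff_stokes, coeff_stokes, sigma_flux**2)
    assert Stokes_cov_stat.shape == (3, 3, ny, nx)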
@@ -1359,19 +1359,19 @@ def compute_Stokes(data_array, error_array, data_mask, headers, FWHM=None, scale
     )

     # Compute the uncertainty associated with the polarizers' orientation (see Kishimoto 1999)
-    s_IQU_axis = np.zeros(Stokes_cov.shape)
+    Stokes_cov_axis = np.zeros(Stokes_cov.shape)
     for i in range(3):
-        s_IQU_axis[i, i] = np.sum([dIQU_dtheta[i, k] ** 2 * globals()["sigma_theta"][k] ** 2 for k in range(len(globals()["sigma_theta"]))], axis=0)
+        Stokes_cov_axis[i, i] = np.sum([dIQU_dtheta[i, k] ** 2 * globals()["sigma_theta"][k] ** 2 for k in range(len(globals()["sigma_theta"]))], axis=0)
         for j in [k for k in range(3) if k > i]:
-            s_IQU_axis[i, j] = np.sum(
+            Stokes_cov_axis[i, j] = np.sum(
                 [dIQU_dtheta[i, k] * dIQU_dtheta[j, k] * globals()["sigma_theta"][k] ** 2 for k in range(len(globals()["sigma_theta"]))], axis=0
             )
-            s_IQU_axis[j, i] = np.sum(
+            Stokes_cov_axis[j, i] = np.sum(
                 [dIQU_dtheta[i, k] * dIQU_dtheta[j, k] * globals()["sigma_theta"][k] ** 2 for k in range(len(globals()["sigma_theta"]))], axis=0
             )

     # Add quadratically the uncertainty to the Stokes covariance matrix
-    Stokes_cov += s_IQU_axis + s_IQU_stat
+    Stokes_cov += Stokes_cov_axis + Stokes_cov_stat

     # Save values to single header
     header_stokes = pol_headers[0]
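The covariance written to STOKES_COV therefore stacks three quadrature contributions, while the Poisson term is also returned on its own for the debiasing step:

    \Sigma_{IQU} = \Sigma_{\mathrm{prop}} + \Sigma_{\mathrm{axis}} + \Sigma_{\mathrm{stat}}

where Sigma_prop is the covariance propagated earlier in compute_Stokes from the per-image error maps, Sigma_axis accounts for the polarizers' orientation uncertainty (Kishimoto 1999), and Sigma_stat is the Poisson term above (Stokes_cov_stat).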
@@ -1445,10 +1445,10 @@ def compute_Stokes(data_array, error_array, data_mask, headers, FWHM=None, scale
     header_stokes["PA_int"] = (PA_diluted, "Integrated polarization angle")
     header_stokes["sPA_int"] = (np.ceil(PA_diluted_err * 10.0) / 10.0, "Integrated polarization angle error")

-    return Stokes, Stokes_cov, header_stokes, s_IQU_stat
+    return Stokes, Stokes_cov, header_stokes, Stokes_cov_stat


-def compute_pol(Stokes, Stokes_cov, header_stokes, s_IQU_stat=None):
+def compute_pol(Stokes, Stokes_cov, header_stokes, Stokes_cov_stat=None):
     """
     Compute the polarization degree (in %) and angle (in deg) and their
     respective errors from given Stokes parameters.
@@ -1524,7 +1524,7 @@ def compute_pol(Stokes, Stokes_cov, header_stokes, s_IQU_stat=None):
     s_P_P = np.ones(Stokes[0].shape) * fmax
     s_PA_P = np.ones(Stokes[0].shape) * fmax
     maskP = np.logical_and(mask, P > 0.0)
-    if s_IQU_stat is not None:
+    if Stokes_cov_stat is not None:
         # If an IQU covariance matrix containing only the statistical error is given, propagate it to P and PA
         # Catch invalid value in sqrt when diagonal terms are big
         with warnings.catch_warnings(record=True) as _:
@@ -1532,13 +1532,15 @@ def compute_pol(Stokes, Stokes_cov, header_stokes, s_IQU_stat=None):
                 P[maskP]
                 / Stokes[0][maskP]
                 * np.sqrt(
-                    s_IQU_stat[0, 0][maskP]
-                    - 2.0 / (Stokes[0][maskP] * P[maskP] ** 2) * (Stokes[1][maskP] * s_IQU_stat[0, 1][maskP] + Stokes[2][maskP] * s_IQU_stat[0, 2][maskP])
+                    Stokes_cov_stat[0, 0][maskP]
+                    - 2.0
+                    / (Stokes[0][maskP] * P[maskP] ** 2)
+                    * (Stokes[1][maskP] * Stokes_cov_stat[0, 1][maskP] + Stokes[2][maskP] * Stokes_cov_stat[0, 2][maskP])
                     + 1.0
                     / (Stokes[0][maskP] ** 2 * P[maskP] ** 4)
                     * (
-                        Stokes[1][maskP] ** 2 * s_IQU_stat[1, 1][maskP]
-                        + Stokes[2][maskP] ** 2 * s_IQU_stat[2, 2][maskP] + 2.0 * Stokes[1][maskP] * Stokes[2][maskP] * s_IQU_stat[1, 2][maskP]
+                        Stokes[1][maskP] ** 2 * Stokes_cov_stat[1, 1][maskP]
+                        + Stokes[2][maskP] ** 2 * Stokes_cov_stat[2, 2][maskP] + 2.0 * Stokes[1][maskP] * Stokes[2][maskP] * Stokes_cov_stat[1, 2][maskP]
                     )
                 )
             )
@@ -1546,9 +1548,9 @@ def compute_pol(Stokes, Stokes_cov, header_stokes, s_IQU_stat=None):
                 90.0
                 / (np.pi * Stokes[0][maskP] ** 2 * P[maskP] ** 2)
                 * (
-                    Stokes[1][maskP] ** 2 * s_IQU_stat[2, 2][maskP]
-                    + Stokes[2][maskP] ** 2 * s_IQU_stat[1, 1][maskP]
-                    - 2.0 * Stokes[1][maskP] * Stokes[2][maskP] * s_IQU_stat[1, 2][maskP]
+                    Stokes[1][maskP] ** 2 * Stokes_cov_stat[2, 2][maskP]
+                    + Stokes[2][maskP] ** 2 * Stokes_cov_stat[1, 1][maskP]
+                    - 2.0 * Stokes[1][maskP] * Stokes[2][maskP] * Stokes_cov_stat[1, 2][maskP]
                 )
             )
     else:
@@ -1576,7 +1578,7 @@ def compute_pol(Stokes, Stokes_cov, header_stokes, s_IQU_stat=None):
     return P, debiased_P, s_P, s_P_P, PA, s_PA, s_PA_P


-def rotate_Stokes(Stokes, Stokes_cov, data_mask, header_stokes, s_IQU_stat=None, SNRi_cut=None):
+def rotate_Stokes(Stokes, Stokes_cov, data_mask, header_stokes, Stokes_cov_stat=None, SNRi_cut=None):
     """
     Use scipy.ndimage.rotate to rotate I_stokes to an angle, and a rotation
     matrix to rotate Q, U by a given angle in degrees and update header
@@ -1642,16 +1644,16 @@ def rotate_Stokes(Stokes, Stokes_cov, data_mask, header_stokes, s_IQU_stat=None,
                 new_Stokes[:3, i, j] = np.dot(mrot, new_Stokes[:3, i, j])
                 new_Stokes_cov[:3, :3, i, j] = np.dot(mrot, np.dot(new_Stokes_cov[:3, :3, i, j], mrot.T))

-    if s_IQU_stat is not None:
-        s_IQU_stat = zeropad(s_IQU_stat, [*s_IQU_stat.shape[:-2], *shape])
-        new_s_IQU_stat = np.zeros((*s_IQU_stat.shape[:-2], *shape))
+    if Stokes_cov_stat is not None:
+        Stokes_cov_stat = zeropad(Stokes_cov_stat, [*Stokes_cov_stat.shape[:-2], *shape])
+        new_Stokes_cov_stat = np.zeros((*Stokes_cov_stat.shape[:-2], *shape))
         for i in range(3):
             for j in range(3):
-                new_s_IQU_stat[i, j] = sc_rotate(s_IQU_stat[i, j], ang, order=1, reshape=False, cval=0.0)
-                new_s_IQU_stat[i, i] = np.abs(new_s_IQU_stat[i, i])
+                new_Stokes_cov_stat[i, j] = sc_rotate(Stokes_cov_stat[i, j], ang, order=1, reshape=False, cval=0.0)
+                new_Stokes_cov_stat[i, i] = np.abs(new_Stokes_cov_stat[i, i])
         for i in range(shape[0]):
             for j in range(shape[1]):
-                new_s_IQU_stat[:3, :3, i, j] = np.dot(mrot, np.dot(new_s_IQU_stat[:3, :3, i, j], mrot.T))
+                new_Stokes_cov_stat[:3, :3, i, j] = np.dot(mrot, np.dot(new_Stokes_cov_stat[:3, :3, i, j], mrot.T))

     # Update headers to new angle
     mrot = np.array([[np.cos(-alpha), -np.sin(-alpha)], [np.sin(-alpha), np.cos(-alpha)]])
@@ -1701,8 +1703,8 @@ def rotate_Stokes(Stokes, Stokes_cov, data_mask, header_stokes, s_IQU_stat=None,
     new_header_stokes["PA_int"] = (PA_diluted, "Integrated polarization angle")
     new_header_stokes["sPA_int"] = (np.ceil(PA_diluted_err * 10.0) / 10.0, "Integrated polarization angle error")

-    if s_IQU_stat is not None:
-        return new_Stokes, new_Stokes_cov, new_data_mask, new_header_stokes, new_s_IQU_stat
+    if Stokes_cov_stat is not None:
+        return new_Stokes, new_Stokes_cov, new_data_mask, new_header_stokes, new_Stokes_cov_stat
     else:
         return new_Stokes, new_Stokes_cov, new_data_mask, new_header_stokes